lang
stringclasses
2 values
license
stringclasses
13 values
stderr
stringlengths
0
343
commit
stringlengths
40
40
returncode
int64
0
128
repos
stringlengths
6
87.7k
new_contents
stringlengths
0
6.23M
new_file
stringlengths
3
311
old_contents
stringlengths
0
6.23M
message
stringlengths
6
9.1k
old_file
stringlengths
3
311
subject
stringlengths
0
4k
git_diff
stringlengths
0
6.31M
JavaScript
mit
39b95aa190fe60f5107c56a42f2cd88271269339
0
johniek/meteor-rock,johniek/meteor-rock
ca0d2cb0-2fbc-11e5-b64f-64700227155b
helloWorld.js
ca0b8e1e-2fbc-11e5-b64f-64700227155b
ca0d2cb0-2fbc-11e5-b64f-64700227155b
helloWorld.js
ca0d2cb0-2fbc-11e5-b64f-64700227155b
<ide><path>elloWorld.js <del>ca0b8e1e-2fbc-11e5-b64f-64700227155b <add>ca0d2cb0-2fbc-11e5-b64f-64700227155b
JavaScript
mit
45a88ac1f55a69b1f55450ad58a00ac388401f15
0
kidney/static-build,kidney/guido,kidney/guido,kidney/static-build,zuzucheFE/guido
/** * 资源注入到html * * webpack.config.js: * html: { * injectAsset: function(assetFile) { * return * } * } * * output files: * dist/ * css/ * commons~pageA~pageB~pageD-chunk-327fe913.css * js/ * commons~pageA~pageB~pageC~pageD-chunk-2b8836d9.js * commons~pageA~pageB~pageD-chunk-3e461ae1.js * pageA-0476b1b3.js * vendor-chunk-66e793b2.js * * hbs tpl: * <html> * <head> * {{{assets.entryA.css.toComboTag}}} * </head> * <body> * {{{assets.entryA.js.toComboTag}}} * {{#each assets.entryA.js}} * <script src="{{url}}"></script> * {{/each}} * </body> * </html> * * output html: * <html> * <head> * <link rel="stylesheet" href="../css/commons~pageA~pageB~pageD-chunk-327fe913.css"> * </head> * <body> * <script src="../js/vendor-chunk-66e793b2.js"></script> * <script src="../js/commons~pageA~pageB~pageC~pageD-chunk-2b8836d9.js"></script> * <script src="../js/commons~pageA~pageB~pageD-chunk-3e461ae1.js"></script> * <script src="../js/pageA-0476b1b3.js"></script> * </body> * </html> * */ 'use strict'; const path = require('path'); const HtmlWebpackPlugin = require('html-webpack-plugin'); const htmlTags = require('html-webpack-plugin/lib/html-tags'); const TypeOf = require('./typeof'); const includeAssetsHtmlPluginName = 'IncludeAssetsHtmlPlugin'; function injectAsset(file) { return file; } class IncludeAssetsHtmlPlugin { constructor(options = {}) { const userOptions = options || {}; const defaultOptions = { inject: injectAsset }; this.options = Object.assign(defaultOptions, userOptions); } templateParametersGenerator(compilation, assets, assetTags, options) { let self = this; let publicPath = assets.publicPath || ''; if (publicPath !== '' && !publicPath.endsWith('/')) { publicPath += '/'; } const xhtml = options.xhtml; const inject = self.options.inject; const crossOriginLoading = compilation.options.output.crossOriginLoading; let newAssetFiles = {}; compilation.entrypoints.forEach((EntryPoint) => { let entryPointFiles = {js: [], css: []}; EntryPoint.chunks.forEach((chunk) => { let 
chunkName = chunk.name; chunk.files.forEach((file) => { let extName = path.extname(file).replace('.', ''); let tag = {}; let url = `${publicPath}${file}`; let source = compilation.assets[file].source(); if (extName === 'js') { tag = { tagName: 'script', voidTag: false }; if (TypeOf.isNumber(inject) && source.length < inject) { tag.innerHTML = source; } else { tag.attributes = { src: url }; if (crossOriginLoading !== false) { tag.attributes.crossorigin = crossOriginLoading; } } } else if (extName === 'css') { if (TypeOf.isNumber(inject) && source.length < inject) { tag = { tagName: 'style', voidTag: false, innerHTML: source }; } else { tag = { tagName: 'link', voidTag: true, attributes: { rel: 'stylesheet', href: url } }; } } if (tag.tagName) { let file = { chunkName, url, source, tag: htmlTags.htmlTagObjectToString(tag, xhtml) }; if (TypeOf.isFunction(inject)) { file = inject(file); } entryPointFiles[extName].push(file); } }); }); entryPointFiles.js.toComboTag = entryPointFiles.js.map((assetObj) => { return assetObj.tag; }).join('\n'); entryPointFiles.css.toComboTag = entryPointFiles.css.map((assetObj) => { return assetObj.tag; }).join('\n'); newAssetFiles[EntryPoint.name] = entryPointFiles; }); return { assets: newAssetFiles } } apply(compiler) { let self = this; compiler.hooks.compilation.tap(includeAssetsHtmlPluginName, (compilation) => { const hooks = HtmlWebpackPlugin.getHooks(compilation); hooks.beforeAssetTagGeneration.tapAsync( includeAssetsHtmlPluginName, (data, cb) => { data.plugin.options.templateParameters = ( compilation, assets, assetTags, options ) => { return self.templateParametersGenerator( compilation, assets, assetTags, options ); }; return cb(null, data); }); }); } } module.exports = IncludeAssetsHtmlPlugin;
lib/utils/includeAssetsHtmlPlugin.js
/** * 资源注入到html * * webpack.config.js: * html: { * injectAsset: function(assetFile) { * return * } * } * * output files: * dist/ * * * hbs tpl: * <html> * <head> * {{{assets.entryA.css.toComboTag}}} * </head> * <body> * {{{assets.entryA.js.toComboTag}}} * {{#each assets.entryA.js}} * <script src="{{url}}"></script> * {{/each}} * </body> * </html> */ 'use strict'; const path = require('path'); const HtmlWebpackPlugin = require('html-webpack-plugin'); const htmlTags = require('html-webpack-plugin/lib/html-tags'); const TypeOf = require('./typeof'); const includeAssetsHtmlPluginName = 'IncludeAssetsHtmlPlugin'; function injectAsset(file) { return file; } class IncludeAssetsHtmlPlugin { constructor(options = {}) { const userOptions = options || {}; const defaultOptions = { inject: injectAsset }; this.options = Object.assign(defaultOptions, userOptions); } templateParametersGenerator(compilation, assets, assetTags, options) { let self = this; let publicPath = assets.publicPath || ''; if (publicPath !== '' && !publicPath.endsWith('/')) { publicPath += '/'; } const xhtml = options.xhtml; const inject = self.options.inject; const crossOriginLoading = compilation.options.output.crossOriginLoading; let newAssetFiles = {}; compilation.entrypoints.forEach((EntryPoint) => { let entryPointFiles = {js: [], css: []}; EntryPoint.chunks.forEach((chunk) => { let chunkName = chunk.name; chunk.files.forEach((file) => { let extName = path.extname(file).replace('.', ''); let tag = {}; let url = `${publicPath}${file}`; let source = compilation.assets[file].source(); if (extName === 'js') { tag = { tagName: 'script', voidTag: false }; if (TypeOf.isNumber(inject) && source.length < inject) { tag.innerHTML = source; } else { tag.attributes = { src: url }; if (crossOriginLoading !== false) { tag.attributes.crossorigin = crossOriginLoading; } } } else if (extName === 'css') { if (TypeOf.isNumber(inject) && source.length < inject) { tag = { tagName: 'style', voidTag: false, innerHTML: source }; 
} else { tag = { tagName: 'link', voidTag: true, attributes: { rel: 'stylesheet', href: url } }; } } if (tag.tagName) { let file = { chunkName, url, source, tag: htmlTags.htmlTagObjectToString(tag, xhtml) }; if (TypeOf.isFunction(inject)) { file = inject(file); } entryPointFiles[extName].push(file); } }); }); entryPointFiles.js.toComboTag = entryPointFiles.js.map((assetObj) => { return assetObj.tag; }).join('\n'); entryPointFiles.css.toComboTag = entryPointFiles.css.map((assetObj) => { return assetObj.tag; }).join('\n'); newAssetFiles[EntryPoint.name] = entryPointFiles; }); return { assets: newAssetFiles } } apply(compiler) { let self = this; compiler.hooks.compilation.tap(includeAssetsHtmlPluginName, (compilation) => { const hooks = HtmlWebpackPlugin.getHooks(compilation); hooks.beforeAssetTagGeneration.tapAsync( includeAssetsHtmlPluginName, (data, cb) => { data.plugin.options.templateParameters = ( compilation, assets, assetTags, options ) => { return self.templateParametersGenerator( compilation, assets, assetTags, options ); }; return cb(null, data); }); }); } } module.exports = IncludeAssetsHtmlPlugin;
html注入扩展注释
lib/utils/includeAssetsHtmlPlugin.js
html注入扩展注释
<ide><path>ib/utils/includeAssetsHtmlPlugin.js <ide> * <ide> * output files: <ide> * dist/ <del> * <add> * css/ <add> * commons~pageA~pageB~pageD-chunk-327fe913.css <add> * js/ <add> * commons~pageA~pageB~pageC~pageD-chunk-2b8836d9.js <add> * commons~pageA~pageB~pageD-chunk-3e461ae1.js <add> * pageA-0476b1b3.js <add> * vendor-chunk-66e793b2.js <ide> * <ide> * hbs tpl: <ide> * <html> <ide> * {{/each}} <ide> * </body> <ide> * </html> <add> * <add> * output html: <add> * <html> <add> * <head> <add> * <link rel="stylesheet" href="../css/commons~pageA~pageB~pageD-chunk-327fe913.css"> <add> * </head> <add> * <body> <add> * <script src="../js/vendor-chunk-66e793b2.js"></script> <add> * <script src="../js/commons~pageA~pageB~pageC~pageD-chunk-2b8836d9.js"></script> <add> * <script src="../js/commons~pageA~pageB~pageD-chunk-3e461ae1.js"></script> <add> * <script src="../js/pageA-0476b1b3.js"></script> <add> * </body> <add> * </html> <add> * <ide> */ <ide> <ide> 'use strict';
JavaScript
mit
b1e8db4ca7cb14a6c9fd40989f554bc7359401bf
0
ColorfulCakeChen/query-submit-canvas,ColorfulCakeChen/query-submit-canvas
export { Params, Base }; import * as ValueMax from "../ValueMax.js"; import * as ValueDesc from "../Unpacker/ValueDesc.js"; import * as ParamDesc from "../Unpacker/ParamDesc.js"; import * as Weights from "../Unpacker/Weights.js"; import * as PointDepthPoint from "./PointDepthPoint.js"; /** * Convolution block parameters. */ class Params extends Weights.Params { /** * If a parameter's value is null, it will be extracted from inputFloat32Array (i.e. by evolution). * * @param {Float32Array} inputFloat32Array * A Float32Array whose values will be interpreted as weights. * * @param {number} byteOffsetBegin * The position to start to decode from the inputFloat32Array. This is relative to the inputFloat32Array.buffer * (not to the inputFloat32Array.byteOffset). * * @param {number} stepCountPerBlock * There are how many steps inside this block. * - If null, it will be extracted from inputFloat32Array (i.e. by evolution). * * - If zero (== 0), the step count will be automatically calculated so that the block's output has half of source's * ( height, width ) and double channel count (depth). * - Every step will use depthwise convolution ( strides = 1, pad = "valid" ) and pointwise21. So every step will * shrink the input a little. * - The step0's depthwise convolution will also use channel multiplier 2 to double the channel count. * - The stepLast may use a smaller depthwise filter so that it could just make half source size as output size. * - If ( depthwiseFilterHeight == 1 ), the depthwiseFilterHeight will become 2 forcibly. Otherwise, the source size * could not be shrinked. * * - If positive (>= 1), this block will use one tf.depthwiseConv2d( strides = 2, pad = "same" ) to shrink (i.e. to halve * height x width) and use ( stepCountPerBlock - 1 ) times tf.depthwiseConv2d( strides = 1, pad = "same" ) until * the block end. * * @param {boolean} bChannelShuffler * Whether a (concatenator and) channel shuffler will be used. 
* - If ( stepCountPerBlock == 0 ), this flag will be ignored. There will be no channel shuffler. * - If ( bChannelShuffler == true ), this block will be similar to ShuffleNetV2 (i.e. split and concat channels). * - If ( bChannelShuffler == false ), this block will be similar to MobileNetV1 or MobileNetV2 (i.e. with add-input-to-output). * - If ( bChannelShuffler == null ), it will be extracted from inputFloat32Array (i.e. by evolution). * * @param {number} pointwise1ChannelCountRate * The first 1x1 pointwise convolution output channel count over of the second 1x1 pointwise convolution output channel count. * That is, pointwise1ChannelCount = ( pointwise2ChannelCount * pointwise1ChannelCountRate ). * - If ( stepCountPerBlock == 0 ), this rate will be ignored. There will be no first 1x1 pointwise. * - If ( bChannelShuffler == true ) and ( pointwise1ChannelCountRate == 0 ), will be simplified ShuffleNetV2 (expanding by once depthwise). * - If ( bChannelShuffler == true ) and ( pointwise1ChannelCountRate == 1 ), will be similar to ShuffleNetV2 (expanding by twice depthwise). * - If ( bChannelShuffler == false ) and ( pointwise1ChannelCountRate == 1 ), will be similar to MobileNetV1. * - If ( bChannelShuffler == false ) and ( pointwise1ChannelCountRate == 2 ), will be similar to MobileNetV2. * - If ( pointwise1ChannelCountRate == null ), it will be extracted from inputFloat32Array (i.e. by evolution). * * @param {boolean} bBias * If true, there will be a bias after every convolution. If null, it will be extracted from inputFloat32Array (i.e. by evolution). * * @param {string} nActivationId * The activation function id (ValueDesc.ActivationFunction.Singleton.Ids.Xxx) after every convolution. If null, it will be extracted * from inputFloat32Array (i.e. by evolution). 
* * @param {string} nActivationIdAtBlockEnd * The activation function id (ValueDesc.ActivationFunction.Singleton.Ids.Xxx) after the convolution of the last PointDepthPoint's * pointwise2ActivationId of this block. If null, it will be extracted from inputFloat32Array (i.e. by evolution). If the output of * this block needs to be any arbitrary value, it is recommended not to use activation at the end of this block * (i.e. nActivationIdAtBlockEnd == ValueDesc.ActivationFunction.Singleton.Ids.NONE) so that it will not be restricted by the range * of the activation function. * * @return {boolean} * Return false, if initialization failed. * * @override */ init( inputFloat32Array, byteOffsetBegin, stepCountPerBlock, bChannelShuffler, pointwise1ChannelCountRate, depthwiseChannelMultiplierStep0, depthwiseFilterHeight, bBias, nActivationId, nActivationIdAtBlockEnd ) { // Q: Why the depthwiseChannelMultiplierStep0 is not listed as a parameter? // A: After considering the following reasons, it is worth to drop this parameter. // // - In reality, it is almost no reason to use only avg/max pooling to compose a block because it keep too little information // for the next block. // // - If depthwiseChannelMultiplierStep0 is specified as Params.depthwiseChannelMultiplierStep0.valueDesc.Ids.NONE (0), the input // image will not be shrinked a little (for ( stepCountPerBlock <= 0 )) or will not be halven (for ( stepCountPerBlock >= 1 ). // If it is still a parameter it should be forced to 1 at least (always needs depthwise operation) in this case. 
// let parameterMap = new Map( [ [ Params.stepCountPerBlock, stepCountPerBlock ], [ Params.bChannelShuffler, bChannelShuffler ], [ Params.pointwise1ChannelCountRate, pointwise1ChannelCountRate ], [ Params.depthwiseFilterHeight, depthwiseFilterHeight ], [ Params.bBias, bBias ], [ Params.nActivationId, nActivationId ], [ Params.nActivationIdAtBlockEnd, nActivationIdAtBlockEnd ], ] ); return super.init( inputFloat32Array, byteOffsetBegin, parameterMap ); } get stepCountPerBlock() { return this.parameterMapModified.get( Params.stepCountPerBlock ); } get bChannelShuffler() { return this.parameterMapModified.get( Params.bChannelShuffler ); } get pointwise1ChannelCountRate() { return this.parameterMapModified.get( Params.pointwise1ChannelCountRate ); } get depthwiseFilterHeight() { return this.parameterMapModified.get( Params.depthwiseFilterHeight ); } get bBias() { return this.parameterMapModified.get( Params.bBias ); } get nActivationId() { return this.parameterMapModified.get( Params.nActivationId ); } get nActivationIdName() { return Params.nActivationId.getStringOfValue( this.nActivationId ); } get nActivationIdAtBlockEndId() { return this.parameterMapModified.get( Params.nActivationIdAtBlockEnd ); } get nActivationIdAtBlockEndName() { return Params.nActivationIdAtBlockEnd.getStringOfValue( this.nActivationIdAtBlockEnd ); } } // Define parameter descriptions. 
Params.stepCountPerBlock = new ParamDesc.Int( "stepCountPerBlock", 0, ( 1 * 1024 ) ); Params.bChannelShuffler = new ParamDesc.Bool( "bChannelShuffler" ); Params.pointwise1ChannelCountRate = new ParamDesc.Int( "pointwise1ChannelCountRate", 0, 2 ); Params.depthwiseFilterHeight = new ParamDesc.Int( "depthwiseFilterHeight", 1, 9 ); Params.bBias = new ParamDesc.Bool( "bBias" ); Params.nActivationId = new ParamDesc.ActivationFunction( "nActivationId" ); Params.nActivationIdAtBlockEnd = new ParamDesc.ActivationFunction( "nActivationIdAtBlockEnd" ); /** * Implement a block of ( depthwise convolution and pointwise convolution ) or ShuffleNetV2 (with 2 output channel groups) or MobileNetV1 * or MobileNetV2. * * * @member {boolean} bInitOk * If true, this object initialized (i.e. initer()) successfully. * * @member {number} byteOffsetBegin * The position which is started (inclusive) to extract from inputFloat32Array.buffer by initer(). * * @member {number} byteOffsetEnd * The position which is ended to (non-inclusive) extract from inputFloat32Array.buffer by initer(). Where to extract next weights. * Only meaningful when ( this.bInitOk == true ). * * @member {PointDepthPoint.Base[]} stepsArray * All computation steps of this block. * * @member {PointDepthPoint.Base} step0 * The first computation step of this block. * * @member {PointDepthPoint.Base} stepLast * The last computation step of this block. It may be the same as this.step0 when there is only one step inside this block. * * @member {number} outputHeight * The output image height of this block's last step. * * @member {number} outputWidth * The output image width of this block's last step. * * @member {number} outputChannelCount * The output channel count of this block's last step. * */ class Base { /** * Generator for initializing this object. * * @param {ValueMax.Percentage.Aggregate} progressParent * Some new progressToAdvance will be created and added to progressParent. 
The created progressToAdvance will be * increased when every time advanced. The progressParent.getRoot() will be returned when every time yield. * * @param {number} sourceHeight * The height of the source image which will be processed by apply_and_destroy_or_keep(). This should always be specified and can * not be null (i.e. it will never be extracted from inputFloat32Array and never by evolution). * * @param {number} sourceWidth * The width of the source image which will be processed by apply_and_destroy_or_keep().c This should always be specified and can * not be null (i.e. it will never be extracted from inputFloat32Array and never by evolution). * * @param {number} sourceChannelCount * The channel count of the source image. It may be the output channel count of the previous convolution block, so it could be large. * This should always be specified and can not be null (i.e. it will never be extracted from inputFloat32Array and never by evolution). * * @param {boolean} bKeepInputTensor * If true, apply_and_destroy_or_keep() will not dispose inputTensor (i.e. will be kept). If it is null, it will be viewed as falsy * (i.e. it will never be extracted from inputFloat32Array and never by evolution). * * @param {Params} params * A Params object. The params.extract() will be called to extract parameters. * * @yield {ValueMax.Percentage.Aggregate} * Yield ( value = progressParent.getRoot() ) when ( done = false ). * * @yield {boolean} * Yield ( value = true ) when ( done = true ) successfully. * Yield ( value = false ) when ( done = true ) failed. * * @see PointDepthPoint.Base.initer() */ * initer( progressParent, sourceHeight, sourceWidth, sourceChannelCount, bKeepInputTensor, params ) { // Both MobileNetV3 and ShuffleNetV2: // - They all do not use (depthwise convolution) channelMultiplier. // - They all use 1x1 (pointwise) convolution to expand channel count. // - They all use 1x1 (pointwise) convolution before depthwise convolution. 
// - They all use activation function after first pointwise convolution. // - They all use depthwise convolution with ( pad = "same" ). // - They all use depthwise convolution with ( strides = 2 ) for shrinking (halving) height x width. // - They all do not use bias after pointwise and depthwise convolution. // // Inisde one of their block, three convolutions are used: // A) 1x1 (pointwise) convolution, with activation. // B) depthwise convolution, (ShuffleNetV2) without or (MobileNetV2) with activation. // C) 1x1 (pointwise) convolution, (ShuffleNetV2) with or (MobileNetV2) without activation. // // In MobileNetV3, convolution A expands channel count (with activation), convolution C shrinks channel count (without activation). // It may use squeeze-and-excitation after convolution B (without activation). When there is necessary to increase output channel // count (usually in step 0 of a block), the convolution C is responsible for this. // // In ShuffleNetV2, convolution A (with activation), convolution B (without activation) and convolution C (with activation) never // change channel count. When there is necessary to increase output channel count (usually in step 0 of a block), it expands channel // count by concatenating two shrinked (halven) height x width. // 0. Prepare // Estimate the maximum value of progress. let progressMax = 1 // for extracting parameters from inputFloat32Array. ; let progressRoot = progressParent.getRoot(); let progressToAdvance = progressParent.addChild( new ValueMax.Percentage.Concrete( progressMax ) ); // For parameters extracting. let progressForSteps = progressParent.addChild( new ValueMax.Percentage.Aggregate() ); // for step0, step1, 2, 3, ... this.disposeTensors(); //!!! ...unfinished... (2021/07/30) should be moved into Params. Then ParamsConfig constructor accept Params (instead of Base). // So that ParamsConfig could be tested individually. 
this.sourceHeight = sourceHeight; this.sourceWidth = sourceWidth; this.sourceChannelCount = sourceChannelCount; this.bKeepInputTensor = bKeepInputTensor; // 1. Extract parameters. if ( !params ) return false; this.byteOffsetEnd = this.byteOffsetBegin = params.defaultByteOffsetBegin; if ( !params.extract() ) return false; // e.g. input array does not have enough data. this.byteOffsetEnd = params.defaultByteOffsetEnd; // Record where to extract next weights. Only meaningful when ( this.bInitOk == true ). // Get parameters' real (adjusted) values. // // Do not keep params in this.params so that the inputFloat32Array could be released. this.stepCountPerBlock = params.stepCountPerBlock; this.bChannelShuffler = params.bChannelShuffler this.pointwise1ChannelCountRate = params.pointwise1ChannelCountRate; this.depthwiseFilterHeight = params.depthwiseFilterHeight; // Assume depthwise filter's width equals its height. this.bBias = params.bBias; this.nActivationId = params.nActivationId; this.nActivationIdName = params.nActivationIdName; this.nActivationIdAtBlockEnd = params.nActivationIdAtBlockEnd; this.nActivationIdAtBlockEndName = params.nActivationIdAtBlockEndName; // Pre-allocate array to place intermediate 2 input tensors and 2 output tensors. This could reduce memory re-allocation. this.intermediateInputTensors = new Array( 2 ); this.intermediateOutputTensors = new Array( 2 ); ++progressToAdvance.value; yield progressRoot; // Parameters extracted. Report progress. //!!! ...unfinished... (2021/07/30) Perhaps, moved to Params.outputHeight() as a standalone function. // By default, the output ( height, width ) is half of the input (i.e. result of depthwise convolution with ( strides = 2, pad = "same" ) ). // // Note: This calculation copied from the getPadAndOutInfo() of // (https://github.com/tensorflow/tfjs/blob/tfjs-v3.8.0/tfjs-core/src/ops/conv_util.ts). 
// { let stridesHeight = 2, stridesWidth = 2; this.outputHeight = Math.ceil( sourceHeight / stridesHeight ); this.outputWidth = Math.ceil( sourceWidth / stridesWidth ); } let paramsConfig = Base.createParamsConfig.call( this ); paramsConfig.determine_stepCount_depthwiseFilterHeightLast(); // Calculate the real step count. for ( let i = 0; i < paramsConfig.stepCount; ++i ) { // Progress for step0, 1, 2, 3, ... progressForSteps.addChild( new ValueMax.Percentage.Aggregate() ); } let params, step, stepIniter; this.stepsArray = new Array( paramsConfig.stepCount ); for ( let i = 0; i < this.stepsArray.length; ++i ) { // Step1, 2, 3, ... if ( 0 == i ) { // Step0. paramsConfig.configTo_beforeStep0(); } // If this is the last step of this block (i.e. at-block-end) // - a different depthwise filter size may be used. // - a different activation function may be used after pointwise2 convolution. if ( ( this.stepsArray.length - 1 ) == i ) { paramsConfig.configTo_beforeStepLast(); } params = new PointDepthPoint.Params( params.defaultInput, this.byteOffsetEnd, paramsConfig.channelCount1_pointwise1Before, paramsConfig.pointwise1ChannelCount, paramsConfig.pointwise1Bias, paramsConfig.pointwise1ActivationId, paramsConfig.depthwise_AvgMax_Or_ChannelMultiplier, paramsConfig.depthwiseFilterHeight, paramsConfig.depthwiseStridesPad, paramsConfig.depthwiseBias, paramsConfig.depthwiseActivationId, paramsConfig.pointwise21ChannelCount, paramsConfig.pointwise21Bias, paramsConfig.pointwise21ActivationId, paramsConfig.pointwise22ChannelCount, paramsConfig.pointwise22Bias, paramsConfig.pointwise22ActivationId, ) step = this.stepsArray[ i ] = new PointDepthPoint.Base(); stepIniter = step.initer( progressForSteps.children[ i ], paramsConfig.channelCount0_pointwise1Before, paramsConfig.bShouldKeepInputTensor, params ); this.bInitOk = yield* stepIniter; if ( !this.bInitOk ) return false; this.byteOffsetEnd = this.step.byteOffsetEnd; if ( 0 == i ) { // After step0 (i.e. for step1, 2, 3, ...) 
paramsConfig.configTo_afterStep0( step ); } } this.step0 = this.stepsArray[ 0 ]; // Shortcut to the first step. this.stepLast = this.stepsArray[ this.stepsArray.length - 1 ]; // Shortcut to the last step. this.outputChannelCount = this.stepLast.outChannelsAll; this.bInitOk = true; return this.bInitOk; } /** * Initialize this object by calling initer() and advance the generator by loop until done. * * @param {ValueMax.Percentage.Aggregate} progressParent * If null, a temporary progress object will be created. * * @return {boolean} * Return true if successfully (and progressParent.valuePercentage will be equal to 100). * Return false if failed (and progressParent.valuePercentage will be less than 100). * * @see PointDepthPoint.Base.init() */ init( progressParent, sourceHeight, sourceWidth, sourceChannelCount, bKeepInputTensor, params ) { progressParent = progressParent || ( new ValueMax.Percentage.Aggregate() ); let initer = this.initer( progressParent, sourceHeight, sourceWidth, sourceChannelCount, bKeepInputTensor, params ); let initerNext; do { initerNext = initer.next(); } while ( ! initerNext.done ); // When ( false == initerNext.done ), the ( initerNext.value ) will be progressParent.getRoot(). let bInitOk = initerNext.value; // When ( true == initerNext.done ), the ( initerNext.value ) will be initialization successfully or failed. return bInitOk; } /** Release all tensors. */ disposeTensors() { if ( this.stepsArray ) { for ( let i = 0; i < this.stepsArray.length ) { let step = this.stepsArray[ i ]; step.disposeTensors(); } this.stepsArray.length = 0; } this.step0 = this.stepLast = null; // It has already de disposed by this.step0 or this.steps1After. this.apply_and_destroy_or_keep = null; this.outputChannelCount = -1; this.intermediateInputTensors = this.intermediateOutputTensors = null; this.byteOffsetBegin = this.byteOffsetEnd = -1; this.bInitOk = false; } /** * @param {Base} this * The Block object to be reference. */ static createParamsConfig() { //!!! 
...unfinished... (2021/07/30) Why not use a single parameter? // * - If ( stepCountPerBlock == 0 ), this rate will be ignored. There will be no first 1x1 pointwise. // * - If ( bChannelShuffler == true ) and ( pointwise1ChannelCountRate == 0 ), will be simplified ShuffleNetV2 (expanding by once depthwise). // * - If ( bChannelShuffler == true ) and ( pointwise1ChannelCountRate == 1 ), will be similar to ShuffleNetV2 (expanding by twice depthwise). // * - If ( bChannelShuffler == false ) and ( pointwise1ChannelCountRate == 1 ), will be similar to MobileNetV1. // * - If ( bChannelShuffler == false ) and ( pointwise1ChannelCountRate == 2 ), will be similar to MobileNetV2. if ( this.stepCountPerBlock == 0 ) { // Not ShuffleNetV2, Not MobileNetV2. return new ParamsConfig_NotShuffleNet_NotMobileNet( this ); } else { if ( this.bChannelShuffler == true ) { if ( this.pointwise1ChannelCountRate == 0 ) { // will be simplified ShuffleNetV2 (expanding by once depthwise). return new ParamsConfig_ShuffleNetV2_Simplified( this ); } else { // ( pointwise1ChannelCountRate == 1 ), will be similar to ShuffleNetV2 (expanding by twice depthwise). return new ParamsConfig_ShuffleNetV2( this ); } } else { // ( bChannelShuffler == false ) // ( pointwise1ChannelCountRate == 1 ), will be similar to MobileNetV1. // ( pointwise1ChannelCountRate == 2 ), will be similar to MobileNetV2. return new ParamsConfig_MobileNet( this ); } } } /** Process input, destroy or keep input, return result. * * @param {tf.tensor3d} inputTensor * The source input image ( height x width x channel ) which will be processed. This inputTensor may or may not be disposed * according to init()'s bKeepInputTensor. * * @return {tf.tensor3d} * Return a new tensor. All other intermediate tensors were disposed. 
*/ apply_and_destroy_or_keep( inputTensor ) { let inputTensors = this.intermediateInputTensors; let outputTensors = this.intermediateOutputTensors; outputTensors[ 0 ] = inputTensor; outputTensors[ 1 ] = null; for ( let i = 0; i < this.stepsArray.length ) { inputTensors[ 0 ] = outputTensors[ 0 ]; // Previous step's output becomes next step's input. inputTensors[ 1 ] = outputTensors[ 1 ]; let step = this.stepsArray[ i ]; step.apply_and_destroy_or_keep( inputTensors, outputTensors ); } return outputTensors[ 0 ]; // Note: The last step should only output one tensor. } } /** * Basic class for all ParamsConfig_Xxx classes. */ class ParamsConfig { /** * @param {Base} block * The Block object which provides basic parameters. */ constructor( block ) { this.block = block; this.channelCount0_pointwise1Before = this.channelCount1_pointwise1Before = this.pointwise1ChannelCount = this.pointwise21ChannelCount = this.pointwise22ChannelCount = this.depthwise_AvgMax_Or_ChannelMultiplier = this.depthwiseFilterHeight = this.depthwiseStridesPad = 0; // By default, all convolution use the same bias flag and activation function. this.pointwise1Bias = this.depthwiseBias = this.pointwise21Bias = this.pointwise22Bias = block.bBias; this.pointwise1ActivationId = this.depthwiseActivationId = this.pointwise21ActivationId = this.pointwise22ActivationId = block.nActivationId; this.bShouldKeepInputTensor = false; this.stepCount = -1; // How many step should be in the block. this.depthwiseFilterHeightLast = -1; // The last step's depthwise filter size. } /** Called to determine stepCount and depthwiseFilterHeightLast. Sub-class could override this method to adjust data members. */ determine_stepCount_depthwiseFilterHeightLast() { let block = this.block; this.stepCount = block.stepCountPerBlock; // By default, the step count is just the original step count. this.depthwiseFilterHeightLast = block.depthwiseFilterHeight; // By default, the last step still uses the original depthwise filter size. 
} /** Called before step0 is about to be created. Sub-class should override this method to adjust data members. * * Step 0. * * The special points of a block's step 0 are: * - halve the height x width. (Both ShuffleNetV2 and MobileNetV2) (by depthwise convolution with strides = 2) * - Double channels. (By concat if ShuffleNetV2. By second pointwise if MobileNetV2.) * - Expand channels by channelMultiplier of depthwise convolution. (Our ShuffleNetV2_Simplified.) */ configTo_beforeStep0() {} /** Called after step0 is created (i.e. before step1, 2, 3, ...). Sub-class should override this method to adjust data members. * * @param {PointDepthPoint.Base} step0 * The just created step0 object. */ configTo_afterStep0( step0 ) {} /** Called before stepLast is about to be created. Sub-class could override this method to adjust data members. */ configTo_beforeStepLast() { // By default, the stepLast of this block (i.e. at-block-end) may use a different activation function after pointwise2 convolution. // // Even if in MobileNetV2 (pointwise2 convolution does not have activation function in default), this is still true. this.pointwise21ActivationId = this.pointwise22ActivationId = block.nActivationIdAtBlockEnd; // Besides, the stepLast may use a different depthwise filter size. This is especially true for NotShuffleNet_NotMobileNet. this.depthwiseFilterHeight = this.depthwiseFilterHeightLast; } } /** Privode parameters for pure depthwise-pointwise convolutions. */ class ParamsConfig_NotShuffleNet_NotMobileNet extends ParamsConfig { /** * Compute how many step shoud be used and what is the last step's depthwise filter size, when shrink sourceHeight to outputHeight * by depthwise convolution with ( strides = 1, pad = "valid" ). * * The this.stepCount will be at least 1 (never 0). * The this.depthwiseFilterHeightLast will be at least 1 (at most this.block.depthwiseFilterHeight). * * The this.block.depthwiseFilterHeight might be modified. 
* * @override */ determine_stepCount_depthwiseFilterHeightLast() { let block = this.block; let differenceHeight = block.sourceHeight - block.outputHeight; //let differenceWidth = block.sourceWidth - block.outputWidth; if ( 0 == differenceHeight ) { // 1. No difference between source and output size. this.stepCount = 1; // Only one step is needed. (Avoid no steps. At least, there should be one step.) this.depthwiseFilterHeightLast = 1; // The last (and only one) ste should use filter size 1x1 so that the input size could be kept. } // Since difference between source and output exists, the filter size should be larger than 1x1. if ( block.depthwiseFilterHeight <= 1 ) block.depthwiseFilterHeight = 2; // Otherwise, the image size could not be shrinked. // The height of processed image will be reduced a little for any depthwise filter larger than 1x1. let heightReducedPerStep = block.depthwiseFilterHeight - 1; // The possible step count for reducing sourceHeight to outputHeight by tf.depthwiseConv2d( strides = 1, pad = "valid" ). // // This value may be less than real step count because the filter size of the last step may be larger than its input. let stepCountCandidate = Math.floor( differenceHeight / heightReducedPerStep ); let differenceHeightLast = differenceHeight - ( stepCountCandidate * heightReducedPerStep ); // The last step should reduce so many height. if ( 0 == differenceHeightLast ) { // 2. The original depthwiseFilterHeight could achieve the output size at the last step. this.stepCount = stepCountCandidate; // It is the real step count. this.depthwiseFilterHeightLast = block.depthwiseFilterHeight; // The last step uses the original depthwise filter size is enough. } // 3. The original depthwiseFilterHeight could not achieve the output size at the last step. // It is larger than the last step's input size. An extra step with a smaller filter size is needed. this.stepCount = stepCountCandidate + 1; // Needs one more step. 
// The extra last step's depthwise filter size should just eliminate the last diffference. this.depthwiseFilterHeightLast = differenceHeightLast + 1; } /** @override */ configTo_beforeStep0() { let block = this.block; this.channelCount0_pointwise1Before = block.sourceChannelCount; // Step0 uses the original input channel count. this.channelCount1_pointwise1Before = ValueDesc.channelCount1_pointwise1Before.Singleton.Ids.ONE_INPUT; // no concatenate, no add-input-to-output. this.pointwise1ChannelCount = 0; // In this mode, always no pointwise convolution before depthwise convolution. this.depthwise_AvgMax_Or_ChannelMultiplier = 2; // Step0 double the channel count by depthwise channel multiplier. this.depthwiseFilterHeight = block.depthwiseFilterHeight; // All steps (except stepLast) uses default depthwise filter size. this.depthwiseStridesPad = 0; // In this mode, always ( strides = 1, pad = "valid" ). this.pointwise21ChannelCount = block.sourceChannelCount * block.depthwise_AvgMax_Or_ChannelMultiplier; // Step0 will double channel count. this.pointwise22ChannelCount = 0; // In this mode, always no second output. this.bShouldKeepInputTensor = block.bKeepInputTensor; // Step0 may or may not keep input tensor according to caller's necessary. } /** @override */ configTo_afterStep0( step0 ) { let block = this.block; this.channelCount0_pointwise1Before = step0.outChannelsAll; // Step0's output channel count is all the other steps' input channel count. this.depthwise_AvgMax_Or_ChannelMultiplier = 1; // Except step0, all other steps will not double the channel count. this.pointwise21ChannelCount = step0.outChannelsAll; // Step0's output channel count is all the other steps' output channel count. this.bShouldKeepInputTensor = false; // No matter bKeepInputTensor, all steps (except step0) should not keep input tensor. } } /** Privode parameters for simplified ShuffleNetV2 (i.e. without pointwise1, with concatenator). * * Q: How to specify this configuration? 
* A: By ( bChannelShuffler == true ) and ( pointwise1ChannelCountRate == 0 ) in the parameters of Block.Base. * * (Our) Adjusted ShuffleNetV2: * * Since channel shuffler could achieved efficiently by pointwise convolution, it may be possible to combine the pointwise2 * convolution (after depthwise convolution) and the pointwise convolution (of channel shuffler). That is: * - Concatenate the output of depthwise convolution and the other output group. * - Pointwise convolution to generate output group 1. * - Pointwise convolution to generate output group 2. * * Although the channel shuffler is achieved by pointwise convolution without bias and activation function, however, * the second pointwise convolution (before channel shuffler) indeed has bias and activation function. After combining * these two pointwise convolutions (the original second and the channel shuffler), the total result is twice pointwise * convolution with bias and activation function. * * If the poitwise1 convolution (of every step (include step 0 too)) could be discarded, the step 0 and step 0's branch could * be achieved simultaneously by: * - once depthwise convolution (channelMultipler = 2, strides = 2, pad = same, bias, COS). * - No need to concatenate because the above operation already double channel count. * - twice pointwise2 convolution (every has same as block's input channel count). * * And, the step 1 (, 2, 3, ..., ( n - 2 ) ) could be achieved by: * - once depthwise convolution (channelMultipler = 1, strides = 1, pad = same, bias, COS). * - concatenate. * - twice pointwise2 convolution (every has same as block's input channel count). * * And, the last step (i.e. step ( n - 1 ) ) of the block could be achieved by: * - once depthwise convolution (channelMultipler = 1, strides = 1, pad = same, bias, COS). * - concatenate. * - once pointwise2 convolution (has double of block's input channel count). 
* * Note that: * - The depthwise convolution (channelMultipler = 2, strides = 2) of step 0 achieves simultaneously two depthwise * convolution (channelMultipler = 1, strides = 2) of step 0 and step 0's branch. So, it is one less depthwise * convolution, and one less concatenating. * * - The twice pointwise2 convolution (every has same as block's input channel count) achieves not only pointwise * convolution but also channel shuffling. So, it is one less pointwise convolution. * * - The once pointwise2 convolution (has double of block's input channel count) of last step achieves simultaneously * pointwise convolution, channel shuffling, and concatenating. So, it is not only one less pointwise convolution, * but also one less concatenating. * * - Even if the pointwise1 convolution is discarded and the pointwise2 convolution does not have bias and activation * function, the depthwise convolution (with bias and COS as activation function) and pointwise2 convolution together * compose an effective Fourier series which should have enough expressive power for approximating any function. * */ class ParamsConfig_ShuffleNetV2_Simplified extends ParamsConfig_ShuffleNetV2 { /** @override */ configTo_beforeStep0() { super.configTo_beforeStep0(); // Almost the same as ParamsConfig_ShuffleNetV2. Except the followings. // In this case, ( pointwise1ChannelCountRate == 0 ) so that ( this.pointwise1ChannelCount == 0 ) must true. // // In other words, step0 does not have pointwise1 convolution before depthwise convolution. So the second // depthwise convolution (in original ShuffleNetV2) is not needed. Then, a simpler configuration could be // used. // // Just use once depthwise convolution (but with channel multipler 2) to double the channel count. this.channelCount1_pointwise1Before = ValueDesc.channelCount1_pointwise1Before.Singleton.Ids.ONE_INPUT; // no concatenate, no add-input-to-output. 
this.depthwise_AvgMax_Or_ChannelMultiplier = 2; // Step0 double the channel count by depthwise channel multiplier. } /** @override */ configTo_afterStep0( step0 ) { super.configTo_afterStep0( step0 ); // Almost the same as ParamsConfig_ShuffleNetV2. Except the following. this.depthwise_AvgMax_Or_ChannelMultiplier = 1; // All steps (except step0) will not double the channel count. // Note: ( this.pointwise1ChannelCount == 0 ) still true here. All steps do not have pointwise1 convolution before depthwise convolution. } } /** Privode parameters for ShuffleNetV2 (i.e. with pointwise1, with concatenator). */ class ParamsConfig_ShuffleNetV2 extends ParamsConfig { /** @override */ configTo_beforeStep0() { let block = this.block; this.channelCount0_pointwise1Before = block.sourceChannelCount; // Step0 uses the original input channel count (as input0). this.channelCount1_pointwise1Before = ValueDesc.channelCount1_pointwise1Before.Singleton.Ids.ONE_INPUT_TWO_DEPTHWISE; // with concatenation. this.depthwise_AvgMax_Or_ChannelMultiplier = 1; // All steps will not double the channel count. this.depthwiseFilterHeight = this.block.depthwiseFilterHeight; // All steps uses default depthwise filter size. this.depthwiseStridesPad = 2; // Step0 uses depthwise ( strides = 2, pad = "same" ) to halve ( height, width ). // If an operation has no activation function, it can have no bias too. Because the next operation's bias can achieve the same result. this.depthwiseBias = false; this.depthwiseActivationId = PointDepthPoint.Params.Activation.Ids.NONE; // In ShuffleNetV2, depthwise convolution doesn't have activation. this.pointwise21ChannelCount = block.sourceChannelCount; // All steps' (except stepLast) output0 is the same depth as source input0. this.pointwise22ChannelCount = block.sourceChannelCount; // All steps' (except stepLast) output1 is the same depth as source input1. // In ShuffleNetV2, all steps have pointwise1 convolution before depthwise convolution. 
Its channel count is adjustable by user's request. this.pointwise1ChannelCount = this.pointwise21ChannelCount * block.pointwise1ChannelCountRate; // In ShuffleNetV2, the rate is usually 1. this.bShouldKeepInputTensor = block.bKeepInputTensor; // Step0 may or may not keep input tensor according to caller's necessary. } /** @override */ configTo_afterStep0( step0 ) { let block = this.block; // The ( input0, input1 ) of all steps (except step0) have the same depth as previous (also step0's) step's ( output0, output1 ). this.channelCount0_pointwise1Before = step0.outChannels0; this.channelCount1_pointwise1Before = step0.outChannels1; // i.e. TWO_INPUTS (with concatenation, without add-input-to-output). this.depthwiseStridesPad = 1; // All steps (except step0) uses depthwise ( strides = 1, pad = "same" ) to keep ( height, width ). this.bShouldKeepInputTensor = false; // No matter bKeepInputTensor, all steps (except step0) should not keep input tensor. } /** @override */ configTo_beforeStepLast() { super.configTo_beforeStepLast(); // Still, stepLast may use a different activation function after pointwise2 convolution. // In ShuffleNetV2, the stepLast only has output0 (no output1). And the output0 has double channel count of source input0. // // Note: Although pointwise21 channel count changed, however, the pointwise1ChannelCount is not changed because the final // output0 is viewed as concatenation of pointwise21 and pointwise22. In pointwise1's point of view, its pointwise2 does // not changed. this.pointwise21ChannelCount = block.sourceChannelCount * 2; this.pointwise22ChannelCount = 0; } } /** Privode parameters for MobileNetV1 or MobileNetV2 (i.e. with pointwise1, with add-input-to-output). */ class ParamsConfig_MobileNet extends ParamsConfig { /** @override */ configTo_beforeStep0() { let block = this.block; this.channelCount0_pointwise1Before = block.sourceChannelCount; // Step0 uses the original input channel count (as input0). 
// In MobileNet, all steps (include step0) do not use input1 and do add-input-to-output (without concatenation). this.channelCount1_pointwise1Before = ValueDesc.channelCount1_pointwise1Before.Singleton.Ids.ONE_INPUT_ADD_TO_OUTPUT; this.depthwise_AvgMax_Or_ChannelMultiplier = 1; // All steps will not double the channel count. this.depthwiseFilterHeight = this.block.depthwiseFilterHeight; // All steps uses default depthwise filter size. this.depthwiseStridesPad = 2; // Step0 uses depthwise ( strides = 2, pad = "same" ) to halve ( height, width ). this.pointwise21ChannelCount = block.sourceChannelCount * 2; // In MobileNetV2, all steps (include step0) output0 is twice depth of source input0. this.pointwise22ChannelCount = 0; // In MobileNetV2, all steps (include step0) do not have output1. // If an operation has no activation function, it can have no bias too. Because the next operation's bias can achieve the same result. this.pointwise2Bias = false; // In MobileNetV2, the second 1x1 pointwise convolution doesn't have activation function in default. // // But it could be changed by nActivationIdAtBlockEnd for the last step of the block. this.pointwise2ActivationId = PointDepthPoint.Params.Activation.Ids.NONE; // In MobileNet, all steps have pointwise1 convolution before depthwise convolution. Its channel count is adjustable by user's request. // // Q: How to know whether it is MobileNetV2 or MobileNetV1? // A: By pointwise1ChannelCountRate. // - If ( pointwise1ChannelCount < pointwise2ChannelCount ), similar to ResNet. // - If ( pointwise1ChannelCount == pointwise2ChannelCount ), similar to MobileNetV1 or ShufffleNetV2. // - If ( pointwise1ChannelCount > pointwise2ChannelCount ), similar to MobileNetV2. this.pointwise1ChannelCount = this.pointwise21ChannelCount * block.pointwise1ChannelCountRate; // In MobileNetV2, the rate is usually 2. this.bShouldKeepInputTensor = block.bKeepInputTensor; // Step0 may or may not keep input tensor according to caller's necessary. 
} /** @override */ configTo_afterStep0( step0 ) { let block = this.block; // The input0 of all steps (except step0) have the same depth as previous (also step0's) step's output0. this.channelCount0_pointwise1Before = step0.outChannels0; this.depthwiseStridesPad = 1; // All steps (except step0) uses depthwise ( strides = 1, pad = "same" ) to keep ( height, width ). this.bShouldKeepInputTensor = false; // No matter bKeepInputTensor, all steps (except step0) should not keep input tensor. } }
CNN/Conv/Block.js
export { Params, Base }; import * as ValueMax from "../ValueMax.js"; import * as ValueDesc from "../Unpacker/ValueDesc.js"; import * as ParamDesc from "../Unpacker/ParamDesc.js"; import * as Weights from "../Unpacker/Weights.js"; import * as PointDepthPoint from "./PointDepthPoint.js"; /** * Convolution block parameters. */ class Params extends Weights.Params { /** * If a parameter's value is null, it will be extracted from inputFloat32Array (i.e. by evolution). * * @param {Float32Array} inputFloat32Array * A Float32Array whose values will be interpreted as weights. * * @param {number} byteOffsetBegin * The position to start to decode from the inputFloat32Array. This is relative to the inputFloat32Array.buffer * (not to the inputFloat32Array.byteOffset). * * @param {number} stepCountPerBlock * There are how many steps inside this block. * - If null, it will be extracted from inputFloat32Array (i.e. by evolution). * * - If zero (== 0), the step count will be automatically calculated so that the block's output has half of source's * ( height, width ) and double channel count (depth). * - Every step will use depthwise convolution ( strides = 1, pad = "valid" ) and pointwise21. So every step will * shrink the input a little. * - The step0's depthwise convolution will also use channel multiplier 2 to double the channel count. * - The stepLast may use a smaller depthwise filter so that it could just make half source size as output size. * - If ( depthwiseFilterHeight == 1 ), the depthwiseFilterHeight will become 2 forcibly. Otherwise, the source size * could not be shrinked. * * - If positive (>= 1), this block will use one tf.depthwiseConv2d( strides = 2, pad = "same" ) to shrink (i.e. to halve * height x width) and use ( stepCountPerBlock - 1 ) times tf.depthwiseConv2d( strides = 1, pad = "same" ) until * the block end. * * @param {boolean} bChannelShuffler * Whether a (concatenator and) channel shuffler will be used. 
* - If ( stepCountPerBlock == 0 ), this flag will be ignored. There will be no channel shuffler. * - If ( bChannelShuffler == true ), this block will be similar to ShuffleNetV2 (i.e. split and concat channels). * - If ( bChannelShuffler == false ), this block will be similar to MobileNetV1 or MobileNetV2 (i.e. with add-input-to-output). * - If ( bChannelShuffler == null ), it will be extracted from inputFloat32Array (i.e. by evolution). * * @param {number} pointwise1ChannelCountRate * The first 1x1 pointwise convolution output channel count over of the second 1x1 pointwise convolution output channel count. * That is, pointwise1ChannelCount = ( pointwise2ChannelCount * pointwise1ChannelCountRate ). * - If ( stepCountPerBlock == 0 ), this rate will be ignored. There will be no first 1x1 pointwise. * - If ( bChannelShuffler == true ) and ( pointwise1ChannelCountRate == 0 ), will be simplified ShuffleNetV2 (expanding by once depthwise). * - If ( bChannelShuffler == true ) and ( pointwise1ChannelCountRate == 1 ), will be similar to ShuffleNetV2 (expanding by twice depthwise). * - If ( bChannelShuffler == false ) and ( pointwise1ChannelCountRate == 1 ), will be similar to MobileNetV1. * - If ( bChannelShuffler == false ) and ( pointwise1ChannelCountRate == 2 ), will be similar to MobileNetV2. * - If ( pointwise1ChannelCountRate == null ), it will be extracted from inputFloat32Array (i.e. by evolution). * * @param {boolean} bBias * If true, there will be a bias after every convolution. If null, it will be extracted from inputFloat32Array (i.e. by evolution). * * @param {string} nActivationId * The activation function id (ValueDesc.ActivationFunction.Singleton.Ids.Xxx) after every convolution. If null, it will be extracted * from inputFloat32Array (i.e. by evolution). 
* * @param {string} nActivationIdAtBlockEnd * The activation function id (ValueDesc.ActivationFunction.Singleton.Ids.Xxx) after the convolution of the last PointDepthPoint's * pointwise2ActivationId of this block. If null, it will be extracted from inputFloat32Array (i.e. by evolution). If the output of * this block needs to be any arbitrary value, it is recommended not to use activation at the end of this block * (i.e. nActivationIdAtBlockEnd == ValueDesc.ActivationFunction.Singleton.Ids.NONE) so that it will not be restricted by the range * of the activation function. * * @return {boolean} * Return false, if initialization failed. * * @override */ init( inputFloat32Array, byteOffsetBegin, stepCountPerBlock, bChannelShuffler, pointwise1ChannelCountRate, depthwiseChannelMultiplierStep0, depthwiseFilterHeight, bBias, nActivationId, nActivationIdAtBlockEnd ) { // Q: Why the depthwiseChannelMultiplierStep0 is not listed as a parameter? // A: After considering the following reasons, it is worth to drop this parameter. // // - In reality, it is almost no reason to use only avg/max pooling to compose a block because it keep too little information // for the next block. // // - If depthwiseChannelMultiplierStep0 is specified as Params.depthwiseChannelMultiplierStep0.valueDesc.Ids.NONE (0), the input // image will not be shrinked a little (for ( stepCountPerBlock <= 0 )) or will not be halven (for ( stepCountPerBlock >= 1 ). // If it is still a parameter it should be forced to 1 at least (always needs depthwise operation) in this case. 
// let parameterMap = new Map( [ [ Params.stepCountPerBlock, stepCountPerBlock ], [ Params.bChannelShuffler, bChannelShuffler ], [ Params.pointwise1ChannelCountRate, pointwise1ChannelCountRate ], [ Params.depthwiseFilterHeight, depthwiseFilterHeight ], [ Params.bBias, bBias ], [ Params.nActivationId, nActivationId ], [ Params.nActivationIdAtBlockEnd, nActivationIdAtBlockEnd ], ] ); return super.init( inputFloat32Array, byteOffsetBegin, parameterMap ); } get stepCountPerBlock() { return this.parameterMapModified.get( Params.stepCountPerBlock ); } get bChannelShuffler() { return this.parameterMapModified.get( Params.bChannelShuffler ); } get pointwise1ChannelCountRate() { return this.parameterMapModified.get( Params.pointwise1ChannelCountRate ); } get depthwiseFilterHeight() { return this.parameterMapModified.get( Params.depthwiseFilterHeight ); } get bBias() { return this.parameterMapModified.get( Params.bBias ); } get nActivationId() { return this.parameterMapModified.get( Params.nActivationId ); } get nActivationIdName() { return Params.nActivationId.getStringOfValue( this.nActivationId ); } get nActivationIdAtBlockEndId() { return this.parameterMapModified.get( Params.nActivationIdAtBlockEnd ); } get nActivationIdAtBlockEndName() { return Params.nActivationIdAtBlockEnd.getStringOfValue( this.nActivationIdAtBlockEnd ); } } // Define parameter descriptions. 
Params.stepCountPerBlock = new ParamDesc.Int( "stepCountPerBlock", 0, ( 1 * 1024 ) ); Params.bChannelShuffler = new ParamDesc.Bool( "bChannelShuffler" ); Params.pointwise1ChannelCountRate = new ParamDesc.Int( "pointwise1ChannelCountRate", 0, 2 ); Params.depthwiseFilterHeight = new ParamDesc.Int( "depthwiseFilterHeight", 1, 9 ); Params.bBias = new ParamDesc.Bool( "bBias" ); Params.nActivationId = new ParamDesc.ActivationFunction( "nActivationId" ); Params.nActivationIdAtBlockEnd = new ParamDesc.ActivationFunction( "nActivationIdAtBlockEnd" ); /** * Implement a block of ( depthwise convolution and pointwise convolution ) or ShuffleNetV2 (with 2 output channel groups) or MobileNetV1 * or MobileNetV2. * * * @member {boolean} bInitOk * If true, this object initialized (i.e. initer()) successfully. * * @member {number} byteOffsetBegin * The position which is started (inclusive) to extract from inputFloat32Array.buffer by initer(). * * @member {number} byteOffsetEnd * The position which is ended to (non-inclusive) extract from inputFloat32Array.buffer by initer(). Where to extract next weights. * Only meaningful when ( this.bInitOk == true ). * * @member {PointDepthPoint.Base[]} stepsArray * All computation steps of this block. * * @member {PointDepthPoint.Base} step0 * The first computation step of this block. * * @member {PointDepthPoint.Base} stepLast * The last computation step of this block. It may be the same as this.step0 when there is only one step inside this block. * * @member {number} outputHeight * The output image height of this block's last step. * * @member {number} outputWidth * The output image width of this block's last step. * * @member {number} outputChannelCount * The output channel count of this block's last step. * */ class Base { /** * Generator for initializing this object. * * @param {ValueMax.Percentage.Aggregate} progressParent * Some new progressToAdvance will be created and added to progressParent. 
The created progressToAdvance will be * increased when every time advanced. The progressParent.getRoot() will be returned when every time yield. * * @param {number} sourceHeight * The height of the source image which will be processed by apply_and_destroy_or_keep(). This should always be specified and can * not be null (i.e. it will never be extracted from inputFloat32Array and never by evolution). * * @param {number} sourceWidth * The width of the source image which will be processed by apply_and_destroy_or_keep().c This should always be specified and can * not be null (i.e. it will never be extracted from inputFloat32Array and never by evolution). * * @param {number} sourceChannelCount * The channel count of the source image. It may be the output channel count of the previous convolution block, so it could be large. * This should always be specified and can not be null (i.e. it will never be extracted from inputFloat32Array and never by evolution). * * @param {boolean} bKeepInputTensor * If true, apply_and_destroy_or_keep() will not dispose inputTensor (i.e. will be kept). If it is null, it will be viewed as falsy * (i.e. it will never be extracted from inputFloat32Array and never by evolution). * * @param {Params} params * A Params object. The params.extract() will be called to extract parameters. * * @yield {ValueMax.Percentage.Aggregate} * Yield ( value = progressParent.getRoot() ) when ( done = false ). * * @yield {boolean} * Yield ( value = true ) when ( done = true ) successfully. * Yield ( value = false ) when ( done = true ) failed. * * @see PointDepthPoint.Base.initer() */ * initer( progressParent, sourceHeight, sourceWidth, sourceChannelCount, bKeepInputTensor, params ) { // Both MobileNetV3 and ShuffleNetV2: // - They all do not use (depthwise convolution) channelMultiplier. // - They all use 1x1 (pointwise) convolution to expand channel count. // - They all use 1x1 (pointwise) convolution before depthwise convolution. 
// - They all use activation function after first pointwise convolution. // - They all use depthwise convolution with ( pad = "same" ). // - They all use depthwise convolution with ( strides = 2 ) for shrinking (halving) height x width. // - They all do not use bias after pointwise and depthwise convolution. // // Inisde one of their block, three convolutions are used: // A) 1x1 (pointwise) convolution, with activation. // B) depthwise convolution, (ShuffleNetV2) without or (MobileNetV2) with activation. // C) 1x1 (pointwise) convolution, (ShuffleNetV2) with or (MobileNetV2) without activation. // // In MobileNetV3, convolution A expands channel count (with activation), convolution C shrinks channel count (without activation). // It may use squeeze-and-excitation after convolution B (without activation). When there is necessary to increase output channel // count (usually in step 0 of a block), the convolution C is responsible for this. // // In ShuffleNetV2, convolution A (with activation), convolution B (without activation) and convolution C (with activation) never // change channel count. When there is necessary to increase output channel count (usually in step 0 of a block), it expands channel // count by concatenating two shrinked (halven) height x width. // 0. Prepare // Estimate the maximum value of progress. let progressMax = 1 // for extracting parameters from inputFloat32Array. ; let progressRoot = progressParent.getRoot(); let progressToAdvance = progressParent.addChild( new ValueMax.Percentage.Concrete( progressMax ) ); // For parameters extracting. let progressForSteps = progressParent.addChild( new ValueMax.Percentage.Aggregate() ); // for step0, step1, 2, 3, ... this.disposeTensors(); this.sourceHeight = sourceHeight; this.sourceWidth = sourceWidth; this.sourceChannelCount = sourceChannelCount; this.bKeepInputTensor = bKeepInputTensor; // 1. Extract parameters. 
if ( !params ) return false; this.byteOffsetEnd = this.byteOffsetBegin = params.defaultByteOffsetBegin; if ( !params.extract() ) return false; // e.g. input array does not have enough data. this.byteOffsetEnd = params.defaultByteOffsetEnd; // Record where to extract next weights. Only meaningful when ( this.bInitOk == true ). // Get parameters' real (adjusted) values. // // Do not keep params in this.params so that the inputFloat32Array could be released. this.stepCountPerBlock = params.stepCountPerBlock; this.bChannelShuffler = params.bChannelShuffler this.pointwise1ChannelCountRate = params.pointwise1ChannelCountRate; this.depthwiseFilterHeight = params.depthwiseFilterHeight; // Assume depthwise filter's width equals its height. this.bBias = params.bBias; this.nActivationId = params.nActivationId; this.nActivationIdName = params.nActivationIdName; this.nActivationIdAtBlockEnd = params.nActivationIdAtBlockEnd; this.nActivationIdAtBlockEndName = params.nActivationIdAtBlockEndName; // Pre-allocate array to place intermediate 2 input tensors and 2 output tensors. This could reduce memory re-allocation. this.intermediateInputTensors = new Array( 2 ); this.intermediateOutputTensors = new Array( 2 ); ++progressToAdvance.value; yield progressRoot; // Parameters extracted. Report progress. //!!! ...unfinished... (2021/07/30) Perhaps, moved to a standalone function. // By default, the output ( height, width ) is half of the input (i.e. result of depthwise convolution with ( strides = 2, pad = "same" ) ). // // Note: This calculation copied from the getPadAndOutInfo() of // (https://github.com/tensorflow/tfjs/blob/tfjs-v3.8.0/tfjs-core/src/ops/conv_util.ts). 
// { let stridesHeight = 2, stridesWidth = 2; this.outputHeight = Math.ceil( sourceHeight / stridesHeight ); this.outputWidth = Math.ceil( sourceWidth / stridesWidth ); } let paramsConfig = Base.createParamsConfig.call( this ); paramsConfig.determine_stepCount_depthwiseFilterHeightLast(); // Calculate the real step count. for ( let i = 0; i < paramsConfig.stepCount; ++i ) { // Progress for step0, 1, 2, 3, ... progressForSteps.addChild( new ValueMax.Percentage.Aggregate() ); } let params, step, stepIniter; this.stepsArray = new Array( paramsConfig.stepCount ); for ( let i = 0; i < this.stepsArray.length; ++i ) { // Step1, 2, 3, ... if ( 0 == i ) { // Step0. paramsConfig.configTo_beforeStep0(); } // If this is the last step of this block (i.e. at-block-end) // - a different depthwise filter size may be used. // - a different activation function may be used after pointwise2 convolution. if ( ( this.stepsArray.length - 1 ) == i ) { paramsConfig.configTo_beforeStepLast(); } params = new PointDepthPoint.Params( params.defaultInput, this.byteOffsetEnd, paramsConfig.channelCount1_pointwise1Before, paramsConfig.pointwise1ChannelCount, paramsConfig.pointwise1Bias, paramsConfig.pointwise1ActivationId, paramsConfig.depthwise_AvgMax_Or_ChannelMultiplier, paramsConfig.depthwiseFilterHeight, paramsConfig.depthwiseStridesPad, paramsConfig.depthwiseBias, paramsConfig.depthwiseActivationId, paramsConfig.pointwise21ChannelCount, paramsConfig.pointwise21Bias, paramsConfig.pointwise21ActivationId, paramsConfig.pointwise22ChannelCount, paramsConfig.pointwise22Bias, paramsConfig.pointwise22ActivationId, ) step = this.stepsArray[ i ] = new PointDepthPoint.Base(); stepIniter = step.initer( progressForSteps.children[ i ], paramsConfig.channelCount0_pointwise1Before, paramsConfig.bShouldKeepInputTensor, params ); this.bInitOk = yield* stepIniter; if ( !this.bInitOk ) return false; this.byteOffsetEnd = this.step.byteOffsetEnd; if ( 0 == i ) { // After step0 (i.e. for step1, 2, 3, ...) 
paramsConfig.configTo_afterStep0( step ); } } this.step0 = this.stepsArray[ 0 ]; // Shortcut to the first step. this.stepLast = this.stepsArray[ this.stepsArray.length - 1 ]; // Shortcut to the last step. this.outputChannelCount = this.stepLast.outChannelsAll; this.bInitOk = true; return this.bInitOk; } /** * Initialize this object by calling initer() and advance the generator by loop until done. * * @param {ValueMax.Percentage.Aggregate} progressParent * If null, a temporary progress object will be created. * * @return {boolean} * Return true if successfully (and progressParent.valuePercentage will be equal to 100). * Return false if failed (and progressParent.valuePercentage will be less than 100). * * @see PointDepthPoint.Base.init() */ init( progressParent, sourceHeight, sourceWidth, sourceChannelCount, bKeepInputTensor, params ) { progressParent = progressParent || ( new ValueMax.Percentage.Aggregate() ); let initer = this.initer( progressParent, sourceHeight, sourceWidth, sourceChannelCount, bKeepInputTensor, params ); let initerNext; do { initerNext = initer.next(); } while ( ! initerNext.done ); // When ( false == initerNext.done ), the ( initerNext.value ) will be progressParent.getRoot(). let bInitOk = initerNext.value; // When ( true == initerNext.done ), the ( initerNext.value ) will be initialization successfully or failed. return bInitOk; } /** Release all tensors. */ disposeTensors() { if ( this.stepsArray ) { for ( let i = 0; i < this.stepsArray.length ) { let step = this.stepsArray[ i ]; step.disposeTensors(); } this.stepsArray.length = 0; } this.step0 = this.stepLast = null; // It has already de disposed by this.step0 or this.steps1After. this.apply_and_destroy_or_keep = null; this.outputChannelCount = -1; this.intermediateInputTensors = this.intermediateOutputTensors = null; this.byteOffsetBegin = this.byteOffsetEnd = -1; this.bInitOk = false; } /** * @param {Base} this * The Block object to be reference. */ static createParamsConfig() { //!!! 
...unfinished... (2021/07/30) Why not use a single parameter? // * - If ( stepCountPerBlock == 0 ), this rate will be ignored. There will be no first 1x1 pointwise. // * - If ( bChannelShuffler == true ) and ( pointwise1ChannelCountRate == 0 ), will be simplified ShuffleNetV2 (expanding by once depthwise). // * - If ( bChannelShuffler == true ) and ( pointwise1ChannelCountRate == 1 ), will be similar to ShuffleNetV2 (expanding by twice depthwise). // * - If ( bChannelShuffler == false ) and ( pointwise1ChannelCountRate == 1 ), will be similar to MobileNetV1. // * - If ( bChannelShuffler == false ) and ( pointwise1ChannelCountRate == 2 ), will be similar to MobileNetV2. if ( this.stepCountPerBlock == 0 ) { // Not ShuffleNetV2, Not MobileNetV2. return new ParamsConfig_NotShuffleNet_NotMobileNet( this ); } else { if ( this.bChannelShuffler == true ) { if ( this.pointwise1ChannelCountRate == 0 ) { // will be simplified ShuffleNetV2 (expanding by once depthwise). return new ParamsConfig_ShuffleNetV2_Simplified( this ); } else { // ( pointwise1ChannelCountRate == 1 ), will be similar to ShuffleNetV2 (expanding by twice depthwise). return new ParamsConfig_ShuffleNetV2( this ); } } else { // ( bChannelShuffler == false ) // ( pointwise1ChannelCountRate == 1 ), will be similar to MobileNetV1. // ( pointwise1ChannelCountRate == 2 ), will be similar to MobileNetV2. return new ParamsConfig_MobileNet( this ); } } } /** Process input, destroy or keep input, return result. * * @param {tf.tensor3d} inputTensor * The source input image ( height x width x channel ) which will be processed. This inputTensor may or may not be disposed * according to init()'s bKeepInputTensor. * * @return {tf.tensor3d} * Return a new tensor. All other intermediate tensors were disposed. 
*/ apply_and_destroy_or_keep( inputTensor ) { let inputTensors = this.intermediateInputTensors; let outputTensors = this.intermediateOutputTensors; outputTensors[ 0 ] = inputTensor; outputTensors[ 1 ] = null; for ( let i = 0; i < this.stepsArray.length ) { inputTensors[ 0 ] = outputTensors[ 0 ]; // Previous step's output becomes next step's input. inputTensors[ 1 ] = outputTensors[ 1 ]; let step = this.stepsArray[ i ]; step.apply_and_destroy_or_keep( inputTensors, outputTensors ); } return outputTensors[ 0 ]; // Note: The last step should only output one tensor. } } /** * Basic class for all ParamsConfig_Xxx classes. */ class ParamsConfig { /** * @param {Base} block * The Block object which provides basic parameters. */ constructor( block ) { this.block = block; this.channelCount0_pointwise1Before = this.channelCount1_pointwise1Before = this.pointwise1ChannelCount = this.pointwise21ChannelCount = this.pointwise22ChannelCount = this.depthwise_AvgMax_Or_ChannelMultiplier = this.depthwiseFilterHeight = this.depthwiseStridesPad = 0; // By default, all convolution use the same bias flag and activation function. this.pointwise1Bias = this.depthwiseBias = this.pointwise21Bias = this.pointwise22Bias = block.bBias; this.pointwise1ActivationId = this.depthwiseActivationId = this.pointwise21ActivationId = this.pointwise22ActivationId = block.nActivationId; this.bShouldKeepInputTensor = false; this.stepCount = -1; // How many step should be in the block. this.depthwiseFilterHeightLast = -1; // The last step's depthwise filter size. } /** Called to determine stepCount and depthwiseFilterHeightLast. Sub-class could override this method to adjust data members. */ determine_stepCount_depthwiseFilterHeightLast() { let block = this.block; this.stepCount = block.stepCountPerBlock; // By default, the step count is just the original step count. this.depthwiseFilterHeightLast = block.depthwiseFilterHeight; // By default, the last step still uses the original depthwise filter size. 
} /** Called before step0 is about to be created. Sub-class should override this method to adjust data members. * * Step 0. * * The special points of a block's step 0 are: * - halve the height x width. (Both ShuffleNetV2 and MobileNetV2) (by depthwise convolution with strides = 2) * - Double channels. (By concat if ShuffleNetV2. By second pointwise if MobileNetV2.) * - Expand channels by channelMultiplier of depthwise convolution. (Our ShuffleNetV2_Simplified.) */ configTo_beforeStep0() {} /** Called after step0 is created (i.e. before step1, 2, 3, ...). Sub-class should override this method to adjust data members. * * @param {PointDepthPoint.Base} step0 * The just created step0 object. */ configTo_afterStep0( step0 ) {} /** Called before stepLast is about to be created. Sub-class could override this method to adjust data members. */ configTo_beforeStepLast() { // By default, the stepLast of this block (i.e. at-block-end) may use a different activation function after pointwise2 convolution. // // Even if in MobileNetV2 (pointwise2 convolution does not have activation function in default), this is still true. this.pointwise21ActivationId = this.pointwise22ActivationId = block.nActivationIdAtBlockEnd; // Besides, the stepLast may use a different depthwise filter size. This is especially true for NotShuffleNet_NotMobileNet. this.depthwiseFilterHeight = this.depthwiseFilterHeightLast; } } /** Privode parameters for pure depthwise-pointwise convolutions. */ class ParamsConfig_NotShuffleNet_NotMobileNet extends ParamsConfig { /** * Compute how many step shoud be used and what is the last step's depthwise filter size, when shrink sourceHeight to outputHeight * by depthwise convolution with ( strides = 1, pad = "valid" ). * * The this.stepCount will be at least 1 (never 0). * The this.depthwiseFilterHeightLast will be at least 1 (at most this.block.depthwiseFilterHeight). * * The this.block.depthwiseFilterHeight might be modified. 
* * @override */ determine_stepCount_depthwiseFilterHeightLast() { let block = this.block; let differenceHeight = block.sourceHeight - block.outputHeight; //let differenceWidth = block.sourceWidth - block.outputWidth; if ( 0 == differenceHeight ) { // 1. No difference between source and output size. this.stepCount = 1; // Only one step is needed. (Avoid no steps. At least, there should be one step.) this.depthwiseFilterHeightLast = 1; // The last (and only one) ste should use filter size 1x1 so that the input size could be kept. } // Since difference between source and output exists, the filter size should be larger than 1x1. if ( block.depthwiseFilterHeight <= 1 ) block.depthwiseFilterHeight = 2; // Otherwise, the image size could not be shrinked. // The height of processed image will be reduced a little for any depthwise filter larger than 1x1. let heightReducedPerStep = block.depthwiseFilterHeight - 1; // The possible step count for reducing sourceHeight to outputHeight by tf.depthwiseConv2d( strides = 1, pad = "valid" ). // // This value may be less than real step count because the filter size of the last step may be larger than its input. let stepCountCandidate = Math.floor( differenceHeight / heightReducedPerStep ); let differenceHeightLast = differenceHeight - ( stepCountCandidate * heightReducedPerStep ); // The last step should reduce so many height. if ( 0 == differenceHeightLast ) { // 2. The original depthwiseFilterHeight could achieve the output size at the last step. this.stepCount = stepCountCandidate; // It is the real step count. this.depthwiseFilterHeightLast = block.depthwiseFilterHeight; // The last step uses the original depthwise filter size is enough. } // 3. The original depthwiseFilterHeight could not achieve the output size at the last step. // It is larger than the last step's input size. An extra step with a smaller filter size is needed. this.stepCount = stepCountCandidate + 1; // Needs one more step. 
// The extra last step's depthwise filter size should just eliminate the last diffference. this.depthwiseFilterHeightLast = differenceHeightLast + 1; } /** @override */ configTo_beforeStep0() { let block = this.block; this.channelCount0_pointwise1Before = block.sourceChannelCount; // Step0 uses the original input channel count. this.channelCount1_pointwise1Before = ValueDesc.channelCount1_pointwise1Before.Singleton.Ids.ONE_INPUT; // no concatenate, no add-input-to-output. this.pointwise1ChannelCount = 0; // In this mode, always no pointwise convolution before depthwise convolution. this.depthwise_AvgMax_Or_ChannelMultiplier = 2; // Step0 double the channel count by depthwise channel multiplier. this.depthwiseFilterHeight = block.depthwiseFilterHeight; // All steps (except stepLast) uses default depthwise filter size. this.depthwiseStridesPad = 0; // In this mode, always ( strides = 1, pad = "valid" ). this.pointwise21ChannelCount = block.sourceChannelCount * block.depthwise_AvgMax_Or_ChannelMultiplier; // Step0 will double channel count. this.pointwise22ChannelCount = 0; // In this mode, always no second output. this.bShouldKeepInputTensor = block.bKeepInputTensor; // Step0 may or may not keep input tensor according to caller's necessary. } /** @override */ configTo_afterStep0( step0 ) { let block = this.block; this.channelCount0_pointwise1Before = step0.outChannelsAll; // Step0's output channel count is all the other steps' input channel count. this.depthwise_AvgMax_Or_ChannelMultiplier = 1; // Except step0, all other steps will not double the channel count. this.pointwise21ChannelCount = step0.outChannelsAll; // Step0's output channel count is all the other steps' output channel count. this.bShouldKeepInputTensor = false; // No matter bKeepInputTensor, all steps (except step0) should not keep input tensor. } } /** Privode parameters for simplified ShuffleNetV2 (i.e. without pointwise1, with concatenator). * * Q: How to specify this configuration? 
* A: By ( bChannelShuffler == true ) and ( pointwise1ChannelCountRate == 0 ) in the parameters of Block.Base. * * (Our) Adjusted ShuffleNetV2: * * Since channel shuffler could achieved efficiently by pointwise convolution, it may be possible to combine the pointwise2 * convolution (after depthwise convolution) and the pointwise convolution (of channel shuffler). That is: * - Concatenate the output of depthwise convolution and the other output group. * - Pointwise convolution to generate output group 1. * - Pointwise convolution to generate output group 2. * * Although the channel shuffler is achieved by pointwise convolution without bias and activation function, however, * the second pointwise convolution (before channel shuffler) indeed has bias and activation function. After combining * these two pointwise convolutions (the original second and the channel shuffler), the total result is twice pointwise * convolution with bias and activation function. * * If the poitwise1 convolution (of every step (include step 0 too)) could be discarded, the step 0 and step 0's branch could * be achieved simultaneously by: * - once depthwise convolution (channelMultipler = 2, strides = 2, pad = same, bias, COS). * - No need to concatenate because the above operation already double channel count. * - twice pointwise2 convolution (every has same as block's input channel count). * * And, the step 1 (, 2, 3, ..., ( n - 2 ) ) could be achieved by: * - once depthwise convolution (channelMultipler = 1, strides = 1, pad = same, bias, COS). * - concatenate. * - twice pointwise2 convolution (every has same as block's input channel count). * * And, the last step (i.e. step ( n - 1 ) ) of the block could be achieved by: * - once depthwise convolution (channelMultipler = 1, strides = 1, pad = same, bias, COS). * - concatenate. * - once pointwise2 convolution (has double of block's input channel count). 
* * Note that: * - The depthwise convolution (channelMultipler = 2, strides = 2) of step 0 achieves simultaneously two depthwise * convolution (channelMultipler = 1, strides = 2) of step 0 and step 0's branch. So, it is one less depthwise * convolution, and one less concatenating. * * - The twice pointwise2 convolution (every has same as block's input channel count) achieves not only pointwise * convolution but also channel shuffling. So, it is one less pointwise convolution. * * - The once pointwise2 convolution (has double of block's input channel count) of last step achieves simultaneously * pointwise convolution, channel shuffling, and concatenating. So, it is not only one less pointwise convolution, * but also one less concatenating. * * - Even if the pointwise1 convolution is discarded and the pointwise2 convolution does not have bias and activation * function, the depthwise convolution (with bias and COS as activation function) and pointwise2 convolution together * compose an effective Fourier series which should have enough expressive power for approximating any function. * */ class ParamsConfig_ShuffleNetV2_Simplified extends ParamsConfig_ShuffleNetV2 { /** @override */ configTo_beforeStep0() { super.configTo_beforeStep0(); // Almost the same as ParamsConfig_ShuffleNetV2. Except the followings. // In this case, ( pointwise1ChannelCountRate == 0 ) so that ( this.pointwise1ChannelCount == 0 ) must true. // // In other words, step0 does not have pointwise1 convolution before depthwise convolution. So the second // depthwise convolution (in original ShuffleNetV2) is not needed. Then, a simpler configuration could be // used. // // Just use once depthwise convolution (but with channel multipler 2) to double the channel count. this.channelCount1_pointwise1Before = ValueDesc.channelCount1_pointwise1Before.Singleton.Ids.ONE_INPUT; // no concatenate, no add-input-to-output. 
this.depthwise_AvgMax_Or_ChannelMultiplier = 2; // Step0 double the channel count by depthwise channel multiplier. } /** @override */ configTo_afterStep0( step0 ) { super.configTo_afterStep0( step0 ); // Almost the same as ParamsConfig_ShuffleNetV2. Except the following. this.depthwise_AvgMax_Or_ChannelMultiplier = 1; // All steps (except step0) will not double the channel count. // Note: ( this.pointwise1ChannelCount == 0 ) still true here. All steps do not have pointwise1 convolution before depthwise convolution. } } /** Privode parameters for ShuffleNetV2 (i.e. with pointwise1, with concatenator). */ class ParamsConfig_ShuffleNetV2 extends ParamsConfig { /** @override */ configTo_beforeStep0() { let block = this.block; this.channelCount0_pointwise1Before = block.sourceChannelCount; // Step0 uses the original input channel count (as input0). this.channelCount1_pointwise1Before = ValueDesc.channelCount1_pointwise1Before.Singleton.Ids.ONE_INPUT_TWO_DEPTHWISE; // with concatenation. this.depthwise_AvgMax_Or_ChannelMultiplier = 1; // All steps will not double the channel count. this.depthwiseFilterHeight = this.block.depthwiseFilterHeight; // All steps uses default depthwise filter size. this.depthwiseStridesPad = 2; // Step0 uses depthwise ( strides = 2, pad = "same" ) to halve ( height, width ). // If an operation has no activation function, it can have no bias too. Because the next operation's bias can achieve the same result. this.depthwiseBias = false; this.depthwiseActivationId = PointDepthPoint.Params.Activation.Ids.NONE; // In ShuffleNetV2, depthwise convolution doesn't have activation. this.pointwise21ChannelCount = block.sourceChannelCount; // All steps' (except stepLast) output0 is the same depth as source input0. this.pointwise22ChannelCount = block.sourceChannelCount; // All steps' (except stepLast) output1 is the same depth as source input1. // In ShuffleNetV2, all steps have pointwise1 convolution before depthwise convolution. 
Its channel count is adjustable by user's request. this.pointwise1ChannelCount = this.pointwise21ChannelCount * block.pointwise1ChannelCountRate; // In ShuffleNetV2, the rate is usually 1. this.bShouldKeepInputTensor = block.bKeepInputTensor; // Step0 may or may not keep input tensor according to caller's necessary. } /** @override */ configTo_afterStep0( step0 ) { let block = this.block; // The ( input0, input1 ) of all steps (except step0) have the same depth as previous (also step0's) step's ( output0, output1 ). this.channelCount0_pointwise1Before = step0.outChannels0; this.channelCount1_pointwise1Before = step0.outChannels1; // i.e. TWO_INPUTS (with concatenation, without add-input-to-output). this.depthwiseStridesPad = 1; // All steps (except step0) uses depthwise ( strides = 1, pad = "same" ) to keep ( height, width ). this.bShouldKeepInputTensor = false; // No matter bKeepInputTensor, all steps (except step0) should not keep input tensor. } /** @override */ configTo_beforeStepLast() { super.configTo_beforeStepLast(); // Still, stepLast may use a different activation function after pointwise2 convolution. // In ShuffleNetV2, the stepLast only has output0 (no output1). And the output0 has double channel count of source input0. // // Note: Although pointwise21 channel count changed, however, the pointwise1ChannelCount is not changed because the final // output0 is viewed as concatenation of pointwise21 and pointwise22. In pointwise1's point of view, its pointwise2 does // not changed. this.pointwise21ChannelCount = block.sourceChannelCount * 2; this.pointwise22ChannelCount = 0; } } /** Privode parameters for MobileNetV1 or MobileNetV2 (i.e. with pointwise1, with add-input-to-output). */ class ParamsConfig_MobileNet extends ParamsConfig { /** @override */ configTo_beforeStep0() { let block = this.block; this.channelCount0_pointwise1Before = block.sourceChannelCount; // Step0 uses the original input channel count (as input0). 
// In MobileNet, all steps (include step0) do not use input1 and do add-input-to-output (without concatenation). this.channelCount1_pointwise1Before = ValueDesc.channelCount1_pointwise1Before.Singleton.Ids.ONE_INPUT_ADD_TO_OUTPUT; this.depthwise_AvgMax_Or_ChannelMultiplier = 1; // All steps will not double the channel count. this.depthwiseFilterHeight = this.block.depthwiseFilterHeight; // All steps uses default depthwise filter size. this.depthwiseStridesPad = 2; // Step0 uses depthwise ( strides = 2, pad = "same" ) to halve ( height, width ). this.pointwise21ChannelCount = block.sourceChannelCount * 2; // In MobileNetV2, all steps (include step0) output0 is twice depth of source input0. this.pointwise22ChannelCount = 0; // In MobileNetV2, all steps (include step0) do not have output1. // If an operation has no activation function, it can have no bias too. Because the next operation's bias can achieve the same result. this.pointwise2Bias = false; // In MobileNetV2, the second 1x1 pointwise convolution doesn't have activation function in default. // // But it could be changed by nActivationIdAtBlockEnd for the last step of the block. this.pointwise2ActivationId = PointDepthPoint.Params.Activation.Ids.NONE; // In MobileNet, all steps have pointwise1 convolution before depthwise convolution. Its channel count is adjustable by user's request. // // Q: How to know whether it is MobileNetV2 or MobileNetV1? // A: By pointwise1ChannelCountRate. // - If ( pointwise1ChannelCount < pointwise2ChannelCount ), similar to ResNet. // - If ( pointwise1ChannelCount == pointwise2ChannelCount ), similar to MobileNetV1 or ShufffleNetV2. // - If ( pointwise1ChannelCount > pointwise2ChannelCount ), similar to MobileNetV2. this.pointwise1ChannelCount = this.pointwise21ChannelCount * block.pointwise1ChannelCountRate; // In MobileNetV2, the rate is usually 2. this.bShouldKeepInputTensor = block.bKeepInputTensor; // Step0 may or may not keep input tensor according to caller's necessary. 
} /** @override */ configTo_afterStep0( step0 ) { let block = this.block; // The input0 of all steps (except step0) have the same depth as previous (also step0's) step's output0. this.channelCount0_pointwise1Before = step0.outChannels0; this.depthwiseStridesPad = 1; // All steps (except step0) uses depthwise ( strides = 1, pad = "same" ) to keep ( height, width ). this.bShouldKeepInputTensor = false; // No matter bKeepInputTensor, all steps (except step0) should not keep input tensor. } }
Update Block.js
CNN/Conv/Block.js
Update Block.js
<ide><path>NN/Conv/Block.js <ide> <ide> this.disposeTensors(); <ide> <add>//!!! ...unfinished... (2021/07/30) should be moved into Params. Then ParamsConfig constructor accept Params (instead of Base). <add>// So that ParamsConfig could be tested individually. <ide> this.sourceHeight = sourceHeight; <ide> this.sourceWidth = sourceWidth; <ide> this.sourceChannelCount = sourceChannelCount; <ide> yield progressRoot; // Parameters extracted. Report progress. <ide> <ide> <del>//!!! ...unfinished... (2021/07/30) Perhaps, moved to a standalone function. <add>//!!! ...unfinished... (2021/07/30) Perhaps, moved to Params.outputHeight() as a standalone function. <ide> <ide> // By default, the output ( height, width ) is half of the input (i.e. result of depthwise convolution with ( strides = 2, pad = "same" ) ). <ide> //
Java
mit
ecc044b00aba57ba6fefdd83fd4b9682cd092648
0
danschultequb/qub-java
package qub; public class JavaTests { public static void test(TestRunner runner) { runner.testGroup(Java.class, () -> { runner.test("constructor()", (Test test) -> { final Java parser = new Java(); test.assertNotNull(parser); }); runner.testGroup("parse(String,Action1<Issue>)", () -> { final Action3<String,JavaSegment[],Issue[]> parseTest = (String text, JavaSegment[] expectedSegments, Issue[] expectedIssues) -> { runner.test("with " + Strings.escapeAndQuote(text) + " text and no issues", (Test test) -> { final JavaDocument document = Java.parse(text, null); test.assertNotNull(document); test.assertEqual(Array.fromValues(expectedSegments), document.getSegments()); }); runner.test("with " + Strings.escapeAndQuote(text) + " text and issues", (Test test) -> { final List<Issue> issues = new ArrayList<>(); final JavaDocument document = Java.parse(text, issues::add); test.assertNotNull(document); test.assertEqual(Array.fromValues(expectedSegments), document.getSegments()); test.assertEqual(Array.fromValues(expectedIssues), issues); }); }; parseTest.run(null, null, null); parseTest.run("", null, null); parseTest.run(" ", new JavaSegment[] { new JavaSegment(JavaSegmentType.Whitespace, Lex.space(0), Lex.space(1), Lex.space(2), Lex.space(3)) }, null); parseTest.run("\n", new JavaSegment[] { new JavaSegment(JavaSegmentType.Whitespace, Lex.newLine(0)) }, null); parseTest.run("abc", new JavaSegment[] { new JavaSegment(JavaSegmentType.Unrecognized, Lex.letters("abc", 0)) }, new Issue[] { JavaIssues.expectedPackageOrTypeDefinition(new Span(0, 3)) }); parseTest.run("package", new JavaSegment[] { new JavaSegment(JavaSegmentType.Package, Lex.letters("package", 0)) }, new Issue[] { JavaIssues.missingPackagePath(new Span(0, 7)) }); parseTest.run("package123", new JavaSegment[] { new JavaSegment(JavaSegmentType.Package, Lex.letters("package", 0)), new JavaSegment(JavaSegmentType.Unrecognized, Lex.digits("123", 7)) }, new Issue[] { 
JavaIssues.expectedWhitespaceBetweenPackageAndPackagePath(new Span(7, 3)), JavaIssues.expectedTypeDefinition(new Span(7, 3)) }); parseTest.run("package ", new JavaSegment[] { new JavaSegment(JavaSegmentType.Package, Lex.letters("package", 0), Lex.space(7)) }, new Issue[] { JavaIssues.missingPackagePath(new Span(0, 7)) }); parseTest.run("package 0", new JavaSegment[] { new JavaSegment(JavaSegmentType.Package, Lex.letters("package", 0), Lex.space(7)), new JavaSegment(JavaSegmentType.Unrecognized, Lex.digits("0", 8)) }, new Issue[] { JavaIssues.expectedPackagePathLetters(new Span(8, 1)), JavaIssues.expectedTypeDefinition(new Span(8, 1)) }); parseTest.run("package qub", new JavaSegment[] { new JavaSegment(JavaSegmentType.Package, Lex.letters("package", 0), Lex.space(7), Lex.letters("qub", 8)) }, new Issue[] { JavaIssues.missingStatementSemicolon(new Span(8, 3)) }); parseTest.run("package qub;", new JavaSegment[] { new JavaSegment(JavaSegmentType.Package, Lex.letters("package", 0), Lex.space(7), Lex.letters("qub", 8), Lex.semicolon(11)) }, null); }); }); } }
tests/qub/JavaTests.java
package qub; public class JavaTests { public static void test(TestRunner runner) { runner.testGroup(Java.class, () -> { runner.test("constructor()", (Test test) -> { final Java parser = new Java(); test.assertNotNull(parser); }); runner.testGroup("parse(String,Action1<Issue>)", () -> { final Action3<String,JavaSegment[],Issue[]> parseTest = (String text, JavaSegment[] expectedSegments, Issue[] expectedIssues) -> { runner.test("with " + Strings.escapeAndQuote(text) + " text and no issues", (Test test) -> { final JavaDocument document = Java.parse(text, null); test.assertNotNull(document); test.assertEqual(Array.fromValues(expectedSegments), document.getSegments()); }); runner.test("with " + Strings.escapeAndQuote(text) + " text and issues", (Test test) -> { final List<Issue> issues = new ArrayList<>(); final JavaDocument document = Java.parse(text, issues::add); test.assertNotNull(document); test.assertEqual(Array.fromValues(expectedSegments), document.getSegments()); test.assertEqual(Array.fromValues(expectedIssues), issues); }); }; parseTest.run(null, null, null); parseTest.run("", null, null); parseTest.run(" ", new JavaSegment[] { new JavaSegment(JavaSegmentType.Whitespace, Lex.space(0), Lex.space(1), Lex.space(2), Lex.space(3)) }, null); parseTest.run("\n", new JavaSegment[] { new JavaSegment(JavaSegmentType.Whitespace, Lex.newLine(0)) }, null); parseTest.run("abc", new JavaSegment[] { new JavaSegment(JavaSegmentType.Unrecognized, Lex.letters("abc", 0)) }, new Issue[] { JavaIssues.expectedPackageOrTypeDefinition(new Span(0, 3)) }); parseTest.run("package", new JavaSegment[] { new JavaSegment(JavaSegmentType.Package, Lex.letters("package", 0)) }, new Issue[] { JavaIssues.missingPackagePath(new Span(0, 7)) }); parseTest.run("package123", new JavaSegment[] { new JavaSegment(JavaSegmentType.Package, Lex.letters("package", 0)), new JavaSegment(JavaSegmentType.Unrecognized, Lex.digits("123", 7)) }, new Issue[] { 
JavaIssues.expectedWhitespaceBetweenPackageAndPackagePath(new Span(7, 3)), JavaIssues.expectedTypeDefinition(new Span(7, 3)) }); parseTest.run("package ", new JavaSegment[] { new JavaSegment(JavaSegmentType.Package, Lex.letters("package", 0), Lex.space(7)) }, new Issue[] { JavaIssues.missingPackagePath(new Span(0, 7)) }); parseTest.run("package 0", new JavaSegment[] { new JavaSegment(JavaSegmentType.Package, Lex.letters("package", 0), Lex.space(7)), new JavaSegment(JavaSegmentType.Unrecognized, Lex.digits("0", 8)) }, new Issue[] { JavaIssues.expectedPackagePathLetters(new Span(8, 1)), JavaIssues.expectedTypeDefinition(new Span(8, 1)) }); parseTest.run("package qub;", new JavaSegment[] { new JavaSegment(JavaSegmentType.Package, Lex.letters("package", 0), Lex.space(7), Lex.letters("qub", 8), Lex.semicolon(11)) }, null); }); }); } }
Add another Java parse() test
tests/qub/JavaTests.java
Add another Java parse() test
<ide><path>ests/qub/JavaTests.java <ide> JavaIssues.expectedPackagePathLetters(new Span(8, 1)), <ide> JavaIssues.expectedTypeDefinition(new Span(8, 1)) <ide> }); <add> parseTest.run("package qub", <add> new JavaSegment[] <add> { <add> new JavaSegment(JavaSegmentType.Package, <add> Lex.letters("package", 0), <add> Lex.space(7), <add> Lex.letters("qub", 8)) <add> }, <add> new Issue[] <add> { <add> JavaIssues.missingStatementSemicolon(new Span(8, 3)) <add> }); <ide> parseTest.run("package qub;", <ide> new JavaSegment[] <ide> {
JavaScript
agpl-3.0
caf106f78b840d59f167c1053f13b15c44039827
0
ontohub/ontohub,ontohub/ontohub,ontohub/ontohub,ontohub/ontohub,ontohub/ontohub,ontohub/ontohub
$(function(){ var form = $("#bulkupload"); if(!form[0]) return; var uploader = { form: form, running: false, // Pattern to the the URIs uriPattern: /(https?:\/\/?\S+)/g, jobs: [], created: 0, failed: 0, remaining: 0, // Inititialize the Uploader init: function(){ var self = this, form = this.form; this.uri = form.attr("action"); this.textarea = form.find("textarea"); this.actions = form.find("fieldset.actions"); this.progressbar = form.find(".progressbar"); this.statusUri = form.find(".status .uri"); this.statsContainer = form.find(".stats").hide(); this.showAction('start'); form.submit(function(event){ event.preventDefault(); if(self.running) self.cancel(); else self.run(); }) }, // hides all except the given action showAction: function(action){ this.actions.children().each(function(){ $(this).toggle($(this).hasClass(action)); }) }, // extracts URIs from the textarea getURIs: function(){ return this.form.find("textarea").val().match(this.uriPattern); }, // Starts the Uploader run: function(){ var uris = this.getURIs(); if(!uris){ alert("No supported URIs found!") return; } this.running = true; this.showAction('stop'); this.statsContainer.show(); this.initProgress(uris.length); this.createJobs(uris); this.nextJob(); }, // Initializes the progressbar initProgress: function(max){ this.updateStats('remaining', max); this.progressbar.progressbar({ max: max }); }, // Updates the progressbar updateProgress: function(){ this.progressbar.progressbar("option", "value", this.created + this.failed); }, // create jobs createJobs: function(uris){ var self = this; var list = $("<ol class='queue'></ol>"); $.each(uris, function(i,uri){ var li = $("<li></li>").data('uri', uri).text(uri) li.appendTo(list); self.jobs.push(li); }) this.textarea.replaceWith(list); }, // Handler for the cancel button cancel: function(){ this.showAction('restart'); }, // Mark the uploader as finished finished: function(){ this.cancel(); }, // is called when the current job is done jobDone: function(){ 
this.updateStats('remaining',-1); this.updateProgress(); if(this.jobs.length > 0) this.nextJob() else this.finished(); }, // updates the created/failed/remaining counter updateStats: function(field, change){ this[field] += change; this.statsContainer.find("."+field+" .count").text(this[field]); }, // executes the next job nextJob: function(){ var self = this; var job = this.jobs.shift(); var uri = job.data('uri'); // display the current job this.statusUri.text(uri); window.setTimeout(function(){ $.ajax({ type: 'POST', url: self.uri, data: { 'ontology[uri]': uri, 'ontology[versions_attributes][0][source_uri]': uri }, format: 'json' }) .success(function(){ self.updateStats('created',1); job.addClass('success'); }) .error(function(xhr, status, error){ self.updateStats('failed',1); var message = $("<ul class='errors'></ul>") if(xhr.getResponseHeader("Content-Type").indexOf("application/json") === 0){ $.each($.parseJSON(xhr.responseText).errors, function(attr,errors){ $("<li></li>").text(attr + " " + errors).appendTo(message); }); }else{ $("<li></li>").text(error).appendTo(message); } job.addClass('error').append(message); }) .complete(function(){ self.jobDone(); }) },500); } } uploader.init(); })
app/assets/javascripts/bulkupload.js
$(function(){ var form = $("#bulkupload"); if(!form[0]) return; var uploader = { form: form, running: false, // Pattern to the the URIs uriPattern: /(https?:\/\/?\S+)/g, jobs: [], created: 0, failed: 0, remaining: 0, // Inititialize the Uploader init: function(){ var self = this, form = this.form; this.uri = form.attr("action"); this.textarea = form.find("textarea"); this.actions = form.find("fieldset.actions"); this.progressbar = form.find(".progressbar"); this.statusUri = form.find(".status .uri"); this.statsContainer = form.find(".stats").hide(); this.showAction('start'); form.submit(function(event){ event.preventDefault(); if(self.running) self.cancel(); else self.run(); }) }, // hides all except the given action showAction: function(action){ this.actions.children().each(function(){ $(this).toggle($(this).hasClass(action)); }) }, // extracts URIs from the textarea getURIs: function(){ return this.form.find("textarea").val().match(this.uriPattern); }, // Starts the Uploader run: function(){ var uris = this.getURIs(); if(!uris){ alert("No supported URIs found!") return; } this.running = true; this.showAction('stop'); this.statsContainer.show(); this.initProgress(uris.length); this.createJobs(uris); this.nextJob(); }, // Initializes the progressbar initProgress: function(max){ this.updateStats('remaining', max); this.progressbar.progressbar({ max: max }); }, // Updates the progressbar updateProgress: function(){ this.progressbar.progressbar("option", "value", this.created + this.failed); }, // create jobs createJobs: function(uris){ var self = this; var list = $("<ol class='queue'></ol>"); $.each(uris, function(i,uri){ var li = $("<li></li>").data('uri', uri).text(uri) li.appendTo(list); self.jobs.push(li); }) this.textarea.replaceWith(list); }, // Handler for the cancel button cancel: function(){ this.showAction('restart'); }, // Mark the uploader as finished finished: function(){ this.cancel(); }, // is called when the current job is done jobDone: function(){ 
this.updateStats('remaining',-1); this.updateProgress(); if(this.jobs.length > 0) this.nextJob() else this.finished(); }, // updates the created/failed/remaining counter updateStats: function(field, change){ this[field] += change; this.statsContainer.find("."+field+" .count").text(this[field]); }, // executes the next job nextJob: function(){ var self = this; var job = this.jobs.shift(); var uri = job.data('uri'); // display the current job this.statusUri.text(uri); window.setTimeout(function(){ $.ajax({ type: 'POST', url: self.uri, data: { 'ontology[uri]': uri, 'ontology[versions_attributes][0][remote_raw_file_url]': uri }, format: 'json' }) .success(function(){ self.updateStats('created',1); job.addClass('success'); }) .error(function(xhr, status, error){ self.updateStats('failed',1); var message = $("<ul class='errors'></ul>") if(xhr.getResponseHeader("Content-Type").indexOf("application/json") === 0){ $.each($.parseJSON(xhr.responseText).errors, function(attr,errors){ $("<li></li>").text(attr + " " + errors).appendTo(message); }); }else{ $("<li></li>").text(error).appendTo(message); } job.addClass('error').append(message); }) .complete(function(){ self.jobDone(); }) },500); } } uploader.init(); })
bulk uploader fixed
app/assets/javascripts/bulkupload.js
bulk uploader fixed
<ide><path>pp/assets/javascripts/bulkupload.js <ide> url: self.uri, <ide> data: { <ide> 'ontology[uri]': uri, <del> 'ontology[versions_attributes][0][remote_raw_file_url]': uri <add> 'ontology[versions_attributes][0][source_uri]': uri <ide> }, <ide> format: 'json' <ide> })
Java
apache-2.0
9c37f644b9ef99b80188ca8abc9924bbd9189bcc
0
EvilMcJerkface/atlasdb,palantir/atlasdb,palantir/atlasdb,EvilMcJerkface/atlasdb,EvilMcJerkface/atlasdb,palantir/atlasdb
/** * Copyright 2015 Palantir Technologies * * Licensed under the BSD-3 License (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://opensource.org/licenses/BSD-3-Clause * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.palantir.atlasdb.keyvalue.cassandra; import java.net.InetSocketAddress; import java.net.SocketException; import java.net.SocketTimeoutException; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.NoSuchElementException; import java.util.Set; import java.util.TreeMap; import java.util.concurrent.ScheduledThreadPoolExecutor; import java.util.concurrent.ThreadLocalRandom; import java.util.concurrent.TimeUnit; import org.apache.cassandra.thrift.Cassandra; import org.apache.cassandra.thrift.Cassandra.Client; import org.apache.cassandra.thrift.CfDef; import org.apache.cassandra.thrift.InvalidRequestException; import org.apache.cassandra.thrift.KsDef; import org.apache.cassandra.thrift.NotFoundException; import org.apache.cassandra.thrift.SchemaDisagreementException; import org.apache.cassandra.thrift.TimedOutException; import org.apache.cassandra.thrift.TokenRange; import org.apache.cassandra.thrift.UnavailableException; import org.apache.thrift.TException; import org.apache.thrift.transport.TTransportException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Function; import com.google.common.base.Preconditions; import com.google.common.collect.HashMultimap; import com.google.common.collect.ImmutableList; 
import com.google.common.collect.ImmutableRangeMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Multimap; import com.google.common.collect.Range; import com.google.common.collect.RangeMap; import com.google.common.collect.Sets; import com.google.common.collect.Sets.SetView; import com.google.common.io.BaseEncoding; import com.google.common.primitives.UnsignedBytes; import com.google.common.util.concurrent.ThreadFactoryBuilder; import com.palantir.atlasdb.cassandra.CassandraKeyValueServiceConfig; import com.palantir.atlasdb.keyvalue.api.InsufficientConsistencyException; import com.palantir.atlasdb.keyvalue.api.TableReference; import com.palantir.atlasdb.keyvalue.cassandra.CassandraClientFactory.ClientCreationFailedException; import com.palantir.common.base.FunctionCheckedException; import com.palantir.common.concurrent.PTExecutors; /** * Feature breakdown: * - Pooling * - Token Aware Mapping / Query Routing / Data partitioning * - Retriable Queries * - Pool member error tracking / blacklisting* * - Pool refreshing * - Pool node autodiscovery * - Pool member health checking* * * *entirely new features * * By our old system, this would be a RefreshingRetriableTokenAwareHealthCheckingManyHostCassandraClientPoolingContainerManager; * ... this is one of the reasons why there is a new system. **/ public class CassandraClientPool { private static final Logger log = LoggerFactory.getLogger(CassandraClientPool.class); /** * This is the maximum number of times we'll accept connection failures to one host before blacklisting it. 
Note * that subsequent hosts we try in the same call will actually be blacklisted after one connection failure */ private static final int MAX_TRIES_SAME_HOST = 3; private static final int MAX_TRIES_TOTAL = 6; volatile RangeMap<LightweightOPPToken, List<InetSocketAddress>> tokenMap = ImmutableRangeMap.of(); Map<InetSocketAddress, Long> blacklistedHosts = Maps.newConcurrentMap(); Map<InetSocketAddress, CassandraClientPoolingContainer> currentPools = Maps.newConcurrentMap(); final CassandraKeyValueServiceConfig config; final ScheduledThreadPoolExecutor refreshDaemon; public static class LightweightOPPToken implements Comparable<LightweightOPPToken> { final byte[] bytes; public LightweightOPPToken(byte[] bytes) { this.bytes = bytes; } @Override public int compareTo(LightweightOPPToken other) { return UnsignedBytes.lexicographicalComparator().compare(this.bytes, other.bytes); } @Override public String toString() { return BaseEncoding.base16().encode(bytes); } } public CassandraClientPool(CassandraKeyValueServiceConfig config) { this.config = config; refreshDaemon = PTExecutors.newScheduledThreadPool(1, new ThreadFactoryBuilder().setDaemon(true).setNameFormat("CassandraClientPoolRefresh-%d").build()); refreshDaemon.scheduleWithFixedDelay(new Runnable() { @Override public void run() { try { refreshPool(); } catch (Throwable t) { log.error("Failed to refresh Cassandra KVS pool. 
Extended periods of being unable to refresh will cause perf degradation.", t); } } }, config.poolRefreshIntervalSeconds(), config.poolRefreshIntervalSeconds(), TimeUnit.SECONDS); config.servers().forEach((server) -> currentPools.put(server, new CassandraClientPoolingContainer(server, config))); refreshPool(); // ensure we've initialized before returning } public void shutdown() { refreshDaemon.shutdown(); currentPools.forEach((address, cassandraClientPoolingContainer) -> cassandraClientPoolingContainer.shutdownPooling()); } private synchronized void refreshPool() { checkAndUpdateBlacklist(); Set<InetSocketAddress> serversToAdd = Sets.newHashSet(config.servers()); Set<InetSocketAddress> serversToRemove = ImmutableSet.of(); if (config.autoRefreshNodes()) { refreshTokenRanges(); // re-use token mapping as list of hosts in the cluster for (List<InetSocketAddress> rangeOwners : tokenMap.asMapOfRanges().values()) { for (InetSocketAddress address : rangeOwners) { serversToAdd.add(address); } } } serversToAdd = Sets.difference(serversToAdd, currentPools.keySet()); if (!config.autoRefreshNodes()) { // (we would just add them back in) serversToRemove = Sets.difference(currentPools.keySet(), config.servers()); } for (InetSocketAddress newServer : serversToAdd) { currentPools.put(newServer, new CassandraClientPoolingContainer(newServer, config)); } for (InetSocketAddress removedServerAddress : serversToRemove) { removePool(removedServerAddress); } if (!(serversToAdd.isEmpty() && serversToRemove.isEmpty())) { // if we made any changes sanityCheckRingConsistency(); if (!config.autoRefreshNodes()) { // grab new token mapping, if we didn't already do this before refreshTokenRanges(); } } log.debug("Cassandra pool refresh added hosts {}, removed hosts {}.", serversToAdd, serversToRemove); debugLogStateOfPool(); } private void removePool(InetSocketAddress removedServerAddress) { blacklistedHosts.remove(removedServerAddress); try { 
currentPools.get(removedServerAddress).shutdownPooling(); } catch (Exception e) { log.warn("While removing a host ({}) from the pool, we were unable to gently cleanup resources.", removedServerAddress, e); } currentPools.remove(removedServerAddress); } private void debugLogStateOfPool() { if (log.isDebugEnabled()) { StringBuilder currentState = new StringBuilder(); currentState.append( String.format("POOL STATUS: Current blacklist = %s,%n current hosts in pool = %s%n", blacklistedHosts.keySet().toString(), currentPools.keySet().toString())); for (Entry<InetSocketAddress, CassandraClientPoolingContainer> entry : currentPools.entrySet()) { int activeCheckouts = entry.getValue().getPoolUtilization(); int totalAllowed = entry.getValue().getPoolSize(); currentState.append( String.format("\tPOOL STATUS: Pooled host %s has %s out of %s connections checked out.%n", entry.getKey(), activeCheckouts > 0? Integer.toString(activeCheckouts) : "(unknown)", totalAllowed > 0? Integer.toString(totalAllowed) : "(not bounded)")); } log.debug(currentState.toString()); } } private void checkAndUpdateBlacklist() { // Check blacklist and re-integrate or continue to wait as necessary for (Map.Entry<InetSocketAddress, Long> blacklistedEntry : blacklistedHosts.entrySet()) { long backoffTimeMillis = TimeUnit.SECONDS.toMillis(config.unresponsiveHostBackoffTimeSeconds()); if (blacklistedEntry.getValue() + backoffTimeMillis < System.currentTimeMillis()) { InetSocketAddress host = blacklistedEntry.getKey(); if (isHostHealthy(host)) { blacklistedHosts.remove(host); log.error("Added host {} back into the pool after a waiting period and successful health check.", host); } } } } private void addToBlacklist(InetSocketAddress badHost) { blacklistedHosts.put(badHost, System.currentTimeMillis()); log.info("Blacklisted host '{}'", badHost); } private boolean isHostHealthy(InetSocketAddress host) { try { CassandraClientPoolingContainer testingContainer = currentPools.get(host); 
testingContainer.runWithPooledResource(describeRing); testingContainer.runWithPooledResource(validatePartitioner); return true; } catch (Exception e) { log.error("We tried to add {} back into the pool, but got an exception that caused to us distrust this host further.", host, e); return false; } } private CassandraClientPoolingContainer getRandomGoodHost() { Map<InetSocketAddress, CassandraClientPoolingContainer> pools = currentPools; Set<InetSocketAddress> livingHosts = Sets.difference(pools.keySet(), blacklistedHosts.keySet()); if (livingHosts.isEmpty()) { log.error("There are no known live hosts in the connection pool. We're choosing one at random in a last-ditch attempt at forward progress."); livingHosts = pools.keySet(); } return pools.get(getRandomHostByActiveConnections(Maps.filterKeys(currentPools, livingHosts::contains))); } public InetSocketAddress getRandomHostForKey(byte[] key) { List<InetSocketAddress> hostsForKey = tokenMap.get(new LightweightOPPToken(key)); SetView<InetSocketAddress> liveOwnerHosts; if (hostsForKey == null) { log.debug( "We attempted to route your query to a cassandra host that already contains the relevant data. " + "However, the mapping of which host contains which data is not available yet. We will choose a random host instead."); return getRandomGoodHost().getHost(); } else { liveOwnerHosts = Sets.difference(ImmutableSet.copyOf(hostsForKey), blacklistedHosts.keySet()); } if (liveOwnerHosts.isEmpty()) { log.warn("Perf / cluster stability issue. Token aware query routing has failed because there are no known " + "live hosts that claim ownership of the given range. Falling back to choosing a random live node. 
" + "Current state logged at DEBUG"); log.debug("Current ring view is: {} and our current host blacklist is {}", tokenMap, blacklistedHosts); return getRandomGoodHost().getHost(); } else { return getRandomHostByActiveConnections(Maps.filterKeys(currentPools, liveOwnerHosts::contains)); } } private static InetSocketAddress getRandomHostByActiveConnections(Map<InetSocketAddress, CassandraClientPoolingContainer> pools) { return WeightedHosts.create(pools).getRandomHost(); } public void runOneTimeStartupChecks() { try { CassandraVerifier.ensureKeyspaceExistsAndIsUpToDate(this, config); } catch (Exception e) { log.error("Startup checks failed, was not able to create the keyspace or ensure it already existed."); throw new RuntimeException(e); } Map<InetSocketAddress, Exception> completelyUnresponsiveHosts = Maps.newHashMap(), aliveButInvalidPartitionerHosts = Maps.newHashMap(); boolean thisHostResponded, atLeastOneHostResponded = false, atLeastOneHostSaidWeHaveALockTable = false; for (InetSocketAddress host : currentPools.keySet()) { thisHostResponded = false; try { runOnHost(host, CassandraVerifier.healthCheck); thisHostResponded = true; atLeastOneHostResponded = true; } catch (Exception e) { completelyUnresponsiveHosts.put(host, e); addToBlacklist(host); } if (thisHostResponded) { try { runOnHost(host, validatePartitioner); } catch (Exception e) { aliveButInvalidPartitionerHosts.put(host, e); } try { runOnHost(host, createInternalLockTable); atLeastOneHostSaidWeHaveALockTable = true; } catch (Exception e) { // don't fail here, want to give the user all the errors at once at the end } } } StringBuilder errorBuilderForEntireCluster = new StringBuilder(); if (completelyUnresponsiveHosts.size() > 0) { errorBuilderForEntireCluster.append("Performing routine startup checks, determined that the following hosts are unreachable for the following reasons: \n"); completelyUnresponsiveHosts.forEach((host, exception) -> errorBuilderForEntireCluster.append(String.format("\tHost: %s 
was marked unreachable via exception: %s%n", host.toString(), exception.toString()))); } if (aliveButInvalidPartitionerHosts.size() > 0) { errorBuilderForEntireCluster.append("Performing routine startup checks, determined that the following hosts were alive but are configured with an invalid partitioner: \n"); aliveButInvalidPartitionerHosts.forEach((host, exception) -> errorBuilderForEntireCluster.append(String.format("\tHost: %s was marked as invalid partitioner via exception: %s%n", host.toString(), exception.toString()))); } if (atLeastOneHostResponded && atLeastOneHostSaidWeHaveALockTable && aliveButInvalidPartitionerHosts.size() == 0) { return; } else { throw new RuntimeException(errorBuilderForEntireCluster.toString()); } } //todo dedupe this into a name-demangling class that everyone can access protected static String internalTableName(TableReference tableRef) { String tableName = tableRef.getQualifiedName(); if (tableName.startsWith("_")) { return tableName; } return tableName.replaceFirst("\\.", "__"); } // for tables internal / implementation specific to this KVS; these also don't get metadata in metadata table, nor do they show up in getTablenames, nor does this use concurrency control private void createTableInternal(Client client, TableReference tableRef) throws InvalidRequestException, SchemaDisagreementException, TException, NotFoundException { if (tableAlreadyExists(client, internalTableName(tableRef))) { return; } CfDef cf = CassandraConstants.getStandardCfDef(config.keyspace(), internalTableName(tableRef)); client.system_add_column_family(cf); CassandraKeyValueServices.waitForSchemaVersions(client, tableRef.getQualifiedName(), config.schemaMutationTimeoutMillis()); return; } private boolean tableAlreadyExists(Client client, String caseInsensitiveTableName) throws TException { KsDef ks = client.describe_keyspace(config.keyspace()); for (CfDef cf : ks.getCf_defs()) { if (cf.getName().equalsIgnoreCase(caseInsensitiveTableName)) { return true; } } 
return false; } private void refreshTokenRanges() { try { List<TokenRange> tokenRanges = getRandomGoodHost().runWithPooledResource(describeRing); ImmutableRangeMap.Builder<LightweightOPPToken, List<InetSocketAddress>> newTokenRing = ImmutableRangeMap.builder(); for (TokenRange tokenRange : tokenRanges) { List<InetSocketAddress> hosts = Lists.transform(tokenRange.getEndpoints(), new Function<String, InetSocketAddress>() { @Override public InetSocketAddress apply(String endpoint) { return new InetSocketAddress(endpoint, CassandraConstants.DEFAULT_THRIFT_PORT); } }); LightweightOPPToken startToken = new LightweightOPPToken(BaseEncoding.base16().decode(tokenRange.getStart_token().toUpperCase())); LightweightOPPToken endToken = new LightweightOPPToken(BaseEncoding.base16().decode(tokenRange.getEnd_token().toUpperCase())); if (startToken.compareTo(endToken) <= 0) { newTokenRing.put(Range.openClosed(startToken, endToken), hosts); } else { // Handle wrap-around newTokenRing.put(Range.greaterThan(startToken), hosts); newTokenRing.put(Range.atMost(endToken), hosts); } } tokenMap = newTokenRing.build(); } catch (Exception e) { log.error("Couldn't grab new token ranges for token aware cassandra mapping!", e); } } private FunctionCheckedException<Cassandra.Client, List<TokenRange>, Exception> describeRing = new FunctionCheckedException<Cassandra.Client, List<TokenRange>, Exception>() { @Override public List<TokenRange> apply (Cassandra.Client client) throws Exception { return client.describe_ring(config.keyspace()); }}; public <V, K extends Exception> V runWithRetry(FunctionCheckedException<Cassandra.Client, V, K> f) throws K { return runWithRetryOnHost(getRandomGoodHost().getHost(), f); } public <V, K extends Exception> V runWithRetryOnHost(InetSocketAddress specifiedHost, FunctionCheckedException<Cassandra.Client, V, K> f) throws K { int numTries = 0; while (true) { CassandraClientPoolingContainer hostPool = currentPools.get(specifiedHost); if 
(blacklistedHosts.containsKey(specifiedHost) || hostPool == null) { log.warn("Randomly redirected a query intended for host {} because it was not currently a live member of the pool.", specifiedHost); hostPool = getRandomGoodHost(); } try { return hostPool.runWithPooledResource(f); } catch (Exception e) { numTries++; this.<K>handleException(numTries, hostPool.getHost(), e); } } } public <V, K extends Exception> V run(FunctionCheckedException<Cassandra.Client, V, K> f) throws K { return runOnHost(getRandomGoodHost().getHost(), f); } private <V, K extends Exception> V runOnHost(InetSocketAddress specifiedHost, FunctionCheckedException<Cassandra.Client, V, K> f) throws K { CassandraClientPoolingContainer hostPool = currentPools.get(specifiedHost); return hostPool.runWithPooledResource(f); } @SuppressWarnings("unchecked") private <K extends Exception> void handleException(int numTries, InetSocketAddress host, Exception e) throws K { if (isRetriableException(e)) { if (numTries >= MAX_TRIES_TOTAL) { if (e instanceof TTransportException && e.getCause() != null && (e.getCause().getClass() == SocketException.class)) { String msg = "Error writing to Cassandra socket. Likely cause: Exceeded maximum thrift frame size; unlikely cause: network issues."; log.error("Tried to connect to cassandra " + numTries + " times. " + msg, e); e = new TTransportException(((TTransportException) e).getType(), msg, e); } else { log.error("Tried to connect to cassandra " + numTries + " times.", e); } throw (K) e; } else { log.warn("Error occurred talking to cassandra. Attempt {} of {}.", numTries, MAX_TRIES_TOTAL, e); if (isConnectionException(e) && numTries >= MAX_TRIES_SAME_HOST) { addToBlacklist(host); } } } else { throw (K) e; } } // This method exists to verify a particularly nasty bug where cassandra doesn't have a // consistent ring across all of it's nodes. 
One node will think it owns more than the others // think it does and they will not send writes to it, but it will respond to requests // acting like it does. private void sanityCheckRingConsistency() { Multimap<Set<TokenRange>, InetSocketAddress> tokenRangesToHost = HashMultimap.create(); for (InetSocketAddress host : currentPools.keySet()) { Cassandra.Client client = null; try { client = CassandraClientFactory.getClientInternal(host, config.credentials(), config.ssl(), config.socketTimeoutMillis(), config.socketQueryTimeoutMillis()); try { client.describe_keyspace(config.keyspace()); } catch (NotFoundException e) { return; // don't care to check for ring consistency when we're not even fully initialized } tokenRangesToHost.put(ImmutableSet.copyOf(client.describe_ring(config.keyspace())), host); } catch (Exception e) { log.warn("failed to get ring info from host: {}", host, e); } finally { if (client != null) { client.getOutputProtocol().getTransport().close(); } } if (tokenRangesToHost.isEmpty()) { log.warn("Failed to get ring info for entire Cassandra cluster ({}); ring could not be checked for consistency.", config.keyspace()); return; } if (tokenRangesToHost.keySet().size() == 1) { // all nodes agree on a consistent view of the cluster. Good. return; } RuntimeException e = new IllegalStateException("Hosts have differing ring descriptions. This can lead to inconsistent reads and lost data. 
"); log.error("QA-86204 " + e.getMessage() + tokenRangesToHost, e); // provide some easier to grok logging for the two most common cases if (tokenRangesToHost.size() > 2) { for (Map.Entry<Set<TokenRange>, Collection<InetSocketAddress>> entry : tokenRangesToHost.asMap().entrySet()) { if (entry.getValue().size() == 1) { log.error("Host: " + entry.getValue().iterator().next() + " disagrees with the other nodes about the ring state."); } } } if (tokenRangesToHost.keySet().size() == 2) { ImmutableList<Set<TokenRange>> sets = ImmutableList.copyOf(tokenRangesToHost.keySet()); Set<TokenRange> set1 = sets.get(0); Set<TokenRange> set2 = sets.get(1); log.error("Hosts are split. group1: " + tokenRangesToHost.get(set1) + " group2: " + tokenRangesToHost.get(set2)); } CassandraVerifier.logErrorOrThrow(e.getMessage(), config.safetyDisabled()); } } @VisibleForTesting static boolean isConnectionException(Throwable t) { return t != null && (t instanceof SocketTimeoutException || t instanceof ClientCreationFailedException || t instanceof UnavailableException || t instanceof NoSuchElementException || isConnectionException(t.getCause())); } @VisibleForTesting static boolean isRetriableException(Throwable t) { return t != null && (t instanceof TTransportException || t instanceof TimedOutException || t instanceof InsufficientConsistencyException || isConnectionException(t) || isRetriableException(t.getCause())); } final FunctionCheckedException<Cassandra.Client, Void, Exception> validatePartitioner = new FunctionCheckedException<Cassandra.Client, Void, Exception>() { @Override public Void apply(Cassandra.Client client) throws Exception { CassandraVerifier.validatePartitioner(client, config); return null; } }; final FunctionCheckedException<Cassandra.Client, Void, Exception> createInternalLockTable = new FunctionCheckedException<Cassandra.Client, Void, Exception>() { @Override public Void apply(Cassandra.Client client) throws Exception { createTableInternal(client, 
CassandraConstants.LOCK_TABLE); return null; } }; /** * Weights hosts inversely by the number of active connections. {@link #getRandomHost()} should then be used to * pick a random host */ @VisibleForTesting static class WeightedHosts { final TreeMap<Integer, InetSocketAddress> hosts; private WeightedHosts(TreeMap<Integer, InetSocketAddress> hosts) { this.hosts = hosts; } static WeightedHosts create(Map<InetSocketAddress, CassandraClientPoolingContainer> pools) { Preconditions.checkArgument(!pools.isEmpty(), "pools should be non-empty"); return new WeightedHosts(buildHostsWeightedByActiveConnections(pools)); } /** * The key for a host is the open upper bound of the weight. Since the domain is intended to be contiguous, the * closed lower bound of that weight is the key of the previous entry. * <p> * The closed lower bound of the first entry is 0. * <p> * Every weight is guaranteed to be non-zero in size. That is, every key is guaranteed to be at least one larger * than the previous key. */ private static TreeMap<Integer, InetSocketAddress> buildHostsWeightedByActiveConnections( Map<InetSocketAddress, CassandraClientPoolingContainer> pools) { Map<InetSocketAddress, Integer> activeConnectionsByHost = new HashMap<>(pools.size()); int totalActiveConnections = 0; for (InetSocketAddress host : pools.keySet()) { int activeConnections = Math.max(pools.get(host).getPoolUtilization(), 0); activeConnectionsByHost.put(host, activeConnections); totalActiveConnections += activeConnections; } int lowerBoundInclusive = 0; TreeMap<Integer, InetSocketAddress> weightedHosts = new TreeMap<>(); for (Entry<InetSocketAddress, Integer> entry : activeConnectionsByHost.entrySet()) { // We want the weight to be inversely proportional to the number of active connections so that we pick // less-active hosts. 
We add 1 to make sure that all ranges are non-empty int weight = totalActiveConnections - entry.getValue() + 1; weightedHosts.put(lowerBoundInclusive + weight, entry.getKey()); lowerBoundInclusive += weight; } return weightedHosts; } InetSocketAddress getRandomHost() { int index = ThreadLocalRandom.current().nextInt(hosts.lastKey()); return getRandomHostInternal(index); } // This basically exists for testing InetSocketAddress getRandomHostInternal(int index) { return hosts.higherEntry(index).getValue(); } } }
atlasdb-cassandra/src/main/java/com/palantir/atlasdb/keyvalue/cassandra/CassandraClientPool.java
/** * Copyright 2015 Palantir Technologies * * Licensed under the BSD-3 License (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://opensource.org/licenses/BSD-3-Clause * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.palantir.atlasdb.keyvalue.cassandra; import java.net.InetSocketAddress; import java.net.SocketException; import java.net.SocketTimeoutException; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.NoSuchElementException; import java.util.Set; import java.util.TreeMap; import java.util.concurrent.ScheduledThreadPoolExecutor; import java.util.concurrent.ThreadLocalRandom; import java.util.concurrent.TimeUnit; import org.apache.cassandra.thrift.Cassandra; import org.apache.cassandra.thrift.Cassandra.Client; import org.apache.cassandra.thrift.CfDef; import org.apache.cassandra.thrift.InvalidRequestException; import org.apache.cassandra.thrift.KsDef; import org.apache.cassandra.thrift.NotFoundException; import org.apache.cassandra.thrift.SchemaDisagreementException; import org.apache.cassandra.thrift.TimedOutException; import org.apache.cassandra.thrift.TokenRange; import org.apache.cassandra.thrift.UnavailableException; import org.apache.thrift.TException; import org.apache.thrift.transport.TTransportException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Function; import com.google.common.base.Preconditions; import com.google.common.collect.HashMultimap; import com.google.common.collect.ImmutableList; 
import com.google.common.collect.ImmutableRangeMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Multimap; import com.google.common.collect.Range; import com.google.common.collect.RangeMap; import com.google.common.collect.Sets; import com.google.common.collect.Sets.SetView; import com.google.common.io.BaseEncoding; import com.google.common.primitives.UnsignedBytes; import com.google.common.util.concurrent.ThreadFactoryBuilder; import com.palantir.atlasdb.cassandra.CassandraKeyValueServiceConfig; import com.palantir.atlasdb.keyvalue.api.InsufficientConsistencyException; import com.palantir.atlasdb.keyvalue.api.TableReference; import com.palantir.atlasdb.keyvalue.cassandra.CassandraClientFactory.ClientCreationFailedException; import com.palantir.common.base.FunctionCheckedException; import com.palantir.common.concurrent.PTExecutors; /** * Feature breakdown: * - Pooling * - Token Aware Mapping / Query Routing / Data partitioning * - Retriable Queries * - Pool member error tracking / blacklisting* * - Pool refreshing * - Pool node autodiscovery * - Pool member health checking* * * *entirely new features * * By our old system, this would be a RefreshingRetriableTokenAwareHealthCheckingManyHostCassandraClientPoolingContainerManager; * ... this is one of the reasons why there is a new system. **/ public class CassandraClientPool { private static final Logger log = LoggerFactory.getLogger(CassandraClientPool.class); /** * This is the maximum number of times we'll accept connection failures to one host before blacklisting it. 
Note * that subsequent hosts we try in the same call will actually be blacklisted after one connection failure */ private static final int MAX_TRIES_SAME_HOST = 3; private static final int MAX_TRIES_TOTAL = 6; volatile RangeMap<LightweightOPPToken, List<InetSocketAddress>> tokenMap = ImmutableRangeMap.of(); Map<InetSocketAddress, Long> blacklistedHosts = Maps.newConcurrentMap(); Map<InetSocketAddress, CassandraClientPoolingContainer> currentPools = Maps.newConcurrentMap(); final CassandraKeyValueServiceConfig config; final ScheduledThreadPoolExecutor refreshDaemon; public static class LightweightOPPToken implements Comparable<LightweightOPPToken> { final byte[] bytes; public LightweightOPPToken(byte[] bytes) { this.bytes = bytes; } @Override public int compareTo(LightweightOPPToken other) { return UnsignedBytes.lexicographicalComparator().compare(this.bytes, other.bytes); } @Override public String toString() { return BaseEncoding.base16().encode(bytes); } } public CassandraClientPool(CassandraKeyValueServiceConfig config) { this.config = config; refreshDaemon = PTExecutors.newScheduledThreadPool(1, new ThreadFactoryBuilder().setDaemon(true).setNameFormat("CassandraClientPoolRefresh-%d").build()); refreshDaemon.scheduleWithFixedDelay(new Runnable() { @Override public void run() { try { refreshPool(); } catch (Throwable t) { log.error("Failed to refresh Cassandra KVS pool. 
Extended periods of being unable to refresh will cause perf degradation.", t); } } }, config.poolRefreshIntervalSeconds(), config.poolRefreshIntervalSeconds(), TimeUnit.SECONDS); config.servers().forEach((server) -> currentPools.put(server, new CassandraClientPoolingContainer(server, config))); refreshPool(); // ensure we've initialized before returning } public void shutdown() { refreshDaemon.shutdown(); currentPools.forEach((address, cassandraClientPoolingContainer) -> cassandraClientPoolingContainer.shutdownPooling()); } private synchronized void refreshPool() { checkAndUpdateBlacklist(); Set<InetSocketAddress> serversToAdd = Sets.newHashSet(config.servers()); Set<InetSocketAddress> serversToRemove = ImmutableSet.of(); if (config.autoRefreshNodes()) { refreshTokenRanges(); // re-use token mapping as list of hosts in the cluster for (List<InetSocketAddress> rangeOwners : tokenMap.asMapOfRanges().values()) { for (InetSocketAddress address : rangeOwners) { serversToAdd.add(address); } } } serversToAdd = Sets.difference(serversToAdd, currentPools.keySet()); if (!config.autoRefreshNodes()) { // (we would just add them back in) serversToRemove = Sets.difference(currentPools.keySet(), config.servers()); } for (InetSocketAddress newServer : serversToAdd) { currentPools.put(newServer, new CassandraClientPoolingContainer(newServer, config)); } for (InetSocketAddress removedServerAddress : serversToRemove) { removePool(removedServerAddress); } if (!(serversToAdd.isEmpty() && serversToRemove.isEmpty())) { // if we made any changes sanityCheckRingConsistency(); if (!config.autoRefreshNodes()) { // grab new token mapping, if we didn't already do this before refreshTokenRanges(); } } log.debug("Cassandra pool refresh added hosts {}, removed hosts {}.", serversToAdd, serversToRemove); debugLogStateOfPool(); } private void removePool(InetSocketAddress removedServerAddress) { blacklistedHosts.remove(removedServerAddress); try { 
currentPools.get(removedServerAddress).shutdownPooling(); } catch (Exception e) { log.warn("While removing a host ({}) from the pool, we were unable to gently cleanup resources.", removedServerAddress, e); } currentPools.remove(removedServerAddress); } private void debugLogStateOfPool() { if (log.isDebugEnabled()) { StringBuilder currentState = new StringBuilder(); currentState.append( String.format("POOL STATUS: Current blacklist = %s,%n current hosts in pool = %s%n", blacklistedHosts.keySet().toString(), currentPools.keySet().toString())); for (Entry<InetSocketAddress, CassandraClientPoolingContainer> entry : currentPools.entrySet()) { int activeCheckouts = entry.getValue().getPoolUtilization(); int totalAllowed = entry.getValue().getPoolSize(); currentState.append( String.format("\tPOOL STATUS: Pooled host %s has %s out of %s connections checked out.%n", entry.getKey(), activeCheckouts > 0? Integer.toString(activeCheckouts) : "(unknown)", totalAllowed > 0? Integer.toString(totalAllowed) : "(not bounded)")); } log.debug(currentState.toString()); } } private void checkAndUpdateBlacklist() { // Check blacklist and re-integrate or continue to wait as necessary for (Map.Entry<InetSocketAddress, Long> blacklistedEntry : blacklistedHosts.entrySet()) { long backoffTimeMillis = TimeUnit.SECONDS.toMillis(config.unresponsiveHostBackoffTimeSeconds()); if (blacklistedEntry.getValue() + backoffTimeMillis < System.currentTimeMillis()) { InetSocketAddress host = blacklistedEntry.getKey(); if (isHostHealthy(host)) { blacklistedHosts.remove(host); log.error("Added host {} back into the pool after a waiting period and successful health check.", host); } } } } private void addToBlacklist(InetSocketAddress badHost) { blacklistedHosts.put(badHost, System.currentTimeMillis()); log.info("Blacklisted host '{}'", badHost); } private boolean isHostHealthy(InetSocketAddress host) { try { CassandraClientPoolingContainer testingContainer = currentPools.get(host); 
testingContainer.runWithPooledResource(describeRing); testingContainer.runWithPooledResource(validatePartitioner); return true; } catch (Exception e) { log.error("We tried to add {} back into the pool, but got an exception that caused to us distrust this host further.", host, e); return false; } } private CassandraClientPoolingContainer getRandomGoodHost() { Map<InetSocketAddress, CassandraClientPoolingContainer> pools = currentPools; Set<InetSocketAddress> livingHosts = Sets.difference(pools.keySet(), blacklistedHosts.keySet()); if (livingHosts.isEmpty()) { log.error("There are no known live hosts in the connection pool. We're choosing one at random in a last-ditch attempt at forward progress."); livingHosts = pools.keySet(); } return pools.get(getRandomHostByActiveConnections(Maps.filterKeys(currentPools, livingHosts::contains))); } public InetSocketAddress getRandomHostForKey(byte[] key) { List<InetSocketAddress> hostsForKey = tokenMap.get(new LightweightOPPToken(key)); SetView<InetSocketAddress> liveOwnerHosts; if (hostsForKey == null) { log.debug("Cluster not fully initialized, not routing query to correct host as not token map found."); return getRandomGoodHost().getHost(); } else { liveOwnerHosts = Sets.difference(ImmutableSet.copyOf(hostsForKey), blacklistedHosts.keySet()); } if (liveOwnerHosts.isEmpty()) { log.warn("Perf / cluster stability issue. Token aware query routing has failed because there are no known " + "live hosts that claim ownership of the given range. Falling back to choosing a random live node. 
" + "Current state logged at DEBUG"); log.debug("Current ring view is: {} and our current host blacklist is {}", tokenMap, blacklistedHosts); return getRandomGoodHost().getHost(); } else { return getRandomHostByActiveConnections(Maps.filterKeys(currentPools, liveOwnerHosts::contains)); } } private static InetSocketAddress getRandomHostByActiveConnections(Map<InetSocketAddress, CassandraClientPoolingContainer> pools) { return WeightedHosts.create(pools).getRandomHost(); } public void runOneTimeStartupChecks() { try { CassandraVerifier.ensureKeyspaceExistsAndIsUpToDate(this, config); } catch (Exception e) { log.error("Startup checks failed, was not able to create the keyspace or ensure it already existed."); throw new RuntimeException(e); } Map<InetSocketAddress, Exception> completelyUnresponsiveHosts = Maps.newHashMap(), aliveButInvalidPartitionerHosts = Maps.newHashMap(); boolean thisHostResponded, atLeastOneHostResponded = false, atLeastOneHostSaidWeHaveALockTable = false; for (InetSocketAddress host : currentPools.keySet()) { thisHostResponded = false; try { runOnHost(host, CassandraVerifier.healthCheck); thisHostResponded = true; atLeastOneHostResponded = true; } catch (Exception e) { completelyUnresponsiveHosts.put(host, e); addToBlacklist(host); } if (thisHostResponded) { try { runOnHost(host, validatePartitioner); } catch (Exception e) { aliveButInvalidPartitionerHosts.put(host, e); } try { runOnHost(host, createInternalLockTable); atLeastOneHostSaidWeHaveALockTable = true; } catch (Exception e) { // don't fail here, want to give the user all the errors at once at the end } } } StringBuilder errorBuilderForEntireCluster = new StringBuilder(); if (completelyUnresponsiveHosts.size() > 0) { errorBuilderForEntireCluster.append("Performing routine startup checks, determined that the following hosts are unreachable for the following reasons: \n"); completelyUnresponsiveHosts.forEach((host, exception) -> errorBuilderForEntireCluster.append(String.format("\tHost: %s 
was marked unreachable via exception: %s%n", host.toString(), exception.toString()))); } if (aliveButInvalidPartitionerHosts.size() > 0) { errorBuilderForEntireCluster.append("Performing routine startup checks, determined that the following hosts were alive but are configured with an invalid partitioner: \n"); aliveButInvalidPartitionerHosts.forEach((host, exception) -> errorBuilderForEntireCluster.append(String.format("\tHost: %s was marked as invalid partitioner via exception: %s%n", host.toString(), exception.toString()))); } if (atLeastOneHostResponded && atLeastOneHostSaidWeHaveALockTable && aliveButInvalidPartitionerHosts.size() == 0) { return; } else { throw new RuntimeException(errorBuilderForEntireCluster.toString()); } } //todo dedupe this into a name-demangling class that everyone can access protected static String internalTableName(TableReference tableRef) { String tableName = tableRef.getQualifiedName(); if (tableName.startsWith("_")) { return tableName; } return tableName.replaceFirst("\\.", "__"); } // for tables internal / implementation specific to this KVS; these also don't get metadata in metadata table, nor do they show up in getTablenames, nor does this use concurrency control private void createTableInternal(Client client, TableReference tableRef) throws InvalidRequestException, SchemaDisagreementException, TException, NotFoundException { if (tableAlreadyExists(client, internalTableName(tableRef))) { return; } CfDef cf = CassandraConstants.getStandardCfDef(config.keyspace(), internalTableName(tableRef)); client.system_add_column_family(cf); CassandraKeyValueServices.waitForSchemaVersions(client, tableRef.getQualifiedName(), config.schemaMutationTimeoutMillis()); return; } private boolean tableAlreadyExists(Client client, String caseInsensitiveTableName) throws TException { KsDef ks = client.describe_keyspace(config.keyspace()); for (CfDef cf : ks.getCf_defs()) { if (cf.getName().equalsIgnoreCase(caseInsensitiveTableName)) { return true; } } 
return false; } private void refreshTokenRanges() { try { List<TokenRange> tokenRanges = getRandomGoodHost().runWithPooledResource(describeRing); ImmutableRangeMap.Builder<LightweightOPPToken, List<InetSocketAddress>> newTokenRing = ImmutableRangeMap.builder(); for (TokenRange tokenRange : tokenRanges) { List<InetSocketAddress> hosts = Lists.transform(tokenRange.getEndpoints(), new Function<String, InetSocketAddress>() { @Override public InetSocketAddress apply(String endpoint) { return new InetSocketAddress(endpoint, CassandraConstants.DEFAULT_THRIFT_PORT); } }); LightweightOPPToken startToken = new LightweightOPPToken(BaseEncoding.base16().decode(tokenRange.getStart_token().toUpperCase())); LightweightOPPToken endToken = new LightweightOPPToken(BaseEncoding.base16().decode(tokenRange.getEnd_token().toUpperCase())); if (startToken.compareTo(endToken) <= 0) { newTokenRing.put(Range.openClosed(startToken, endToken), hosts); } else { // Handle wrap-around newTokenRing.put(Range.greaterThan(startToken), hosts); newTokenRing.put(Range.atMost(endToken), hosts); } } tokenMap = newTokenRing.build(); } catch (Exception e) { log.error("Couldn't grab new token ranges for token aware cassandra mapping!", e); } } private FunctionCheckedException<Cassandra.Client, List<TokenRange>, Exception> describeRing = new FunctionCheckedException<Cassandra.Client, List<TokenRange>, Exception>() { @Override public List<TokenRange> apply (Cassandra.Client client) throws Exception { return client.describe_ring(config.keyspace()); }}; public <V, K extends Exception> V runWithRetry(FunctionCheckedException<Cassandra.Client, V, K> f) throws K { return runWithRetryOnHost(getRandomGoodHost().getHost(), f); } public <V, K extends Exception> V runWithRetryOnHost(InetSocketAddress specifiedHost, FunctionCheckedException<Cassandra.Client, V, K> f) throws K { int numTries = 0; while (true) { CassandraClientPoolingContainer hostPool = currentPools.get(specifiedHost); if 
(blacklistedHosts.containsKey(specifiedHost) || hostPool == null) { log.warn("Randomly redirected a query intended for host {} because it was not currently a live member of the pool.", specifiedHost); hostPool = getRandomGoodHost(); } try { return hostPool.runWithPooledResource(f); } catch (Exception e) { numTries++; this.<K>handleException(numTries, hostPool.getHost(), e); } } } public <V, K extends Exception> V run(FunctionCheckedException<Cassandra.Client, V, K> f) throws K { return runOnHost(getRandomGoodHost().getHost(), f); } private <V, K extends Exception> V runOnHost(InetSocketAddress specifiedHost, FunctionCheckedException<Cassandra.Client, V, K> f) throws K { CassandraClientPoolingContainer hostPool = currentPools.get(specifiedHost); return hostPool.runWithPooledResource(f); } @SuppressWarnings("unchecked") private <K extends Exception> void handleException(int numTries, InetSocketAddress host, Exception e) throws K { if (isRetriableException(e)) { if (numTries >= MAX_TRIES_TOTAL) { if (e instanceof TTransportException && e.getCause() != null && (e.getCause().getClass() == SocketException.class)) { String msg = "Error writing to Cassandra socket. Likely cause: Exceeded maximum thrift frame size; unlikely cause: network issues."; log.error("Tried to connect to cassandra " + numTries + " times. " + msg, e); e = new TTransportException(((TTransportException) e).getType(), msg, e); } else { log.error("Tried to connect to cassandra " + numTries + " times.", e); } throw (K) e; } else { log.warn("Error occurred talking to cassandra. Attempt {} of {}.", numTries, MAX_TRIES_TOTAL, e); if (isConnectionException(e) && numTries >= MAX_TRIES_SAME_HOST) { addToBlacklist(host); } } } else { throw (K) e; } } // This method exists to verify a particularly nasty bug where cassandra doesn't have a // consistent ring across all of it's nodes. 
One node will think it owns more than the others // think it does and they will not send writes to it, but it will respond to requests // acting like it does. private void sanityCheckRingConsistency() { Multimap<Set<TokenRange>, InetSocketAddress> tokenRangesToHost = HashMultimap.create(); for (InetSocketAddress host : currentPools.keySet()) { Cassandra.Client client = null; try { client = CassandraClientFactory.getClientInternal(host, config.credentials(), config.ssl(), config.socketTimeoutMillis(), config.socketQueryTimeoutMillis()); try { client.describe_keyspace(config.keyspace()); } catch (NotFoundException e) { return; // don't care to check for ring consistency when we're not even fully initialized } tokenRangesToHost.put(ImmutableSet.copyOf(client.describe_ring(config.keyspace())), host); } catch (Exception e) { log.warn("failed to get ring info from host: {}", host, e); } finally { if (client != null) { client.getOutputProtocol().getTransport().close(); } } if (tokenRangesToHost.isEmpty()) { log.warn("Failed to get ring info for entire Cassandra cluster ({}); ring could not be checked for consistency.", config.keyspace()); return; } if (tokenRangesToHost.keySet().size() == 1) { // all nodes agree on a consistent view of the cluster. Good. return; } RuntimeException e = new IllegalStateException("Hosts have differing ring descriptions. This can lead to inconsistent reads and lost data. 
"); log.error("QA-86204 " + e.getMessage() + tokenRangesToHost, e); // provide some easier to grok logging for the two most common cases if (tokenRangesToHost.size() > 2) { for (Map.Entry<Set<TokenRange>, Collection<InetSocketAddress>> entry : tokenRangesToHost.asMap().entrySet()) { if (entry.getValue().size() == 1) { log.error("Host: " + entry.getValue().iterator().next() + " disagrees with the other nodes about the ring state."); } } } if (tokenRangesToHost.keySet().size() == 2) { ImmutableList<Set<TokenRange>> sets = ImmutableList.copyOf(tokenRangesToHost.keySet()); Set<TokenRange> set1 = sets.get(0); Set<TokenRange> set2 = sets.get(1); log.error("Hosts are split. group1: " + tokenRangesToHost.get(set1) + " group2: " + tokenRangesToHost.get(set2)); } CassandraVerifier.logErrorOrThrow(e.getMessage(), config.safetyDisabled()); } } @VisibleForTesting static boolean isConnectionException(Throwable t) { return t != null && (t instanceof SocketTimeoutException || t instanceof ClientCreationFailedException || t instanceof UnavailableException || t instanceof NoSuchElementException || isConnectionException(t.getCause())); } @VisibleForTesting static boolean isRetriableException(Throwable t) { return t != null && (t instanceof TTransportException || t instanceof TimedOutException || t instanceof InsufficientConsistencyException || isConnectionException(t) || isRetriableException(t.getCause())); } final FunctionCheckedException<Cassandra.Client, Void, Exception> validatePartitioner = new FunctionCheckedException<Cassandra.Client, Void, Exception>() { @Override public Void apply(Cassandra.Client client) throws Exception { CassandraVerifier.validatePartitioner(client, config); return null; } }; final FunctionCheckedException<Cassandra.Client, Void, Exception> createInternalLockTable = new FunctionCheckedException<Cassandra.Client, Void, Exception>() { @Override public Void apply(Cassandra.Client client) throws Exception { createTableInternal(client, 
CassandraConstants.LOCK_TABLE); return null; } }; /** * Weights hosts inversely by the number of active connections. {@link #getRandomHost()} should then be used to * pick a random host */ @VisibleForTesting static class WeightedHosts { final TreeMap<Integer, InetSocketAddress> hosts; private WeightedHosts(TreeMap<Integer, InetSocketAddress> hosts) { this.hosts = hosts; } static WeightedHosts create(Map<InetSocketAddress, CassandraClientPoolingContainer> pools) { Preconditions.checkArgument(!pools.isEmpty(), "pools should be non-empty"); return new WeightedHosts(buildHostsWeightedByActiveConnections(pools)); } /** * The key for a host is the open upper bound of the weight. Since the domain is intended to be contiguous, the * closed lower bound of that weight is the key of the previous entry. * <p> * The closed lower bound of the first entry is 0. * <p> * Every weight is guaranteed to be non-zero in size. That is, every key is guaranteed to be at least one larger * than the previous key. */ private static TreeMap<Integer, InetSocketAddress> buildHostsWeightedByActiveConnections( Map<InetSocketAddress, CassandraClientPoolingContainer> pools) { Map<InetSocketAddress, Integer> activeConnectionsByHost = new HashMap<>(pools.size()); int totalActiveConnections = 0; for (InetSocketAddress host : pools.keySet()) { int activeConnections = Math.max(pools.get(host).getPoolUtilization(), 0); activeConnectionsByHost.put(host, activeConnections); totalActiveConnections += activeConnections; } int lowerBoundInclusive = 0; TreeMap<Integer, InetSocketAddress> weightedHosts = new TreeMap<>(); for (Entry<InetSocketAddress, Integer> entry : activeConnectionsByHost.entrySet()) { // We want the weight to be inversely proportional to the number of active connections so that we pick // less-active hosts. 
We add 1 to make sure that all ranges are non-empty int weight = totalActiveConnections - entry.getValue() + 1; weightedHosts.put(lowerBoundInclusive + weight, entry.getKey()); lowerBoundInclusive += weight; } return weightedHosts; } InetSocketAddress getRandomHost() { int index = ThreadLocalRandom.current().nextInt(hosts.lastKey()); return getRandomHostInternal(index); } // This basically exists for testing InetSocketAddress getRandomHostInternal(int index) { return hosts.higherEntry(index).getValue(); } } }
Clarify token mapping log message
atlasdb-cassandra/src/main/java/com/palantir/atlasdb/keyvalue/cassandra/CassandraClientPool.java
Clarify token mapping log message
<ide><path>tlasdb-cassandra/src/main/java/com/palantir/atlasdb/keyvalue/cassandra/CassandraClientPool.java <ide> SetView<InetSocketAddress> liveOwnerHosts; <ide> <ide> if (hostsForKey == null) { <del> log.debug("Cluster not fully initialized, not routing query to correct host as not token map found."); <add> log.debug( <add> "We attempted to route your query to a cassandra host that already contains the relevant data. " + <add> "However, the mapping of which host contains which data is not available yet. We will choose a random host instead."); <ide> return getRandomGoodHost().getHost(); <ide> } else { <ide> liveOwnerHosts = Sets.difference(ImmutableSet.copyOf(hostsForKey), blacklistedHosts.keySet());
Java
mit
cadd2ea3c708865ecb75093a6184ced0eaa44f7b
0
sqlancer/sqlancer,sqlancer/sqlancer
package lama.sqlite3.gen; import java.sql.SQLException; import java.util.ArrayList; import java.util.List; import java.util.function.Supplier; import lama.QueryAdapter; import lama.Randomly; import lama.sqlite3.SQLite3Provider.SQLite3GlobalState; public class SQLite3PragmaGenerator { /** * Not all pragmas are generated. * * <ul> * <li>case_sensitive_like is not generated since the tool discovered that it * has some conceptual issues, see * https://www.sqlite.org/src/info/a340eef47b0cad5.</li> * <li>legacy_alter_table is not generated since it does not work well with the * ALTER command (see docs)</li> * <li>journal_mode=off is generated, since it can corrupt the database, see * https://www.sqlite.org/src/tktview?name=f4ec250930</li> * <li>temp_store deletes all existing temporary tables</li> * </ul> */ private enum Pragma { APPLICATION_ID, // AUTO_VACUUM, // AUTOMATIC_INDEX, // BUSY_TIMEOUT, // CACHE_SIZE, // CACHE_SPILL_ENABLED, // CACHE_SPILL_SIZE, /* CASE_SENSITIVE_LIKE */ CELL_SIZE_CHECK, CHECKPOINT_FULLSYNC, DEFAULT_CACHE_SIZE, DEFER_FOREIGN_KEY, /*ENCODING,*/ FOREIGN_KEYS, IGNORE_CHECK_CONSTRAINTS, INCREMENTAL_VACUUM, INTEGRITY_CHECK, JOURNAL_MODE, JOURNAL_SIZE_LIMIT, /* LEGACY_ALTER_TABLE */ OPTIMIZE, LEGACY_FORMAT, LOCKING_MODE, MMAP_SIZE, RECURSIVE_TRIGGERS, REVERSE_UNORDERED_SELECTS, SECURE_DELETE, SHRINK_MEMORY, SOFT_HEAP_LIMIT, // STATS, // /* TEMP_STORE, */ // THREADS, // WAL_AUTOCHECKPOINT, // WAL_CHECKPOINT, // // WRITEABLE_SCHEMA } private final StringBuilder sb = new StringBuilder(); private final List<String> errors = new ArrayList<>(); public void createPragma(String pragmaName, Supplier<Object> supplier) { boolean setSchema = Randomly.getBoolean(); boolean setValue = Randomly.getBoolean(); sb.append("PRAGMA "); if (setSchema) { sb.append(Randomly.fromOptions("main.", "temp.")); } sb.append(pragmaName); if (setValue) { Object value = supplier.get(); if (value != null) { sb.append(" = "); sb.append(supplier.get()); } } } public QueryAdapter 
insert(SQLite3GlobalState globalState) { Randomly r = globalState.getRandomly(); Pragma p = Randomly.fromOptions(Pragma.values()); switch (p) { case APPLICATION_ID: createPragma("application_id", () -> Randomly.getNonCachedInteger()); break; case AUTO_VACUUM: createPragma("auto_vacuum", () -> Randomly.fromOptions("NONE", "FULL", "INCREMENTAL")); break; case AUTOMATIC_INDEX: createPragma("automatic_index", () -> getRandomTextBoolean()); break; case BUSY_TIMEOUT: createPragma("busy_timeout", () -> { if (Randomly.getBoolean()) { return 0; } else { long value = Math.max(10000, Randomly.getNonCachedInteger()); return value; } }); break; case CACHE_SIZE: createPragma("cache_size", () -> { if (Randomly.getBoolean()) { return 0; } else { return Randomly.getNonCachedInteger(); } }); break; case CACHE_SPILL_ENABLED: createPragma("cache_spill", () -> getRandomTextBoolean()); break; case CACHE_SPILL_SIZE: createPragma("cache_spill", () -> Randomly.getNonCachedInteger()); break; case CELL_SIZE_CHECK: createPragma("cell_size_check", () -> getRandomTextBoolean()); break; case CHECKPOINT_FULLSYNC: createPragma("checkpoint_fullfsync", () -> getRandomTextBoolean()); break; case DEFAULT_CACHE_SIZE: createPragma("default_cache_size", () -> r.getInteger()); break; case DEFER_FOREIGN_KEY: createPragma("defer_foreign_keys", () -> getRandomTextBoolean()); break; // TODO: [SQLITE_ERROR] SQL error or missing database (attached databases must // use the same text encoding as main database) // case ENCODING: // sb.append("PRAGMA main.encoding = \""); // String encoding = Randomly.fromOptions("UTF-8", "UTF-16", "UTF-16be", "UTF-16le"); // sb.append(encoding); // sb.append("\";\n"); // sb.append("PRAGMA temp.encoding = \""); // sb.append(encoding); // sb.append("\""); // break; case FOREIGN_KEYS: createPragma("foreign_keys", () -> getRandomTextBoolean()); break; case IGNORE_CHECK_CONSTRAINTS: createPragma("ignore_check_constraints", () -> getRandomTextBoolean()); break; case INCREMENTAL_VACUUM: 
if (Randomly.getBoolean()) { createPragma("incremental_vacuum", () -> null); } else { sb.append(String.format("PRAGMA incremental_vacuum(%d)", r.getInteger())); } break; case INTEGRITY_CHECK: errors.add("malformed JSON"); errors.add("JSON cannot hold BLOB values"); if (Randomly.getBoolean()) { createPragma("integrity_check", () -> null); } else { sb.append(String.format("PRAGMA integrity_check(%d)", r.getInteger())); } break; case JOURNAL_MODE: // OFF is no longer generated, since it might corrupt the database upon failed // index creation, see https://www.sqlite.org/src/tktview?name=f4ec250930. createPragma("journal_mode", () -> Randomly.fromOptions("DELETE", "TRUNCATE", "PERSIST", "MEMORY", "WAL")); errors.add("from within a transaction"); break; case JOURNAL_SIZE_LIMIT: createPragma("journal_size_limit", () -> { if (Randomly.getBoolean()) { return 0; } else { return Randomly.getNonCachedInteger(); } }); break; case LEGACY_FORMAT: createPragma("legacy_file_format", () -> getRandomTextBoolean()); break; case LOCKING_MODE: createPragma("locking_mode", () -> Randomly.fromOptions("NORMAL", "EXCLUSIVE")); break; case MMAP_SIZE: createPragma("mmap_size", () -> Randomly.getNonCachedInteger()); break; case OPTIMIZE: createPragma("optimize", () -> null); break; case RECURSIVE_TRIGGERS: createPragma("recursive_triggers", () -> getRandomTextBoolean()); break; case REVERSE_UNORDERED_SELECTS: createPragma("reverse_unordered_selects", () -> getRandomTextBoolean()); break; case SECURE_DELETE: createPragma("secure_delete", () -> Randomly.fromOptions("true", "false", "FAST")); break; case SHRINK_MEMORY: createPragma("shrink_memory", () -> null); break; case SOFT_HEAP_LIMIT: createPragma("soft_heap_limit", () -> { if (Randomly.getBoolean()) { return 0; } else { return r.getPositiveInteger(); } }); break; case STATS: createPragma("stats", () -> null); break; // case TEMP_STORE: // createPragma("temp_store", () -> Randomly.fromOptions("DEFAULT", "FILE", "MEMORY")); // break; case 
THREADS: createPragma("threads", () -> Randomly.getNonCachedInteger()); break; case WAL_AUTOCHECKPOINT: createPragma("wal_autocheckpoint", () -> Randomly.getNonCachedInteger()); break; case WAL_CHECKPOINT: sb.append("PRAGMA wal_checkpoint("); sb.append(Randomly.fromOptions("PASSIVE", "FULL", "RESTART", "TRUNCATE")); sb.append(")"); errors.add("database table is locked"); break; // writeable schema can cause ALTER TABLE commands to result in a malformed schema // case WRITEABLE_SCHEMA: // createPragma("writable_schema", () -> Randomly.getBoolean()); // break; default: throw new AssertionError(); } sb.append(";"); String pragmaString = sb.toString(); // errors.add("cannot change"); return new QueryAdapter(pragmaString, errors); } public static QueryAdapter insertPragma(SQLite3GlobalState globalState) throws SQLException { return new SQLite3PragmaGenerator().insert(globalState); } private static String getRandomTextBoolean() { return Randomly.fromOptions("true", "false"); } }
src/lama/sqlite3/gen/SQLite3PragmaGenerator.java
package lama.sqlite3.gen; import java.sql.SQLException; import java.util.ArrayList; import java.util.List; import java.util.function.Supplier; import lama.QueryAdapter; import lama.Randomly; import lama.sqlite3.SQLite3Provider.SQLite3GlobalState; public class SQLite3PragmaGenerator { /** * Not all pragmas are generated. * * <ul> * <li>case_sensitive_like is not generated since the tool discovered that it * has some conceptual issues, see * https://www.sqlite.org/src/info/a340eef47b0cad5.</li> * <li>legacy_alter_table is not generated since it does not work well with the * ALTER command (see docs)</li> * <li>journal_mode=off is generated, since it can corrupt the database, see * https://www.sqlite.org/src/tktview?name=f4ec250930</li> * <li>temp_store deletes all existing temporary tables</li> * </ul> */ private enum Pragma { APPLICATION_ID, // AUTO_VACUUM, // AUTOMATIC_INDEX, // BUSY_TIMEOUT, // CACHE_SIZE, // CACHE_SPILL_ENABLED, // CACHE_SPILL_SIZE, /* CASE_SENSITIVE_LIKE */ CELL_SIZE_CHECK, CHECKPOINT_FULLSYNC, DEFAULT_CACHE_SIZE, DEFER_FOREIGN_KEY, /*ENCODING,*/ FOREIGN_KEYS, IGNORE_CHECK_CONSTRAINTS, INCREMENTAL_VACUUM, INTEGRITY_CHECK, JOURNAL_MODE, JOURNAL_SIZE_LIMIT, /* LEGACY_ALTER_TABLE */ OPTIMIZE, LEGACY_FORMAT, LOCKING_MODE, MMAP_SIZE, RECURSIVE_TRIGGERS, REVERSE_UNORDERED_SELECTS, SECURE_DELETE, SHRINK_MEMORY, SOFT_HEAP_LIMIT, // STATS, // /* TEMP_STORE, */ // THREADS, // WAL_AUTOCHECKPOINT, // WAL_CHECKPOINT, // WRITEABLE_SCHEMA } private final StringBuilder sb = new StringBuilder(); private final List<String> errors = new ArrayList<>(); public void createPragma(String pragmaName, Supplier<Object> supplier) { boolean setSchema = Randomly.getBoolean(); boolean setValue = Randomly.getBoolean(); sb.append("PRAGMA "); if (setSchema) { sb.append(Randomly.fromOptions("main.", "temp.")); } sb.append(pragmaName); if (setValue) { Object value = supplier.get(); if (value != null) { sb.append(" = "); sb.append(supplier.get()); } } } public QueryAdapter 
insert(SQLite3GlobalState globalState) { Randomly r = globalState.getRandomly(); Pragma p = Randomly.fromOptions(Pragma.values()); switch (p) { case APPLICATION_ID: createPragma("application_id", () -> Randomly.getNonCachedInteger()); break; case AUTO_VACUUM: createPragma("auto_vacuum", () -> Randomly.fromOptions("NONE", "FULL", "INCREMENTAL")); break; case AUTOMATIC_INDEX: createPragma("automatic_index", () -> getRandomTextBoolean()); break; case BUSY_TIMEOUT: createPragma("busy_timeout", () -> { if (Randomly.getBoolean()) { return 0; } else { long value = Math.max(10000, Randomly.getNonCachedInteger()); return value; } }); break; case CACHE_SIZE: createPragma("cache_size", () -> { if (Randomly.getBoolean()) { return 0; } else { return Randomly.getNonCachedInteger(); } }); break; case CACHE_SPILL_ENABLED: createPragma("cache_spill", () -> getRandomTextBoolean()); break; case CACHE_SPILL_SIZE: createPragma("cache_spill", () -> Randomly.getNonCachedInteger()); break; case CELL_SIZE_CHECK: createPragma("cell_size_check", () -> getRandomTextBoolean()); break; case CHECKPOINT_FULLSYNC: createPragma("checkpoint_fullfsync", () -> getRandomTextBoolean()); break; case DEFAULT_CACHE_SIZE: createPragma("default_cache_size", () -> r.getInteger()); break; case DEFER_FOREIGN_KEY: createPragma("defer_foreign_keys", () -> getRandomTextBoolean()); break; // TODO: [SQLITE_ERROR] SQL error or missing database (attached databases must // use the same text encoding as main database) // case ENCODING: // sb.append("PRAGMA main.encoding = \""); // String encoding = Randomly.fromOptions("UTF-8", "UTF-16", "UTF-16be", "UTF-16le"); // sb.append(encoding); // sb.append("\";\n"); // sb.append("PRAGMA temp.encoding = \""); // sb.append(encoding); // sb.append("\""); // break; case FOREIGN_KEYS: createPragma("foreign_keys", () -> getRandomTextBoolean()); break; case IGNORE_CHECK_CONSTRAINTS: createPragma("ignore_check_constraints", () -> getRandomTextBoolean()); break; case INCREMENTAL_VACUUM: 
if (Randomly.getBoolean()) { createPragma("incremental_vacuum", () -> null); } else { sb.append(String.format("PRAGMA incremental_vacuum(%d)", r.getInteger())); } break; case INTEGRITY_CHECK: errors.add("malformed JSON"); errors.add("JSON cannot hold BLOB values"); if (Randomly.getBoolean()) { createPragma("integrity_check", () -> null); } else { sb.append(String.format("PRAGMA integrity_check(%d)", r.getInteger())); } break; case JOURNAL_MODE: // OFF is no longer generated, since it might corrupt the database upon failed // index creation, see https://www.sqlite.org/src/tktview?name=f4ec250930. createPragma("journal_mode", () -> Randomly.fromOptions("DELETE", "TRUNCATE", "PERSIST", "MEMORY", "WAL")); errors.add("from within a transaction"); break; case JOURNAL_SIZE_LIMIT: createPragma("journal_size_limit", () -> { if (Randomly.getBoolean()) { return 0; } else { return Randomly.getNonCachedInteger(); } }); break; case LEGACY_FORMAT: createPragma("legacy_file_format", () -> getRandomTextBoolean()); break; case LOCKING_MODE: createPragma("locking_mode", () -> Randomly.fromOptions("NORMAL", "EXCLUSIVE")); break; case MMAP_SIZE: createPragma("mmap_size", () -> Randomly.getNonCachedInteger()); break; case OPTIMIZE: createPragma("optimize", () -> null); break; case RECURSIVE_TRIGGERS: createPragma("recursive_triggers", () -> getRandomTextBoolean()); break; case REVERSE_UNORDERED_SELECTS: createPragma("reverse_unordered_selects", () -> getRandomTextBoolean()); break; case SECURE_DELETE: createPragma("secure_delete", () -> Randomly.fromOptions("true", "false", "FAST")); break; case SHRINK_MEMORY: createPragma("shrink_memory", () -> null); break; case SOFT_HEAP_LIMIT: createPragma("soft_heap_limit", () -> { if (Randomly.getBoolean()) { return 0; } else { return r.getPositiveInteger(); } }); break; case STATS: createPragma("stats", () -> null); break; // case TEMP_STORE: // createPragma("temp_store", () -> Randomly.fromOptions("DEFAULT", "FILE", "MEMORY")); // break; case 
THREADS: createPragma("threads", () -> Randomly.getNonCachedInteger()); break; case WAL_AUTOCHECKPOINT: createPragma("wal_autocheckpoint", () -> Randomly.getNonCachedInteger()); break; case WAL_CHECKPOINT: sb.append("PRAGMA wal_checkpoint("); sb.append(Randomly.fromOptions("PASSIVE", "FULL", "RESTART", "TRUNCATE")); sb.append(")"); errors.add("database table is locked"); break; case WRITEABLE_SCHEMA: createPragma("writable_schema", () -> Randomly.getBoolean()); break; default: throw new AssertionError(); } sb.append(";"); String pragmaString = sb.toString(); // errors.add("cannot change"); return new QueryAdapter(pragmaString, errors); } public static QueryAdapter insertPragma(SQLite3GlobalState globalState) throws SQLException { return new SQLite3PragmaGenerator().insert(globalState); } private static String getRandomTextBoolean() { return Randomly.fromOptions("true", "false"); } }
Disable the writable schema PRAGMA again
src/lama/sqlite3/gen/SQLite3PragmaGenerator.java
Disable the writable schema PRAGMA again
<ide><path>rc/lama/sqlite3/gen/SQLite3PragmaGenerator.java <ide> THREADS, // <ide> WAL_AUTOCHECKPOINT, // <ide> WAL_CHECKPOINT, // <del> WRITEABLE_SCHEMA <add>// WRITEABLE_SCHEMA <ide> } <ide> <ide> private final StringBuilder sb = new StringBuilder(); <ide> sb.append(")"); <ide> errors.add("database table is locked"); <ide> break; <del> case WRITEABLE_SCHEMA: <del> createPragma("writable_schema", () -> Randomly.getBoolean()); <del> break; <add>// writeable schema can cause ALTER TABLE commands to result in a malformed schema <add>// case WRITEABLE_SCHEMA: <add>// createPragma("writable_schema", () -> Randomly.getBoolean()); <add>// break; <ide> default: <ide> throw new AssertionError(); <ide> }
Java
apache-2.0
9fc9591505134ea82af8088c0700dd3c9ef216b3
0
h2oai/h2o-dev,h2oai/h2o-3,YzPaul3/h2o-3,YzPaul3/h2o-3,h2oai/h2o-dev,spennihana/h2o-3,jangorecki/h2o-3,h2oai/h2o-3,jangorecki/h2o-3,h2oai/h2o-dev,michalkurka/h2o-3,jangorecki/h2o-3,mathemage/h2o-3,spennihana/h2o-3,michalkurka/h2o-3,spennihana/h2o-3,michalkurka/h2o-3,h2oai/h2o-3,michalkurka/h2o-3,h2oai/h2o-3,YzPaul3/h2o-3,h2oai/h2o-3,h2oai/h2o-dev,mathemage/h2o-3,spennihana/h2o-3,h2oai/h2o-3,michalkurka/h2o-3,mathemage/h2o-3,jangorecki/h2o-3,michalkurka/h2o-3,spennihana/h2o-3,h2oai/h2o-dev,YzPaul3/h2o-3,YzPaul3/h2o-3,jangorecki/h2o-3,mathemage/h2o-3,mathemage/h2o-3,michalkurka/h2o-3,spennihana/h2o-3,h2oai/h2o-dev,h2oai/h2o-dev,YzPaul3/h2o-3,mathemage/h2o-3,jangorecki/h2o-3,YzPaul3/h2o-3,spennihana/h2o-3,h2oai/h2o-3,jangorecki/h2o-3,h2oai/h2o-3,mathemage/h2o-3
package hex; import water.exceptions.H2OIllegalArgumentException; import water.fvec.Frame; import water.util.ArrayUtils; import water.util.MathUtils; public class ModelMetricsBinomial extends ModelMetricsSupervised { public final AUC2 _auc; public final double _logloss; public final GainsLift _gainsLift; public ModelMetricsBinomial(Model model, Frame frame, double mse, String[] domain, double sigma, AUC2 auc, double logloss, GainsLift gainsLift) { super(model, frame, mse, domain, sigma); _auc = auc; _logloss = logloss; _gainsLift = gainsLift; } public static ModelMetricsBinomial getFromDKV(Model model, Frame frame) { ModelMetrics mm = ModelMetrics.getFromDKV(model, frame); if( !(mm instanceof ModelMetricsBinomial) ) throw new H2OIllegalArgumentException("Expected to find a Binomial ModelMetrics for model: " + model._key.toString() + " and frame: " + frame._key.toString(), "Expected to find a ModelMetricsBinomial for model: " + model._key.toString() + " and frame: " + frame._key.toString() + " but found a: " + (mm == null ? null : mm.getClass())); return (ModelMetricsBinomial) mm; } @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append(super.toString()); if (_auc != null) sb.append(" AUC: " + (float)_auc._auc + "\n"); sb.append(" logloss: " + (float)_logloss + "\n"); if (cm() != null) sb.append(" CM: " + cm().toASCII()); if (_gainsLift != null) sb.append(_gainsLift.createTwoDimTable()); return sb.toString(); } public double logloss() { return _logloss; } @Override public AUC2 auc() { return _auc; } @Override public ConfusionMatrix cm() { if( _auc == null ) return null; double[][] cm = _auc.defaultCM(); return cm == null ? 
null : new ConfusionMatrix(cm, _domain); } public GainsLift gainsLift() { return _gainsLift; } public static class MetricBuilderBinomial<T extends MetricBuilderBinomial<T>> extends MetricBuilderSupervised<T> { protected double _logloss; protected AUC2.AUCBuilder _auc; public MetricBuilderBinomial( String[] domain ) { super(2,domain); _auc = new AUC2.AUCBuilder(AUC2.NBINS); } public double auc() {return new AUC2(_auc)._auc;} // Passed a float[] sized nclasses+1; ds[0] must be a prediction. ds[1...nclasses-1] must be a class // distribution; @Override public double[] perRow(double ds[], float[] yact, Model m) {return perRow(ds, yact, 1, 0, m);} @Override public double[] perRow(double ds[], float[] yact, double w, double o, Model m) { if( Float .isNaN(yact[0]) ) return ds; // No errors if actual is missing if(ArrayUtils.hasNaNs(ds)) return ds; // No errors if prediction has missing values (can happen for GLM) if(w == 0 || Double.isNaN(w)) return ds; final int iact = (int)yact[0]; if( iact != 0 && iact != 1 ) return ds; // The actual is effectively a NaN _count++; _wcount += w; _wY += w*iact; _wYY += w*iact*iact; // Compute error double err = iact+1 < ds.length ? 
1-ds[iact+1] : 1; // Error: distance from predicting ycls as 1.0 _sumsqe += w*err*err; // Squared error assert !Double.isNaN(_sumsqe); // Compute log loss final double eps = 1e-15; _logloss -= w*Math.log(Math.max(eps, 1-err)); _auc.perRow(ds[2],iact,w); return ds; // Flow coding } @Override public void reduce( T mb ) { super.reduce(mb); // sumseq, count _logloss += mb._logloss; _auc.reduce(mb._auc); } @Override public ModelMetrics makeModelMetrics(Model m, Frame f, Frame preds) { double mse = Double.NaN; double logloss = Double.NaN; double sigma = Double.NaN; if (_wcount > 0) { sigma = weightedSigma(); mse = _sumsqe / _wcount; logloss = _logloss / _wcount; AUC2 auc = new AUC2(_auc); GainsLift gl = null; if (preds!=null) { gl = new GainsLift(); gl.preds = preds.lastVec(); gl.labels = f.vec(m._parms._response_column); gl.exec(); } return m._output.addModelMetrics(new ModelMetricsBinomial(m, f, mse, _domain, sigma, auc, logloss, gl)); } else { return m._output.addModelMetrics(new ModelMetricsBinomial(m, f, mse, null, sigma, null, logloss, null)); } } public String toString(){ if(_wcount == 0) return "empty, no rows"; return "auc = " + MathUtils.roundToNDigits(auc(),3) + ", logloss = " + _logloss / _wcount; } } }
h2o-core/src/main/java/hex/ModelMetricsBinomial.java
package hex; import water.exceptions.H2OIllegalArgumentException; import water.fvec.Frame; import water.util.ArrayUtils; import water.util.MathUtils; public class ModelMetricsBinomial extends ModelMetricsSupervised { public final AUC2 _auc; public final double _logloss; public final GainsLift _gainsLift; public ModelMetricsBinomial(Model model, Frame frame, double mse, String[] domain, double sigma, AUC2 auc, double logloss, GainsLift gainsLift) { super(model, frame, mse, domain, sigma); _auc = auc; _logloss = logloss; _gainsLift = gainsLift; } public static ModelMetricsBinomial getFromDKV(Model model, Frame frame) { ModelMetrics mm = ModelMetrics.getFromDKV(model, frame); if( !(mm instanceof ModelMetricsBinomial) ) throw new H2OIllegalArgumentException("Expected to find a Binomial ModelMetrics for model: " + model._key.toString() + " and frame: " + frame._key.toString(), "Expected to find a ModelMetricsBinomial for model: " + model._key.toString() + " and frame: " + frame._key.toString() + " but found a: " + (mm == null ? null : mm.getClass())); return (ModelMetricsBinomial) mm; } @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append(super.toString()); if (_auc != null) sb.append(" AUC: " + (float)_auc._auc + "\n"); sb.append(" logloss: " + (float)_logloss + "\n"); if (cm() != null) sb.append(" CM: " + cm().toASCII()); if (_gainsLift != null) sb.append(_gainsLift.createTwoDimTable()); return sb.toString(); } public double logloss() { return _logloss; } @Override public AUC2 auc() { return _auc; } @Override public ConfusionMatrix cm() { if( _auc == null ) return null; double[][] cm = _auc.defaultCM(); return cm == null ? 
null : new ConfusionMatrix(cm, _domain); } public GainsLift gainsLift() { return _gainsLift; } public static class MetricBuilderBinomial<T extends MetricBuilderBinomial<T>> extends MetricBuilderSupervised<T> { protected double _logloss; protected AUC2.AUCBuilder _auc; public MetricBuilderBinomial( String[] domain ) { super(2,domain); _auc = new AUC2.AUCBuilder(AUC2.NBINS); } public double auc() {return new AUC2(_auc)._auc;} // Passed a float[] sized nclasses+1; ds[0] must be a prediction. ds[1...nclasses-1] must be a class // distribution; @Override public double[] perRow(double ds[], float[] yact, Model m) {return perRow(ds, yact, 1, 0, m);} @Override public double[] perRow(double ds[], float[] yact, double w, double o, Model m) { if( Float .isNaN(yact[0]) ) return ds; // No errors if actual is missing if(ArrayUtils.hasNaNs(ds)) return ds; // No errors if prediction has missing values (can happen for GLM) if(w == 0 || Double.isNaN(w)) return ds; final int iact = (int)yact[0]; if( iact != 0 && iact != 1 ) return ds; // The actual is effectively a NaN _count++; _wcount += w; _wY += w*iact; _wYY += w*iact*iact; // Compute error double err = iact+1 < ds.length ? 
1-ds[iact+1] : 1; // Error: distance from predicting ycls as 1.0 _sumsqe += w*err*err; // Squared error assert !Double.isNaN(_sumsqe); // Compute log loss final double eps = 1e-15; _logloss -= w*Math.log(Math.max(eps, 1-err)); _auc.perRow(ds[2],iact,w); return ds; // Flow coding } @Override public void reduce( T mb ) { super.reduce(mb); // sumseq, count _logloss += mb._logloss; _auc.reduce(mb._auc); } @Override public ModelMetrics makeModelMetrics(Model m, Frame f, Frame preds) { double mse = Double.NaN; double logloss = Double.NaN; double sigma = Double.NaN; if (_wcount > 0) { sigma = weightedSigma(); mse = _sumsqe / _wcount; logloss = _logloss / _wcount; AUC2 auc = new AUC2(_auc); GainsLift gl = new GainsLift(); gl.preds = preds.lastVec(); gl.labels = f.vec(m._parms._response_column); gl.exec(); return m._output.addModelMetrics(new ModelMetricsBinomial(m, f, mse, _domain, sigma, auc, logloss, gl)); } else { return m._output.addModelMetrics(new ModelMetricsBinomial(m, f, mse, null, sigma, null, logloss, null)); } } public String toString(){ if(_wcount == 0) return "empty, no rows"; return "auc = " + MathUtils.roundToNDigits(auc(),3) + ", logloss = " + _logloss / _wcount; } } }
Fix NPE.
h2o-core/src/main/java/hex/ModelMetricsBinomial.java
Fix NPE.
<ide><path>2o-core/src/main/java/hex/ModelMetricsBinomial.java <ide> mse = _sumsqe / _wcount; <ide> logloss = _logloss / _wcount; <ide> AUC2 auc = new AUC2(_auc); <del> GainsLift gl = new GainsLift(); <del> gl.preds = preds.lastVec(); <del> gl.labels = f.vec(m._parms._response_column); <del> gl.exec(); <add> GainsLift gl = null; <add> if (preds!=null) { <add> gl = new GainsLift(); <add> gl.preds = preds.lastVec(); <add> gl.labels = f.vec(m._parms._response_column); <add> gl.exec(); <add> } <ide> return m._output.addModelMetrics(new ModelMetricsBinomial(m, f, mse, _domain, sigma, auc, logloss, gl)); <ide> } else { <ide> return m._output.addModelMetrics(new ModelMetricsBinomial(m, f, mse, null, sigma, null, logloss, null));
Java
apache-2.0
6ada632455210552649c083200f890d0ec4400f2
0
slisson/intellij-community,nicolargo/intellij-community,tmpgit/intellij-community,kool79/intellij-community,signed/intellij-community,fengbaicanhe/intellij-community,ibinti/intellij-community,ThiagoGarciaAlves/intellij-community,semonte/intellij-community,ryano144/intellij-community,vvv1559/intellij-community,Lekanich/intellij-community,suncycheng/intellij-community,TangHao1987/intellij-community,retomerz/intellij-community,robovm/robovm-studio,orekyuu/intellij-community,TangHao1987/intellij-community,asedunov/intellij-community,ivan-fedorov/intellij-community,amith01994/intellij-community,clumsy/intellij-community,Lekanich/intellij-community,ol-loginov/intellij-community,holmes/intellij-community,diorcety/intellij-community,dslomov/intellij-community,supersven/intellij-community,apixandru/intellij-community,blademainer/intellij-community,SerCeMan/intellij-community,xfournet/intellij-community,lucafavatella/intellij-community,izonder/intellij-community,fengbaicanhe/intellij-community,robovm/robovm-studio,youdonghai/intellij-community,SerCeMan/intellij-community,hurricup/intellij-community,allotria/intellij-community,dslomov/intellij-community,diorcety/intellij-community,diorcety/intellij-community,apixandru/intellij-community,Distrotech/intellij-community,adedayo/intellij-community,fengbaicanhe/intellij-community,fengbaicanhe/intellij-community,ivan-fedorov/intellij-community,ol-loginov/intellij-community,kdwink/intellij-community,akosyakov/intellij-community,gnuhub/intellij-community,kdwink/intellij-community,holmes/intellij-community,MichaelNedzelsky/intellij-community,asedunov/intellij-community,allotria/intellij-community,pwoodworth/intellij-community,michaelgallacher/intellij-community,muntasirsyed/intellij-community,holmes/intellij-community,ahb0327/intellij-community,TangHao1987/intellij-community,michaelgallacher/intellij-community,signed/intellij-community,gnuhub/intellij-community,alphafoobar/intellij-community,Lekanich/intellij-community,clumsy/intellij-c
ommunity,fitermay/intellij-community,supersven/intellij-community,lucafavatella/intellij-community,MER-GROUP/intellij-community,caot/intellij-community,vladmm/intellij-community,idea4bsd/idea4bsd,diorcety/intellij-community,diorcety/intellij-community,adedayo/intellij-community,nicolargo/intellij-community,samthor/intellij-community,supersven/intellij-community,ivan-fedorov/intellij-community,fnouama/intellij-community,ahb0327/intellij-community,allotria/intellij-community,akosyakov/intellij-community,MichaelNedzelsky/intellij-community,fitermay/intellij-community,lucafavatella/intellij-community,semonte/intellij-community,jagguli/intellij-community,clumsy/intellij-community,hurricup/intellij-community,robovm/robovm-studio,vladmm/intellij-community,slisson/intellij-community,dslomov/intellij-community,muntasirsyed/intellij-community,ol-loginov/intellij-community,ftomassetti/intellij-community,MichaelNedzelsky/intellij-community,petteyg/intellij-community,izonder/intellij-community,orekyuu/intellij-community,MER-GROUP/intellij-community,Distrotech/intellij-community,TangHao1987/intellij-community,samthor/intellij-community,amith01994/intellij-community,ibinti/intellij-community,suncycheng/intellij-community,jagguli/intellij-community,petteyg/intellij-community,ryano144/intellij-community,robovm/robovm-studio,FHannes/intellij-community,supersven/intellij-community,ThiagoGarciaAlves/intellij-community,ThiagoGarciaAlves/intellij-community,muntasirsyed/intellij-community,supersven/intellij-community,samthor/intellij-community,gnuhub/intellij-community,supersven/intellij-community,kool79/intellij-community,nicolargo/intellij-community,TangHao1987/intellij-community,robovm/robovm-studio,vvv1559/intellij-community,fnouama/intellij-community,retomerz/intellij-community,hurricup/intellij-community,muntasirsyed/intellij-community,salguarnieri/intellij-community,kdwink/intellij-community,tmpgit/intellij-community,caot/intellij-community,diorcety/intellij-community,ThiagoGarciaA
lves/intellij-community,xfournet/intellij-community,gnuhub/intellij-community,diorcety/intellij-community,da1z/intellij-community,mglukhikh/intellij-community,suncycheng/intellij-community,ol-loginov/intellij-community,holmes/intellij-community,samthor/intellij-community,salguarnieri/intellij-community,SerCeMan/intellij-community,signed/intellij-community,ernestp/consulo,signed/intellij-community,SerCeMan/intellij-community,ivan-fedorov/intellij-community,SerCeMan/intellij-community,blademainer/intellij-community,signed/intellij-community,nicolargo/intellij-community,FHannes/intellij-community,semonte/intellij-community,salguarnieri/intellij-community,retomerz/intellij-community,akosyakov/intellij-community,xfournet/intellij-community,slisson/intellij-community,wreckJ/intellij-community,muntasirsyed/intellij-community,wreckJ/intellij-community,consulo/consulo,orekyuu/intellij-community,Distrotech/intellij-community,wreckJ/intellij-community,slisson/intellij-community,clumsy/intellij-community,alphafoobar/intellij-community,robovm/robovm-studio,kool79/intellij-community,da1z/intellij-community,kdwink/intellij-community,apixandru/intellij-community,mglukhikh/intellij-community,vvv1559/intellij-community,kool79/intellij-community,petteyg/intellij-community,TangHao1987/intellij-community,amith01994/intellij-community,amith01994/intellij-community,Lekanich/intellij-community,semonte/intellij-community,salguarnieri/intellij-community,Lekanich/intellij-community,MER-GROUP/intellij-community,MER-GROUP/intellij-community,tmpgit/intellij-community,semonte/intellij-community,holmes/intellij-community,ibinti/intellij-community,dslomov/intellij-community,MichaelNedzelsky/intellij-community,izonder/intellij-community,ThiagoGarciaAlves/intellij-community,holmes/intellij-community,kool79/intellij-community,orekyuu/intellij-community,izonder/intellij-community,retomerz/intellij-community,petteyg/intellij-community,asedunov/intellij-community,samthor/intellij-community,gnuhub/intelli
j-community,ivan-fedorov/intellij-community,samthor/intellij-community,petteyg/intellij-community,orekyuu/intellij-community,MER-GROUP/intellij-community,michaelgallacher/intellij-community,da1z/intellij-community,ThiagoGarciaAlves/intellij-community,pwoodworth/intellij-community,youdonghai/intellij-community,amith01994/intellij-community,ahb0327/intellij-community,samthor/intellij-community,diorcety/intellij-community,kdwink/intellij-community,ivan-fedorov/intellij-community,ivan-fedorov/intellij-community,akosyakov/intellij-community,pwoodworth/intellij-community,kool79/intellij-community,consulo/consulo,signed/intellij-community,mglukhikh/intellij-community,ol-loginov/intellij-community,robovm/robovm-studio,asedunov/intellij-community,semonte/intellij-community,ibinti/intellij-community,amith01994/intellij-community,retomerz/intellij-community,michaelgallacher/intellij-community,fitermay/intellij-community,fitermay/intellij-community,muntasirsyed/intellij-community,ol-loginov/intellij-community,salguarnieri/intellij-community,amith01994/intellij-community,ftomassetti/intellij-community,retomerz/intellij-community,clumsy/intellij-community,vladmm/intellij-community,ibinti/intellij-community,Lekanich/intellij-community,xfournet/intellij-community,ftomassetti/intellij-community,ryano144/intellij-community,retomerz/intellij-community,lucafavatella/intellij-community,wreckJ/intellij-community,MER-GROUP/intellij-community,clumsy/intellij-community,idea4bsd/idea4bsd,ryano144/intellij-community,tmpgit/intellij-community,robovm/robovm-studio,ryano144/intellij-community,vvv1559/intellij-community,retomerz/intellij-community,michaelgallacher/intellij-community,wreckJ/intellij-community,tmpgit/intellij-community,dslomov/intellij-community,kool79/intellij-community,suncycheng/intellij-community,youdonghai/intellij-community,idea4bsd/idea4bsd,signed/intellij-community,consulo/consulo,vvv1559/intellij-community,blademainer/intellij-community,nicolargo/intellij-community,ryano14
4/intellij-community,dslomov/intellij-community,diorcety/intellij-community,FHannes/intellij-community,ftomassetti/intellij-community,FHannes/intellij-community,lucafavatella/intellij-community,adedayo/intellij-community,Lekanich/intellij-community,blademainer/intellij-community,youdonghai/intellij-community,alphafoobar/intellij-community,hurricup/intellij-community,pwoodworth/intellij-community,michaelgallacher/intellij-community,nicolargo/intellij-community,akosyakov/intellij-community,ftomassetti/intellij-community,da1z/intellij-community,asedunov/intellij-community,lucafavatella/intellij-community,caot/intellij-community,ibinti/intellij-community,ahb0327/intellij-community,mglukhikh/intellij-community,ol-loginov/intellij-community,amith01994/intellij-community,mglukhikh/intellij-community,TangHao1987/intellij-community,akosyakov/intellij-community,michaelgallacher/intellij-community,petteyg/intellij-community,Lekanich/intellij-community,MichaelNedzelsky/intellij-community,Distrotech/intellij-community,blademainer/intellij-community,kool79/intellij-community,ftomassetti/intellij-community,gnuhub/intellij-community,youdonghai/intellij-community,samthor/intellij-community,slisson/intellij-community,Lekanich/intellij-community,Distrotech/intellij-community,caot/intellij-community,petteyg/intellij-community,alphafoobar/intellij-community,allotria/intellij-community,alphafoobar/intellij-community,izonder/intellij-community,fitermay/intellij-community,vladmm/intellij-community,jagguli/intellij-community,suncycheng/intellij-community,muntasirsyed/intellij-community,orekyuu/intellij-community,jagguli/intellij-community,semonte/intellij-community,ahb0327/intellij-community,robovm/robovm-studio,muntasirsyed/intellij-community,pwoodworth/intellij-community,da1z/intellij-community,vvv1559/intellij-community,adedayo/intellij-community,dslomov/intellij-community,asedunov/intellij-community,petteyg/intellij-community,ryano144/intellij-community,suncycheng/intellij-community,sal
guarnieri/intellij-community,orekyuu/intellij-community,orekyuu/intellij-community,holmes/intellij-community,idea4bsd/idea4bsd,suncycheng/intellij-community,holmes/intellij-community,kdwink/intellij-community,xfournet/intellij-community,slisson/intellij-community,ibinti/intellij-community,TangHao1987/intellij-community,diorcety/intellij-community,slisson/intellij-community,dslomov/intellij-community,semonte/intellij-community,da1z/intellij-community,michaelgallacher/intellij-community,supersven/intellij-community,ftomassetti/intellij-community,fnouama/intellij-community,salguarnieri/intellij-community,caot/intellij-community,jagguli/intellij-community,fengbaicanhe/intellij-community,FHannes/intellij-community,signed/intellij-community,ThiagoGarciaAlves/intellij-community,lucafavatella/intellij-community,nicolargo/intellij-community,petteyg/intellij-community,idea4bsd/idea4bsd,Distrotech/intellij-community,izonder/intellij-community,fnouama/intellij-community,fengbaicanhe/intellij-community,da1z/intellij-community,caot/intellij-community,idea4bsd/idea4bsd,ernestp/consulo,ol-loginov/intellij-community,apixandru/intellij-community,youdonghai/intellij-community,MichaelNedzelsky/intellij-community,caot/intellij-community,Lekanich/intellij-community,vladmm/intellij-community,vvv1559/intellij-community,robovm/robovm-studio,jagguli/intellij-community,wreckJ/intellij-community,ernestp/consulo,FHannes/intellij-community,tmpgit/intellij-community,retomerz/intellij-community,TangHao1987/intellij-community,muntasirsyed/intellij-community,vvv1559/intellij-community,da1z/intellij-community,vvv1559/intellij-community,ibinti/intellij-community,blademainer/intellij-community,vladmm/intellij-community,SerCeMan/intellij-community,supersven/intellij-community,allotria/intellij-community,apixandru/intellij-community,lucafavatella/intellij-community,fengbaicanhe/intellij-community,allotria/intellij-community,idea4bsd/idea4bsd,da1z/intellij-community,blademainer/intellij-community,kdwink/i
ntellij-community,ibinti/intellij-community,ivan-fedorov/intellij-community,jagguli/intellij-community,allotria/intellij-community,asedunov/intellij-community,michaelgallacher/intellij-community,youdonghai/intellij-community,Distrotech/intellij-community,apixandru/intellij-community,blademainer/intellij-community,youdonghai/intellij-community,ivan-fedorov/intellij-community,wreckJ/intellij-community,FHannes/intellij-community,signed/intellij-community,izonder/intellij-community,tmpgit/intellij-community,apixandru/intellij-community,fnouama/intellij-community,robovm/robovm-studio,idea4bsd/idea4bsd,ThiagoGarciaAlves/intellij-community,Lekanich/intellij-community,asedunov/intellij-community,clumsy/intellij-community,alphafoobar/intellij-community,fitermay/intellij-community,samthor/intellij-community,MER-GROUP/intellij-community,dslomov/intellij-community,apixandru/intellij-community,youdonghai/intellij-community,kdwink/intellij-community,asedunov/intellij-community,lucafavatella/intellij-community,MER-GROUP/intellij-community,ahb0327/intellij-community,adedayo/intellij-community,nicolargo/intellij-community,ThiagoGarciaAlves/intellij-community,apixandru/intellij-community,MER-GROUP/intellij-community,pwoodworth/intellij-community,alphafoobar/intellij-community,muntasirsyed/intellij-community,hurricup/intellij-community,michaelgallacher/intellij-community,clumsy/intellij-community,MER-GROUP/intellij-community,wreckJ/intellij-community,FHannes/intellij-community,petteyg/intellij-community,samthor/intellij-community,petteyg/intellij-community,vvv1559/intellij-community,ivan-fedorov/intellij-community,ol-loginov/intellij-community,clumsy/intellij-community,mglukhikh/intellij-community,fitermay/intellij-community,izonder/intellij-community,xfournet/intellij-community,tmpgit/intellij-community,vvv1559/intellij-community,MER-GROUP/intellij-community,ol-loginov/intellij-community,SerCeMan/intellij-community,ibinti/intellij-community,da1z/intellij-community,dslomov/intellij-co
mmunity,semonte/intellij-community,ftomassetti/intellij-community,consulo/consulo,ftomassetti/intellij-community,ryano144/intellij-community,orekyuu/intellij-community,amith01994/intellij-community,asedunov/intellij-community,hurricup/intellij-community,fnouama/intellij-community,pwoodworth/intellij-community,suncycheng/intellij-community,ahb0327/intellij-community,lucafavatella/intellij-community,mglukhikh/intellij-community,suncycheng/intellij-community,idea4bsd/idea4bsd,gnuhub/intellij-community,hurricup/intellij-community,xfournet/intellij-community,jagguli/intellij-community,youdonghai/intellij-community,akosyakov/intellij-community,alphafoobar/intellij-community,Distrotech/intellij-community,gnuhub/intellij-community,nicolargo/intellij-community,caot/intellij-community,orekyuu/intellij-community,youdonghai/intellij-community,ftomassetti/intellij-community,Distrotech/intellij-community,fitermay/intellij-community,pwoodworth/intellij-community,signed/intellij-community,suncycheng/intellij-community,mglukhikh/intellij-community,alphafoobar/intellij-community,vvv1559/intellij-community,ftomassetti/intellij-community,Distrotech/intellij-community,clumsy/intellij-community,supersven/intellij-community,ol-loginov/intellij-community,ibinti/intellij-community,izonder/intellij-community,fitermay/intellij-community,retomerz/intellij-community,ThiagoGarciaAlves/intellij-community,slisson/intellij-community,mglukhikh/intellij-community,ryano144/intellij-community,MichaelNedzelsky/intellij-community,caot/intellij-community,ThiagoGarciaAlves/intellij-community,ernestp/consulo,slisson/intellij-community,MichaelNedzelsky/intellij-community,amith01994/intellij-community,ivan-fedorov/intellij-community,TangHao1987/intellij-community,xfournet/intellij-community,lucafavatella/intellij-community,pwoodworth/intellij-community,adedayo/intellij-community,alphafoobar/intellij-community,tmpgit/intellij-community,diorcety/intellij-community,mglukhikh/intellij-community,wreckJ/intellij-co
mmunity,ThiagoGarciaAlves/intellij-community,holmes/intellij-community,fnouama/intellij-community,allotria/intellij-community,FHannes/intellij-community,adedayo/intellij-community,alphafoobar/intellij-community,hurricup/intellij-community,adedayo/intellij-community,Distrotech/intellij-community,xfournet/intellij-community,FHannes/intellij-community,allotria/intellij-community,vladmm/intellij-community,izonder/intellij-community,ftomassetti/intellij-community,mglukhikh/intellij-community,adedayo/intellij-community,ahb0327/intellij-community,allotria/intellij-community,jagguli/intellij-community,idea4bsd/idea4bsd,FHannes/intellij-community,petteyg/intellij-community,jagguli/intellij-community,ol-loginov/intellij-community,hurricup/intellij-community,xfournet/intellij-community,kool79/intellij-community,da1z/intellij-community,retomerz/intellij-community,slisson/intellij-community,ahb0327/intellij-community,semonte/intellij-community,blademainer/intellij-community,tmpgit/intellij-community,vvv1559/intellij-community,fengbaicanhe/intellij-community,ryano144/intellij-community,nicolargo/intellij-community,apixandru/intellij-community,pwoodworth/intellij-community,ahb0327/intellij-community,semonte/intellij-community,tmpgit/intellij-community,vladmm/intellij-community,fnouama/intellij-community,signed/intellij-community,pwoodworth/intellij-community,clumsy/intellij-community,salguarnieri/intellij-community,tmpgit/intellij-community,lucafavatella/intellij-community,slisson/intellij-community,blademainer/intellij-community,salguarnieri/intellij-community,fengbaicanhe/intellij-community,lucafavatella/intellij-community,ibinti/intellij-community,holmes/intellij-community,akosyakov/intellij-community,vladmm/intellij-community,muntasirsyed/intellij-community,Distrotech/intellij-community,hurricup/intellij-community,fnouama/intellij-community,ernestp/consulo,amith01994/intellij-community,vladmm/intellij-community,xfournet/intellij-community,supersven/intellij-community,apixandru
/intellij-community,semonte/intellij-community,allotria/intellij-community,TangHao1987/intellij-community,idea4bsd/idea4bsd,suncycheng/intellij-community,idea4bsd/idea4bsd,gnuhub/intellij-community,samthor/intellij-community,slisson/intellij-community,apixandru/intellij-community,blademainer/intellij-community,orekyuu/intellij-community,holmes/intellij-community,xfournet/intellij-community,ernestp/consulo,kdwink/intellij-community,akosyakov/intellij-community,orekyuu/intellij-community,youdonghai/intellij-community,apixandru/intellij-community,suncycheng/intellij-community,MichaelNedzelsky/intellij-community,caot/intellij-community,SerCeMan/intellij-community,adedayo/intellij-community,ivan-fedorov/intellij-community,akosyakov/intellij-community,salguarnieri/intellij-community,MichaelNedzelsky/intellij-community,semonte/intellij-community,holmes/intellij-community,caot/intellij-community,SerCeMan/intellij-community,dslomov/intellij-community,adedayo/intellij-community,supersven/intellij-community,kool79/intellij-community,pwoodworth/intellij-community,akosyakov/intellij-community,amith01994/intellij-community,muntasirsyed/intellij-community,fengbaicanhe/intellij-community,fitermay/intellij-community,caot/intellij-community,allotria/intellij-community,ahb0327/intellij-community,signed/intellij-community,retomerz/intellij-community,wreckJ/intellij-community,fengbaicanhe/intellij-community,kdwink/intellij-community,adedayo/intellij-community,SerCeMan/intellij-community,michaelgallacher/intellij-community,samthor/intellij-community,allotria/intellij-community,michaelgallacher/intellij-community,retomerz/intellij-community,vladmm/intellij-community,gnuhub/intellij-community,izonder/intellij-community,TangHao1987/intellij-community,SerCeMan/intellij-community,gnuhub/intellij-community,kool79/intellij-community,gnuhub/intellij-community,fnouama/intellij-community,alphafoobar/intellij-community,FHannes/intellij-community,hurricup/intellij-community,salguarnieri/intellij-com
munity,asedunov/intellij-community,asedunov/intellij-community,vladmm/intellij-community,wreckJ/intellij-community,SerCeMan/intellij-community,kdwink/intellij-community,hurricup/intellij-community,MER-GROUP/intellij-community,xfournet/intellij-community,supersven/intellij-community,jagguli/intellij-community,nicolargo/intellij-community,consulo/consulo,idea4bsd/idea4bsd,ryano144/intellij-community,fnouama/intellij-community,nicolargo/intellij-community,MichaelNedzelsky/intellij-community,FHannes/intellij-community,kdwink/intellij-community,clumsy/intellij-community,fnouama/intellij-community,hurricup/intellij-community,dslomov/intellij-community,mglukhikh/intellij-community,consulo/consulo,fengbaicanhe/intellij-community,wreckJ/intellij-community,asedunov/intellij-community,jagguli/intellij-community,signed/intellij-community,ryano144/intellij-community,blademainer/intellij-community,kool79/intellij-community,da1z/intellij-community,ahb0327/intellij-community,fitermay/intellij-community,fitermay/intellij-community,Lekanich/intellij-community,youdonghai/intellij-community,mglukhikh/intellij-community,akosyakov/intellij-community,robovm/robovm-studio,MichaelNedzelsky/intellij-community,izonder/intellij-community,apixandru/intellij-community,fitermay/intellij-community,salguarnieri/intellij-community,diorcety/intellij-community,da1z/intellij-community,ibinti/intellij-community
/* * Copyright 2000-2011 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.ide.plugins; import com.intellij.CommonBundle; import com.intellij.ide.actions.ShowSettingsUtilImpl; import com.intellij.openapi.actionSystem.ActionGroup; import com.intellij.openapi.actionSystem.AnAction; import com.intellij.openapi.actionSystem.AnActionEvent; import com.intellij.openapi.actionSystem.DefaultActionGroup; import com.intellij.openapi.actionSystem.ex.ComboBoxAction; import com.intellij.openapi.extensions.PluginId; import com.intellij.openapi.fileChooser.FileChooser; import com.intellij.openapi.fileChooser.FileChooserDescriptor; import com.intellij.openapi.options.ex.SingleConfigurableEditor; import com.intellij.openapi.project.DumbAware; import com.intellij.openapi.ui.Messages; import com.intellij.openapi.updateSettings.impl.PluginDownloader; import com.intellij.openapi.util.Comparing; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.vfs.VfsUtil; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.ui.ScrollPaneFactory; import com.intellij.util.Function; import org.jetbrains.annotations.NotNull; import javax.swing.*; import java.awt.*; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.event.KeyEvent; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Set; /** * User: anna */ public class 
InstalledPluginsManagerMain extends PluginManagerMain { public InstalledPluginsManagerMain(PluginManagerUISettings uiSettings) { super(uiSettings); init(); myActionsPanel.setLayout(new FlowLayout(FlowLayout.LEFT)); final JButton button = new JButton("Browse repositories..."); button.setMnemonic('b'); button.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { final PluginManagerConfigurable configurable = createAvailableConfigurable(); new SingleConfigurableEditor(myActionsPanel, configurable, ShowSettingsUtilImpl.createDimensionKey(configurable), false).show(); } }); myActionsPanel.add(button); final JButton installPluginFromFileSystem = new JButton("Install plugin from disk..."); installPluginFromFileSystem.setMnemonic('d'); installPluginFromFileSystem.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { final FileChooserDescriptor descriptor = new FileChooserDescriptor(true, false, true, true, false, false){ @Override public boolean isFileSelectable(VirtualFile file) { final String extension = file.getExtension(); return Comparing.strEqual(extension, "jar") || Comparing.strEqual(extension, "zip"); } }; descriptor.setTitle("Choose Plugin File"); descriptor.setDescription("JAR and ZIP archives are accepted"); final VirtualFile virtualFile = FileChooser.chooseFile(myActionsPanel, descriptor); if (virtualFile != null) { final File file = VfsUtil.virtualToIoFile(virtualFile); try { PluginDownloader.install(file, file.getName()); setRequireShutdown(true); } catch (IOException ex) { Messages.showErrorDialog(ex.getMessage(), CommonBundle.getErrorTitle()); } } } }); myActionsPanel.add(installPluginFromFileSystem); } @Override protected void propagateUpdates(ArrayList<IdeaPluginDescriptor> list) { } private PluginManagerConfigurable createAvailableConfigurable() { return new PluginManagerConfigurable(PluginManagerUISettings.getInstance(), true) { @Override protected PluginManagerMain 
createPanel() { return new AvailablePluginsManagerMain(InstalledPluginsManagerMain.this, myUISettings); } @Override public String getDisplayName() { return "Browse Repositories"; } }; } protected JScrollPane createTable() { pluginsModel = new InstalledPluginsTableModel(); pluginTable = new PluginTable(pluginsModel); JScrollPane installedScrollPane = ScrollPaneFactory.createScrollPane(pluginTable); pluginTable.registerKeyboardAction(new ActionListener() { public void actionPerformed(ActionEvent e) { final int column = InstalledPluginsTableModel.getCheckboxColumn(); final int[] selectedRows = pluginTable.getSelectedRows(); boolean currentlyMarked = true; for (final int selectedRow : selectedRows) { if (selectedRow < 0 || !pluginTable.isCellEditable(selectedRow, column)) { return; } final Boolean enabled = (Boolean)pluginTable.getValueAt(selectedRow, column); currentlyMarked &= enabled == null || enabled.booleanValue(); } final IdeaPluginDescriptor[] selected = new IdeaPluginDescriptor[selectedRows.length]; for (int i = 0, selectedLength = selected.length; i < selectedLength; i++) { selected[i] = pluginsModel.getObjectAt(pluginTable.convertRowIndexToModel(selectedRows[i])); } ((InstalledPluginsTableModel)pluginsModel).enableRows(selected, currentlyMarked ? 
Boolean.FALSE : Boolean.TRUE); pluginTable.repaint(); } }, KeyStroke.getKeyStroke(KeyEvent.VK_SPACE, 0), JComponent.WHEN_FOCUSED); return installedScrollPane; } @Override protected ActionGroup getActionGroup(boolean inToolbar) { final DefaultActionGroup actionGroup = new DefaultActionGroup(); actionGroup.add(new RefreshAction()); if (!inToolbar) { actionGroup.add(new ActionUninstallPlugin(this, pluginTable)); actionGroup.add(new ActionInstallPlugin(this, this)); } else { actionGroup.add(new MyFilterEnabledAction()); //actionGroup.add(new MyFilterBundleAction()); } return actionGroup; } @Override public boolean isModified() { final boolean modified = super.isModified(); if (modified) return true; for (int i = 0; i < pluginsModel.getRowCount(); i++) { final IdeaPluginDescriptorImpl pluginDescriptor = (IdeaPluginDescriptorImpl)pluginsModel.getObjectAt(i); if (pluginDescriptor.isEnabled() != ((InstalledPluginsTableModel)pluginsModel).isEnabled(pluginDescriptor.getPluginId())) { return true; } } for (IdeaPluginDescriptor descriptor : pluginsModel.filtered) { if (((IdeaPluginDescriptorImpl)descriptor).isEnabled() != ((InstalledPluginsTableModel)pluginsModel).isEnabled(descriptor.getPluginId())) { return true; } } final List<String> disabledPlugins = PluginManager.getDisabledPlugins(); for (Map.Entry<PluginId, Boolean> entry : ((InstalledPluginsTableModel)pluginsModel).getEnabledMap().entrySet()) { final Boolean enabled = entry.getValue(); if (enabled != null && !enabled.booleanValue() && !disabledPlugins.contains(entry.getKey().toString())) { return true; } } return false; } @Override public String apply() { final String apply = super.apply(); if (apply != null) return apply; for (int i = 0; i < pluginTable.getRowCount(); i++) { final IdeaPluginDescriptorImpl pluginDescriptor = (IdeaPluginDescriptorImpl)pluginsModel.getObjectAt(i); final Boolean enabled = (Boolean)pluginsModel.getValueAt(i, InstalledPluginsTableModel.getCheckboxColumn()); 
pluginDescriptor.setEnabled(enabled != null && enabled.booleanValue()); } for (IdeaPluginDescriptor descriptor : pluginsModel.filtered) { ((IdeaPluginDescriptorImpl)descriptor).setEnabled( ((InstalledPluginsTableModel)pluginsModel).isEnabled(descriptor.getPluginId())); } try { final ArrayList<String> ids = new ArrayList<String>(); for (Map.Entry<PluginId, Boolean> entry : ((InstalledPluginsTableModel)pluginsModel).getEnabledMap().entrySet()) { final Boolean value = entry.getValue(); if (value != null && !value.booleanValue()) { ids.add(entry.getKey().getIdString()); } } PluginManager.saveDisabledPlugins(ids, false); } catch (IOException e) { LOG.error(e); } return null; } @Override protected String canApply() { final Map<PluginId, Set<PluginId>> dependentToRequiredListMap = ((InstalledPluginsTableModel)pluginsModel).getDependentToRequiredListMap(); if (!dependentToRequiredListMap.isEmpty()) { final StringBuffer sb = new StringBuffer("<html><body style=\"padding: 5px;\">Unable to apply changes: plugin") .append(dependentToRequiredListMap.size() == 1 ? " " : "s "); sb.append(StringUtil.join(dependentToRequiredListMap.keySet(), new Function<PluginId, String>() { public String fun(final PluginId pluginId) { final IdeaPluginDescriptor ideaPluginDescriptor = PluginManager.getPlugin(pluginId); return "\"" + (ideaPluginDescriptor != null ? 
ideaPluginDescriptor.getName() : pluginId.getIdString()) + "\""; } }, ", ")); sb.append(" won't be able to load.</body></html>"); return sb.toString(); } return super.canApply(); } private class MyFilterEnabledAction extends ComboBoxAction implements DumbAware { @Override public void update(AnActionEvent e) { super.update(e); e.getPresentation().setText("Show: " + ((InstalledPluginsTableModel)pluginsModel).getEnabledFilter()); } @NotNull @Override protected DefaultActionGroup createPopupActionGroup(JComponent button) { final DefaultActionGroup gr = new DefaultActionGroup(); for (final String enabledValue : InstalledPluginsTableModel.ENABLED_VALUES) { gr.add(new AnAction(enabledValue) { @Override public void actionPerformed(AnActionEvent e) { final String filter = myFilter.getFilter().toLowerCase(); ((InstalledPluginsTableModel)pluginsModel).setEnabledFilter(enabledValue, filter); } }); } return gr; } } private class MyFilterBundleAction extends ComboBoxAction implements DumbAware { @Override public void update(AnActionEvent e) { super.update(e); e.getPresentation().setVisible(((InstalledPluginsTableModel)pluginsModel).isBundledEnabled()); e.getPresentation().setText("Bundled: " + ((InstalledPluginsTableModel)pluginsModel).getBundledFilter()); } @NotNull @Override protected DefaultActionGroup createPopupActionGroup(JComponent button) { final DefaultActionGroup gr = new DefaultActionGroup(); for (final String bundledValue : InstalledPluginsTableModel.BUNDLED_VALUES) { gr.add(new AnAction(bundledValue) { @Override public void actionPerformed(AnActionEvent e) { final String filter = myFilter.getFilter().toLowerCase(); ((InstalledPluginsTableModel)pluginsModel).setBundledFilter(bundledValue, filter); } }); } return gr; } } }
platform/platform-impl/src/com/intellij/ide/plugins/InstalledPluginsManagerMain.java
/* * Copyright 2000-2011 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.ide.plugins; import com.intellij.CommonBundle; import com.intellij.ide.actions.ShowSettingsUtilImpl; import com.intellij.openapi.actionSystem.ActionGroup; import com.intellij.openapi.actionSystem.AnAction; import com.intellij.openapi.actionSystem.AnActionEvent; import com.intellij.openapi.actionSystem.DefaultActionGroup; import com.intellij.openapi.actionSystem.ex.ComboBoxAction; import com.intellij.openapi.extensions.PluginId; import com.intellij.openapi.fileChooser.FileChooser; import com.intellij.openapi.fileChooser.FileChooserDescriptor; import com.intellij.openapi.options.ex.SingleConfigurableEditor; import com.intellij.openapi.project.DumbAware; import com.intellij.openapi.ui.Messages; import com.intellij.openapi.updateSettings.impl.PluginDownloader; import com.intellij.openapi.util.Comparing; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.vfs.VfsUtil; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.ui.ScrollPaneFactory; import com.intellij.util.Function; import org.jetbrains.annotations.NotNull; import javax.swing.*; import java.awt.*; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.event.KeyEvent; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Set; /** * User: anna */ public class 
InstalledPluginsManagerMain extends PluginManagerMain { public InstalledPluginsManagerMain(PluginManagerUISettings uiSettings) { super(uiSettings); init(); myActionsPanel.setLayout(new FlowLayout(FlowLayout.LEFT)); final JButton button = new JButton("Browse repositories..."); button.setMnemonic('b'); button.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { final PluginManagerConfigurable configurable = createAvailableConfigurable(); new SingleConfigurableEditor(myActionsPanel, configurable, ShowSettingsUtilImpl.createDimensionKey(configurable), false).show(); } }); myActionsPanel.add(button); final JButton installPluginFromFileSystem = new JButton("Install plugin from disk..."); installPluginFromFileSystem.setMnemonic('d'); installPluginFromFileSystem.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { final FileChooserDescriptor descriptor = new FileChooserDescriptor(true, false, true, true, false, false){ @Override public boolean isFileSelectable(VirtualFile file) { final String extension = file.getExtension(); return Comparing.strEqual(extension, "jar") || Comparing.strEqual(extension, "zip"); } }; descriptor.setTitle("Choose Plugin File"); final VirtualFile virtualFile = FileChooser.chooseFile(myActionsPanel, descriptor); if (virtualFile != null) { final File file = VfsUtil.virtualToIoFile(virtualFile); try { PluginDownloader.install(file, file.getName()); setRequireShutdown(true); } catch (IOException ex) { Messages.showErrorDialog(ex.getMessage(), CommonBundle.getErrorTitle()); } } } }); myActionsPanel.add(installPluginFromFileSystem); } @Override protected void propagateUpdates(ArrayList<IdeaPluginDescriptor> list) { } private PluginManagerConfigurable createAvailableConfigurable() { return new PluginManagerConfigurable(PluginManagerUISettings.getInstance(), true) { @Override protected PluginManagerMain createPanel() { return new 
AvailablePluginsManagerMain(InstalledPluginsManagerMain.this, myUISettings); } @Override public String getDisplayName() { return "Browse Repositories"; } }; } protected JScrollPane createTable() { pluginsModel = new InstalledPluginsTableModel(); pluginTable = new PluginTable(pluginsModel); JScrollPane installedScrollPane = ScrollPaneFactory.createScrollPane(pluginTable); pluginTable.registerKeyboardAction(new ActionListener() { public void actionPerformed(ActionEvent e) { final int column = InstalledPluginsTableModel.getCheckboxColumn(); final int[] selectedRows = pluginTable.getSelectedRows(); boolean currentlyMarked = true; for (final int selectedRow : selectedRows) { if (selectedRow < 0 || !pluginTable.isCellEditable(selectedRow, column)) { return; } final Boolean enabled = (Boolean)pluginTable.getValueAt(selectedRow, column); currentlyMarked &= enabled == null || enabled.booleanValue(); } final IdeaPluginDescriptor[] selected = new IdeaPluginDescriptor[selectedRows.length]; for (int i = 0, selectedLength = selected.length; i < selectedLength; i++) { selected[i] = pluginsModel.getObjectAt(pluginTable.convertRowIndexToModel(selectedRows[i])); } ((InstalledPluginsTableModel)pluginsModel).enableRows(selected, currentlyMarked ? 
Boolean.FALSE : Boolean.TRUE); pluginTable.repaint(); } }, KeyStroke.getKeyStroke(KeyEvent.VK_SPACE, 0), JComponent.WHEN_FOCUSED); return installedScrollPane; } @Override protected ActionGroup getActionGroup(boolean inToolbar) { final DefaultActionGroup actionGroup = new DefaultActionGroup(); actionGroup.add(new RefreshAction()); if (!inToolbar) { actionGroup.add(new ActionUninstallPlugin(this, pluginTable)); actionGroup.add(new ActionInstallPlugin(this, this)); } else { actionGroup.add(new MyFilterEnabledAction()); //actionGroup.add(new MyFilterBundleAction()); } return actionGroup; } @Override public boolean isModified() { final boolean modified = super.isModified(); if (modified) return true; for (int i = 0; i < pluginsModel.getRowCount(); i++) { final IdeaPluginDescriptorImpl pluginDescriptor = (IdeaPluginDescriptorImpl)pluginsModel.getObjectAt(i); if (pluginDescriptor.isEnabled() != ((InstalledPluginsTableModel)pluginsModel).isEnabled(pluginDescriptor.getPluginId())) { return true; } } for (IdeaPluginDescriptor descriptor : pluginsModel.filtered) { if (((IdeaPluginDescriptorImpl)descriptor).isEnabled() != ((InstalledPluginsTableModel)pluginsModel).isEnabled(descriptor.getPluginId())) { return true; } } final List<String> disabledPlugins = PluginManager.getDisabledPlugins(); for (Map.Entry<PluginId, Boolean> entry : ((InstalledPluginsTableModel)pluginsModel).getEnabledMap().entrySet()) { final Boolean enabled = entry.getValue(); if (enabled != null && !enabled.booleanValue() && !disabledPlugins.contains(entry.getKey().toString())) { return true; } } return false; } @Override public String apply() { final String apply = super.apply(); if (apply != null) return apply; for (int i = 0; i < pluginTable.getRowCount(); i++) { final IdeaPluginDescriptorImpl pluginDescriptor = (IdeaPluginDescriptorImpl)pluginsModel.getObjectAt(i); final Boolean enabled = (Boolean)pluginsModel.getValueAt(i, InstalledPluginsTableModel.getCheckboxColumn()); 
pluginDescriptor.setEnabled(enabled != null && enabled.booleanValue()); } for (IdeaPluginDescriptor descriptor : pluginsModel.filtered) { ((IdeaPluginDescriptorImpl)descriptor).setEnabled( ((InstalledPluginsTableModel)pluginsModel).isEnabled(descriptor.getPluginId())); } try { final ArrayList<String> ids = new ArrayList<String>(); for (Map.Entry<PluginId, Boolean> entry : ((InstalledPluginsTableModel)pluginsModel).getEnabledMap().entrySet()) { final Boolean value = entry.getValue(); if (value != null && !value.booleanValue()) { ids.add(entry.getKey().getIdString()); } } PluginManager.saveDisabledPlugins(ids, false); } catch (IOException e) { LOG.error(e); } return null; } @Override protected String canApply() { final Map<PluginId, Set<PluginId>> dependentToRequiredListMap = ((InstalledPluginsTableModel)pluginsModel).getDependentToRequiredListMap(); if (!dependentToRequiredListMap.isEmpty()) { final StringBuffer sb = new StringBuffer("<html><body style=\"padding: 5px;\">Unable to apply changes: plugin") .append(dependentToRequiredListMap.size() == 1 ? " " : "s "); sb.append(StringUtil.join(dependentToRequiredListMap.keySet(), new Function<PluginId, String>() { public String fun(final PluginId pluginId) { final IdeaPluginDescriptor ideaPluginDescriptor = PluginManager.getPlugin(pluginId); return "\"" + (ideaPluginDescriptor != null ? 
ideaPluginDescriptor.getName() : pluginId.getIdString()) + "\""; } }, ", ")); sb.append(" won't be able to load.</body></html>"); return sb.toString(); } return super.canApply(); } private class MyFilterEnabledAction extends ComboBoxAction implements DumbAware { @Override public void update(AnActionEvent e) { super.update(e); e.getPresentation().setText("Show: " + ((InstalledPluginsTableModel)pluginsModel).getEnabledFilter()); } @NotNull @Override protected DefaultActionGroup createPopupActionGroup(JComponent button) { final DefaultActionGroup gr = new DefaultActionGroup(); for (final String enabledValue : InstalledPluginsTableModel.ENABLED_VALUES) { gr.add(new AnAction(enabledValue) { @Override public void actionPerformed(AnActionEvent e) { final String filter = myFilter.getFilter().toLowerCase(); ((InstalledPluginsTableModel)pluginsModel).setEnabledFilter(enabledValue, filter); } }); } return gr; } } private class MyFilterBundleAction extends ComboBoxAction implements DumbAware { @Override public void update(AnActionEvent e) { super.update(e); e.getPresentation().setVisible(((InstalledPluginsTableModel)pluginsModel).isBundledEnabled()); e.getPresentation().setText("Bundled: " + ((InstalledPluginsTableModel)pluginsModel).getBundledFilter()); } @NotNull @Override protected DefaultActionGroup createPopupActionGroup(JComponent button) { final DefaultActionGroup gr = new DefaultActionGroup(); for (final String bundledValue : InstalledPluginsTableModel.BUNDLED_VALUES) { gr.add(new AnAction(bundledValue) { @Override public void actionPerformed(AnActionEvent e) { final String filter = myFilter.getFilter().toLowerCase(); ((InstalledPluginsTableModel)pluginsModel).setBundledFilter(bundledValue, filter); } }); } return gr; } } }
install plugin from disk: description added (IDEA-76168)
platform/platform-impl/src/com/intellij/ide/plugins/InstalledPluginsManagerMain.java
install plugin from disk: description added (IDEA-76168)
<ide><path>latform/platform-impl/src/com/intellij/ide/plugins/InstalledPluginsManagerMain.java <ide> } <ide> }; <ide> descriptor.setTitle("Choose Plugin File"); <add> descriptor.setDescription("JAR and ZIP archives are accepted"); <ide> final VirtualFile virtualFile = FileChooser.chooseFile(myActionsPanel, descriptor); <ide> if (virtualFile != null) { <ide> final File file = VfsUtil.virtualToIoFile(virtualFile);
Java
lgpl-2.1
654f87b257d277111c3dc81626d2fce615832042
0
CloverETL/CloverETL-Engine,CloverETL/CloverETL-Engine,CloverETL/CloverETL-Engine,CloverETL/CloverETL-Engine
/* * jETeL/Clover - Java based ETL application framework. * Copyright (C) 2002-04 David Pavlis <[email protected]> * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA * */ package org.jetel.util; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Properties; import java.util.Set; import org.jetel.exception.NotFoundException; import org.w3c.dom.NamedNodeMap; /** * Helper class (wrapper) around NamedNodeMap with possibility to parse string * values into integers, booleans, doubles..<br> * Used in conjunction with org.jetel.Component.*<br> * Converts any child nodes of form <code>&lt;attr name="xyz"&gt;abcd&lt;/attr&gt;</code> into * attribute of the current node with name="xyz" and value="abcd".<br> * Example:<br> * <code><pre> * &lt;Node id="mynode" name="xyz" append="yes"&gt; * &lt;attr name="query"&gt; * select * from my_table; * &lt;/attr&gt; * &lt;attr name="code"&gt; * a=b*10-20%50; * &lt;/attr&gt; * &lt;/Node&gt; * </pre></code> * * There will be following attribute/value pairs available for getXXX() calls: * <ul> * <li>id - "mynode"</li> * <li>name - "xyz"</li> * <li>append - "yes"</li> * <li>query - "select * from my_table;"</li> * <li>code - "a=b*10-20%50;"</li> * </ul> * @author dpavlis * @since July 25, 2002 * @revision $Revision$ * @created 26. 
March 2003 */ public class ComponentXMLAttributes { private NamedNodeMap attributes; private org.w3c.dom.Node nodeXML; private PropertyRefResolver refResolver; public static final String XML_ATTRIBUTE_NODE_NAME = "attr"; public static final String XML_ATTRIBUTE_NODE_NAME_ATTRIBUTE = "name"; //private Map childNodes; /** * Constructor for the ComponentXMLAttributes object * * @param nodeXML Description of the Parameter */ public ComponentXMLAttributes(org.w3c.dom.Node nodeXML) { this(nodeXML,null); } /** *Constructor for the ComponentXMLAttributes object * * @param nodeXML Description of the Parameter * @param properties Description of the Parameter */ public ComponentXMLAttributes(org.w3c.dom.Node nodeXML, Properties properties) { this.nodeXML = nodeXML; refResolver=new PropertyRefResolver( properties!=null ? properties : new Properties()); instantiateInlinedNodeAttributes(nodeXML); this.attributes = nodeXML.getAttributes(); } private void instantiateInlinedNodeAttributes(org.w3c.dom.Node nodeXML){ org.w3c.dom.Node childNode; org.w3c.dom.NodeList list; NamedNodeMap childNodeAttributes; String newAttributeName; String newAttributeValue; // add all "inlined" attributes in form of "attr" node as normal attributes if (nodeXML.hasChildNodes()) { list = nodeXML.getChildNodes(); for (int i = 0; i < list.getLength(); i++) { childNode = list.item(i); if (childNode.getNodeName().equalsIgnoreCase(XML_ATTRIBUTE_NODE_NAME)) { newAttributeName=childNode.getAttributes().getNamedItem(XML_ATTRIBUTE_NODE_NAME_ATTRIBUTE).getNodeValue(); // get text value newAttributeValue=null; org.w3c.dom.NodeList childList = childNode.getChildNodes(); for (int j = 0; j < list.getLength(); j++) { org.w3c.dom.Node child2Node = childList.item(j); if (child2Node.getNodeType() == org.w3c.dom.Node.TEXT_NODE) { newAttributeValue=child2Node.getNodeValue(); break; } } // add value of child node as attribute, also create new attribute node if (newAttributeName!=null && newAttributeValue!=null){ 
org.w3c.dom.Attr newAttribute = nodeXML.getOwnerDocument().createAttribute(newAttributeName); newAttribute.setValue(newAttributeValue); nodeXML.getAttributes().setNamedItem(newAttribute); // remove child node as it is now included as an attribute - in attribute nodeXML.removeChild(childNode); } } } } } /** * Returns the String value of specified XML attribute * * @param key name of the attribute * @return The string value */ public String getString(String key) { try { return refResolver.resolveRef(attributes.getNamedItem(key).getNodeValue()); } catch (Exception ex) { throw new NotFoundException("Attribute " + key + " not found!"); } } /** * Returns the String value of specified XML attribute * * @param key name of the attribute * @param defaultValue default value to be returned when attribute can't be found * @return The string value */ public String getString(String key, String defaultValue) { try { return refResolver.resolveRef(attributes.getNamedItem(key).getNodeValue()); } catch (Exception ex) { return defaultValue; } } /** * Returns the int value of specified XML attribute * * @param key name of the attribute * @return The integer value */ public int getInteger(String key) { String value; try { value = refResolver.resolveRef(attributes.getNamedItem(key).getNodeValue()); } catch (NullPointerException ex) { throw new NotFoundException("Attribute " + key + " not found!"); } return Integer.parseInt(value); } /** * Returns the int value of specified XML attribute * * @param key name of the attribute * @param defaultValue default value to be returned when attribute can't be found * @return The integer value */ public int getInteger(String key, int defaultValue) { String value; try { value = refResolver.resolveRef(attributes.getNamedItem(key).getNodeValue()); return Integer.parseInt(value); } catch (Exception ex) { return defaultValue; } } /** * Returns the boolean value of specified XML attribute * * @param key name of the attribute * @return The boolean value */ 
public boolean getBoolean(String key) { String value; try { value = refResolver.resolveRef(attributes.getNamedItem(key).getNodeValue()); } catch (NullPointerException ex) { throw new NotFoundException("Attribute " + key + " not found!"); } return value.matches("^[tTyY].*"); } /** * Returns the boolean value of specified XML attribute * * @param key name of the attribute * @param defaultValue default value to be returned when attribute can't be found * @return The boolean value */ public boolean getBoolean(String key, boolean defaultValue) { String value; try { value = refResolver.resolveRef(attributes.getNamedItem(key).getNodeValue()); return value.matches("^[tTyY].*"); } catch (Exception ex) { return defaultValue; } } /** * Returns the double value of specified XML attribute * * @param key name of the attribute * @return The double value */ public double getDouble(String key) { String value; try { value = refResolver.resolveRef(attributes.getNamedItem(key).getNodeValue()); } catch (NullPointerException ex) { throw new NotFoundException("Attribute " + key + " not found!"); } return Double.parseDouble(value); } /** * Returns the double value of specified XML attribute * * @param key name of the attribute * @param defaultValue default value to be returned when attribute can't be found * @return The double value */ public double getDouble(String key, double defaultValue) { String value; try { value = refResolver.resolveRef(attributes.getNamedItem(key).getNodeValue()); return Double.parseDouble(value); } catch (Exception ex) { return defaultValue; } } /** * Checks whether specified attribute exists (XML node has such attribute defined) * * @param key name of the attribute * @return true if exists, otherwise false */ public boolean exists(String key) { if (attributes.getNamedItem(key) != null) { return true; } else { return false; } } /** * Returns first TEXT_NODE child under specified XML Node * * @param nodeXML XML node from which to start searching * @return The 
TEXT_NODE value (String) if any exist or null */ public String getText(org.w3c.dom.Node nodeXML) { org.w3c.dom.Node childNode; org.w3c.dom.NodeList list; if (nodeXML.hasChildNodes()) { list = nodeXML.getChildNodes(); for (int i = 0; i < list.getLength(); i++) { childNode = list.item(i); if (childNode.getNodeType() == org.w3c.dom.Node.TEXT_NODE) { return refResolver.resolveRef(childNode.getNodeValue()); } } } throw new NotFoundException("Text not found !"); } /** * Searches for specific child node name under specified XML Node * * @param nodeXML XML node from which to start searching * @param childNodeName name of the child node to be searched for * @return childNode if exist under specified name or null */ public org.w3c.dom.Node getChildNode(org.w3c.dom.Node nodeXML, String childNodeName) { org.w3c.dom.Node childNode; org.w3c.dom.NodeList list; if (nodeXML.hasChildNodes()) { list = nodeXML.getChildNodes(); for (int i = 0; i < list.getLength(); i++) { childNode = list.item(i); if (childNodeName.equals(childNode.getNodeName())) { return childNode; } else { childNode = getChildNode(childNode, childNodeName); if (childNode != null) { return childNode; } } } } return null; } /** * Gets the childNodes attribute of the ComponentXMLAttributes object * * @param nodeXML Description of the Parameter * @param childNodeName Description of the Parameter * @return The childNodes value */ public org.w3c.dom.Node[] getChildNodes(org.w3c.dom.Node nodeXML, String childNodeName) { org.w3c.dom.Node childNode; org.w3c.dom.NodeList list; List childNodesList = new LinkedList(); if (nodeXML.hasChildNodes()) { list = nodeXML.getChildNodes(); for (int i = 0; i < list.getLength(); i++) { childNode = list.item(i); if (childNodeName.equals(childNode.getNodeName())) { childNodesList.add(childNode); } } } return (org.w3c.dom.Node[]) childNodesList.toArray(new org.w3c.dom.Node[0]); } /** * Converts XML Node's attributes to Properties object - hash of key-value pairs. 
* Can omit/exclude certain attributes based on specified array of Strings - attribute * names. * @param exclude array of Strings - names of attributes to be excluded (can be null) * @return Properties object with pairs [attribute name]-[attribute value] */ public Properties attributes2Properties(String[] exclude){ Properties properties=new Properties(); Set exception=new HashSet(); String name; if (exclude!=null){ for (int i=0;i<exclude.length;i++){ exception.add(exclude[i]); } } for (int i=0; i<attributes.getLength();i++){ name=attributes.item(i).getLocalName(); if (!exception.contains(name)){ properties.setProperty(name, refResolver.resolveRef(attributes.item(i).getNodeValue())); } } return properties; } /** * Replaces references to parameters in string with parameters' values. * * @param input string in which references to parameters should be resolved * (substituted with parameters' values) * @return String with references resolved. */ public String resloveReferences(String input){ return refResolver.resolveRef(input); } } /* * End class StringUtils */
cloveretl.engine/src/org/jetel/util/ComponentXMLAttributes.java
/* * jETeL/Clover - Java based ETL application framework. * Copyright (C) 2002-04 David Pavlis <[email protected]> * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA * */ package org.jetel.util; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Properties; import java.util.Set; import org.jetel.exception.NotFoundException; import org.w3c.dom.NamedNodeMap; /** * Helper class (wrapper) around NamedNodeMap with possibility to parse string * values into integers, booleans, doubles..<br> * Used in conjunction with org.jetel.Component.* * * @author dpavlis * @since July 25, 2002 * @revision $Revision$ * @created 26. 
March 2003 */ public class ComponentXMLAttributes { private NamedNodeMap attributes; private org.w3c.dom.Node nodeXML; private PropertyRefResolver refResolver; //private Map childNodes; /** * Constructor for the ComponentXMLAttributes object * * @param nodeXML Description of the Parameter */ public ComponentXMLAttributes(org.w3c.dom.Node nodeXML) { attributes = nodeXML.getAttributes(); this.nodeXML = nodeXML; // if some property is defined for graph, we will // try to look for refereneces to graph properties within XML attributes values; refResolver=new PropertyRefResolver(); } /** *Constructor for the ComponentXMLAttributes object * * @param nodeXML Description of the Parameter * @param properties Description of the Parameter */ public ComponentXMLAttributes(org.w3c.dom.Node nodeXML, Properties properties) { attributes = nodeXML.getAttributes(); this.nodeXML = nodeXML; refResolver=new PropertyRefResolver(properties); } /** * Returns the String value of specified XML attribute * * @param key name of the attribute * @return The string value */ public String getString(String key) { try { return refResolver.resolveRef(attributes.getNamedItem(key).getNodeValue()); } catch (Exception ex) { throw new NotFoundException("Attribute " + key + " not found!"); } } /** * Returns the String value of specified XML attribute * * @param key name of the attribute * @param defaultValue default value to be returned when attribute can't be found * @return The string value */ public String getString(String key, String defaultValue) { try { return refResolver.resolveRef(attributes.getNamedItem(key).getNodeValue()); } catch (Exception ex) { return defaultValue; } } /** * Returns the int value of specified XML attribute * * @param key name of the attribute * @return The integer value */ public int getInteger(String key) { String value; try { value = refResolver.resolveRef(attributes.getNamedItem(key).getNodeValue()); } catch (NullPointerException ex) { throw new 
NotFoundException("Attribute " + key + " not found!"); } return Integer.parseInt(value); } /** * Returns the int value of specified XML attribute * * @param key name of the attribute * @param defaultValue default value to be returned when attribute can't be found * @return The integer value */ public int getInteger(String key, int defaultValue) { String value; try { value = refResolver.resolveRef(attributes.getNamedItem(key).getNodeValue()); return Integer.parseInt(value); } catch (Exception ex) { return defaultValue; } } /** * Returns the boolean value of specified XML attribute * * @param key name of the attribute * @return The boolean value */ public boolean getBoolean(String key) { String value; try { value = refResolver.resolveRef(attributes.getNamedItem(key).getNodeValue()); } catch (NullPointerException ex) { throw new NotFoundException("Attribute " + key + " not found!"); } return value.matches("^[tTyY].*"); } /** * Returns the boolean value of specified XML attribute * * @param key name of the attribute * @param defaultValue default value to be returned when attribute can't be found * @return The boolean value */ public boolean getBoolean(String key, boolean defaultValue) { String value; try { value = refResolver.resolveRef(attributes.getNamedItem(key).getNodeValue()); return value.matches("^[tTyY].*"); } catch (Exception ex) { return defaultValue; } } /** * Returns the double value of specified XML attribute * * @param key name of the attribute * @return The double value */ public double getDouble(String key) { String value; try { value = refResolver.resolveRef(attributes.getNamedItem(key).getNodeValue()); } catch (NullPointerException ex) { throw new NotFoundException("Attribute " + key + " not found!"); } return Double.parseDouble(value); } /** * Returns the double value of specified XML attribute * * @param key name of the attribute * @param defaultValue default value to be returned when attribute can't be found * @return The double value */ public 
double getDouble(String key, double defaultValue) { String value; try { value = refResolver.resolveRef(attributes.getNamedItem(key).getNodeValue()); return Double.parseDouble(value); } catch (Exception ex) { return defaultValue; } } /** * Checks whether specified attribute exists (XML node has such attribute defined) * * @param key name of the attribute * @return true if exists, otherwise false */ public boolean exists(String key) { if (attributes.getNamedItem(key) != null) { return true; } else { return false; } } /** * Returns first TEXT_NODE child under specified XML Node * * @param nodeXML XML node from which to start searching * @return The TEXT_NODE value (String) if any exist or null */ public String getText(org.w3c.dom.Node nodeXML) { org.w3c.dom.Node childNode; org.w3c.dom.NodeList list; if (nodeXML.hasChildNodes()) { list = nodeXML.getChildNodes(); for (int i = 0; i < list.getLength(); i++) { childNode = list.item(i); if (childNode.getNodeType() == org.w3c.dom.Node.TEXT_NODE) { return refResolver.resolveRef(childNode.getNodeValue()); } } } throw new NotFoundException("Text not found !"); } /** * Searches for specific child node name under specified XML Node * * @param nodeXML XML node from which to start searching * @param childNodeName name of the child node to be searched for * @return childNode if exist under specified name or null */ public org.w3c.dom.Node getChildNode(org.w3c.dom.Node nodeXML, String childNodeName) { org.w3c.dom.Node childNode; org.w3c.dom.NodeList list; if (nodeXML.hasChildNodes()) { list = nodeXML.getChildNodes(); for (int i = 0; i < list.getLength(); i++) { childNode = list.item(i); if (childNodeName.equals(childNode.getNodeName())) { return childNode; } else { childNode = getChildNode(childNode, childNodeName); if (childNode != null) { return childNode; } } } } return null; } /** * Gets the childNodes attribute of the ComponentXMLAttributes object * * @param nodeXML Description of the Parameter * @param childNodeName Description 
of the Parameter * @return The childNodes value */ public org.w3c.dom.Node[] getChildNodes(org.w3c.dom.Node nodeXML, String childNodeName) { org.w3c.dom.Node childNode; org.w3c.dom.NodeList list; List childNodesList = new LinkedList(); if (nodeXML.hasChildNodes()) { list = nodeXML.getChildNodes(); for (int i = 0; i < list.getLength(); i++) { childNode = list.item(i); if (childNodeName.equals(childNode.getNodeName())) { childNodesList.add(childNode); } } } return (org.w3c.dom.Node[]) childNodesList.toArray(new org.w3c.dom.Node[0]); } /** * Converts XML Node's attributes to Properties object - hash of key-value pairs. * Can omit/exclude certain attributes based on specified array of Strings - attribute * names. * @param exclude array of Strings - names of attributes to be excluded (can be null) * @return Properties object with pairs [attribute name]-[attribute value] */ public Properties attributes2Properties(String[] exclude){ Properties properties=new Properties(); Set exception=new HashSet(); String name; if (exclude!=null){ for (int i=0;i<exclude.length;i++){ exception.add(exclude[i]); } } for (int i=0; i<attributes.getLength();i++){ name=attributes.item(i).getLocalName(); if (!exception.contains(name)){ properties.setProperty(name, refResolver.resolveRef(attributes.item(i).getNodeValue())); } } return properties; } /** * Replaces references to parameters in string with parameters' values. * * @param input string in which references to parameters should be resolved * (substituted with parameters' values) * @return String with references resolved. */ public String resloveReferences(String input){ return refResolver.resolveRef(input); } } /* * End class StringUtils */
added support for "<attr>" XML child-nodes - are translated as node's attributes git-svn-id: 7003860f782148507aa0d02fa3b12992383fb6a5@604 a09ad3ba-1a0f-0410-b1b9-c67202f10d70
cloveretl.engine/src/org/jetel/util/ComponentXMLAttributes.java
added support for "<attr>" XML child-nodes - are translated as node's attributes
<ide><path>loveretl.engine/src/org/jetel/util/ComponentXMLAttributes.java <ide> /** <ide> * Helper class (wrapper) around NamedNodeMap with possibility to parse string <ide> * values into integers, booleans, doubles..<br> <del> * Used in conjunction with org.jetel.Component.* <add> * Used in conjunction with org.jetel.Component.*<br> <add> * Converts any child nodes of form <code>&lt;attr name="xyz"&gt;abcd&lt;/attr&gt;</code> into <add> * attribute of the current node with name="xyz" and value="abcd".<br> <add> * Example:<br> <add> * <code><pre> <add> * &lt;Node id="mynode" name="xyz" append="yes"&gt; <add> * &lt;attr name="query"&gt; <add> * select * from my_table; <add> * &lt;/attr&gt; <add> * &lt;attr name="code"&gt; <add> * a=b*10-20%50; <add> * &lt;/attr&gt; <add> * &lt;/Node&gt; <add> * </pre></code> <ide> * <add> * There will be following attribute/value pairs available for getXXX() calls: <add> * <ul> <add> * <li>id - "mynode"</li> <add> * <li>name - "xyz"</li> <add> * <li>append - "yes"</li> <add> * <li>query - "select * from my_table;"</li> <add> * <li>code - "a=b*10-20%50;"</li> <add> * </ul> <ide> * @author dpavlis <ide> * @since July 25, 2002 <ide> * @revision $Revision$ <ide> private org.w3c.dom.Node nodeXML; <ide> private PropertyRefResolver refResolver; <ide> <add> public static final String XML_ATTRIBUTE_NODE_NAME = "attr"; <add> public static final String XML_ATTRIBUTE_NODE_NAME_ATTRIBUTE = "name"; <ide> <ide> //private Map childNodes; <ide> <ide> * @param nodeXML Description of the Parameter <ide> */ <ide> public ComponentXMLAttributes(org.w3c.dom.Node nodeXML) { <del> attributes = nodeXML.getAttributes(); <del> this.nodeXML = nodeXML; <del> // if some property is defined for graph, we will <del> // try to look for refereneces to graph properties within XML attributes values; <del> refResolver=new PropertyRefResolver(); <add> this(nodeXML,null); <ide> } <ide> <ide> <ide> * @param properties Description of the Parameter <ide> */ <ide> public 
ComponentXMLAttributes(org.w3c.dom.Node nodeXML, Properties properties) { <del> attributes = nodeXML.getAttributes(); <add> <ide> this.nodeXML = nodeXML; <del> refResolver=new PropertyRefResolver(properties); <del> <add> refResolver=new PropertyRefResolver( properties!=null ? properties : new Properties()); <add> instantiateInlinedNodeAttributes(nodeXML); <add> this.attributes = nodeXML.getAttributes(); <add> } <add> <add> private void instantiateInlinedNodeAttributes(org.w3c.dom.Node nodeXML){ <add> org.w3c.dom.Node childNode; <add> org.w3c.dom.NodeList list; <add> NamedNodeMap childNodeAttributes; <add> String newAttributeName; <add> String newAttributeValue; <add> <add> // add all "inlined" attributes in form of "attr" node as normal attributes <add> if (nodeXML.hasChildNodes()) { <add> list = nodeXML.getChildNodes(); <add> for (int i = 0; i < list.getLength(); i++) { <add> childNode = list.item(i); <add> if (childNode.getNodeName().equalsIgnoreCase(XML_ATTRIBUTE_NODE_NAME)) { <add> newAttributeName=childNode.getAttributes().getNamedItem(XML_ATTRIBUTE_NODE_NAME_ATTRIBUTE).getNodeValue(); <add> // get text value <add> newAttributeValue=null; <add> org.w3c.dom.NodeList childList = childNode.getChildNodes(); <add> for (int j = 0; j < list.getLength(); j++) { <add> org.w3c.dom.Node child2Node = childList.item(j); <add> if (child2Node.getNodeType() == org.w3c.dom.Node.TEXT_NODE) { <add> newAttributeValue=child2Node.getNodeValue(); <add> break; <add> } <add> } <add> // add value of child node as attribute, also create new attribute node <add> if (newAttributeName!=null && newAttributeValue!=null){ <add> org.w3c.dom.Attr newAttribute = nodeXML.getOwnerDocument().createAttribute(newAttributeName); <add> newAttribute.setValue(newAttributeValue); <add> nodeXML.getAttributes().setNamedItem(newAttribute); <add> // remove child node as it is now included as an attribute - in attribute <add> nodeXML.removeChild(childNode); <add> } <add> <add> } <add> } <add> } <add> <ide> } 
<ide> <ide>
JavaScript
mit
5161627a0263114e06a4bb90277d266142f8b869
0
pgte/skiff
'use strict' const debug = require('debug')('skiff.peer-leader') const timers = require('timers') const EventEmitter = require('events') const BatchTransformStream = require('./lib/batch-transform-stream') class PeerLeader extends EventEmitter { constructor (address, node, options) { if (typeof address !== 'string') { throw new Error('need address to be a string') } super() this._address = address this._node = node this._options = options this._nextIndex = this._node.log._lastLogIndex + 1 this._matchIndex = 0 this._needsIndex = 0 this._installingSnapshot = false this._lastSent = 0 this._stopped = false this._appendEntries() } stop () { this._stopped = true this._clearAppendEntriesTimeout() } needsIndex (index) { if (index > this._needsIndex) { this._needsIndex = index } if (this._needsMore()) { timers.setImmediate(this._appendEntries.bind(this)) } } _needsMore () { return this._nextIndex <= this._needsIndex } _appendEntries () { debug('sending AppendEntries to %s', this._address) if (this._stopped) { return } if (this._installingSnapshot) { this._resetAppendEntriesTimeout() return } const log = this._node.log const currentTerm = this._node.state.term() const entriesReply = this._entries() const entries = entriesReply.entries const capped = entriesReply.capped if (entries) { debug('%s: entries for %s are: %j', this._node.state.id, this._address, entries) const previousEntry = this._previousEntry() const lastEntry = entries[entries.length - 1] const leaderCommit = log._commitIndex const appendEntriesArgs = { term: currentTerm, leaderId: this._node.state.id, prevLogIndex: previousEntry && previousEntry.i || 0, prevLogTerm: previousEntry && previousEntry.t || 0, entries, leaderCommit } this._lastSent = Date.now() this._resetAppendEntriesTimeout() this._node.network.rpc( { to: this._address, action: 'AppendEntries', params: appendEntriesArgs }, (err, reply) => { // callback debug('%s: got reply to AppendEntries from %s: %j', this._node.state.id, this._address, reply) if 
(err) { debug('%s: error on AppendEntries reply:\n%s', this._node.state.id, err.stack) } else if (reply && reply.params) { if (reply.params.success) { this._matchIndex = leaderCommit if (lastEntry) { this._nextIndex = lastEntry.i + 1 } const commitedEntry = lastEntry || previousEntry const commitedIndex = commitedEntry && commitedEntry.i || 0 this.emit('committed', this, commitedIndex) } else { if (reply.params.lastIndexForTerm !== undefined) { this._nextIndex = reply.params.lastIndexForTerm + 1 } else { this._nextIndex -- } } if (this._needsMore()) { timers.setImmediate(this._appendEntries.bind(this)) } } } ) } else { // no log entries for peer that's lagging behind debug('%s: peer %s is lagging behind (next index is %d), going to install snapshot', this._node.state.id, this._address, this._nextIndex) this._resetAppendEntriesTimeout() return this._installSnapshot() } } _clearAppendEntriesTimeout () { if (this._appendEntriesTimeout) { timers.clearTimeout(this._appendEntriesTimeout) } this._appendEntriesTimeout = null } _setAppendEntriesTimeout () { debug('%s: setting the append entries timeout to %d ms', this._node.state.id, this._options.appendEntriesIntervalMS) this._appendEntriesTimeout = timers.setTimeout( this._onAppendEntriesTimeout.bind(this), this._options.appendEntriesIntervalMS) } _resetAppendEntriesTimeout () { this._clearAppendEntriesTimeout() this._setAppendEntriesTimeout() } _onAppendEntriesTimeout () { debug('%s: AppendEntries timedout', this._node.state.id) this._appendEntries() } _entries () { debug('follower %s next index is %d', this._address, this._nextIndex) const start = this._nextIndex let entries = this._node.log.entriesFrom(start) const cap = entries && (entries.length > this._options.batchEntriesLimit) if (cap) { entries = entries.slice(0, this._options.batchEntriesLimit) } return { capped: cap, entries } } _previousEntry () { return this._node.log.atLogIndex(this._nextIndex - 1) } // Install snapshot _installSnapshot () { debug('%s: 
_installSnapshot on %s', this._node.state.id, this._address) if (this._stopped) { return } const self = this const log = this._node.state.log this._clearAppendEntriesTimeout() let finished = false let offset = 0 this._installingSnapshot = true const lastIncludedIndex = log._lastApplied const lastIncludedTerm = log._lastAppliedTerm const rs = this._node.state.db.state.createReadStream() const stream = rs.pipe( new BatchTransformStream({ batchSize: this._options.installSnapshotChunkSize }) ) stream.on('data', installSnapshot) function installSnapshot (data) { debug('%s: have chunks %j, finished = %j', self._node.state.id, data.chunks, data.finished) debug('%s: installSnapshot on leader: have chunks %j, finished = %j', self._node.state.id, data.chunks, data.finished) stream.pause() const installSnapshotArgs = { term: self._node.state.term(), leaderId: self._node.id, lastIncludedIndex, lastIncludedTerm, offset, data: data.chunks, done: data.finished } offset += data.chunks.length self._node.network.rpc( { to: self._address, action: 'InstallSnapshot', params: installSnapshotArgs }, (err, reply) => { // callback debug('%s: got InstallSnapshot reply', self._node.state.id, err, reply) if (err) { cleanup() } else { if (data.finished) { debug('%s: data finished, setting next index of %j to %d', self._node.state.id, self._address, lastIncludedIndex) self._nextIndex = self._matchIndex = lastIncludedIndex cleanup() this.emit('committed', self, lastIncludedIndex) } else { debug('resuming stream...') stream.resume() } } } ) debug('%s: sent InstallSnapshot', self._node.state.id) } function cleanup () { if (!finished) { finished = true self._installingSnapshot = false self._resetAppendEntriesTimeout() rs.destroy() } } } state () { return { address: this._address, stopped: this._stopped, nextIndex: this._nextIndex, matchIndex: this._matchIndex, installingSnapshot: this._installingSnapshot, sentAppendEntriesAgoMS: Date.now() - this._lastSent } } } module.exports = PeerLeader
src/peer-leader.js
'use strict' const debug = require('debug')('skiff.peer-leader') const timers = require('timers') const EventEmitter = require('events') const BatchTransformStream = require('./lib/batch-transform-stream') class PeerLeader extends EventEmitter { constructor (address, node, options) { if (typeof address !== 'string') { throw new Error('need address to be a string') } super() this._address = address this._node = node this._options = options this._nextIndex = this._node.log._lastLogIndex + 1 this._matchIndex = 0 this._needsIndex = 0 this._installingSnapshot = false this._lastSent = 0 this._stopped = false this._appendEntries() } stop () { this._stopped = true this._clearAppendEntriesTimeout() } needsIndex (index) { if (index > this._needsIndex) { this._needsIndex = index } if (this._needsMore) { timers.setImmediate(this._appendEntries.bind(this)) } } _needsMore () { return this._nextIndex <= this._needsIndex } _appendEntries () { debug('sending AppendEntries to %s', this._address) if (this._stopped) { return } if (this._installingSnapshot) { this._resetAppendEntriesTimeout() return } const log = this._node.log const currentTerm = this._node.state.term() const entriesReply = this._entries() const entries = entriesReply.entries const capped = entriesReply.capped if (entries) { debug('%s: entries for %s are: %j', this._node.state.id, this._address, entries) const previousEntry = this._previousEntry() const lastEntry = entries[entries.length - 1] const leaderCommit = log._commitIndex const appendEntriesArgs = { term: currentTerm, leaderId: this._node.state.id, prevLogIndex: previousEntry && previousEntry.i || 0, prevLogTerm: previousEntry && previousEntry.t || 0, entries, leaderCommit } this._lastSent = Date.now() this._resetAppendEntriesTimeout() this._node.network.rpc( { to: this._address, action: 'AppendEntries', params: appendEntriesArgs }, (err, reply) => { // callback debug('%s: got reply to AppendEntries from %s: %j', this._node.state.id, this._address, reply) if 
(err) { debug('%s: error on AppendEntries reply:\n%s', this._node.state.id, err.stack) } else if (reply && reply.params) { if (reply.params.success) { this._matchIndex = leaderCommit if (lastEntry) { this._nextIndex = lastEntry.i + 1 } if (capped) { this._needsMore = true } const commitedEntry = lastEntry || previousEntry const commitedIndex = commitedEntry && commitedEntry.i || 0 this.emit('committed', this, commitedIndex) } else { if (reply.params.lastIndexForTerm !== undefined) { this._nextIndex = reply.params.lastIndexForTerm + 1 } else { this._nextIndex -- } } if (this._needsMore()) { timers.setImmediate(this._appendEntries.bind(this)) } } } ) } else { // no log entries for peer that's lagging behind debug('%s: peer %s is lagging behind (next index is %d), going to install snapshot', this._node.state.id, this._address, this._nextIndex) this._resetAppendEntriesTimeout() return this._installSnapshot() } } _clearAppendEntriesTimeout () { if (this._appendEntriesTimeout) { timers.clearTimeout(this._appendEntriesTimeout) } this._appendEntriesTimeout = null } _setAppendEntriesTimeout () { debug('%s: setting the append entries timeout to %d ms', this._node.state.id, this._options.appendEntriesIntervalMS) this._appendEntriesTimeout = timers.setTimeout( this._onAppendEntriesTimeout.bind(this), this._options.appendEntriesIntervalMS) } _resetAppendEntriesTimeout () { this._clearAppendEntriesTimeout() this._setAppendEntriesTimeout() } _onAppendEntriesTimeout () { debug('%s: AppendEntries timedout', this._node.state.id) this._appendEntries() } _entries () { debug('follower %s next index is %d', this._address, this._nextIndex) const start = this._nextIndex let entries = this._node.log.entriesFrom(start) const cap = entries && (entries.length > this._options.batchEntriesLimit) if (cap) { entries = entries.slice(0, this._options.batchEntriesLimit) } return { capped: cap, entries } } _previousEntry () { return this._node.log.atLogIndex(this._nextIndex - 1) } // Install snapshot 
_installSnapshot () { debug('%s: _installSnapshot on %s', this._node.state.id, this._address) if (this._stopped) { return } const self = this const log = this._node.state.log this._clearAppendEntriesTimeout() let finished = false let offset = 0 this._installingSnapshot = true const lastIncludedIndex = log._lastApplied const lastIncludedTerm = log._lastAppliedTerm const rs = this._node.state.db.state.createReadStream() const stream = rs.pipe( new BatchTransformStream({ batchSize: this._options.installSnapshotChunkSize }) ) stream.on('data', installSnapshot) function installSnapshot (data) { debug('%s: have chunks %j, finished = %j', self._node.state.id, data.chunks, data.finished) debug('%s: installSnapshot on leader: have chunks %j, finished = %j', self._node.state.id, data.chunks, data.finished) stream.pause() const installSnapshotArgs = { term: self._node.state.term(), leaderId: self._node.id, lastIncludedIndex, lastIncludedTerm, offset, data: data.chunks, done: data.finished } offset += data.chunks.length self._node.network.rpc( { to: self._address, action: 'InstallSnapshot', params: installSnapshotArgs }, (err, reply) => { // callback debug('%s: got InstallSnapshot reply', self._node.state.id, err, reply) if (err) { cleanup() } else { if (data.finished) { debug('%s: data finished, setting next index of %j to %d', self._node.state.id, self._address, lastIncludedIndex) self._nextIndex = self._matchIndex = lastIncludedIndex cleanup() this.emit('committed', self, lastIncludedIndex) } else { debug('resuming stream...') stream.resume() } } } ) debug('%s: sent InstallSnapshot', self._node.state.id) } function cleanup () { if (!finished) { finished = true self._installingSnapshot = false self._resetAppendEntriesTimeout() rs.destroy() } } } state () { return { address: this._address, stopped: this._stopped, nextIndex: this._nextIndex, matchIndex: this._matchIndex, installingSnapshot: this._installingSnapshot, sentAppendEntriesAgoMS: Date.now() - this._lastSent } } } 
module.exports = PeerLeader
corrected peer leader needsMore()
src/peer-leader.js
corrected peer leader needsMore()
<ide><path>rc/peer-leader.js <ide> if (index > this._needsIndex) { <ide> this._needsIndex = index <ide> } <del> if (this._needsMore) { <add> if (this._needsMore()) { <ide> timers.setImmediate(this._appendEntries.bind(this)) <ide> } <ide> } <ide> this._matchIndex = leaderCommit <ide> if (lastEntry) { <ide> this._nextIndex = lastEntry.i + 1 <del> } <del> if (capped) { <del> this._needsMore = true <ide> } <ide> const commitedEntry = lastEntry || previousEntry <ide> const commitedIndex = commitedEntry && commitedEntry.i || 0
Java
mit
dc75659f8f221b885f8c6c801727b0c1aa15a0b4
0
Collap/bryg
package io.collap.bryg.compiler.ast; import bryg.org.objectweb.asm.Label; import io.collap.bryg.Unit; import io.collap.bryg.closure.Closure; import io.collap.bryg.closure.ClosureType; import io.collap.bryg.compiler.ast.expression.VariableExpression; import io.collap.bryg.compiler.helper.ObjectCompileHelper; import io.collap.bryg.compiler.scope.Variable; import io.collap.bryg.compiler.util.IdUtil; import io.collap.bryg.compiler.util.OperationUtil; import io.collap.bryg.model.BasicModel; import io.collap.bryg.template.Template; import io.collap.bryg.compiler.ast.expression.ArgumentExpression; import io.collap.bryg.compiler.ast.expression.coercion.BoxingExpression; import io.collap.bryg.compiler.bytecode.BrygMethodVisitor; import io.collap.bryg.compiler.context.Context; import io.collap.bryg.compiler.type.Type; import io.collap.bryg.compiler.type.TypeHelper; import io.collap.bryg.compiler.util.BoxingUtil; import io.collap.bryg.compiler.util.FunctionUtil; import io.collap.bryg.environment.Environment; import io.collap.bryg.exception.BrygJitException; import io.collap.bryg.model.Model; import io.collap.bryg.parser.BrygParser; import io.collap.bryg.template.TemplateFragmentInfo; import io.collap.bryg.template.TemplateType; import io.collap.bryg.unit.*; import javax.annotation.Nullable; import java.io.Writer; import java.util.ArrayList; import java.util.List; import static bryg.org.objectweb.asm.Opcodes.*; public class TemplateFragmentCall extends Node { /** * Only one may be set. 
*/ private TemplateFragmentInfo calledFragment; private Variable calledClosure; private boolean isFragmentInternal; private boolean isCallInClosure; private @Nullable List<ArgumentExpression> argumentExpressions; private ClosureDeclarationNode closure; public TemplateFragmentCall (Context context, BrygParser.TemplateFragmentCallContext ctx) { super (context); setLine (ctx.getStart ().getLine ()); isCallInClosure = context.getUnitType () instanceof ClosureType; isFragmentInternal = false; findCalledUnit (ctx); /* Get argument expressions. */ if (ctx.argumentList () != null) { argumentExpressions = FunctionUtil.parseArgumentList (context, ctx.argumentList ()); if (calledFragment != null) { /* Infer parameter/argument names. */ boolean shouldInfer = false; /* Check first whether to infer or not, because we need to check whether the order of arguments is correct even if some arguments are named. */ for (ArgumentExpression argumentExpression : argumentExpressions) { if (argumentExpression.getName () == null) { shouldInfer = true; break; } } if (shouldInfer) { List<ParameterInfo> localParameters = calledFragment.getLocalParameters (); for (int i = 0; i < argumentExpressions.size (); ++i) { ArgumentExpression argumentExpression = argumentExpressions.get (i); ParameterInfo localParameter = localParameters.get (i); if (argumentExpression.getName () != null) { if (!localParameter.getName ().equals (argumentExpression.getName ())) { throw new BrygJitException ("Argument " + i + " is invalid: Expected name '" + localParameter.getName () + "' but read '" + argumentExpression.getName () + "'.", getLine ()); } } else { argumentExpression.setName (localParameter.getName ()); } } } } } if (ctx.closure () != null) { closure = new ClosureDeclarationNode (context, ctx.closure ()); }else { if (calledFragment != null) { /* Check if closure is expected. 
*/ boolean closureExpected = false; List<ParameterInfo> parameters = calledFragment.getAllParameters (); for (ParameterInfo parameterInfo : parameters) { if (parameterInfo.getType ().similarTo (Closure.class) && !parameterInfo.isOptional ()) { closureExpected = true; break; } } if (closureExpected) { throw new BrygJitException ("Fragment '" + calledFragment.getOwner ().getFullName () + ":" + calledFragment.getName () + "' expects a closure.", getLine ()); } } } } private void findCalledUnit (BrygParser.TemplateFragmentCallContext ctx) { String fullName = ctx.templateId ().getText ().substring (1); /* Omit the AT (@). */ /* Check if there is a closure variable that can be called. */ Variable variable = context.getCurrentScope ().getVariable (fullName); if (variable != null) { calledClosure = variable; calledFragment = null; return; } /* Check if there is a local fragment function. */ { TemplateType templateType = context.getUnitType ().getParentTemplateType (); TemplateFragmentInfo fragmentInfo = templateType.getFragment (fullName); if (fragmentInfo != null) { calledClosure = null; calledFragment = fragmentInfo; isFragmentInternal = true; return; } } /* Check if the parent package needs to be prepended. */ if (ctx.templateId ().currentPackage != null) { fullName = context.getUnitType ().getClassPackage () + fullName; }else { fullName = UnitClassLoader.getPrefixedName (fullName); } /* Get the name for the fragment. */ String fragName; if (ctx.frag != null) { fragName = IdUtil.idToString (ctx.frag); }else { fragName = "render"; // TODO: Make this a universal constant. 
} TemplateType templateType = context.getEnvironment ().getTemplateTypePrefixed (fullName); if (templateType == null) { throw new BrygJitException ("Template " + fullName + " not found for template call!", getLine ()); } calledFragment = templateType.getFragment (fragName); if (calledFragment == null) { throw new BrygJitException ("Fragment " + fullName + ":" + fragName + " not found for template call!", getLine ()); } calledClosure = null; } @Override public void compile () { if (calledFragment != null) { compileFragmentCall (); }else if (calledClosure != null) { compileClosureCall (); }else { throw new BrygJitException ("The ID does not refer to a template, fragment or a closure.", getLine ()); } } private void compileFragmentCall () { BrygMethodVisitor mv = context.getMethodVisitor (); if (isFragmentInternal) { if (isCallInClosure) { mv.visitVarInsn (ALOAD, context.getRootScope ().getVariable (ClosureType.PARENT_FIELD_NAME).getId ()); }else { loadThis (); } // -> T extends Template }else { compileTemplateFetch (); } // -> T extends Template loadWriter (); // -> Writer if (isFragmentInternal) { /* Pass the current model as a parent of a new model and add the new arguments. */ new ObjectCompileHelper (mv, new Type (BasicModel.class)).compileNew ( TypeHelper.generateMethodDesc ( new Class[]{Model.class}, Void.TYPE ), new ArrayList<Node> () {{ Variable model; if (isCallInClosure) { model = context.getRootScope ().getVariable (ClosureType.PARENT_MODEL_FIELD_NAME); }else { model = context.getRootScope ().getVariable ("model"); } add (new VariableExpression (context, model, AccessMode.get, getLine ())); }} ); }else { /* Just create a new model. 
*/ new ObjectCompileHelper (mv, new Type (BasicModel.class)).compileNew (); } // -> BasicModel compileArguments (); // Model -> Model TemplateType owner = calledFragment.getOwner (); context.getUnitType ().getParentTemplateType ().getReferencedTemplates ().add (owner); compileFragmentInvocation (calledFragment.getName (), owner.getJvmName (), false); } private void compileTemplateFetch () { BrygMethodVisitor mv = context.getMethodVisitor (); Type environmentType = new Type (Environment.class); /* Get environment. */ loadThis (); // -> StandardTemplate mv.visitFieldInsn (GETFIELD, new Type (StandardUnit.class).getAsmType ().getInternalName (), "environment", environmentType.getAsmType ().getDescriptor ()); // StandardTemplate -> Environment /* Get template with environment. (Method owning object) */ mv.visitInsn (DUP); // Environment -> Environment, Environment int environmentVariableId = context.getCurrentScope ().calculateNextId (environmentType); mv.visitVarInsn (ASTORE, environmentVariableId); // Environment -> mv.visitLdcInsn (calledFragment.getOwner ().getFullName ()); // -> String /* This assumes that the full name of the unit type is already prefixed. */ mv.visitMethodInsn (INVOKEINTERFACE, environmentType.getAsmType ().getInternalName (), "getTemplatePrefixed", TypeHelper.generateMethodDesc ( new Class[] { String.class }, Template.class ), true); // Environment, String -> Template mv.visitTypeInsn (CHECKCAST, calledFragment.getOwner ().getJvmName ()); // Template -> T extends Template } private void compileClosureCall () { BrygMethodVisitor mv = context.getMethodVisitor (); compileClosureFetch (); /* Load writer. (Argument 0) */ loadWriter (); // -> Writer /* Create model. (Argument 1) */ new ObjectCompileHelper (mv, new Type (BasicModel.class)).compileNew (); /* Compile arguments and set model variables. */ compileArguments (); /* Invoke render method. 
*/ compileFragmentInvocation ("render", new Type (Unit.class).getAsmType ().getInternalName (), true); } /** * -> * * This method only checks whether the closure is null if the variable is declared optional. */ private void compileClosureFetch () { BrygMethodVisitor mv = context.getMethodVisitor (); mv.visitVarInsn (ALOAD, calledClosure.getId ()); // -> Closure if (calledClosure.isNullable ()) { OperationUtil.compileIfNullThrowException (mv, new Type (NullPointerException.class), "Closure variable '" + calledClosure.getName () + "' is null."); // -> } } /** * Model -> Model * * Also compiles the closure argument, if applicable. */ private void compileArguments () { BrygMethodVisitor mv = context.getMethodVisitor (); if (argumentExpressions != null) { for (ArgumentExpression argument : argumentExpressions) { if (argument.getName () == null) { throw new BrygJitException ("Argument name was neither supplied nor inferred.", getLine ()); } /* Compile predicate. */ Label afterArgument = argument.compilePredicate (); mv.visitInsn (DUP); // Model -> Model, Model mv.visitLdcInsn (argument.getName ()); // -> String /* Possibly box the argument. 
*/ Type boxedType = BoxingUtil.boxType (argument.getType ()); if (boxedType != null) { new BoxingExpression (context, argument, boxedType).compile (); // -> T } else { argument.compile (); // -> T } mv.visitMethodInsn (INVOKEINTERFACE, new Type (Model.class).getAsmType ().getInternalName (), "setVariable", TypeHelper.generateMethodDesc ( new Class[]{String.class, Object.class}, Void.TYPE ), true); // Model, String, T -> if (afterArgument != null) { mv.visitLabel (afterArgument); } } } if (closure != null) { compileClosure (); } } private void compileClosure () { if (calledFragment == null) { throw new BrygJitException ("Currently only fragments can be called with closures.", getLine ()); } BrygMethodVisitor mv = context.getMethodVisitor (); mv.visitInsn (DUP); // Model -> Model, Model ParameterInfo closureParameter = findClosureParameter (calledFragment.getLocalParameters ()); if (closureParameter == null) { closureParameter = findClosureParameter (calledFragment.getGeneralParameters ()); } if (closureParameter == null) { throw new BrygJitException ("Expected closure parameter for fragment '" + calledFragment.getOwner ().getFullName () + ":" + calledFragment.getName () + "'", getLine ()); } mv.visitLdcInsn (closureParameter.getName ()); // -> String closure.compile (); // -> Closure mv.visitMethodInsn (INVOKEINTERFACE, new Type (Model.class).getAsmType ().getInternalName (), "setVariable", TypeHelper.generateMethodDesc ( new Class[]{String.class, Object.class}, Void.TYPE ), true); // Model, String, Closure -> } private ParameterInfo findClosureParameter (List<ParameterInfo> parameters) { ParameterInfo closureParameter = null; for (ParameterInfo parameter : parameters) { if (parameter.getType ().similarTo (Closure.class)) { if (closureParameter == null) { closureParameter = parameter; } else { throw new BrygJitException ("Found two or more closure parameters: " + closureParameter.getName () + ", " + parameter.getName (), getLine ()); } } } return closureParameter; } 
private void loadThis () { BrygMethodVisitor mv = context.getMethodVisitor (); mv.visitVarInsn (ALOAD, context.getRootScope ().getVariable ("this").getId ()); } private void loadWriter () { BrygMethodVisitor mv = context.getMethodVisitor (); mv.visitVarInsn (ALOAD, context.getRootScope ().getVariable ("writer").getId ()); } private void compileFragmentInvocation (String name, String ownerName, boolean isInterfaceCall) { context.getMethodVisitor ().visitMethodInsn ( isInterfaceCall ? INVOKEINTERFACE : INVOKEVIRTUAL, ownerName, name, TypeHelper.generateMethodDesc ( new Class[] { Writer.class, Model.class }, Void.TYPE ), isInterfaceCall); // Template, Writer, Model -> } }
src/main/java/io/collap/bryg/compiler/ast/TemplateFragmentCall.java
package io.collap.bryg.compiler.ast; import bryg.org.objectweb.asm.Label; import io.collap.bryg.Unit; import io.collap.bryg.closure.Closure; import io.collap.bryg.closure.ClosureType; import io.collap.bryg.compiler.ast.expression.VariableExpression; import io.collap.bryg.compiler.helper.ObjectCompileHelper; import io.collap.bryg.compiler.scope.Variable; import io.collap.bryg.compiler.util.IdUtil; import io.collap.bryg.compiler.util.OperationUtil; import io.collap.bryg.model.BasicModel; import io.collap.bryg.template.Template; import io.collap.bryg.compiler.ast.expression.ArgumentExpression; import io.collap.bryg.compiler.ast.expression.coercion.BoxingExpression; import io.collap.bryg.compiler.bytecode.BrygMethodVisitor; import io.collap.bryg.compiler.context.Context; import io.collap.bryg.compiler.type.Type; import io.collap.bryg.compiler.type.TypeHelper; import io.collap.bryg.compiler.util.BoxingUtil; import io.collap.bryg.compiler.util.FunctionUtil; import io.collap.bryg.environment.Environment; import io.collap.bryg.exception.BrygJitException; import io.collap.bryg.model.Model; import io.collap.bryg.parser.BrygParser; import io.collap.bryg.template.TemplateFragmentInfo; import io.collap.bryg.template.TemplateType; import io.collap.bryg.unit.*; import javax.annotation.Nullable; import java.io.Writer; import java.util.ArrayList; import java.util.List; import static bryg.org.objectweb.asm.Opcodes.*; public class TemplateFragmentCall extends Node { /** * Only one may be set. 
*/ private TemplateFragmentInfo calledFragment; private Variable calledClosure; private boolean isFragmentInternal; private boolean isCallInClosure; private @Nullable List<ArgumentExpression> argumentExpressions; private ClosureDeclarationNode closure; public TemplateFragmentCall (Context context, BrygParser.TemplateFragmentCallContext ctx) { super (context); setLine (ctx.getStart ().getLine ()); isCallInClosure = context.getUnitType () instanceof ClosureType; isFragmentInternal = false; findCalledUnit (ctx); /* Get argument expressions. */ if (ctx.argumentList () != null) { argumentExpressions = FunctionUtil.parseArgumentList (context, ctx.argumentList ()); /* Infer parameter/argument names. */ boolean shouldInfer = false; /* Check first whether to infer or not, because we need to check whether the order of arguments is correct even if some arguments are named. */ for (ArgumentExpression argumentExpression : argumentExpressions) { if (argumentExpression.getName () == null) { shouldInfer = true; break; } } if (shouldInfer) { List<ParameterInfo> localParameters = calledFragment.getLocalParameters (); for (int i = 0; i < argumentExpressions.size (); ++i) { ArgumentExpression argumentExpression = argumentExpressions.get (i); ParameterInfo localParameter = localParameters.get (i); if (argumentExpression.getName () != null) { if (!localParameter.getName ().equals (argumentExpression.getName ())) { throw new BrygJitException ("Argument " + i + " is invalid: Expected name '" + localParameter.getName () + "' but read '" + argumentExpression.getName () + "'.", getLine ()); } }else { argumentExpression.setName (localParameter.getName ()); } } } } if (ctx.closure () != null) { closure = new ClosureDeclarationNode (context, ctx.closure ()); }else { if (calledFragment != null) { /* Check if closure is expected. 
*/ boolean closureExpected = false; List<ParameterInfo> parameters = calledFragment.getAllParameters (); for (ParameterInfo parameterInfo : parameters) { if (parameterInfo.getType ().similarTo (Closure.class) && !parameterInfo.isOptional ()) { closureExpected = true; break; } } if (closureExpected) { throw new BrygJitException ("Fragment '" + calledFragment.getOwner ().getFullName () + ":" + calledFragment.getName () + "' expects a closure.", getLine ()); } } } } private void findCalledUnit (BrygParser.TemplateFragmentCallContext ctx) { String fullName = ctx.templateId ().getText ().substring (1); /* Omit the AT (@). */ /* Check if there is a closure variable that can be called. */ Variable variable = context.getCurrentScope ().getVariable (fullName); if (variable != null) { calledClosure = variable; calledFragment = null; return; } /* Check if there is a local fragment function. */ { TemplateType templateType = context.getUnitType ().getParentTemplateType (); TemplateFragmentInfo fragmentInfo = templateType.getFragment (fullName); if (fragmentInfo != null) { calledClosure = null; calledFragment = fragmentInfo; isFragmentInternal = true; return; } } /* Check if the parent package needs to be prepended. */ if (ctx.templateId ().currentPackage != null) { fullName = context.getUnitType ().getClassPackage () + fullName; }else { fullName = UnitClassLoader.getPrefixedName (fullName); } /* Get the name for the fragment. */ String fragName; if (ctx.frag != null) { fragName = IdUtil.idToString (ctx.frag); }else { fragName = "render"; // TODO: Make this a universal constant. 
} TemplateType templateType = context.getEnvironment ().getTemplateTypePrefixed (fullName); if (templateType == null) { throw new BrygJitException ("Template " + fullName + " not found for template call!", getLine ()); } calledFragment = templateType.getFragment (fragName); if (calledFragment == null) { throw new BrygJitException ("Fragment " + fullName + ":" + fragName + " not found for template call!", getLine ()); } calledClosure = null; } @Override public void compile () { if (calledFragment != null) { compileFragmentCall (); }else if (calledClosure != null) { compileClosureCall (); }else { throw new BrygJitException ("The ID does not refer to a template, fragment or a closure.", getLine ()); } } private void compileFragmentCall () { BrygMethodVisitor mv = context.getMethodVisitor (); if (isFragmentInternal) { if (isCallInClosure) { mv.visitVarInsn (ALOAD, context.getRootScope ().getVariable (ClosureType.PARENT_FIELD_NAME).getId ()); }else { loadThis (); } // -> T extends Template }else { compileTemplateFetch (); } // -> T extends Template loadWriter (); // -> Writer if (isFragmentInternal) { /* Pass the current model as a parent of a new model and add the new arguments. */ new ObjectCompileHelper (mv, new Type (BasicModel.class)).compileNew ( TypeHelper.generateMethodDesc ( new Class[]{Model.class}, Void.TYPE ), new ArrayList<Node> () {{ Variable model; if (isCallInClosure) { model = context.getRootScope ().getVariable (ClosureType.PARENT_MODEL_FIELD_NAME); }else { model = context.getRootScope ().getVariable ("model"); } add (new VariableExpression (context, model, AccessMode.get, getLine ())); }} ); }else { /* Just create a new model. 
*/ new ObjectCompileHelper (mv, new Type (BasicModel.class)).compileNew (); } // -> BasicModel compileArguments (); // Model -> Model TemplateType owner = calledFragment.getOwner (); context.getUnitType ().getParentTemplateType ().getReferencedTemplates ().add (owner); compileFragmentInvocation (calledFragment.getName (), owner.getJvmName (), false); } private void compileTemplateFetch () { BrygMethodVisitor mv = context.getMethodVisitor (); Type environmentType = new Type (Environment.class); /* Get environment. */ loadThis (); // -> StandardTemplate mv.visitFieldInsn (GETFIELD, new Type (StandardUnit.class).getAsmType ().getInternalName (), "environment", environmentType.getAsmType ().getDescriptor ()); // StandardTemplate -> Environment /* Get template with environment. (Method owning object) */ mv.visitInsn (DUP); // Environment -> Environment, Environment int environmentVariableId = context.getCurrentScope ().calculateNextId (environmentType); mv.visitVarInsn (ASTORE, environmentVariableId); // Environment -> mv.visitLdcInsn (calledFragment.getOwner ().getFullName ()); // -> String /* This assumes that the full name of the unit type is already prefixed. */ mv.visitMethodInsn (INVOKEINTERFACE, environmentType.getAsmType ().getInternalName (), "getTemplatePrefixed", TypeHelper.generateMethodDesc ( new Class[] { String.class }, Template.class ), true); // Environment, String -> Template mv.visitTypeInsn (CHECKCAST, calledFragment.getOwner ().getJvmName ()); // Template -> T extends Template } private void compileClosureCall () { BrygMethodVisitor mv = context.getMethodVisitor (); compileClosureFetch (); /* Load writer. (Argument 0) */ loadWriter (); // -> Writer /* Create model. (Argument 1) */ new ObjectCompileHelper (mv, new Type (BasicModel.class)).compileNew (); /* Compile arguments and set model variables. */ compileArguments (); /* Invoke render method. 
*/ compileFragmentInvocation ("render", new Type (Unit.class).getAsmType ().getInternalName (), true); } /** * -> * * This method only checks whether the closure is null if the variable is declared optional. */ private void compileClosureFetch () { BrygMethodVisitor mv = context.getMethodVisitor (); mv.visitVarInsn (ALOAD, calledClosure.getId ()); // -> Closure if (calledClosure.isNullable ()) { OperationUtil.compileIfNullThrowException (mv, new Type (NullPointerException.class), "Closure variable '" + calledClosure.getName () + "' is null."); // -> } } /** * Model -> Model * * Also compiles the closure argument, if applicable. */ private void compileArguments () { BrygMethodVisitor mv = context.getMethodVisitor (); if (argumentExpressions != null) { for (ArgumentExpression argument : argumentExpressions) { if (argument.getName () == null) { throw new BrygJitException ("All arguments to a template must be named.", getLine ()); } /* Compile predicate. */ Label afterArgument = argument.compilePredicate (); mv.visitInsn (DUP); // Model -> Model, Model mv.visitLdcInsn (argument.getName ()); // -> String /* Possibly box the argument. 
*/ Type boxedType = BoxingUtil.boxType (argument.getType ()); if (boxedType != null) { new BoxingExpression (context, argument, boxedType).compile (); // -> T } else { argument.compile (); // -> T } mv.visitMethodInsn (INVOKEINTERFACE, new Type (Model.class).getAsmType ().getInternalName (), "setVariable", TypeHelper.generateMethodDesc ( new Class[]{String.class, Object.class}, Void.TYPE ), true); // Model, String, T -> if (afterArgument != null) { mv.visitLabel (afterArgument); } } } if (closure != null) { compileClosure (); } } private void compileClosure () { if (calledFragment == null) { throw new BrygJitException ("Currently only fragments can be called with closures.", getLine ()); } BrygMethodVisitor mv = context.getMethodVisitor (); mv.visitInsn (DUP); // Model -> Model, Model ParameterInfo closureParameter = findClosureParameter (calledFragment.getLocalParameters ()); if (closureParameter == null) { closureParameter = findClosureParameter (calledFragment.getGeneralParameters ()); } if (closureParameter == null) { throw new BrygJitException ("Expected closure parameter for fragment '" + calledFragment.getOwner ().getFullName () + ":" + calledFragment.getName () + "'", getLine ()); } mv.visitLdcInsn (closureParameter.getName ()); // -> String closure.compile (); // -> Closure mv.visitMethodInsn (INVOKEINTERFACE, new Type (Model.class).getAsmType ().getInternalName (), "setVariable", TypeHelper.generateMethodDesc ( new Class[]{String.class, Object.class}, Void.TYPE ), true); // Model, String, Closure -> } private ParameterInfo findClosureParameter (List<ParameterInfo> parameters) { ParameterInfo closureParameter = null; for (ParameterInfo parameter : parameters) { if (parameter.getType ().similarTo (Closure.class)) { if (closureParameter == null) { closureParameter = parameter; } else { throw new BrygJitException ("Found two or more closure parameters: " + closureParameter.getName () + ", " + parameter.getName (), getLine ()); } } } return closureParameter; } 
private void loadThis () { BrygMethodVisitor mv = context.getMethodVisitor (); mv.visitVarInsn (ALOAD, context.getRootScope ().getVariable ("this").getId ()); } private void loadWriter () { BrygMethodVisitor mv = context.getMethodVisitor (); mv.visitVarInsn (ALOAD, context.getRootScope ().getVariable ("writer").getId ()); } private void compileFragmentInvocation (String name, String ownerName, boolean isInterfaceCall) { context.getMethodVisitor ().visitMethodInsn ( isInterfaceCall ? INVOKEINTERFACE : INVOKEVIRTUAL, ownerName, name, TypeHelper.generateMethodDesc ( new Class[] { Writer.class, Model.class }, Void.TYPE ), isInterfaceCall); // Template, Writer, Model -> } }
Only infer argument names if the called unit is a fragment. This has to be done because closure parameters are currently not known to the compiler.
src/main/java/io/collap/bryg/compiler/ast/TemplateFragmentCall.java
Only infer argument names if the called unit is a fragment. This has to be done because closure parameters are currently not known to the compiler.
<ide><path>rc/main/java/io/collap/bryg/compiler/ast/TemplateFragmentCall.java <ide> if (ctx.argumentList () != null) { <ide> argumentExpressions = FunctionUtil.parseArgumentList (context, ctx.argumentList ()); <ide> <del> /* Infer parameter/argument names. */ <del> boolean shouldInfer = false; <del> <del> /* Check first whether to infer or not, because we need to check whether the <del> order of arguments is correct even if some arguments are named. */ <del> for (ArgumentExpression argumentExpression : argumentExpressions) { <del> if (argumentExpression.getName () == null) { <del> shouldInfer = true; <del> break; <del> } <del> } <del> <del> if (shouldInfer) { <del> List<ParameterInfo> localParameters = calledFragment.getLocalParameters (); <del> for (int i = 0; i < argumentExpressions.size (); ++i) { <del> ArgumentExpression argumentExpression = argumentExpressions.get (i); <del> ParameterInfo localParameter = localParameters.get (i); <del> if (argumentExpression.getName () != null) { <del> if (!localParameter.getName ().equals (argumentExpression.getName ())) { <del> throw new BrygJitException ("Argument " + i + " is invalid: Expected name '" + <del> localParameter.getName () + "' but read '" + argumentExpression.getName () + "'.", getLine ()); <add> if (calledFragment != null) { <add> /* Infer parameter/argument names. */ <add> boolean shouldInfer = false; <add> <add> /* Check first whether to infer or not, because we need to check whether the <add> order of arguments is correct even if some arguments are named. 
*/ <add> for (ArgumentExpression argumentExpression : argumentExpressions) { <add> if (argumentExpression.getName () == null) { <add> shouldInfer = true; <add> break; <add> } <add> } <add> <add> if (shouldInfer) { <add> List<ParameterInfo> localParameters = calledFragment.getLocalParameters (); <add> for (int i = 0; i < argumentExpressions.size (); ++i) { <add> ArgumentExpression argumentExpression = argumentExpressions.get (i); <add> ParameterInfo localParameter = localParameters.get (i); <add> if (argumentExpression.getName () != null) { <add> if (!localParameter.getName ().equals (argumentExpression.getName ())) { <add> throw new BrygJitException ("Argument " + i + " is invalid: Expected name '" + <add> localParameter.getName () + "' but read '" + argumentExpression.getName () + "'.", getLine ()); <add> } <add> } else { <add> argumentExpression.setName (localParameter.getName ()); <ide> } <del> }else { <del> argumentExpression.setName (localParameter.getName ()); <ide> } <ide> } <ide> } <ide> if (argumentExpressions != null) { <ide> for (ArgumentExpression argument : argumentExpressions) { <ide> if (argument.getName () == null) { <del> throw new BrygJitException ("All arguments to a template must be named.", getLine ()); <add> throw new BrygJitException ("Argument name was neither supplied nor inferred.", getLine ()); <ide> } <ide> <ide> /* Compile predicate. */
Java
apache-2.0
95d7fe419b43c3b7883a136d028e54e30da929bd
0
Z3r0byte/Magistify
/* * Copyright (c) 2016-2017 Bas van den Boom 'Z3r0byte' * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.z3r0byte.magistify; import android.app.Dialog; import android.content.ComponentName; import android.content.Context; import android.content.DialogInterface; import android.content.Intent; import android.content.ServiceConnection; import android.os.Bundle; import android.os.IBinder; import android.os.Looper; import android.os.RemoteException; import android.support.v4.widget.SwipeRefreshLayout; import android.support.v7.app.AlertDialog; import android.support.v7.app.AppCompatActivity; import android.support.v7.widget.Toolbar; import android.util.Log; import android.view.View; import android.widget.Button; import android.widget.EditText; import android.widget.Toast; import com.android.vending.billing.IInAppBillingService; import com.google.android.gms.ads.AdRequest; import com.google.android.gms.ads.AdView; import com.google.android.gms.ads.MobileAds; import com.google.gson.Gson; import com.z3r0byte.magistify.DatabaseHelpers.CalendarDB; import com.z3r0byte.magistify.DatabaseHelpers.NewGradesDB; import com.z3r0byte.magistify.GUI.NavigationDrawer; import com.z3r0byte.magistify.GUI.NewGradeCard; import com.z3r0byte.magistify.GUI.NextAppointmentCard; import com.z3r0byte.magistify.Util.ConfigUtil; import net.ilexiconn.magister.Magister; import net.ilexiconn.magister.container.Appointment; import net.ilexiconn.magister.container.Grade; import 
net.ilexiconn.magister.container.School; import net.ilexiconn.magister.container.User; import net.ilexiconn.magister.handler.GradeHandler; import org.json.JSONException; import org.json.JSONObject; import java.io.IOException; import java.security.InvalidParameterException; import java.text.ParseException; import java.util.ArrayList; import it.gmariotti.cardslib.library.internal.CardHeader; import it.gmariotti.cardslib.library.view.CardViewNative; public class DashboardActivity extends AppCompatActivity { private static final String TAG = "DashboardActivity"; Toolbar mToolbar; CardViewNative appointmentMain; CardViewNative gradeMain; SwipeRefreshLayout mSwipeRefreshLayout; ConfigUtil configUtil; final static String SKU_FIFTY_CENTS = "fifty_cents"; final static String SKU_ONE_EURO = "one_euro"; final static String SKU_TWO_EURO = "two_euro"; final static String SKU_FIVE_EURO = "five_euro"; IInAppBillingService mService; Bundle ownedItems; ArrayList<String> boughtSKU = new ArrayList<>(); ArrayList<String> boughtToken = new ArrayList<>(); ServiceConnection mServiceConn = new ServiceConnection() { @Override public void onServiceDisconnected(ComponentName name) { mService = null; } @Override public void onServiceConnected(ComponentName name, IBinder service) { mService = IInAppBillingService.Stub.asInterface(service); } }; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_dashboard); mToolbar = (Toolbar) findViewById(R.id.Toolbar); mToolbar.setTitle(R.string.title_dashboard); setSupportActionBar(mToolbar); NavigationDrawer navigationDrawer = new NavigationDrawer(this, mToolbar, GlobalAccount.PROFILE, GlobalAccount.USER, "Dashboard"); navigationDrawer.SetupNavigationDrawer(); mSwipeRefreshLayout = (SwipeRefreshLayout) findViewById(R.id.layout_refresh); mSwipeRefreshLayout.setColorSchemeResources( R.color.colorPrimary, R.color.setup_color_3, R.color.setup_color_5); 
mSwipeRefreshLayout.setOnRefreshListener( new SwipeRefreshLayout.OnRefreshListener() { @Override public void onRefresh() { Log.d(TAG, "onRefresh: Refreshing!"); mSwipeRefreshLayout.setVisibility(View.GONE); mSwipeRefreshLayout.setVisibility(View.VISIBLE); setupAppointmentCard(); retrieveGrades(); } } ); configUtil = new ConfigUtil(this); if (!configUtil.getBoolean("disable_ads")) { MobileAds.initialize(getApplicationContext(), getString(R.string.app_ad_id)); AdView mAdView = (AdView) findViewById(R.id.adView); AdRequest adRequest = new AdRequest.Builder() .addTestDevice("BEF9819F219452AE8661484A2AA03C59") .build(); mAdView.loadAd(adRequest); } if (configUtil.getInteger("failed_auth") >= 2) { AlertDialog.Builder alertDialogBuilder = new AlertDialog.Builder(this); alertDialogBuilder.setMessage(R.string.dialog_login_failed_desc); alertDialogBuilder.setPositiveButton(R.string.msg_relogin, new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialogInterface, int i) { relogin(); } }); alertDialogBuilder.setNegativeButton(R.string.msg_later, new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialogInterface, int i) { } }); AlertDialog alertDialog = alertDialogBuilder.create(); alertDialog.show(); } Intent serviceIntent = new Intent("com.android.vending.billing.InAppBillingService.BIND"); serviceIntent.setPackage("com.android.vending"); bindService(serviceIntent, mServiceConn, Context.BIND_AUTO_CREATE); appointmentMain = (CardViewNative) findViewById(R.id.card_next_appointment); gradeMain = (CardViewNative) findViewById(R.id.card_new_grade); gradeMain.setVisibility(View.GONE); setupAppointmentCard(); retrieveGrades(); getPurchases(); } private void relogin() { final Dialog dialog = new Dialog(this); dialog.setContentView(R.layout.fragment_login); dialog.setTitle(R.string.msg_relogin); Button button = (Button) dialog.findViewById(R.id.button_login); button.setOnClickListener(new View.OnClickListener() { 
@Override public void onClick(View view) { new Thread(new Runnable() { @Override public void run() { Looper.prepare(); EditText usertxt = (EditText) dialog.findViewById(R.id.edit_text_username); EditText passwordtxt = (EditText) dialog.findViewById(R.id.edit_text_password); String username = usertxt.getText().toString(); String password = passwordtxt.getText().toString(); School school = new Gson().fromJson(configUtil.getString("School"), School.class); try { Magister magister = Magister.login(school, username, password); } catch (final IOException e) { runOnUiThread(new Runnable() { @Override public void run() { Toast.makeText(DashboardActivity.this, R.string.err_no_connection, Toast.LENGTH_SHORT).show(); } }); return; } catch (ParseException e) { e.printStackTrace(); runOnUiThread(new Runnable() { @Override public void run() { Toast.makeText(DashboardActivity.this, R.string.err_unknown, Toast.LENGTH_SHORT).show(); } }); return; } catch (final InvalidParameterException e) { runOnUiThread(new Runnable() { @Override public void run() { Toast.makeText(DashboardActivity.this, R.string.err_wrong_username_or_password, Toast.LENGTH_SHORT).show(); } }); return; } Log.d(TAG, "onClick: login succeeded!"); User user = new User(username, password, false); configUtil.setString("User", new Gson().toJson(user)); configUtil.setInteger("failed_auth", 0); GlobalAccount.USER = user; dialog.dismiss(); } }).start(); } }); dialog.show(); } private void setupAppointmentCard() { CalendarDB db = new CalendarDB(this); Appointment[] appointments = db.getNextAppointments(); Appointment appointment = null; Log.d(TAG, "setupAppointmentCard: Amount of appointments: " + appointments.length); if (appointments != null && appointments.length > 0) { appointment = appointments[0]; } NextAppointmentCard mainCardContent = new NextAppointmentCard(this, appointment); CardHeader cardHeader = new CardHeader(this); cardHeader.setTitle(getString(R.string.msg_next_appointment)); 
mainCardContent.addCardHeader(cardHeader); appointmentMain.setCard(mainCardContent); appointmentMain.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { startActivity(new Intent(getApplicationContext(), AppointmentActivity.class)); } }); } private void setupGradeCard() { NewGradesDB gradesdb = new NewGradesDB(this); Grade[] grades = gradesdb.getNewGrades(); Grade grade = null; if (grades != null && grades.length > 0) { grade = grades[0]; } /* Grade sampleGrade = new Grade(); sampleGrade.isSufficient = false; sampleGrade.grade = "2.3"; sampleGrade.subject = new SubSubject(); sampleGrade.subject.name = "Latijn"; grade = sampleGrade;*/ NewGradeCard mainCardContent = new NewGradeCard(this, grade); CardHeader cardHeader = new CardHeader(this); cardHeader.setTitle(getString(R.string.msg_newest_grade)); mainCardContent.addCardHeader(cardHeader); gradeMain.setVisibility(View.VISIBLE); gradeMain.setCard(mainCardContent); gradeMain.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { startActivity(new Intent(getApplicationContext(), NewGradeActivity.class)); } }); mSwipeRefreshLayout.setRefreshing(false); } private void retrieveGrades() { new Thread(new Runnable() { @Override public void run() { Magister magister = GlobalAccount.MAGISTER; if (magister != null && magister.isExpired()) { try { magister.login(); } catch (IOException e) { Log.e(TAG, "run: No connection during login", e); runOnUiThread(new Runnable() { @Override public void run() { mSwipeRefreshLayout.setRefreshing(false); setupGradeCard(); } }); return; } } else if (magister == null) { runOnUiThread(new Runnable() { @Override public void run() { mSwipeRefreshLayout.setRefreshing(false); setupGradeCard(); } }); return; } GradeHandler gradeHandler = new GradeHandler(magister); Grade[] Grades; try { Grades = gradeHandler.getRecentGrades(); } catch (IOException e) { Grades = null; Log.e(TAG, "run: No connection...", e); runOnUiThread(new 
Runnable() { @Override public void run() { setupGradeCard(); } }); } if (Grades != null && Grades.length != 0) { NewGradesDB db = new NewGradesDB(getApplicationContext()); db.addGrades(Grades); runOnUiThread(new Runnable() { @Override public void run() { setupGradeCard(); } }); } else if (Grades != null && Grades.length < 1) { runOnUiThread(new Runnable() { @Override public void run() { setupGradeCard(); } }); } else { runOnUiThread(new Runnable() { @Override public void run() { setupGradeCard(); } }); } } }).start(); } private void getPurchases() { new Thread(new Runnable() { @Override public void run() { try { Thread.sleep(500); ownedItems = mService.getPurchases(3, getPackageName(), "inapp", null); int response = ownedItems.getInt("RESPONSE_CODE"); if (response == 0) { ArrayList<String> ownedSkus = ownedItems.getStringArrayList("INAPP_PURCHASE_ITEM_LIST"); ArrayList<String> purchaseDataList = ownedItems.getStringArrayList("INAPP_PURCHASE_DATA_LIST"); ArrayList<String> signatureList = ownedItems.getStringArrayList("INAPP_DATA_SIGNATURE_LIST"); for (int i = 0; i < purchaseDataList.size(); ++i) { String purchaseData = purchaseDataList.get(i); String signature = signatureList.get(i); String sku = ownedSkus.get(i); JSONObject jo = new JSONObject(purchaseData); String token = jo.getString("purchaseToken"); boughtSKU.add(sku); boughtToken.add(token); Log.i(TAG, "run: Purchased item " + i + ": SKU: " + sku + ", purchaseData:" + purchaseData + ", Signature: " + signature); configUtil.setBoolean("disable_ads", false); configUtil.setBoolean("pro_unlocked", false); if (boughtSKU.contains(SKU_FIFTY_CENTS)) { configUtil.setBoolean("disable_ads", true); } else if (boughtSKU.contains(SKU_ONE_EURO)) { configUtil.setBoolean("disable_ads", true); configUtil.setBoolean("pro_unlocked", true); configUtil.setString("token_one_euro", token); } else if (boughtSKU.contains(SKU_TWO_EURO)) { configUtil.setBoolean("disable_ads", true); configUtil.setBoolean("pro_unlocked", true); 
configUtil.setString("token_two_euro", token); } else if (boughtSKU.contains(SKU_FIVE_EURO)) { configUtil.setBoolean("disable_ads", true); configUtil.setBoolean("pro_unlocked", true); configUtil.setString("token_five_euro", token); } // do something with this purchase information // e.g. display the updated list of products owned by user } } } catch (RemoteException e) { if (mService != null) { unbindService(mServiceConn); } e.printStackTrace(); } catch (InterruptedException e) { e.printStackTrace(); } catch (JSONException e) { e.printStackTrace(); } } }).start(); } }
app/src/main/java/com/z3r0byte/magistify/DashboardActivity.java
/* * Copyright (c) 2016-2017 Bas van den Boom 'Z3r0byte' * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.z3r0byte.magistify; import android.app.Dialog; import android.content.ComponentName; import android.content.Context; import android.content.DialogInterface; import android.content.Intent; import android.content.ServiceConnection; import android.os.Bundle; import android.os.IBinder; import android.os.Looper; import android.os.RemoteException; import android.support.v4.widget.SwipeRefreshLayout; import android.support.v7.app.AlertDialog; import android.support.v7.app.AppCompatActivity; import android.support.v7.widget.Toolbar; import android.util.Log; import android.view.View; import android.widget.Button; import android.widget.EditText; import android.widget.Toast; import com.android.vending.billing.IInAppBillingService; import com.google.android.gms.ads.AdRequest; import com.google.android.gms.ads.AdView; import com.google.android.gms.ads.MobileAds; import com.google.gson.Gson; import com.z3r0byte.magistify.DatabaseHelpers.CalendarDB; import com.z3r0byte.magistify.DatabaseHelpers.NewGradesDB; import com.z3r0byte.magistify.GUI.NavigationDrawer; import com.z3r0byte.magistify.GUI.NewGradeCard; import com.z3r0byte.magistify.GUI.NextAppointmentCard; import com.z3r0byte.magistify.Util.ConfigUtil; import net.ilexiconn.magister.Magister; import net.ilexiconn.magister.container.Appointment; import net.ilexiconn.magister.container.Grade; import 
net.ilexiconn.magister.container.School; import net.ilexiconn.magister.container.User; import net.ilexiconn.magister.handler.GradeHandler; import org.json.JSONException; import org.json.JSONObject; import java.io.IOException; import java.security.InvalidParameterException; import java.text.ParseException; import java.util.ArrayList; import it.gmariotti.cardslib.library.internal.CardHeader; import it.gmariotti.cardslib.library.view.CardViewNative; public class DashboardActivity extends AppCompatActivity { private static final String TAG = "DashboardActivity"; Toolbar mToolbar; CardViewNative appointmentMain; CardViewNative gradeMain; SwipeRefreshLayout mSwipeRefreshLayout; ConfigUtil configUtil; final static String SKU_FIFTY_CENTS = "fifty_cents"; final static String SKU_ONE_EURO = "one_euro"; final static String SKU_TWO_EURO = "two_euro"; final static String SKU_FIVE_EURO = "five_euro"; IInAppBillingService mService; Bundle ownedItems; ArrayList<String> boughtSKU = new ArrayList<>(); ArrayList<String> boughtToken = new ArrayList<>(); ServiceConnection mServiceConn = new ServiceConnection() { @Override public void onServiceDisconnected(ComponentName name) { mService = null; } @Override public void onServiceConnected(ComponentName name, IBinder service) { mService = IInAppBillingService.Stub.asInterface(service); } }; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_dashboard); mToolbar = (Toolbar) findViewById(R.id.Toolbar); mToolbar.setTitle(R.string.title_dashboard); setSupportActionBar(mToolbar); NavigationDrawer navigationDrawer = new NavigationDrawer(this, mToolbar, GlobalAccount.PROFILE, GlobalAccount.USER, "Dashboard"); navigationDrawer.SetupNavigationDrawer(); mSwipeRefreshLayout = (SwipeRefreshLayout) findViewById(R.id.layout_refresh); mSwipeRefreshLayout.setColorSchemeResources( R.color.colorPrimary, R.color.setup_color_3, R.color.setup_color_5); 
mSwipeRefreshLayout.setOnRefreshListener( new SwipeRefreshLayout.OnRefreshListener() { @Override public void onRefresh() { Log.d(TAG, "onRefresh: Refreshing!"); mSwipeRefreshLayout.setVisibility(View.GONE); mSwipeRefreshLayout.setVisibility(View.VISIBLE); setupAppointmentCard(); retrieveGrades(); } } ); setupAppointmentCard(); retrieveGrades(); configUtil = new ConfigUtil(this); if (!configUtil.getBoolean("disable_ads")) { MobileAds.initialize(getApplicationContext(), getString(R.string.app_ad_id)); AdView mAdView = (AdView) findViewById(R.id.adView); AdRequest adRequest = new AdRequest.Builder() .addTestDevice("BEF9819F219452AE8661484A2AA03C59") .build(); mAdView.loadAd(adRequest); } if (configUtil.getInteger("failed_auth") >= 2) { AlertDialog.Builder alertDialogBuilder = new AlertDialog.Builder(this); alertDialogBuilder.setMessage(R.string.dialog_login_failed_desc); alertDialogBuilder.setPositiveButton(R.string.msg_relogin, new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialogInterface, int i) { relogin(); } }); alertDialogBuilder.setNegativeButton(R.string.msg_later, new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialogInterface, int i) { } }); AlertDialog alertDialog = alertDialogBuilder.create(); alertDialog.show(); } Intent serviceIntent = new Intent("com.android.vending.billing.InAppBillingService.BIND"); serviceIntent.setPackage("com.android.vending"); bindService(serviceIntent, mServiceConn, Context.BIND_AUTO_CREATE); gradeMain = (CardViewNative) findViewById(R.id.card_new_grade); gradeMain.setVisibility(View.GONE); getPurchases(); } private void relogin() { final Dialog dialog = new Dialog(this); dialog.setContentView(R.layout.fragment_login); dialog.setTitle(R.string.msg_relogin); Button button = (Button) dialog.findViewById(R.id.button_login); button.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { new Thread(new Runnable() { @Override 
public void run() { Looper.prepare(); EditText usertxt = (EditText) dialog.findViewById(R.id.edit_text_username); EditText passwordtxt = (EditText) dialog.findViewById(R.id.edit_text_password); String username = usertxt.getText().toString(); String password = passwordtxt.getText().toString(); School school = new Gson().fromJson(configUtil.getString("School"), School.class); try { Magister magister = Magister.login(school, username, password); } catch (final IOException e) { runOnUiThread(new Runnable() { @Override public void run() { Toast.makeText(DashboardActivity.this, R.string.err_no_connection, Toast.LENGTH_SHORT).show(); } }); return; } catch (ParseException e) { e.printStackTrace(); runOnUiThread(new Runnable() { @Override public void run() { Toast.makeText(DashboardActivity.this, R.string.err_unknown, Toast.LENGTH_SHORT).show(); } }); return; } catch (final InvalidParameterException e) { runOnUiThread(new Runnable() { @Override public void run() { Toast.makeText(DashboardActivity.this, R.string.err_wrong_username_or_password, Toast.LENGTH_SHORT).show(); } }); return; } Log.d(TAG, "onClick: login succeeded!"); User user = new User(username, password, false); configUtil.setString("User", new Gson().toJson(user)); configUtil.setInteger("failed_auth", 0); GlobalAccount.USER = user; dialog.dismiss(); } }).start(); } }); dialog.show(); } private void setupAppointmentCard() { CalendarDB db = new CalendarDB(this); Appointment[] appointments = db.getNextAppointments(); Appointment appointment = null; Log.d(TAG, "setupAppointmentCard: Amount of appointments: " + appointments.length); if (appointments != null && appointments.length > 0) { appointment = appointments[0]; } NextAppointmentCard mainCardContent = new NextAppointmentCard(this, appointment); CardHeader cardHeader = new CardHeader(this); cardHeader.setTitle(getString(R.string.msg_next_appointment)); mainCardContent.addCardHeader(cardHeader); appointmentMain = (CardViewNative) 
findViewById(R.id.card_next_appointment); appointmentMain.setCard(mainCardContent); } private void setupGradeCard() { NewGradesDB gradesdb = new NewGradesDB(this); Grade[] grades = gradesdb.getNewGrades(); Grade grade = null; if (grades != null && grades.length > 0) { grade = grades[0]; } /* Grade sampleGrade = new Grade(); sampleGrade.isSufficient = false; sampleGrade.grade = "2.3"; sampleGrade.subject = new SubSubject(); sampleGrade.subject.name = "Latijn"; grade = sampleGrade;*/ NewGradeCard mainCardContent = new NewGradeCard(this, grade); CardHeader cardHeader = new CardHeader(this); cardHeader.setTitle(getString(R.string.msg_newest_grade)); mainCardContent.addCardHeader(cardHeader); gradeMain.setVisibility(View.VISIBLE); gradeMain.setCard(mainCardContent); mSwipeRefreshLayout.setRefreshing(false); } private void retrieveGrades() { new Thread(new Runnable() { @Override public void run() { Magister magister = GlobalAccount.MAGISTER; if (magister != null && magister.isExpired()) { try { magister.login(); } catch (IOException e) { Log.e(TAG, "run: No connection during login", e); runOnUiThread(new Runnable() { @Override public void run() { mSwipeRefreshLayout.setRefreshing(false); setupGradeCard(); } }); return; } } else if (magister == null) { runOnUiThread(new Runnable() { @Override public void run() { mSwipeRefreshLayout.setRefreshing(false); setupGradeCard(); } }); return; } GradeHandler gradeHandler = new GradeHandler(magister); Grade[] Grades; try { Grades = gradeHandler.getRecentGrades(); } catch (IOException e) { Grades = null; Log.e(TAG, "run: No connection...", e); runOnUiThread(new Runnable() { @Override public void run() { setupGradeCard(); } }); } if (Grades != null && Grades.length != 0) { NewGradesDB db = new NewGradesDB(getApplicationContext()); db.addGrades(Grades); runOnUiThread(new Runnable() { @Override public void run() { setupGradeCard(); } }); } else if (Grades != null && Grades.length < 1) { runOnUiThread(new Runnable() { @Override public 
void run() { setupGradeCard(); } }); } else { runOnUiThread(new Runnable() { @Override public void run() { setupGradeCard(); } }); } } }).start(); } private void getPurchases() { new Thread(new Runnable() { @Override public void run() { try { Thread.sleep(500); ownedItems = mService.getPurchases(3, getPackageName(), "inapp", null); int response = ownedItems.getInt("RESPONSE_CODE"); if (response == 0) { ArrayList<String> ownedSkus = ownedItems.getStringArrayList("INAPP_PURCHASE_ITEM_LIST"); ArrayList<String> purchaseDataList = ownedItems.getStringArrayList("INAPP_PURCHASE_DATA_LIST"); ArrayList<String> signatureList = ownedItems.getStringArrayList("INAPP_DATA_SIGNATURE_LIST"); for (int i = 0; i < purchaseDataList.size(); ++i) { String purchaseData = purchaseDataList.get(i); String signature = signatureList.get(i); String sku = ownedSkus.get(i); JSONObject jo = new JSONObject(purchaseData); String token = jo.getString("purchaseToken"); boughtSKU.add(sku); boughtToken.add(token); Log.i(TAG, "run: Purchased item " + i + ": SKU: " + sku + ", purchaseData:" + purchaseData + ", Signature: " + signature); configUtil.setBoolean("disable_ads", false); configUtil.setBoolean("pro_unlocked", false); if (boughtSKU.contains(SKU_FIFTY_CENTS)) { configUtil.setBoolean("disable_ads", true); } else if (boughtSKU.contains(SKU_ONE_EURO)) { configUtil.setBoolean("disable_ads", true); configUtil.setBoolean("pro_unlocked", true); configUtil.setString("token_one_euro", token); } else if (boughtSKU.contains(SKU_TWO_EURO)) { configUtil.setBoolean("disable_ads", true); configUtil.setBoolean("pro_unlocked", true); configUtil.setString("token_two_euro", token); } else if (boughtSKU.contains(SKU_FIVE_EURO)) { configUtil.setBoolean("disable_ads", true); configUtil.setBoolean("pro_unlocked", true); configUtil.setString("token_five_euro", token); } // do something with this purchase information // e.g. 
display the updated list of products owned by user } } } catch (RemoteException e) { if (mService != null) { unbindService(mServiceConn); } e.printStackTrace(); } catch (InterruptedException e) { e.printStackTrace(); } catch (JSONException e) { e.printStackTrace(); } } }).start(); } }
implemented #18
app/src/main/java/com/z3r0byte/magistify/DashboardActivity.java
implemented #18
<ide><path>pp/src/main/java/com/z3r0byte/magistify/DashboardActivity.java <ide> } <ide> ); <ide> <del> setupAppointmentCard(); <del> retrieveGrades(); <del> <ide> configUtil = new ConfigUtil(this); <ide> <ide> if (!configUtil.getBoolean("disable_ads")) { <ide> serviceIntent.setPackage("com.android.vending"); <ide> bindService(serviceIntent, mServiceConn, Context.BIND_AUTO_CREATE); <ide> <add> appointmentMain = (CardViewNative) findViewById(R.id.card_next_appointment); <ide> <ide> gradeMain = (CardViewNative) findViewById(R.id.card_new_grade); <ide> gradeMain.setVisibility(View.GONE); <add> <add> setupAppointmentCard(); <add> retrieveGrades(); <ide> <ide> getPurchases(); <ide> } <ide> cardHeader.setTitle(getString(R.string.msg_next_appointment)); <ide> <ide> mainCardContent.addCardHeader(cardHeader); <del> appointmentMain = (CardViewNative) findViewById(R.id.card_next_appointment); <ide> appointmentMain.setCard(mainCardContent); <add> appointmentMain.setOnClickListener(new View.OnClickListener() { <add> @Override <add> public void onClick(View view) { <add> startActivity(new Intent(getApplicationContext(), AppointmentActivity.class)); <add> } <add> }); <ide> } <ide> <ide> private void setupGradeCard() { <ide> mainCardContent.addCardHeader(cardHeader); <ide> gradeMain.setVisibility(View.VISIBLE); <ide> gradeMain.setCard(mainCardContent); <add> gradeMain.setOnClickListener(new View.OnClickListener() { <add> @Override <add> public void onClick(View view) { <add> startActivity(new Intent(getApplicationContext(), NewGradeActivity.class)); <add> } <add> }); <ide> <ide> mSwipeRefreshLayout.setRefreshing(false); <ide> }
JavaScript
mit
5fd8d8e2336824eb175238d58814573a342bcea6
0
papernotes/actemotion,papernotes/actemotion
import React, {Component} from 'react'; import {Button, FormGroup, FormControl} from 'react-bootstrap'; import Toolbar from './Toolbar'; class EditEvent extends Component { getValidationState() { const length = this.state.value.length; if (length > 10) return 'success'; else if (length > 5) return 'warning'; else if (length > 0) return 'error'; } // TODO handleChange(e) { } preventDefault(e) { e.preventDefault(); } // TODO take in props render() { return( <div> <Toolbar/> <h1> Edit/Add Event Page </h1> <div style={{height: '80vh', width: '50vw', margin:'0 auto', marginTop: '20px'}}> <form onSubmit={this.preventDefault.bind(this)}> <FormGroup controlId="formBasicText" validationState={this.getValidationState.bind(this)} > <FormControl type="text" placeholder="Event Name" onChange={this.handleChange.bind(this)} /> <FormControl componentClass="select" placeholder="Event Type" onChange={this.handleChange.bind(this)} > <option value="school">school</option> <option value="work">work</option> </FormControl> <FormControl componentClass="select" placeholder="Emotion" onChange={this.handleChange.bind(this)} > <option value="happy">happy</option> <option value="sad">sad</option> </FormControl> <FormControl type="text" placeholder="Energy Level (1-10)" onChange={this.handleChange.bind(this)} /> <FormControl componentClass="textarea" placeholder="Add Description" onChange={this.handleChange.bind(this)} /> </FormGroup> </form> <Button>Cancel</Button> <Button>Submit</Button> </div> </div> ); } } export default EditEvent;
src/components/EditEvent.js
import React, {Component} from 'react'; import Toolbar from './Toolbar'; class EditEvent extends Component { // TODO take in props render() { return( <div> <Toolbar/> <h1> Edit/Add Event Page </h1> </div> ); } } export default EditEvent;
Set up base for EditEvent page
src/components/EditEvent.js
Set up base for EditEvent page
<ide><path>rc/components/EditEvent.js <ide> import React, {Component} from 'react'; <add>import {Button, FormGroup, FormControl} from 'react-bootstrap'; <ide> import Toolbar from './Toolbar'; <ide> <ide> class EditEvent extends Component { <add> <add> getValidationState() { <add> const length = this.state.value.length; <add> if (length > 10) return 'success'; <add> else if (length > 5) return 'warning'; <add> else if (length > 0) return 'error'; <add> } <add> <add> // TODO <add> handleChange(e) { <add> <add> } <add> <add> preventDefault(e) { <add> e.preventDefault(); <add> } <add> <ide> <ide> // TODO take in props <ide> render() { <ide> <div> <ide> <Toolbar/> <ide> <h1> Edit/Add Event Page </h1> <add> <div style={{height: '80vh', width: '50vw', margin:'0 auto', marginTop: '20px'}}> <add> <form onSubmit={this.preventDefault.bind(this)}> <add> <FormGroup <add> controlId="formBasicText" <add> validationState={this.getValidationState.bind(this)} <add> > <add> <add> <FormControl <add> type="text" <add> placeholder="Event Name" <add> onChange={this.handleChange.bind(this)} <add> /> <add> <add> <FormControl <add> componentClass="select" <add> placeholder="Event Type" <add> onChange={this.handleChange.bind(this)} <add> > <add> <option value="school">school</option> <add> <option value="work">work</option> <add> </FormControl> <add> <add> <FormControl <add> componentClass="select" <add> placeholder="Emotion" <add> onChange={this.handleChange.bind(this)} <add> > <add> <option value="happy">happy</option> <add> <option value="sad">sad</option> <add> </FormControl> <add> <add> <FormControl <add> type="text" <add> placeholder="Energy Level (1-10)" <add> onChange={this.handleChange.bind(this)} <add> /> <add> <add> <FormControl <add> componentClass="textarea" <add> placeholder="Add Description" <add> onChange={this.handleChange.bind(this)} <add> /> <add> <add> </FormGroup> <add> </form> <add> <add> <Button>Cancel</Button> <add> <Button>Submit</Button> <add> </div> <add> <add> 
<ide> </div> <ide> ); <ide> }
Java
apache-2.0
7325640a9b8dd19e2973007e93ad06c64443d860
0
nezda/yawni,nezda/yawni,nezda/yawni,nezda/yawni,nezda/yawni
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.yawni.wn; import java.util.List; /** * A {@code RelationTarget} is the <em>source</em> or <em>target</em> of a {@link Relation}. * The target (and source) of a {@link SemanticRelation} is a {@link Synset}; * the target (and source) of a {@link LexicalRelation} is a {@link WordSense}. * {@code RelationTarget} acts as common interface to {@code Synset} and {@code WordSense}, * which form a composite pair, as evidenced by both being {@code Iterable<WordSense>} and * having {@link #getSynset()}. * * <p> Note this class used to be called {@code PointerTarget}. * * @see Relation * @see Synset * @see WordSense */ public interface RelationTarget extends Iterable<WordSense> { /** * Returns the outgoing {@code Relation}s from <em>this</em> target, i.e., those * {@code Relation}s that have this object as their source. For a {@code WordSense}, * this method returns all of the {@link LexicalRelation}s emanating from it, * and all {@link SemanticRelation}s sourced at its {@link WordSense#getSynset()}. * For a {@code Synset}, this method returns all {@link SemanticRelation}s sourced at it, * and <em>all</em> {@link LexicalRelation}s emanating from <em>all</em> of its {@code WordSense}s. 
*/ public List<Relation> getRelations(); /** Filters {@link #getRelations()} by type {@code type}. */ public List<Relation> getRelations(RelationType type); /** Returns the targets of the {@code Relation}s returned by {@link #getRelations()}. */ public List<RelationTarget> getRelationTargets(); /** Returns the targets of the {@code Relation}s returned by {@link #getRelationTargets(org.yawni.wn.RelationType)} */ public List<RelationTarget> getRelationTargets(RelationType type); /** {@code Synset} returns itself, {@code WordSense} returns its {@code Synset} */ public Synset getSynset(); public POS getPOS(); /** * Returns a description of the target. For a {@code WordSense}, this is * its lemma; for a {@code Synset}, it's the concatenated lemmas of * its {@code WordSense}s. */ public String getDescription(); /** * Returns a long description of the target. This is its description, * appended by, if it exists, a dash and its gloss. */ public String getLongDescription(); }
core/src/main/java/org/yawni/wn/RelationTarget.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.yawni.wn; import java.util.List; /** * A {@code RelationTarget} is the <em>source</em> or <em>target</em> of a {@link Relation}. * The target (and source) of a {@link SemanticRelation} is a {@link Synset}; * the target (and source) of a {@link LexicalRelation} is a {@link WordSense}. * * <p> Note this class used to be called {@code PointerTarget}. * * @see Relation * @see Synset * @see WordSense */ public interface RelationTarget extends Iterable<WordSense> { public POS getPOS(); /** * Returns a description of the target. For a {@code WordSense}, this is * its lemma; for a {@code Synset}, it's the concatenated lemma's of * its {@code WordSense}s. */ public String getDescription(); /** * Returns a long description of the target. This is its description, * appended by, if it exists, a dash and its gloss. */ public String getLongDescription(); /** * Returns the outgoing {@code Relation}s from this target -- those * {@code Relation}s that have this object as their source. */ public List<? extends Relation> getRelations(); /** Returns the outgoing {@code Relation}s of type {@code type}. */ public List<? 
extends Relation> getRelations(RelationType type); /** Returns the targets of the outgoing {@code Relation}s. */ public List<RelationTarget> getRelationTargets(); /** * Returns the targets of the outgoing {@code Relation}s that have type * {@code type}. */ public List<RelationTarget> getRelationTargets(RelationType type); /** LN Added */ public Synset getSynset(); }
tweaked defintion of getRelations(); see javadoc
core/src/main/java/org/yawni/wn/RelationTarget.java
tweaked defintion of getRelations(); see javadoc
<ide><path>ore/src/main/java/org/yawni/wn/RelationTarget.java <ide> * A {@code RelationTarget} is the <em>source</em> or <em>target</em> of a {@link Relation}. <ide> * The target (and source) of a {@link SemanticRelation} is a {@link Synset}; <ide> * the target (and source) of a {@link LexicalRelation} is a {@link WordSense}. <add> * {@code RelationTarget} acts as common interface to {@code Synset} and {@code WordSense}, <add> * which form a composite pair, as evidenced by both being {@code Iterable<WordSense>} and <add> * having {@link #getSynset()}. <ide> * <ide> * <p> Note this class used to be called {@code PointerTarget}. <ide> * <ide> * @see WordSense <ide> */ <ide> public interface RelationTarget extends Iterable<WordSense> { <add> /** <add> * Returns the outgoing {@code Relation}s from <em>this</em> target, i.e., those <add> * {@code Relation}s that have this object as their source. For a {@code WordSense}, <add> * this method returns all of the {@link LexicalRelation}s emanating from it, <add> * and all {@link SemanticRelation}s sourced at its {@link WordSense#getSynset()}. <add> * For a {@code Synset}, this method returns all {@link SemanticRelation}s sourced at it, <add> * and <em>all</em> {@link LexicalRelation}s emanating from <em>all</em> of its {@code WordSense}s. <add> */ <add> public List<Relation> getRelations(); <add> <add> /** Filters {@link #getRelations()} by type {@code type}. */ <add> public List<Relation> getRelations(RelationType type); <add> <add> /** Returns the targets of the {@code Relation}s returned by {@link #getRelations()}. 
*/ <add> public List<RelationTarget> getRelationTargets(); <add> <add> /** Returns the targets of the {@code Relation}s returned by {@link #getRelationTargets(org.yawni.wn.RelationType)} */ <add> public List<RelationTarget> getRelationTargets(RelationType type); <add> <add> /** {@code Synset} returns itself, {@code WordSense} returns its {@code Synset} */ <add> public Synset getSynset(); <add> <ide> public POS getPOS(); <ide> <ide> /** <ide> * Returns a description of the target. For a {@code WordSense}, this is <del> * its lemma; for a {@code Synset}, it's the concatenated lemma's of <add> * its lemma; for a {@code Synset}, it's the concatenated lemmas of <ide> * its {@code WordSense}s. <ide> */ <ide> public String getDescription(); <ide> * appended by, if it exists, a dash and its gloss. <ide> */ <ide> public String getLongDescription(); <del> <del> /** <del> * Returns the outgoing {@code Relation}s from this target -- those <del> * {@code Relation}s that have this object as their source. <del> */ <del> public List<? extends Relation> getRelations(); <del> <del> /** Returns the outgoing {@code Relation}s of type {@code type}. */ <del> public List<? extends Relation> getRelations(RelationType type); <del> <del> /** Returns the targets of the outgoing {@code Relation}s. */ <del> public List<RelationTarget> getRelationTargets(); <del> <del> /** <del> * Returns the targets of the outgoing {@code Relation}s that have type <del> * {@code type}. <del> */ <del> public List<RelationTarget> getRelationTargets(RelationType type); <del> <del> /** LN Added */ <del> public Synset getSynset(); <ide> }
JavaScript
mit
8282a32a04618ad699ceefb665039d8b711c9ad0
0
irjudson/client,leeric92/client,irjudson/client,nitrogenjs/client,leeric92/client,nitrogenjs/client
/** * The Message object is the core of the Nitrogen framework. Applications, devices, and * services use Messages to communicate with and issue commands to each other. All messages * that don't begin with an unscore are checked against a schema chosen by the messages 'type' * and 'schema_version' fields such that a message of a given type is known to conform to a * particular structure. This enables sharing between differing devices and applications. For * custom message types, an application may use an unscore prefix (eg. '_myMessage') with any * schema that they'd like. This supports communication between principals of the same * organization over a private schema. That said, it is strongly encouraged to use standard * schemas wherever possible. * * Messages have a sender principal (referred to as 'from') and a receiver principal (referred * to as 'to'). These fields are used to route messages to their receiver. * * Message types are divided into two main classes: data and commands. Data messages carry * information, typically the output of a device's operation. For example, a message typed * 'image' contains an image url in its body in its 'url' property. * * The second class of messages are commands. Command messages are sent from one principal to * another to request an operation on the receiving principal. For example, a message of the * type 'cameraCommand' contains a command that directs the operation of a camera principal. * * @class Message * @namespace nitrogen */ function Message(json) { this.ts = new Date(); this.body = {}; for(var key in json) { if(json.hasOwnProperty(key)) { if (key === 'ts' || key === 'expires' || key === 'index_until') this[key] = new Date(Date.parse(json[key])); else this[key] = json[key]; } } } /** * Find messages filtered by the passed query and limited to and sorted by the passed options. * * @method find * @async * @param {Object} session The session with a Nitrogen service to make this request under. 
* @param {Object} query A query filter for the messages you want to find defined using MongoDB query format. * @param {Object} options Options for the query: 'limit': maximum number of results to be returned. 'sort': The field that the results should be sorted on, 'dir': The direction that the results should be sorted. 'skip': The number of results that should be skipped before pulling results. * @param {Function} callback Callback function of the form f(err, messages). **/ Message.find = function(session, query, options, callback) { if (!session) return callback('no session passed to Message.find'); if (!callback || typeof(callback) !== 'function') return callback('no callback passed to Message.find.'); if (!query) query = {}; if (!options) options = {}; var messageUrl = session.service.config.endpoints.messages; session.get({ url: messageUrl, query: query, queryOptions: options, json: true }, function(err, resp, body) { if (err) return callback(err); var messages = body.messages.map(function(message) { return new Message(message); }); callback(null, messages); }); }; /** * Returns true if the message is of the passed type. * * @method is * @param {String} type Message type to compare against. * @returns {Boolean} Returns true if the message is of the passed type. **/ Message.prototype.is = function(type) { return this.type === type; }; /** * Returns true if the message is from the passed principal. * * @method isFrom * @param {String} principalId Principal id to compare against. * @returns {Boolean} Returns true if the message is from the passed principal id. **/ Message.prototype.isFrom = function(principal) { return this.from === principal.id; }; /** * Returns true if the message is in response to the passed message. * * @method isResponseTo * @param {String} type Message to compare against. * @returns {Boolean} Returns true if the message is in response to the passed message. 
**/ Message.prototype.isResponseTo = function(otherMessage) { return otherMessage.id && this.response_to && this.response_to.indexOf(otherMessage.id) !== -1; }; /** * Returns true if the message is of the passed type. * * @method isTo * @param {String} principalId Principal id to compare against. * @returns {Boolean} Returns true if the message is of the passed type. **/ Message.prototype.isTo = function(principal) { return this.to === principal.id; }; /** * Removes a set of messages specified by passed filter. Used by the internal service principal to * to cleanup expired messages etc. * * @method remove * @async * @static * @private * @param {Object} session An open session with a Nitrogen service. * @param {Object} query A query filter for the messages you want to remove. * @param {Function} callback Callback function of the form f(err, removedCount). */ Message.remove = function(session, query, callback) { session.remove({ url: session.service.config.endpoints.messages, query: query, json: true }, function(err, resp, body) { if (err) return callback(err); if (resp.statusCode != 200) return callback(resp.statusCode); callback(null, body.removed); }); }; /** * Remove this message. Used by the internal service principal for cleanup. * * @method remove * @async * @private * @param {Object} session An open session with a Nitrogen service. * @param {Function} callback Callback function of the form f(err, removedCount). **/ Message.prototype.remove = function(session, callback) { Message.remove(session, { "_id": this.id }, callback || function() {}); }; /** * Send this message. * * @method send * @async * @param {Object} session An open session with a Nitrogen service. * @param {Function} callback Callback function of the form f(err, sentMessages). **/ Message.prototype.send = function(session, callback) { Message.sendMany(session, [this], callback || function() {}); }; /** * Send multiple messages. 
* * @method sendMany * @async * @param {Object} session An open session with a Nitrogen service. * @param {Array} messages An array of messages to send. * @param {Function} callback Callback function of the form f(err, sentMessages). **/ Message.sendMany = function(session, messages, callback) { if (!session) return callback('session required for Message.sendMany'); session.post({ url: session.service.config.endpoints.messages, json: messages }, function(err, resp, body) { if (err) return callback(err); if (resp.statusCode != 200) return callback(resp.statusCode, null); var sentMessages = []; body.messages.forEach(function(messageJson) { sentMessages.push(new Message(messageJson)); }); if (callback) callback(null, sentMessages); }); }; /** * Returns true if the message expired. * * @method expired * @returns {Boolean} Returns true if the message is expired. **/ Message.prototype.expired = function() { return this.millisToExpiration() < 0; }; /** * Returns the number of milliseconds before this message expires. * * @method millisToExpiration * @returns {Number} Number of milliseconds before this message expires. **/ Message.prototype.millisToExpiration = function() { return this.expires - new Date().getTime(); }; /** * Returns the number of milliseconds before the timestamp for this message. Used to calculate * time to execution for command messages. * * @method millisToTimestamp * @returns {Number} Number of milliseconds before the timestamp for this message. **/ Message.prototype.millisToTimestamp = function() { return this.ts - new Date().getTime(); }; Message.NEVER_EXPIRE = new Date(Date.UTC(2500, 0, 1)); Message.INDEX_FOREVER = new Date(Date.UTC(2500, 0, 1)); module.exports = Message;
lib/message.js
/** * The Message object is the core of the Nitrogen framework. Applications, devices, and * services use Messages to communicate with and issue commands to each other. All messages * that don't begin with an unscore are checked against a schema chosen by the messages 'type' * and 'schema_version' fields such that a message of a given type is known to conform to a * particular structure. This enables sharing between differing devices and applications. For * custom message types, an application may use an unscore prefix (eg. '_myMessage') with any * schema that they'd like. This supports communication between principals of the same * organization over a private schema. That said, it is strongly encouraged to use standard * schemas wherever possible. * * Messages have a sender principal (referred to as 'from') and a receiver principal (referred * to as 'to'). These fields are used to route messages to their receiver. * * Message types are divided into two main classes: data and commands. Data messages carry * information, typically the output of a device's operation. For example, a message typed * 'image' contains an image url in its body in its 'url' property. * * The second class of messages are commands. Command messages are sent from one principal to * another to request an operation on the receiving principal. For example, a message of the * type 'cameraCommand' contains a command that directs the operation of a camera principal. * * @class Message * @namespace nitrogen */ function Message(json) { this.ts = new Date(); this.body = {}; for(var key in json) { if(json.hasOwnProperty(key)) { if (key === 'ts' || key === 'expires' || key === 'index_until') this[key] = new Date(Date.parse(json[key])); else this[key] = json[key]; } } } /** * Find messages filtered by the passed query and limited to and sorted by the passed options. * * @method find * @async * @param {Object} session The session with a Nitrogen service to make this request under. 
* @param {Object} query A query filter for the messages you want to find defined using MongoDB query format. * @param {Object} options Options for the query: 'limit': maximum number of results to be returned. 'sort': The field that the results should be sorted on, 'dir': The direction that the results should be sorted. 'skip': The number of results that should be skipped before pulling results. * @param {Function} callback Callback function of the form f(err, messages). **/ Message.find = function(session, query, options, callback) { if (!session) return callback('no session passed to Message.find'); if (!callback || typeof(callback) !== 'function') return callback('no callback passed to Message.find.'); if (!query) query = {}; if (!options) options = {}; var messageUrl = session.service.config.endpoints.messages; session.get({ url: messageUrl, query: query, queryOptions: options, json: true }, function(err, resp, body) { if (err) return callback(err); var messages = body.messages.map(function(message) { return new Message(message); }); callback(null, messages); }); }; /** * Returns true if the message is of the passed type. * * @method is * @param {String} type Message type to compare against. * @returns {Boolean} Returns true if the message is of the passed type. **/ Message.prototype.is = function(type) { return this.type === type; }; /** * Returns true if the message is from the passed principal. * * @method isFrom * @param {String} principalId Principal id to compare against. * @returns {Boolean} Returns true if the message is from the passed principal id. **/ Message.prototype.isFrom = function(principal) { return this.from === principal.id; }; /** * Returns true if the message is in response to the passed message. * * @method isResponseTo * @param {String} type Message to compare against. * @returns {Boolean} Returns true if the message is in response to the passed message. 
**/ Message.prototype.isResponseTo = function(otherMessage) { return otherMessage.id && this.response_to && this.response_to.indexOf(otherMessage.id) !== -1; }; /** * Returns true if the message is of the passed type. * * @method isTo * @param {String} principalId Principal id to compare against. * @returns {Boolean} Returns true if the message is of the passed type. **/ Message.prototype.isTo = function(principal) { return this.to === principal.id; }; /** * Removes a set of messages specified by passed filter. Used by the internal service principal to * to cleanup expired messages etc. * * @method remove * @async * @static * @private * @param {Object} session An open session with a Nitrogen service. * @param {Object} query A query filter for the messages you want to remove. * @param {Function} callback Callback function of the form f(err, removedCount). */ Message.remove = function(session, query, callback) { session.remove({ url: session.service.config.endpoints.messages, query: query, json: true }, function(err, resp, body) { if (err) return callback(err); if (resp.statusCode != 200) return callback(resp.statusCode); callback(null, body.removed); }); }; /** * Remove this message. Used by the internal service principal for cleanup. * * @method remove * @async * @private * @param {Object} session An open session with a Nitrogen service. * @param {Function} callback Callback function of the form f(err, removedCount). **/ Message.prototype.remove = function(session, callback) { Message.remove(session, { "_id": this.id }, callback || function() {}); }; /** * Send this message. * * @method send * @async * @param {Object} session An open session with a Nitrogen service. * @param {Function} callback Callback function of the form f(err, sentMessages). **/ Message.prototype.send = function(session, callback) { Message.sendMany(session, [this], callback || function() {}); }; /** * Send multiple messages. 
* * @method sendMany * @async * @param {Object} session An open session with a Nitrogen service. * @param {Array} messages An array of messages to send. * @param {Function} callback Callback function of the form f(err, sentMessages). **/ Message.sendMany = function(session, messages, callback) { if (!session) return callback('session required for Message.sendMany'); session.post({ url: session.service.config.endpoints.messages, json: messages }, function(err, resp, body) { if (err) return callback(err); if (resp.statusCode != 200) return callback(resp.statusCode, null); var sentMessages = []; body.messages.forEach(function(messageJson) { sentMessages.push(new Message(messageJson)); }); if (callback) callback(null, sentMessages); }); }; /** * Returns true if the message expired. * * @method expired * @returns {Boolean} Returns true if the message is expired. **/ Message.prototype.expired = function() { return this.millisToExpiration() < 0; }; /** * Returns the number of milliseconds before this message expires. * * @method millisToExpiration * @returns {Number} Number of milliseconds before this message expires. **/ Message.prototype.millisToExpiration = function() { return this.expires - new Date().getTime(); }; /** * Returns the number of milliseconds before the timestamp for this message. Used to calculate * time to execution for command messages. * * @method millisToTimestamp * @returns {Number} Number of milliseconds before the timestamp for this message. **/ Message.prototype.millisToTimestamp = function() { return this.ts - new Date().getTime(); }; module.exports = Message;
add NEVER_EXPIRES and INDEX_UNTIL constants
lib/message.js
add NEVER_EXPIRES and INDEX_UNTIL constants
<ide><path>ib/message.js <ide> return this.ts - new Date().getTime(); <ide> }; <ide> <add>Message.NEVER_EXPIRE = new Date(Date.UTC(2500, 0, 1)); <add>Message.INDEX_FOREVER = new Date(Date.UTC(2500, 0, 1)); <add> <ide> module.exports = Message;
Java
apache-2.0
59774093e441f70eefe5d92d01534789abee2f93
0
senseidb/sensei,javasoze/sensei,senseidb/sensei,javasoze/sensei,senseidb/sensei,senseidb/sensei,javasoze/sensei,javasoze/sensei
package com.senseidb.search.node; import java.net.InetSocketAddress; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.concurrent.Callable; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import org.apache.commons.lang.RandomStringUtils; import org.apache.log4j.Logger; import zu.core.cluster.ZuCluster; import zu.core.cluster.routing.ConsistentHashRoutingAlgorithm; import zu.core.cluster.routing.RoutingAlgorithm; import zu.finagle.client.ZuFinagleServiceDecorator; import zu.finagle.client.ZuTransportClientProxy; import zu.finagle.serialize.ZuSerializer; import com.senseidb.metrics.MetricsConstants; import com.senseidb.search.req.AbstractSenseiRequest; import com.senseidb.search.req.AbstractSenseiResult; import com.senseidb.search.req.ErrorType; import com.senseidb.search.req.SenseiError; import com.senseidb.search.req.SenseiRequest; import com.senseidb.svc.api.SenseiException; import com.twitter.finagle.Service; import com.twitter.util.Duration; import com.twitter.util.Future; import com.twitter.util.FutureEventListener; import com.yammer.metrics.Metrics; import com.yammer.metrics.core.Meter; import com.yammer.metrics.core.MetricName; import com.yammer.metrics.core.Timer; /** * @author "Xiaoyang Gu<[email protected]>" * * @param <REQUEST> * @param <RESULT> */ public abstract class AbstractConsistentHashBroker<REQUEST extends AbstractSenseiRequest, RESULT extends AbstractSenseiResult> extends AbstractSenseiBroker<REQUEST, RESULT> { private final static Logger logger = Logger.getLogger(AbstractConsistentHashBroker.class); protected long _timeout = 8000; protected final ZuSerializer<REQUEST, RESULT> _serializer; private static Timer ScatterTimer = null; private static Timer GatherTimer = null; private static Timer TotalTimer = null; private static Meter SearchCounter = null; private static 
Meter ErrorMeter = null; private static Meter EmptyMeter = null; protected ZuFinagleServiceDecorator<REQUEST, RESULT> serviceDecorator; private final RoutingAlgorithm<Service<REQUEST, RESULT>> router; static { // register metrics monitoring for timers try { MetricName scatterMetricName = new MetricName(MetricsConstants.Domain, "timer", "scatter-time", "broker"); ScatterTimer = Metrics.newTimer(scatterMetricName, TimeUnit.MILLISECONDS, TimeUnit.SECONDS); MetricName gatherMetricName = new MetricName(MetricsConstants.Domain, "timer", "gather-time", "broker"); GatherTimer = Metrics.newTimer(gatherMetricName, TimeUnit.MILLISECONDS, TimeUnit.SECONDS); MetricName totalMetricName = new MetricName(MetricsConstants.Domain, "timer", "total-time", "broker"); TotalTimer = Metrics.newTimer(totalMetricName, TimeUnit.MILLISECONDS, TimeUnit.SECONDS); MetricName searchCounterMetricName = new MetricName(MetricsConstants.Domain, "meter", "search-count", "broker"); SearchCounter = Metrics.newMeter(searchCounterMetricName, "requets", TimeUnit.SECONDS); MetricName errorMetricName = new MetricName(MetricsConstants.Domain, "meter", "error-meter", "broker"); ErrorMeter = Metrics.newMeter(errorMetricName, "errors", TimeUnit.SECONDS); MetricName emptyMetricName = new MetricName(MetricsConstants.Domain, "meter", "empty-meter", "broker"); EmptyMeter = Metrics.newMeter(emptyMetricName, "null-hits", TimeUnit.SECONDS); } catch (Exception e) { logger.error(e.getMessage(), e); } } /** * @param clusterClient * @param serializer * The serializer used to serialize/deserialize request/response pairs */ public AbstractConsistentHashBroker(ZuCluster clusterClient, ZuSerializer<REQUEST, RESULT> serializer) { super(); _serializer = serializer; ZuTransportClientProxy<REQUEST, RESULT> proxy = new ZuTransportClientProxy<REQUEST, RESULT>( getMessageType(), _serializer); serviceDecorator = new ZuFinagleServiceDecorator<REQUEST, RESULT>(proxy); router = new ConsistentHashRoutingAlgorithm<Service<REQUEST, 
RESULT>>(serviceDecorator); clusterClient.addClusterEventListener(router); } public REQUEST customizeRequest(REQUEST request) { return request; } /** * @return an empty result instance. Used when the request cannot be properly * processed or when the true result is empty. */ @Override public abstract RESULT getEmptyResultInstance(); /** * The method that provides the search service. * * @param req * @return * @throws SenseiException */ @Override public RESULT browse(final REQUEST req) throws SenseiException { SearchCounter.mark(); try { return TotalTimer.time(new Callable<RESULT>() { @Override public RESULT call() throws Exception { return doBrowse(req); } }); } catch (Exception e) { ErrorMeter.mark(); throw new SenseiException(e.getMessage(), e); } } /** * Merge results on the client/broker side. It likely works differently from * the one in the search node. * * @param request * the original request object * @param resultList * the list of results from all the requested partitions. * @return one single result instance that is merged from the result list. 
*/ public abstract RESULT mergeResults(REQUEST request, List<RESULT> resultList); protected String getRouteParam(REQUEST req) { String param = req.getRouteParam(); if (param == null) { return RandomStringUtils.random(4); } else { return param; } } protected RESULT doBrowse(final REQUEST req) { final long time = System.currentTimeMillis(); final List<RESULT> resultList = new ArrayList<RESULT>(); try { resultList.addAll(ScatterTimer.time(new Callable<List<RESULT>>() { @Override public List<RESULT> call() throws Exception { return doCall(req); } })); } catch (Exception e) { ErrorMeter.mark(); RESULT emptyResult = getEmptyResultInstance(); logger.error("Error running scatter/gather", e); emptyResult.addError(new SenseiError("Error gathering the results" + e.getMessage(), ErrorType.BrokerGatherError)); return emptyResult; } if (resultList.size() == 0) { logger.error("no result received at all return empty result"); RESULT emptyResult = getEmptyResultInstance(); emptyResult.addError(new SenseiError("Error gathering the results. 
" + "no result received at all return empty result", ErrorType.BrokerGatherError)); EmptyMeter.mark(); return emptyResult; } RESULT result = null; try { result = GatherTimer.time(new Callable<RESULT>() { @Override public RESULT call() throws Exception { return mergeResults(req, resultList); } }); } catch (Exception e) { result = getEmptyResultInstance(); logger.error("Error gathering the results", e); result.addError(new SenseiError("Error gathering the results" + e.getMessage(), ErrorType.BrokerGatherError)); ErrorMeter.mark(); } result.setTime(System.currentTimeMillis() - time); logger.info("remote search took " + result.getTime() + "ms"); return result; } @SuppressWarnings("unchecked") protected List<RESULT> doCall(REQUEST req) throws ExecutionException { Set<Integer> shards = router.getShards(); Map<Service<REQUEST, RESULT>, REQUEST> serviceToRequest = new HashMap<Service<REQUEST, RESULT>, REQUEST>(); byte[] routeBytes = getRouteParam(req).getBytes(); for (Integer shard : shards) { Service<REQUEST, RESULT> service = router.route(routeBytes, shard); if (service == null) { logger.warn("router returned null as a destination service"); continue; } REQUEST requestToNode = serviceToRequest.get(service); if (requestToNode == null) { // TODO: Cloning is yucky per http://www.artima.com/intv/bloch13.htm requestToNode = (REQUEST) (((SenseiRequest) req).clone()); requestToNode = customizeRequest(requestToNode); requestToNode.setPartitions(new HashSet<Integer>()); serviceToRequest.put(service, requestToNode); } requestToNode.getPartitions().add(shard); } return executeRequestsInParallel(serviceToRequest, _timeout); } protected abstract String getMessageType(); @Override public void shutdown() { logger.info("shutting down broker..."); } @Override public long getTimeout() { return _timeout; } @Override public void setTimeout(long timeout) { this._timeout = timeout; } protected List<RESULT> executeRequestsInParallel( final Map<Service<REQUEST, RESULT>, REQUEST> 
serviceToRequest, long timeout) { long start = System.currentTimeMillis(); final List<Future<RESULT>> futures = new ArrayList<Future<RESULT>>(); for (Entry<Service<REQUEST, RESULT>, REQUEST> entry : serviceToRequest.entrySet()) { futures.add(entry.getKey().apply(entry.getValue()) .addEventListener(new FutureEventListener<RESULT>() { @Override public void onFailure(Throwable t) { logger.error("Failed to get response", t); } @Override public void onSuccess(RESULT result) { // do nothing as we wait for all results below } })); } Future<List<RESULT>> collected = Future.collect(futures); List<RESULT> results = collected.apply(Duration.apply(timeout, TimeUnit.MILLISECONDS)); logger.info(String.format("Getting responses from %d nodes took %dms.", results.size(), (System.currentTimeMillis() - start))); return results; } protected static Set<InetSocketAddress> getNodesAddresses( Map<Integer, List<InetSocketAddress>> clusterView) { Set<InetSocketAddress> nodes = new HashSet<InetSocketAddress>(); for (List<InetSocketAddress> inetSocketAddressList : clusterView.values()) { nodes.addAll(inetSocketAddressList); } return nodes; } }
sensei-core/src/main/java/com/senseidb/search/node/AbstractConsistentHashBroker.java
package com.senseidb.search.node; import java.net.InetSocketAddress; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.concurrent.Callable; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import org.apache.commons.lang.RandomStringUtils; import org.apache.log4j.Logger; import zu.core.cluster.ZuCluster; import zu.core.cluster.routing.ConsistentHashRoutingAlgorithm; import zu.core.cluster.routing.RoutingAlgorithm; import zu.finagle.client.ZuFinagleServiceDecorator; import zu.finagle.client.ZuTransportClientProxy; import zu.finagle.serialize.ZuSerializer; import com.senseidb.metrics.MetricsConstants; import com.senseidb.search.req.AbstractSenseiRequest; import com.senseidb.search.req.AbstractSenseiResult; import com.senseidb.search.req.ErrorType; import com.senseidb.search.req.SenseiError; import com.senseidb.search.req.SenseiRequest; import com.senseidb.svc.api.SenseiException; import com.twitter.finagle.Service; import com.twitter.util.Duration; import com.twitter.util.Future; import com.twitter.util.FutureEventListener; import com.yammer.metrics.Metrics; import com.yammer.metrics.core.Meter; import com.yammer.metrics.core.MetricName; import com.yammer.metrics.core.Timer; /** * @author "Xiaoyang Gu<[email protected]>" * * @param <REQUEST> * @param <RESULT> */ public abstract class AbstractConsistentHashBroker<REQUEST extends AbstractSenseiRequest, RESULT extends AbstractSenseiResult> extends AbstractSenseiBroker<REQUEST, RESULT> { private final static Logger logger = Logger.getLogger(AbstractConsistentHashBroker.class); protected long _timeout = 8000; protected final ZuSerializer<REQUEST, RESULT> _serializer; private static Timer ScatterTimer = null; private static Timer GatherTimer = null; private static Timer TotalTimer = null; private static Meter SearchCounter = null; private static 
Meter ErrorMeter = null; private static Meter EmptyMeter = null; protected ZuFinagleServiceDecorator<REQUEST, RESULT> serviceDecorator; private final RoutingAlgorithm<Service<REQUEST, RESULT>> router; static { // register metrics monitoring for timers try { MetricName scatterMetricName = new MetricName(MetricsConstants.Domain, "timer", "scatter-time", "broker"); ScatterTimer = Metrics.newTimer(scatterMetricName, TimeUnit.MILLISECONDS, TimeUnit.SECONDS); MetricName gatherMetricName = new MetricName(MetricsConstants.Domain, "timer", "gather-time", "broker"); GatherTimer = Metrics.newTimer(gatherMetricName, TimeUnit.MILLISECONDS, TimeUnit.SECONDS); MetricName totalMetricName = new MetricName(MetricsConstants.Domain, "timer", "total-time", "broker"); TotalTimer = Metrics.newTimer(totalMetricName, TimeUnit.MILLISECONDS, TimeUnit.SECONDS); MetricName searchCounterMetricName = new MetricName(MetricsConstants.Domain, "meter", "search-count", "broker"); SearchCounter = Metrics.newMeter(searchCounterMetricName, "requets", TimeUnit.SECONDS); MetricName errorMetricName = new MetricName(MetricsConstants.Domain, "meter", "error-meter", "broker"); ErrorMeter = Metrics.newMeter(errorMetricName, "errors", TimeUnit.SECONDS); MetricName emptyMetricName = new MetricName(MetricsConstants.Domain, "meter", "empty-meter", "broker"); EmptyMeter = Metrics.newMeter(emptyMetricName, "null-hits", TimeUnit.SECONDS); } catch (Exception e) { logger.error(e.getMessage(), e); } } /** * @param clusterClient * @param serializer * The serializer used to serialize/deserialize request/response pairs */ public AbstractConsistentHashBroker(ZuCluster clusterClient, ZuSerializer<REQUEST, RESULT> serializer) { super(); _serializer = serializer; ZuTransportClientProxy<REQUEST, RESULT> proxy = new ZuTransportClientProxy<REQUEST, RESULT>( getMessageType(), _serializer); serviceDecorator = new ZuFinagleServiceDecorator<REQUEST, RESULT>(proxy); router = new ConsistentHashRoutingAlgorithm<Service<REQUEST, 
RESULT>>(serviceDecorator); clusterClient.addClusterEventListener(router); } public REQUEST customizeRequest(REQUEST request) { return request; } /** * @return an empty result instance. Used when the request cannot be properly * processed or when the true result is empty. */ @Override public abstract RESULT getEmptyResultInstance(); /** * The method that provides the search service. * * @param req * @return * @throws SenseiException */ @Override public RESULT browse(final REQUEST req) throws SenseiException { SearchCounter.mark(); try { return TotalTimer.time(new Callable<RESULT>() { @Override public RESULT call() throws Exception { return doBrowse(req); } }); } catch (Exception e) { ErrorMeter.mark(); throw new SenseiException(e.getMessage(), e); } } /** * Merge results on the client/broker side. It likely works differently from * the one in the search node. * * @param request * the original request object * @param resultList * the list of results from all the requested partitions. * @return one single result instance that is merged from the result list. 
*/ public abstract RESULT mergeResults(REQUEST request, List<RESULT> resultList); protected String getRouteParam(REQUEST req) { String param = req.getRouteParam(); if (param == null) { return RandomStringUtils.random(4); } else { return param; } } protected RESULT doBrowse(final REQUEST req) { final long time = System.currentTimeMillis(); final List<RESULT> resultList = new ArrayList<RESULT>(); try { resultList.addAll(ScatterTimer.time(new Callable<List<RESULT>>() { @Override public List<RESULT> call() throws Exception { return doCall(req); } })); } catch (Exception e) { ErrorMeter.mark(); RESULT emptyResult = getEmptyResultInstance(); logger.error("Error running scatter/gather", e); emptyResult.addError(new SenseiError("Error gathering the results" + e.getMessage(), ErrorType.BrokerGatherError)); return emptyResult; } if (resultList.size() == 0) { logger.error("no result received at all return empty result"); RESULT emptyResult = getEmptyResultInstance(); emptyResult.addError(new SenseiError("Error gathering the results. 
" + "no result received at all return empty result", ErrorType.BrokerGatherError)); EmptyMeter.mark(); return emptyResult; } RESULT result = null; try { result = GatherTimer.time(new Callable<RESULT>() { @Override public RESULT call() throws Exception { return mergeResults(req, resultList); } }); } catch (Exception e) { result = getEmptyResultInstance(); logger.error("Error gathering the results", e); result.addError(new SenseiError("Error gathering the results" + e.getMessage(), ErrorType.BrokerGatherError)); ErrorMeter.mark(); } result.setTime(System.currentTimeMillis() - time); logger.info("remote search took " + result.getTime() + "ms"); return result; } @SuppressWarnings("unchecked") protected List<RESULT> doCall(REQUEST req) throws ExecutionException { Set<Integer> shards = router.getShards(); Map<Service<REQUEST, RESULT>, REQUEST> serviceToRequest = new HashMap<Service<REQUEST, RESULT>, REQUEST>(); for (Integer shard : shards) { Service<REQUEST, RESULT> service = router.route(getRouteParam(req).getBytes(), shard); if (service == null) { logger.warn("router returned null as a destination service"); continue; } REQUEST requestToNode = serviceToRequest.get(service); if (requestToNode == null) { // TODO: Cloning is yucky per http://www.artima.com/intv/bloch13.htm requestToNode = (REQUEST) (((SenseiRequest) req).clone()); requestToNode = customizeRequest(requestToNode); requestToNode.setPartitions(new HashSet<Integer>()); serviceToRequest.put(service, requestToNode); } requestToNode.getPartitions().add(shard); } return executeRequestsInParallel(serviceToRequest, _timeout); } protected abstract String getMessageType(); @Override public void shutdown() { logger.info("shutting down broker..."); } @Override public long getTimeout() { return _timeout; } @Override public void setTimeout(long timeout) { this._timeout = timeout; } protected List<RESULT> executeRequestsInParallel( final Map<Service<REQUEST, RESULT>, REQUEST> serviceToRequest, long timeout) { long start = 
System.currentTimeMillis(); final List<Future<RESULT>> futures = new ArrayList<Future<RESULT>>(); for (Entry<Service<REQUEST, RESULT>, REQUEST> entry : serviceToRequest.entrySet()) { futures.add(entry.getKey().apply(entry.getValue()) .addEventListener(new FutureEventListener<RESULT>() { @Override public void onFailure(Throwable t) { logger.error("Failed to get response", t); } @Override public void onSuccess(RESULT result) { // do nothing as we wait for all results below } })); } Future<List<RESULT>> collected = Future.collect(futures); List<RESULT> results = collected.apply(Duration.apply(timeout, TimeUnit.MILLISECONDS)); logger.info(String.format("Getting responses from %d nodes took %dms.", results.size(), (System.currentTimeMillis() - start))); return results; } protected static Set<InetSocketAddress> getNodesAddresses( Map<Integer, List<InetSocketAddress>> clusterView) { Set<InetSocketAddress> nodes = new HashSet<InetSocketAddress>(); for (List<InetSocketAddress> inetSocketAddressList : clusterView.values()) { nodes.addAll(inetSocketAddressList); } return nodes; } }
Fix the bug "sending query to all nodes for no route param specified cases"
sensei-core/src/main/java/com/senseidb/search/node/AbstractConsistentHashBroker.java
Fix the bug "sending query to all nodes for no route param specified cases"
<ide><path>ensei-core/src/main/java/com/senseidb/search/node/AbstractConsistentHashBroker.java <ide> <ide> Map<Service<REQUEST, RESULT>, REQUEST> serviceToRequest = new HashMap<Service<REQUEST, RESULT>, REQUEST>(); <ide> <add> byte[] routeBytes = getRouteParam(req).getBytes(); <ide> for (Integer shard : shards) { <del> Service<REQUEST, RESULT> service = router.route(getRouteParam(req).getBytes(), shard); <del> <add> Service<REQUEST, RESULT> service = router.route(routeBytes, shard); <ide> if (service == null) { <ide> logger.warn("router returned null as a destination service"); <ide> continue;
Java
apache-2.0
802f614a45cba89a05db9002fbd9abe2e8d6f8f7
0
amith01994/intellij-community,ol-loginov/intellij-community,youdonghai/intellij-community,ThiagoGarciaAlves/intellij-community,da1z/intellij-community,ivan-fedorov/intellij-community,pwoodworth/intellij-community,signed/intellij-community,idea4bsd/idea4bsd,akosyakov/intellij-community,alphafoobar/intellij-community,izonder/intellij-community,ibinti/intellij-community,lucafavatella/intellij-community,idea4bsd/idea4bsd,pwoodworth/intellij-community,ernestp/consulo,apixandru/intellij-community,allotria/intellij-community,ivan-fedorov/intellij-community,slisson/intellij-community,retomerz/intellij-community,asedunov/intellij-community,MER-GROUP/intellij-community,vladmm/intellij-community,caot/intellij-community,petteyg/intellij-community,retomerz/intellij-community,ivan-fedorov/intellij-community,fitermay/intellij-community,adedayo/intellij-community,supersven/intellij-community,ThiagoGarciaAlves/intellij-community,ftomassetti/intellij-community,joewalnes/idea-community,allotria/intellij-community,robovm/robovm-studio,hurricup/intellij-community,Distrotech/intellij-community,ryano144/intellij-community,da1z/intellij-community,MER-GROUP/intellij-community,xfournet/intellij-community,izonder/intellij-community,asedunov/intellij-community,robovm/robovm-studio,TangHao1987/intellij-community,fnouama/intellij-community,blademainer/intellij-community,diorcety/intellij-community,jagguli/intellij-community,muntasirsyed/intellij-community,supersven/intellij-community,kool79/intellij-community,michaelgallacher/intellij-community,ol-loginov/intellij-community,dslomov/intellij-community,apixandru/intellij-community,Lekanich/intellij-community,kool79/intellij-community,ThiagoGarciaAlves/intellij-community,michaelgallacher/intellij-community,youdonghai/intellij-community,fengbaicanhe/intellij-community,muntasirsyed/intellij-community,kool79/intellij-community,da1z/intellij-community,clumsy/intellij-community,adedayo/intellij-community,Lekanich/intellij-community,FHannes/intellij-comm
unity,Lekanich/intellij-community,wreckJ/intellij-community,fitermay/intellij-community,clumsy/intellij-community,suncycheng/intellij-community,nicolargo/intellij-community,caot/intellij-community,amith01994/intellij-community,ibinti/intellij-community,petteyg/intellij-community,akosyakov/intellij-community,FHannes/intellij-community,supersven/intellij-community,muntasirsyed/intellij-community,SerCeMan/intellij-community,Lekanich/intellij-community,ol-loginov/intellij-community,ernestp/consulo,clumsy/intellij-community,tmpgit/intellij-community,ryano144/intellij-community,da1z/intellij-community,supersven/intellij-community,ahb0327/intellij-community,asedunov/intellij-community,apixandru/intellij-community,ahb0327/intellij-community,ivan-fedorov/intellij-community,clumsy/intellij-community,kdwink/intellij-community,asedunov/intellij-community,robovm/robovm-studio,kdwink/intellij-community,suncycheng/intellij-community,TangHao1987/intellij-community,MER-GROUP/intellij-community,nicolargo/intellij-community,nicolargo/intellij-community,retomerz/intellij-community,ThiagoGarciaAlves/intellij-community,michaelgallacher/intellij-community,blademainer/intellij-community,orekyuu/intellij-community,fengbaicanhe/intellij-community,kool79/intellij-community,da1z/intellij-community,clumsy/intellij-community,allotria/intellij-community,slisson/intellij-community,wreckJ/intellij-community,allotria/intellij-community,gnuhub/intellij-community,retomerz/intellij-community,youdonghai/intellij-community,orekyuu/intellij-community,TangHao1987/intellij-community,pwoodworth/intellij-community,holmes/intellij-community,amith01994/intellij-community,TangHao1987/intellij-community,TangHao1987/intellij-community,holmes/intellij-community,ol-loginov/intellij-community,blademainer/intellij-community,petteyg/intellij-community,vvv1559/intellij-community,joewalnes/idea-community,ThiagoGarciaAlves/intellij-community,ahb0327/intellij-community,signed/intellij-community,holmes/intellij-community,mu
ntasirsyed/intellij-community,dslomov/intellij-community,slisson/intellij-community,signed/intellij-community,wreckJ/intellij-community,ol-loginov/intellij-community,blademainer/intellij-community,ThiagoGarciaAlves/intellij-community,amith01994/intellij-community,MER-GROUP/intellij-community,ivan-fedorov/intellij-community,fnouama/intellij-community,jagguli/intellij-community,robovm/robovm-studio,ivan-fedorov/intellij-community,da1z/intellij-community,robovm/robovm-studio,FHannes/intellij-community,fnouama/intellij-community,asedunov/intellij-community,samthor/intellij-community,retomerz/intellij-community,amith01994/intellij-community,diorcety/intellij-community,ftomassetti/intellij-community,fengbaicanhe/intellij-community,lucafavatella/intellij-community,fitermay/intellij-community,salguarnieri/intellij-community,ryano144/intellij-community,signed/intellij-community,signed/intellij-community,xfournet/intellij-community,izonder/intellij-community,petteyg/intellij-community,alphafoobar/intellij-community,michaelgallacher/intellij-community,lucafavatella/intellij-community,vladmm/intellij-community,jagguli/intellij-community,akosyakov/intellij-community,xfournet/intellij-community,asedunov/intellij-community,supersven/intellij-community,ftomassetti/intellij-community,izonder/intellij-community,lucafavatella/intellij-community,slisson/intellij-community,michaelgallacher/intellij-community,gnuhub/intellij-community,MichaelNedzelsky/intellij-community,FHannes/intellij-community,apixandru/intellij-community,kool79/intellij-community,MichaelNedzelsky/intellij-community,caot/intellij-community,joewalnes/idea-community,salguarnieri/intellij-community,jagguli/intellij-community,fengbaicanhe/intellij-community,blademainer/intellij-community,gnuhub/intellij-community,petteyg/intellij-community,fitermay/intellij-community,vladmm/intellij-community,petteyg/intellij-community,MichaelNedzelsky/intellij-community,TangHao1987/intellij-community,holmes/intellij-community,nicolargo/i
ntellij-community,fnouama/intellij-community,clumsy/intellij-community,diorcety/intellij-community,tmpgit/intellij-community,SerCeMan/intellij-community,supersven/intellij-community,robovm/robovm-studio,izonder/intellij-community,wreckJ/intellij-community,slisson/intellij-community,xfournet/intellij-community,Lekanich/intellij-community,TangHao1987/intellij-community,allotria/intellij-community,signed/intellij-community,TangHao1987/intellij-community,MER-GROUP/intellij-community,kdwink/intellij-community,pwoodworth/intellij-community,Lekanich/intellij-community,Distrotech/intellij-community,retomerz/intellij-community,wreckJ/intellij-community,vladmm/intellij-community,asedunov/intellij-community,muntasirsyed/intellij-community,hurricup/intellij-community,orekyuu/intellij-community,ahb0327/intellij-community,vladmm/intellij-community,MER-GROUP/intellij-community,vvv1559/intellij-community,ahb0327/intellij-community,kool79/intellij-community,idea4bsd/idea4bsd,suncycheng/intellij-community,salguarnieri/intellij-community,dslomov/intellij-community,retomerz/intellij-community,kdwink/intellij-community,samthor/intellij-community,ThiagoGarciaAlves/intellij-community,joewalnes/idea-community,lucafavatella/intellij-community,idea4bsd/idea4bsd,ftomassetti/intellij-community,ibinti/intellij-community,MichaelNedzelsky/intellij-community,holmes/intellij-community,semonte/intellij-community,vvv1559/intellij-community,lucafavatella/intellij-community,MichaelNedzelsky/intellij-community,adedayo/intellij-community,allotria/intellij-community,blademainer/intellij-community,petteyg/intellij-community,diorcety/intellij-community,mglukhikh/intellij-community,Distrotech/intellij-community,ftomassetti/intellij-community,apixandru/intellij-community,kool79/intellij-community,blademainer/intellij-community,idea4bsd/idea4bsd,fnouama/intellij-community,signed/intellij-community,da1z/intellij-community,adedayo/intellij-community,mglukhikh/intellij-community,slisson/intellij-community,vvv1559
/intellij-community,retomerz/intellij-community,adedayo/intellij-community,samthor/intellij-community,lucafavatella/intellij-community,orekyuu/intellij-community,samthor/intellij-community,ftomassetti/intellij-community,caot/intellij-community,fnouama/intellij-community,michaelgallacher/intellij-community,amith01994/intellij-community,allotria/intellij-community,gnuhub/intellij-community,ibinti/intellij-community,gnuhub/intellij-community,xfournet/intellij-community,SerCeMan/intellij-community,apixandru/intellij-community,fitermay/intellij-community,apixandru/intellij-community,orekyuu/intellij-community,xfournet/intellij-community,apixandru/intellij-community,FHannes/intellij-community,MichaelNedzelsky/intellij-community,caot/intellij-community,FHannes/intellij-community,akosyakov/intellij-community,lucafavatella/intellij-community,fengbaicanhe/intellij-community,lucafavatella/intellij-community,Distrotech/intellij-community,kdwink/intellij-community,ol-loginov/intellij-community,jagguli/intellij-community,pwoodworth/intellij-community,ThiagoGarciaAlves/intellij-community,semonte/intellij-community,suncycheng/intellij-community,akosyakov/intellij-community,amith01994/intellij-community,diorcety/intellij-community,blademainer/intellij-community,vvv1559/intellij-community,supersven/intellij-community,robovm/robovm-studio,caot/intellij-community,ernestp/consulo,jagguli/intellij-community,adedayo/intellij-community,clumsy/intellij-community,ahb0327/intellij-community,fengbaicanhe/intellij-community,youdonghai/intellij-community,clumsy/intellij-community,izonder/intellij-community,tmpgit/intellij-community,suncycheng/intellij-community,lucafavatella/intellij-community,robovm/robovm-studio,fnouama/intellij-community,holmes/intellij-community,hurricup/intellij-community,ibinti/intellij-community,asedunov/intellij-community,pwoodworth/intellij-community,tmpgit/intellij-community,tmpgit/intellij-community,samthor/intellij-community,samthor/intellij-community,pwoodworth/inte
llij-community,TangHao1987/intellij-community,consulo/consulo,semonte/intellij-community,fnouama/intellij-community,hurricup/intellij-community,FHannes/intellij-community,fnouama/intellij-community,joewalnes/idea-community,fitermay/intellij-community,vladmm/intellij-community,izonder/intellij-community,ahb0327/intellij-community,diorcety/intellij-community,petteyg/intellij-community,ryano144/intellij-community,joewalnes/idea-community,SerCeMan/intellij-community,semonte/intellij-community,petteyg/intellij-community,nicolargo/intellij-community,alphafoobar/intellij-community,lucafavatella/intellij-community,SerCeMan/intellij-community,idea4bsd/idea4bsd,mglukhikh/intellij-community,vladmm/intellij-community,apixandru/intellij-community,jagguli/intellij-community,mglukhikh/intellij-community,izonder/intellij-community,ivan-fedorov/intellij-community,gnuhub/intellij-community,MER-GROUP/intellij-community,dslomov/intellij-community,youdonghai/intellij-community,samthor/intellij-community,amith01994/intellij-community,semonte/intellij-community,da1z/intellij-community,kool79/intellij-community,mglukhikh/intellij-community,pwoodworth/intellij-community,orekyuu/intellij-community,wreckJ/intellij-community,vvv1559/intellij-community,blademainer/intellij-community,SerCeMan/intellij-community,ol-loginov/intellij-community,samthor/intellij-community,orekyuu/intellij-community,kdwink/intellij-community,alphafoobar/intellij-community,signed/intellij-community,adedayo/intellij-community,ahb0327/intellij-community,tmpgit/intellij-community,alphafoobar/intellij-community,michaelgallacher/intellij-community,michaelgallacher/intellij-community,tmpgit/intellij-community,idea4bsd/idea4bsd,ibinti/intellij-community,MichaelNedzelsky/intellij-community,michaelgallacher/intellij-community,mglukhikh/intellij-community,Lekanich/intellij-community,joewalnes/idea-community,ftomassetti/intellij-community,adedayo/intellij-community,muntasirsyed/intellij-community,semonte/intellij-community,ol-log
inov/intellij-community,semonte/intellij-community,kdwink/intellij-community,Distrotech/intellij-community,blademainer/intellij-community,kdwink/intellij-community,TangHao1987/intellij-community,robovm/robovm-studio,semonte/intellij-community,Distrotech/intellij-community,kool79/intellij-community,SerCeMan/intellij-community,da1z/intellij-community,suncycheng/intellij-community,nicolargo/intellij-community,ryano144/intellij-community,salguarnieri/intellij-community,akosyakov/intellij-community,kool79/intellij-community,kool79/intellij-community,vladmm/intellij-community,allotria/intellij-community,apixandru/intellij-community,ol-loginov/intellij-community,wreckJ/intellij-community,jagguli/intellij-community,supersven/intellij-community,akosyakov/intellij-community,ahb0327/intellij-community,clumsy/intellij-community,fitermay/intellij-community,Distrotech/intellij-community,holmes/intellij-community,xfournet/intellij-community,fitermay/intellij-community,diorcety/intellij-community,ThiagoGarciaAlves/intellij-community,salguarnieri/intellij-community,fitermay/intellij-community,blademainer/intellij-community,michaelgallacher/intellij-community,retomerz/intellij-community,joewalnes/idea-community,xfournet/intellij-community,muntasirsyed/intellij-community,ernestp/consulo,tmpgit/intellij-community,muntasirsyed/intellij-community,FHannes/intellij-community,caot/intellij-community,supersven/intellij-community,youdonghai/intellij-community,robovm/robovm-studio,hurricup/intellij-community,asedunov/intellij-community,pwoodworth/intellij-community,tmpgit/intellij-community,slisson/intellij-community,fnouama/intellij-community,idea4bsd/idea4bsd,alphafoobar/intellij-community,muntasirsyed/intellij-community,semonte/intellij-community,idea4bsd/idea4bsd,diorcety/intellij-community,holmes/intellij-community,dslomov/intellij-community,semonte/intellij-community,vvv1559/intellij-community,retomerz/intellij-community,MER-GROUP/intellij-community,fengbaicanhe/intellij-community,ftomas
setti/intellij-community,asedunov/intellij-community,mglukhikh/intellij-community,nicolargo/intellij-community,wreckJ/intellij-community,fitermay/intellij-community,consulo/consulo,apixandru/intellij-community,signed/intellij-community,ryano144/intellij-community,signed/intellij-community,ThiagoGarciaAlves/intellij-community,ahb0327/intellij-community,Lekanich/intellij-community,kdwink/intellij-community,ryano144/intellij-community,akosyakov/intellij-community,suncycheng/intellij-community,ivan-fedorov/intellij-community,ibinti/intellij-community,caot/intellij-community,ol-loginov/intellij-community,da1z/intellij-community,FHannes/intellij-community,salguarnieri/intellij-community,SerCeMan/intellij-community,samthor/intellij-community,Lekanich/intellij-community,mglukhikh/intellij-community,salguarnieri/intellij-community,ol-loginov/intellij-community,slisson/intellij-community,dslomov/intellij-community,alphafoobar/intellij-community,joewalnes/idea-community,FHannes/intellij-community,apixandru/intellij-community,vladmm/intellij-community,alphafoobar/intellij-community,vvv1559/intellij-community,lucafavatella/intellij-community,ivan-fedorov/intellij-community,mglukhikh/intellij-community,izonder/intellij-community,mglukhikh/intellij-community,hurricup/intellij-community,kdwink/intellij-community,wreckJ/intellij-community,semonte/intellij-community,Lekanich/intellij-community,fitermay/intellij-community,SerCeMan/intellij-community,nicolargo/intellij-community,suncycheng/intellij-community,fengbaicanhe/intellij-community,izonder/intellij-community,ryano144/intellij-community,asedunov/intellij-community,xfournet/intellij-community,izonder/intellij-community,xfournet/intellij-community,Distrotech/intellij-community,clumsy/intellij-community,MichaelNedzelsky/intellij-community,diorcety/intellij-community,ftomassetti/intellij-community,vladmm/intellij-community,supersven/intellij-community,youdonghai/intellij-community,tmpgit/intellij-community,akosyakov/intellij-communi
ty,da1z/intellij-community,semonte/intellij-community,hurricup/intellij-community,xfournet/intellij-community,fengbaicanhe/intellij-community,suncycheng/intellij-community,blademainer/intellij-community,retomerz/intellij-community,muntasirsyed/intellij-community,tmpgit/intellij-community,pwoodworth/intellij-community,ryano144/intellij-community,adedayo/intellij-community,MER-GROUP/intellij-community,vvv1559/intellij-community,xfournet/intellij-community,ftomassetti/intellij-community,da1z/intellij-community,dslomov/intellij-community,jagguli/intellij-community,muntasirsyed/intellij-community,wreckJ/intellij-community,holmes/intellij-community,ibinti/intellij-community,kdwink/intellij-community,caot/intellij-community,gnuhub/intellij-community,jagguli/intellij-community,clumsy/intellij-community,youdonghai/intellij-community,vvv1559/intellij-community,allotria/intellij-community,gnuhub/intellij-community,ivan-fedorov/intellij-community,dslomov/intellij-community,suncycheng/intellij-community,ibinti/intellij-community,Distrotech/intellij-community,caot/intellij-community,caot/intellij-community,michaelgallacher/intellij-community,ernestp/consulo,SerCeMan/intellij-community,vladmm/intellij-community,ivan-fedorov/intellij-community,fnouama/intellij-community,ibinti/intellij-community,ivan-fedorov/intellij-community,akosyakov/intellij-community,caot/intellij-community,jagguli/intellij-community,allotria/intellij-community,alphafoobar/intellij-community,youdonghai/intellij-community,wreckJ/intellij-community,fengbaicanhe/intellij-community,ernestp/consulo,adedayo/intellij-community,MER-GROUP/intellij-community,robovm/robovm-studio,Distrotech/intellij-community,salguarnieri/intellij-community,ahb0327/intellij-community,apixandru/intellij-community,mglukhikh/intellij-community,hurricup/intellij-community,hurricup/intellij-community,tmpgit/intellij-community,ftomassetti/intellij-community,akosyakov/intellij-community,allotria/intellij-community,suncycheng/intellij-community,
gnuhub/intellij-community,TangHao1987/intellij-community,lucafavatella/intellij-community,fengbaicanhe/intellij-community,asedunov/intellij-community,amith01994/intellij-community,ibinti/intellij-community,orekyuu/intellij-community,slisson/intellij-community,pwoodworth/intellij-community,FHannes/intellij-community,kool79/intellij-community,supersven/intellij-community,michaelgallacher/intellij-community,ibinti/intellij-community,signed/intellij-community,akosyakov/intellij-community,samthor/intellij-community,dslomov/intellij-community,nicolargo/intellij-community,salguarnieri/intellij-community,samthor/intellij-community,alphafoobar/intellij-community,asedunov/intellij-community,clumsy/intellij-community,pwoodworth/intellij-community,nicolargo/intellij-community,apixandru/intellij-community,orekyuu/intellij-community,gnuhub/intellij-community,mglukhikh/intellij-community,dslomov/intellij-community,ibinti/intellij-community,kdwink/intellij-community,MER-GROUP/intellij-community,alphafoobar/intellij-community,youdonghai/intellij-community,slisson/intellij-community,xfournet/intellij-community,signed/intellij-community,ftomassetti/intellij-community,MichaelNedzelsky/intellij-community,hurricup/intellij-community,allotria/intellij-community,amith01994/intellij-community,MER-GROUP/intellij-community,ThiagoGarciaAlves/intellij-community,dslomov/intellij-community,holmes/intellij-community,alphafoobar/intellij-community,fitermay/intellij-community,petteyg/intellij-community,idea4bsd/idea4bsd,fengbaicanhe/intellij-community,vvv1559/intellij-community,ryano144/intellij-community,izonder/intellij-community,dslomov/intellij-community,adedayo/intellij-community,mglukhikh/intellij-community,salguarnieri/intellij-community,vladmm/intellij-community,hurricup/intellij-community,signed/intellij-community,semonte/intellij-community,suncycheng/intellij-community,TangHao1987/intellij-community,hurricup/intellij-community,idea4bsd/idea4bsd,amith01994/intellij-community,retomerz/intell
ij-community,idea4bsd/idea4bsd,Lekanich/intellij-community,orekyuu/intellij-community,hurricup/intellij-community,allotria/intellij-community,ol-loginov/intellij-community,ryano144/intellij-community,consulo/consulo,FHannes/intellij-community,consulo/consulo,salguarnieri/intellij-community,adedayo/intellij-community,fitermay/intellij-community,muntasirsyed/intellij-community,orekyuu/intellij-community,consulo/consulo,MichaelNedzelsky/intellij-community,gnuhub/intellij-community,nicolargo/intellij-community,vvv1559/intellij-community,slisson/intellij-community,retomerz/intellij-community,fnouama/intellij-community,ThiagoGarciaAlves/intellij-community,amith01994/intellij-community,salguarnieri/intellij-community,da1z/intellij-community,ahb0327/intellij-community,slisson/intellij-community,consulo/consulo,youdonghai/intellij-community,wreckJ/intellij-community,MichaelNedzelsky/intellij-community,robovm/robovm-studio,ryano144/intellij-community,orekyuu/intellij-community,gnuhub/intellij-community,diorcety/intellij-community,nicolargo/intellij-community,idea4bsd/idea4bsd,SerCeMan/intellij-community,holmes/intellij-community,Distrotech/intellij-community,petteyg/intellij-community,supersven/intellij-community,holmes/intellij-community,diorcety/intellij-community,samthor/intellij-community,MichaelNedzelsky/intellij-community,diorcety/intellij-community,youdonghai/intellij-community,vvv1559/intellij-community,FHannes/intellij-community,petteyg/intellij-community,Lekanich/intellij-community,jagguli/intellij-community,Distrotech/intellij-community,SerCeMan/intellij-community,youdonghai/intellij-community
/* * Copyright 2000-2009 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.psi.impl.source.codeStyle; import com.intellij.formatting.FormatTextRanges; import com.intellij.formatting.FormatterEx; import com.intellij.formatting.FormattingModel; import com.intellij.formatting.FormattingModelBuilder; import com.intellij.injected.editor.DocumentWindow; import com.intellij.lang.*; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.editor.*; import com.intellij.openapi.extensions.Extensions; import com.intellij.openapi.fileTypes.FileType; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.Computable; import com.intellij.openapi.util.TextRange; import com.intellij.psi.*; import com.intellij.psi.codeStyle.CodeStyleManager; import com.intellij.psi.codeStyle.CodeStyleSettings; import com.intellij.psi.codeStyle.CodeStyleSettingsManager; import com.intellij.psi.codeStyle.Indent; import com.intellij.psi.impl.CheckUtil; import com.intellij.psi.impl.source.PostprocessReformattingAspect; import com.intellij.psi.impl.source.SourceTreeToPsiMap; import com.intellij.psi.impl.source.tree.*; import com.intellij.psi.impl.source.tree.injected.InjectedLanguageUtil; import com.intellij.psi.util.PsiUtilBase; import com.intellij.util.CharTable; import com.intellij.util.IncorrectOperationException; import com.intellij.util.text.CharArrayUtil; import 
org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; public class CodeStyleManagerImpl extends CodeStyleManager { private static final Logger LOG = Logger.getInstance("#com.intellij.psi.impl.source.codeStyle.CodeStyleManagerImpl"); private final Project myProject; @NonNls private static final String DUMMY_IDENTIFIER = "xxx"; public CodeStyleManagerImpl(Project project) { myProject = project; } @NotNull public Project getProject() { return myProject; } @NotNull public PsiElement reformat(@NotNull PsiElement element) throws IncorrectOperationException { return reformat(element, false); } @NotNull public PsiElement reformat(@NotNull PsiElement element, boolean canChangeWhiteSpacesOnly) throws IncorrectOperationException { CheckUtil.checkWritable(element); if( !SourceTreeToPsiMap.hasTreeElement( element ) ) { return element; } ASTNode treeElement = SourceTreeToPsiMap.psiElementToTree(element); final PsiElement formatted = SourceTreeToPsiMap.treeElementToPsi(new CodeFormatterFacade(getSettings()).processElement(treeElement)); if (!canChangeWhiteSpacesOnly) { return postProcessElement(formatted); } else { return formatted; } } private PsiElement postProcessElement(final PsiElement formatted) { PsiElement result = formatted; for (PostFormatProcessor postFormatProcessor : Extensions.getExtensions(PostFormatProcessor.EP_NAME)) { result = postFormatProcessor.processElement(result, getSettings()); } return result; } private void postProcessText(final PsiFile file, final TextRange textRange) { TextRange currentRange = textRange; for (final PostFormatProcessor myPostFormatProcessor : Extensions.getExtensions(PostFormatProcessor.EP_NAME)) { currentRange = myPostFormatProcessor.processText(file, currentRange, getSettings()); } } public PsiElement reformatRange(@NotNull PsiElement element, int startOffset, int endOffset, boolean canChangeWhiteSpacesOnly) throws IncorrectOperationException { return 
reformatRangeImpl(element, startOffset, endOffset, canChangeWhiteSpacesOnly); } public PsiElement reformatRange(@NotNull PsiElement element, int startOffset, int endOffset) throws IncorrectOperationException { return reformatRangeImpl(element, startOffset, endOffset, false); } private static void transformAllChildren(final ASTNode file) { for (ASTNode child = file.getFirstChildNode(); child != null; child = child.getTreeNext()) { transformAllChildren(child); } } public void reformatText(@NotNull PsiFile file, int startOffset, int endOffset) throws IncorrectOperationException { ApplicationManager.getApplication().assertWriteAccessAllowed(); PsiDocumentManager.getInstance(getProject()).commitAllDocuments(); CheckUtil.checkWritable(file); if (!SourceTreeToPsiMap.hasTreeElement(file)) { return; } ASTNode treeElement = SourceTreeToPsiMap.psiElementToTree(file); transformAllChildren(treeElement); final CodeFormatterFacade codeFormatter = new CodeFormatterFacade(getSettings()); LOG.assertTrue(file.isValid()); final PsiElement start = findElementInTreeWithFormatterEnabled(file, startOffset); final PsiElement end = findElementInTreeWithFormatterEnabled(file, endOffset); if (start != null && !start.isValid()) { LOG.error("start=" + start + "; file=" + file); } if (end != null && !end.isValid()) { LOG.error("end=" + start + "; end=" + file); } Editor editor = PsiUtilBase.findEditor(file); // There is a possible case that cursor is located at the end of the line that contains only white spaces. For example: // public void foo() { // <caret> // } // Formatter removes such white spaces, i.e. keeps only line feed symbol. But we want to preserve caret position then. 
// So, we check if it should be preserved and restore it after formatting if necessary int visualColumnToRestore = -1; if (editor != null) { Document document = editor.getDocument(); int caretOffset = editor.getCaretModel().getOffset(); caretOffset = Math.max(Math.min(caretOffset, document.getTextLength() - 1), 0); CharSequence text = document.getCharsSequence(); int caretLine = document.getLineNumber(caretOffset); int lineStartOffset = document.getLineStartOffset(caretLine); boolean fixCaretPosition = true; for (int i = Math.min(caretOffset, text.length() - 1); i>= lineStartOffset; i--) { char c = text.charAt(i); if (c != ' ' && c != '\t' && c != '\n') { fixCaretPosition = false; break; } } if (fixCaretPosition) { visualColumnToRestore = editor.getCaretModel().getVisualPosition().column; } } boolean formatFromStart = startOffset == 0; boolean formatToEnd = endOffset == file.getTextLength(); final SmartPointerManager smartPointerManager = SmartPointerManager.getInstance(getProject()); final SmartPsiElementPointer startPointer = start == null ? null : smartPointerManager.createSmartPsiElementPointer(start); final SmartPsiElementPointer endPointer = end == null ? null : smartPointerManager.createSmartPsiElementPointer(end); codeFormatter.processText(file, new FormatTextRanges(new TextRange(startOffset, endOffset), true), true); final PsiElement startElement = startPointer == null ? null : startPointer.getElement(); final PsiElement endElement = endPointer == null ? null : endPointer.getElement(); if ((startElement != null || formatFromStart) && (endElement != null || formatToEnd)) { postProcessText(file, new TextRange(formatFromStart ? 0 : startElement.getTextRange().getStartOffset(), formatToEnd ? 
file.getTextLength() : endElement.getTextRange().getEndOffset())); } if (visualColumnToRestore < 0) { return; } CaretModel caretModel = editor.getCaretModel(); VisualPosition position = caretModel.getVisualPosition(); if (visualColumnToRestore != position.column) { caretModel.moveToVisualPosition(new VisualPosition(position.line, visualColumnToRestore)); } } private PsiElement reformatRangeImpl(final PsiElement element, final int startOffset, final int endOffset, boolean canChangeWhiteSpacesOnly) throws IncorrectOperationException { LOG.assertTrue(element.isValid()); CheckUtil.checkWritable(element); if( !SourceTreeToPsiMap.hasTreeElement( element ) ) { return element; } ASTNode treeElement = SourceTreeToPsiMap.psiElementToTree(element); final CodeFormatterFacade codeFormatter = new CodeFormatterFacade(getSettings()); final PsiElement formatted = SourceTreeToPsiMap.treeElementToPsi(codeFormatter.processRange(treeElement, startOffset, endOffset)); return canChangeWhiteSpacesOnly ? formatted : postProcessElement(formatted); } public void reformatNewlyAddedElement(@NotNull final ASTNode parent, @NotNull final ASTNode addedElement) throws IncorrectOperationException { LOG.assertTrue(addedElement.getTreeParent() == parent, "addedElement must be added to parent"); final PsiElement psiElement = parent.getPsi(); PsiFile containingFile = psiElement.getContainingFile(); final FileViewProvider fileViewProvider = containingFile.getViewProvider(); if (fileViewProvider instanceof MultiplePsiFilesPerDocumentFileViewProvider) { containingFile = fileViewProvider.getPsi(fileViewProvider.getBaseLanguage()); } TextRange textRange = addedElement.getTextRange(); final Document document = fileViewProvider.getDocument(); if (document instanceof DocumentWindow) { containingFile = InjectedLanguageUtil.getTopLevelFile(containingFile); textRange = ((DocumentWindow)document).injectedToHost(textRange); } final FormattingModelBuilder builder = 
LanguageFormatting.INSTANCE.forContext(containingFile); if (builder != null) { final FormattingModel model = builder.createModel(containingFile, getSettings()); FormatterEx.getInstanceEx().formatAroundRange(model, getSettings(), textRange, containingFile.getFileType()); } adjustLineIndent(containingFile, textRange); } public int adjustLineIndent(@NotNull final PsiFile file, final int offset) throws IncorrectOperationException { return PostprocessReformattingAspect.getInstance(file.getProject()).disablePostprocessFormattingInside(new Computable<Integer>() { public Integer compute() { return doAdjustLineIndentByOffset(file, offset); } }); } @Nullable static PsiElement findElementInTreeWithFormatterEnabled(final PsiFile file, final int offset) { final PsiElement bottomost = file.findElementAt(offset); if (bottomost != null && LanguageFormatting.INSTANCE.forContext(bottomost) != null){ return bottomost; } final Language fileLang = file.getLanguage(); if (fileLang instanceof CompositeLanguage) { return file.getViewProvider().findElementAt(offset, fileLang); } return bottomost; } public int adjustLineIndent(@NotNull final Document document, final int offset) { return PostprocessReformattingAspect.getInstance(getProject()).disablePostprocessFormattingInside(new Computable<Integer>() { public Integer compute() { final PsiDocumentManager documentManager = PsiDocumentManager.getInstance(myProject); documentManager.commitDocument(document); PsiFile file = documentManager.getPsiFile(document); if (file == null) return offset; return doAdjustLineIndentByOffset(file, offset); } }); } private int doAdjustLineIndentByOffset(@NotNull PsiFile file, int offset) { return new CodeStyleManagerRunnable<Integer>(this) { @Override protected Integer doPerform(int offset, TextRange range) { return FormatterEx.getInstanceEx().adjustLineIndent(myModel, mySettings, myIndentOptions, offset, mySignificantRange); } @Override protected Integer computeValueInsidePlainComment(PsiFile file, int 
offset, Integer defaultValue) { return CharArrayUtil.shiftForward(file.getViewProvider().getContents(), offset, " \t"); } @Override protected Integer adjustResultForInjected(Integer result, DocumentWindow documentWindow) { return documentWindow.hostToInjected(result); } }.perform(file, offset, null, offset); } public void adjustLineIndent(@NotNull PsiFile file, TextRange rangeToAdjust) throws IncorrectOperationException { new CodeStyleManagerRunnable<Object>(this) { @Override protected Object doPerform(int offset, TextRange range) { FormatterEx.getInstanceEx().adjustLineIndentsForRange(myModel, mySettings, myIndentOptions, range); return null; } }.perform(file, -1, rangeToAdjust, null); } @Nullable public String getLineIndent(@NotNull PsiFile file, int offset) { return new CodeStyleManagerRunnable<String>(this) { @Override protected boolean useDocumentBaseFormattingModel() { return false; } @Override protected String doPerform(int offset, TextRange range) { return FormatterEx.getInstanceEx().getLineIndent(myModel, mySettings, myIndentOptions, offset, mySignificantRange); } }.perform(file, offset, null, null); } @Nullable public String getLineIndent(@NotNull Editor editor) { Document doc = editor.getDocument(); int offset = editor.getCaretModel().getOffset(); if (offset >= doc.getTextLength()) { return ""; } PsiFile file = PsiDocumentManager.getInstance(myProject).getPsiFile(doc); if (file == null) return ""; return getLineIndent(file, offset); } public boolean isLineToBeIndented(@NotNull PsiFile file, int offset) { if (!SourceTreeToPsiMap.hasTreeElement(file)) { return false; } IndentHelper indentHelper = HelperFactory.createHelper(file.getFileType(), myProject); CharSequence chars = file.getViewProvider().getContents(); int start = CharArrayUtil.shiftBackward(chars, offset - 1, " \t"); if (start > 0 && chars.charAt(start) != '\n' && chars.charAt(start) != '\r') { return false; } int end = CharArrayUtil.shiftForward(chars, offset, " \t"); if (end >= chars.length()) 
{ return false; } ASTNode element = SourceTreeToPsiMap.psiElementToTree(findElementInTreeWithFormatterEnabled(file, end)); if (element == null) { return false; } if (element.getElementType() == TokenType.WHITE_SPACE) { return false; } if (element.getElementType() == PlainTextTokenTypes.PLAIN_TEXT) { return false; } /* if( element.getElementType() instanceof IJspElementType ) { return false; } */ if (getSettings().KEEP_FIRST_COLUMN_COMMENT && isCommentToken(element)) { if (indentHelper.getIndent(element, true) == 0) { return false; } } return true; } private static boolean isCommentToken(final ASTNode element) { final Language language = element.getElementType().getLanguage(); final Commenter commenter = LanguageCommenters.INSTANCE.forLanguage(language); if (commenter instanceof CodeDocumentationAwareCommenter) { final CodeDocumentationAwareCommenter documentationAwareCommenter = (CodeDocumentationAwareCommenter)commenter; return element.getElementType() == documentationAwareCommenter.getBlockCommentTokenType() || element.getElementType() == documentationAwareCommenter.getLineCommentTokenType(); } return false; } private static boolean isWhiteSpaceSymbol(char c) { return c == ' ' || c == '\t' || c == '\n'; } /** * Formatter trims line that contains white spaces symbols only, however, there is a possible case that we want * to preserve them for particular line (e.g. for live template that defines blank line that contains $END$ marker). * <p/> * Current approach is to do the following: * <pre> * <ol> * <li>Insert dummy text at the end of the blank line which white space symbols should be preserved;</li> * <li>Perform formatting;</li> * <li>Remove dummy text;</li> * </ol> * </pre> * <p/> * This method inserts that dummy text if necessary (if target line contains white space symbols only). 
* <p/> * Please note that it tries to do that via PSI at first (checks if given offset points to * {@link TokenType#WHITE_SPACE white space element} and inserts dummy text as dedicated element if necessary) and, * in case of the negative answer, tries to perform the examination considering document just as a sequence of characters * and assuming that white space symbols are white spaces, tabulations and line feeds. The rationale for such an approach is: * <pre> * <ul> * <li> * there is a possible case that target language considers symbols over than white spaces, tabulations and line feeds * to be white spaces and the answer lays at PSI structure of the file; * </li> * <li> * dummy text inserted during PSI-based processing has {@link TokenType#NEW_LINE_INDENT special type} that may be treated * specifically during formatting; * </li> * </ul> * </pre> * <p/> * <b>Note:</b> it's expected that the whole white space region that contains given offset is processed in a way that all * {@link RangeMarker range markers} registered for the given offset are expanded to the whole white space region. * E.g. there is a possible case that particular range marker serves for defining formatting range, hence, its start/end offsets * are updated correspondingly after current method call and whole white space region is reformatted. 
* * @param document target document * @param offset offset that defines end boundary of the target line text fragment (start boundary is the first line's symbol) * @return text range that points to the newly inserted dummy text if any; <code>null</code> otherwise */ @Nullable public static TextRange insertNewLineIndentMarker(@NotNull PsiFile file, @NotNull Document document, int offset) throws IncorrectOperationException { TextRange result = insertNewLineIndentMarker(file, offset); if (result == null) { result = insertNewLineIndentMarker(document, offset); } return result; } @Nullable private static TextRange insertNewLineIndentMarker(@NotNull Document document, final int offset) { CharSequence text = document.getCharsSequence(); if (offset < 0 || offset >= text.length() || !isWhiteSpaceSymbol(text.charAt(offset))) { return null; } int start = offset; for (int i = offset - 1; i >= 0; i--) { if (!isWhiteSpaceSymbol(text.charAt(i))) { break; } start = i; } int end = offset; for (; end < text.length(); end++) { if (!isWhiteSpaceSymbol(text.charAt(end))) { break; } } StringBuilder buffer = new StringBuilder(); buffer.append(text.subSequence(start, end)); // Modify the document in order to expand range markers pointing to the given offset to the whole white space range. 
document.deleteString(start, end); document.insertString(start, buffer); document.insertString(offset, DUMMY_IDENTIFIER); return new TextRange(offset, offset + DUMMY_IDENTIFIER.length()); } @Nullable private static TextRange insertNewLineIndentMarker(@NotNull PsiFile file, int offset) throws IncorrectOperationException { CheckUtil.checkWritable(file); final CharTable charTable = ((FileElement)SourceTreeToPsiMap.psiElementToTree(file)).getCharTable(); PsiElement elementAt = findElementInTreeWithFormatterEnabled(file, offset); if( elementAt == null ) { return null; } ASTNode element = SourceTreeToPsiMap.psiElementToTree(elementAt); ASTNode parent = element.getTreeParent(); int elementStart = element.getTextRange().getStartOffset(); if (element.getElementType() != TokenType.WHITE_SPACE) { /* if (elementStart < offset) return null; Element marker = Factory.createLeafElement(ElementType.NEW_LINE_INDENT, "###".toCharArray(), 0, "###".length()); ChangeUtil.addChild(parent, marker, element); return marker; */ return null; } ASTNode space1 = splitSpaceElement((TreeElement)element, offset - elementStart, charTable); ASTNode marker = Factory.createSingleLeafElement(TokenType.NEW_LINE_INDENT, DUMMY_IDENTIFIER, charTable, file.getManager()); parent.addChild(marker, space1.getTreeNext()); PsiElement psiElement = SourceTreeToPsiMap.treeElementToPsi(marker); return psiElement == null ? 
null : psiElement.getTextRange(); } public Indent getIndent(String text, FileType fileType) { int indent = HelperFactory.createHelper(fileType, myProject).getIndent(text, true); int indenLevel = indent / IndentHelper.INDENT_FACTOR; int spaceCount = indent - indenLevel * IndentHelper.INDENT_FACTOR; return new IndentImpl(getSettings(), indenLevel, spaceCount, fileType); } public String fillIndent(Indent indent, FileType fileType) { IndentImpl indent1 = (IndentImpl)indent; int indentLevel = indent1.getIndentLevel(); int spaceCount = indent1.getSpaceCount(); if (indentLevel < 0) { spaceCount += indentLevel * getSettings().getIndentSize(fileType); indentLevel = 0; if (spaceCount < 0) { spaceCount = 0; } } else { if (spaceCount < 0) { int v = (-spaceCount + getSettings().getIndentSize(fileType) - 1) / getSettings().getIndentSize(fileType); indentLevel -= v; spaceCount += v * getSettings().getIndentSize(fileType); if (indentLevel < 0) { indentLevel = 0; } } } return HelperFactory.createHelper(fileType, myProject).fillIndent(indentLevel * IndentHelper.INDENT_FACTOR + spaceCount); } public Indent zeroIndent() { return new IndentImpl(getSettings(), 0, 0, null); } private static ASTNode splitSpaceElement(TreeElement space, int offset, CharTable charTable) { LOG.assertTrue(space.getElementType() == TokenType.WHITE_SPACE); CharSequence chars = space.getChars(); LeafElement space1 = Factory.createSingleLeafElement(TokenType.WHITE_SPACE, chars, 0, offset, charTable, SharedImplUtil.getManagerByTree(space)); LeafElement space2 = Factory.createSingleLeafElement(TokenType.WHITE_SPACE, chars, offset, chars.length(), charTable, SharedImplUtil.getManagerByTree(space)); ASTNode parent = space.getTreeParent(); parent.replaceChild(space, space1); parent.addChild(space2, space1.getTreeNext()); return space1; } private CodeStyleSettings getSettings() { return CodeStyleSettingsManager.getSettings(myProject); } }
platform/lang-impl/src/com/intellij/psi/impl/source/codeStyle/CodeStyleManagerImpl.java
/* * Copyright 2000-2009 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.psi.impl.source.codeStyle; import com.intellij.formatting.FormatTextRanges; import com.intellij.formatting.FormatterEx; import com.intellij.formatting.FormattingModel; import com.intellij.formatting.FormattingModelBuilder; import com.intellij.injected.editor.DocumentWindow; import com.intellij.lang.*; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.editor.*; import com.intellij.openapi.extensions.Extensions; import com.intellij.openapi.fileTypes.FileType; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.Computable; import com.intellij.openapi.util.TextRange; import com.intellij.psi.*; import com.intellij.psi.codeStyle.CodeStyleManager; import com.intellij.psi.codeStyle.CodeStyleSettings; import com.intellij.psi.codeStyle.CodeStyleSettingsManager; import com.intellij.psi.codeStyle.Indent; import com.intellij.psi.impl.CheckUtil; import com.intellij.psi.impl.source.PostprocessReformattingAspect; import com.intellij.psi.impl.source.SourceTreeToPsiMap; import com.intellij.psi.impl.source.tree.*; import com.intellij.psi.impl.source.tree.injected.InjectedLanguageUtil; import com.intellij.psi.util.PsiUtilBase; import com.intellij.util.CharTable; import com.intellij.util.IncorrectOperationException; import com.intellij.util.text.CharArrayUtil; import 
org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; public class CodeStyleManagerImpl extends CodeStyleManager { private static final Logger LOG = Logger.getInstance("#com.intellij.psi.impl.source.codeStyle.CodeStyleManagerImpl"); private final Project myProject; @NonNls private static final String DUMMY_IDENTIFIER = "xxx"; public CodeStyleManagerImpl(Project project) { myProject = project; } @NotNull public Project getProject() { return myProject; } @NotNull public PsiElement reformat(@NotNull PsiElement element) throws IncorrectOperationException { return reformat(element, false); } @NotNull public PsiElement reformat(@NotNull PsiElement element, boolean canChangeWhiteSpacesOnly) throws IncorrectOperationException { CheckUtil.checkWritable(element); if( !SourceTreeToPsiMap.hasTreeElement( element ) ) { return element; } ASTNode treeElement = SourceTreeToPsiMap.psiElementToTree(element); final PsiElement formatted = SourceTreeToPsiMap.treeElementToPsi(new CodeFormatterFacade(getSettings()).processElement(treeElement)); if (!canChangeWhiteSpacesOnly) { return postProcessElement(formatted); } else { return formatted; } } private PsiElement postProcessElement(final PsiElement formatted) { PsiElement result = formatted; for (PostFormatProcessor postFormatProcessor : Extensions.getExtensions(PostFormatProcessor.EP_NAME)) { result = postFormatProcessor.processElement(result, getSettings()); } return result; } private void postProcessText(final PsiFile file, final TextRange textRange) { TextRange currentRange = textRange; for (final PostFormatProcessor myPostFormatProcessor : Extensions.getExtensions(PostFormatProcessor.EP_NAME)) { currentRange = myPostFormatProcessor.processText(file, currentRange, getSettings()); } } public PsiElement reformatRange(@NotNull PsiElement element, int startOffset, int endOffset, boolean canChangeWhiteSpacesOnly) throws IncorrectOperationException { return 
reformatRangeImpl(element, startOffset, endOffset, canChangeWhiteSpacesOnly); } public PsiElement reformatRange(@NotNull PsiElement element, int startOffset, int endOffset) throws IncorrectOperationException { return reformatRangeImpl(element, startOffset, endOffset, false); } private static void transformAllChildren(final ASTNode file) { for (ASTNode child = file.getFirstChildNode(); child != null; child = child.getTreeNext()) { transformAllChildren(child); } } public void reformatText(@NotNull PsiFile file, int startOffset, int endOffset) throws IncorrectOperationException { ApplicationManager.getApplication().assertWriteAccessAllowed(); PsiDocumentManager.getInstance(getProject()).commitAllDocuments(); CheckUtil.checkWritable(file); if (!SourceTreeToPsiMap.hasTreeElement(file)) { return; } ASTNode treeElement = SourceTreeToPsiMap.psiElementToTree(file); transformAllChildren(treeElement); final CodeFormatterFacade codeFormatter = new CodeFormatterFacade(getSettings()); LOG.assertTrue(file.isValid()); final PsiElement start = findElementInTreeWithFormatterEnabled(file, startOffset); final PsiElement end = findElementInTreeWithFormatterEnabled(file, endOffset); if (start != null && !start.isValid()) { LOG.error("start=" + start + "; file=" + file); } if (end != null && !end.isValid()) { LOG.error("end=" + start + "; end=" + file); } Editor editor = PsiUtilBase.findEditor(file); // There is a possible case that cursor is located at the end of the line that contains only white spaces. For example: // public void foo() { // <caret> // } // Formatter removes such white spaces, i.e. keeps only line feed symbol. But we want to preserve caret position then. 
// So, we check if it should be preserved and restore it after formatting if necessary int visualColumnToRestore = -1; if (editor != null) { Document document = editor.getDocument(); int caretOffset = editor.getCaretModel().getOffset(); caretOffset = Math.max(Math.min(caretOffset, document.getTextLength() - 1), 0); CharSequence text = document.getCharsSequence(); int caretLine = document.getLineNumber(caretOffset); int lineStartOffset = document.getLineStartOffset(caretLine); boolean fixCaretPosition = true; for (int i = caretOffset; i>= lineStartOffset; i--) { char c = text.charAt(i); if (c != ' ' && c != '\t' && c != '\n') { fixCaretPosition = false; break; } } if (fixCaretPosition) { visualColumnToRestore = editor.getCaretModel().getVisualPosition().column; } } boolean formatFromStart = startOffset == 0; boolean formatToEnd = endOffset == file.getTextLength(); final SmartPointerManager smartPointerManager = SmartPointerManager.getInstance(getProject()); final SmartPsiElementPointer startPointer = start == null ? null : smartPointerManager.createSmartPsiElementPointer(start); final SmartPsiElementPointer endPointer = end == null ? null : smartPointerManager.createSmartPsiElementPointer(end); codeFormatter.processText(file, new FormatTextRanges(new TextRange(startOffset, endOffset), true), true); final PsiElement startElement = startPointer == null ? null : startPointer.getElement(); final PsiElement endElement = endPointer == null ? null : endPointer.getElement(); if ((startElement != null || formatFromStart) && (endElement != null || formatToEnd)) { postProcessText(file, new TextRange(formatFromStart ? 0 : startElement.getTextRange().getStartOffset(), formatToEnd ? 
file.getTextLength() : endElement.getTextRange().getEndOffset())); } if (visualColumnToRestore < 0) { return; } CaretModel caretModel = editor.getCaretModel(); VisualPosition position = caretModel.getVisualPosition(); if (visualColumnToRestore != position.column) { caretModel.moveToVisualPosition(new VisualPosition(position.line, visualColumnToRestore)); } } private PsiElement reformatRangeImpl(final PsiElement element, final int startOffset, final int endOffset, boolean canChangeWhiteSpacesOnly) throws IncorrectOperationException { LOG.assertTrue(element.isValid()); CheckUtil.checkWritable(element); if( !SourceTreeToPsiMap.hasTreeElement( element ) ) { return element; } ASTNode treeElement = SourceTreeToPsiMap.psiElementToTree(element); final CodeFormatterFacade codeFormatter = new CodeFormatterFacade(getSettings()); final PsiElement formatted = SourceTreeToPsiMap.treeElementToPsi(codeFormatter.processRange(treeElement, startOffset, endOffset)); return canChangeWhiteSpacesOnly ? formatted : postProcessElement(formatted); } public void reformatNewlyAddedElement(@NotNull final ASTNode parent, @NotNull final ASTNode addedElement) throws IncorrectOperationException { LOG.assertTrue(addedElement.getTreeParent() == parent, "addedElement must be added to parent"); final PsiElement psiElement = parent.getPsi(); PsiFile containingFile = psiElement.getContainingFile(); final FileViewProvider fileViewProvider = containingFile.getViewProvider(); if (fileViewProvider instanceof MultiplePsiFilesPerDocumentFileViewProvider) { containingFile = fileViewProvider.getPsi(fileViewProvider.getBaseLanguage()); } TextRange textRange = addedElement.getTextRange(); final Document document = fileViewProvider.getDocument(); if (document instanceof DocumentWindow) { containingFile = InjectedLanguageUtil.getTopLevelFile(containingFile); textRange = ((DocumentWindow)document).injectedToHost(textRange); } final FormattingModelBuilder builder = 
LanguageFormatting.INSTANCE.forContext(containingFile); if (builder != null) { final FormattingModel model = builder.createModel(containingFile, getSettings()); FormatterEx.getInstanceEx().formatAroundRange(model, getSettings(), textRange, containingFile.getFileType()); } adjustLineIndent(containingFile, textRange); } public int adjustLineIndent(@NotNull final PsiFile file, final int offset) throws IncorrectOperationException { return PostprocessReformattingAspect.getInstance(file.getProject()).disablePostprocessFormattingInside(new Computable<Integer>() { public Integer compute() { return doAdjustLineIndentByOffset(file, offset); } }); } @Nullable static PsiElement findElementInTreeWithFormatterEnabled(final PsiFile file, final int offset) { final PsiElement bottomost = file.findElementAt(offset); if (bottomost != null && LanguageFormatting.INSTANCE.forContext(bottomost) != null){ return bottomost; } final Language fileLang = file.getLanguage(); if (fileLang instanceof CompositeLanguage) { return file.getViewProvider().findElementAt(offset, fileLang); } return bottomost; } public int adjustLineIndent(@NotNull final Document document, final int offset) { return PostprocessReformattingAspect.getInstance(getProject()).disablePostprocessFormattingInside(new Computable<Integer>() { public Integer compute() { final PsiDocumentManager documentManager = PsiDocumentManager.getInstance(myProject); documentManager.commitDocument(document); PsiFile file = documentManager.getPsiFile(document); if (file == null) return offset; return doAdjustLineIndentByOffset(file, offset); } }); } private int doAdjustLineIndentByOffset(@NotNull PsiFile file, int offset) { return new CodeStyleManagerRunnable<Integer>(this) { @Override protected Integer doPerform(int offset, TextRange range) { return FormatterEx.getInstanceEx().adjustLineIndent(myModel, mySettings, myIndentOptions, offset, mySignificantRange); } @Override protected Integer computeValueInsidePlainComment(PsiFile file, int 
offset, Integer defaultValue) { return CharArrayUtil.shiftForward(file.getViewProvider().getContents(), offset, " \t"); } @Override protected Integer adjustResultForInjected(Integer result, DocumentWindow documentWindow) { return documentWindow.hostToInjected(result); } }.perform(file, offset, null, offset); } public void adjustLineIndent(@NotNull PsiFile file, TextRange rangeToAdjust) throws IncorrectOperationException { new CodeStyleManagerRunnable<Object>(this) { @Override protected Object doPerform(int offset, TextRange range) { FormatterEx.getInstanceEx().adjustLineIndentsForRange(myModel, mySettings, myIndentOptions, range); return null; } }.perform(file, -1, rangeToAdjust, null); } @Nullable public String getLineIndent(@NotNull PsiFile file, int offset) { return new CodeStyleManagerRunnable<String>(this) { @Override protected boolean useDocumentBaseFormattingModel() { return false; } @Override protected String doPerform(int offset, TextRange range) { return FormatterEx.getInstanceEx().getLineIndent(myModel, mySettings, myIndentOptions, offset, mySignificantRange); } }.perform(file, offset, null, null); } @Nullable public String getLineIndent(@NotNull Editor editor) { Document doc = editor.getDocument(); int offset = editor.getCaretModel().getOffset(); if (offset >= doc.getTextLength()) { return ""; } PsiFile file = PsiDocumentManager.getInstance(myProject).getPsiFile(doc); if (file == null) return ""; return getLineIndent(file, offset); } public boolean isLineToBeIndented(@NotNull PsiFile file, int offset) { if (!SourceTreeToPsiMap.hasTreeElement(file)) { return false; } IndentHelper indentHelper = HelperFactory.createHelper(file.getFileType(), myProject); CharSequence chars = file.getViewProvider().getContents(); int start = CharArrayUtil.shiftBackward(chars, offset - 1, " \t"); if (start > 0 && chars.charAt(start) != '\n' && chars.charAt(start) != '\r') { return false; } int end = CharArrayUtil.shiftForward(chars, offset, " \t"); if (end >= chars.length()) 
{ return false; } ASTNode element = SourceTreeToPsiMap.psiElementToTree(findElementInTreeWithFormatterEnabled(file, end)); if (element == null) { return false; } if (element.getElementType() == TokenType.WHITE_SPACE) { return false; } if (element.getElementType() == PlainTextTokenTypes.PLAIN_TEXT) { return false; } /* if( element.getElementType() instanceof IJspElementType ) { return false; } */ if (getSettings().KEEP_FIRST_COLUMN_COMMENT && isCommentToken(element)) { if (indentHelper.getIndent(element, true) == 0) { return false; } } return true; } private static boolean isCommentToken(final ASTNode element) { final Language language = element.getElementType().getLanguage(); final Commenter commenter = LanguageCommenters.INSTANCE.forLanguage(language); if (commenter instanceof CodeDocumentationAwareCommenter) { final CodeDocumentationAwareCommenter documentationAwareCommenter = (CodeDocumentationAwareCommenter)commenter; return element.getElementType() == documentationAwareCommenter.getBlockCommentTokenType() || element.getElementType() == documentationAwareCommenter.getLineCommentTokenType(); } return false; } private static boolean isWhiteSpaceSymbol(char c) { return c == ' ' || c == '\t' || c == '\n'; } /** * Formatter trims line that contains white spaces symbols only, however, there is a possible case that we want * to preserve them for particular line (e.g. for live template that defines blank line that contains $END$ marker). * <p/> * Current approach is to do the following: * <pre> * <ol> * <li>Insert dummy text at the end of the blank line which white space symbols should be preserved;</li> * <li>Perform formatting;</li> * <li>Remove dummy text;</li> * </ol> * </pre> * <p/> * This method inserts that dummy text if necessary (if target line contains white space symbols only). 
* <p/> * Please note that it tries to do that via PSI at first (checks if given offset points to * {@link TokenType#WHITE_SPACE white space element} and inserts dummy text as dedicated element if necessary) and, * in case of the negative answer, tries to perform the examination considering document just as a sequence of characters * and assuming that white space symbols are white spaces, tabulations and line feeds. The rationale for such an approach is: * <pre> * <ul> * <li> * there is a possible case that target language considers symbols over than white spaces, tabulations and line feeds * to be white spaces and the answer lays at PSI structure of the file; * </li> * <li> * dummy text inserted during PSI-based processing has {@link TokenType#NEW_LINE_INDENT special type} that may be treated * specifically during formatting; * </li> * </ul> * </pre> * <p/> * <b>Note:</b> it's expected that the whole white space region that contains given offset is processed in a way that all * {@link RangeMarker range markers} registered for the given offset are expanded to the whole white space region. * E.g. there is a possible case that particular range marker serves for defining formatting range, hence, its start/end offsets * are updated correspondingly after current method call and whole white space region is reformatted. 
* * @param document target document * @param offset offset that defines end boundary of the target line text fragment (start boundary is the first line's symbol) * @return text range that points to the newly inserted dummy text if any; <code>null</code> otherwise */ @Nullable public static TextRange insertNewLineIndentMarker(@NotNull PsiFile file, @NotNull Document document, int offset) throws IncorrectOperationException { TextRange result = insertNewLineIndentMarker(file, offset); if (result == null) { result = insertNewLineIndentMarker(document, offset); } return result; } @Nullable private static TextRange insertNewLineIndentMarker(@NotNull Document document, final int offset) { CharSequence text = document.getCharsSequence(); if (offset < 0 || offset >= text.length() || !isWhiteSpaceSymbol(text.charAt(offset))) { return null; } int start = offset; for (int i = offset - 1; i >= 0; i--) { if (!isWhiteSpaceSymbol(text.charAt(i))) { break; } start = i; } int end = offset; for (; end < text.length(); end++) { if (!isWhiteSpaceSymbol(text.charAt(end))) { break; } } StringBuilder buffer = new StringBuilder(); buffer.append(text.subSequence(start, end)); // Modify the document in order to expand range markers pointing to the given offset to the whole white space range. 
document.deleteString(start, end); document.insertString(start, buffer); document.insertString(offset, DUMMY_IDENTIFIER); return new TextRange(offset, offset + DUMMY_IDENTIFIER.length()); } @Nullable private static TextRange insertNewLineIndentMarker(@NotNull PsiFile file, int offset) throws IncorrectOperationException { CheckUtil.checkWritable(file); final CharTable charTable = ((FileElement)SourceTreeToPsiMap.psiElementToTree(file)).getCharTable(); PsiElement elementAt = findElementInTreeWithFormatterEnabled(file, offset); if( elementAt == null ) { return null; } ASTNode element = SourceTreeToPsiMap.psiElementToTree(elementAt); ASTNode parent = element.getTreeParent(); int elementStart = element.getTextRange().getStartOffset(); if (element.getElementType() != TokenType.WHITE_SPACE) { /* if (elementStart < offset) return null; Element marker = Factory.createLeafElement(ElementType.NEW_LINE_INDENT, "###".toCharArray(), 0, "###".length()); ChangeUtil.addChild(parent, marker, element); return marker; */ return null; } ASTNode space1 = splitSpaceElement((TreeElement)element, offset - elementStart, charTable); ASTNode marker = Factory.createSingleLeafElement(TokenType.NEW_LINE_INDENT, DUMMY_IDENTIFIER, charTable, file.getManager()); parent.addChild(marker, space1.getTreeNext()); PsiElement psiElement = SourceTreeToPsiMap.treeElementToPsi(marker); return psiElement == null ? 
null : psiElement.getTextRange(); } public Indent getIndent(String text, FileType fileType) { int indent = HelperFactory.createHelper(fileType, myProject).getIndent(text, true); int indenLevel = indent / IndentHelper.INDENT_FACTOR; int spaceCount = indent - indenLevel * IndentHelper.INDENT_FACTOR; return new IndentImpl(getSettings(), indenLevel, spaceCount, fileType); } public String fillIndent(Indent indent, FileType fileType) { IndentImpl indent1 = (IndentImpl)indent; int indentLevel = indent1.getIndentLevel(); int spaceCount = indent1.getSpaceCount(); if (indentLevel < 0) { spaceCount += indentLevel * getSettings().getIndentSize(fileType); indentLevel = 0; if (spaceCount < 0) { spaceCount = 0; } } else { if (spaceCount < 0) { int v = (-spaceCount + getSettings().getIndentSize(fileType) - 1) / getSettings().getIndentSize(fileType); indentLevel -= v; spaceCount += v * getSettings().getIndentSize(fileType); if (indentLevel < 0) { indentLevel = 0; } } } return HelperFactory.createHelper(fileType, myProject).fillIndent(indentLevel * IndentHelper.INDENT_FACTOR + spaceCount); } public Indent zeroIndent() { return new IndentImpl(getSettings(), 0, 0, null); } private static ASTNode splitSpaceElement(TreeElement space, int offset, CharTable charTable) { LOG.assertTrue(space.getElementType() == TokenType.WHITE_SPACE); CharSequence chars = space.getChars(); LeafElement space1 = Factory.createSingleLeafElement(TokenType.WHITE_SPACE, chars, 0, offset, charTable, SharedImplUtil.getManagerByTree(space)); LeafElement space2 = Factory.createSingleLeafElement(TokenType.WHITE_SPACE, chars, offset, chars.length(), charTable, SharedImplUtil.getManagerByTree(space)); ASTNode parent = space.getTreeParent(); parent.replaceChild(space, space1); parent.addChild(space2, space1.getTreeNext()); return space1; } private CodeStyleSettings getSettings() { return CodeStyleSettingsManager.getSettings(myProject); } }
EA-23891 - IOOBE: CodeStyleManagerImpl.reformatText
platform/lang-impl/src/com/intellij/psi/impl/source/codeStyle/CodeStyleManagerImpl.java
EA-23891 - IOOBE: CodeStyleManagerImpl.reformatText
<ide><path>latform/lang-impl/src/com/intellij/psi/impl/source/codeStyle/CodeStyleManagerImpl.java <ide> int caretLine = document.getLineNumber(caretOffset); <ide> int lineStartOffset = document.getLineStartOffset(caretLine); <ide> boolean fixCaretPosition = true; <del> for (int i = caretOffset; i>= lineStartOffset; i--) { <add> for (int i = Math.min(caretOffset, text.length() - 1); i>= lineStartOffset; i--) { <ide> char c = text.charAt(i); <ide> if (c != ' ' && c != '\t' && c != '\n') { <ide> fixCaretPosition = false;
Java
apache-2.0
8fcb55a915d0560675bc7f740f845f6be719a86f
0
Pushpalanka/carbon-identity,Pushpalanka/carbon-identity,Pushpalanka/carbon-identity
/* * Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.carbon.identity.workflow.impl.internal; import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.osgi.framework.BundleContext; import org.osgi.framework.ServiceRegistration; import org.osgi.service.component.ComponentContext; import org.wso2.carbon.base.MultitenantConstants; import org.wso2.carbon.identity.core.util.IdentityCoreInitializedEvent; import org.wso2.carbon.identity.core.util.IdentityUtil; import org.wso2.carbon.identity.workflow.impl.ApprovalWorkflow; import org.wso2.carbon.identity.workflow.impl.BPELDeployer; import org.wso2.carbon.identity.workflow.impl.RequestExecutor; import org.wso2.carbon.identity.workflow.impl.WFImplConstant; import org.wso2.carbon.identity.workflow.impl.WorkflowImplService; import org.wso2.carbon.identity.workflow.impl.WorkflowImplServiceImpl; import org.wso2.carbon.identity.workflow.impl.bean.BPSProfile; import org.wso2.carbon.identity.workflow.impl.listener.WorkflowImplTenantMgtListener; import org.wso2.carbon.identity.workflow.impl.listener.WorkflowListenerImpl; import org.wso2.carbon.identity.workflow.mgt.WorkflowManagementService; import org.wso2.carbon.identity.workflow.mgt.exception.WorkflowException; import org.wso2.carbon.identity.workflow.mgt.exception.WorkflowRuntimeException; 
import org.wso2.carbon.identity.workflow.mgt.listener.WorkflowListener; import org.wso2.carbon.identity.workflow.mgt.util.WFConstant; import org.wso2.carbon.identity.workflow.mgt.util.WorkflowManagementUtil; import org.wso2.carbon.identity.workflow.mgt.workflow.AbstractWorkflow; import org.wso2.carbon.identity.workflow.mgt.workflow.TemplateInitializer; import org.wso2.carbon.identity.workflow.mgt.workflow.WorkFlowExecutor; import org.wso2.carbon.stratos.common.listeners.TenantMgtListener; import org.wso2.carbon.user.core.service.RealmService; import org.wso2.carbon.utils.ConfigurationContextService; import java.io.IOException; import java.io.InputStream; import java.net.URISyntaxException; /** * @scr.component name="org.wso2.carbon.identity.workflow.impl" immediate="true" * @scr.reference name="org.wso2.carbon.user.core.service.realmservice" interface="org.wso2.carbon.user.core.service.RealmService" * cardinality="1..1" policy="dynamic" bind="setRealmService" * unbind="unsetRealmService" * @scr.reference name="org.wso2.carbon.identity.workflow.mgt.workflowservice" * interface="org.wso2.carbon.identity.workflow.mgt.WorkflowManagementService" * cardinality="0..n" policy="dynamic" bind="setWorkflowManagementService" * unbind="unsetWorkflowManagementService" * @scr.reference name="identityCoreInitializedEventService" * interface="org.wso2.carbon.identity.core.util.IdentityCoreInitializedEvent" cardinality="1..1" * policy="dynamic" bind="setIdentityCoreInitializedEventService" unbind="unsetIdentityCoreInitializedEventService" * @scr.reference name="org.wso2.carbon.utils.contextservice" * interface="org.wso2.carbon.utils.ConfigurationContextService" * cardinality="1..1" policy="dynamic" bind="setConfigurationContextService" * unbind="unsetConfigurationContextService" */ public class WorkflowImplServiceComponent { private static Log log = LogFactory.getLog(WorkflowImplServiceComponent.class); protected void activate(ComponentContext context) { BundleContext bundleContext 
= context.getBundleContext(); try { String metaDataXML = readWorkflowImplParamMetaDataXML(WFImplConstant.WORKFLOW_IMPL_PARAMETER_METADATA_FILE_NAME); bundleContext.registerService(AbstractWorkflow.class, new ApprovalWorkflow(BPELDeployer.class, RequestExecutor.class, metaDataXML), null); bundleContext.registerService(WorkflowListener.class, new WorkflowListenerImpl(), null); WorkflowImplServiceDataHolder.getInstance().setWorkflowImplService(new WorkflowImplServiceImpl()); WorkflowImplTenantMgtListener workflowTenantMgtListener = new WorkflowImplTenantMgtListener(); ServiceRegistration tenantMgtListenerSR = bundleContext.registerService( TenantMgtListener.class.getName(), workflowTenantMgtListener, null); if (tenantMgtListenerSR != null) { log.debug("Workflow Management - WorkflowTenantMgtListener registered"); } else { log.error("Workflow Management - WorkflowTenantMgtListener could not be registered"); } this.addDefaultBPSProfile(); } catch (Throwable e) { log.error("Error occurred while activating WorkflowImplServiceComponent bundle, " + e.getMessage()); } } protected void setWorkflowManagementService(WorkflowManagementService workflowManagementService) { WorkflowImplServiceDataHolder.getInstance().setWorkflowManagementService(workflowManagementService); } protected void unsetWorkflowManagementService(WorkflowManagementService workflowManagementService) { WorkflowImplServiceDataHolder.getInstance().setWorkflowManagementService(null); } protected void setRealmService(RealmService realmService) { WorkflowImplServiceDataHolder.getInstance().setRealmService(realmService); } protected void unsetRealmService(RealmService realmService) { WorkflowImplServiceDataHolder.getInstance().setRealmService(null); } protected void unsetConfigurationContextService(ConfigurationContextService contextService) { WorkflowImplServiceDataHolder.getInstance().setConfigurationContextService(null); } protected void setConfigurationContextService(ConfigurationContextService contextService) { 
WorkflowImplServiceDataHolder.getInstance().setConfigurationContextService(contextService); } private void addDefaultBPSProfile() { try { WorkflowImplService workflowImplService = WorkflowImplServiceDataHolder.getInstance().getWorkflowImplService(); BPSProfile currentBpsProfile = workflowImplService.getBPSProfile(WFConstant.DEFAULT_BPS_PROFILE, MultitenantConstants.SUPER_TENANT_ID); String url = IdentityUtil.getServerURL("", true); String userName = WorkflowImplServiceDataHolder.getInstance().getRealmService() .getBootstrapRealmConfiguration().getAdminUserName(); String password = WorkflowImplServiceDataHolder.getInstance().getRealmService() .getBootstrapRealmConfiguration().getAdminPassword(); if (StringUtils.isBlank(password)) { log.info("Insufficient data for adding embedded_bps profile, hence skipping."); return; } if (currentBpsProfile == null || !currentBpsProfile.getWorkerHostURL().equals(url) || !currentBpsProfile .getUsername().equals(userName) || !currentBpsProfile.getPassword().equals(password)) { BPSProfile bpsProfileDTO = new BPSProfile(); bpsProfileDTO.setManagerHostURL(url); bpsProfileDTO.setWorkerHostURL(url); bpsProfileDTO.setUsername(userName); bpsProfileDTO.setPassword(password); bpsProfileDTO.setCallbackUser(userName); bpsProfileDTO.setCallbackPassword(password); bpsProfileDTO.setProfileName(WFConstant.DEFAULT_BPS_PROFILE); if (currentBpsProfile == null) { workflowImplService.addBPSProfile(bpsProfileDTO, MultitenantConstants.SUPER_TENANT_ID); log.info("Default BPS profile added to the DB."); } else { workflowImplService.updateBPSProfile(bpsProfileDTO, MultitenantConstants.SUPER_TENANT_ID); log.info("Default BPS profile updated."); } } } catch (WorkflowException e) { //This is not thrown exception because this is not blocked to the other functionality. User can create // default profile by manually. 
String errorMsg = "Error occured while adding default bps profile, " + e.getMessage(); log.error(errorMsg); } } private String readWorkflowImplParamMetaDataXML(String fileName) throws WorkflowRuntimeException { String content = null; try { InputStream resourceAsStream = this.getClass().getClassLoader() .getResourceAsStream(fileName); content = WorkflowManagementUtil.readFileFromResource(resourceAsStream); } catch (IOException e) { String errorMsg = "Error occurred while reading file from class path, " + e.getMessage(); log.error(errorMsg); throw new WorkflowRuntimeException(errorMsg, e); } catch (URISyntaxException e) { String errorMsg = "Error occurred while reading file from class path, " + e.getMessage(); log.error(errorMsg); throw new WorkflowRuntimeException(errorMsg, e); } return content; } protected void unsetIdentityCoreInitializedEventService(IdentityCoreInitializedEvent identityCoreInitializedEvent) { /* reference IdentityCoreInitializedEvent service to guarantee that this component will wait until identity core is started */ } protected void setIdentityCoreInitializedEventService(IdentityCoreInitializedEvent identityCoreInitializedEvent) { /* reference IdentityCoreInitializedEvent service to guarantee that this component will wait until identity core is started */ } }
components/workflow-mgt/org.wso2.carbon.identity.workflow.impl/src/main/java/org/wso2/carbon/identity/workflow/impl/internal/WorkflowImplServiceComponent.java
/* * Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.carbon.identity.workflow.impl.internal; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.osgi.framework.BundleContext; import org.osgi.framework.ServiceRegistration; import org.osgi.service.component.ComponentContext; import org.wso2.carbon.base.MultitenantConstants; import org.wso2.carbon.identity.core.util.IdentityCoreInitializedEvent; import org.wso2.carbon.identity.core.util.IdentityUtil; import org.wso2.carbon.identity.workflow.impl.ApprovalWorkflow; import org.wso2.carbon.identity.workflow.impl.BPELDeployer; import org.wso2.carbon.identity.workflow.impl.RequestExecutor; import org.wso2.carbon.identity.workflow.impl.WFImplConstant; import org.wso2.carbon.identity.workflow.impl.WorkflowImplService; import org.wso2.carbon.identity.workflow.impl.WorkflowImplServiceImpl; import org.wso2.carbon.identity.workflow.impl.bean.BPSProfile; import org.wso2.carbon.identity.workflow.impl.listener.WorkflowImplTenantMgtListener; import org.wso2.carbon.identity.workflow.impl.listener.WorkflowListenerImpl; import org.wso2.carbon.identity.workflow.mgt.WorkflowManagementService; import org.wso2.carbon.identity.workflow.mgt.exception.WorkflowException; import org.wso2.carbon.identity.workflow.mgt.exception.WorkflowRuntimeException; import 
org.wso2.carbon.identity.workflow.mgt.listener.WorkflowListener; import org.wso2.carbon.identity.workflow.mgt.util.WFConstant; import org.wso2.carbon.identity.workflow.mgt.util.WorkflowManagementUtil; import org.wso2.carbon.identity.workflow.mgt.workflow.AbstractWorkflow; import org.wso2.carbon.identity.workflow.mgt.workflow.TemplateInitializer; import org.wso2.carbon.identity.workflow.mgt.workflow.WorkFlowExecutor; import org.wso2.carbon.stratos.common.listeners.TenantMgtListener; import org.wso2.carbon.user.core.service.RealmService; import org.wso2.carbon.utils.ConfigurationContextService; import java.io.IOException; import java.io.InputStream; import java.net.URISyntaxException; /** * @scr.component name="org.wso2.carbon.identity.workflow.impl" immediate="true" * @scr.reference name="org.wso2.carbon.user.core.service.realmservice" interface="org.wso2.carbon.user.core.service.RealmService" * cardinality="1..1" policy="dynamic" bind="setRealmService" * unbind="unsetRealmService" * @scr.reference name="org.wso2.carbon.identity.workflow.mgt.workflowservice" * interface="org.wso2.carbon.identity.workflow.mgt.WorkflowManagementService" * cardinality="0..n" policy="dynamic" bind="setWorkflowManagementService" * unbind="unsetWorkflowManagementService" * @scr.reference name="identityCoreInitializedEventService" * interface="org.wso2.carbon.identity.core.util.IdentityCoreInitializedEvent" cardinality="1..1" * policy="dynamic" bind="setIdentityCoreInitializedEventService" unbind="unsetIdentityCoreInitializedEventService" * @scr.reference name="org.wso2.carbon.utils.contextservice" * interface="org.wso2.carbon.utils.ConfigurationContextService" * cardinality="1..1" policy="dynamic" bind="setConfigurationContextService" * unbind="unsetConfigurationContextService" */ public class WorkflowImplServiceComponent { private static Log log = LogFactory.getLog(WorkflowImplServiceComponent.class); protected void activate(ComponentContext context) { BundleContext bundleContext = 
context.getBundleContext(); try { String metaDataXML = readWorkflowImplParamMetaDataXML(WFImplConstant.WORKFLOW_IMPL_PARAMETER_METADATA_FILE_NAME); bundleContext.registerService(AbstractWorkflow.class, new ApprovalWorkflow(BPELDeployer.class, RequestExecutor.class, metaDataXML), null); bundleContext.registerService(WorkflowListener.class, new WorkflowListenerImpl(), null); WorkflowImplServiceDataHolder.getInstance().setWorkflowImplService(new WorkflowImplServiceImpl()); WorkflowImplTenantMgtListener workflowTenantMgtListener = new WorkflowImplTenantMgtListener(); ServiceRegistration tenantMgtListenerSR = bundleContext.registerService( TenantMgtListener.class.getName(), workflowTenantMgtListener, null); if (tenantMgtListenerSR != null) { log.debug("Workflow Management - WorkflowTenantMgtListener registered"); } else { log.error("Workflow Management - WorkflowTenantMgtListener could not be registered"); } this.addDefaultBPSProfile(); } catch (Throwable e) { log.error("Error occurred while activating WorkflowImplServiceComponent bundle, " + e.getMessage()); } } protected void setWorkflowManagementService(WorkflowManagementService workflowManagementService) { WorkflowImplServiceDataHolder.getInstance().setWorkflowManagementService(workflowManagementService); } protected void unsetWorkflowManagementService(WorkflowManagementService workflowManagementService) { WorkflowImplServiceDataHolder.getInstance().setWorkflowManagementService(null); } protected void setRealmService(RealmService realmService) { WorkflowImplServiceDataHolder.getInstance().setRealmService(realmService); } protected void unsetRealmService(RealmService realmService) { WorkflowImplServiceDataHolder.getInstance().setRealmService(null); } protected void unsetConfigurationContextService(ConfigurationContextService contextService) { WorkflowImplServiceDataHolder.getInstance().setConfigurationContextService(null); } protected void setConfigurationContextService(ConfigurationContextService contextService) { 
WorkflowImplServiceDataHolder.getInstance().setConfigurationContextService(contextService); } private void addDefaultBPSProfile() { try { WorkflowImplService workflowImplService = WorkflowImplServiceDataHolder.getInstance().getWorkflowImplService(); BPSProfile currentBpsProfile = workflowImplService.getBPSProfile(WFConstant.DEFAULT_BPS_PROFILE, MultitenantConstants.SUPER_TENANT_ID); String url = IdentityUtil.getServerURL("", true); String userName = WorkflowImplServiceDataHolder.getInstance().getRealmService().getBootstrapRealmConfiguration() .getAdminUserName(); String password = WorkflowImplServiceDataHolder.getInstance().getRealmService().getBootstrapRealmConfiguration() .getAdminPassword(); if (currentBpsProfile == null || !currentBpsProfile.getWorkerHostURL().equals(url) || !currentBpsProfile .getUsername().equals(userName) || !currentBpsProfile.getPassword().equals(password)) { BPSProfile bpsProfileDTO = new BPSProfile(); bpsProfileDTO.setManagerHostURL(url); bpsProfileDTO.setWorkerHostURL(url); bpsProfileDTO.setUsername(userName); bpsProfileDTO.setPassword(password); bpsProfileDTO.setCallbackUser(userName); bpsProfileDTO.setCallbackPassword(password); bpsProfileDTO.setProfileName(WFConstant.DEFAULT_BPS_PROFILE); if (currentBpsProfile == null) { workflowImplService.addBPSProfile(bpsProfileDTO, MultitenantConstants.SUPER_TENANT_ID); log.info("Default BPS profile added to the DB."); } else { workflowImplService.updateBPSProfile(bpsProfileDTO, MultitenantConstants.SUPER_TENANT_ID); log.info("Default BPS profile updated."); } } } catch (WorkflowException e) { //This is not thrown exception because this is not blocked to the other functionality. User can create // default profile by manually. 
String errorMsg = "Error occured while adding default bps profile, " + e.getMessage(); log.error(errorMsg); } } private String readWorkflowImplParamMetaDataXML(String fileName) throws WorkflowRuntimeException { String content = null; try { InputStream resourceAsStream = this.getClass().getClassLoader() .getResourceAsStream(fileName); content = WorkflowManagementUtil.readFileFromResource(resourceAsStream); } catch (IOException e) { String errorMsg = "Error occurred while reading file from class path, " + e.getMessage(); log.error(errorMsg); throw new WorkflowRuntimeException(errorMsg, e); } catch (URISyntaxException e) { String errorMsg = "Error occurred while reading file from class path, " + e.getMessage(); log.error(errorMsg); throw new WorkflowRuntimeException(errorMsg, e); } return content; } protected void unsetIdentityCoreInitializedEventService(IdentityCoreInitializedEvent identityCoreInitializedEvent) { /* reference IdentityCoreInitializedEvent service to guarantee that this component will wait until identity core is started */ } protected void setIdentityCoreInitializedEventService(IdentityCoreInitializedEvent identityCoreInitializedEvent) { /* reference IdentityCoreInitializedEvent service to guarantee that this component will wait until identity core is started */ } }
IDENTITY-3979 Cannot create workflows - Workflows/Add
components/workflow-mgt/org.wso2.carbon.identity.workflow.impl/src/main/java/org/wso2/carbon/identity/workflow/impl/internal/WorkflowImplServiceComponent.java
IDENTITY-3979 Cannot create workflows - Workflows/Add
<ide><path>omponents/workflow-mgt/org.wso2.carbon.identity.workflow.impl/src/main/java/org/wso2/carbon/identity/workflow/impl/internal/WorkflowImplServiceComponent.java <ide> <ide> package org.wso2.carbon.identity.workflow.impl.internal; <ide> <add>import org.apache.commons.lang.StringUtils; <ide> import org.apache.commons.logging.Log; <ide> import org.apache.commons.logging.LogFactory; <ide> import org.osgi.framework.BundleContext; <ide> BPSProfile currentBpsProfile = workflowImplService.getBPSProfile(WFConstant.DEFAULT_BPS_PROFILE, <ide> MultitenantConstants.SUPER_TENANT_ID); <ide> String url = IdentityUtil.getServerURL("", true); <del> String userName = <del> WorkflowImplServiceDataHolder.getInstance().getRealmService().getBootstrapRealmConfiguration() <del> .getAdminUserName(); <del> String password = <del> WorkflowImplServiceDataHolder.getInstance().getRealmService().getBootstrapRealmConfiguration() <del> .getAdminPassword(); <add> String userName = WorkflowImplServiceDataHolder.getInstance().getRealmService() <add> .getBootstrapRealmConfiguration().getAdminUserName(); <add> String password = WorkflowImplServiceDataHolder.getInstance().getRealmService() <add> .getBootstrapRealmConfiguration().getAdminPassword(); <add> if (StringUtils.isBlank(password)) { <add> log.info("Insufficient data for adding embedded_bps profile, hence skipping."); <add> return; <add> } <ide> if (currentBpsProfile == null || !currentBpsProfile.getWorkerHostURL().equals(url) || !currentBpsProfile <ide> .getUsername().equals(userName) || !currentBpsProfile.getPassword().equals(password)) { <ide> BPSProfile bpsProfileDTO = new BPSProfile();
Java
apache-2.0
40af773e101a0c9af36450e1d2fe84329ae75fca
0
charlesccychen/beam,markflyhigh/incubator-beam,markflyhigh/incubator-beam,rangadi/beam,apache/beam,rangadi/beam,charlesccychen/beam,apache/beam,charlesccychen/incubator-beam,robertwb/incubator-beam,robertwb/incubator-beam,RyanSkraba/beam,chamikaramj/beam,RyanSkraba/beam,lukecwik/incubator-beam,lukecwik/incubator-beam,chamikaramj/beam,rangadi/incubator-beam,iemejia/incubator-beam,apache/beam,chamikaramj/beam,charlesccychen/beam,chamikaramj/beam,lukecwik/incubator-beam,lukecwik/incubator-beam,apache/beam,robertwb/incubator-beam,rangadi/beam,chamikaramj/beam,RyanSkraba/beam,RyanSkraba/beam,apache/beam,lukecwik/incubator-beam,rangadi/incubator-beam,apache/beam,mxm/incubator-beam,charlesccychen/incubator-beam,rangadi/beam,RyanSkraba/beam,markflyhigh/incubator-beam,mxm/incubator-beam,rangadi/beam,charlesccychen/incubator-beam,RyanSkraba/beam,robertwb/incubator-beam,apache/beam,chamikaramj/beam,lukecwik/incubator-beam,rangadi/beam,markflyhigh/incubator-beam,markflyhigh/incubator-beam,robertwb/incubator-beam,robertwb/incubator-beam,apache/beam,markflyhigh/incubator-beam,apache/beam,charlesccychen/beam,lukecwik/incubator-beam,chamikaramj/beam,robertwb/incubator-beam,charlesccychen/beam,lukecwik/incubator-beam,apache/beam,rangadi/incubator-beam,RyanSkraba/beam,lukecwik/incubator-beam,rangadi/beam,charlesccychen/beam,markflyhigh/incubator-beam,robertwb/incubator-beam,iemejia/incubator-beam,apache/beam,charlesccychen/beam,chamikaramj/beam,chamikaramj/beam,chamikaramj/beam,robertwb/incubator-beam,lukecwik/incubator-beam,robertwb/incubator-beam
package cz.seznam.euphoria.core.executor.inmem; import cz.seznam.euphoria.core.client.dataset.Dataset; import cz.seznam.euphoria.core.client.dataset.GroupedDataset; import cz.seznam.euphoria.core.client.dataset.windowing.Batch; import cz.seznam.euphoria.core.client.dataset.windowing.Time; import cz.seznam.euphoria.core.client.dataset.windowing.Window; import cz.seznam.euphoria.core.client.dataset.windowing.WindowedElement; import cz.seznam.euphoria.core.client.dataset.windowing.Windowing; import cz.seznam.euphoria.core.client.flow.Flow; import cz.seznam.euphoria.core.client.functional.UnaryFunction; import cz.seznam.euphoria.core.client.functional.UnaryFunctor; import cz.seznam.euphoria.core.client.io.Context; import cz.seznam.euphoria.core.client.io.ListDataSink; import cz.seznam.euphoria.core.client.io.ListDataSource; import cz.seznam.euphoria.core.client.operator.CompositeKey; import cz.seznam.euphoria.core.client.operator.FlatMap; import cz.seznam.euphoria.core.client.operator.GroupByKey; import cz.seznam.euphoria.core.client.operator.MapElements; import cz.seznam.euphoria.core.client.operator.ReduceByKey; import cz.seznam.euphoria.core.client.operator.ReduceStateByKey; import cz.seznam.euphoria.core.client.operator.ReduceWindow; import cz.seznam.euphoria.core.client.operator.Repartition; import cz.seznam.euphoria.core.client.operator.Union; import cz.seznam.euphoria.core.client.operator.state.ListStorage; import cz.seznam.euphoria.core.client.operator.state.ListStorageDescriptor; import cz.seznam.euphoria.core.client.operator.state.State; import cz.seznam.euphoria.core.client.operator.state.StorageProvider; import cz.seznam.euphoria.core.client.operator.state.ValueStorage; import cz.seznam.euphoria.core.client.operator.state.ValueStorageDescriptor; import cz.seznam.euphoria.core.client.triggers.Trigger; import cz.seznam.euphoria.core.client.triggers.TriggerContext; import cz.seznam.euphoria.core.client.util.Pair; import 
cz.seznam.euphoria.core.client.util.Sums; import cz.seznam.euphoria.core.client.util.Triple; import cz.seznam.euphoria.guava.shaded.com.google.common.collect.Lists; import cz.seznam.euphoria.guava.shaded.com.google.common.collect.Sets; import org.junit.After; import org.junit.Before; import org.junit.Test; import java.time.Duration; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import java.util.TreeSet; import java.util.stream.Collectors; import static org.junit.Assert.*; /** * {@code InMemExecutor} test suite. * The {@code InMemExecutor} stands on the basic operators, so we just * need to test it correctly implements all of them. Next we need to test * that it can process complex flows with many partitions. */ public class InMemExecutorTest { InMemExecutor executor; Flow flow; @Before public void setup() { executor = new InMemExecutor(); flow = Flow.create("Test"); } @After public void teardown() { executor.abort(); } // Repartition operator @Test public void simpleRepartitionTest() { Dataset<Integer> ints = flow.createInput( ListDataSource.unbounded( Arrays.asList(1, 2, 3), Arrays.asList(4, 5, 6))); // repartition even and odd elements to different partitions Dataset<Integer> repartitioned = Repartition.of(ints) .setPartitioner(e -> e % 2) .setNumPartitions(2) .output(); // collector of outputs ListDataSink<Integer> outputSink = ListDataSink.get(2); repartitioned.persist(outputSink); executor.waitForCompletion(flow); List<List<Integer>> outputs = outputSink.getOutputs(); assertEquals(2, outputs.size()); // first partition contains even numbers assertUnorderedEquals(Arrays.asList(2, 4, 6), outputs.get(0)); // second partition contains odd numbers assertUnorderedEquals(Arrays.asList(1, 3, 5), outputs.get(1)); } @Test // test that repartition works from 2 to 3 partitions public void upRepartitionTest() { 
Dataset<Integer> ints = flow.createInput( ListDataSource.unbounded( Arrays.asList(1, 2, 3), Arrays.asList(4, 5, 6))); // repartition even and odd elements to different partitions Dataset<Integer> repartitioned = Repartition.of(ints) .setPartitioner(e -> e % 3) .setNumPartitions(3) .output(); // collector of outputs ListDataSink<Integer> outputSink = ListDataSink.get(3); repartitioned.persist(outputSink); executor.waitForCompletion(flow); List<List<Integer>> outputs = outputSink.getOutputs(); assertEquals(3, outputs.size()); assertUnorderedEquals(Arrays.asList(3, 6), outputs.get(0)); assertUnorderedEquals(Arrays.asList(4, 1), outputs.get(1)); assertUnorderedEquals(Arrays.asList(5, 2), outputs.get(2)); } @Test // test that repartition works from 3 to 2 partitions public void downRepartitionTest() { Dataset<Integer> ints = flow.createInput( ListDataSource.unbounded( Arrays.asList(1, 2), Arrays.asList(3, 4), Arrays.asList(5, 6))); // repartition even and odd elements to different partitions Dataset<Integer> repartitioned = Repartition.of(ints) .setPartitioner(e -> e % 2) .setNumPartitions(2) .output(); // collector of outputs ListDataSink<Integer> outputSink = ListDataSink.get(2); repartitioned.persist(outputSink); executor.waitForCompletion(flow); List<List<Integer>> outputs = outputSink.getOutputs(); assertEquals(2, outputs.size()); assertUnorderedEquals(Arrays.asList(2, 4, 6), outputs.get(0)); assertUnorderedEquals(Arrays.asList(1, 3, 5), outputs.get(1)); } @Test // test that repartition works from 3 to 2 partitions public void downRepartitionTestWithHashPartitioner() { Dataset<Integer> ints = flow.createInput( ListDataSource.unbounded( Arrays.asList(1, 2, 3), Arrays.asList(4, 5, 6))); // repartition even and odd elements to different partitions Dataset<Integer> repartitioned = Repartition.of(ints) .setNumPartitions(2) .output(); // collector of outputs ListDataSink<Integer> outputSink = ListDataSink.get(2); repartitioned.persist(outputSink); 
executor.waitForCompletion(flow); List<List<Integer>> outputs = outputSink.getOutputs(); assertEquals(2, outputs.size()); assertUnorderedEquals(Arrays.asList(2, 4, 6), outputs.get(0)); assertUnorderedEquals(Arrays.asList(1, 3, 5), outputs.get(1)); } // Union operator @Test public void simpleUnionTest() { Dataset<Integer> first = flow.createInput( ListDataSource.unbounded( Arrays.asList(1), Arrays.asList(2, 3, 4, 5, 6))); Dataset<Integer> second = flow.createInput( ListDataSource.unbounded( Arrays.asList(7, 8, 9))); Dataset<Integer> union = Union.of(first, second) .output(); // collector of outputs ListDataSink<Integer> outputSink = ListDataSink.get(1); Repartition.of(union) .setNumPartitions(1) .output() .persist(outputSink); executor.waitForCompletion(flow); List<List<Integer>> outputs = outputSink.getOutputs(); assertEquals(1, outputs.size()); assertUnorderedEquals( Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9), outputs.get(0)); } // FlatMap operator @Test public void simpleFlatMapTest() { Dataset<Integer> ints = flow.createInput( ListDataSource.unbounded( Arrays.asList(0, 1, 2, 3), Arrays.asList(4, 5, 6))); // repeat each element N N count Dataset<Integer> output = FlatMap.of(ints) .using((Integer e, Context<Integer> c) -> { for (int i = 0; i < e; i++) { c.collect(e); } }) .output(); // collector of outputs ListDataSink<Integer> outputSink = ListDataSink.get(2); output.persist(outputSink); executor.waitForCompletion(flow); List<List<Integer>> outputs = outputSink.getOutputs(); assertEquals(2, outputs.size()); // this must be equal including ordering and partitioning assertEquals(Arrays.asList(1, 2, 2, 3, 3, 3), outputs.get(0)); assertEquals(Arrays.asList(4, 4, 4, 4, 5, 5, 5, 5, 5, 6, 6, 6, 6, 6, 6), outputs.get(1)); } // ReduceStateByKey operator /** * Simple sort state for tests. * This state takes comparable elements and produces sorted sequence. 
*/ public static class SortState extends State<Integer, Integer> { final ListStorage<Integer> data; SortState( Context<Integer> c, StorageProvider storageProvider) { super(c, storageProvider); data = storageProvider.getListStorage( ListStorageDescriptor.of("data", Integer.class)); } @Override public void add(Integer element) { data.add(element); } @Override @SuppressWarnings("unchecked") public void flush() { List<Integer> toSort = Lists.newArrayList(data.get()); Collections.sort(toSort); for (Integer i : toSort) { getContext().collect(i); } } static SortState combine(Iterable<SortState> others) { SortState ret = null; for (SortState s : others) { if (ret == null) { ret = new SortState( s.getContext(), s.getStorageProvider()); } ret.data.addAll(s.data.get()); } return ret; } @Override public void close() { data.clear(); } } // ~ end of SortState static class SizedCountWindow extends Window { final int size; int get() { return size; } SizedCountWindow(int size) { this.size = size; } @Override public String toString() { return String.valueOf(size); } @Override public boolean equals(Object o) { if (o instanceof SizedCountWindow) { SizedCountWindow that = (SizedCountWindow) o; return size == that.size; } return false; } @Override public int hashCode() { return size; } } // ~ end of SizedCountWindow static class SizedCountWindowing<T> implements Windowing<T, SizedCountWindow> { final UnaryFunction<T, Integer> size; SizedCountWindowing(UnaryFunction<T, Integer> size) { this.size = size; } @Override public Set<SizedCountWindow> assignWindowsToElement(WindowedElement<?, T> input) { int sz = size.apply(input.get()); return Sets.newHashSet(new SizedCountWindow(sz), new SizedCountWindow(2 * sz)); } @Override public Trigger<T, SizedCountWindow> getTrigger() { return new SizedCountTrigger<T>(); } } // ~ end of SizedCountWindowing static class SizedCountTrigger<T> implements Trigger<T, SizedCountWindow> { private final ValueStorageDescriptor<Long> countDesc = 
ValueStorageDescriptor.of("count", Long.class, 0L, (x, y) -> x + y ); @Override public TriggerResult onElement(long time, T element, SizedCountWindow window, TriggerContext ctx) { ValueStorage<Long> cnt = ctx.getValueStorage(countDesc); cnt.set(cnt.get() + 1L); if (cnt.get() >= window.get()) { return TriggerResult.FLUSH_AND_PURGE; } return TriggerResult.NOOP; } @Override public TriggerResult onTimeEvent(long time, SizedCountWindow window, TriggerContext ctx) { return TriggerResult.NOOP; } @Override public void onClear(SizedCountWindow window, TriggerContext ctx) { ctx.getValueStorage(countDesc).clear(); } @Override public TriggerResult onMerge(SizedCountWindow window, TriggerContext.TriggerMergeContext ctx) { ctx.mergeStoredState(countDesc); return TriggerResult.NOOP; } } // ~ end of SizedCountTrigger @Test public void testReduceByKeyWithSortStateAndCustomWindowing() { Dataset<Integer> ints = flow.createInput( ListDataSource.unbounded( reversed(sequenceInts(0, 100)), reversed(sequenceInts(100, 1100)))); SizedCountWindowing<Integer> windowing = new SizedCountWindowing<>(i -> (i % 10) + 1); // the key for sort will be the last digit Dataset<Pair<Integer, Integer>> output = ReduceStateByKey.of(ints) .keyBy(i -> i % 10) .valueBy(e -> e) .stateFactory(SortState::new) .combineStateBy(SortState::combine) .windowBy(windowing) .output(); // collector of outputs ListDataSink<Triple<SizedCountWindow, Integer, Integer>> outputSink = ListDataSink.get(2); FlatMap.of(output) .using((UnaryFunctor<Pair<Integer, Integer>, Triple<SizedCountWindow, Integer, Integer>>) (elem, context) -> context.collect(Triple.of((SizedCountWindow) context.getWindow(), elem.getFirst(), elem.getSecond()))) .output() .persist(outputSink); executor.waitForCompletion(flow); List<List<Triple<SizedCountWindow, Integer, Integer>>> outputs = outputSink.getOutputs(); assertEquals(2, outputs.size()); // each partition should have 550 items in each window set assertEquals(2 * 550, outputs.get(0).size()); 
assertEquals(2 * 550, outputs.get(1).size()); Set<Integer> firstKeys = outputs.get(0).stream() .map(Triple::getSecond).distinct() .collect(Collectors.toSet()); // validate that the two partitions contain different keys outputs.get(1).forEach(p -> assertFalse(firstKeys.contains(p.getSecond()))); checkKeyAlignedSortedList(outputs.get(0)); checkKeyAlignedSortedList(outputs.get(1)); } private void checkKeyAlignedSortedList( List<Triple<SizedCountWindow, Integer, Integer>> list) { Map<SizedCountWindow, Map<Integer, List<Integer>>> byWindow = new HashMap<>(); for (Triple<SizedCountWindow, Integer, Integer> p : list) { Map<Integer, List<Integer>> byKey = byWindow.get(p.getFirst()); if (byKey == null) { byWindow.put(p.getFirst(), byKey = new HashMap<>()); } List<Integer> sorted = byKey.get(p.getSecond()); if (sorted == null) { byKey.put(p.getSecond(), sorted = new ArrayList<>()); } sorted.add(p.getThird()); } assertFalse(byWindow.isEmpty()); int totalCount = 0; List<SizedCountWindow> iterOrder = byWindow.keySet() .stream() .sorted(Comparator.comparing(SizedCountWindow::get)) .collect(Collectors.toList()); for (SizedCountWindow w : iterOrder) { Map<Integer, List<Integer>> wkeys = byWindow.get(w); assertNotNull(wkeys); assertFalse(wkeys.isEmpty()); for (Map.Entry<Integer, List<Integer>> e : wkeys.entrySet()) { // now, each list must be sorted assertAscendingWindows(e.getValue(), w, e.getKey()); totalCount += e.getValue().size(); } } assertEquals(1100, totalCount); } private static void assertAscendingWindows( List<Integer> xs, SizedCountWindow window, Integer key) { List<List<Integer>> windows = Lists.partition(xs, window.get()); assertFalse(windows.isEmpty()); int totalSeen = 0; for (List<Integer> windowData : windows) { int last = -1; for (int x : windowData) { if (last > x) { fail(String.format("Sequence not ascending for (window: %s / key: %d): %s", window, key, xs)); } last = x; totalSeen += 1; } } assertEquals(xs.size(), totalSeen); } // reverse given list private 
static <T> List<T> reversed(List<T> what) { Collections.reverse(what); return what; } // produce random N random ints as list private static List<Integer> sequenceInts(int from, int to) { List<Integer> ret = new ArrayList<>(); for (int i = from; i < to; i++) { ret.add(i); } return ret; } // check that given lists are equal irrespecitve of order public static <T extends Comparable<T>> void assertUnorderedEquals( List<T> first, List<T> second) { List<T> firstCopy = new ArrayList<>(first); List<T> secondCopy = new ArrayList<>(second); Collections.sort(firstCopy); Collections.sort(secondCopy); assertEquals(firstCopy, secondCopy); } @Test(timeout = 5000L) public void testInputMultiConsumption() { final int N = 1000; Dataset<Integer> input = flow.createInput( ListDataSource.unbounded(sequenceInts(0, N))); // ~ consume the input another time Dataset<Integer> map = MapElements .of(input) .using(e -> e) .output(); ListDataSink<Integer> mapOut = ListDataSink.get(1); map.persist(mapOut); Dataset<Pair<Integer, Integer>> sum = ReduceByKey .of(input) .keyBy(e -> 0) .valueBy(e -> e) .reduceBy(Sums.ofInts()) .output(); ListDataSink<Pair<Integer, Integer>> sumOut = ListDataSink.get(1); sum.persist(sumOut); executor.waitForCompletion(flow); assertNotNull(sumOut.getOutput(0)); assertEquals(1, sumOut.getOutput(0).size()); assertEquals(Integer.valueOf((N-1) * N / 2), sumOut.getOutput(0).get(0).getSecond()); assertNotNull(mapOut.getOutput(0)); assertEquals(N, mapOut.getOutput(0).size()); assertEquals(Integer.valueOf((N-1) * N / 2), mapOut.getOutput(0).stream().reduce((x, y) -> x + y).get()); } @Test public void testWithWatermarkAndEventTime() throws Exception { int N = 2000; // generate some small ints, use them as event time and count them // in 10s windows Dataset<Integer> input = flow.createInput( ListDataSource.unbounded(sequenceInts(0, N))); ListDataSink<Long> outputs = ListDataSink.get(2); ReduceWindow.of(input) .valueBy(e -> 1L) .combineBy(Sums.ofLongs()) 
.windowBy(Time.of(Duration.ofSeconds(10)).using(e -> e * 1000L)) .setNumPartitions(1) .output() .persist(outputs); // watermarking 100 ms executor.setTriggeringSchedulerSupplier( () -> new WatermarkTriggerScheduler(100)); // run the executor in separate thread in order to be able to watch // the partial results Thread exec = new Thread(() -> executor.waitForCompletion(flow)); exec.start(); // sleep for one second Thread.sleep(1000L); // the data in first unfinished partition List<Long> output = new ArrayList<>(outputs.getUncommittedOutputs().get(0)); // after one second we should have something about 500 elements read, // this means we should have at least 40 complete windows assertTrue("Should have at least 40 windows, got " + output.size(), 40 <= output.size()); assertTrue("All but (at most) one window should have size 10", output.stream().filter(w -> w != 10).count() <= 1); exec.join(); output = outputs.getOutputs().get(0); output.forEach(w -> assertEquals("Each window should have 10 elements, got " + w, 10L, (long) w)); // we have 2000 elements split into 200 windows assertEquals(200, output.size()); } @Test public void testWithWatermarkAndEventTimeAndDiscarding() throws Exception { int N = 2000; // generate some small ints, use them as event time and count them // in 10s windows Dataset<Integer> input = flow.createInput( ListDataSource.unbounded(reversed(sequenceInts(0, N)))); ListDataSink<Long> outputs = ListDataSink.get(2); ReduceWindow.of(input) .valueBy(e -> 1L) .combineBy(Sums.ofLongs()) .windowBy(Time.of(Duration.ofSeconds(10)).using(e -> e * 1000L)) .setNumPartitions(1) .output() .persist(outputs); // watermarking 100 ms executor.setTriggeringSchedulerSupplier( () -> new WatermarkTriggerScheduler(100)); executor.waitForCompletion(flow); // there should be only one element on output - the first element // all other windows are discarded List<Long> output = outputs.getOutputs().get(0); assertEquals(1, output.size()); } @Test public void 
testWithWatermarkAndEventTimeMixed() throws Exception { int N = 2000; // generate some small ints, use them as event time and count them // in 10s windows Dataset<Integer> input = flow.createInput( ListDataSource.unbounded(sequenceInts(0, N)) .withReadDelay(Duration.ofMillis(2))); // first add some fake operator operating on processing time // doing virtually nothing Dataset<Set<Integer>> reduced = ReduceWindow.of(input) .reduceBy((Iterable<Integer> values) -> { Set<Integer> grp = new TreeSet<>(); for (Integer i : values) { grp.add(i); } return grp; }) .windowBy(Batch.get()) .output(); // explode it back to the original input (maybe reordered) // and store it as the original input, process it further in // the same way as in `testWithWatermarkAndEventTime' input = FlatMap.of(reduced) .using((Set<Integer> grp, Context<Integer> c) -> { for (Integer i : grp) { c.collect(i); } }).output(); ListDataSink<Long> outputs = ListDataSink.get(2); ReduceWindow.of(input) .valueBy(e -> 1L) .combineBy(Sums.ofLongs()) .windowBy(Time.of(Duration.ofSeconds(10)).using(e -> e * 1000L)) .setNumPartitions(1) .output() .persist(outputs); // watermarking 100 ms executor.setTriggeringSchedulerSupplier( () -> new WatermarkTriggerScheduler(100)); executor.waitForCompletion(flow); // the data in first unfinished partition List<Long> output = new ArrayList<>(outputs.getUncommittedOutputs().get(0)); // after one second we should have something about 500 elements read, // this means we should have at least 40 complete windows assertTrue("Should have at least 40 windows, got " + output.size(), 40 <= output.size()); assertTrue("All but (at most) one window should have size 10", output.stream().filter(w -> w != 10).count() <= 1); output = outputs.getOutputs().get(0); output.forEach(w -> assertEquals("Each window should have 10 elements, got " + w, 10L, (long) w)); // we have 2000 elements split into 200 windows assertEquals(200, output.size()); } @Test(timeout = 2000) public void 
testGroupedDatasetReduceByKey() throws Exception { Flow flow = Flow.create("Test"); ListDataSource<Pair<Integer, String>> input = ListDataSource.bounded(Arrays.asList( Pair.of(1, "one"), Pair.of(1, "two"), Pair.of(1, "three"), Pair.of(1, "one"), Pair.of(2, "two"), Pair.of(1, "three"), Pair.of(1, "three"))); Dataset<Pair<Integer, String>> pairs = flow.createInput(input); GroupedDataset<Integer, String> grouped = GroupByKey.of(pairs) .keyBy(Pair::getFirst) .valueBy(Pair::getSecond) .output(); Dataset<Pair<CompositeKey<Integer, String>, Long>> output = ReduceByKey.of(grouped) .keyBy(e -> e) .valueBy(e -> 1L) .combineBy(Sums.ofLongs()) .output(); ListDataSink<Pair<CompositeKey<Integer, String>, Long>> out = ListDataSink.get(1); output.persist(out); InMemExecutor executor = new InMemExecutor(); executor.waitForCompletion(flow); assertUnorderedEquals( Arrays.asList("1-one:2", "1-two:1", "1-three:3", "2-two:1"), out.getOutput(0).stream().map(p -> { assertEquals(Integer.class, p.getFirst().getFirst().getClass()); assertEquals(String.class, p.getFirst().getSecond().getClass()); assertEquals(Long.class, p.getSecond().getClass()); return p.getFirst().getFirst() + "-" + p.getFirst().getSecond() + ":" + p.getSecond(); }).collect(Collectors.toList())); } }
sdks/java/extensions/euphoria/euphoria-core/src/test/java/cz/seznam/euphoria/core/executor/inmem/InMemExecutorTest.java
package cz.seznam.euphoria.core.executor.inmem; import cz.seznam.euphoria.core.client.dataset.Dataset; import cz.seznam.euphoria.core.client.dataset.GroupedDataset; import cz.seznam.euphoria.core.client.dataset.windowing.Batch; import cz.seznam.euphoria.core.client.dataset.windowing.MergingWindowing; import cz.seznam.euphoria.core.client.dataset.windowing.Time; import cz.seznam.euphoria.core.client.dataset.windowing.WindowedElement; import cz.seznam.euphoria.core.client.flow.Flow; import cz.seznam.euphoria.core.client.functional.UnaryFunction; import cz.seznam.euphoria.core.client.functional.UnaryFunctor; import cz.seznam.euphoria.core.client.io.Context; import cz.seznam.euphoria.core.client.io.ListDataSink; import cz.seznam.euphoria.core.client.io.ListDataSource; import cz.seznam.euphoria.core.client.operator.CompositeKey; import cz.seznam.euphoria.core.client.operator.FlatMap; import cz.seznam.euphoria.core.client.operator.GroupByKey; import cz.seznam.euphoria.core.client.operator.MapElements; import cz.seznam.euphoria.core.client.operator.ReduceByKey; import cz.seznam.euphoria.core.client.operator.ReduceStateByKey; import cz.seznam.euphoria.core.client.operator.ReduceWindow; import cz.seznam.euphoria.core.client.operator.Repartition; import cz.seznam.euphoria.core.client.operator.Union; import cz.seznam.euphoria.core.client.operator.state.ListStorage; import cz.seznam.euphoria.core.client.operator.state.ListStorageDescriptor; import cz.seznam.euphoria.core.client.operator.state.State; import cz.seznam.euphoria.core.client.operator.state.StorageProvider; import cz.seznam.euphoria.core.client.util.Pair; import cz.seznam.euphoria.core.client.util.Sums; import cz.seznam.euphoria.core.client.util.Triple; import cz.seznam.euphoria.guava.shaded.com.google.common.collect.Lists; import org.junit.After; import org.junit.Before; import org.junit.Test; import java.time.Duration; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import 
java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.TreeSet; import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; import static org.junit.Assert.*; /** * {@code InMemExecutor} test suite. * The {@code InMemExecutor} stands on the basic operators, so we just * need to test it correctly implements all of them. Next we need to test * that it can process complex flows with many partitions. */ public class InMemExecutorTest { InMemExecutor executor; Flow flow; @Before public void setup() { executor = new InMemExecutor(); flow = Flow.create("Test"); } @After public void teardown() { executor.abort(); } // Repartition operator @Test public void simpleRepartitionTest() { Dataset<Integer> ints = flow.createInput( ListDataSource.unbounded( Arrays.asList(1, 2, 3), Arrays.asList(4, 5, 6))); // repartition even and odd elements to different partitions Dataset<Integer> repartitioned = Repartition.of(ints) .setPartitioner(e -> e % 2) .setNumPartitions(2) .output(); // collector of outputs ListDataSink<Integer> outputSink = ListDataSink.get(2); repartitioned.persist(outputSink); executor.waitForCompletion(flow); List<List<Integer>> outputs = outputSink.getOutputs(); assertEquals(2, outputs.size()); // first partition contains even numbers assertUnorderedEquals(Arrays.asList(2, 4, 6), outputs.get(0)); // second partition contains odd numbers assertUnorderedEquals(Arrays.asList(1, 3, 5), outputs.get(1)); } @Test // test that repartition works from 2 to 3 partitions public void upRepartitionTest() { Dataset<Integer> ints = flow.createInput( ListDataSource.unbounded( Arrays.asList(1, 2, 3), Arrays.asList(4, 5, 6))); // repartition even and odd elements to different partitions Dataset<Integer> repartitioned = Repartition.of(ints) .setPartitioner(e -> e % 3) .setNumPartitions(3) .output(); // collector of outputs ListDataSink<Integer> outputSink = 
ListDataSink.get(3); repartitioned.persist(outputSink); executor.waitForCompletion(flow); List<List<Integer>> outputs = outputSink.getOutputs(); assertEquals(3, outputs.size()); assertUnorderedEquals(Arrays.asList(3, 6), outputs.get(0)); assertUnorderedEquals(Arrays.asList(4, 1), outputs.get(1)); assertUnorderedEquals(Arrays.asList(5, 2), outputs.get(2)); } @Test // test that repartition works from 3 to 2 partitions public void downRepartitionTest() { Dataset<Integer> ints = flow.createInput( ListDataSource.unbounded( Arrays.asList(1, 2), Arrays.asList(3, 4), Arrays.asList(5, 6))); // repartition even and odd elements to different partitions Dataset<Integer> repartitioned = Repartition.of(ints) .setPartitioner(e -> e % 2) .setNumPartitions(2) .output(); // collector of outputs ListDataSink<Integer> outputSink = ListDataSink.get(2); repartitioned.persist(outputSink); executor.waitForCompletion(flow); List<List<Integer>> outputs = outputSink.getOutputs(); assertEquals(2, outputs.size()); assertUnorderedEquals(Arrays.asList(2, 4, 6), outputs.get(0)); assertUnorderedEquals(Arrays.asList(1, 3, 5), outputs.get(1)); } @Test // test that repartition works from 3 to 2 partitions public void downRepartitionTestWithHashPartitioner() { Dataset<Integer> ints = flow.createInput( ListDataSource.unbounded( Arrays.asList(1, 2, 3), Arrays.asList(4, 5, 6))); // repartition even and odd elements to different partitions Dataset<Integer> repartitioned = Repartition.of(ints) .setNumPartitions(2) .output(); // collector of outputs ListDataSink<Integer> outputSink = ListDataSink.get(2); repartitioned.persist(outputSink); executor.waitForCompletion(flow); List<List<Integer>> outputs = outputSink.getOutputs(); assertEquals(2, outputs.size()); assertUnorderedEquals(Arrays.asList(2, 4, 6), outputs.get(0)); assertUnorderedEquals(Arrays.asList(1, 3, 5), outputs.get(1)); } // Union operator @Test public void simpleUnionTest() { Dataset<Integer> first = flow.createInput( ListDataSource.unbounded( 
Arrays.asList(1), Arrays.asList(2, 3, 4, 5, 6))); Dataset<Integer> second = flow.createInput( ListDataSource.unbounded( Arrays.asList(7, 8, 9))); Dataset<Integer> union = Union.of(first, second) .output(); // collector of outputs ListDataSink<Integer> outputSink = ListDataSink.get(1); Repartition.of(union) .setNumPartitions(1) .output() .persist(outputSink); executor.waitForCompletion(flow); List<List<Integer>> outputs = outputSink.getOutputs(); assertEquals(1, outputs.size()); assertUnorderedEquals( Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9), outputs.get(0)); } // FlatMap operator @Test public void simpleFlatMapTest() { Dataset<Integer> ints = flow.createInput( ListDataSource.unbounded( Arrays.asList(0, 1, 2, 3), Arrays.asList(4, 5, 6))); // repeat each element N N count Dataset<Integer> output = FlatMap.of(ints) .using((Integer e, Context<Integer> c) -> { for (int i = 0; i < e; i++) { c.collect(e); } }) .output(); // collector of outputs ListDataSink<Integer> outputSink = ListDataSink.get(2); output.persist(outputSink); executor.waitForCompletion(flow); List<List<Integer>> outputs = outputSink.getOutputs(); assertEquals(2, outputs.size()); // this must be equal including ordering and partitioning assertEquals(Arrays.asList(1, 2, 2, 3, 3, 3), outputs.get(0)); assertEquals(Arrays.asList(4, 4, 4, 4, 5, 5, 5, 5, 5, 6, 6, 6, 6, 6, 6), outputs.get(1)); } // ReduceStateByKey operator /** * Simple sort state for tests. * This state takes comparable elements and produces sorted sequence. 
*/ public static class SortState extends State<Integer, Integer> { final ListStorage<Integer> data; SortState( Context<Integer> c, StorageProvider storageProvider) { super(c, storageProvider); data = storageProvider.getListStorage( ListStorageDescriptor.of("data", Integer.class)); } @Override public void add(Integer element) { data.add(element); } @Override @SuppressWarnings("unchecked") public void flush() { List<Integer> toSort = Lists.newArrayList(data.get()); Collections.sort(toSort); for (Integer i : toSort) { getContext().collect(i); } } static SortState combine(Iterable<SortState> others) { SortState ret = null; for (SortState s : others) { if (ret == null) { ret = new SortState( s.getContext(), s.getStorageProvider()); } ret.data.addAll(s.data.get()); } return ret; } @Override public void close() { data.clear(); } } static class CountLabel { final int count; int get() { return count; } // on purpose no hashcode or equals CountLabel(int count) { this.count = count; } @Override public String toString() { return String.valueOf(count); } } // XXX // private static class CountWindowContext // extends WindowContext<CountLabel> { // // final int maxSize; // int size = 1; // // public CountWindowContext(WindowID<CountLabel> wid, int maxSize) { // super(wid); // this.maxSize = maxSize; // } // // public CountWindowContext(int maxSize) { // this(new WindowID(new CountLabel(maxSize)), maxSize); // } // // public CountWindowContext(CountLabel label) { // this(new WindowID(label), label.get()); // } // // // @Override // public String toString() { // return "CountWindowContext(" + getWindowID() + ", " + size + ")"; // } // // // } // // // static class SizedCountWindowing<T> implements // MergingWindowing<T, CountLabel, CountWindowContext> { // // final UnaryFunction<T, Integer> size; // // SizedCountWindowing(UnaryFunction<T, Integer> size) { // this.size = size; // } // // @Override // public Collection<Pair<Collection<CountWindowContext>, CountWindowContext>> 
mergeWindows( // Collection<CountWindowContext> actives) { // // // we will merge together only windows with the same window size // // List<Pair<Collection<CountWindowContext>, CountWindowContext>> ret // = new ArrayList<>(); // // Map<Integer, List<CountWindowContext>> toMergeMap = new HashMap<>(); // Map<Integer, AtomicInteger> currentSizeMap = new HashMap<>(); // // for (CountWindowContext w : actives) { // final int wSize = w.maxSize; // AtomicInteger currentSize = currentSizeMap.get(wSize); // if (currentSize == null) { // currentSize = new AtomicInteger(0); // currentSizeMap.put(wSize, currentSize); // toMergeMap.put(wSize, new ArrayList<>()); // } // if (currentSize.get() + w.size <= wSize) { // currentSize.addAndGet(w.size); // toMergeMap.get(wSize).add(w); // } else { // List<CountWindowContext> toMerge = toMergeMap.get(wSize); // if (!toMerge.isEmpty()) { // CountWindowContext res = new CountWindowContext(currentSize.get()); // res.size = currentSize.get(); // ret.add(Pair.of(new ArrayList<>(toMerge), res)); // toMerge.clear(); // } // toMerge.add(w); // currentSize.set(w.size); // } // } // // for (List<CountWindowContext> toMerge : toMergeMap.values()) { // if (!toMerge.isEmpty()) { // CountWindowContext first = toMerge.get(0); // CountWindowContext res = new CountWindowContext(first.maxSize); // res.size = currentSizeMap.get(first.maxSize).get(); // ret.add(Pair.of(toMerge, res)); // } // } // return ret; // } // // @Override // public Set<WindowID<CountLabel>> assignWindowsToElement(WindowedElement<?, T> input) { // int sz = size.apply(input.get()); // return new HashSet<>(Arrays.asList( // new WindowID<>(new CountLabel(sz)), // new WindowID<>(new CountLabel(2 * sz)))); // } // // // @Override // public boolean isComplete(CountWindowContext window) { // return window.size == window.maxSize; // } // // @Override // public CountWindowContext createWindowContext(WindowID<CountLabel> wid) { // return new CountWindowContext(wid, wid.getLabel().get()); // 
} // // } // // // @Test // public void testReduceByKeyWithSortStateAndCustomWindowing() { // Dataset<Integer> ints = flow.createInput( // ListDataSource.unbounded( // reversed(sequenceInts(0, 100)), // reversed(sequenceInts(100, 1100)))); // // SizedCountWindowing<Integer> windowing = // new SizedCountWindowing<>(i -> (i % 10) + 1); // // // the key for sort will be the last digit // Dataset<Pair<Integer, Integer>> output = // ReduceStateByKey.of(ints) // .keyBy(i -> i % 10) // .valueBy(e -> e) // .stateFactory(SortState::new) // .combineStateBy(SortState::combine) // .windowBy(windowing) // .output(); // // // collector of outputs // ListDataSink<Triple<CountLabel, Integer, Integer>> outputSink = ListDataSink.get(2); // // FlatMap.of(output) // .using((UnaryFunctor<Pair<Integer, Integer>, Triple<CountLabel, Integer, Integer>>) // (elem, context) -> context.collect(Triple.of((CountLabel) context.getWindow(), elem.getFirst(), elem.getSecond()))) // .output() // .persist(outputSink); // // executor.waitForCompletion(flow); // // List<List<Triple<CountLabel, Integer, Integer>>> outputs = outputSink.getOutputs(); // assertEquals(2, outputs.size()); // // // each partition should have 550 items in each window set // assertEquals(2 * 550, outputs.get(0).size()); // assertEquals(2 * 550, outputs.get(1).size()); // // Set<Integer> firstKeys = outputs.get(0).stream() // .map(Triple::getSecond).distinct() // .collect(Collectors.toSet()); // // // validate that the two partitions contain different keys // outputs.get(1).forEach(p -> assertFalse(firstKeys.contains(p.getSecond()))); // // outputs.forEach(this::checkKeyAlignedSortedList); // // } private void checkKeyAlignedSortedList( List<Triple<CountLabel, Integer, Integer>> list) { Map<CountLabel, Map<Integer, List<Integer>>> sortedSequencesInWindow = new HashMap<>(); for (Triple<CountLabel, Integer, Integer> p : list) { Map<Integer, List<Integer>> sortedSequences = sortedSequencesInWindow.get( p.getFirst()); if 
(sortedSequences == null) { sortedSequencesInWindow.put(p.getFirst(), sortedSequences = new HashMap<>()); } List<Integer> sorted = sortedSequences.get(p.getSecond()); if (sorted == null) { sortedSequences.put(p.getSecond(), sorted = new ArrayList<>()); } sorted.add(p.getThird()); } assertFalse(sortedSequencesInWindow.isEmpty()); int totalCount = 0; for (Map.Entry<CountLabel, Map<Integer, List<Integer>>> we : sortedSequencesInWindow.entrySet()) { assertFalse(we.getValue().isEmpty()); for (Map.Entry<Integer, List<Integer>> e : we.getValue().entrySet()) { // now, each list must be sorted int last = -1; for (int i : e.getValue()) { assertTrue("Sequence " + e.getValue() + " is not sorted", last < i); last = i; totalCount++; } } } assertEquals(1100, totalCount); } // reverse given list private static <T> List<T> reversed(List<T> what) { Collections.reverse(what); return what; } // produce random N random ints as list private static List<Integer> sequenceInts(int from, int to) { List<Integer> ret = new ArrayList<>(); for (int i = from; i < to; i++) { ret.add(i); } return ret; } // check that given lists are equal irrespecitve of order public static <T extends Comparable<T>> void assertUnorderedEquals( List<T> first, List<T> second) { List<T> firstCopy = new ArrayList<>(first); List<T> secondCopy = new ArrayList<>(second); Collections.sort(firstCopy); Collections.sort(secondCopy); assertEquals(firstCopy, secondCopy); } @Test(timeout = 5000L) public void testInputMultiConsumption() { final int N = 1000; Dataset<Integer> input = flow.createInput( ListDataSource.unbounded(sequenceInts(0, N))); // ~ consume the input another time Dataset<Integer> map = MapElements .of(input) .using(e -> e) .output(); ListDataSink<Integer> mapOut = ListDataSink.get(1); map.persist(mapOut); Dataset<Pair<Integer, Integer>> sum = ReduceByKey .of(input) .keyBy(e -> 0) .valueBy(e -> e) .reduceBy(Sums.ofInts()) .output(); ListDataSink<Pair<Integer, Integer>> sumOut = ListDataSink.get(1); 
sum.persist(sumOut); executor.waitForCompletion(flow); assertNotNull(sumOut.getOutput(0)); assertEquals(1, sumOut.getOutput(0).size()); assertEquals(Integer.valueOf((N-1) * N / 2), sumOut.getOutput(0).get(0).getSecond()); assertNotNull(mapOut.getOutput(0)); assertEquals(N, mapOut.getOutput(0).size()); assertEquals(Integer.valueOf((N-1) * N / 2), mapOut.getOutput(0).stream().reduce((x, y) -> x + y).get()); } @Test public void testWithWatermarkAndEventTime() throws Exception { int N = 2000; // generate some small ints, use them as event time and count them // in 10s windows Dataset<Integer> input = flow.createInput( ListDataSource.unbounded(sequenceInts(0, N))); ListDataSink<Long> outputs = ListDataSink.get(2); ReduceWindow.of(input) .valueBy(e -> 1L) .combineBy(Sums.ofLongs()) .windowBy(Time.of(Duration.ofSeconds(10)).using(e -> e * 1000L)) .setNumPartitions(1) .output() .persist(outputs); // watermarking 100 ms executor.setTriggeringSchedulerSupplier( () -> new WatermarkTriggerScheduler(100)); // run the executor in separate thread in order to be able to watch // the partial results Thread exec = new Thread(() -> executor.waitForCompletion(flow)); exec.start(); // sleep for one second Thread.sleep(1000L); // the data in first unfinished partition List<Long> output = new ArrayList<>(outputs.getUncommittedOutputs().get(0)); // after one second we should have something about 500 elements read, // this means we should have at least 40 complete windows assertTrue("Should have at least 40 windows, got " + output.size(), 40 <= output.size()); assertTrue("All but (at most) one window should have size 10", output.stream().filter(w -> w != 10).count() <= 1); exec.join(); output = outputs.getOutputs().get(0); output.forEach(w -> assertEquals("Each window should have 10 elements, got " + w, 10L, (long) w)); // we have 2000 elements split into 200 windows assertEquals(200, output.size()); } @Test public void testWithWatermarkAndEventTimeAndDiscarding() throws Exception { int N 
= 2000; // generate some small ints, use them as event time and count them // in 10s windows Dataset<Integer> input = flow.createInput( ListDataSource.unbounded(reversed(sequenceInts(0, N)))); ListDataSink<Long> outputs = ListDataSink.get(2); ReduceWindow.of(input) .valueBy(e -> 1L) .combineBy(Sums.ofLongs()) .windowBy(Time.of(Duration.ofSeconds(10)).using(e -> e * 1000L)) .setNumPartitions(1) .output() .persist(outputs); // watermarking 100 ms executor.setTriggeringSchedulerSupplier( () -> new WatermarkTriggerScheduler(100)); executor.waitForCompletion(flow); // there should be only one element on output - the first element // all other windows are discarded List<Long> output = outputs.getOutputs().get(0); assertEquals(1, output.size()); } @Test public void testWithWatermarkAndEventTimeMixed() throws Exception { int N = 2000; // generate some small ints, use them as event time and count them // in 10s windows Dataset<Integer> input = flow.createInput( ListDataSource.unbounded(sequenceInts(0, N)) .withReadDelay(Duration.ofMillis(2))); // first add some fake operator operating on processing time // doing virtually nothing Dataset<Set<Integer>> reduced = ReduceWindow.of(input) .reduceBy((Iterable<Integer> values) -> { Set<Integer> grp = new TreeSet<>(); for (Integer i : values) { grp.add(i); } return grp; }) .windowBy(Batch.get()) .output(); // explode it back to the original input (maybe reordered) // and store it as the original input, process it further in // the same way as in `testWithWatermarkAndEventTime' input = FlatMap.of(reduced) .using((Set<Integer> grp, Context<Integer> c) -> { for (Integer i : grp) { c.collect(i); } }).output(); ListDataSink<Long> outputs = ListDataSink.get(2); ReduceWindow.of(input) .valueBy(e -> 1L) .combineBy(Sums.ofLongs()) .windowBy(Time.of(Duration.ofSeconds(10)).using(e -> e * 1000L)) .setNumPartitions(1) .output() .persist(outputs); // watermarking 100 ms executor.setTriggeringSchedulerSupplier( () -> new 
WatermarkTriggerScheduler(100)); executor.waitForCompletion(flow); // the data in first unfinished partition List<Long> output = new ArrayList<>(outputs.getUncommittedOutputs().get(0)); // after one second we should have something about 500 elements read, // this means we should have at least 40 complete windows assertTrue("Should have at least 40 windows, got " + output.size(), 40 <= output.size()); assertTrue("All but (at most) one window should have size 10", output.stream().filter(w -> w != 10).count() <= 1); output = outputs.getOutputs().get(0); output.forEach(w -> assertEquals("Each window should have 10 elements, got " + w, 10L, (long) w)); // we have 2000 elements split into 200 windows assertEquals(200, output.size()); } @Test(timeout = 2000) public void testGroupedDatasetReduceByKey() throws Exception { Flow flow = Flow.create("Test"); ListDataSource<Pair<Integer, String>> input = ListDataSource.bounded(Arrays.asList( Pair.of(1, "one"), Pair.of(1, "two"), Pair.of(1, "three"), Pair.of(1, "one"), Pair.of(2, "two"), Pair.of(1, "three"), Pair.of(1, "three"))); Dataset<Pair<Integer, String>> pairs = flow.createInput(input); GroupedDataset<Integer, String> grouped = GroupByKey.of(pairs) .keyBy(Pair::getFirst) .valueBy(Pair::getSecond) .output(); Dataset<Pair<CompositeKey<Integer, String>, Long>> output = ReduceByKey.of(grouped) .keyBy(e -> e) .valueBy(e -> 1L) .combineBy(Sums.ofLongs()) .output(); ListDataSink<Pair<CompositeKey<Integer, String>, Long>> out = ListDataSink.get(1); output.persist(out); InMemExecutor executor = new InMemExecutor(); executor.waitForCompletion(flow); assertUnorderedEquals( Arrays.asList("1-one:2", "1-two:1", "1-three:3", "2-two:1"), out.getOutput(0).stream().map(p -> { assertEquals(Integer.class, p.getFirst().getFirst().getClass()); assertEquals(String.class, p.getFirst().getSecond().getClass()); assertEquals(Long.class, p.getSecond().getClass()); return p.getFirst().getFirst() + "-" + p.getFirst().getSecond() + ":" + p.getSecond(); 
}).collect(Collectors.toList())); } }
#! [euphoria-inmem] Enable sort-state test
sdks/java/extensions/euphoria/euphoria-core/src/test/java/cz/seznam/euphoria/core/executor/inmem/InMemExecutorTest.java
#! [euphoria-inmem] Enable sort-state test
<ide><path>dks/java/extensions/euphoria/euphoria-core/src/test/java/cz/seznam/euphoria/core/executor/inmem/InMemExecutorTest.java <del> <ide> package cz.seznam.euphoria.core.executor.inmem; <ide> <ide> import cz.seznam.euphoria.core.client.dataset.Dataset; <ide> import cz.seznam.euphoria.core.client.dataset.GroupedDataset; <ide> import cz.seznam.euphoria.core.client.dataset.windowing.Batch; <del>import cz.seznam.euphoria.core.client.dataset.windowing.MergingWindowing; <ide> import cz.seznam.euphoria.core.client.dataset.windowing.Time; <add>import cz.seznam.euphoria.core.client.dataset.windowing.Window; <ide> import cz.seznam.euphoria.core.client.dataset.windowing.WindowedElement; <add>import cz.seznam.euphoria.core.client.dataset.windowing.Windowing; <ide> import cz.seznam.euphoria.core.client.flow.Flow; <ide> import cz.seznam.euphoria.core.client.functional.UnaryFunction; <ide> import cz.seznam.euphoria.core.client.functional.UnaryFunctor; <ide> import cz.seznam.euphoria.core.client.operator.state.ListStorageDescriptor; <ide> import cz.seznam.euphoria.core.client.operator.state.State; <ide> import cz.seznam.euphoria.core.client.operator.state.StorageProvider; <add>import cz.seznam.euphoria.core.client.operator.state.ValueStorage; <add>import cz.seznam.euphoria.core.client.operator.state.ValueStorageDescriptor; <add>import cz.seznam.euphoria.core.client.triggers.Trigger; <add>import cz.seznam.euphoria.core.client.triggers.TriggerContext; <ide> import cz.seznam.euphoria.core.client.util.Pair; <ide> import cz.seznam.euphoria.core.client.util.Sums; <ide> import cz.seznam.euphoria.core.client.util.Triple; <ide> import cz.seznam.euphoria.guava.shaded.com.google.common.collect.Lists; <add>import cz.seznam.euphoria.guava.shaded.com.google.common.collect.Sets; <ide> import org.junit.After; <ide> import org.junit.Before; <ide> import org.junit.Test; <ide> import java.time.Duration; <ide> import java.util.ArrayList; <ide> import java.util.Arrays; <del>import 
java.util.Collection; <ide> import java.util.Collections; <add>import java.util.Comparator; <ide> import java.util.HashMap; <del>import java.util.HashSet; <ide> import java.util.List; <ide> import java.util.Map; <ide> import java.util.Set; <ide> import java.util.TreeSet; <del>import java.util.concurrent.atomic.AtomicInteger; <ide> import java.util.stream.Collectors; <ide> <ide> import static org.junit.Assert.*; <ide> public void close() { <ide> data.clear(); <ide> } <del> <del> } <del> <del> static class CountLabel { <del> final int count; <del> int get() { return count; } <del> // on purpose no hashcode or equals <del> CountLabel(int count) { this.count = count; } <del> @Override <del> public String toString() { return String.valueOf(count); } <del> } <del> <del> <del>// XXX <del>// private static class CountWindowContext <del>// extends WindowContext<CountLabel> { <del>// <del>// final int maxSize; <del>// int size = 1; <del>// <del>// public CountWindowContext(WindowID<CountLabel> wid, int maxSize) { <del>// super(wid); <del>// this.maxSize = maxSize; <del>// } <del>// <del>// public CountWindowContext(int maxSize) { <del>// this(new WindowID(new CountLabel(maxSize)), maxSize); <del>// } <del>// <del>// public CountWindowContext(CountLabel label) { <del>// this(new WindowID(label), label.get()); <del>// } <del>// <del>// <del>// @Override <del>// public String toString() { <del>// return "CountWindowContext(" + getWindowID() + ", " + size + ")"; <del>// } <del>// <del>// <del>// } <del>// <del>// <del>// static class SizedCountWindowing<T> implements <del>// MergingWindowing<T, CountLabel, CountWindowContext> { <del>// <del>// final UnaryFunction<T, Integer> size; <del>// <del>// SizedCountWindowing(UnaryFunction<T, Integer> size) { <del>// this.size = size; <del>// } <del>// <del>// @Override <del>// public Collection<Pair<Collection<CountWindowContext>, CountWindowContext>> mergeWindows( <del>// Collection<CountWindowContext> actives) { <del>// <del>// // we 
will merge together only windows with the same window size <del>// <del>// List<Pair<Collection<CountWindowContext>, CountWindowContext>> ret <del>// = new ArrayList<>(); <del>// <del>// Map<Integer, List<CountWindowContext>> toMergeMap = new HashMap<>(); <del>// Map<Integer, AtomicInteger> currentSizeMap = new HashMap<>(); <del>// <del>// for (CountWindowContext w : actives) { <del>// final int wSize = w.maxSize; <del>// AtomicInteger currentSize = currentSizeMap.get(wSize); <del>// if (currentSize == null) { <del>// currentSize = new AtomicInteger(0); <del>// currentSizeMap.put(wSize, currentSize); <del>// toMergeMap.put(wSize, new ArrayList<>()); <del>// } <del>// if (currentSize.get() + w.size <= wSize) { <del>// currentSize.addAndGet(w.size); <del>// toMergeMap.get(wSize).add(w); <del>// } else { <del>// List<CountWindowContext> toMerge = toMergeMap.get(wSize); <del>// if (!toMerge.isEmpty()) { <del>// CountWindowContext res = new CountWindowContext(currentSize.get()); <del>// res.size = currentSize.get(); <del>// ret.add(Pair.of(new ArrayList<>(toMerge), res)); <del>// toMerge.clear(); <del>// } <del>// toMerge.add(w); <del>// currentSize.set(w.size); <del>// } <del>// } <del>// <del>// for (List<CountWindowContext> toMerge : toMergeMap.values()) { <del>// if (!toMerge.isEmpty()) { <del>// CountWindowContext first = toMerge.get(0); <del>// CountWindowContext res = new CountWindowContext(first.maxSize); <del>// res.size = currentSizeMap.get(first.maxSize).get(); <del>// ret.add(Pair.of(toMerge, res)); <del>// } <del>// } <del>// return ret; <del>// } <del>// <del>// @Override <del>// public Set<WindowID<CountLabel>> assignWindowsToElement(WindowedElement<?, T> input) { <del>// int sz = size.apply(input.get()); <del>// return new HashSet<>(Arrays.asList( <del>// new WindowID<>(new CountLabel(sz)), <del>// new WindowID<>(new CountLabel(2 * sz)))); <del>// } <del>// <del>// <del>// @Override <del>// public boolean isComplete(CountWindowContext window) { <del>// 
return window.size == window.maxSize; <del>// } <del>// <del>// @Override <del>// public CountWindowContext createWindowContext(WindowID<CountLabel> wid) { <del>// return new CountWindowContext(wid, wid.getLabel().get()); <del>// } <del>// <del>// } <del>// <del>// <del>// @Test <del>// public void testReduceByKeyWithSortStateAndCustomWindowing() { <del>// Dataset<Integer> ints = flow.createInput( <del>// ListDataSource.unbounded( <del>// reversed(sequenceInts(0, 100)), <del>// reversed(sequenceInts(100, 1100)))); <del>// <del>// SizedCountWindowing<Integer> windowing = <del>// new SizedCountWindowing<>(i -> (i % 10) + 1); <del>// <del>// // the key for sort will be the last digit <del>// Dataset<Pair<Integer, Integer>> output = <del>// ReduceStateByKey.of(ints) <del>// .keyBy(i -> i % 10) <del>// .valueBy(e -> e) <del>// .stateFactory(SortState::new) <del>// .combineStateBy(SortState::combine) <del>// .windowBy(windowing) <del>// .output(); <del>// <del>// // collector of outputs <del>// ListDataSink<Triple<CountLabel, Integer, Integer>> outputSink = ListDataSink.get(2); <del>// <del>// FlatMap.of(output) <del>// .using((UnaryFunctor<Pair<Integer, Integer>, Triple<CountLabel, Integer, Integer>>) <del>// (elem, context) -> context.collect(Triple.of((CountLabel) context.getWindow(), elem.getFirst(), elem.getSecond()))) <del>// .output() <del>// .persist(outputSink); <del>// <del>// executor.waitForCompletion(flow); <del>// <del>// List<List<Triple<CountLabel, Integer, Integer>>> outputs = outputSink.getOutputs(); <del>// assertEquals(2, outputs.size()); <del>// <del>// // each partition should have 550 items in each window set <del>// assertEquals(2 * 550, outputs.get(0).size()); <del>// assertEquals(2 * 550, outputs.get(1).size()); <del>// <del>// Set<Integer> firstKeys = outputs.get(0).stream() <del>// .map(Triple::getSecond).distinct() <del>// .collect(Collectors.toSet()); <del>// <del>// // validate that the two partitions contain different keys <del>// 
outputs.get(1).forEach(p -> assertFalse(firstKeys.contains(p.getSecond()))); <del>// <del>// outputs.forEach(this::checkKeyAlignedSortedList); <del>// <del>// } <del> <add> } // ~ end of SortState <add> <add> static class SizedCountWindow extends Window { <add> final int size; <add> <add> int get() { <add> return size; <add> } <add> <add> SizedCountWindow(int size) { <add> this.size = size; <add> } <add> <add> @Override <add> public String toString() { <add> return String.valueOf(size); <add> } <add> <add> @Override <add> public boolean equals(Object o) { <add> if (o instanceof SizedCountWindow) { <add> SizedCountWindow that = (SizedCountWindow) o; <add> return size == that.size; <add> } <add> return false; <add> } <add> <add> @Override <add> public int hashCode() { <add> return size; <add> } <add> } // ~ end of SizedCountWindow <add> <add> static class SizedCountWindowing<T> <add> implements Windowing<T, SizedCountWindow> { <add> <add> final UnaryFunction<T, Integer> size; <add> <add> SizedCountWindowing(UnaryFunction<T, Integer> size) { <add> this.size = size; <add> } <add> <add> @Override <add> public Set<SizedCountWindow> assignWindowsToElement(WindowedElement<?, T> input) { <add> int sz = size.apply(input.get()); <add> return Sets.newHashSet(new SizedCountWindow(sz), new SizedCountWindow(2 * sz)); <add> } <add> <add> @Override <add> public Trigger<T, SizedCountWindow> getTrigger() { <add> return new SizedCountTrigger<T>(); <add> } <add> } // ~ end of SizedCountWindowing <add> <add> static class SizedCountTrigger<T> implements Trigger<T, SizedCountWindow> { <add> private final ValueStorageDescriptor<Long> countDesc = <add> ValueStorageDescriptor.of("count", Long.class, 0L, (x, y) -> x + y ); <add> <add> @Override <add> public TriggerResult onElement(long time, T element, SizedCountWindow window, TriggerContext ctx) { <add> ValueStorage<Long> cnt = ctx.getValueStorage(countDesc); <add> cnt.set(cnt.get() + 1L); <add> if (cnt.get() >= window.get()) { <add> return 
TriggerResult.FLUSH_AND_PURGE; <add> } <add> return TriggerResult.NOOP; <add> } <add> <add> @Override <add> public TriggerResult onTimeEvent(long time, SizedCountWindow window, <add> TriggerContext ctx) { <add> return TriggerResult.NOOP; <add> } <add> <add> @Override <add> public void onClear(SizedCountWindow window, TriggerContext ctx) { <add> ctx.getValueStorage(countDesc).clear(); <add> } <add> <add> @Override <add> public TriggerResult onMerge(SizedCountWindow window, TriggerContext.TriggerMergeContext ctx) { <add> ctx.mergeStoredState(countDesc); <add> return TriggerResult.NOOP; <add> } <add> } // ~ end of SizedCountTrigger <add> <add> @Test <add> public void testReduceByKeyWithSortStateAndCustomWindowing() { <add> Dataset<Integer> ints = flow.createInput( <add> ListDataSource.unbounded( <add> reversed(sequenceInts(0, 100)), <add> reversed(sequenceInts(100, 1100)))); <add> <add> SizedCountWindowing<Integer> windowing = <add> new SizedCountWindowing<>(i -> (i % 10) + 1); <add> <add> // the key for sort will be the last digit <add> Dataset<Pair<Integer, Integer>> output = <add> ReduceStateByKey.of(ints) <add> .keyBy(i -> i % 10) <add> .valueBy(e -> e) <add> .stateFactory(SortState::new) <add> .combineStateBy(SortState::combine) <add> .windowBy(windowing) <add> .output(); <add> <add> // collector of outputs <add> ListDataSink<Triple<SizedCountWindow, Integer, Integer>> outputSink = ListDataSink.get(2); <add> <add> FlatMap.of(output) <add> .using((UnaryFunctor<Pair<Integer, Integer>, Triple<SizedCountWindow, Integer, Integer>>) <add> (elem, context) -> context.collect(Triple.of((SizedCountWindow) context.getWindow(), elem.getFirst(), elem.getSecond()))) <add> .output() <add> .persist(outputSink); <add> <add> executor.waitForCompletion(flow); <add> <add> List<List<Triple<SizedCountWindow, Integer, Integer>>> outputs = outputSink.getOutputs(); <add> assertEquals(2, outputs.size()); <add> <add> // each partition should have 550 items in each window set <add> 
assertEquals(2 * 550, outputs.get(0).size()); <add> assertEquals(2 * 550, outputs.get(1).size()); <add> <add> Set<Integer> firstKeys = outputs.get(0).stream() <add> .map(Triple::getSecond).distinct() <add> .collect(Collectors.toSet()); <add> <add> // validate that the two partitions contain different keys <add> outputs.get(1).forEach(p -> assertFalse(firstKeys.contains(p.getSecond()))); <add> <add> checkKeyAlignedSortedList(outputs.get(0)); <add> checkKeyAlignedSortedList(outputs.get(1)); <add> } <ide> <ide> private void checkKeyAlignedSortedList( <del> List<Triple<CountLabel, Integer, Integer>> list) { <del> <del> Map<CountLabel, Map<Integer, List<Integer>>> sortedSequencesInWindow = new HashMap<>(); <del> <del> for (Triple<CountLabel, Integer, Integer> p : list) { <del> Map<Integer, List<Integer>> sortedSequences = sortedSequencesInWindow.get( <del> p.getFirst()); <del> if (sortedSequences == null) { <del> sortedSequencesInWindow.put(p.getFirst(), <del> sortedSequences = new HashMap<>()); <add> List<Triple<SizedCountWindow, Integer, Integer>> list) { <add> <add> Map<SizedCountWindow, Map<Integer, List<Integer>>> byWindow = new HashMap<>(); <add> <add> for (Triple<SizedCountWindow, Integer, Integer> p : list) { <add> Map<Integer, List<Integer>> byKey = byWindow.get(p.getFirst()); <add> if (byKey == null) { <add> byWindow.put(p.getFirst(), byKey = new HashMap<>()); <ide> } <del> List<Integer> sorted = sortedSequences.get(p.getSecond()); <add> List<Integer> sorted = byKey.get(p.getSecond()); <ide> if (sorted == null) { <del> sortedSequences.put(p.getSecond(), sorted = new ArrayList<>()); <add> byKey.put(p.getSecond(), sorted = new ArrayList<>()); <ide> } <ide> sorted.add(p.getThird()); <ide> } <ide> <del> assertFalse(sortedSequencesInWindow.isEmpty()); <add> assertFalse(byWindow.isEmpty()); <ide> int totalCount = 0; <del> for (Map.Entry<CountLabel, Map<Integer, List<Integer>>> we : sortedSequencesInWindow.entrySet()) { <del> assertFalse(we.getValue().isEmpty()); 
<del> for (Map.Entry<Integer, List<Integer>> e : we.getValue().entrySet()) { <add> List<SizedCountWindow> iterOrder = <add> byWindow.keySet() <add> .stream() <add> .sorted(Comparator.comparing(SizedCountWindow::get)) <add> .collect(Collectors.toList()); <add> for (SizedCountWindow w : iterOrder) { <add> Map<Integer, List<Integer>> wkeys = byWindow.get(w); <add> assertNotNull(wkeys); <add> assertFalse(wkeys.isEmpty()); <add> for (Map.Entry<Integer, List<Integer>> e : wkeys.entrySet()) { <ide> // now, each list must be sorted <del> int last = -1; <del> for (int i : e.getValue()) { <del> assertTrue("Sequence " + e.getValue() + " is not sorted", last < i); <del> last = i; <del> totalCount++; <add> assertAscendingWindows(e.getValue(), w, e.getKey()); <add> totalCount += e.getValue().size(); <add> } <add> } <add> assertEquals(1100, totalCount); <add> } <add> <add> private static void assertAscendingWindows( <add> List<Integer> xs, SizedCountWindow window, Integer key) { <add> List<List<Integer>> windows = Lists.partition(xs, window.get()); <add> assertFalse(windows.isEmpty()); <add> int totalSeen = 0; <add> for (List<Integer> windowData : windows) { <add> int last = -1; <add> for (int x : windowData) { <add> if (last > x) { <add> fail(String.format("Sequence not ascending for (window: %s / key: %d): %s", <add> window, key, xs)); <ide> } <add> last = x; <add> totalSeen += 1; <ide> } <ide> } <del> assertEquals(1100, totalCount); <add> assertEquals(xs.size(), totalSeen); <ide> } <ide> <ide> // reverse given list
JavaScript
apache-2.0
7bdc9b494d288ea6543653bbe63ba9caef20aed3
0
ilkkao/mas,ilkkao/mas,ilkkao/mas,ilkkao/mas
// // Copyright 2009-2014 Ilkka Oksanen <[email protected]> // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an "AS // IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either // express or implied. See the License for the specific language // governing permissions and limitations under the License. // 'use strict'; /* globals $, FileAPI, emojify, titlenotifier */ import Ember from 'ember'; import { play } from '../../../helpers/sound'; import UploadMixin from '../../../mixins/upload'; export default Ember.Component.extend(UploadMixin, { classNames: [ 'window', 'flex-grow-column' ], attributeBindings: [ 'row:data-row', 'column:data-column', 'desktop:data-desktop' ], classNameBindings: [ 'animating:velocity-animating:', 'expanded:expanded:', 'visible:visible:hidden', 'ircServerWindow:irc-server-window:' ], expanded: false, animating: false, scrolling: false, scrollLock: false, linesAmount: null, deletedLine: false, $messagePanel: null, $images: null, logModeEnabled: false, row: Ember.computed.alias('content.row'), column: Ember.computed.alias('content.column'), desktop: Ember.computed.alias('content.desktop'), selectedDesktop: 0, visible: function() { return this.get('selectedDesktop') === this.get('content.desktop'); }.property('selectedDesktop', 'content.desktop'), windowChanged: function() { this.sendAction('relayout', { animate: true }); }.observes('row', 'column', 'desktop'), visibilityChanged: function() { if (this.get('visible')) { this.set('content.newMessagesCount', 0); // Hidden div can't be scrolled so the scrolling in the linedAdded() observer // hasn't worked if new messages arrived to this window while it was hidden. 
Ember.run.scheduleOnce('afterRender', this, function() { this._goToBottom(false); }); } this.sendAction('relayout', { animate: false }); }.observes('visible'), lineAdded: function() { let messages = this.get('content.messages'); let previousLines = this.get('linesAmount'); this.set('linesAmount', messages.length); if (previousLines && previousLines >= messages.length) { // Line was removed. this.set('deletedLine', true); return; } if (!this.get('scrollLock')) { // Prevents _addScrollHandler to make faulty conclusion. // We need to scroll and we we will after debounce kicks in. this.set('scrolling', true); } Ember.run.debounce(this, function() { // Update images array this.$images = this.$('img[data-src]'); Ember.run.scheduleOnce('afterRender', this, function() { this._goToBottom(true); }); }, 300); // This should be more than duration of goToBottom() scrolling animation let cat = messages[messages.length - 1].cat; // Message that was just added. let importantMessage = cat === 'msg' || cat === 'error' || cat === 'action'; if ((!this.get('visible') || this.get('scrollLock')) && importantMessage) { this.incrementProperty('content.newMessagesCount'); } if (document.hidden && importantMessage) { // Browser title notification if (this.get('content.titleAlert')) { titlenotifier.add(); } // Sound notification if (this.get('content.sounds')) { play(); } } }.observes('content.messages.@each'), ircServerWindow: function() { return this.get('content.userId') === 'iSERVER' ? 
'irc-server-window' : ''; }.property('content.userId'), isGroup: function() { return this.get('content.type') === 'group'; }.property('content.type'), cssType: function() { if (this.get('content.type') === 'group') { return 'group'; } else if (this.get('content.userId') === 'iSERVER') { return 'server-1on1'; } else { return 'private-1on1'; } }.property('content.type'), actions: { expand() { this.set('expanded', true); this.sendAction('relayout', { animate: true }); }, compress() { this.set('expanded', false); this.sendAction('relayout', { animate: true }); }, browse() { this.set('logModeEnabled', true); this.set('expanded', true); this.sendAction('relayout', { animate: true }); }, toggleMemberListWidth() { this.toggleProperty('content.minimizedNamesList'); }, sendMessage() { let message = this.get('newMessage'); if (message) { this.sendAction('action', 'sendMessage', this.content, message); this.set('newMessage', ''); } }, close() { this.sendAction('action', 'close', this.content); }, menu(operation) { this.sendAction('menuAction', operation, this.content); }, jumpToBottom() { this.set('scrollLock', false); this._goToBottom(true); } }, mouseDown(event) { if (!$(event.target).hasClass('fa-arrows')) { return; // Not moving the window } this.sendAction('dragWindowStart', this, event); }, layoutDone() { Ember.run.scheduleOnce('afterRender', this, function() { this._goToBottom(false); }); }, didInsertElement() { let that = this; this.$images = this.$('img[data-src]'); this.$messagePanel = this.$('.window-messages'); this._addScrollHandler(); this.$('.window-caption').tooltip(); this.$messagePanel.tooltip({ selector: '.timestamp', placement: 'right' }); let selectedUserId; this.$('.window-members').contextmenu({ target: '#window-contextMenu', before(e) { let $target = $(e.target); if ($target.hasClass('window-members')) { return false; } e.preventDefault(); let $row = $target.closest('.member-row'); let selectedNick = $row.data('nick'); let avatar = 
$row.find('.gravatar').attr('src'); selectedUserId = $row.data('userid'); this.getMenu().find('li').eq(0).html( '<img class="menu-avatar" src="' + avatar + '">' + selectedNick); // Only MAS users can be added to a contacts list. $('.window-contexMenu-request-friend').toggle(selectedUserId.charAt(0) === 'm'); return true; }, onItem(context, e) { let action = $(e.target).data('action'); that.sendAction('action', action, that.content, selectedUserId); } }); this.$('.window-members').click(function(e) { $(this).contextmenu('show', e); e.preventDefault(); return false; }); let emojisList = $.map(emojify.emojiNames, function(value, i) { return { id: i, name: value }; }); let emojiListTemplate = '<li><img src="/app/assets/images/emoji/${name}.png"> ${name}</li>'; this.$('.form-control').atwho({ at: ':', displayTpl: emojiListTemplate, insertTpl: ':${name}:', data: emojisList, highlightFirst: false, limit: 20 }); function getNick(item) { return item.nick; } let nickList = this.get('content.operatorNames').map(getNick) .concat(this.get('content.voiceNames').map(getNick)) .concat(this.get('content.userNames').map(getNick)); this.$('.form-control').atwho({ at: '@', data: nickList, limit: 10 }); this.$messagePanel.magnificPopup({ type: 'image', delegate: '.user-img', closeOnContentClick: true, image: { verticalFit: false, titleSrc(item) { let href = item.el.attr('href'); return '<small>Link to the original image:</small><a href="' + href + '" target="_blank">' + href + '</a>'; } } }); let fileInput = this.$('.btn-file input')[0]; FileAPI.event.on(fileInput, 'change', function(evt) { let files = FileAPI.getFiles(evt); // Retrieve file list this.send('upload', files, 'jpeg'); }.bind(this)); this.sendAction('relayout', { animate: false }); }, willDestroyElement() { Ember.run.scheduleOnce('afterRender', this, function() { this.sendAction('relayout', { animate: true }); }); }, _goToBottom(animate) { if (this.get('scrollLock')) { return; } // TBD: Animation running and Ember updating 
{{each}} doesn't always seem to mix well // Maybe glimmer fixes that. let duration = animate ? 200 : 0; this.$('.window-messages-end').velocity('stop').velocity('scroll', { container: this.$messagePanel, duration: duration, easing: 'spring', offset: 100, // Shouldn't be needed begin: function() { this.set('scrolling', true); }.bind(this), complete: function() { this.set('scrolling', false); this._showImages(); }.bind(this) }); }, _addScrollHandler() { this.$messagePanel.on('scroll', function() { Ember.run.debounce(this, function() { if (this.get('animating') || this.get('scrolling')) { return; } let $panel = this.$messagePanel; let scrollPos = $panel.scrollTop(); // User doesn't need to scroll exactly to the end. let bottomTreshhold = $panel.prop('scrollHeight') - 5; if (scrollPos + $panel.innerHeight() >= bottomTreshhold) { this.set('scrollLock', false); Ember.Logger.info('scrollock off'); } else if (!this.get('deletedLine')) { this.set('scrollLock', true); Ember.Logger.info('scrollock on'); } this.set('deletedLine', false); // Hack this._showImages(); }, 150); }.bind(this)); }, _showImages() { if (!this.$images) { return; } let placeHolderHeight = 31; let panelHeight = this.$messagePanel.height(); let that = this; this.$images = this.$images.filter(function() { let $img = $(this); // We want to know image's position in .window-messages container div. For position() // to work correctly, .window-messages has to have position set to 'relative'. See // jQuery offsetParent() documentation for details. let pos = $img.position().top; if (pos + placeHolderHeight >= 0 && pos <= panelHeight) { $img.attr('src', $img.data('src')); $img.one('load', function() { $img.removeClass('loader loader-small-dark'); $img.removeAttr('data-src'); that._goToBottom(true); }); $img.one('error', function() { $img.hide(); that._goToBottom(true); }); return false; } return true; }); } });
client/app/pods/components/discussion-window/component.js
// // Copyright 2009-2014 Ilkka Oksanen <[email protected]> // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an "AS // IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either // express or implied. See the License for the specific language // governing permissions and limitations under the License. // 'use strict'; /* globals $, FileAPI, emojify, titlenotifier */ import Ember from 'ember'; import { play } from '../../../helpers/sound'; import UploadMixin from '../../../mixins/upload'; export default Ember.Component.extend(UploadMixin, { classNames: [ 'window', 'flex-grow-column' ], attributeBindings: [ 'row:data-row', 'column:data-column', 'desktop:data-desktop' ], classNameBindings: [ 'animating:velocity-animating:', 'expanded:expanded:', 'visible:visible:hidden', 'ircServerWindow:irc-server-window:' ], expanded: false, animating: false, scrolling: false, scrollLock: false, linesAmount: null, deletedLine: false, $messagePanel: null, $images: null, logModeEnabled: false, row: Ember.computed.alias('content.row'), column: Ember.computed.alias('content.column'), desktop: Ember.computed.alias('content.desktop'), selectedDesktop: 0, visible: function() { return this.get('selectedDesktop') === this.get('content.desktop'); }.property('selectedDesktop', 'content.desktop'), windowChanged: function() { this.sendAction('relayout', { animate: true }); }.observes('row', 'column', 'desktop'), visibilityChanged: function() { if (this.get('visible')) { this.set('content.newMessagesCount', 0); // Hidden div can't be scrolled so the scrolling in the linedAdded() observer // hasn't worked if new messages arrived to this window while it was hidden. 
Ember.run.scheduleOnce('afterRender', this, function() { this._goToBottom(false); }); } this.sendAction('relayout', { animate: false }); }.observes('visible'), lineAdded: function() { let messages = this.get('content.messages'); let previousLines = this.get('linesAmount'); this.set('linesAmount', messages.length); if (previousLines && previousLines >= messages.length) { // Line was removed. this.set('deletedLine', true); return; } if (!this.get('scrollLock')) { // Prevents _addScrollHandler to make faulty conclusion. // We need to scroll and we we will after debounce kicks in. this.set('scrolling', true); } Ember.run.debounce(this, function() { // Update images array this.$images = this.$('img[data-src]'); Ember.run.scheduleOnce('afterRender', this, function() { this._goToBottom(true); }); }, 300); // This should be more than duration of goToBottom() scrolling animation let cat = messages[messages.length - 1].cat; // Message that was just added. let importantMessage = cat === 'msg' || cat === 'error' || cat === 'action'; if ((!this.get('visible') || this.get('scrollLock')) && importantMessage) { this.incrementProperty('content.newMessagesCount'); } if (document.hidden && importantMessage) { // Browser title notification if (this.get('content.titleAlert')) { titlenotifier.add(); } // Sound notification if (this.get('content.sounds')) { play(); } } }.observes('content.messages.@each'), ircServerWindow: function() { return this.get('content.userId') === 'iSERVER' ? 
'irc-server-window' : ''; }.property('content.userId'), isGroup: function() { return this.get('content.type') === 'group'; }.property('content.type'), cssType: function() { if (this.get('content.type') === 'group') { return 'group'; } else if (this.get('content.userId') === 'iSERVER') { return 'server-1on1'; } else { return 'private-1on1'; } }.property('content.type'), actions: { expand() { this.set('expanded', true); this.sendAction('relayout', { animate: true }); }, compress() { this.set('expanded', false); this.sendAction('relayout', { animate: true }); }, browse() { this.set('logModeEnabled', true); this.set('expanded', true); this.sendAction('relayout', { animate: true }); }, toggleMemberListWidth() { this.toggleProperty('content.minimizedNamesList'); }, sendMessage() { let message = this.get('newMessage'); if (message) { this.sendAction('action', 'sendMessage', this.content, message); this.set('newMessage', ''); } }, close() { this.sendAction('action', 'close', this.content); }, menu(operation) { this.sendAction('menuAction', operation, this.content); }, jumpToBottom() { this.set('scrollLock', false); this._goToBottom(true); } }, mouseDown(event) { if (!$(event.target).hasClass('fa-arrows')) { return; // Not moving the window } this.sendAction('dragWindowStart', this, event); }, layoutDone() { Ember.run.scheduleOnce('afterRender', this, function() { this._goToBottom(false); }); }, didInsertElement() { let that = this; this.$messagePanel = this.$('.window-messages'); this._addScrollHandler(); this.$('.window-caption').tooltip(); this.$messagePanel.tooltip({ selector: '.timestamp', placement: 'right' }); let selectedUserId; this.$('.window-members').contextmenu({ target: '#window-contextMenu', before(e) { let $target = $(e.target); if ($target.hasClass('window-members')) { return false; } e.preventDefault(); let $row = $target.closest('.member-row'); let selectedNick = $row.data('nick'); let avatar = $row.find('.gravatar').attr('src'); selectedUserId = 
$row.data('userid'); this.getMenu().find('li').eq(0).html( '<img class="menu-avatar" src="' + avatar + '">' + selectedNick); // Only MAS users can be added to a contacts list. $('.window-contexMenu-request-friend').toggle(selectedUserId.charAt(0) === 'm'); return true; }, onItem(context, e) { let action = $(e.target).data('action'); that.sendAction('action', action, that.content, selectedUserId); } }); this.$('.window-members').click(function(e) { $(this).contextmenu('show', e); e.preventDefault(); return false; }); let emojisList = $.map(emojify.emojiNames, function(value, i) { return { id: i, name: value }; }); let emojiListTemplate = '<li><img src="/app/assets/images/emoji/${name}.png"> ${name}</li>'; this.$('.form-control').atwho({ at: ':', displayTpl: emojiListTemplate, insertTpl: ':${name}:', data: emojisList, highlightFirst: false, limit: 20 }); function getNick(item) { return item.nick; } let nickList = this.get('content.operatorNames').map(getNick) .concat(this.get('content.voiceNames').map(getNick)) .concat(this.get('content.userNames').map(getNick)); this.$('.form-control').atwho({ at: '@', data: nickList, limit: 10 }); this.$messagePanel.magnificPopup({ type: 'image', delegate: '.user-img', closeOnContentClick: true, image: { verticalFit: false, titleSrc(item) { let href = item.el.attr('href'); return '<small>Link to the original image:</small><a href="' + href + '" target="_blank">' + href + '</a>'; } } }); let fileInput = this.$('.btn-file input')[0]; FileAPI.event.on(fileInput, 'change', function(evt) { let files = FileAPI.getFiles(evt); // Retrieve file list this.send('upload', files, 'jpeg'); }.bind(this)); this.sendAction('relayout', { animate: false }); }, willDestroyElement() { Ember.run.scheduleOnce('afterRender', this, function() { this.sendAction('relayout', { animate: true }); }); }, _goToBottom(animate) { if (this.get('scrollLock')) { return; } // TBD: Animation running and Ember updating {{each}} doesn't always seem to mix well // Maybe 
glimmer fixes that. let duration = animate ? 200 : 0; this.$('.window-messages-end').velocity('stop').velocity('scroll', { container: this.$messagePanel, duration: duration, easing: 'spring', offset: 100, // Shouldn't be needed begin: function() { this.set('scrolling', true); }.bind(this), complete: function() { this.set('scrolling', false); this._showImages(); }.bind(this) }); }, _addScrollHandler() { this.$messagePanel.on('scroll', function() { Ember.run.debounce(this, function() { if (this.get('animating') || this.get('scrolling')) { return; } let $panel = this.$messagePanel; let scrollPos = $panel.scrollTop(); // User doesn't need to scroll exactly to the end. let bottomTreshhold = $panel.prop('scrollHeight') - 5; if (scrollPos + $panel.innerHeight() >= bottomTreshhold) { this.set('scrollLock', false); Ember.Logger.info('scrollock off'); } else if (!this.get('deletedLine')) { this.set('scrollLock', true); Ember.Logger.info('scrollock on'); } this.set('deletedLine', false); // Hack this._showImages(); }, 150); }.bind(this)); }, _showImages() { if (!this.$images) { return; } let placeHolderHeight = 31; let panelHeight = this.$messagePanel.height(); let that = this; this.$images = this.$images.filter(function() { let $img = $(this); // We want to know image's position in .window-messages container div. For position() // to work correctly, .window-messages has to have position set to 'relative'. See // jQuery offsetParent() documentation for details. let pos = $img.position().top; if (pos + placeHolderHeight >= 0 && pos <= panelHeight) { $img.attr('src', $img.data('src')); $img.one('load', function() { $img.removeClass('loader loader-small-dark'); $img.removeAttr('data-src'); that._goToBottom(true); }); $img.one('error', function() { $img.hide(); that._goToBottom(true); }); return false; } return true; }); } });
Update images list at window creation phase
client/app/pods/components/discussion-window/component.js
Update images list at window creation phase
<ide><path>lient/app/pods/components/discussion-window/component.js <ide> didInsertElement() { <ide> let that = this; <ide> <add> this.$images = this.$('img[data-src]'); <ide> this.$messagePanel = this.$('.window-messages'); <ide> this._addScrollHandler(); <ide>
Java
mit
25d225350fadb8d84af4e6f7bf1ab1449d9930ea
0
tcdl/msb-java,tcdl/msb-java
package io.github.tcdl; import static io.github.tcdl.events.Event.ERROR_EVENT; import io.github.tcdl.config.MsbMessageOptions; import io.github.tcdl.events.*; import io.github.tcdl.messages.Message; import io.github.tcdl.messages.Message.MessageBuilder; import io.github.tcdl.messages.MessageFactory; import io.github.tcdl.messages.MetaMessage.MetaMessageBuilder; import io.github.tcdl.messages.payload.Payload; import java.util.Objects; import javax.annotation.Nullable; import org.apache.commons.lang3.Validate; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Created by rdro on 4/27/2015. */ public class Requester implements ExtendedEventEmitter { public static final Logger LOG = LoggerFactory.getLogger(Requester.class); private Collector collector; private MessageFactory messageFactory; private Message message; private MetaMessageBuilder metaBuilder; private MessageBuilder messageBuilder; public Requester(MsbMessageOptions config, Message originalMessage) { Validate.notNull(config, "the 'config' must not be null"); this.collector = new Collector(config); this.messageFactory = getMessageFactory(); this.metaBuilder = messageFactory.createMeta(config); this.messageBuilder = messageFactory.createRequestMessage(config, originalMessage); } public void publish(@Nullable Payload requestPayload) { if (requestPayload != null) { messageBuilder.setPayload(requestPayload); } this.message = messageFactory.completeMeta(messageBuilder, metaBuilder); if (collector.isWaitForResponses()) { collector.listenForResponses(message.getTopics().getResponse(), (responseMessage) -> Objects.equals(responseMessage.getCorrelationId(), message.getCorrelationId()) ); } TwoArgsEventHandler<Message, Exception> callback = (message, exception) -> { if (exception != null) { collector.getChannelManager().emit(ERROR_EVENT, exception); LOG.debug("Exception was thrown.", exception); return; } if (!collector.isAwaitingResponses()) collector.end(); collector.enableTimeout(); }; 
collector.getChannelManager().findOrCreateProducer(this.message.getTopics().getTo()) .publish(this.message, callback); } @Override public <A1> Requester on(Event event, SingleArgEventHandler<A1> eventHandler) { collector.getChannelManager().emit(event, eventHandler); return this; } @Override public <A1, A2> Requester on(Event event, TwoArgsEventHandler<A1, A2> eventHandler) { collector.getChannelManager().emit(event, eventHandler); return this; } @Override public <A1, A2, A3> Requester on(Event event, ThreeArgsEventHandler<A1, A2, A3> eventHandler) { collector.getChannelManager().emit(event, eventHandler); return this; } Message getMessage() { return message; } MessageFactory getMessageFactory() { return new MessageFactory(); } boolean isMessageAcknowledged() { return !collector.getAckMessages().isEmpty(); } }
core/src/main/java/io/github/tcdl/Requester.java
package io.github.tcdl; import static io.github.tcdl.events.Event.ERROR_EVENT; import io.github.tcdl.config.MsbMessageOptions; import io.github.tcdl.events.*; import io.github.tcdl.messages.Message; import io.github.tcdl.messages.Message.MessageBuilder; import io.github.tcdl.messages.MessageFactory; import io.github.tcdl.messages.MetaMessage.MetaMessageBuilder; import io.github.tcdl.messages.payload.Payload; import java.util.Objects; import javax.annotation.Nullable; import org.apache.commons.lang3.Validate; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Created by rdro on 4/27/2015. */ public class Requester extends Collector implements ExtendedEventEmitter { public static final Logger LOG = LoggerFactory.getLogger(Requester.class); private MessageFactory messageFactory; private Message message; private MetaMessageBuilder metaBuilder; private MessageBuilder messageBuilder; public Requester(MsbMessageOptions config, Message originalMessage) { super(config); Validate.notNull(config, "the 'config' must not be null"); this.messageFactory = getMessageFactory(); this.metaBuilder = messageFactory.createMeta(config); this.messageBuilder = messageFactory.createRequestMessage(config, originalMessage); } public void publish(@Nullable Payload requestPayload) { if (requestPayload != null) { messageBuilder.setPayload(requestPayload); } this.message = messageFactory.completeMeta(messageBuilder, metaBuilder); if (isWaitForResponses()) { listenForResponses(message.getTopics().getResponse(), (responseMessage) -> Objects.equals(responseMessage.getCorrelationId(), message.getCorrelationId()) ); } TwoArgsEventHandler<Message, Exception> callback = (message, exception) -> { if (exception != null) { channelManager.emit(ERROR_EVENT, exception); LOG.debug("Exception was thrown.", exception); return; } if (!isAwaitingResponses()) end(); enableTimeout(); }; channelManager.findOrCreateProducer(this.message.getTopics().getTo()) .publish(this.message, callback); } @Override 
public <A1> Requester on(Event event, SingleArgEventHandler<A1> eventHandler) { channelManager.emit(event, eventHandler); return this; } @Override public <A1, A2> Requester on(Event event, TwoArgsEventHandler<A1, A2> eventHandler) { channelManager.emit(event, eventHandler); return this; } @Override public <A1, A2, A3> Requester on(Event event, ThreeArgsEventHandler<A1, A2, A3> eventHandler) { channelManager.emit(event, eventHandler); return this; } Message getMessage() { return message; } MessageFactory getMessageFactory() { return new MessageFactory(); } }
resolved conflicts
core/src/main/java/io/github/tcdl/Requester.java
resolved conflicts
<ide><path>ore/src/main/java/io/github/tcdl/Requester.java <ide> /** <ide> * Created by rdro on 4/27/2015. <ide> */ <del>public class Requester extends Collector implements ExtendedEventEmitter { <add>public class Requester implements ExtendedEventEmitter { <ide> <ide> public static final Logger LOG = LoggerFactory.getLogger(Requester.class); <ide> <add> private Collector collector; <ide> private MessageFactory messageFactory; <ide> private Message message; <ide> private MetaMessageBuilder metaBuilder; <ide> private MessageBuilder messageBuilder; <ide> <ide> public Requester(MsbMessageOptions config, Message originalMessage) { <del> super(config); <ide> Validate.notNull(config, "the 'config' must not be null"); <add> this.collector = new Collector(config); <ide> this.messageFactory = getMessageFactory(); <ide> this.metaBuilder = messageFactory.createMeta(config); <ide> this.messageBuilder = messageFactory.createRequestMessage(config, originalMessage); <ide> } <ide> this.message = messageFactory.completeMeta(messageBuilder, metaBuilder); <ide> <del> if (isWaitForResponses()) { <del> listenForResponses(message.getTopics().getResponse(), (responseMessage) -> <add> if (collector.isWaitForResponses()) { <add> collector.listenForResponses(message.getTopics().getResponse(), (responseMessage) -> <ide> Objects.equals(responseMessage.getCorrelationId(), message.getCorrelationId()) <ide> ); <ide> } <ide> <ide> TwoArgsEventHandler<Message, Exception> callback = (message, exception) -> { <ide> if (exception != null) { <del> channelManager.emit(ERROR_EVENT, exception); <add> collector.getChannelManager().emit(ERROR_EVENT, exception); <ide> LOG.debug("Exception was thrown.", exception); <ide> return; <ide> } <ide> <del> if (!isAwaitingResponses()) <del> end(); <del> enableTimeout(); <add> if (!collector.isAwaitingResponses()) <add> collector.end(); <add> collector.enableTimeout(); <ide> }; <del> <del> channelManager.findOrCreateProducer(this.message.getTopics().getTo()) <add> 
<add> collector.getChannelManager().findOrCreateProducer(this.message.getTopics().getTo()) <ide> .publish(this.message, callback); <ide> } <ide> <ide> @Override <ide> public <A1> Requester on(Event event, SingleArgEventHandler<A1> eventHandler) { <del> channelManager.emit(event, eventHandler); <add> collector.getChannelManager().emit(event, eventHandler); <ide> return this; <ide> } <ide> <ide> @Override <ide> public <A1, A2> Requester on(Event event, TwoArgsEventHandler<A1, A2> eventHandler) { <del> channelManager.emit(event, eventHandler); <add> collector.getChannelManager().emit(event, eventHandler); <ide> return this; <ide> } <ide> <ide> @Override <ide> public <A1, A2, A3> Requester on(Event event, ThreeArgsEventHandler<A1, A2, A3> eventHandler) { <del> channelManager.emit(event, eventHandler); <add> collector.getChannelManager().emit(event, eventHandler); <ide> return this; <ide> } <ide> <ide> MessageFactory getMessageFactory() { <ide> return new MessageFactory(); <ide> } <add> <add> boolean isMessageAcknowledged() { <add> return !collector.getAckMessages().isEmpty(); <add> } <ide> }
JavaScript
agpl-3.0
ca6f910f0bb3dcd1b99409f9cd17008690b3fdeb
0
xwiki-labs/cryptpad,xwiki-labs/cryptpad,xwiki-labs/cryptpad
define(function () { var out = {}; out.main_title = "CryptPad: Éditeur collaboratif en temps réel, zero knowledge"; out.main_slogan = "L'unité est la force, la collaboration est la clé"; out.type = {}; out.type.pad = 'Texte'; out.type.code = 'Code'; out.type.poll = 'Sondage'; out.type.slide = 'Présentation'; out.type.drive = 'Drive'; out.type.whiteboard = "Tableau Blanc"; out.type.file = "Fichier"; out.type.media = "Média"; out.button_newpad = 'Nouveau document texte'; out.button_newcode = 'Nouvelle page de code'; out.button_newpoll = 'Nouveau sondage'; out.button_newslide = 'Nouvelle présentation'; out.button_newwhiteboard = 'Nouveau tableau blanc'; out.updated_0_common_connectionLost = "<b>Connexion au serveur perdue</b><br>Vous êtes désormais en mode lecture seule jusqu'au retour de la connexion."; out.common_connectionLost = out.updated_0_common_connectionLost; out.websocketError = 'Impossible de se connecter au serveur WebSocket...'; out.typeError = "Ce pad n'est pas compatible avec l'application sélectionnée"; out.onLogout = 'Vous êtes déconnecté de votre compte utilisateur, <a href="/" target="_blank">cliquez ici</a> pour vous authentifier<br>ou appuyez sur <em>Échap</em> pour accéder au pad en mode lecture seule.'; out.wrongApp = "Impossible d'afficher le contenu de ce document temps-réel dans votre navigateur. 
Vous pouvez essayer de recharger la page."; out.loading = "Chargement..."; out.error = "Erreur"; out.saved = "Enregistré"; out.synced = "Tout est enregistré"; out.disconnected = 'Déconnecté'; out.synchronizing = 'Synchronisation'; out.reconnecting = 'Reconnexion...'; out.lag = 'Latence'; out.readonly = 'Lecture seule'; out.anonymous = "Anonyme"; out.yourself = "Vous-même"; out.anonymousUsers = "éditeurs anonymes"; out.anonymousUser = "éditeur anonyme"; out.users = "Utilisateurs"; out.and = "Et"; out.viewer = "lecteur"; out.viewers = "lecteurs"; out.editor = "éditeur"; out.editors = "éditeurs"; out.language = "Langue"; out.upgrade = "Améliorer"; out.upgradeTitle = "Améliorer votre compte pour augmenter la limite de stockage"; out.MB = "Mo"; out.greenLight = "Tout fonctionne bien"; out.orangeLight = "Votre connexion est lente, ce qui réduit la qualité de l'éditeur"; out.redLight = "Vous êtes déconnectés de la session"; out.pinLimitReached = "Vous avez atteint votre limite de stockage"; out.pinLimitReachedAlert = "Vous avez atteint votre limite de stockage. 
Les nouveaux pads ne seront pas enregistrés dans votre CrypDrive.<br>" + "Pour résoudre ce problème, vous pouvez soit supprimer des pads de votre CryptDrive (y compris la corbeille), soit vous abonner à une offre premium pour augmenter la limite maximale."; out.pinLimitNotPinned = "Vous avez atteint votre limite de stockage.<br>"+ "Ce pad n'est pas enregistré dans votre CryptDrive."; out.pinLimitDrive = out.pinLimitReached+ ".<br>" + "Vous ne pouvez pas créer de nouveaux pads."; out.importButtonTitle = 'Importer un pad depuis un fichier local'; out.exportButtonTitle = 'Exporter ce pad vers un fichier local'; out.exportPrompt = 'Comment souhaitez-vous nommer ce fichier ?'; out.changeNamePrompt = 'Changer votre nom (laisser vide pour rester anonyme) : '; out.user_rename = "Changer le nom affiché"; out.user_displayName = "Nom affiché"; out.user_accountName = "Nom d'utilisateur"; out.clickToEdit = 'Cliquer pour modifier'; out.forgetButtonTitle = 'Déplacer ce pad vers la corbeille'; out.forgetPrompt = 'Cliquer sur OK déplacera ce pad vers la corbeille de votre CryptDrive, êtes-vous sûr ?'; out.movedToTrash = 'Ce pad a été déplacé vers la corbeille.<br><a href="/drive/">Accéder à mon Drive</a>'; out.shareButton = 'Partager'; out.shareSuccess = 'Lien copié dans le presse-papiers'; out.newButton = 'Nouveau'; out.newButtonTitle = 'Créer un nouveau pad'; out.saveTemplateButton = "Sauver en tant que modèle"; out.saveTemplatePrompt = "Choisir un titre pour ce modèle"; out.templateSaved = "Modèle enregistré !"; out.selectTemplate = "Sélectionner un modèle ou appuyer sur Échap"; out.presentButtonTitle = "Entrer en mode présentation"; out.presentSuccess = 'Appuyer sur Échap pour quitter le mode présentation'; out.backgroundButtonTitle = 'Changer la couleur de fond de la présentation'; out.colorButtonTitle = 'Changer la couleur du texte en mode présentation'; out.printButton = "Imprimer (Entrée)"; out.printButtonTitle = "Imprimer votre présentation ou l'enregistrer au format PDF"; 
out.printOptions = "Options de mise en page"; out.printSlideNumber = "Afficher le numéro des slides"; out.printDate = "Afficher la date"; out.printTitle = "Afficher le titre du pad"; out.printCSS = "Personnaliser l'apparence (CSS):"; out.slideOptionsTitle = "Personnaliser la présentation"; out.slideOptionsButton = "Enregistrer (Entrée)"; out.editShare = "Lien d'édition"; out.editShareTitle = "Copier le lien d'édition dans le presse-papiers"; out.editOpen = "Éditer dans un nouvel onglet"; out.editOpenTitle = "Ouvrir le lien d'édition dans un nouvel onglet"; out.viewShare = "Lien de lecture-seule"; out.viewShareTitle = "Copier lien d'accès en lecture seule dans le presse-papiers"; out.viewOpen = "Voir dans un nouvel onglet"; out.viewOpenTitle = "Ouvrir le lien en lecture seule dans un nouvel onglet"; out.notifyJoined = "{0} a rejoint la session collaborative"; out.notifyRenamed = "{0} a changé son nom en {1}"; out.notifyLeft = "{0} a quitté la session collaborative"; out.okButton = 'OK (Entrée)'; out.cancel = "Annuler"; out.cancelButton = 'Annuler (Echap)'; out.historyButton = "Afficher l'historique du document"; out.history_next = "Voir la version suivante"; out.history_prev = "Voir la version précédente"; out.history_goTo = "Voir la version sélectionnée"; out.history_close = "Retour"; out.history_closeTitle = "Fermer l'historique"; out.history_restore = "Restaurer"; out.history_restoreTitle = "Restaurer la version du document sélectionnée"; out.history_restorePrompt = "Êtes-vous sûr de vouloir remplacer la version actuelle du document par la version affichée ?"; out.history_restoreDone = "Document restauré"; out.history_version = "Version :"; // Polls out.poll_title = "Sélecteur de date Zero Knowledge"; out.poll_subtitle = "Planification de rendez-vous et sondages en <em>temps-réel</em> et Zero Knowledge"; out.poll_p_save = "Vos modifications sont mises à jour instantanément, donc vous n'avez jamais besoin de sauver le contenu."; out.poll_p_encryption = "Tout ce 
que vous entrez est chiffré donc seules les personnes possédant le lien du sondage y ont accès. Même le serveur ne peut pas voir le contenu."; out.wizardLog = "Cliquez sur le bouton dans le coin supérieur gauche pour retourner au sondage"; out.wizardTitle = "Utiliser l'assistant pour créer votre sondage"; out.wizardConfirm = "Êtes-vous vraiment prêt à ajouter ces options au sondage ?"; out.poll_publish_button = "Publier"; out.poll_admin_button = "Administrer"; out.poll_create_user = "Ajouter un utilisateur"; out.poll_create_option = "Ajouter une option"; out.poll_commit = "Valider"; out.poll_closeWizardButton = "Fermer l'assistant"; out.poll_closeWizardButtonTitle = "Fermer l'assistant"; out.poll_wizardComputeButton = "Générer les options"; out.poll_wizardClearButton = "Vider le tableau"; out.poll_wizardDescription = "Créer automatiquement des options en entrant des dates et des horaires correspondant"; out.poll_wizardAddDateButton = "+ Dates"; out.poll_wizardAddTimeButton = "+ Horaires"; out.poll_optionPlaceholder = "Option"; out.poll_userPlaceholder = "Votre nom"; out.poll_removeOption = "Êtes-vous sûr de vouloir supprimer cette option ?"; out.poll_removeUser = "Êtes-vous sûr de vouloir supprimer cet utilisateur ?"; out.poll_titleHint = "Titre"; out.poll_descriptionHint = "Description"; // Canvas out.canvas_clear = "Nettoyer"; out.canvas_delete = "Supprimer la sélection"; out.canvas_disable = "Désactiver le dessin"; out.canvas_enable = "Activer le dessin"; out.canvas_width = "Épaisseur"; out.canvas_opacity = "Opacité"; // File manager out.fm_rootName = "Documents"; out.fm_trashName = "Corbeille"; out.fm_unsortedName = "Fichiers non triés"; out.fm_filesDataName = "Tous les fichiers"; out.fm_templateName = "Modèles"; out.fm_searchName = "Recherche"; out.fm_searchPlaceholder = "Rechercher..."; out.fm_newButton = "Nouveau"; out.fm_newButtonTitle = "Créer un nouveau pad ou un dossier"; out.fm_newFolder = "Nouveau dossier"; out.fm_newFile = "Nouveau pad"; out.fm_folder 
= "Dossier"; out.fm_folderName = "Nom du dossier"; out.fm_numberOfFolders = "# de dossiers"; out.fm_numberOfFiles = "# de fichiers"; out.fm_fileName = "Nom du fichier"; out.fm_title = "Titre"; out.fm_type = "Type"; out.fm_lastAccess = "Dernier accès"; out.fm_creation = "Création"; out.fm_forbidden = "Action interdite"; out.fm_originalPath = "Chemin d'origine"; out.fm_openParent = "Montrer dans le dossier"; out.fm_noname = "Document sans titre"; out.fm_emptyTrashDialog = "Êtes-vous sûr de vouloir vider la corbeille ?"; out.fm_removeSeveralPermanentlyDialog = "Êtes-vous sûr de vouloir supprimer ces {0} éléments de manière permanente ?"; out.fm_removePermanentlyDialog = "Êtes-vous sûr de vouloir supprimer cet élément de manière permanente ?"; out.fm_restoreDialog = "Êtes-vous sûr de vouloir restaurer {0} à son emplacement précédent ?"; out.fm_removeSeveralDialog = "Êtes-vous sûr de vouloir déplacer ces {0} éléments vers la corbeille ?"; out.fm_removeDialog = "Êtes-vous sûr de vouloir déplacer {0} vers la corbeille ?"; out.fm_unknownFolderError = "Le dossier sélectionné ou le dernier dossier visité n'existe plus. Ouverture du dossier parent..."; out.fm_contextMenuError = "Impossible d'ouvrir le menu contextuel pour cet élément. Si le problème persiste, essayez de rechercher la page."; out.fm_selectError = "Impossible de sélectionner l'élément ciblé. 
Si le problème persiste, essayez de recharger la page."; out.fm_categoryError = "Impossible d'afficher la catégorie sélectionnée, affichage de Documents"; out.fm_info_root = "Créez ici autant de dossiers que vous le souhaitez pour trier vos fichiers."; out.fm_info_unsorted = 'Contient tous les pads que vous avez ouvert et qui ne sont pas triés dans "Documents" ou déplacés vers la "Corbeille".'; // "My Documents" should match with the "out.fm_rootName" key, and "Trash" with "out.fm_trashName" out.fm_info_template = "Contient tous les fichiers que vous avez sauvés en tant que modèle afin de les réutiliser lors de la création d'un nouveau pad."; out.fm_info_trash = 'Les fichiers supprimés dans la corbeille sont également enlevés de "Tous les fichiers" et il est impossible de les récupérer depuis l\'explorateur de fichiers.'; // Same here for "All files" and "out.fm_filesDataName" out.fm_info_allFiles = 'Contient tous les fichiers de "Documents", "Fichiers non triés" et "Corbeille". Vous ne pouvez pas supprimer ou déplacer des fichiers depuis cet endroit.'; // Same here out.fm_alert_backupUrl = "Lien de secours pour ce disque.<br>" + "Il est <strong>fortement recommandé</strong> de garder ce lien pour vous-même.<br>" + "Elle vous servira en cas de perte des données de votre navigateur afin de retrouver vos fichiers.<br>" + "Quiconque se trouve en possession de celle-ci peut modifier ou supprimer tous les fichiers de ce gestionnaire.<br>"; out.fm_backup_title = 'Lien de secours'; out.fm_nameFile = 'Comment souhaitez-vous nommer ce fichier ?'; // File - Context menu out.fc_newfolder = "Nouveau dossier"; out.fc_rename = "Renommer"; out.fc_open = "Ouvrir"; out.fc_open_ro = "Ouvrir (lecture seule)"; out.fc_delete = "Supprimer"; out.fc_restore = "Restaurer"; out.fc_remove = "Supprimer définitivement"; out.fc_empty = "Vider la corbeille"; out.fc_prop = "Propriétés"; out.fc_sizeInKilobytes = "Taille en kilo-octets"; // fileObject.js (logs) out.fo_moveUnsortedError = "La liste 
des éléments non triés ne peut pas contenir de dossiers."; out.fo_existingNameError = "Ce nom est déjà utilisé dans ce répertoire. Veuillez en choisir un autre."; out.fo_moveFolderToChildError = "Vous ne pouvez pas déplacer un dossier dans un de ses descendants"; out.fo_unableToRestore = "Impossible de restaurer ce fichier à son emplacement d'origine. Vous pouvez essayer de le déplacer à un nouvel emplacement."; out.fo_unavailableName = "Un fichier ou dossier avec le même nom existe déjà au nouvel emplacement. Renommez cet élément avant d'essayer à nouveau."; // login out.login_login = "Connexion"; out.login_makeAPad = 'Créer un pad anonymement'; out.login_nologin = "Voir les pads récents"; out.login_register = "Inscription"; out.logoutButton = "Déconnexion"; out.settingsButton = "Préférences"; out.login_username = "Nom d'utilisateur"; out.login_password = "Mot de passe"; out.login_confirm = "Confirmer votre mot de passe"; out.login_remember = "Se souvenir de moi"; out.login_hashing = "Traitement de vos identifiants, cela peut nécessiter quelques instants."; out.login_hello = 'Bonjour {0},'; // {0} is the username out.login_helloNoName = 'Bonjour,'; out.login_accessDrive = 'Accédez à votre drive'; out.login_orNoLogin = 'ou'; out.login_noSuchUser = "Nom d'utilisateur ou mot de passe invalide. Veuillez vous inscrire ou réessayer."; out.login_invalUser = "Nom d'utilisateur requis"; out.login_invalPass = 'Mot de passe requis'; out.login_unhandledError = "Une erreur inattendue s'est produite :("; out.register_importRecent = "Importer l'historique (Recommendé)"; out.register_acceptTerms = "J'accepte <a href='/terms.html'>les conditions d'utilisation</a>"; out.register_passwordsDontMatch = "Les mots de passe doivent être identiques!"; out.register_mustAcceptTerms = "Vous devez accepter les conditions d'utilisation."; out.register_mustRememberPass = "Nous ne pouvons pas réinitialiser votre mot de passe si vous l'oubliez. C'est important que vous vous en souveniez! 
Veuillez cocher la case pour confirmer."; out.register_writtenPassword = "J'ai bien noté mon nom d'utilisateur et mon mot de passe, continuer"; out.register_cancel = "Retour"; out.register_warning = "Zero Knowledge signifie que nous ne pouvons pas récupérer vos données si vous perdez vos identifiants."; out.register_alreadyRegistered = "Cet utilisateur existe déjà, souhaitez-vous vous connecter ?"; out.register_header = "Bienvenue dans CryptPad"; out.register_explanation = [ "<p>Faisons d'abord le point sur certaines choses</p>", "<ul>", "<li>Votre mot de passe est la clé secrète de tous vos pads. Si vous le perdez, il n'y a aucun moyen de récupérer vos données.</li>", "<li>Vous pouvez importer les pads récents de ce navigateur pour les avoir dans votre compte utilisateur.</li>", "<li>Si vous utilisez un ordinateur partagé, vous devez vous déconnecter avant de partir, fermer l'onglet n'est pas suffisant.</li>", "</ul>" ]; // Settings out.settings_title = "Préférences"; out.settings_save = "Sauver"; out.settings_backupTitle = "Créer ou restaurer une sauvegarde de vos données"; out.settings_backup = "Créer une sauvegarde"; out.settings_restore = "Restaurer une sauvegarde"; out.settings_resetTitle = "Vider votre drive"; out.settings_reset = "Supprimer tous les fichiers et dossiers de votre CryptDrive"; out.settings_resetPrompt = "Cette action va supprimer tous les pads de votre drive.<br>"+ "Êtes-vous sûr de vouloir continuer ?<br>" + "Tapez “<em>I love CryptPad</em>” pour confirmer."; out.settings_resetDone = "Votre drive est désormais vide!"; out.settings_resetError = "Texte de vérification incorrect. 
Votre CryptDrive n'a pas été modifié."; out.settings_resetTips = "Astuces et informations dans CryptDrive"; out.settings_resetTipsButton = "Réinitialiser les astuces visibles dans CryptDrive"; out.settings_resetTipsDone = "Toutes les astuces sont de nouveau visibles."; out.settings_importTitle = "Importer les pads récents de ce navigateur dans mon CryptDrive"; out.settings_import = "Importer"; out.settings_importConfirm = "Êtes-vous sûr de vouloir importer les pads récents de ce navigateur dans le CryptDrive de votre compte utilisateur ?"; out.settings_importDone = "Importation terminée"; out.settings_userFeedbackHint1 = "CryptPad peut envoyer des retours d'expérience très limités vers le serveur, de manière à nous permettre d'améliorer l'expérience des utilisateurs."; out.settings_userFeedbackHint2 = "Le contenu de vos pads et les clés de déchiffrement ne seront jamais partagés avec le serveur."; out.settings_userFeedback = "Activer l'envoi de retours d'expérience"; out.settings_anonymous = "Vous n'êtes pas connectés. Ces préférences seront utilisées pour ce navigateur."; out.settings_publicSigningKey = "Clé publique de signature"; out.settings_usage = "Utilisation"; out.settings_usageTitle = "Voir la taille totale de vos pads épinglés en Mo"; out.settings_pinningNotAvailable = "Les pads épinglés sont disponibles uniquement pour les utilisateurs enregistrés."; out.settings_pinningError = "Un problème est survenu"; out.settings_usageAmount = "Vos pads épinglés occupent {0} Mo"; out.settings_logoutEverywhereTitle = "Se déconnecter partout"; out.settings_logoutEverywhere = "Se déconnecter de toutes les autres sessions."; out.settings_logoutEverywhereConfirm = "Êtes-vous sûr ? 
Vous devrez vous reconnecter sur tous vos autres appareils."; // index.html //about.html out.main_p2 = 'Ce projet utilise l\'éditeur visuel (WYSIWYG) <a href="http://ckeditor.com/">CKEditor</a>, l\'éditeur de code source <a href="https://codemirror.net/">CodeMirror</a>, et le moteur temps-réel <a href="https://github.com/xwiki-contrib/chainpad">ChainPad</a>.'; out.main_howitworks_p1 = 'CryptPad utilise une variante de l\'algorithme d\'<a href="https://en.wikipedia.org/wiki/Operational_transformation">Operational transformation</a> qui est capable de trouver un consensus distribué en utilisant <a href="https://bitcoin.org/bitcoin.pdf">une chaîne de bloc Nakamoto</a>, un outil popularisé par le <a href="https://fr.wikipedia.org/wiki/Bitcoin">Bitcoin</a>. De cette manière, l\'algorithme évite la nécessité d\'utiliser un serveur central pour résoudre les conflits d\'édition de l\'Operational Transformation, et sans ce besoin de résolution des conflits le serveur peut rester ignorant du contenu qui est édité dans le pad.'; //contact.html out.main_about_p2 = 'Si vous avez des questions ou commentaires, vous pouvez <a href="https://twitter.com/cryptpad">nous tweeter</a>, ouvrir une issue sur <a href="https://github.com/xwiki-labs/cryptpad/issues/" title="our issue tracker">Github</a>, venir dire bonjour sur IRC (<a href="http://webchat.freenode.net?channels=%23cryptpad&uio=MT1mYWxzZSY5PXRydWUmMTE9Mjg3JjE1PXRydWUe7" title="freenode webchat">irc.freenode.net</a>), ou <a href="mailto:[email protected]">nous envoyer un email</a>.'; out.main_info = "<h2>Collaborez avec confiance</h2><br>Développez vos idées en groupe avec des document partagés; la technologie <strong>Zero Knowledge</strong> sécurise vos données."; out.main_howitworks = 'Comment ça fonctionne'; out.main_zeroKnowledge = 'Zero Knowledge'; out.main_zeroKnowledge_p = "Vous n'avez pas besoin de croire que nous n'<em>allons</em> pas regarder vos pads. 
Avec la technologie Zero Knowledge de CryptPad, nous ne <em>pouvons</em> pas le faire. Apprenez-en plus sur notre manière de <a href=\"privacy.html\" title='Protection des données'>protéger vos données</a>."; out.main_writeItDown = 'Prenez-en note'; out.main_writeItDown_p = "Les plus grands projets naissent des plus petites idées. Prenez note de vos moments d'inspiration et de vos idées inattendues car vous ne savez pas lesquels seront des découvertes capitales."; out.main_share = 'Partager le lien, partager le pad'; out.main_share_p = "Faites croître vos idées à plusieurs : réalisez des réunions efficaes, collaborez sur vos listes de tâches et réalisez des présentations rapide avec tous vos amis sur tous vos appareils."; out.main_organize = 'Soyez organisés'; out.main_organize_p = "Avec le CryptPad Drive, vous pouvez garder vos vues sur ce qui est important. Les dossiers vous permettent de garder la trace de vos projets et d'avoir une vision globale du travail effectué."; out.tryIt = 'Essayez-le !'; out.main_richText = 'Éditeur de texte'; out.main_richText_p = 'Éditez des documents texte collaborativement avec notre application <a href="http://ckeditor.com" target="_blank">CkEditor</a> temps-réel et Zero Knowledge.'; out.main_code = 'Éditeur de code'; out.main_code_p = 'Modifier votre code collaborativement grâce à notre application <a href="https://www.codemirror.net" target="_blank">CodeMirror</a> temps-réel et Zero Knowledge.'; out.main_slide = 'Présentations'; out.main_slide_p = 'Créez vos présentations en syntaxe Markdown collaborativement de manière sécurisée et affichez les dans votre navigateur.'; out.main_poll = 'Sondages'; out.main_poll_p = 'Plannifiez vos réunions ou évènements, ou votez pour la meilleure solution concernant votre problème.'; out.main_drive = 'CryptDrive'; out.footer_applications = "Applications"; out.footer_contact = "Contact"; out.footer_aboutUs = "À propos"; out.about = "À propos"; out.privacy = "Vie privée"; out.contact = "Contact"; 
out.terms = "Conditions"; out.blog = "Blog"; // privacy.html out.policy_title = 'Politique de confidentialité de CryptPad'; out.policy_whatweknow = 'Ce que nous savons de vous'; out.policy_whatweknow_p1 = 'En tant qu\'application hébergée sur le web, CryptPad a accès aux meta-données exposées par le protocole HTTP. Ceci inclus votre adresse IP et d\'autres en-têtes HTTP qui peuvent être utilisées pour identifier votre propre navigateur. Vous pouvez voir quelles informations votre navigateur partage en visitant <a target="_blank" rel="noopener noreferrer" href="https://www.whatismybrowser.com/detect/what-http-headers-is-my-browser-sending" title="what http headers is my browser sending">WhatIsMyBrowser.com</a>.'; out.policy_whatweknow_p2 = 'Nous utilisons <a href="https://piwik.org/" target="_blank" rel="noopener noreferrer" title="open source analytics platform">Piwik</a>, une plateforme open source d\'analytique, afin d\'en apprendre plus sur nos utilisateurs. Piwik nous indique comment vous avez trouvé CryptPad, que ce soit par une entrée directe, par un moteur de recherche ou depuis un lien provenant d\'un autre site web tel que Reddit ou Twitter. Nous savons également quand vous visitez le site, sur quels liens vous cliquez dans les pages informatives et combien de temps vous restez sur une page donnée.'; out.policy_howweuse = 'Comment nous utilisons ce que nous apprenons'; out.policy_howweuse_p1 = 'Nous utilisons ces informations pour prendre de meilleures décisions concernant la communication autour de CryptPad, en évaluant le succès de ce qui a été realisé par le passé. 
Les informations concernant votre localisation nous permettent de savoir si nous devons considérer l\'ajout de traductions de CryptPad dans d\'autres langues que l\'anglais.'; out.policy_howweuse_p2 = "Les informations concernant votre navigateur (que ce soit un système d\'exploitation de bureau ou d\'appareil portable) nous aident à prendre des décisions lors de la priorisation des ajouts et améliorations de fonctionnalités. Notre équipe de développement est petite, et nous essayons de prendre des décisions qui amélioreront l\'expérience du plus grand nombre d\'utilisateurs possible."; out.policy_whatwetell = 'Ce que nous dévoilons à d\'autres à propos de vous'; out.policy_whatwetell_p1 = 'Nous ne fournissons aucune information que nous récoltons ou que vous nous fournissez à des tierces parties à moins d\'y être contraints par la loi.'; out.policy_links = 'Liens vers d\'autres sites'; out.policy_links_p1 = 'Ce site contient des liens vers d\'autres sites, certains étant produits par d\'autres organisations. Nous ne sommes responsables des pratiques de confidentialité ou du contenu d\'aucun site externe. 
De manière générale, les liens vers des sites externes sont lancés dans une nouvelle fenêtre (ou onglet) du navigateur, pour rendre clair le fait que vous quittez CryptpPad.fr.'; out.policy_ads = 'Publicité'; out.policy_ads_p1 = 'Nous n\'affichons pas de publicité en ligne, bien que nous puissions afficher des liens vers les sites des organisations qui financent nos recherches.'; out.policy_choices = 'Vos choix'; out.policy_choices_open = 'Notre code est open source, ce qui signifie que vous avez toujours la possibilité d\'héberger votre propre instance de CryptPad.'; out.policy_choices_vpn = 'Si vous souhaitez utiliser notre instance hébergée (cryptpad.fr) mais que vous ne souhaitez pas exposer votre adresse IP, vous pouvez la protéger en utilisant le <a href="https://www.torproject.org/projects/torbrowser.html.en" title="téléchargements du projet Tor" target="_blank" rel="noopener noreferrer">navigateur Tor</a>, ou un <a href="https://riseup.net/fr/vpn" title="VPNs fournis par Riseup" target="_blank" rel="noopener noreferrer">VPN</a>.'; out.policy_choices_ads = 'Si vous souhaitez uniquement bloquer notre plateforme d\'analytique, vous pouvez utiliser un bloqueur de publicités tel que <a href="https://www.eff.org/fr/privacybadger" title="télécharger privacy badger" target="_blank" rel="noopener noreferrer">Privacy Badger</a>.'; // terms.html out.tos_title = "Conditions d'utilisation de CryptPad"; out.tos_legal = "Veuillez ne pas être malveillant, abusif, ou faire quoi que ce soit d'illégal."; out.tos_availability = "Nous espérons que vous trouvez ce service utile, mais nous ne pouvons garantir ses performances et disponibilités. Nous vous recommandons d'exporter vos données régurlièrement."; out.tos_e2ee = "Le contenu sur CryptPad peuvent être lus et modifiés par quiconque est en mesure de deviner ou d'obtenir de quelque manière que ce soit l'identificateur de fragment du pad. 
Nous vous recommandons d'utiliser des technologies de messagerie chiffrées de bout à bout (end-to-end encryption ou e2ee) pour partager les liens, et déclinons toute responsabilité dans le cas ou un tel lien serait divulgué."; out.tos_logs = "Les meta-données fournies par votre navigateur au serveur peuvent être enregistrées dans le but de maintenir le service."; out.tos_3rdparties = "Nous ne fournissons aucune donnée individuelle à des tierces parties à moins d'y être contraints par la loi."; // BottomBar.html out.bottom_france = '<a href="http://www.xwiki.com/fr" target="_blank" rel="noopener noreferrer">Fait avec <img class="bottom-bar-heart" src="/customize/heart.png" alt="amour" /> en <img class="bottom-bar-fr" src="/customize/fr.png" alt="France" /></a>'; out.bottom_support = '<a href="http://labs.xwiki.com/" title="XWiki Labs" target="_blank" rel="noopener noreferrer">Un projet <img src="/customize/logo-xwiki2.png" alt="XWiki SAS" class="bottom-bar-xwiki"/> Labs</a> avec le soutien de <a href="http://ng.open-paas.org/" title="OpenPaaS::ng" target="_blank" rel="noopener noreferrer"> <img src="/customize/openpaasng.png" alt="OpenPaaS-ng" class="bottom-bar-openpaas" /></a>'; // Header.html out.header_france = '<a href="http://www.xwiki.com/fr" target="_blank" rel="noopener noreferrer">Fait avec <img class="bottom-bar-heart" src="/customize/heart.png" alt="amour" /> en <img class="bottom-bar-fr" title="France" alt="France" src="/customize/fr.png" /> par <img src="/customize/logo-xwiki.png" alt="XWiki SAS" class="bottom-bar-xwiki"/></a>'; out.header_support = '<a href="http://ng.open-paas.org/" title="OpenPaaS::ng" target="_blank" rel="noopener noreferrer"> <img src="/customize/openpaasng.png" alt="OpenPaaS-ng" class="bottom-bar-openpaas" /></a>'; out.header_logoTitle = "Aller vers la page d'accueil"; // Initial states out.initialState = [ '<span style="font-size:16px;"><p>', 'Voici <strong>CryptPad</strong>, l\'éditeur collaboratif en temps-réel Zero Knowledge. 
Tout est sauvegardé dés que vous le tapez.', '<br>', 'Partagez le lien vers ce pad avec des amis ou utilisez le bouton <span style="background-color:#5cb85c;color:#ffffff;">&nbsp;Partager&nbsp;</span> pour obtenir le <em>lien de lecture-seule</em>, qui permet la lecture mais non la modification.', '</p>', '<p><span style="color:#808080; font-size: 16px;">', '<em>', 'Lancez-vous, commencez à taper...', '</em></span></p></span>', '<p>&nbsp;<br></p>' ].join(''); out.codeInitialState = [ '/*\n', ' Voici l\'éditeur de code collaboratif et Zero Knowledge de CryptPad.\n', ' Ce que vous tapez ici est chiffré de manière que seules les personnes avec le lien peuvent y accéder.\n', ' Vous pouvez choisir le langage de programmation pour la coloration syntaxique, ainsi que le thème de couleurs, dans le coin supérieur droit.\n', '*/' ].join(''); out.slideInitialState = [ '# CryptSlide\n', '* Voici CryptPad, l\'éditeur collaboratif en temps-réel Zero Knowledge.\n', '* Ce que vous tapez ici est chiffré de manière que seules les personnes avec le lien peuvent y accéder.\n', '* Même le serveur est incapable de voir ce que vous tapez.\n', '* Ce que vous voyez ici, ce que vous entendez, quand vous partez, ça reste ici.\n', '\n', '---', '\n', '# Comment l\'utiliser\n', '1. Écrivez le contenu de votre présentation avec la syntaxe Markdown\n', ' - Apprenez à utiliser markdown en cliquant [ici](http://www.markdowntutorial.com/)\n', '2. Séparez vos slides avec ---\n', '3. 
Cliquez sur la bouton "lecture" pour afficher le résultat en mode présentation', ' - La présentation est mise à jour en temps-réel' ].join(''); out.driveReadmeTitle = "Qu'est-ce que CryptDrive ?"; out.readme_welcome = "Bienvenue dans CryptPad !"; out.readme_p1 = "Bienvenue dans CryptPad, le lieu où vous pouvez prendre des notes seul ou avec des amis."; out.readme_p2 = "Ce pad va vous donner un aperçu de la manière dont vous pouvez utiliser CryptPad pour prendre des notes, les organiser et travailler en groupe sur celles-ci."; out.readme_cat1 = "Découvrez votre CryptDrive"; out.readme_cat1_l1 = "Créer un pad : Dans votre CryptDrive, cliquez sur {0} puis {1} et vous obtenez un nouveau pad."; // 0: New, 1: Rich Text out.readme_cat1_l2 = "Ouvrir des pads depuis votre CryptDrive : Double-cliquez sur l'icone d'un pad pour l'ouvrir."; out.readme_cat1_l3 = "Organiser vos pads : Quand vous êtes connectés, tous les pads auquel vous accédez sont ajoutés dans la section {0} de votre CryptDrive."; // 0: Unsorted files out.readme_cat1_l3_l1 = "Vous pouvez cliquer et faire glisser des fichiers dans des dossiers dans la section {0} de votre CryptDrive, et créer de nouveaux dossiers."; // 0: Documents out.readme_cat1_l3_l2 = "N'hésitez pas à utiliser le clic droit sur les icones puisque des menus sont souvent disponibles."; out.readme_cat1_l4 = "Déplacer des pads vers la corbeille : Vous pouvez cliquer et faire glisser vos pads dans la {0} de la même manière que vous pouvez les déposer dans des dossiers."; // 0: Trash out.readme_cat2 = "Créer des pads comme un pro"; out.edit = "éditer"; out.view = "voir"; out.readme_cat2_l1 = "Le bouton {0} dans votre pad vous permet de donner l'accès à vos collaborateurs que ce soit pour l'{1} ou pour le {2}."; // 0: Share, 1: edit, 2: view out.readme_cat2_l2 = "Vous pouvez changer le titre d'un pad en cliquant sur le crayon"; out.readme_cat3 = "Découvrez les autres applications CryptPad"; out.readme_cat3_l1 = "Avec l'éditeur de code de CryptPad, 
vous pouvez collaborer sur du code comme Javascript ou des langages comme HTML ou Markdown."; out.readme_cat3_l2 = "Avec l'éditeur de présentations de CryptPad, vous pouvez réaliser des présentations rapides en utilisant Markdown"; out.readme_cat3_l3 = "Avec CryptPoll vous pouvez créer rapidement des sondages, et en particulier plannifier des meetings qui rentrent dans l'agenda de tout ceux qui souhaitent participer."; // Tips out.tips = {}; out.tips.lag = "L'icône verte dans le coin supérieur droit montre la qualité de votre connexion Internet vers le serveur CryptPad."; out.tips.shortcuts = "`ctrl+b`, `ctrl+i` et `ctrl+u` sont des raccourcis rapides pour mettre en gras, en italique ou souligner."; out.tips.indent = "Dans les listes à puces ou numérotées, vous pouvez utiliser `Tab` ou `Maj+Tab` pour augmenter ou réduire rapidement l'indentation."; out.tips.title = "Vous pouvez changer le titre de votre pad en cliquant au centre en haut de la page."; out.tips.store = "Dés que vous ouvrez un nouveau pad, il est automatiquement stocké dans votre CryptDrive si vous êtes connectés."; out.tips.marker = "Vous pouvez surligner du texte dans un pad en utilisant l'option \"marker\" dans le menu déroulant des styles."; out.feedback_about = "Si vous lisez ceci, vous vous demandez probablement pourquoi CryptPad envoie des requêtes vers des pages web quand vous realisez certaines actions."; out.feedback_privacy = "Nous prenons au sérieux le respect de votre vie privée, et en même temps nous souhaitons rendre CryptPad très simple à utiliser. Nous utilisons cette page pour comprendre quelles fonctionnalités dans l'interface comptent le plus pour les utilisateurs, en l'appelant avec un paramètre spécifiant quelle action a été réalisée."; out.feedback_optout = "Si vous le souhaitez, vous pouvez désactiver ces requêtes en vous rendant dans <a href='/settings/'>votre page de préférences</a>, où vous trouverez une case à cocher pour désactiver le retour d'expérience."; return out; });
customize.dist/translations/messages.fr.js
define(function () { var out = {}; out.main_title = "CryptPad: Éditeur collaboratif en temps réel, zero knowledge"; out.main_slogan = "L'unité est la force, la collaboration est la clé"; out.type = {}; out.type.pad = 'Texte'; out.type.code = 'Code'; out.type.poll = 'Sondage'; out.type.slide = 'Présentation'; out.type.drive = 'Drive'; out.type.whiteboard = "Tableau Blanc"; out.type.file = "Fichier"; out.type.media = "Média"; out.button_newpad = 'Nouveau document texte'; out.button_newcode = 'Nouvelle page de code'; out.button_newpoll = 'Nouveau sondage'; out.button_newslide = 'Nouvelle présentation'; out.button_newwhiteboard = 'Nouveau tableau blanc'; out.updated_0_common_connectionLost = "<b>Connexion au serveur perdue</b><br>Vous êtes désormais en mode lecture seule jusqu'au retour de la connexion."; out.common_connectionLost = out.updated_0_common_connectionLost; out.websocketError = 'Impossible de se connecter au serveur WebSocket...'; out.typeError = "Ce pad n'est pas compatible avec l'application sélectionnée"; out.onLogout = 'Vous êtes déconnecté de votre compte utilisateur, <a href="/" target="_blank">cliquez ici</a> pour vous authentifier<br>ou appuyez sur <em>Échap</em> pour accéder au pad en mode lecture seule.'; out.wrongApp = "Impossible d'afficher le contenu de ce document temps-réel dans votre navigateur. 
Vous pouvez essayer de recharger la page."; out.loading = "Chargement..."; out.error = "Erreur"; out.saved = "Enregistré"; out.synced = "Tout est enregistré"; out.disconnected = 'Déconnecté'; out.synchronizing = 'Synchronisation'; out.reconnecting = 'Reconnexion...'; out.lag = 'Latence'; out.readonly = 'Lecture seule'; out.anonymous = "Anonyme"; out.yourself = "Vous-même"; out.anonymousUsers = "éditeurs anonymes"; out.anonymousUser = "éditeur anonyme"; out.users = "Utilisateurs"; out.and = "Et"; out.viewer = "lecteur"; out.viewers = "lecteurs"; out.editor = "éditeur"; out.editors = "éditeurs"; out.language = "Langue"; out.upgrade = "Améliorer"; out.upgradeTitle = "Améliorer votre compte pour augmenter la limite de stockage"; out.MB = "Mo"; out.greenLight = "Tout fonctionne bien"; out.orangeLight = "Votre connexion est lente, ce qui réduit la qualité de l'éditeur"; out.redLight = "Vous êtes déconnectés de la session"; out.pinLimitReached = "Vous avez atteint votre limite de stockage"; out.pinLimitReachedAlert = "Vous avez atteint votre limite de stockage. 
Les nouveaux pads ne seront pas enregistrés dans votre CrypDrive.<br>" + "Pour résoudre ce problème, vous pouvez soit supprimer des pads de votre CryptDrive (y compris la corbeille), soit vous abonner à une offre premium pour augmenter la limite maximale."; out.pinLimitNotPinned = "Vous avez atteint votre limite de stockage.<br>"+ "Ce pad n'est pas enregistré dans votre CryptDrive."; out.importButtonTitle = 'Importer un pad depuis un fichier local'; out.exportButtonTitle = 'Exporter ce pad vers un fichier local'; out.exportPrompt = 'Comment souhaitez-vous nommer ce fichier ?'; out.changeNamePrompt = 'Changer votre nom (laisser vide pour rester anonyme) : '; out.user_rename = "Changer le nom affiché"; out.user_displayName = "Nom affiché"; out.user_accountName = "Nom d'utilisateur"; out.clickToEdit = 'Cliquer pour modifier'; out.forgetButtonTitle = 'Déplacer ce pad vers la corbeille'; out.forgetPrompt = 'Cliquer sur OK déplacera ce pad vers la corbeille de votre CryptDrive, êtes-vous sûr ?'; out.movedToTrash = 'Ce pad a été déplacé vers la corbeille.<br><a href="/drive/">Accéder à mon Drive</a>'; out.shareButton = 'Partager'; out.shareSuccess = 'Lien copié dans le presse-papiers'; out.newButton = 'Nouveau'; out.newButtonTitle = 'Créer un nouveau pad'; out.saveTemplateButton = "Sauver en tant que modèle"; out.saveTemplatePrompt = "Choisir un titre pour ce modèle"; out.templateSaved = "Modèle enregistré !"; out.selectTemplate = "Sélectionner un modèle ou appuyer sur Échap"; out.presentButtonTitle = "Entrer en mode présentation"; out.presentSuccess = 'Appuyer sur Échap pour quitter le mode présentation'; out.backgroundButtonTitle = 'Changer la couleur de fond de la présentation'; out.colorButtonTitle = 'Changer la couleur du texte en mode présentation'; out.printButton = "Imprimer (Entrée)"; out.printButtonTitle = "Imprimer votre présentation ou l'enregistrer au format PDF"; out.printOptions = "Options de mise en page"; out.printSlideNumber = "Afficher le numéro des 
slides"; out.printDate = "Afficher la date"; out.printTitle = "Afficher le titre du pad"; out.printCSS = "Personnaliser l'apparence (CSS):"; out.slideOptionsTitle = "Personnaliser la présentation"; out.slideOptionsButton = "Enregistrer (Entrée)"; out.editShare = "Lien d'édition"; out.editShareTitle = "Copier le lien d'édition dans le presse-papiers"; out.editOpen = "Éditer dans un nouvel onglet"; out.editOpenTitle = "Ouvrir le lien d'édition dans un nouvel onglet"; out.viewShare = "Lien de lecture-seule"; out.viewShareTitle = "Copier lien d'accès en lecture seule dans le presse-papiers"; out.viewOpen = "Voir dans un nouvel onglet"; out.viewOpenTitle = "Ouvrir le lien en lecture seule dans un nouvel onglet"; out.notifyJoined = "{0} a rejoint la session collaborative"; out.notifyRenamed = "{0} a changé son nom en {1}"; out.notifyLeft = "{0} a quitté la session collaborative"; out.okButton = 'OK (Entrée)'; out.cancel = "Annuler"; out.cancelButton = 'Annuler (Echap)'; out.historyButton = "Afficher l'historique du document"; out.history_next = "Voir la version suivante"; out.history_prev = "Voir la version précédente"; out.history_goTo = "Voir la version sélectionnée"; out.history_close = "Retour"; out.history_closeTitle = "Fermer l'historique"; out.history_restore = "Restaurer"; out.history_restoreTitle = "Restaurer la version du document sélectionnée"; out.history_restorePrompt = "Êtes-vous sûr de vouloir remplacer la version actuelle du document par la version affichée ?"; out.history_restoreDone = "Document restauré"; out.history_version = "Version :"; // Polls out.poll_title = "Sélecteur de date Zero Knowledge"; out.poll_subtitle = "Planification de rendez-vous et sondages en <em>temps-réel</em> et Zero Knowledge"; out.poll_p_save = "Vos modifications sont mises à jour instantanément, donc vous n'avez jamais besoin de sauver le contenu."; out.poll_p_encryption = "Tout ce que vous entrez est chiffré donc seules les personnes possédant le lien du sondage y ont accès. 
Même le serveur ne peut pas voir le contenu."; out.wizardLog = "Cliquez sur le bouton dans le coin supérieur gauche pour retourner au sondage"; out.wizardTitle = "Utiliser l'assistant pour créer votre sondage"; out.wizardConfirm = "Êtes-vous vraiment prêt à ajouter ces options au sondage ?"; out.poll_publish_button = "Publier"; out.poll_admin_button = "Administrer"; out.poll_create_user = "Ajouter un utilisateur"; out.poll_create_option = "Ajouter une option"; out.poll_commit = "Valider"; out.poll_closeWizardButton = "Fermer l'assistant"; out.poll_closeWizardButtonTitle = "Fermer l'assistant"; out.poll_wizardComputeButton = "Générer les options"; out.poll_wizardClearButton = "Vider le tableau"; out.poll_wizardDescription = "Créer automatiquement des options en entrant des dates et des horaires correspondant"; out.poll_wizardAddDateButton = "+ Dates"; out.poll_wizardAddTimeButton = "+ Horaires"; out.poll_optionPlaceholder = "Option"; out.poll_userPlaceholder = "Votre nom"; out.poll_removeOption = "Êtes-vous sûr de vouloir supprimer cette option ?"; out.poll_removeUser = "Êtes-vous sûr de vouloir supprimer cet utilisateur ?"; out.poll_titleHint = "Titre"; out.poll_descriptionHint = "Description"; // Canvas out.canvas_clear = "Nettoyer"; out.canvas_delete = "Supprimer la sélection"; out.canvas_disable = "Désactiver le dessin"; out.canvas_enable = "Activer le dessin"; out.canvas_width = "Épaisseur"; out.canvas_opacity = "Opacité"; // File manager out.fm_rootName = "Documents"; out.fm_trashName = "Corbeille"; out.fm_unsortedName = "Fichiers non triés"; out.fm_filesDataName = "Tous les fichiers"; out.fm_templateName = "Modèles"; out.fm_searchName = "Recherche"; out.fm_searchPlaceholder = "Rechercher..."; out.fm_newButton = "Nouveau"; out.fm_newButtonTitle = "Créer un nouveau pad ou un dossier"; out.fm_newFolder = "Nouveau dossier"; out.fm_newFile = "Nouveau pad"; out.fm_folder = "Dossier"; out.fm_folderName = "Nom du dossier"; out.fm_numberOfFolders = "# de dossiers"; 
out.fm_numberOfFiles = "# de fichiers"; out.fm_fileName = "Nom du fichier"; out.fm_title = "Titre"; out.fm_type = "Type"; out.fm_lastAccess = "Dernier accès"; out.fm_creation = "Création"; out.fm_forbidden = "Action interdite"; out.fm_originalPath = "Chemin d'origine"; out.fm_openParent = "Montrer dans le dossier"; out.fm_noname = "Document sans titre"; out.fm_emptyTrashDialog = "Êtes-vous sûr de vouloir vider la corbeille ?"; out.fm_removeSeveralPermanentlyDialog = "Êtes-vous sûr de vouloir supprimer ces {0} éléments de manière permanente ?"; out.fm_removePermanentlyDialog = "Êtes-vous sûr de vouloir supprimer cet élément de manière permanente ?"; out.fm_restoreDialog = "Êtes-vous sûr de vouloir restaurer {0} à son emplacement précédent ?"; out.fm_removeSeveralDialog = "Êtes-vous sûr de vouloir déplacer ces {0} éléments vers la corbeille ?"; out.fm_removeDialog = "Êtes-vous sûr de vouloir déplacer {0} vers la corbeille ?"; out.fm_unknownFolderError = "Le dossier sélectionné ou le dernier dossier visité n'existe plus. Ouverture du dossier parent..."; out.fm_contextMenuError = "Impossible d'ouvrir le menu contextuel pour cet élément. Si le problème persiste, essayez de rechercher la page."; out.fm_selectError = "Impossible de sélectionner l'élément ciblé. 
Si le problème persiste, essayez de recharger la page."; out.fm_categoryError = "Impossible d'afficher la catégorie sélectionnée, affichage de Documents"; out.fm_info_root = "Créez ici autant de dossiers que vous le souhaitez pour trier vos fichiers."; out.fm_info_unsorted = 'Contient tous les pads que vous avez ouvert et qui ne sont pas triés dans "Documents" ou déplacés vers la "Corbeille".'; // "My Documents" should match with the "out.fm_rootName" key, and "Trash" with "out.fm_trashName" out.fm_info_template = "Contient tous les fichiers que vous avez sauvés en tant que modèle afin de les réutiliser lors de la création d'un nouveau pad."; out.fm_info_trash = 'Les fichiers supprimés dans la corbeille sont également enlevés de "Tous les fichiers" et il est impossible de les récupérer depuis l\'explorateur de fichiers.'; // Same here for "All files" and "out.fm_filesDataName" out.fm_info_allFiles = 'Contient tous les fichiers de "Documents", "Fichiers non triés" et "Corbeille". Vous ne pouvez pas supprimer ou déplacer des fichiers depuis cet endroit.'; // Same here out.fm_alert_backupUrl = "Lien de secours pour ce disque.<br>" + "Il est <strong>fortement recommandé</strong> de garder ce lien pour vous-même.<br>" + "Elle vous servira en cas de perte des données de votre navigateur afin de retrouver vos fichiers.<br>" + "Quiconque se trouve en possession de celle-ci peut modifier ou supprimer tous les fichiers de ce gestionnaire.<br>"; out.fm_backup_title = 'Lien de secours'; out.fm_nameFile = 'Comment souhaitez-vous nommer ce fichier ?'; // File - Context menu out.fc_newfolder = "Nouveau dossier"; out.fc_rename = "Renommer"; out.fc_open = "Ouvrir"; out.fc_open_ro = "Ouvrir (lecture seule)"; out.fc_delete = "Supprimer"; out.fc_restore = "Restaurer"; out.fc_remove = "Supprimer définitivement"; out.fc_empty = "Vider la corbeille"; out.fc_prop = "Propriétés"; out.fc_sizeInKilobytes = "Taille en kilo-octets"; // fileObject.js (logs) out.fo_moveUnsortedError = "La liste 
des éléments non triés ne peut pas contenir de dossiers."; out.fo_existingNameError = "Ce nom est déjà utilisé dans ce répertoire. Veuillez en choisir un autre."; out.fo_moveFolderToChildError = "Vous ne pouvez pas déplacer un dossier dans un de ses descendants"; out.fo_unableToRestore = "Impossible de restaurer ce fichier à son emplacement d'origine. Vous pouvez essayer de le déplacer à un nouvel emplacement."; out.fo_unavailableName = "Un fichier ou dossier avec le même nom existe déjà au nouvel emplacement. Renommez cet élément avant d'essayer à nouveau."; // login out.login_login = "Connexion"; out.login_makeAPad = 'Créer un pad anonymement'; out.login_nologin = "Voir les pads récents"; out.login_register = "Inscription"; out.logoutButton = "Déconnexion"; out.settingsButton = "Préférences"; out.login_username = "Nom d'utilisateur"; out.login_password = "Mot de passe"; out.login_confirm = "Confirmer votre mot de passe"; out.login_remember = "Se souvenir de moi"; out.login_hashing = "Traitement de vos identifiants, cela peut nécessiter quelques instants."; out.login_hello = 'Bonjour {0},'; // {0} is the username out.login_helloNoName = 'Bonjour,'; out.login_accessDrive = 'Accédez à votre drive'; out.login_orNoLogin = 'ou'; out.login_noSuchUser = "Nom d'utilisateur ou mot de passe invalide. Veuillez vous inscrire ou réessayer."; out.login_invalUser = "Nom d'utilisateur requis"; out.login_invalPass = 'Mot de passe requis'; out.login_unhandledError = "Une erreur inattendue s'est produite :("; out.register_importRecent = "Importer l'historique (Recommendé)"; out.register_acceptTerms = "J'accepte <a href='/terms.html'>les conditions d'utilisation</a>"; out.register_passwordsDontMatch = "Les mots de passe doivent être identiques!"; out.register_mustAcceptTerms = "Vous devez accepter les conditions d'utilisation."; out.register_mustRememberPass = "Nous ne pouvons pas réinitialiser votre mot de passe si vous l'oubliez. C'est important que vous vous en souveniez! 
Veuillez cocher la case pour confirmer."; out.register_writtenPassword = "J'ai bien noté mon nom d'utilisateur et mon mot de passe, continuer"; out.register_cancel = "Retour"; out.register_warning = "Zero Knowledge signifie que nous ne pouvons pas récupérer vos données si vous perdez vos identifiants."; out.register_alreadyRegistered = "Cet utilisateur existe déjà, souhaitez-vous vous connecter ?"; out.register_header = "Bienvenue dans CryptPad"; out.register_explanation = [ "<p>Faisons d'abord le point sur certaines choses</p>", "<ul>", "<li>Votre mot de passe est la clé secrète de tous vos pads. Si vous le perdez, il n'y a aucun moyen de récupérer vos données.</li>", "<li>Vous pouvez importer les pads récents de ce navigateur pour les avoir dans votre compte utilisateur.</li>", "<li>Si vous utilisez un ordinateur partagé, vous devez vous déconnecter avant de partir, fermer l'onglet n'est pas suffisant.</li>", "</ul>" ]; // Settings out.settings_title = "Préférences"; out.settings_save = "Sauver"; out.settings_backupTitle = "Créer ou restaurer une sauvegarde de vos données"; out.settings_backup = "Créer une sauvegarde"; out.settings_restore = "Restaurer une sauvegarde"; out.settings_resetTitle = "Vider votre drive"; out.settings_reset = "Supprimer tous les fichiers et dossiers de votre CryptDrive"; out.settings_resetPrompt = "Cette action va supprimer tous les pads de votre drive.<br>"+ "Êtes-vous sûr de vouloir continuer ?<br>" + "Tapez “<em>I love CryptPad</em>” pour confirmer."; out.settings_resetDone = "Votre drive est désormais vide!"; out.settings_resetError = "Texte de vérification incorrect. 
Votre CryptDrive n'a pas été modifié."; out.settings_resetTips = "Astuces et informations dans CryptDrive"; out.settings_resetTipsButton = "Réinitialiser les astuces visibles dans CryptDrive"; out.settings_resetTipsDone = "Toutes les astuces sont de nouveau visibles."; out.settings_importTitle = "Importer les pads récents de ce navigateur dans mon CryptDrive"; out.settings_import = "Importer"; out.settings_importConfirm = "Êtes-vous sûr de vouloir importer les pads récents de ce navigateur dans le CryptDrive de votre compte utilisateur ?"; out.settings_importDone = "Importation terminée"; out.settings_userFeedbackHint1 = "CryptPad peut envoyer des retours d'expérience très limités vers le serveur, de manière à nous permettre d'améliorer l'expérience des utilisateurs."; out.settings_userFeedbackHint2 = "Le contenu de vos pads et les clés de déchiffrement ne seront jamais partagés avec le serveur."; out.settings_userFeedback = "Activer l'envoi de retours d'expérience"; out.settings_anonymous = "Vous n'êtes pas connectés. 
Ces préférences seront utilisées pour ce navigateur."; out.settings_publicSigningKey = "Clé publique de signature"; out.settings_usage = "Utilisation"; out.settings_usageTitle = "Voir la taille totale de vos pads épinglés en Mo"; out.settings_pinningNotAvailable = "Les pads épinglés sont disponibles uniquement pour les utilisateurs enregistrés."; out.settings_pinningError = "Un problème est survenu"; out.settings_usageAmount = "Vos pads épinglés occupent {0} Mo"; // index.html //about.html out.main_p2 = 'Ce projet utilise l\'éditeur visuel (WYSIWYG) <a href="http://ckeditor.com/">CKEditor</a>, l\'éditeur de code source <a href="https://codemirror.net/">CodeMirror</a>, et le moteur temps-réel <a href="https://github.com/xwiki-contrib/chainpad">ChainPad</a>.'; out.main_howitworks_p1 = 'CryptPad utilise une variante de l\'algorithme d\'<a href="https://en.wikipedia.org/wiki/Operational_transformation">Operational transformation</a> qui est capable de trouver un consensus distribué en utilisant <a href="https://bitcoin.org/bitcoin.pdf">une chaîne de bloc Nakamoto</a>, un outil popularisé par le <a href="https://fr.wikipedia.org/wiki/Bitcoin">Bitcoin</a>. 
De cette manière, l\'algorithme évite la nécessité d\'utiliser un serveur central pour résoudre les conflits d\'édition de l\'Operational Transformation, et sans ce besoin de résolution des conflits le serveur peut rester ignorant du contenu qui est édité dans le pad.'; //contact.html out.main_about_p2 = 'Si vous avez des questions ou commentaires, vous pouvez <a href="https://twitter.com/cryptpad">nous tweeter</a>, ouvrir une issue sur <a href="https://github.com/xwiki-labs/cryptpad/issues/" title="our issue tracker">Github</a>, venir dire bonjour sur IRC (<a href="http://webchat.freenode.net?channels=%23cryptpad&uio=MT1mYWxzZSY5PXRydWUmMTE9Mjg3JjE1PXRydWUe7" title="freenode webchat">irc.freenode.net</a>), ou <a href="mailto:[email protected]">nous envoyer un email</a>.'; out.main_info = "<h2>Collaborez avec confiance</h2><br>Développez vos idées en groupe avec des document partagés; la technologie <strong>Zero Knowledge</strong> sécurise vos données."; out.main_howitworks = 'Comment ça fonctionne'; out.main_zeroKnowledge = 'Zero Knowledge'; out.main_zeroKnowledge_p = "Vous n'avez pas besoin de croire que nous n'<em>allons</em> pas regarder vos pads. Avec la technologie Zero Knowledge de CryptPad, nous ne <em>pouvons</em> pas le faire. Apprenez-en plus sur notre manière de <a href=\"privacy.html\" title='Protection des données'>protéger vos données</a>."; out.main_writeItDown = 'Prenez-en note'; out.main_writeItDown_p = "Les plus grands projets naissent des plus petites idées. 
Prenez note de vos moments d'inspiration et de vos idées inattendues car vous ne savez pas lesquels seront des découvertes capitales."; out.main_share = 'Partager le lien, partager le pad'; out.main_share_p = "Faites croître vos idées à plusieurs : réalisez des réunions efficaes, collaborez sur vos listes de tâches et réalisez des présentations rapide avec tous vos amis sur tous vos appareils."; out.main_organize = 'Soyez organisés'; out.main_organize_p = "Avec le CryptPad Drive, vous pouvez garder vos vues sur ce qui est important. Les dossiers vous permettent de garder la trace de vos projets et d'avoir une vision globale du travail effectué."; out.tryIt = 'Essayez-le !'; out.main_richText = 'Éditeur de texte'; out.main_richText_p = 'Éditez des documents texte collaborativement avec notre application <a href="http://ckeditor.com" target="_blank">CkEditor</a> temps-réel et Zero Knowledge.'; out.main_code = 'Éditeur de code'; out.main_code_p = 'Modifier votre code collaborativement grâce à notre application <a href="https://www.codemirror.net" target="_blank">CodeMirror</a> temps-réel et Zero Knowledge.'; out.main_slide = 'Présentations'; out.main_slide_p = 'Créez vos présentations en syntaxe Markdown collaborativement de manière sécurisée et affichez les dans votre navigateur.'; out.main_poll = 'Sondages'; out.main_poll_p = 'Plannifiez vos réunions ou évènements, ou votez pour la meilleure solution concernant votre problème.'; out.main_drive = 'CryptDrive'; out.footer_applications = "Applications"; out.footer_contact = "Contact"; out.footer_aboutUs = "À propos"; out.about = "À propos"; out.privacy = "Vie privée"; out.contact = "Contact"; out.terms = "Conditions"; out.blog = "Blog"; // privacy.html out.policy_title = 'Politique de confidentialité de CryptPad'; out.policy_whatweknow = 'Ce que nous savons de vous'; out.policy_whatweknow_p1 = 'En tant qu\'application hébergée sur le web, CryptPad a accès aux meta-données exposées par le protocole HTTP. 
Ceci inclus votre adresse IP et d\'autres en-têtes HTTP qui peuvent être utilisées pour identifier votre propre navigateur. Vous pouvez voir quelles informations votre navigateur partage en visitant <a target="_blank" rel="noopener noreferrer" href="https://www.whatismybrowser.com/detect/what-http-headers-is-my-browser-sending" title="what http headers is my browser sending">WhatIsMyBrowser.com</a>.'; out.policy_whatweknow_p2 = 'Nous utilisons <a href="https://piwik.org/" target="_blank" rel="noopener noreferrer" title="open source analytics platform">Piwik</a>, une plateforme open source d\'analytique, afin d\'en apprendre plus sur nos utilisateurs. Piwik nous indique comment vous avez trouvé CryptPad, que ce soit par une entrée directe, par un moteur de recherche ou depuis un lien provenant d\'un autre site web tel que Reddit ou Twitter. Nous savons également quand vous visitez le site, sur quels liens vous cliquez dans les pages informatives et combien de temps vous restez sur une page donnée.'; out.policy_howweuse = 'Comment nous utilisons ce que nous apprenons'; out.policy_howweuse_p1 = 'Nous utilisons ces informations pour prendre de meilleures décisions concernant la communication autour de CryptPad, en évaluant le succès de ce qui a été realisé par le passé. Les informations concernant votre localisation nous permettent de savoir si nous devons considérer l\'ajout de traductions de CryptPad dans d\'autres langues que l\'anglais.'; out.policy_howweuse_p2 = "Les informations concernant votre navigateur (que ce soit un système d\'exploitation de bureau ou d\'appareil portable) nous aident à prendre des décisions lors de la priorisation des ajouts et améliorations de fonctionnalités. 
Notre équipe de développement est petite, et nous essayons de prendre des décisions qui amélioreront l\'expérience du plus grand nombre d\'utilisateurs possible."; out.policy_whatwetell = 'Ce que nous dévoilons à d\'autres à propos de vous'; out.policy_whatwetell_p1 = 'Nous ne fournissons aucune information que nous récoltons ou que vous nous fournissez à des tierces parties à moins d\'y être contraints par la loi.'; out.policy_links = 'Liens vers d\'autres sites'; out.policy_links_p1 = 'Ce site contient des liens vers d\'autres sites, certains étant produits par d\'autres organisations. Nous ne sommes responsables des pratiques de confidentialité ou du contenu d\'aucun site externe. De manière générale, les liens vers des sites externes sont lancés dans une nouvelle fenêtre (ou onglet) du navigateur, pour rendre clair le fait que vous quittez CryptpPad.fr.'; out.policy_ads = 'Publicité'; out.policy_ads_p1 = 'Nous n\'affichons pas de publicité en ligne, bien que nous puissions afficher des liens vers les sites des organisations qui financent nos recherches.'; out.policy_choices = 'Vos choix'; out.policy_choices_open = 'Notre code est open source, ce qui signifie que vous avez toujours la possibilité d\'héberger votre propre instance de CryptPad.'; out.policy_choices_vpn = 'Si vous souhaitez utiliser notre instance hébergée (cryptpad.fr) mais que vous ne souhaitez pas exposer votre adresse IP, vous pouvez la protéger en utilisant le <a href="https://www.torproject.org/projects/torbrowser.html.en" title="téléchargements du projet Tor" target="_blank" rel="noopener noreferrer">navigateur Tor</a>, ou un <a href="https://riseup.net/fr/vpn" title="VPNs fournis par Riseup" target="_blank" rel="noopener noreferrer">VPN</a>.'; out.policy_choices_ads = 'Si vous souhaitez uniquement bloquer notre plateforme d\'analytique, vous pouvez utiliser un bloqueur de publicités tel que <a href="https://www.eff.org/fr/privacybadger" title="télécharger privacy badger" target="_blank" 
rel="noopener noreferrer">Privacy Badger</a>.'; // terms.html out.tos_title = "Conditions d'utilisation de CryptPad"; out.tos_legal = "Veuillez ne pas être malveillant, abusif, ou faire quoi que ce soit d'illégal."; out.tos_availability = "Nous espérons que vous trouvez ce service utile, mais nous ne pouvons garantir ses performances et disponibilités. Nous vous recommandons d'exporter vos données régurlièrement."; out.tos_e2ee = "Le contenu sur CryptPad peuvent être lus et modifiés par quiconque est en mesure de deviner ou d'obtenir de quelque manière que ce soit l'identificateur de fragment du pad. Nous vous recommandons d'utiliser des technologies de messagerie chiffrées de bout à bout (end-to-end encryption ou e2ee) pour partager les liens, et déclinons toute responsabilité dans le cas ou un tel lien serait divulgué."; out.tos_logs = "Les meta-données fournies par votre navigateur au serveur peuvent être enregistrées dans le but de maintenir le service."; out.tos_3rdparties = "Nous ne fournissons aucune donnée individuelle à des tierces parties à moins d'y être contraints par la loi."; // BottomBar.html out.bottom_france = '<a href="http://www.xwiki.com/fr" target="_blank" rel="noopener noreferrer">Fait avec <img class="bottom-bar-heart" src="/customize/heart.png" alt="amour" /> en <img class="bottom-bar-fr" src="/customize/fr.png" alt="France" /></a>'; out.bottom_support = '<a href="http://labs.xwiki.com/" title="XWiki Labs" target="_blank" rel="noopener noreferrer">Un projet <img src="/customize/logo-xwiki2.png" alt="XWiki SAS" class="bottom-bar-xwiki"/> Labs</a> avec le soutien de <a href="http://ng.open-paas.org/" title="OpenPaaS::ng" target="_blank" rel="noopener noreferrer"> <img src="/customize/openpaasng.png" alt="OpenPaaS-ng" class="bottom-bar-openpaas" /></a>'; // Header.html out.header_france = '<a href="http://www.xwiki.com/fr" target="_blank" rel="noopener noreferrer">Fait avec <img class="bottom-bar-heart" src="/customize/heart.png" alt="amour" /> 
en <img class="bottom-bar-fr" title="France" alt="France" src="/customize/fr.png" /> par <img src="/customize/logo-xwiki.png" alt="XWiki SAS" class="bottom-bar-xwiki"/></a>'; out.header_support = '<a href="http://ng.open-paas.org/" title="OpenPaaS::ng" target="_blank" rel="noopener noreferrer"> <img src="/customize/openpaasng.png" alt="OpenPaaS-ng" class="bottom-bar-openpaas" /></a>'; out.header_logoTitle = "Aller vers la page d'accueil"; // Initial states out.initialState = [ '<span style="font-size:16px;"><p>', 'Voici <strong>CryptPad</strong>, l\'éditeur collaboratif en temps-réel Zero Knowledge. Tout est sauvegardé dés que vous le tapez.', '<br>', 'Partagez le lien vers ce pad avec des amis ou utilisez le bouton <span style="background-color:#5cb85c;color:#ffffff;">&nbsp;Partager&nbsp;</span> pour obtenir le <em>lien de lecture-seule</em>, qui permet la lecture mais non la modification.', '</p>', '<p><span style="color:#808080; font-size: 16px;">', '<em>', 'Lancez-vous, commencez à taper...', '</em></span></p></span>', '<p>&nbsp;<br></p>' ].join(''); out.codeInitialState = [ '/*\n', ' Voici l\'éditeur de code collaboratif et Zero Knowledge de CryptPad.\n', ' Ce que vous tapez ici est chiffré de manière que seules les personnes avec le lien peuvent y accéder.\n', ' Vous pouvez choisir le langage de programmation pour la coloration syntaxique, ainsi que le thème de couleurs, dans le coin supérieur droit.\n', '*/' ].join(''); out.slideInitialState = [ '# CryptSlide\n', '* Voici CryptPad, l\'éditeur collaboratif en temps-réel Zero Knowledge.\n', '* Ce que vous tapez ici est chiffré de manière que seules les personnes avec le lien peuvent y accéder.\n', '* Même le serveur est incapable de voir ce que vous tapez.\n', '* Ce que vous voyez ici, ce que vous entendez, quand vous partez, ça reste ici.\n', '\n', '---', '\n', '# Comment l\'utiliser\n', '1. 
Écrivez le contenu de votre présentation avec la syntaxe Markdown\n', ' - Apprenez à utiliser markdown en cliquant [ici](http://www.markdowntutorial.com/)\n', '2. Séparez vos slides avec ---\n', '3. Cliquez sur la bouton "lecture" pour afficher le résultat en mode présentation', ' - La présentation est mise à jour en temps-réel' ].join(''); out.driveReadmeTitle = "Qu'est-ce que CryptDrive ?"; out.readme_welcome = "Bienvenue dans CryptPad !"; out.readme_p1 = "Bienvenue dans CryptPad, le lieu où vous pouvez prendre des notes seul ou avec des amis."; out.readme_p2 = "Ce pad va vous donner un aperçu de la manière dont vous pouvez utiliser CryptPad pour prendre des notes, les organiser et travailler en groupe sur celles-ci."; out.readme_cat1 = "Découvrez votre CryptDrive"; out.readme_cat1_l1 = "Créer un pad : Dans votre CryptDrive, cliquez sur {0} puis {1} et vous obtenez un nouveau pad."; // 0: New, 1: Rich Text out.readme_cat1_l2 = "Ouvrir des pads depuis votre CryptDrive : Double-cliquez sur l'icone d'un pad pour l'ouvrir."; out.readme_cat1_l3 = "Organiser vos pads : Quand vous êtes connectés, tous les pads auquel vous accédez sont ajoutés dans la section {0} de votre CryptDrive."; // 0: Unsorted files out.readme_cat1_l3_l1 = "Vous pouvez cliquer et faire glisser des fichiers dans des dossiers dans la section {0} de votre CryptDrive, et créer de nouveaux dossiers."; // 0: Documents out.readme_cat1_l3_l2 = "N'hésitez pas à utiliser le clic droit sur les icones puisque des menus sont souvent disponibles."; out.readme_cat1_l4 = "Déplacer des pads vers la corbeille : Vous pouvez cliquer et faire glisser vos pads dans la {0} de la même manière que vous pouvez les déposer dans des dossiers."; // 0: Trash out.readme_cat2 = "Créer des pads comme un pro"; out.edit = "éditer"; out.view = "voir"; out.readme_cat2_l1 = "Le bouton {0} dans votre pad vous permet de donner l'accès à vos collaborateurs que ce soit pour l'{1} ou pour le {2}."; // 0: Share, 1: edit, 2: view 
out.readme_cat2_l2 = "Vous pouvez changer le titre d'un pad en cliquant sur le crayon"; out.readme_cat3 = "Découvrez les autres applications CryptPad"; out.readme_cat3_l1 = "Avec l'éditeur de code de CryptPad, vous pouvez collaborer sur du code comme Javascript ou des langages comme HTML ou Markdown."; out.readme_cat3_l2 = "Avec l'éditeur de présentations de CryptPad, vous pouvez réaliser des présentations rapides en utilisant Markdown"; out.readme_cat3_l3 = "Avec CryptPoll vous pouvez créer rapidement des sondages, et en particulier plannifier des meetings qui rentrent dans l'agenda de tout ceux qui souhaitent participer."; // Tips out.tips = {}; out.tips.lag = "L'icône verte dans le coin supérieur droit montre la qualité de votre connexion Internet vers le serveur CryptPad."; out.tips.shortcuts = "`ctrl+b`, `ctrl+i` et `ctrl+u` sont des raccourcis rapides pour mettre en gras, en italique ou souligner."; out.tips.indent = "Dans les listes à puces ou numérotées, vous pouvez utiliser `Tab` ou `Maj+Tab` pour augmenter ou réduire rapidement l'indentation."; out.tips.title = "Vous pouvez changer le titre de votre pad en cliquant au centre en haut de la page."; out.tips.store = "Dés que vous ouvrez un nouveau pad, il est automatiquement stocké dans votre CryptDrive si vous êtes connectés."; out.tips.marker = "Vous pouvez surligner du texte dans un pad en utilisant l'option \"marker\" dans le menu déroulant des styles."; out.feedback_about = "Si vous lisez ceci, vous vous demandez probablement pourquoi CryptPad envoie des requêtes vers des pages web quand vous realisez certaines actions."; out.feedback_privacy = "Nous prenons au sérieux le respect de votre vie privée, et en même temps nous souhaitons rendre CryptPad très simple à utiliser. 
Nous utilisons cette page pour comprendre quelles fonctionnalités dans l'interface comptent le plus pour les utilisateurs, en l'appelant avec un paramètre spécifiant quelle action a été réalisée."; out.feedback_optout = "Si vous le souhaitez, vous pouvez désactiver ces requêtes en vous rendant dans <a href='/settings/'>votre page de préférences</a>, où vous trouverez une case à cocher pour désactiver le retour d'expérience."; return out; });
Add french translations for 'log out everywhere'
customize.dist/translations/messages.fr.js
Add french translations for 'log out everywhere'
<ide><path>ustomize.dist/translations/messages.fr.js <ide> "Pour résoudre ce problème, vous pouvez soit supprimer des pads de votre CryptDrive (y compris la corbeille), soit vous abonner à une offre premium pour augmenter la limite maximale."; <ide> out.pinLimitNotPinned = "Vous avez atteint votre limite de stockage.<br>"+ <ide> "Ce pad n'est pas enregistré dans votre CryptDrive."; <add> out.pinLimitDrive = out.pinLimitReached+ ".<br>" + <add> "Vous ne pouvez pas créer de nouveaux pads."; <ide> <ide> out.importButtonTitle = 'Importer un pad depuis un fichier local'; <ide> <ide> out.settings_pinningError = "Un problème est survenu"; <ide> out.settings_usageAmount = "Vos pads épinglés occupent {0} Mo"; <ide> <add> out.settings_logoutEverywhereTitle = "Se déconnecter partout"; <add> out.settings_logoutEverywhere = "Se déconnecter de toutes les autres sessions."; <add> out.settings_logoutEverywhereConfirm = "Êtes-vous sûr ? Vous devrez vous reconnecter sur tous vos autres appareils."; <add> <ide> // index.html <ide> <ide> //about.html
Java
mpl-2.0
error: pathspec 'qadevOOo/tests/java/ifc/sheet/_XPrintAreas.java' did not match any file(s) known to git
81f61484794513a8d4328853be673bf9ee9e84fa
1
JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core
/************************************************************************* * * $RCSfile: _XPrintAreas.java,v $ * * $Revision: 1.2 $ * * last change: $Date: 2004-11-02 11:56:56 $ * * The Contents of this file are made available subject to the terms of * either of the following licenses * * - GNU Lesser General Public License Version 2.1 * - Sun Industry Standards Source License Version 1.1 * * Sun Microsystems Inc., October, 2000 * * GNU Lesser General Public License Version 2.1 * ============================================= * Copyright 2000 by Sun Microsystems, Inc. * 901 San Antonio Road, Palo Alto, CA 94303, USA * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License version 2.1, as published by the Free Software Foundation. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, * MA 02111-1307 USA * * * Sun Industry Standards Source License Version 1.1 * ================================================= * The contents of this file are subject to the Sun Industry Standards * Source License Version 1.1 (the "License"); You may not use this file * except in compliance with the License. You may obtain a copy of the * License at http://www.openoffice.org/license.html. * * Software provided under this License is provided on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, * WITHOUT LIMITATION, WARRANTIES THAT THE SOFTWARE IS FREE OF DEFECTS, * MERCHANTABLE, FIT FOR A PARTICULAR PURPOSE, OR NON-INFRINGING. 
* See the License for the specific provisions governing your rights and * obligations concerning the Software. * * The Initial Developer of the Original Code is: Sun Microsystems, Inc. * * Copyright: 2000 by Sun Microsystems, Inc. * * All Rights Reserved. * * Contributor(s): _______________________________________ * * ************************************************************************/ package ifc.sheet; import com.sun.star.sheet.XPrintAreas; import com.sun.star.table.CellRangeAddress; import lib.MultiMethodTest; import lib.Status; import lib.StatusException; import util.ValueComparer; /** * */ public class _XPrintAreas extends MultiMethodTest { public XPrintAreas oObj = null; CellRangeAddress address = null; CellRangeAddress subaddress = null; CellRangeAddress titleColumns; CellRangeAddress titleRows; public void before() { address = (CellRangeAddress)tEnv.getObjRelation("CellRangeAddress"); subaddress = (CellRangeAddress)tEnv.getObjRelation("CellRangeSubAddress"); if (address == null) throw new StatusException(Status.failed("Object relation CellRangeAddress not found")); if (subaddress == null) throw new StatusException(Status.failed("Object relation CellRangeSubAddress not found")); } public void _getPrintAreas() { requiredMethod("getPrintTitleColumns()"); requiredMethod("getPrintTitleRows()"); executeMethod("getTitleColumns()"); executeMethod("getTitleRows()"); CellRangeAddress[] printAreas = oObj.getPrintAreas(); CellRangeAddress[] setValue = new CellRangeAddress[]{address}; boolean ret = ValueComparer.equalValue(printAreas, setValue); // delete the print area oObj.setPrintAreas(null); printAreas = oObj.getPrintAreas(); ret &= printAreas.length == 0; tRes.tested("getPrintAreas()", ret); } public void _getPrintTitleColumns() { requiredMethod("setPrintTitleColumns()"); tRes.tested("getPrintTitleColumns()", !oObj.getPrintTitleColumns()); } public void _getPrintTitleRows() { requiredMethod("setPrintTitleRows()"); tRes.tested("getPrintTitleRows()", 
!oObj.getPrintTitleRows()); } public void _getTitleColumns() { requiredMethod("setTitleColumns()"); CellRangeAddress setValue = oObj.getTitleColumns(); tRes.tested("getTitleColumns()", ValueComparer.equalValue(setValue,titleColumns)); } public void _getTitleRows() { requiredMethod("setTitleRows()"); CellRangeAddress setValue = oObj.getTitleRows(); tRes.tested("getTitleRows()", ValueComparer.equalValue(setValue,titleRows)); } public void _setPrintAreas() { boolean ret = false; CellRangeAddress[]setValue = new CellRangeAddress[]{subaddress}; oObj.setPrintAreas(setValue); CellRangeAddress[]newVal = oObj.getPrintAreas(); ret = ValueComparer.equalValue(newVal, setValue); setValue = new CellRangeAddress[]{address}; oObj.setPrintAreas(setValue); newVal = oObj.getPrintAreas(); ret &= ValueComparer.equalValue(newVal, setValue); tRes.tested("setPrintAreas()", ret); } public void _setPrintTitleColumns() { requiredMethod("setTitleColumns()"); boolean ret = false; boolean value = oObj.getPrintTitleColumns(); oObj.setPrintTitleColumns(!value); ret = value != oObj.getPrintTitleColumns(); oObj.setPrintTitleColumns(false); tRes.tested("setPrintTitleColumns()", ret); } public void _setPrintTitleRows() { requiredMethod("setTitleRows()"); boolean ret = false; boolean value = oObj.getPrintTitleRows(); oObj.setPrintTitleRows(!value); ret = value != oObj.getPrintTitleRows(); oObj.setPrintTitleRows(false); tRes.tested("setPrintTitleRows()", ret); } public void _setTitleColumns() { requiredMethod("setPrintAreas()"); boolean ret = false; CellRangeAddress newVal = oObj.getTitleColumns(); ret = ValueComparer.equalValue(newVal, new CellRangeAddress((short)0, 0, 0, 0, 0)); // use first row of range as title column titleColumns = new CellRangeAddress(); titleColumns.Sheet = address.Sheet; titleColumns.StartColumn = address.StartColumn; titleColumns.StartRow = address.StartRow; titleColumns.EndColumn = address.EndColumn; titleColumns.EndRow = address.StartRow; oObj.setTitleColumns(titleColumns); 
tRes.tested("setTitleColumns()", ret); } public void _setTitleRows() { requiredMethod("setPrintAreas()"); boolean ret = false; CellRangeAddress newVal = oObj.getTitleRows(); ret = ValueComparer.equalValue(newVal, new CellRangeAddress((short)0, 0, 0, 0, 0)); // use first column of range as title row titleRows = new CellRangeAddress(); titleRows.Sheet = address.Sheet; titleRows.StartColumn = address.StartColumn; titleRows.StartRow = address.StartRow; titleRows.EndColumn = address.StartColumn; titleRows.EndRow = address.EndRow; oObj.setTitleColumns(titleRows); tRes.tested("setTitleRows()", ret); } }
qadevOOo/tests/java/ifc/sheet/_XPrintAreas.java
INTEGRATION: CWS qadev19 (1.1.2); FILE ADDED 2004/09/17 12:19:36 sg 1.1.2.1: #i23086#NEW: initial version
qadevOOo/tests/java/ifc/sheet/_XPrintAreas.java
INTEGRATION: CWS qadev19 (1.1.2); FILE ADDED 2004/09/17 12:19:36 sg 1.1.2.1: #i23086#NEW: initial version
<ide><path>adevOOo/tests/java/ifc/sheet/_XPrintAreas.java <add>/************************************************************************* <add> * <add> * $RCSfile: _XPrintAreas.java,v $ <add> * <add> * $Revision: 1.2 $ <add> * <add> * last change: $Date: 2004-11-02 11:56:56 $ <add> * <add> * The Contents of this file are made available subject to the terms of <add> * either of the following licenses <add> * <add> * - GNU Lesser General Public License Version 2.1 <add> * - Sun Industry Standards Source License Version 1.1 <add> * <add> * Sun Microsystems Inc., October, 2000 <add> * <add> * GNU Lesser General Public License Version 2.1 <add> * ============================================= <add> * Copyright 2000 by Sun Microsystems, Inc. <add> * 901 San Antonio Road, Palo Alto, CA 94303, USA <add> * <add> * This library is free software; you can redistribute it and/or <add> * modify it under the terms of the GNU Lesser General Public <add> * License version 2.1, as published by the Free Software Foundation. <add> * <add> * This library is distributed in the hope that it will be useful, <add> * but WITHOUT ANY WARRANTY; without even the implied warranty of <add> * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU <add> * Lesser General Public License for more details. <add> * <add> * You should have received a copy of the GNU Lesser General Public <add> * License along with this library; if not, write to the Free Software <add> * Foundation, Inc., 59 Temple Place, Suite 330, Boston, <add> * MA 02111-1307 USA <add> * <add> * <add> * Sun Industry Standards Source License Version 1.1 <add> * ================================================= <add> * The contents of this file are subject to the Sun Industry Standards <add> * Source License Version 1.1 (the "License"); You may not use this file <add> * except in compliance with the License. You may obtain a copy of the <add> * License at http://www.openoffice.org/license.html. 
<add> * <add> * Software provided under this License is provided on an "AS IS" basis, <add> * WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, <add> * WITHOUT LIMITATION, WARRANTIES THAT THE SOFTWARE IS FREE OF DEFECTS, <add> * MERCHANTABLE, FIT FOR A PARTICULAR PURPOSE, OR NON-INFRINGING. <add> * See the License for the specific provisions governing your rights and <add> * obligations concerning the Software. <add> * <add> * The Initial Developer of the Original Code is: Sun Microsystems, Inc. <add> * <add> * Copyright: 2000 by Sun Microsystems, Inc. <add> * <add> * All Rights Reserved. <add> * <add> * Contributor(s): _______________________________________ <add> * <add> * <add> ************************************************************************/ <add>package ifc.sheet; <add> <add>import com.sun.star.sheet.XPrintAreas; <add>import com.sun.star.table.CellRangeAddress; <add>import lib.MultiMethodTest; <add>import lib.Status; <add>import lib.StatusException; <add>import util.ValueComparer; <add> <add>/** <add> * <add> */ <add>public class _XPrintAreas extends MultiMethodTest { <add> public XPrintAreas oObj = null; <add> CellRangeAddress address = null; <add> CellRangeAddress subaddress = null; <add> CellRangeAddress titleColumns; <add> CellRangeAddress titleRows; <add> <add> public void before() { <add> address = (CellRangeAddress)tEnv.getObjRelation("CellRangeAddress"); <add> subaddress = (CellRangeAddress)tEnv.getObjRelation("CellRangeSubAddress"); <add> if (address == null) <add> throw new StatusException(Status.failed("Object relation CellRangeAddress not found")); <add> if (subaddress == null) <add> throw new StatusException(Status.failed("Object relation CellRangeSubAddress not found")); <add> } <add> <add> public void _getPrintAreas() { <add> requiredMethod("getPrintTitleColumns()"); <add> requiredMethod("getPrintTitleRows()"); <add> executeMethod("getTitleColumns()"); <add> executeMethod("getTitleRows()"); <add> CellRangeAddress[] 
printAreas = oObj.getPrintAreas(); <add> CellRangeAddress[] setValue = new CellRangeAddress[]{address}; <add> boolean ret = ValueComparer.equalValue(printAreas, setValue); <add> // delete the print area <add> oObj.setPrintAreas(null); <add> printAreas = oObj.getPrintAreas(); <add> ret &= printAreas.length == 0; <add> <add> tRes.tested("getPrintAreas()", ret); <add> } <add> <add> public void _getPrintTitleColumns() { <add> requiredMethod("setPrintTitleColumns()"); <add> tRes.tested("getPrintTitleColumns()", !oObj.getPrintTitleColumns()); <add> } <add> <add> public void _getPrintTitleRows() { <add> requiredMethod("setPrintTitleRows()"); <add> tRes.tested("getPrintTitleRows()", !oObj.getPrintTitleRows()); <add> } <add> <add> public void _getTitleColumns() { <add> requiredMethod("setTitleColumns()"); <add> CellRangeAddress setValue = oObj.getTitleColumns(); <add> tRes.tested("getTitleColumns()", ValueComparer.equalValue(setValue,titleColumns)); <add> } <add> <add> public void _getTitleRows() { <add> requiredMethod("setTitleRows()"); <add> CellRangeAddress setValue = oObj.getTitleRows(); <add> tRes.tested("getTitleRows()", ValueComparer.equalValue(setValue,titleRows)); <add> } <add> <add> public void _setPrintAreas() { <add> boolean ret = false; <add> CellRangeAddress[]setValue = new CellRangeAddress[]{subaddress}; <add> oObj.setPrintAreas(setValue); <add> CellRangeAddress[]newVal = oObj.getPrintAreas(); <add> ret = ValueComparer.equalValue(newVal, setValue); <add> setValue = new CellRangeAddress[]{address}; <add> oObj.setPrintAreas(setValue); <add> newVal = oObj.getPrintAreas(); <add> ret &= ValueComparer.equalValue(newVal, setValue); <add> tRes.tested("setPrintAreas()", ret); <add> } <add> <add> public void _setPrintTitleColumns() { <add> requiredMethod("setTitleColumns()"); <add> boolean ret = false; <add> boolean value = oObj.getPrintTitleColumns(); <add> oObj.setPrintTitleColumns(!value); <add> ret = value != oObj.getPrintTitleColumns(); <add> 
oObj.setPrintTitleColumns(false); <add> tRes.tested("setPrintTitleColumns()", ret); <add> } <add> <add> public void _setPrintTitleRows() { <add> requiredMethod("setTitleRows()"); <add> boolean ret = false; <add> boolean value = oObj.getPrintTitleRows(); <add> oObj.setPrintTitleRows(!value); <add> ret = value != oObj.getPrintTitleRows(); <add> oObj.setPrintTitleRows(false); <add> tRes.tested("setPrintTitleRows()", ret); <add> } <add> <add> public void _setTitleColumns() { <add> requiredMethod("setPrintAreas()"); <add> boolean ret = false; <add> CellRangeAddress newVal = oObj.getTitleColumns(); <add> ret = ValueComparer.equalValue(newVal, new CellRangeAddress((short)0, 0, 0, 0, 0)); <add> // use first row of range as title column <add> titleColumns = new CellRangeAddress(); <add> titleColumns.Sheet = address.Sheet; <add> titleColumns.StartColumn = address.StartColumn; <add> titleColumns.StartRow = address.StartRow; <add> titleColumns.EndColumn = address.EndColumn; <add> titleColumns.EndRow = address.StartRow; <add> oObj.setTitleColumns(titleColumns); <add> tRes.tested("setTitleColumns()", ret); <add> } <add> <add> public void _setTitleRows() { <add> requiredMethod("setPrintAreas()"); <add> boolean ret = false; <add> CellRangeAddress newVal = oObj.getTitleRows(); <add> ret = ValueComparer.equalValue(newVal, new CellRangeAddress((short)0, 0, 0, 0, 0)); <add> // use first column of range as title row <add> titleRows = new CellRangeAddress(); <add> titleRows.Sheet = address.Sheet; <add> titleRows.StartColumn = address.StartColumn; <add> titleRows.StartRow = address.StartRow; <add> titleRows.EndColumn = address.StartColumn; <add> titleRows.EndRow = address.EndRow; <add> oObj.setTitleColumns(titleRows); <add> tRes.tested("setTitleRows()", ret); <add> } <add> <add>}
Java
agpl-3.0
508bc83d512233e463ec386741b0521f9bb3e5c0
0
maligulzar/Rstudio-instrumented,thklaus/rstudio,edrogers/rstudio,jar1karp/rstudio,suribes/rstudio,sfloresm/rstudio,pssguy/rstudio,tbarrongh/rstudio,pssguy/rstudio,more1/rstudio,pssguy/rstudio,suribes/rstudio,jzhu8803/rstudio,tbarrongh/rstudio,brsimioni/rstudio,brsimioni/rstudio,john-r-mcpherson/rstudio,jzhu8803/rstudio,thklaus/rstudio,vbelakov/rstudio,more1/rstudio,piersharding/rstudio,jar1karp/rstudio,more1/rstudio,john-r-mcpherson/rstudio,jar1karp/rstudio,sfloresm/rstudio,piersharding/rstudio,jrnold/rstudio,JanMarvin/rstudio,nvoron23/rstudio,maligulzar/Rstudio-instrumented,piersharding/rstudio,suribes/rstudio,piersharding/rstudio,jar1karp/rstudio,pssguy/rstudio,jzhu8803/rstudio,jzhu8803/rstudio,maligulzar/Rstudio-instrumented,edrogers/rstudio,sfloresm/rstudio,jzhu8803/rstudio,brsimioni/rstudio,sfloresm/rstudio,githubfun/rstudio,sfloresm/rstudio,nvoron23/rstudio,piersharding/rstudio,john-r-mcpherson/rstudio,jar1karp/rstudio,jrnold/rstudio,edrogers/rstudio,thklaus/rstudio,more1/rstudio,piersharding/rstudio,edrogers/rstudio,jzhu8803/rstudio,more1/rstudio,thklaus/rstudio,tbarrongh/rstudio,jrnold/rstudio,maligulzar/Rstudio-instrumented,brsimioni/rstudio,jar1karp/rstudio,jzhu8803/rstudio,JanMarvin/rstudio,githubfun/rstudio,edrogers/rstudio,more1/rstudio,thklaus/rstudio,thklaus/rstudio,nvoron23/rstudio,edrogers/rstudio,tbarrongh/rstudio,pssguy/rstudio,nvoron23/rstudio,maligulzar/Rstudio-instrumented,more1/rstudio,nvoron23/rstudio,edrogers/rstudio,pssguy/rstudio,jar1karp/rstudio,githubfun/rstudio,thklaus/rstudio,pssguy/rstudio,JanMarvin/rstudio,sfloresm/rstudio,jrnold/rstudio,jzhu8803/rstudio,suribes/rstudio,suribes/rstudio,jar1karp/rstudio,john-r-mcpherson/rstudio,suribes/rstudio,piersharding/rstudio,JanMarvin/rstudio,JanMarvin/rstudio,JanMarvin/rstudio,john-r-mcpherson/rstudio,piersharding/rstudio,maligulzar/Rstudio-instrumented,githubfun/rstudio,githubfun/rstudio,tbarrongh/rstudio,JanMarvin/rstudio,vbelakov/rstudio,nvoron23/rstudio,githubfun/rstudio,thklaus/rstudio,jrn
old/rstudio,jrnold/rstudio,edrogers/rstudio,vbelakov/rstudio,jrnold/rstudio,JanMarvin/rstudio,vbelakov/rstudio,jar1karp/rstudio,brsimioni/rstudio,john-r-mcpherson/rstudio,sfloresm/rstudio,brsimioni/rstudio,jrnold/rstudio,pssguy/rstudio,suribes/rstudio,maligulzar/Rstudio-instrumented,piersharding/rstudio,vbelakov/rstudio,john-r-mcpherson/rstudio,JanMarvin/rstudio,vbelakov/rstudio,githubfun/rstudio,sfloresm/rstudio,tbarrongh/rstudio,brsimioni/rstudio,vbelakov/rstudio,maligulzar/Rstudio-instrumented,brsimioni/rstudio,githubfun/rstudio,more1/rstudio,suribes/rstudio,jrnold/rstudio,tbarrongh/rstudio,maligulzar/Rstudio-instrumented,john-r-mcpherson/rstudio,tbarrongh/rstudio,nvoron23/rstudio,vbelakov/rstudio
/* * BuildPane.java * * Copyright (C) 2009-12 by RStudio, Inc. * * This program is licensed to you under the terms of version 3 of the * GNU Affero General Public License. This program is distributed WITHOUT * ANY EXPRESS OR IMPLIED WARRANTY, INCLUDING THOSE OF NON-INFRINGEMENT, * MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Please refer to the * AGPL (http://www.gnu.org/licenses/agpl-3.0.txt) for more details. * */ package org.rstudio.studio.client.workbench.views.buildtools; import com.google.gwt.event.dom.client.HasClickHandlers; import com.google.gwt.resources.client.ImageResource; import com.google.gwt.user.client.ui.SimplePanel; import com.google.gwt.user.client.ui.Widget; import com.google.inject.Inject; import org.rstudio.core.client.widget.Toolbar; import org.rstudio.core.client.widget.ToolbarButton; import org.rstudio.core.client.widget.ToolbarPopupMenu; import org.rstudio.studio.client.common.OutputBuffer; import org.rstudio.studio.client.common.icons.StandardIcons; import org.rstudio.studio.client.workbench.commands.Commands; import org.rstudio.studio.client.workbench.model.Session; import org.rstudio.studio.client.workbench.model.SessionInfo; import org.rstudio.studio.client.workbench.ui.WorkbenchPane; import org.rstudio.studio.client.workbench.views.buildtools.ui.BuildPaneResources; public class BuildPane extends WorkbenchPane implements BuildPresenter.Display { @Inject public BuildPane(Commands commands, Session session) { super("Build"); commands_ = commands; session_ = session; ensureWidget(); } @Override protected Toolbar createMainToolbar() { Toolbar toolbar = new Toolbar(); // always include build all toolbar.addLeftWidget(commands_.buildAll().createToolbarButton()); toolbar.addLeftSeparator(); // packages get check package String type = session_.getSessionInfo().getBuildToolsType(); if (type.equals(SessionInfo.BUILD_TOOLS_PACKAGE)) { toolbar.addLeftWidget(commands_.checkPackage().createToolbarButton()); toolbar.addLeftSeparator(); } // 
create more menu ToolbarPopupMenu moreMenu = new ToolbarPopupMenu(); if (type.equals(SessionInfo.BUILD_TOOLS_MAKEFILE)) { moreMenu.addItem(commands_.rebuildAll().createMenuItem(false)); moreMenu.addItem(commands_.cleanAll().createMenuItem(false)); moreMenu.addSeparator(); } // packages get additional commands else if (type.equals(SessionInfo.BUILD_TOOLS_PACKAGE)) { moreMenu.addItem(commands_.devtoolsLoadAll().createMenuItem(false)); moreMenu.addSeparator(); moreMenu.addItem(commands_.buildSourcePackage().createMenuItem(false)); moreMenu.addItem(commands_.buildBinaryPackage().createMenuItem(false)); moreMenu.addSeparator(); moreMenu.addItem(commands_.roxygenizePackage().createMenuItem(false)); moreMenu.addSeparator(); } moreMenu.addItem(commands_.buildToolsProjectSetup().createMenuItem(false)); // add more menu ToolbarButton moreButton = new ToolbarButton( "More", StandardIcons.INSTANCE.more_actions(), moreMenu); toolbar.addLeftWidget(moreButton); // stop button (initially hidden) ImageResource stopImage = commands_.interruptR().getImageResource(); stopButton_ = new ToolbarButton(stopImage, null); stopButton_.setVisible(false); toolbar.addRightWidget(stopButton_); return toolbar; } @Override protected Widget createMainWidget() { panel_ = new SimplePanel(); outputBuffer_ = new OutputBuffer(); panel_.setWidget(outputBuffer_); return panel_; } @Override public void buildStarted() { outputBuffer_.clear(); stopButton_.setVisible(true); } @Override public void showOutput(String output) { outputBuffer_.append(output); } @Override public void buildCompleted() { stopButton_.setVisible(false); } @Override public HasClickHandlers stopButton() { return stopButton_; } @Override public void scrollToBottom() { outputBuffer_.scrollToBottom(); } private Commands commands_; private Session session_; private SimplePanel panel_; private OutputBuffer outputBuffer_; private ToolbarButton stopButton_; @SuppressWarnings("unused") private static BuildPaneResources RES = 
BuildPaneResources.INSTANCE; }
src/gwt/src/org/rstudio/studio/client/workbench/views/buildtools/BuildPane.java
/* * BuildPane.java * * Copyright (C) 2009-12 by RStudio, Inc. * * This program is licensed to you under the terms of version 3 of the * GNU Affero General Public License. This program is distributed WITHOUT * ANY EXPRESS OR IMPLIED WARRANTY, INCLUDING THOSE OF NON-INFRINGEMENT, * MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Please refer to the * AGPL (http://www.gnu.org/licenses/agpl-3.0.txt) for more details. * */ package org.rstudio.studio.client.workbench.views.buildtools; import com.google.gwt.event.dom.client.HasClickHandlers; import com.google.gwt.resources.client.ImageResource; import com.google.gwt.user.client.ui.SimplePanel; import com.google.gwt.user.client.ui.Widget; import com.google.inject.Inject; import org.rstudio.core.client.widget.Toolbar; import org.rstudio.core.client.widget.ToolbarButton; import org.rstudio.core.client.widget.ToolbarPopupMenu; import org.rstudio.studio.client.common.OutputBuffer; import org.rstudio.studio.client.workbench.commands.Commands; import org.rstudio.studio.client.workbench.model.Session; import org.rstudio.studio.client.workbench.model.SessionInfo; import org.rstudio.studio.client.workbench.ui.WorkbenchPane; import org.rstudio.studio.client.workbench.views.buildtools.ui.BuildPaneResources; public class BuildPane extends WorkbenchPane implements BuildPresenter.Display { @Inject public BuildPane(Commands commands, Session session) { super("Build"); commands_ = commands; session_ = session; ensureWidget(); } @Override protected Toolbar createMainToolbar() { Toolbar toolbar = new Toolbar(); // always include build all toolbar.addLeftWidget(commands_.buildAll().createToolbarButton()); // makefiles get extra build menu entries String type = session_.getSessionInfo().getBuildToolsType(); if (type.equals(SessionInfo.BUILD_TOOLS_MAKEFILE)) { ToolbarPopupMenu buildMenu = new ToolbarPopupMenu(); buildMenu.addItem(commands_.buildAll().createMenuItem(false)); buildMenu.addItem(commands_.rebuildAll().createMenuItem(false)); 
buildMenu.addSeparator(); buildMenu.addItem(commands_.cleanAll().createMenuItem(false)); ToolbarButton buildMenuButton = new ToolbarButton(buildMenu, true); toolbar.addLeftWidget(buildMenuButton); } // packages get additional commands else if (type.equals(SessionInfo.BUILD_TOOLS_PACKAGE)) { ToolbarPopupMenu buildMenu = new ToolbarPopupMenu(); buildMenu.addItem(commands_.devtoolsLoadAll().createMenuItem(false)); buildMenu.addSeparator(); buildMenu.addItem(commands_.buildSourcePackage().createMenuItem(false)); buildMenu.addItem(commands_.buildBinaryPackage().createMenuItem(false)); buildMenu.addSeparator(); buildMenu.addItem(commands_.roxygenizePackage().createMenuItem(false)); ToolbarButton buildMenuButton = new ToolbarButton(buildMenu, true); toolbar.addLeftWidget(buildMenuButton); toolbar.addLeftSeparator(); toolbar.addLeftWidget(commands_.checkPackage().createToolbarButton()); } toolbar.addLeftSeparator(); // always include configuration toolbar.addLeftWidget( commands_.buildToolsProjectSetup().createToolbarButton()); // stop button (initially hidden) ImageResource stopImage = commands_.interruptR().getImageResource(); stopButton_ = new ToolbarButton(stopImage, null); stopButton_.setVisible(false); toolbar.addRightWidget(stopButton_); return toolbar; } @Override protected Widget createMainWidget() { panel_ = new SimplePanel(); outputBuffer_ = new OutputBuffer(); panel_.setWidget(outputBuffer_); return panel_; } @Override public void buildStarted() { outputBuffer_.clear(); stopButton_.setVisible(true); } @Override public void showOutput(String output) { outputBuffer_.append(output); } @Override public void buildCompleted() { stopButton_.setVisible(false); } @Override public HasClickHandlers stopButton() { return stopButton_; } @Override public void scrollToBottom() { outputBuffer_.scrollToBottom(); } private Commands commands_; private Session session_; private SimplePanel panel_; private OutputBuffer outputBuffer_; private ToolbarButton stopButton_; 
@SuppressWarnings("unused") private static BuildPaneResources RES = BuildPaneResources.INSTANCE; }
use more menu in build pane
src/gwt/src/org/rstudio/studio/client/workbench/views/buildtools/BuildPane.java
use more menu in build pane
<ide><path>rc/gwt/src/org/rstudio/studio/client/workbench/views/buildtools/BuildPane.java <ide> import org.rstudio.core.client.widget.ToolbarButton; <ide> import org.rstudio.core.client.widget.ToolbarPopupMenu; <ide> import org.rstudio.studio.client.common.OutputBuffer; <add>import org.rstudio.studio.client.common.icons.StandardIcons; <ide> import org.rstudio.studio.client.workbench.commands.Commands; <ide> import org.rstudio.studio.client.workbench.model.Session; <ide> import org.rstudio.studio.client.workbench.model.SessionInfo; <ide> <ide> // always include build all <ide> toolbar.addLeftWidget(commands_.buildAll().createToolbarButton()); <add> toolbar.addLeftSeparator(); <ide> <del> // makefiles get extra build menu entries <add> // packages get check package <ide> String type = session_.getSessionInfo().getBuildToolsType(); <add> if (type.equals(SessionInfo.BUILD_TOOLS_PACKAGE)) <add> { <add> toolbar.addLeftWidget(commands_.checkPackage().createToolbarButton()); <add> toolbar.addLeftSeparator(); <add> } <add> <add> // create more menu <add> ToolbarPopupMenu moreMenu = new ToolbarPopupMenu(); <ide> if (type.equals(SessionInfo.BUILD_TOOLS_MAKEFILE)) <ide> { <del> ToolbarPopupMenu buildMenu = new ToolbarPopupMenu(); <del> buildMenu.addItem(commands_.buildAll().createMenuItem(false)); <del> buildMenu.addItem(commands_.rebuildAll().createMenuItem(false)); <del> buildMenu.addSeparator(); <del> buildMenu.addItem(commands_.cleanAll().createMenuItem(false)); <del> ToolbarButton buildMenuButton = new ToolbarButton(buildMenu, true); <del> toolbar.addLeftWidget(buildMenuButton); <add> moreMenu.addItem(commands_.rebuildAll().createMenuItem(false)); <add> moreMenu.addItem(commands_.cleanAll().createMenuItem(false)); <add> moreMenu.addSeparator(); <ide> } <ide> <ide> // packages get additional commands <ide> else if (type.equals(SessionInfo.BUILD_TOOLS_PACKAGE)) <ide> { <del> ToolbarPopupMenu buildMenu = new ToolbarPopupMenu(); <del> 
buildMenu.addItem(commands_.devtoolsLoadAll().createMenuItem(false)); <del> buildMenu.addSeparator(); <del> buildMenu.addItem(commands_.buildSourcePackage().createMenuItem(false)); <del> buildMenu.addItem(commands_.buildBinaryPackage().createMenuItem(false)); <del> buildMenu.addSeparator(); <del> buildMenu.addItem(commands_.roxygenizePackage().createMenuItem(false)); <del> ToolbarButton buildMenuButton = new ToolbarButton(buildMenu, true); <del> toolbar.addLeftWidget(buildMenuButton); <del> toolbar.addLeftSeparator(); <del> toolbar.addLeftWidget(commands_.checkPackage().createToolbarButton()); <add> moreMenu.addItem(commands_.devtoolsLoadAll().createMenuItem(false)); <add> moreMenu.addSeparator(); <add> moreMenu.addItem(commands_.buildSourcePackage().createMenuItem(false)); <add> moreMenu.addItem(commands_.buildBinaryPackage().createMenuItem(false)); <add> moreMenu.addSeparator(); <add> moreMenu.addItem(commands_.roxygenizePackage().createMenuItem(false)); <add> moreMenu.addSeparator(); <ide> } <add> moreMenu.addItem(commands_.buildToolsProjectSetup().createMenuItem(false)); <ide> <del> toolbar.addLeftSeparator(); <del> <del> // always include configuration <del> toolbar.addLeftWidget( <del> commands_.buildToolsProjectSetup().createToolbarButton()); <add> // add more menu <add> ToolbarButton moreButton = new ToolbarButton( <add> "More", <add> StandardIcons.INSTANCE.more_actions(), <add> moreMenu); <add> toolbar.addLeftWidget(moreButton); <ide> <ide> // stop button (initially hidden) <ide> ImageResource stopImage = commands_.interruptR().getImageResource();
Java
apache-2.0
error: pathspec 'app/src/main/java/cn/com/sdq/youyouqweathear/utils/HttpUtil.java' did not match any file(s) known to git
6cde4287c4eca8e7eb224aa6f8c107f23d2a0cf3
1
sudeqiangxx/youyouqweather
package cn.com.sdq.youyouqweathear.utils; import java.io.BufferedReader; import java.io.InputStream; import java.io.InputStreamReader; import java.net.URL; import javax.net.ssl.HttpsURLConnection; import cn.com.sdq.youyouqweathear.common.HttpCallbackListener; /** * Created by Administrator on 2016/6/19. */ public class HttpUtil { private static void sendHttpRequest(final String address, final HttpCallbackListener httpCallbackListener){ new Thread(new Runnable() { @Override public void run() { HttpsURLConnection httpsURLConnection=null; InputStream in=null; BufferedReader bufferedReader; StringBuffer sb=null; try { URL url=new URL(address); httpsURLConnection= (HttpsURLConnection) url.openConnection(); httpsURLConnection.setRequestMethod("GET"); httpsURLConnection.setReadTimeout(8000); httpsURLConnection.setConnectTimeout(8000); in=httpsURLConnection.getInputStream(); bufferedReader=new BufferedReader(new InputStreamReader(in)); sb=new StringBuffer(); String line; while ((line=bufferedReader.readLine())!=null){ sb.append(line); } if (httpCallbackListener!=null){ httpCallbackListener.onFinish(sb.toString()); } } catch (Exception e) { if (httpCallbackListener==null){ httpCallbackListener.onError(e); } e.printStackTrace(); }finally { if (httpsURLConnection!=null){ httpsURLConnection.disconnect(); } } } }).start(); } }
app/src/main/java/cn/com/sdq/youyouqweathear/utils/HttpUtil.java
添加了Http工具类和回调接口
app/src/main/java/cn/com/sdq/youyouqweathear/utils/HttpUtil.java
添加了Http工具类和回调接口
<ide><path>pp/src/main/java/cn/com/sdq/youyouqweathear/utils/HttpUtil.java <add>package cn.com.sdq.youyouqweathear.utils; <add> <add>import java.io.BufferedReader; <add>import java.io.InputStream; <add>import java.io.InputStreamReader; <add>import java.net.URL; <add> <add>import javax.net.ssl.HttpsURLConnection; <add> <add>import cn.com.sdq.youyouqweathear.common.HttpCallbackListener; <add> <add>/** <add> * Created by Administrator on 2016/6/19. <add> */ <add>public class HttpUtil { <add> private static void sendHttpRequest(final String address, final HttpCallbackListener httpCallbackListener){ <add> new Thread(new Runnable() { <add> @Override <add> public void run() { <add> HttpsURLConnection httpsURLConnection=null; <add> InputStream in=null; <add> BufferedReader bufferedReader; <add> StringBuffer sb=null; <add> try { <add> URL url=new URL(address); <add> httpsURLConnection= (HttpsURLConnection) url.openConnection(); <add> httpsURLConnection.setRequestMethod("GET"); <add> httpsURLConnection.setReadTimeout(8000); <add> httpsURLConnection.setConnectTimeout(8000); <add> in=httpsURLConnection.getInputStream(); <add> bufferedReader=new BufferedReader(new InputStreamReader(in)); <add> sb=new StringBuffer(); <add> String line; <add> while ((line=bufferedReader.readLine())!=null){ <add> sb.append(line); <add> } <add> if (httpCallbackListener!=null){ <add> httpCallbackListener.onFinish(sb.toString()); <add> } <add> } catch (Exception e) { <add> if (httpCallbackListener==null){ <add> httpCallbackListener.onError(e); <add> } <add> e.printStackTrace(); <add> }finally { <add> if (httpsURLConnection!=null){ <add> httpsURLConnection.disconnect(); <add> } <add> } <add> <add> } <add> }).start(); <add> } <add>}
JavaScript
mpl-2.0
65746a17a21e5eb7d2f1cbda0ba0ccccbd3001d3
0
mmmavis/webmaker.org,jbuck/webmaker.org,mozilla/webmaker.org,mozilla/webmaker.org,alicoding/webmaker.org,mmmavis/webmaker.org,alicoding/webmaker.org,cadecairos/webmaker.org,cadecairos/webmaker.org
define(['jquery', 'google', 'forms', 'domReady!'], function ($, google, forms) { var $editForm = $('form#edit-event'); $editForm.validate({ rules: { registerLink: 'url' } }); function toggleEditMode() { $('.show').toggleClass('hidden'); $('.edit').toggleClass('hidden'); location.hash = (location.hash == '#edit') ? '' : '#edit'; } function enterEditMode() { $('.show').addClass('hidden'); $('.edit').removeClass('hidden'); location.hash = '#edit'; } function leaveEditMode() { $('.show').removeClass('hidden'); $('.edit').addClass('hidden'); location.hash = ''; $editForm[0].reset(); } $editForm.find('button#edit-mode').click(enterEditMode); $editForm.find('button#cancel-edit').click(leaveEditMode); if (location.hash == '#edit') enterEditMode(); var delete_safety = 1; $editForm.find('button#delete-event').click(function(ev) { var $deleteSubmit = $(this); if (delete_safety) { $('#delete-confirm').dialog({ resizable: false, height: 160, modal: true, buttons: { 'Do It!': function() { delete_safety = 0; $(this).dialog('close'); $deleteSubmit.click(); }, Cancel: function() { $(this).dialog('close'); } } }); return false; } }); forms.setupImageUpload($editForm); var ac = new google.maps.places.Autocomplete( $editForm.find('input[name="address"]')[0], { types: ['geocode'] }); google.maps.event.addListener(ac, 'place_changed', function() { var place = ac.getPlace(); var loc = { latitude: place.geometry.location.lat(), longitude: place.geometry.location.lng() }; Object.keys(loc).forEach(function(k) { $editForm.find('input[name="'+k+'"]').val(loc[k]); }); }); navigator.idSSO.app.onlogin = function(assert) { $('#owner-panel').removeClass('hidden'); }; navigator.idSSO.app.onlogout = function() { $('#owner-panel').addClass('hidden'); }; });
public/events/js/events/details/index.js
define(['jquery', 'google', 'forms', 'domReady!'], function ($, google, forms) { var $editForm = $('form#edit-event'); $editForm.validate({ rules: { registerLink: 'url' } }); function toggleEditMode() { $('.show').toggleClass('hidden'); $('.edit').toggleClass('hidden'); location.hash = (location.hash == '#edit') ? '' : '#edit'; } function enterEditMode() { $('.show').addClass('hidden'); $('.edit').removeClass('hidden'); location.hash = '#edit'; } function leaveEditMode() { $('.show').removeClass('hidden'); $('.edit').addClass('hidden'); location.hash = ''; $editForm[0].reset(); } $editForm.find('button#edit-mode').click(enterEditMode); $editForm.find('button#cancel-edit').click(leaveEditMode); if (location.hash == '#edit') enterEditMode(); var delete_safety = 1; $editForm.find('button#delete-event').click(function(ev) { var $deleteSubmit = $(this); if (delete_safety) { $('#delete-confirm').dialog({ resizable: false, height: 160, modal: true, buttons: { 'Do It!': function() { delete_safety = 0; $(this).dialog('close'); $deleteSubmit.click(); }, Cancel: function() { $(this).dialog('close'); } } }); return false; } }); forms.setupImageUpload($editForm); var ac = new google.maps.places.Autocomplete( $editForm.find('input[name="address"]')[0], { types: ['geocode'] }); google.maps.event.addListener(ac, 'place_changed', function() { var place = autocomplete.getPlace(); var loc = { latitude: place.geometry.location.lat(), longitude: place.geometry.location.lng() }; Object.keys(loc).forEach(function(k) { $editForm.find('input[name="'+k+'"]').val(loc[k]); }); }); navigator.idSSO.app.onlogin = function(assert) { $('#owner-panel').removeClass('hidden'); }; navigator.idSSO.app.onlogout = function() { $('#owner-panel').addClass('hidden'); }; });
fix for bug 924704
public/events/js/events/details/index.js
fix for bug 924704
<ide><path>ublic/events/js/events/details/index.js <ide> <ide> var ac = new google.maps.places.Autocomplete( <ide> $editForm.find('input[name="address"]')[0], { types: ['geocode'] }); <del> google.maps.event.addListener(ac, 'place_changed', function() { <del> var place = autocomplete.getPlace(); <add> google.maps.event.addListener(ac, 'place_changed', function() { <add> var place = ac.getPlace(); <ide> var loc = { latitude: place.geometry.location.lat(), <ide> longitude: place.geometry.location.lng() }; <ide> Object.keys(loc).forEach(function(k) {
Java
apache-2.0
8f90d9d99caee748649335ee84cec21168953d5f
0
wmedvede/guvnor,wmedvede/guvnor,yurloc/guvnor,porcelli-forks/guvnor,nmirasch/guvnor,hxf0801/guvnor,adrielparedes/guvnor,etirelli/guvnor,cristianonicolai/guvnor,baldimir/guvnor,etirelli/guvnor,mbiarnes/guvnor,hxf0801/guvnor,yurloc/guvnor,kiereleaseuser/guvnor,Rikkola/guvnor,wmedvede/guvnor,Rikkola/guvnor,kiereleaseuser/guvnor,psiroky/guvnor,cristianonicolai/guvnor,mswiderski/guvnor,mbiarnes/guvnor,psiroky/guvnor,baldimir/guvnor,droolsjbpm/guvnor,etirelli/guvnor,adrielparedes/guvnor,droolsjbpm/guvnor,cristianonicolai/guvnor,kiereleaseuser/guvnor,psiroky/guvnor,nmirasch/guvnor,adrielparedes/guvnor,mbiarnes/guvnor,Rikkola/guvnor,porcelli-forks/guvnor,nmirasch/guvnor,baldimir/guvnor,porcelli-forks/guvnor,hxf0801/guvnor,droolsjbpm/guvnor
package org.drools.factconstraints.server.predefined; import java.util.ArrayList; import java.util.List; import java.util.Map; import org.drools.factconstraint.server.DefaultConstraintImpl; import org.drools.factconstraints.client.ConstraintConfiguration; public class MatchesConstraint extends DefaultConstraintImpl { public static final String MATCHES_ARGUMENT = "matches"; private static final long serialVersionUID = 501L; public static final String NAME = "Matches"; @Override protected String internalVerifierRule(ConstraintConfiguration config, Map<String, Object> context) { List<String> constraints = new ArrayList<String>(); constraints.add("valueAsString matches \"" + config.getArgumentValue(MATCHES_ARGUMENT) + "\""); return this.createVerifierRuleTemplate(config, context, "Matches_Field_Constraint", constraints, "The value must match: " + config.getArgumentValue(MATCHES_ARGUMENT)); // I18N } }
drools-factconstraint/src/main/java/org/drools/factconstraints/server/predefined/MatchesConstraint.java
package org.drools.factconstraints.server.predefined; import java.util.ArrayList; import java.util.List; import java.util.Map; import org.drools.factconstraint.server.DefaultConstraintImpl; import org.drools.factconstraints.client.ConstraintConfiguration; public class MatchesConstraint extends DefaultConstraintImpl { public static final String MATCHES_ARGUMENT = "matches"; private static final long serialVersionUID = 501L; public static final String NAME = "Matches"; @Override protected String internalVerifierRule(ConstraintConfiguration config, Map<String, Object> context) { List<String> constraints = new ArrayList<String>(); constraints.add("valueAsString \"" + config.getArgumentValue(MATCHES_ARGUMENT) + "\""); return this.createVerifierRuleTemplate(config, context, "Matches_Field_Constraint", constraints, "The value must match: " + config.getArgumentValue(MATCHES_ARGUMENT)); // I18N } }
added missing operator git-svn-id: a243bed356d289ca0d1b6d299a0597bdc4ecaa09@32350 c60d74c8-e8f6-0310-9e8f-d4a2fc68ab70
drools-factconstraint/src/main/java/org/drools/factconstraints/server/predefined/MatchesConstraint.java
added missing operator
<ide><path>rools-factconstraint/src/main/java/org/drools/factconstraints/server/predefined/MatchesConstraint.java <ide> @Override <ide> protected String internalVerifierRule(ConstraintConfiguration config, Map<String, Object> context) { <ide> List<String> constraints = new ArrayList<String>(); <del> constraints.add("valueAsString \"" + config.getArgumentValue(MATCHES_ARGUMENT) + "\""); <add> constraints.add("valueAsString matches \"" + config.getArgumentValue(MATCHES_ARGUMENT) + "\""); <ide> <ide> return this.createVerifierRuleTemplate(config, context, <ide> "Matches_Field_Constraint", constraints,
Java
apache-2.0
efec3e34046dea84999dc9175cbe6080d3a9487f
0
baszero/yanel,baszero/yanel,wyona/yanel,baszero/yanel,wyona/yanel,wyona/yanel,wyona/yanel,wyona/yanel,baszero/yanel,wyona/yanel,baszero/yanel,baszero/yanel
package org.wyona.yanel.impl.resources; import java.io.ByteArrayInputStream; import java.io.UnsupportedEncodingException; import org.apache.log4j.Category; import org.wyona.yarep.core.Repository; import org.wyona.yarep.core.RepositoryFactory; import org.wyona.yarep.util.RepoPath; import org.wyona.yanel.core.Yanel; import org.xml.sax.Attributes; import org.xml.sax.SAXException; import org.xml.sax.helpers.DefaultHandler; public class LinkChecker extends DefaultHandler { private static Category log = Category.getInstance(LinkChecker.class); private ByteArrayInputStream byteArrayInputStream = null; private StringBuffer transformedXmlAsBuffer = null; private Repository dataRepo = null; private String refererPath = null; private DataPath dataPathImpl = null; /** * this array with protocols will all be handled as external links * and therefor they dont have to be checked if they exist in repository */ private String[] externalLinks = { "http:", "ftp:", "https:", "mailto:", "news:", "file:", "rtsp:", "mms:", "ldap:", "gopher:", "nntp:", "telnet:", "wais:", "prospero:", "z39.50s", "z39.50r", "vemmi:", "imap:", "nfs:", "acap:", "tip:", "pop:", "dav:", "opaquelocktoken:", "sip:", "sips:", "tel:", "fax:", "modem:", "soap.beep:", "soap.beeps", "xmlrpc.beep", "xmlrpc.beeps", "urn:", "go:", "h323:", "ipp:", "tftp:", "mupdate:", "pres:", "im:", "mtqp", "smb:" }; public LinkChecker(Repository dataRepo, String refererPath, DataPath dataPathImpl) { this.dataRepo = dataRepo; this.refererPath = refererPath; this.dataPathImpl = dataPathImpl; } public void startDocument() throws SAXException { transformedXmlAsBuffer = new StringBuffer(); } public void endDocument() throws SAXException { setResultInputStream(); } public void startElement(String namespaceURI, String localName, String qName, Attributes attrs) throws SAXException { String eName = ("".equals(localName)) ? 
qName : localName; transformedXmlAsBuffer.append("<" + eName); if(eName.equals("Link")) { for(int i = 0; i < attrs.getLength(); i++) { String aName = attrs.getQName(i); String aValue = attrs.getValue(i); if(aName.equals("href")) { if(aValue.startsWith("external_")) { //do not check this link cause it is EXTERNAL aValue = aValue.substring(9); } else { //check internal links if they already exist boolean externalLink = false; for(int j = 0; j < externalLinks.length; j++) { if(aValue.startsWith(externalLinks[j])) { externalLink = true; break; } } if(!externalLink && !resourceExists(aValue)) { log.warn("Link : [" + aValue + "] does not exist"); transformedXmlAsBuffer.append(" exist=\"false\""); } } } transformedXmlAsBuffer.append(" " + aName + "=\"" + replaceEntities(aValue) + "\""); } } else { for(int i = 0; i < attrs.getLength(); i++) { String aName = attrs.getQName(i); String aValue = attrs.getValue(i); transformedXmlAsBuffer.append(" " + aName + "=\"" + replaceEntities(aValue) + "\""); } } transformedXmlAsBuffer.append(">"); } public void endElement(String namespaceURI, String localName, String qName) throws SAXException { String eName = ("".equals(localName)) ? qName : localName; transformedXmlAsBuffer.append("</" + eName + ">"); } public void characters(char[] buf, int offset, int len) throws SAXException { String s = new String(buf, offset, len); transformedXmlAsBuffer.append(replaceEntities(s)); } /** * Replaces some characters by their corresponding xml entities. 
* @param str * @return */ private String replaceEntities(String str) { str = str.replaceAll("&", "&amp;"); str = str.replaceAll("<", "&lt;"); str = str.replaceAll(">", "&gt;"); str = str.replaceAll("'", "&apos;"); str = str.replaceAll("\"", "&quot;"); return str; } private void setResultInputStream() { try { this.byteArrayInputStream = new ByteArrayInputStream(transformedXmlAsBuffer.toString().getBytes("utf-8")); } catch (UnsupportedEncodingException e) { log.error(e, e); } } public ByteArrayInputStream getInputStream() { return this.byteArrayInputStream; } /** * Check if resource exists within repository */ private boolean resourceExists(String path) { try { String absolutePath = org.wyona.commons.io.PathUtil.concat(refererPath, path); //log.debug("Referer: " + refererPath + ", path: " + path); //log.debug("Absolute Path: " + absolutePath); return dataRepo.existsNode(dataPathImpl.getDataPath(absolutePath)); } catch (Exception e) { log.error(e.getMessage(), e); } return false; } }
src/contributions/resources/wiki/src/java/org/wyona/yanel/impl/resources/LinkChecker.java
package org.wyona.yanel.impl.resources; import java.io.ByteArrayInputStream; import java.io.UnsupportedEncodingException; import org.apache.log4j.Category; import org.wyona.yarep.core.Repository; import org.wyona.yarep.core.RepositoryFactory; import org.wyona.yarep.util.RepoPath; import org.wyona.yanel.core.Yanel; import org.xml.sax.Attributes; import org.xml.sax.SAXException; import org.xml.sax.helpers.DefaultHandler; public class LinkChecker extends DefaultHandler { private static Category log = Category.getInstance(LinkChecker.class); private ByteArrayInputStream byteArrayInputStream = null; private StringBuffer transformedXmlAsBuffer = null; private Repository dataRepo = null; private String refererPath = null; private DataPath dataPathImpl = null; /** * this array with protocols will all be handled as external links * and therefor they dont have to be checked if they exist in repository */ private String[] externalLinks = { "http:", "ftp:", "https:", "mailto:", "news:", "file:", "rtsp:", "mms:", "ldap:", "gopher:", "nntp:", "telnet:", "wais:", "prospero:", "z39.50s", "z39.50r", "vemmi:", "imap:", "nfs:", "acap:", "tip:", "pop:", "dav:", "opaquelocktoken:", "sip:", "sips:", "tel:", "fax:", "modem:", "soap.beep:", "soap.beeps", "xmlrpc.beep", "xmlrpc.beeps", "urn:", "go:", "h323:", "ipp:", "tftp:", "mupdate:", "pres:", "im:", "mtqp", "smb:" }; public LinkChecker(Repository dataRepo, String refererPath, DataPath dataPathImpl) { this.dataRepo = dataRepo; this.refererPath = refererPath; this.dataPathImpl = dataPathImpl; } public void startDocument() throws SAXException { transformedXmlAsBuffer = new StringBuffer(); } public void endDocument() throws SAXException { setResultInputStream(); } public void startElement(String namespaceURI, String localName, String qName, Attributes attrs) throws SAXException { String eName = ("".equals(localName)) ? 
qName : localName; transformedXmlAsBuffer.append("<" + eName); if(eName.equals("Link")) { for(int i = 0; i < attrs.getLength(); i++) { String aName = attrs.getQName(i); String aValue = attrs.getValue(i); if(aName.equals("href")) { if(aValue.startsWith("external_")) { //do not check this link cause it is EXTERNAL aValue = aValue.substring(9); } else { //check internal links if they already exist boolean externalLink = false; for(int j = 0; j < externalLinks.length; j++) { if(aValue.startsWith(externalLinks[j])) { externalLink = true; break; } } if(!externalLink && !resourceExists(aValue)) { log.warn("Link : [" + aValue + "] does not exist"); transformedXmlAsBuffer.append(" exist=\"false\""); } } } transformedXmlAsBuffer.append(" " + aName + "=\"" + replaceEntities(aValue) + "\""); } } else { for(int i = 0; i < attrs.getLength(); i++) { String aName = attrs.getQName(i); String aValue = attrs.getValue(i); transformedXmlAsBuffer.append(" " + aName + "=\"" + replaceEntities(aValue) + "\""); } } transformedXmlAsBuffer.append(">"); } public void endElement(String namespaceURI, String localName, String qName) throws SAXException { String eName = ("".equals(localName)) ? qName : localName; transformedXmlAsBuffer.append("</" + eName + ">"); } public void characters(char[] buf, int offset, int len) throws SAXException { String s = new String(buf, offset, len); transformedXmlAsBuffer.append(replaceEntities(s)); } /** * Replaces some characters by their corresponding xml entities. 
* @param str * @return */ private String replaceEntities(String str) { str = str.replaceAll("&", "&amp;"); str = str.replaceAll("<", "&lt;"); str = str.replaceAll(">", "&gt;"); str = str.replaceAll("'", "&apos;"); str = str.replaceAll("\"", "&quot;"); return str; } private void setResultInputStream() { try { this.byteArrayInputStream = new ByteArrayInputStream(transformedXmlAsBuffer.toString().getBytes("utf-8")); } catch (UnsupportedEncodingException e) { log.error(e, e); } } public ByteArrayInputStream getInputStream() { return this.byteArrayInputStream; } /** * Check if resource exists within repository */ private boolean resourceExists(String path) { try { String absolutePath = org.wyona.commons.io.PathUtil.concat(refererPath, path); //log.error("DEBUG: Referer: " + refererPath + ", path: " + path); //log.error("DEBUG: Absolute Path: " + absolutePath); return dataRepo.existsNode(dataPathImpl.getDataPath(absolutePath)); } catch (Exception e) { log.error(e.getMessage(), e); } return false; } }
log messages fixed
src/contributions/resources/wiki/src/java/org/wyona/yanel/impl/resources/LinkChecker.java
log messages fixed
<ide><path>rc/contributions/resources/wiki/src/java/org/wyona/yanel/impl/resources/LinkChecker.java <ide> private boolean resourceExists(String path) { <ide> try { <ide> String absolutePath = org.wyona.commons.io.PathUtil.concat(refererPath, path); <del> //log.error("DEBUG: Referer: " + refererPath + ", path: " + path); <del> //log.error("DEBUG: Absolute Path: " + absolutePath); <add> //log.debug("Referer: " + refererPath + ", path: " + path); <add> //log.debug("Absolute Path: " + absolutePath); <ide> return dataRepo.existsNode(dataPathImpl.getDataPath(absolutePath)); <ide> } catch (Exception e) { <ide> log.error(e.getMessage(), e);
Java
apache-2.0
6f7a7682be993e3b4eb29a639139dd063f51487c
0
gbif/registry,gbif/registry
/* * Copyright 2020 Global Biodiversity Information Facility (GBIF) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.gbif.registry.service; import org.gbif.api.annotation.NullToNotFound; import org.gbif.api.model.Constants; import org.gbif.api.model.common.DOI; import org.gbif.api.model.common.paging.PagingRequest; import org.gbif.api.model.common.paging.PagingResponse; import org.gbif.api.model.registry.Citation; import org.gbif.api.model.registry.Dataset; import org.gbif.api.model.registry.Metadata; import org.gbif.api.model.registry.Organization; import org.gbif.api.vocabulary.MetadataType; import org.gbif.registry.doi.util.RegistryDoiUtils; import org.gbif.registry.domain.ws.CitationDatasetUsage; import org.gbif.registry.metadata.CitationGenerator; import org.gbif.registry.metadata.parse.DatasetParser; import org.gbif.registry.persistence.mapper.DatasetMapper; import org.gbif.registry.persistence.mapper.MetadataMapper; import org.gbif.registry.persistence.mapper.OrganizationMapper; import org.gbif.registry.persistence.mapper.handler.ByteArrayWrapper; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Set; import java.util.UUID; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import javax.annotation.Nullable; import org.apache.commons.collections.CollectionUtils; import org.owasp.html.HtmlPolicyBuilder; import org.owasp.html.PolicyFactory; import org.slf4j.Logger; import 
org.slf4j.LoggerFactory; import org.springframework.stereotype.Service; import com.google.common.base.Strings; import com.google.common.cache.CacheBuilder; import com.google.common.cache.CacheLoader; import com.google.common.cache.LoadingCache; import com.google.common.collect.Lists; @SuppressWarnings("UnstableApiUsage") @Service public class RegistryDatasetServiceImpl implements RegistryDatasetService { private static final Logger LOG = LoggerFactory.getLogger(RegistryDatasetServiceImpl.class); private static final UUID IUCN_DATASET_KEY = UUID.fromString("19491596-35ae-4a91-9a98-85cf505f1bd3"); // HTML sanitizer policy for paragraph private static final PolicyFactory PARAGRAPH_HTML_SANITIZER = new HtmlPolicyBuilder() .allowCommonBlockElements() // "p", "div", "h1", ... .allowCommonInlineFormattingElements() // "b", "i" ... .allowElements("a") .allowUrlProtocols("https", "http") .allowAttributes("href") .onElements("a") .toFactory(); private final DatasetMapper datasetMapper; private final MetadataMapper metadataMapper; private final LoadingCache<UUID, Organization> organizationCache; private final LoadingCache<UUID, Set<UUID>> datasetKeysInNetworkCache; public RegistryDatasetServiceImpl( MetadataMapper metadataMapper, OrganizationMapper organizationMapper, DatasetMapper datasetMapper) { this.metadataMapper = metadataMapper; this.datasetMapper = datasetMapper; this.organizationCache = CacheBuilder.newBuilder() .expireAfterWrite(5, TimeUnit.MINUTES) .build( new CacheLoader<UUID, Organization>() { @Override public Organization load(UUID key) { return organizationMapper.get(key); } }); datasetKeysInNetworkCache = CacheBuilder.newBuilder() .expireAfterWrite(1, TimeUnit.MINUTES) .build( new CacheLoader<UUID, Set<UUID>>() { @Override public Set<UUID> load(UUID key) { return datasetMapper.listDatasetsInNetwork(key, null).stream() .map(Dataset::getKey) .collect(Collectors.toSet()); } }); } @NullToNotFound @Override public Dataset get(UUID key) { Dataset dataset = 
merge(getPreferredMetadataDataset(key), datasetMapper.get(key)); if (dataset == null) { return null; } setGeneratedCitation(dataset); return sanitizeDataset(dataset); } /** * Sanitize data on Dataset object mainly to restrict HTML tags that can be used. * * @param dataset * @return the original dataset with its content sanitized */ private Dataset sanitizeDataset(Dataset dataset) { if (!Strings.isNullOrEmpty(dataset.getDescription())) { dataset.setDescription(PARAGRAPH_HTML_SANITIZER.sanitize(dataset.getDescription())); } return dataset; } /** * Augments a list of datasets with information from their preferred metadata document. * * @return a the same paging response with a new list of augmented dataset instances */ @Override public PagingResponse<Dataset> augmentWithMetadata(PagingResponse<Dataset> resp) { List<Dataset> augmented = Lists.newArrayList(); for (Dataset d : resp.getResults()) { augmented.add(setGeneratedCitation(merge(getPreferredMetadataDataset(d.getKey()), d))); } resp.setResults(augmented); return resp; } /** * Augments the target dataset with all persistable properties from the supplementary dataset. * Typically the target would be a dataset built from rich XML metadata, and the supplementary * would be the persisted view of the same dataset. NULL values in the supplementary dataset * overwrite existing values in the target. 
Developers please note: * * <ul> * <li>If the target is null, then the supplementary dataset object itself is returned - not a * copy * <li>These objects are all mutable, and care should be taken that the returned object may be * one or the other of the supplied, thus you need to {@code Dataset result = merge(Dataset * emlView, Dataset dbView);} * </ul> * * @param target that will be modified with persitable values from the supplementary * @param supplementary holding the preferred properties for the target * @return the modified target dataset, or the supplementary dataset if the target is null */ private Dataset merge(@Nullable Dataset target, @Nullable Dataset supplementary) { // nothing to merge, return the target (which may be null) if (supplementary == null) { return target; } // nothing to overlay into if (target == null) { return supplementary; } // otherwise, copy all persisted values target.setKey(supplementary.getKey()); target.setDoi(supplementary.getDoi()); target.setParentDatasetKey(supplementary.getParentDatasetKey()); target.setDuplicateOfDatasetKey(supplementary.getDuplicateOfDatasetKey()); target.setInstallationKey(supplementary.getInstallationKey()); target.setPublishingOrganizationKey(supplementary.getPublishingOrganizationKey()); target.setExternal(supplementary.isExternal()); target.setNumConstituents(supplementary.getNumConstituents()); target.setType(supplementary.getType()); target.setSubtype(supplementary.getSubtype()); target.setTitle(supplementary.getTitle()); target.setAlias(supplementary.getAlias()); target.setAbbreviation(supplementary.getAbbreviation()); target.setDescription(supplementary.getDescription()); target.setLanguage(supplementary.getLanguage()); target.setHomepage(supplementary.getHomepage()); target.setLogoUrl(supplementary.getLogoUrl()); target.setCitation(supplementary.getCitation()); target.setRights(supplementary.getRights()); target.setLicense(supplementary.getLicense()); 
target.setMaintenanceUpdateFrequency(supplementary.getMaintenanceUpdateFrequency()); target.setLockedForAutoUpdate(supplementary.isLockedForAutoUpdate()); target.setCreated(supplementary.getCreated()); target.setCreatedBy(supplementary.getCreatedBy()); target.setModified(supplementary.getModified()); target.setModifiedBy(supplementary.getModifiedBy()); target.setDeleted(supplementary.getDeleted()); // nested properties target.setComments(supplementary.getComments()); target.setContacts(supplementary.getContacts()); target.setEndpoints(supplementary.getEndpoints()); target.setIdentifiers(supplementary.getIdentifiers()); target.setMachineTags(supplementary.getMachineTags()); target.setTags(supplementary.getTags()); return target; } /** * Set the generated GBIF citation on the provided Dataset object. * * https://github.com/gbif/registry/issues/4 * * Where the provider is in particular networks (OBIS), or part of CoL, we use the provided citation and check * for a DOI. * * https://github.com/gbif/registry/issues/43 (OBIS) * https://github.com/gbif/portal-feedback/issues/1819 (CoL) * @param dataset * @return */ private Dataset setGeneratedCitation(Dataset dataset) { if (dataset == null || dataset.getPublishingOrganizationKey() == null // for CoL and its constituents we want to show the verbatim citation and not the // GBIF-generated one: || Constants.COL_DATASET_KEY.equals(dataset.getKey()) || Constants.COL_DATASET_KEY.equals(dataset.getParentDatasetKey())) { return dataset; } boolean notObisDataset = !datasetKeysInNetworkCache .getUnchecked(Constants.OBIS_NETWORK_KEY) .contains(dataset.getKey()); Citation originalCitation = dataset.getCitation(); if (notObisDataset || dataset.getKey() != IUCN_DATASET_KEY || originalCitation == null || Strings.isNullOrEmpty(originalCitation.getText())) { // if the citation already exists keep it and only change the text. That allows us to keep the // identifier if provided. Citation citation = originalCitation == null ? 
new Citation() : originalCitation; citation.setText( CitationGenerator.generateCitation( dataset, organizationCache.getUnchecked(dataset.getPublishingOrganizationKey()))); dataset.setCitation(citation); } else { // Append DOI if necessary, and append "accessed via GBIF.org". originalCitation.setText(CitationGenerator.generatePublisherProvidedCitation(dataset)); } return dataset; } /** * Returns the parsed, preferred metadata document as a dataset. */ @Nullable @Override public Dataset getPreferredMetadataDataset(UUID key) { Dataset result = null; List<Metadata> docs = listMetadata(key, null); if (!docs.isEmpty()) { // the list is sorted by priority already, just pick the first! Integer metadataKey = docs.get(0).getKey(); byte[] metadataDocument = getMetadataDocument(metadataKey); try { result = DatasetParser.build(metadataDocument); } catch (IOException | IllegalArgumentException e) { // Not sure if we should not propagate an Exception to return a 500 instead LOG.error("Stored metadata document {} cannot be read", metadataKey, e); } } return result; } @Override public List<Metadata> listMetadata(UUID datasetKey, @Nullable MetadataType type) { return metadataMapper.list(datasetKey, type); } @NullToNotFound @Override public byte[] getMetadataDocument(int metadataKey) { ByteArrayWrapper document = metadataMapper.getDocument(metadataKey); if (document == null) { return null; } return document.getData(); } @Override public List<CitationDatasetUsage> ensureCitationDatasetUsagesValid(Map<String, Long> data) { LOG.debug("Ensure citation dataset usages {}", data); List<CitationDatasetUsage> result = new ArrayList<>(); for (Map.Entry<String, Long> item : data.entrySet()) { String datasetKeyOrDoi = item.getKey(); LOG.debug("Try identifier {}", datasetKeyOrDoi); if (RegistryDoiUtils.isUuid(datasetKeyOrDoi)) { LOG.debug("Identifier {} is a valid UUID", datasetKeyOrDoi); UUID key = UUID.fromString(datasetKeyOrDoi); Dataset dataset = datasetMapper.get(key); if (dataset == null) { 
LOG.error("Dataset with the UUID {} was not found", datasetKeyOrDoi); throw new IllegalArgumentException(); } else { CitationDatasetUsage citationDatasetUsage = new CitationDatasetUsage(); citationDatasetUsage.setDatasetKey(key); citationDatasetUsage.setDatasetDoi(dataset.getDoi()); citationDatasetUsage.setNumberRecords(item.getValue()); result.add(citationDatasetUsage); } } else if (DOI.isParsable(datasetKeyOrDoi)) { LOG.debug("Identifier {} is a valid DOI", datasetKeyOrDoi); List<Dataset> datasets = datasetMapper.listByDOI(datasetKeyOrDoi, new PagingRequest()); if (CollectionUtils.isEmpty(datasets)) { LOG.error("Dataset with the DOI {} was not found", datasetKeyOrDoi); throw new IllegalArgumentException(); } else { Dataset dataset = datasets.get(0); CitationDatasetUsage citationDatasetUsage = new CitationDatasetUsage(); citationDatasetUsage.setDatasetKey(dataset.getKey()); citationDatasetUsage.setDatasetDoi(dataset.getDoi()); citationDatasetUsage.setNumberRecords(item.getValue()); result.add(citationDatasetUsage); } } else { LOG.error("Identifier {} is not UUID or DOI", datasetKeyOrDoi); throw new IllegalArgumentException(); } } return result; } }
registry-service/src/main/java/org/gbif/registry/service/RegistryDatasetServiceImpl.java
/* * Copyright 2020 Global Biodiversity Information Facility (GBIF) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.gbif.registry.service; import org.gbif.api.annotation.NullToNotFound; import org.gbif.api.model.Constants; import org.gbif.api.model.common.DOI; import org.gbif.api.model.common.paging.PagingRequest; import org.gbif.api.model.common.paging.PagingResponse; import org.gbif.api.model.registry.Citation; import org.gbif.api.model.registry.Dataset; import org.gbif.api.model.registry.Metadata; import org.gbif.api.model.registry.Organization; import org.gbif.api.vocabulary.MetadataType; import org.gbif.registry.doi.util.RegistryDoiUtils; import org.gbif.registry.domain.ws.CitationDatasetUsage; import org.gbif.registry.metadata.CitationGenerator; import org.gbif.registry.metadata.parse.DatasetParser; import org.gbif.registry.persistence.mapper.DatasetMapper; import org.gbif.registry.persistence.mapper.MetadataMapper; import org.gbif.registry.persistence.mapper.OrganizationMapper; import org.gbif.registry.persistence.mapper.handler.ByteArrayWrapper; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Set; import java.util.UUID; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import javax.annotation.Nullable; import org.apache.commons.collections.CollectionUtils; import org.owasp.html.HtmlPolicyBuilder; import org.owasp.html.PolicyFactory; import org.slf4j.Logger; import 
org.slf4j.LoggerFactory; import org.springframework.stereotype.Service; import com.google.common.base.Strings; import com.google.common.cache.CacheBuilder; import com.google.common.cache.CacheLoader; import com.google.common.cache.LoadingCache; import com.google.common.collect.Lists; @SuppressWarnings("UnstableApiUsage") @Service public class RegistryDatasetServiceImpl implements RegistryDatasetService { private static final Logger LOG = LoggerFactory.getLogger(RegistryDatasetServiceImpl.class); // HTML sanitizer policy for paragraph private static final PolicyFactory PARAGRAPH_HTML_SANITIZER = new HtmlPolicyBuilder() .allowCommonBlockElements() // "p", "div", "h1", ... .allowCommonInlineFormattingElements() // "b", "i" ... .allowElements("a") .allowUrlProtocols("https", "http") .allowAttributes("href") .onElements("a") .toFactory(); private final DatasetMapper datasetMapper; private final MetadataMapper metadataMapper; private final LoadingCache<UUID, Organization> organizationCache; private final LoadingCache<UUID, Set<UUID>> datasetKeysInNetworkCache; public RegistryDatasetServiceImpl( MetadataMapper metadataMapper, OrganizationMapper organizationMapper, DatasetMapper datasetMapper) { this.metadataMapper = metadataMapper; this.datasetMapper = datasetMapper; this.organizationCache = CacheBuilder.newBuilder() .expireAfterWrite(5, TimeUnit.MINUTES) .build( new CacheLoader<UUID, Organization>() { @Override public Organization load(UUID key) { return organizationMapper.get(key); } }); datasetKeysInNetworkCache = CacheBuilder.newBuilder() .expireAfterWrite(1, TimeUnit.MINUTES) .build( new CacheLoader<UUID, Set<UUID>>() { @Override public Set<UUID> load(UUID key) { return datasetMapper.listDatasetsInNetwork(key, null).stream() .map(Dataset::getKey) .collect(Collectors.toSet()); } }); } @NullToNotFound @Override public Dataset get(UUID key) { Dataset dataset = merge(getPreferredMetadataDataset(key), datasetMapper.get(key)); if (dataset == null) { return null; } 
setGeneratedCitation(dataset); return sanitizeDataset(dataset); } /** * Sanitize data on Dataset object mainly to restrict HTML tags that can be used. * * @param dataset * @return the original dataset with its content sanitized */ private Dataset sanitizeDataset(Dataset dataset) { if (!Strings.isNullOrEmpty(dataset.getDescription())) { dataset.setDescription(PARAGRAPH_HTML_SANITIZER.sanitize(dataset.getDescription())); } return dataset; } /** * Augments a list of datasets with information from their preferred metadata document. * * @return a the same paging response with a new list of augmented dataset instances */ @Override public PagingResponse<Dataset> augmentWithMetadata(PagingResponse<Dataset> resp) { List<Dataset> augmented = Lists.newArrayList(); for (Dataset d : resp.getResults()) { augmented.add(setGeneratedCitation(merge(getPreferredMetadataDataset(d.getKey()), d))); } resp.setResults(augmented); return resp; } /** * Augments the target dataset with all persistable properties from the supplementary dataset. * Typically the target would be a dataset built from rich XML metadata, and the supplementary * would be the persisted view of the same dataset. NULL values in the supplementary dataset * overwrite existing values in the target. 
Developers please note: * * <ul> * <li>If the target is null, then the supplementary dataset object itself is returned - not a * copy * <li>These objects are all mutable, and care should be taken that the returned object may be * one or the other of the supplied, thus you need to {@code Dataset result = merge(Dataset * emlView, Dataset dbView);} * </ul> * * @param target that will be modified with persitable values from the supplementary * @param supplementary holding the preferred properties for the target * @return the modified target dataset, or the supplementary dataset if the target is null */ private Dataset merge(@Nullable Dataset target, @Nullable Dataset supplementary) { // nothing to merge, return the target (which may be null) if (supplementary == null) { return target; } // nothing to overlay into if (target == null) { return supplementary; } // otherwise, copy all persisted values target.setKey(supplementary.getKey()); target.setDoi(supplementary.getDoi()); target.setParentDatasetKey(supplementary.getParentDatasetKey()); target.setDuplicateOfDatasetKey(supplementary.getDuplicateOfDatasetKey()); target.setInstallationKey(supplementary.getInstallationKey()); target.setPublishingOrganizationKey(supplementary.getPublishingOrganizationKey()); target.setExternal(supplementary.isExternal()); target.setNumConstituents(supplementary.getNumConstituents()); target.setType(supplementary.getType()); target.setSubtype(supplementary.getSubtype()); target.setTitle(supplementary.getTitle()); target.setAlias(supplementary.getAlias()); target.setAbbreviation(supplementary.getAbbreviation()); target.setDescription(supplementary.getDescription()); target.setLanguage(supplementary.getLanguage()); target.setHomepage(supplementary.getHomepage()); target.setLogoUrl(supplementary.getLogoUrl()); target.setCitation(supplementary.getCitation()); target.setRights(supplementary.getRights()); target.setLicense(supplementary.getLicense()); 
target.setMaintenanceUpdateFrequency(supplementary.getMaintenanceUpdateFrequency()); target.setLockedForAutoUpdate(supplementary.isLockedForAutoUpdate()); target.setCreated(supplementary.getCreated()); target.setCreatedBy(supplementary.getCreatedBy()); target.setModified(supplementary.getModified()); target.setModifiedBy(supplementary.getModifiedBy()); target.setDeleted(supplementary.getDeleted()); // nested properties target.setComments(supplementary.getComments()); target.setContacts(supplementary.getContacts()); target.setEndpoints(supplementary.getEndpoints()); target.setIdentifiers(supplementary.getIdentifiers()); target.setMachineTags(supplementary.getMachineTags()); target.setTags(supplementary.getTags()); return target; } /** * Set the generated GBIF citation on the provided Dataset object. * * https://github.com/gbif/registry/issues/4 * * Where the provider is in particular networks (OBIS), or part of CoL, we use the provided citation and check * for a DOI. * * https://github.com/gbif/registry/issues/43 (OBIS) * https://github.com/gbif/portal-feedback/issues/1819 (CoL) * @param dataset * @return */ private Dataset setGeneratedCitation(Dataset dataset) { if (dataset == null || dataset.getPublishingOrganizationKey() == null // for CoL and its constituents we want to show the verbatim citation and not the // GBIF-generated one: || Constants.COL_DATASET_KEY.equals(dataset.getKey()) || Constants.COL_DATASET_KEY.equals(dataset.getParentDatasetKey())) { return dataset; } boolean notObisDataset = !datasetKeysInNetworkCache .getUnchecked(Constants.OBIS_NETWORK_KEY) .contains(dataset.getKey()); Citation originalCitation = dataset.getCitation(); if (notObisDataset || originalCitation == null || Strings.isNullOrEmpty(originalCitation.getText())) { // if the citation already exists keep it and only change the text. That allows us to keep the // identifier // if provided. Citation citation = originalCitation == null ? 
new Citation() : originalCitation; citation.setText( CitationGenerator.generateCitation( dataset, organizationCache.getUnchecked(dataset.getPublishingOrganizationKey()))); dataset.setCitation(citation); } else { // Append DOI if necessary, and append "accessed via GBIF.org". originalCitation.setText(CitationGenerator.generatePublisherProvidedCitation(dataset)); } return dataset; } /** * Returns the parsed, preferred metadata document as a dataset. */ @Nullable @Override public Dataset getPreferredMetadataDataset(UUID key) { Dataset result = null; List<Metadata> docs = listMetadata(key, null); if (!docs.isEmpty()) { // the list is sorted by priority already, just pick the first! Integer metadataKey = docs.get(0).getKey(); byte[] metadataDocument = getMetadataDocument(metadataKey); try { result = DatasetParser.build(metadataDocument); } catch (IOException | IllegalArgumentException e) { // Not sure if we should not propagate an Exception to return a 500 instead LOG.error("Stored metadata document {} cannot be read", metadataKey, e); } } return result; } @Override public List<Metadata> listMetadata(UUID datasetKey, @Nullable MetadataType type) { return metadataMapper.list(datasetKey, type); } @NullToNotFound @Override public byte[] getMetadataDocument(int metadataKey) { ByteArrayWrapper document = metadataMapper.getDocument(metadataKey); if (document == null) { return null; } return document.getData(); } @Override public List<CitationDatasetUsage> ensureCitationDatasetUsagesValid(Map<String, Long> data) { LOG.debug("Ensure citation dataset usages {}", data); List<CitationDatasetUsage> result = new ArrayList<>(); for (Map.Entry<String, Long> item : data.entrySet()) { String datasetKeyOrDoi = item.getKey(); LOG.debug("Try identifier {}", datasetKeyOrDoi); if (RegistryDoiUtils.isUuid(datasetKeyOrDoi)) { LOG.debug("Identifier {} is a valid UUID", datasetKeyOrDoi); UUID key = UUID.fromString(datasetKeyOrDoi); Dataset dataset = datasetMapper.get(key); if (dataset == null) { 
LOG.error("Dataset with the UUID {} was not found", datasetKeyOrDoi); throw new IllegalArgumentException(); } else { CitationDatasetUsage citationDatasetUsage = new CitationDatasetUsage(); citationDatasetUsage.setDatasetKey(key); citationDatasetUsage.setDatasetDoi(dataset.getDoi()); citationDatasetUsage.setNumberRecords(item.getValue()); result.add(citationDatasetUsage); } } else if (DOI.isParsable(datasetKeyOrDoi)) { LOG.debug("Identifier {} is a valid DOI", datasetKeyOrDoi); List<Dataset> datasets = datasetMapper.listByDOI(datasetKeyOrDoi, new PagingRequest()); if (CollectionUtils.isEmpty(datasets)) { LOG.error("Dataset with the DOI {} was not found", datasetKeyOrDoi); throw new IllegalArgumentException(); } else { Dataset dataset = datasets.get(0); CitationDatasetUsage citationDatasetUsage = new CitationDatasetUsage(); citationDatasetUsage.setDatasetKey(dataset.getKey()); citationDatasetUsage.setDatasetDoi(dataset.getDoi()); citationDatasetUsage.setNumberRecords(item.getValue()); result.add(citationDatasetUsage); } } else { LOG.error("Identifier {} is not UUID or DOI", datasetKeyOrDoi); throw new IllegalArgumentException(); } } return result; } }
#218 Use publisher's citation for IUCN dataset
registry-service/src/main/java/org/gbif/registry/service/RegistryDatasetServiceImpl.java
#218 Use publisher's citation for IUCN dataset
<ide><path>egistry-service/src/main/java/org/gbif/registry/service/RegistryDatasetServiceImpl.java <ide> public class RegistryDatasetServiceImpl implements RegistryDatasetService { <ide> <ide> private static final Logger LOG = LoggerFactory.getLogger(RegistryDatasetServiceImpl.class); <add> <add> private static final UUID IUCN_DATASET_KEY = UUID.fromString("19491596-35ae-4a91-9a98-85cf505f1bd3"); <ide> <ide> // HTML sanitizer policy for paragraph <ide> private static final PolicyFactory PARAGRAPH_HTML_SANITIZER = <ide> Citation originalCitation = dataset.getCitation(); <ide> <ide> if (notObisDataset <add> || dataset.getKey() != IUCN_DATASET_KEY <ide> || originalCitation == null <ide> || Strings.isNullOrEmpty(originalCitation.getText())) { <ide> // if the citation already exists keep it and only change the text. That allows us to keep the <del> // identifier <del> // if provided. <add> // identifier if provided. <ide> Citation citation = originalCitation == null ? new Citation() : originalCitation; <ide> citation.setText( <ide> CitationGenerator.generateCitation(
JavaScript
mit
adde839c0b88df7c892e4cbf9df5457c65d0198d
0
radishengine/drowsy,radishengine/drowsy
require(['ByteSource', 'Item', 'AppleVolume'], function(ByteSource, Item, AppleVolume) { 'use strict'; function makeFileDrop(el, callback) { if (typeof el === 'string') { el = document.getElementById(el); if (!el) { console.error('filedrop element not found'); return; } el.addEventListener('dragenter', function(e) { el.classList.add('dropping'); }); el.addEventListener('dragleave', function(e) { el.classList.remove('dropping'); }); el.addEventListener('dragover', function(e) { e.stopPropagation(); e.preventDefault(); e.dataTransfer.dropEffect = 'copy'; }); el.addEventListener('drop', function(e) { e.stopPropagation(); e.preventDefault(); el.classList.remove('dropping'); if (e.dataTransfer.files[0]) { callback(e.dataTransfer.files[0]); } }); el.classList.add('drop-target'); } } makeFileDrop('drop-zone', function(droppedFile) { if (/\.(iso|toast|dsk|img)$/i.test(droppedFile.name)) { var byteSource = ByteSource.from(droppedFile); var appleVolume = new AppleVolume(byteSource); appleVolume.read({}); } else { var item = new Item(ByteSource.from(droppedFile)); var extension = droppedFile.name.match(/\.([^\.]+)$/); if (extension) { extension = extension && encodeURIComponent(extension[1].toUpperCase().replace(/[\\\/\*\"\:\?\|<>]/g, '_')); var importString = 'ext/open_' + extension; require([importString], function(open) { open(item); }, function() { console.log('Unsupported extension: ' + extension); }); } } }); });
main.js
require(['ByteSource', 'AppleVolume'], function(ByteSource, AppleVolume) { 'use strict'; function makeFileDrop(el, callback) { if (typeof el === 'string') { el = document.getElementById(el); if (!el) { console.error('filedrop element not found'); return; } el.addEventListener('dragenter', function(e) { el.classList.add('dropping'); }); el.addEventListener('dragleave', function(e) { el.classList.remove('dropping'); }); el.addEventListener('dragover', function(e) { e.stopPropagation(); e.preventDefault(); e.dataTransfer.dropEffect = 'copy'; }); el.addEventListener('drop', function(e) { e.stopPropagation(); e.preventDefault(); el.classList.remove('dropping'); if (e.dataTransfer.files[0]) { callback(e.dataTransfer.files[0]); } }); el.classList.add('drop-target'); } } makeFileDrop('drop-zone', function(droppedFile) { if (/\.(iso|toast|dsk|img)$/i.test(droppedFile.name)) { var byteSource = ByteSource.from(droppedFile); var appleVolume = new AppleVolume(byteSource); appleVolume.read({}); } else { var extension = droppedFile.name.match(/\.([^\.]+)$/); if (extension) { extension = extension && encodeURIComponent(extension[1].toUpperCase().replace(/[\\\/\*\"\:\?\|<>]/g, '_')); var importString = 'ext/open_' + extension; console.log(importString); } } }); });
Create item and pass it to extension-based opener
main.js
Create item and pass it to extension-based opener
<ide><path>ain.js <ide> <del>require(['ByteSource', 'AppleVolume'], function(ByteSource, AppleVolume) { <add>require(['ByteSource', 'Item', 'AppleVolume'], function(ByteSource, Item, AppleVolume) { <ide> <ide> 'use strict'; <ide> <ide> <ide> } <ide> else { <add> var item = new Item(ByteSource.from(droppedFile)); <ide> var extension = droppedFile.name.match(/\.([^\.]+)$/); <ide> if (extension) { <ide> extension = extension && encodeURIComponent(extension[1].toUpperCase().replace(/[\\\/\*\"\:\?\|<>]/g, '_')); <ide> var importString = 'ext/open_' + extension; <del> console.log(importString); <add> require([importString], <add> function(open) { <add> open(item); <add> }, <add> function() { <add> console.log('Unsupported extension: ' + extension); <add> }); <ide> } <ide> } <ide>
JavaScript
mit
baa8b7d886ad555e86905b005497823054365adb
0
aheckmann/mpromise
'use strict'; /*! * Module dependencies. */ var slice = require('sliced'); var EventEmitter = require('events').EventEmitter; /** * compat with node 0.10 */ var next = 'function' == typeof setImmediate ? setImmediate : process.nextTick; /** * Promise constructor. * * _NOTE: The success and failure event names can be overridden by setting `Promise.SUCCESS` and `Promise.FAILURE` respectively._ * * @param {Function} back a function that accepts `fn(err, ...){}` as signature * @inherits NodeJS EventEmitter http://nodejs.org/api/events.html#events_class_events_eventemitter * @event `reject`: Emits when the promise is rejected (event name may be overridden) * @event `fulfill`: Emits when the promise is fulfilled (event name may be overridden) * @api public */ function Promise (back) { this.emitted = {}; this.ended = false; if ('function' == typeof back) this.onResolve(back); } /*! * event names */ Promise.SUCCESS = 'fulfill'; Promise.FAILURE = 'reject'; /*! * Inherits from EventEmitter. */ Promise.prototype.__proto__ = EventEmitter.prototype; /** * Adds `listener` to the `event`. * * If `event` is either the success or failure event and the event has already been emitted, the`listener` is called immediately and passed the results of the original emitted event. * * @param {String} event * @param {Function} callback * @return {Promise} this * @api public */ Promise.prototype.on = function (event, callback) { if (this.emitted[event]) callback.apply(this, this.emitted[event]); else EventEmitter.prototype.on.call(this, event, callback); return this; } /** * Keeps track of emitted events to run them on `on`. 
* * @api private */ Promise.prototype.emit = function (event) { // ensures a promise can't be fulfill() or reject() more than once var success = this.constructor.SUCCESS; var failure = this.constructor.FAILURE; if (event == success || event == failure) { if (this.emitted[success] || this.emitted[failure]) { return this; } this.emitted[event] = slice(arguments, 1); } return EventEmitter.prototype.emit.apply(this, arguments); } /** * Fulfills this promise with passed arguments. * * If this promise has already been fulfilled or rejected, no action is taken. * * @api public */ Promise.prototype.fulfill = function () { var args = slice(arguments); return this.emit.apply(this, [this.constructor.SUCCESS].concat(args)); } /** * Rejects this promise with `reason`. * * If this promise has already been fulfilled or rejected, no action is taken. * * @api public * @param {Object|String} reason * @return {Promise} this */ Promise.prototype.reject = function (reason) { return this.emit(this.constructor.FAILURE, reason); } /** * Resolves this promise to a rejected state if `err` is passed or * fulfilled state if no `err` is passed. * * @param {Error} [err] error or null * @param {Object} [val] value to fulfill the promise with * @api public */ Promise.prototype.resolve = function (err, val) { if (err) return this.reject(err); return this.fulfill(val); } /** * Adds a listener to the SUCCESS event. * * @return {Promise} this * @api public */ Promise.prototype.onFulfill = function (fn) { return this.on(this.constructor.SUCCESS, fn); } /** * Adds a listener to the FAILURE event. * * @return {Promise} this * @api public */ Promise.prototype.onReject = function (fn) { return this.on(this.constructor.FAILURE, fn); } /** * Adds a single function as a listener to both SUCCESS and FAILURE. * * It will be executed with traditional node.js argument position: * function (err, args...) 
{} * * @param {Function} fn * @return {Promise} this */ Promise.prototype.onResolve = function (fn) { this.on(this.constructor.FAILURE, function(err){ fn.call(this, err); }); this.on(this.constructor.SUCCESS, function(){ var args = slice(arguments); fn.apply(this, [null].concat(args)); }); return this; } /** * Creates a new promise and returns it. If `onFulfill` or * `onReject` are passed, they are added as SUCCESS/ERROR callbacks * to this promise after the next tick. * * Conforms to [promises/A+](https://github.com/promises-aplus/promises-spec) specification. Read for more detail how to use this method. * * ####Example: * * var p = new Promise; * p.then(function (arg) { * return arg + 1; * }).then(function (arg) { * throw new Error(arg + ' is an error!'); * }).then(null, function (err) { * assert.ok(err instanceof Error); * assert.equal('2 is an error', err.message); * }); * p.complete(1); * * @see promises-A+ https://github.com/promises-aplus/promises-spec * @param {Function} onFulFill * @param {Function} onReject * @return {Promise} newPromise */ Promise.prototype.then = function (onFulfill, onReject) { var self = this , retPromise = new Promise; next(function () { if ('function' == typeof onReject) { self.onReject(handler(retPromise, onReject)); } else { self.onReject(retPromise.reject.bind(retPromise)); } if ('function' == typeof onFulfill) { self.onFulfill(handler(retPromise, onFulfill)); } else { self.onFulfill(retPromise.fulfill.bind(retPromise)); } }) return retPromise; } function handler (promise, fn) { return function handle (arg) { var val; try { val = fn(arg); } catch (err) { if (promise.ended) throw err; return promise.reject(err); } var type = typeof val; if ('undefined' == type) { return promise.fulfill(val); } resolve(promise, val); } } function resolve (promise, x) { var then; var type; var done; var reject_; var resolve_; if (null != x) { type = typeof x; if ('object' == type || 'function' == type) { try { then = x.then; } catch (err) { if 
(promise.ended) throw err; return promise.reject(err); } if ('function' == typeof then) { try { resolve_ = resolve.bind(this, promise); reject_ = promise.reject.bind(promise); done = false; return then.call( x , function fulfill () { if (done) return; done = true; return resolve_.apply(this, arguments); } , function reject () { if (done) return; done = true; return reject_.apply(this, arguments); }) } catch (err) { if (done) return; done = true; if (promise.ended) throw err; return promise.reject(err); } } } } promise.fulfill(x); } /** * Signifies that this promise was the last in a chain of `then()s`: if a handler passed to the call to `then` which produced this promise throws, the exception will go uncaught. * * ####Example: * * var p = new Promise; * p.then(function(){ throw new Error('shucks') }); * setTimeout(function () { * p.fulfill(); * // error was caught and swallowed by the promise returned from * // p.then(). we either have to always register handlers on * // the returned promises or we can do the following... * }, 10); * * // this time we use .end() which prevents catching thrown errors * var p = new Promise; * var p2 = p.then(function(){ throw new Error('shucks') }).end(); // <-- * setTimeout(function () { * p.fulfill(); // throws "shucks" * }, 10); * * @api public */ Promise.prototype.end = function () { this.ended = true; } /*! * Module exports. */ module.exports = Promise;
lib/promise.js
'use strict'; /*! * Module dependencies. */ var slice = require('sliced'); var EventEmitter = require('events').EventEmitter; /** * Promise constructor. * * _NOTE: The success and failure event names can be overridden by setting `Promise.SUCCESS` and `Promise.FAILURE` respectively._ * * @param {Function} back a function that accepts `fn(err, ...){}` as signature * @inherits NodeJS EventEmitter http://nodejs.org/api/events.html#events_class_events_eventemitter * @event `reject`: Emits when the promise is rejected (event name may be overridden) * @event `fulfill`: Emits when the promise is fulfilled (event name may be overridden) * @api public */ function Promise (back) { this.emitted = {}; this.ended = false; if ('function' == typeof back) this.onResolve(back); } /*! * event names */ Promise.SUCCESS = 'fulfill'; Promise.FAILURE = 'reject'; /*! * Inherits from EventEmitter. */ Promise.prototype.__proto__ = EventEmitter.prototype; /** * Adds `listener` to the `event`. * * If `event` is either the success or failure event and the event has already been emitted, the`listener` is called immediately and passed the results of the original emitted event. * * @param {String} event * @param {Function} callback * @return {Promise} this * @api public */ Promise.prototype.on = function (event, callback) { if (this.emitted[event]) callback.apply(this, this.emitted[event]); else EventEmitter.prototype.on.call(this, event, callback); return this; } /** * Keeps track of emitted events to run them on `on`. * * @api private */ Promise.prototype.emit = function (event) { // ensures a promise can't be fulfill() or reject() more than once var success = this.constructor.SUCCESS; var failure = this.constructor.FAILURE; if (event == success || event == failure) { if (this.emitted[success] || this.emitted[failure]) { return this; } this.emitted[event] = slice(arguments, 1); } return EventEmitter.prototype.emit.apply(this, arguments); } /** * Fulfills this promise with passed arguments. 
* * If this promise has already been fulfilled or rejected, no action is taken. * * @api public */ Promise.prototype.fulfill = function () { var args = slice(arguments); return this.emit.apply(this, [this.constructor.SUCCESS].concat(args)); } /** * Rejects this promise with `reason`. * * If this promise has already been fulfilled or rejected, no action is taken. * * @api public * @param {Object|String} reason * @return {Promise} this */ Promise.prototype.reject = function (reason) { return this.emit(this.constructor.FAILURE, reason); } /** * Resolves this promise to a rejected state if `err` is passed or * fulfilled state if no `err` is passed. * * @param {Error} [err] error or null * @param {Object} [val] value to fulfill the promise with * @api public */ Promise.prototype.resolve = function (err, val) { if (err) return this.reject(err); return this.fulfill(val); } /** * Adds a listener to the SUCCESS event. * * @return {Promise} this * @api public */ Promise.prototype.onFulfill = function (fn) { return this.on(this.constructor.SUCCESS, fn); } /** * Adds a listener to the FAILURE event. * * @return {Promise} this * @api public */ Promise.prototype.onReject = function (fn) { return this.on(this.constructor.FAILURE, fn); } /** * Adds a single function as a listener to both SUCCESS and FAILURE. * * It will be executed with traditional node.js argument position: * function (err, args...) {} * * @param {Function} fn * @return {Promise} this */ Promise.prototype.onResolve = function (fn) { this.on(this.constructor.FAILURE, function(err){ fn.call(this, err); }); this.on(this.constructor.SUCCESS, function(){ var args = slice(arguments); fn.apply(this, [null].concat(args)); }); return this; } /** * Creates a new promise and returns it. If `onFulfill` or * `onReject` are passed, they are added as SUCCESS/ERROR callbacks * to this promise after the nextTick. * * Conforms to [promises/A+](https://github.com/promises-aplus/promises-spec) specification. 
Read for more detail how to use this method. * * ####Example: * * var p = new Promise; * p.then(function (arg) { * return arg + 1; * }).then(function (arg) { * throw new Error(arg + ' is an error!'); * }).then(null, function (err) { * assert.ok(err instanceof Error); * assert.equal('2 is an error', err.message); * }); * p.complete(1); * * @see promises-A+ https://github.com/promises-aplus/promises-spec * @param {Function} onFulFill * @param {Function} onReject * @return {Promise} newPromise */ Promise.prototype.then = function (onFulfill, onReject) { var self = this , retPromise = new Promise; process.nextTick(function () { if ('function' == typeof onReject) { self.onReject(handler(retPromise, onReject)); } else { self.onReject(retPromise.reject.bind(retPromise)); } if ('function' == typeof onFulfill) { self.onFulfill(handler(retPromise, onFulfill)); } else { self.onFulfill(retPromise.fulfill.bind(retPromise)); } }) return retPromise; } function handler (promise, fn) { return function handle (arg) { var val; try { val = fn(arg); } catch (err) { if (promise.ended) throw err; return promise.reject(err); } var type = typeof val; if ('undefined' == type) { return promise.fulfill(val); } resolve(promise, val); } } function resolve (promise, x) { var then; var type; var done; var reject_; var resolve_; if (null != x) { type = typeof x; if ('object' == type || 'function' == type) { try { then = x.then; } catch (err) { if (promise.ended) throw err; return promise.reject(err); } if ('function' == typeof then) { try { resolve_ = resolve.bind(this, promise); reject_ = promise.reject.bind(promise); done = false; return then.call( x , function fulfill () { if (done) return; done = true; return resolve_.apply(this, arguments); } , function reject () { if (done) return; done = true; return reject_.apply(this, arguments); }) } catch (err) { if (done) return; done = true; if (promise.ended) throw err; return promise.reject(err); } } } } promise.fulfill(x); } /** * Signifies that 
this promise was the last in a chain of `then()s`: if a handler passed to the call to `then` which produced this promise throws, the exception will go uncaught. * * ####Example: * * var p = new Promise; * p.then(function(){ throw new Error('shucks') }); * setTimeout(function () { * p.fulfill(); * // error was caught and swallowed by the promise returned from * // p.then(). we either have to always register handlers on * // the returned promises or we can do the following... * }, 10); * * // this time we use .end() which prevents catching thrown errors * var p = new Promise; * var p2 = p.then(function(){ throw new Error('shucks') }).end(); // <-- * setTimeout(function () { * p.fulfill(); // throws "shucks" * }, 10); * * @api public */ Promise.prototype.end = function () { this.ended = true; } /*! * Module exports. */ module.exports = Promise;
use setImmediate if available
lib/promise.js
use setImmediate if available
<ide><path>ib/promise.js <ide> <ide> var slice = require('sliced'); <ide> var EventEmitter = require('events').EventEmitter; <add> <add>/** <add> * compat with node 0.10 <add> */ <add> <add>var next = 'function' == typeof setImmediate <add> ? setImmediate <add> : process.nextTick; <ide> <ide> /** <ide> * Promise constructor. <ide> /** <ide> * Creates a new promise and returns it. If `onFulfill` or <ide> * `onReject` are passed, they are added as SUCCESS/ERROR callbacks <del> * to this promise after the nextTick. <add> * to this promise after the next tick. <ide> * <ide> * Conforms to [promises/A+](https://github.com/promises-aplus/promises-spec) specification. Read for more detail how to use this method. <ide> * <ide> var self = this <ide> , retPromise = new Promise; <ide> <del> process.nextTick(function () { <add> next(function () { <ide> if ('function' == typeof onReject) { <ide> self.onReject(handler(retPromise, onReject)); <ide> } else {
Java
apache-2.0
5e1a42ad00608ae401906bf746e7cfd41e7e1425
0
retomerz/intellij-community,fitermay/intellij-community,vvv1559/intellij-community,amith01994/intellij-community,mglukhikh/intellij-community,supersven/intellij-community,caot/intellij-community,fnouama/intellij-community,FHannes/intellij-community,lucafavatella/intellij-community,vladmm/intellij-community,gnuhub/intellij-community,caot/intellij-community,semonte/intellij-community,kdwink/intellij-community,MER-GROUP/intellij-community,supersven/intellij-community,samthor/intellij-community,Lekanich/intellij-community,blademainer/intellij-community,asedunov/intellij-community,supersven/intellij-community,da1z/intellij-community,supersven/intellij-community,lucafavatella/intellij-community,da1z/intellij-community,ryano144/intellij-community,ivan-fedorov/intellij-community,asedunov/intellij-community,dslomov/intellij-community,petteyg/intellij-community,robovm/robovm-studio,semonte/intellij-community,fnouama/intellij-community,muntasirsyed/intellij-community,ryano144/intellij-community,clumsy/intellij-community,salguarnieri/intellij-community,samthor/intellij-community,blademainer/intellij-community,michaelgallacher/intellij-community,samthor/intellij-community,holmes/intellij-community,Distrotech/intellij-community,orekyuu/intellij-community,idea4bsd/idea4bsd,robovm/robovm-studio,youdonghai/intellij-community,signed/intellij-community,vladmm/intellij-community,TangHao1987/intellij-community,ftomassetti/intellij-community,lucafavatella/intellij-community,SerCeMan/intellij-community,izonder/intellij-community,ahb0327/intellij-community,slisson/intellij-community,adedayo/intellij-community,jagguli/intellij-community,suncycheng/intellij-community,signed/intellij-community,apixandru/intellij-community,lucafavatella/intellij-community,blademainer/intellij-community,ivan-fedorov/intellij-community,amith01994/intellij-community,vladmm/intellij-community,TangHao1987/intellij-community,muntasirsyed/intellij-community,alphafoobar/intellij-community,fitermay/intellij-community,d
a1z/intellij-community,ibinti/intellij-community,asedunov/intellij-community,youdonghai/intellij-community,samthor/intellij-community,salguarnieri/intellij-community,ThiagoGarciaAlves/intellij-community,gnuhub/intellij-community,alphafoobar/intellij-community,ryano144/intellij-community,asedunov/intellij-community,vladmm/intellij-community,semonte/intellij-community,robovm/robovm-studio,retomerz/intellij-community,fitermay/intellij-community,petteyg/intellij-community,ThiagoGarciaAlves/intellij-community,hurricup/intellij-community,nicolargo/intellij-community,tmpgit/intellij-community,semonte/intellij-community,MER-GROUP/intellij-community,vladmm/intellij-community,ibinti/intellij-community,SerCeMan/intellij-community,retomerz/intellij-community,vvv1559/intellij-community,kool79/intellij-community,Distrotech/intellij-community,MER-GROUP/intellij-community,hurricup/intellij-community,samthor/intellij-community,izonder/intellij-community,fengbaicanhe/intellij-community,jagguli/intellij-community,xfournet/intellij-community,asedunov/intellij-community,salguarnieri/intellij-community,idea4bsd/idea4bsd,izonder/intellij-community,FHannes/intellij-community,slisson/intellij-community,ibinti/intellij-community,fnouama/intellij-community,fnouama/intellij-community,xfournet/intellij-community,adedayo/intellij-community,fnouama/intellij-community,apixandru/intellij-community,slisson/intellij-community,FHannes/intellij-community,idea4bsd/idea4bsd,ThiagoGarciaAlves/intellij-community,MER-GROUP/intellij-community,kdwink/intellij-community,xfournet/intellij-community,mglukhikh/intellij-community,dslomov/intellij-community,apixandru/intellij-community,ahb0327/intellij-community,petteyg/intellij-community,nicolargo/intellij-community,gnuhub/intellij-community,orekyuu/intellij-community,clumsy/intellij-community,gnuhub/intellij-community,fengbaicanhe/intellij-community,blademainer/intellij-community,vvv1559/intellij-community,vladmm/intellij-community,adedayo/intellij-community,izon
der/intellij-community,FHannes/intellij-community,slisson/intellij-community,blademainer/intellij-community,ahb0327/intellij-community,tmpgit/intellij-community,retomerz/intellij-community,supersven/intellij-community,adedayo/intellij-community,xfournet/intellij-community,MER-GROUP/intellij-community,caot/intellij-community,ThiagoGarciaAlves/intellij-community,mglukhikh/intellij-community,supersven/intellij-community,da1z/intellij-community,FHannes/intellij-community,ivan-fedorov/intellij-community,gnuhub/intellij-community,asedunov/intellij-community,adedayo/intellij-community,wreckJ/intellij-community,xfournet/intellij-community,signed/intellij-community,holmes/intellij-community,alphafoobar/intellij-community,ThiagoGarciaAlves/intellij-community,semonte/intellij-community,apixandru/intellij-community,ryano144/intellij-community,idea4bsd/idea4bsd,fengbaicanhe/intellij-community,semonte/intellij-community,tmpgit/intellij-community,robovm/robovm-studio,semonte/intellij-community,nicolargo/intellij-community,vvv1559/intellij-community,ryano144/intellij-community,fnouama/intellij-community,holmes/intellij-community,blademainer/intellij-community,amith01994/intellij-community,orekyuu/intellij-community,robovm/robovm-studio,Distrotech/intellij-community,ibinti/intellij-community,idea4bsd/idea4bsd,apixandru/intellij-community,youdonghai/intellij-community,apixandru/intellij-community,dslomov/intellij-community,hurricup/intellij-community,mglukhikh/intellij-community,kdwink/intellij-community,amith01994/intellij-community,pwoodworth/intellij-community,kdwink/intellij-community,michaelgallacher/intellij-community,alphafoobar/intellij-community,dslomov/intellij-community,petteyg/intellij-community,tmpgit/intellij-community,wreckJ/intellij-community,salguarnieri/intellij-community,muntasirsyed/intellij-community,jagguli/intellij-community,muntasirsyed/intellij-community,vladmm/intellij-community,robovm/robovm-studio,signed/intellij-community,Distrotech/intellij-community,pet
teyg/intellij-community,jagguli/intellij-community,izonder/intellij-community,kdwink/intellij-community,ahb0327/intellij-community,idea4bsd/idea4bsd,youdonghai/intellij-community,allotria/intellij-community,ftomassetti/intellij-community,jagguli/intellij-community,pwoodworth/intellij-community,ibinti/intellij-community,lucafavatella/intellij-community,lucafavatella/intellij-community,SerCeMan/intellij-community,Distrotech/intellij-community,wreckJ/intellij-community,Distrotech/intellij-community,SerCeMan/intellij-community,mglukhikh/intellij-community,pwoodworth/intellij-community,mglukhikh/intellij-community,samthor/intellij-community,clumsy/intellij-community,amith01994/intellij-community,vvv1559/intellij-community,fnouama/intellij-community,pwoodworth/intellij-community,MichaelNedzelsky/intellij-community,dslomov/intellij-community,lucafavatella/intellij-community,gnuhub/intellij-community,ftomassetti/intellij-community,samthor/intellij-community,ivan-fedorov/intellij-community,signed/intellij-community,da1z/intellij-community,salguarnieri/intellij-community,apixandru/intellij-community,retomerz/intellij-community,Distrotech/intellij-community,hurricup/intellij-community,idea4bsd/idea4bsd,blademainer/intellij-community,FHannes/intellij-community,adedayo/intellij-community,signed/intellij-community,holmes/intellij-community,clumsy/intellij-community,robovm/robovm-studio,FHannes/intellij-community,ryano144/intellij-community,vvv1559/intellij-community,orekyuu/intellij-community,ftomassetti/intellij-community,supersven/intellij-community,nicolargo/intellij-community,adedayo/intellij-community,suncycheng/intellij-community,MichaelNedzelsky/intellij-community,ftomassetti/intellij-community,izonder/intellij-community,fengbaicanhe/intellij-community,ibinti/intellij-community,slisson/intellij-community,samthor/intellij-community,ryano144/intellij-community,alphafoobar/intellij-community,vvv1559/intellij-community,fitermay/intellij-community,michaelgallacher/intellij-comm
unity,dslomov/intellij-community,wreckJ/intellij-community,retomerz/intellij-community,SerCeMan/intellij-community,jagguli/intellij-community,ThiagoGarciaAlves/intellij-community,kool79/intellij-community,Lekanich/intellij-community,kool79/intellij-community,hurricup/intellij-community,apixandru/intellij-community,clumsy/intellij-community,MER-GROUP/intellij-community,petteyg/intellij-community,lucafavatella/intellij-community,ivan-fedorov/intellij-community,caot/intellij-community,SerCeMan/intellij-community,tmpgit/intellij-community,youdonghai/intellij-community,fengbaicanhe/intellij-community,samthor/intellij-community,TangHao1987/intellij-community,TangHao1987/intellij-community,holmes/intellij-community,ftomassetti/intellij-community,MichaelNedzelsky/intellij-community,ivan-fedorov/intellij-community,TangHao1987/intellij-community,kdwink/intellij-community,pwoodworth/intellij-community,ivan-fedorov/intellij-community,lucafavatella/intellij-community,allotria/intellij-community,hurricup/intellij-community,slisson/intellij-community,TangHao1987/intellij-community,ibinti/intellij-community,ivan-fedorov/intellij-community,allotria/intellij-community,idea4bsd/idea4bsd,asedunov/intellij-community,vvv1559/intellij-community,kool79/intellij-community,fitermay/intellij-community,ibinti/intellij-community,michaelgallacher/intellij-community,retomerz/intellij-community,tmpgit/intellij-community,adedayo/intellij-community,alphafoobar/intellij-community,lucafavatella/intellij-community,jagguli/intellij-community,fitermay/intellij-community,kool79/intellij-community,retomerz/intellij-community,Distrotech/intellij-community,TangHao1987/intellij-community,mglukhikh/intellij-community,xfournet/intellij-community,orekyuu/intellij-community,signed/intellij-community,asedunov/intellij-community,ivan-fedorov/intellij-community,allotria/intellij-community,adedayo/intellij-community,slisson/intellij-community,ahb0327/intellij-community,fnouama/intellij-community,youdonghai/intellij-c
ommunity,caot/intellij-community,apixandru/intellij-community,robovm/robovm-studio,FHannes/intellij-community,robovm/robovm-studio,orekyuu/intellij-community,izonder/intellij-community,blademainer/intellij-community,xfournet/intellij-community,ibinti/intellij-community,lucafavatella/intellij-community,amith01994/intellij-community,kool79/intellij-community,FHannes/intellij-community,caot/intellij-community,petteyg/intellij-community,izonder/intellij-community,samthor/intellij-community,da1z/intellij-community,nicolargo/intellij-community,pwoodworth/intellij-community,ibinti/intellij-community,muntasirsyed/intellij-community,suncycheng/intellij-community,orekyuu/intellij-community,youdonghai/intellij-community,pwoodworth/intellij-community,signed/intellij-community,ivan-fedorov/intellij-community,MER-GROUP/intellij-community,idea4bsd/idea4bsd,ThiagoGarciaAlves/intellij-community,Lekanich/intellij-community,dslomov/intellij-community,xfournet/intellij-community,clumsy/intellij-community,salguarnieri/intellij-community,allotria/intellij-community,caot/intellij-community,ftomassetti/intellij-community,ftomassetti/intellij-community,Distrotech/intellij-community,FHannes/intellij-community,retomerz/intellij-community,signed/intellij-community,alphafoobar/intellij-community,salguarnieri/intellij-community,tmpgit/intellij-community,idea4bsd/idea4bsd,kool79/intellij-community,jagguli/intellij-community,pwoodworth/intellij-community,supersven/intellij-community,ryano144/intellij-community,petteyg/intellij-community,wreckJ/intellij-community,MichaelNedzelsky/intellij-community,ftomassetti/intellij-community,ahb0327/intellij-community,vladmm/intellij-community,suncycheng/intellij-community,fengbaicanhe/intellij-community,hurricup/intellij-community,salguarnieri/intellij-community,hurricup/intellij-community,allotria/intellij-community,retomerz/intellij-community,wreckJ/intellij-community,mglukhikh/intellij-community,TangHao1987/intellij-community,izonder/intellij-community,samt
hor/intellij-community,fitermay/intellij-community,supersven/intellij-community,idea4bsd/idea4bsd,mglukhikh/intellij-community,apixandru/intellij-community,vvv1559/intellij-community,fnouama/intellij-community,allotria/intellij-community,amith01994/intellij-community,holmes/intellij-community,mglukhikh/intellij-community,suncycheng/intellij-community,SerCeMan/intellij-community,youdonghai/intellij-community,holmes/intellij-community,fengbaicanhe/intellij-community,amith01994/intellij-community,wreckJ/intellij-community,alphafoobar/intellij-community,michaelgallacher/intellij-community,fitermay/intellij-community,MichaelNedzelsky/intellij-community,Lekanich/intellij-community,SerCeMan/intellij-community,orekyuu/intellij-community,tmpgit/intellij-community,amith01994/intellij-community,dslomov/intellij-community,ahb0327/intellij-community,wreckJ/intellij-community,adedayo/intellij-community,adedayo/intellij-community,muntasirsyed/intellij-community,dslomov/intellij-community,adedayo/intellij-community,ThiagoGarciaAlves/intellij-community,ThiagoGarciaAlves/intellij-community,asedunov/intellij-community,semonte/intellij-community,xfournet/intellij-community,wreckJ/intellij-community,suncycheng/intellij-community,nicolargo/intellij-community,SerCeMan/intellij-community,apixandru/intellij-community,Lekanich/intellij-community,kool79/intellij-community,orekyuu/intellij-community,youdonghai/intellij-community,robovm/robovm-studio,caot/intellij-community,muntasirsyed/intellij-community,pwoodworth/intellij-community,izonder/intellij-community,salguarnieri/intellij-community,idea4bsd/idea4bsd,gnuhub/intellij-community,Lekanich/intellij-community,MichaelNedzelsky/intellij-community,caot/intellij-community,slisson/intellij-community,alphafoobar/intellij-community,michaelgallacher/intellij-community,supersven/intellij-community,salguarnieri/intellij-community,wreckJ/intellij-community,retomerz/intellij-community,ThiagoGarciaAlves/intellij-community,apixandru/intellij-community,hu
rricup/intellij-community,michaelgallacher/intellij-community,michaelgallacher/intellij-community,signed/intellij-community,xfournet/intellij-community,mglukhikh/intellij-community,pwoodworth/intellij-community,fitermay/intellij-community,Distrotech/intellij-community,ftomassetti/intellij-community,muntasirsyed/intellij-community,amith01994/intellij-community,clumsy/intellij-community,ryano144/intellij-community,TangHao1987/intellij-community,orekyuu/intellij-community,amith01994/intellij-community,slisson/intellij-community,Lekanich/intellij-community,hurricup/intellij-community,hurricup/intellij-community,tmpgit/intellij-community,MER-GROUP/intellij-community,muntasirsyed/intellij-community,semonte/intellij-community,kdwink/intellij-community,izonder/intellij-community,allotria/intellij-community,lucafavatella/intellij-community,youdonghai/intellij-community,ivan-fedorov/intellij-community,slisson/intellij-community,gnuhub/intellij-community,semonte/intellij-community,kdwink/intellij-community,fengbaicanhe/intellij-community,da1z/intellij-community,asedunov/intellij-community,holmes/intellij-community,vvv1559/intellij-community,allotria/intellij-community,vladmm/intellij-community,robovm/robovm-studio,michaelgallacher/intellij-community,mglukhikh/intellij-community,allotria/intellij-community,SerCeMan/intellij-community,semonte/intellij-community,slisson/intellij-community,TangHao1987/intellij-community,SerCeMan/intellij-community,supersven/intellij-community,retomerz/intellij-community,blademainer/intellij-community,hurricup/intellij-community,wreckJ/intellij-community,ryano144/intellij-community,ahb0327/intellij-community,nicolargo/intellij-community,gnuhub/intellij-community,fengbaicanhe/intellij-community,kool79/intellij-community,suncycheng/intellij-community,asedunov/intellij-community,kdwink/intellij-community,pwoodworth/intellij-community,ahb0327/intellij-community,MER-GROUP/intellij-community,holmes/intellij-community,nicolargo/intellij-community,fengbaic
anhe/intellij-community,da1z/intellij-community,fengbaicanhe/intellij-community,allotria/intellij-community,ibinti/intellij-community,allotria/intellij-community,dslomov/intellij-community,gnuhub/intellij-community,fnouama/intellij-community,vvv1559/intellij-community,kool79/intellij-community,vladmm/intellij-community,youdonghai/intellij-community,salguarnieri/intellij-community,kdwink/intellij-community,nicolargo/intellij-community,kool79/intellij-community,robovm/robovm-studio,FHannes/intellij-community,ThiagoGarciaAlves/intellij-community,da1z/intellij-community,lucafavatella/intellij-community,jagguli/intellij-community,tmpgit/intellij-community,tmpgit/intellij-community,youdonghai/intellij-community,holmes/intellij-community,nicolargo/intellij-community,blademainer/intellij-community,ivan-fedorov/intellij-community,ahb0327/intellij-community,fnouama/intellij-community,xfournet/intellij-community,semonte/intellij-community,suncycheng/intellij-community,dslomov/intellij-community,FHannes/intellij-community,michaelgallacher/intellij-community,Lekanich/intellij-community,ibinti/intellij-community,fengbaicanhe/intellij-community,alphafoobar/intellij-community,holmes/intellij-community,izonder/intellij-community,allotria/intellij-community,michaelgallacher/intellij-community,alphafoobar/intellij-community,asedunov/intellij-community,MER-GROUP/intellij-community,clumsy/intellij-community,gnuhub/intellij-community,kdwink/intellij-community,vvv1559/intellij-community,suncycheng/intellij-community,MichaelNedzelsky/intellij-community,samthor/intellij-community,jagguli/intellij-community,clumsy/intellij-community,kdwink/intellij-community,orekyuu/intellij-community,fitermay/intellij-community,signed/intellij-community,signed/intellij-community,caot/intellij-community,apixandru/intellij-community,jagguli/intellij-community,clumsy/intellij-community,asedunov/intellij-community,muntasirsyed/intellij-community,ryano144/intellij-community,da1z/intellij-community,ryano144/intel
lij-community,nicolargo/intellij-community,ftomassetti/intellij-community,vvv1559/intellij-community,MichaelNedzelsky/intellij-community,youdonghai/intellij-community,FHannes/intellij-community,MER-GROUP/intellij-community,suncycheng/intellij-community,nicolargo/intellij-community,suncycheng/intellij-community,michaelgallacher/intellij-community,tmpgit/intellij-community,muntasirsyed/intellij-community,xfournet/intellij-community,petteyg/intellij-community,gnuhub/intellij-community,alphafoobar/intellij-community,idea4bsd/idea4bsd,petteyg/intellij-community,MER-GROUP/intellij-community,ibinti/intellij-community,salguarnieri/intellij-community,vladmm/intellij-community,suncycheng/intellij-community,fnouama/intellij-community,blademainer/intellij-community,dslomov/intellij-community,clumsy/intellij-community,holmes/intellij-community,xfournet/intellij-community,fitermay/intellij-community,SerCeMan/intellij-community,TangHao1987/intellij-community,hurricup/intellij-community,Distrotech/intellij-community,ThiagoGarciaAlves/intellij-community,slisson/intellij-community,retomerz/intellij-community,pwoodworth/intellij-community,blademainer/intellij-community,mglukhikh/intellij-community,amith01994/intellij-community,TangHao1987/intellij-community,supersven/intellij-community,caot/intellij-community,da1z/intellij-community,MichaelNedzelsky/intellij-community,da1z/intellij-community,orekyuu/intellij-community,MichaelNedzelsky/intellij-community,signed/intellij-community,MichaelNedzelsky/intellij-community,Lekanich/intellij-community,caot/intellij-community,ahb0327/intellij-community,ftomassetti/intellij-community,Distrotech/intellij-community,semonte/intellij-community,Lekanich/intellij-community,apixandru/intellij-community,muntasirsyed/intellij-community,kool79/intellij-community,da1z/intellij-community,vladmm/intellij-community,Lekanich/intellij-community,Lekanich/intellij-community,ahb0327/intellij-community,clumsy/intellij-community,petteyg/intellij-community,wreckJ/inte
llij-community,fitermay/intellij-community,jagguli/intellij-community,fitermay/intellij-community,MichaelNedzelsky/intellij-community,petteyg/intellij-community
/* * Copyright 2000-2013 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jetbrains.plugins.javaFX.fxml; import com.intellij.codeInsight.intention.IntentionAction; import com.intellij.openapi.application.PluginPathManager; import com.intellij.openapi.module.Module; import com.intellij.openapi.roots.ContentEntry; import com.intellij.openapi.roots.ModifiableRootModel; import com.intellij.openapi.util.SystemInfo; import com.intellij.psi.xml.XmlFile; import com.intellij.testFramework.LightProjectDescriptor; import com.intellij.testFramework.PsiTestUtil; import com.intellij.testFramework.fixtures.DefaultLightProjectDescriptor; import com.intellij.testFramework.fixtures.LightCodeInsightFixtureTestCase; import org.jetbrains.annotations.NotNull; import org.jetbrains.plugins.javaFX.fxml.codeInsight.inspections.JavaFxUnresolvedFxIdReferenceInspection; import org.jetbrains.plugins.javaFX.fxml.codeInsight.intentions.JavaFxInjectPageLanguageIntention; import org.junit.Assume; import java.util.Set; public class JavaFXQuickfixTest extends LightCodeInsightFixtureTestCase { public static final DefaultLightProjectDescriptor JAVA_FX_WITH_GROOVY_DESCRIPTOR = new DefaultLightProjectDescriptor() { @Override public void configureModule(Module module, ModifiableRootModel model, ContentEntry contentEntry) { PsiTestUtil.addLibrary(module, model, "javafx", PluginPathManager.getPluginHomePath("javaFX") + "/testData", "jfxrt.jar"); PsiTestUtil.addLibrary(module, model, "javafx", 
PluginPathManager.getPluginHomePath("javaFX") + "/testData", "groovy-1.8.0.jar"); super.configureModule(module, model, contentEntry); } }; @NotNull @Override protected LightProjectDescriptor getProjectDescriptor() { return JAVA_FX_WITH_GROOVY_DESCRIPTOR; } public void testCreateControllerMethod() throws Exception { doTest("Create method 'void bar(ActionEvent)'", ".java"); } public void testCreateControllerMethodInGroovy() throws Exception { doTest("Create method 'void bar(ActionEvent)'", ".groovy"); } public void testCreateField() throws Exception { doTest("Create field 'btn'", ".java"); } public void testCreateFieldEmptyName() throws Exception { String path = getTestName(true) + ".fxml"; final IntentionAction intention = myFixture.getAvailableIntention("Create field 'btn'", path, getTestName(false) + ".java"); assertNull(intention); } public void testRegisterPageLanguage() throws Exception { myFixture.configureByFile(getTestName(true) + ".fxml"); final IntentionAction intention = myFixture.findSingleIntention("Specify page language"); assertNotNull(intention); Set<String> languages = JavaFxInjectPageLanguageIntention.getAvailableLanguages(getProject()); assertContainsElements(languages, "groovy"); JavaFxInjectPageLanguageIntention languageIntention = (JavaFxInjectPageLanguageIntention)intention; languageIntention.registerPageLanguage(getProject(), (XmlFile)myFixture.getFile(), "groovy"); myFixture.checkResultByFile(getTestName(true) + ".fxml", getTestName(true) + "_after.fxml", true); } public void testWrapWithDefine() throws Exception { final IntentionAction intention = myFixture.getAvailableIntention("Wrap \"lb\" with fx:define", getTestName(true) + ".fxml"); assertNotNull(intention); myFixture.launchAction(intention); myFixture.checkResultByFile(getTestName(true) + "_after.fxml"); } private void doTest(final String actionName, final String extension) throws Exception { String path = getTestName(true) + ".fxml"; final IntentionAction intention = 
myFixture.getAvailableIntention(actionName, path, getTestName(false) + extension); assertNotNull(intention); myFixture.launchAction(intention); myFixture.checkResultByFile(getTestName(false) + extension, getTestName(false) + "_after" + extension, true); } @Override protected void setUp() throws Exception { super.setUp(); myFixture.enableInspections(new JavaFxUnresolvedFxIdReferenceInspection()); } @Override protected void runTest() throws Throwable { Assume.assumeFalse(SystemInfo.isMac); super.runTest(); } @NotNull @Override protected String getTestDataPath() { return PluginPathManager.getPluginHomePath("javaFX") + "/testData/quickfix/"; } }
plugins/javaFX/javaFX-CE/testSrc/org/jetbrains/plugins/javaFX/fxml/JavaFXQuickfixTest.java
/* * Copyright 2000-2013 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jetbrains.plugins.javaFX.fxml; import com.intellij.codeInsight.intention.IntentionAction; import com.intellij.openapi.application.PluginPathManager; import com.intellij.openapi.module.Module; import com.intellij.openapi.roots.ContentEntry; import com.intellij.openapi.roots.ModifiableRootModel; import com.intellij.psi.xml.XmlFile; import com.intellij.testFramework.LightProjectDescriptor; import com.intellij.testFramework.PsiTestUtil; import com.intellij.testFramework.fixtures.DefaultLightProjectDescriptor; import com.intellij.testFramework.fixtures.LightCodeInsightFixtureTestCase; import org.jetbrains.annotations.NotNull; import org.jetbrains.plugins.javaFX.fxml.codeInsight.inspections.JavaFxUnresolvedFxIdReferenceInspection; import org.jetbrains.plugins.javaFX.fxml.codeInsight.intentions.JavaFxInjectPageLanguageIntention; import java.util.Set; public class JavaFXQuickfixTest extends LightCodeInsightFixtureTestCase { public static final DefaultLightProjectDescriptor JAVA_FX_WITH_GROOVY_DESCRIPTOR = new DefaultLightProjectDescriptor() { @Override public void configureModule(Module module, ModifiableRootModel model, ContentEntry contentEntry) { PsiTestUtil.addLibrary(module, model, "javafx", PluginPathManager.getPluginHomePath("javaFX") + "/testData", "jfxrt.jar"); PsiTestUtil.addLibrary(module, model, "javafx", PluginPathManager.getPluginHomePath("javaFX") + "/testData", 
"groovy-1.8.0.jar"); super.configureModule(module, model, contentEntry); } }; @NotNull @Override protected LightProjectDescriptor getProjectDescriptor() { return JAVA_FX_WITH_GROOVY_DESCRIPTOR; } public void testCreateControllerMethod() throws Exception { doTest("Create method 'void bar(ActionEvent)'", ".java"); } public void testCreateControllerMethodInGroovy() throws Exception { doTest("Create method 'void bar(ActionEvent)'", ".groovy"); } public void testCreateField() throws Exception { doTest("Create field 'btn'", ".java"); } public void testCreateFieldEmptyName() throws Exception { String path = getTestName(true) + ".fxml"; final IntentionAction intention = myFixture.getAvailableIntention("Create field 'btn'", path, getTestName(false) + ".java"); assertNull(intention); } public void testRegisterPageLanguage() throws Exception { myFixture.configureByFile(getTestName(true) + ".fxml"); final IntentionAction intention = myFixture.findSingleIntention("Specify page language"); assertNotNull(intention); Set<String> languages = JavaFxInjectPageLanguageIntention.getAvailableLanguages(getProject()); assertContainsElements(languages, "groovy"); JavaFxInjectPageLanguageIntention languageIntention = (JavaFxInjectPageLanguageIntention)intention; languageIntention.registerPageLanguage(getProject(), (XmlFile)myFixture.getFile(), "groovy"); myFixture.checkResultByFile(getTestName(true) + ".fxml", getTestName(true) + "_after.fxml", true); } public void testWrapWithDefine() throws Exception { final IntentionAction intention = myFixture.getAvailableIntention("Wrap \"lb\" with fx:define", getTestName(true) + ".fxml"); assertNotNull(intention); myFixture.launchAction(intention); myFixture.checkResultByFile(getTestName(true) + "_after.fxml"); } private void doTest(final String actionName, final String extension) throws Exception { String path = getTestName(true) + ".fxml"; final IntentionAction intention = myFixture.getAvailableIntention(actionName, path, getTestName(false) + 
extension); assertNotNull(intention); myFixture.launchAction(intention); myFixture.checkResultByFile(getTestName(false) + extension, getTestName(false) + "_after" + extension, true); } @Override protected void setUp() throws Exception { super.setUp(); myFixture.enableInspections(new JavaFxUnresolvedFxIdReferenceInspection()); } @NotNull @Override protected String getTestDataPath() { return PluginPathManager.getPluginHomePath("javaFX") + "/testData/quickfix/"; } }
disable JavaFXQuickfixTest on Mac
plugins/javaFX/javaFX-CE/testSrc/org/jetbrains/plugins/javaFX/fxml/JavaFXQuickfixTest.java
disable JavaFXQuickfixTest on Mac
<ide><path>lugins/javaFX/javaFX-CE/testSrc/org/jetbrains/plugins/javaFX/fxml/JavaFXQuickfixTest.java <ide> import com.intellij.openapi.module.Module; <ide> import com.intellij.openapi.roots.ContentEntry; <ide> import com.intellij.openapi.roots.ModifiableRootModel; <add>import com.intellij.openapi.util.SystemInfo; <ide> import com.intellij.psi.xml.XmlFile; <ide> import com.intellij.testFramework.LightProjectDescriptor; <ide> import com.intellij.testFramework.PsiTestUtil; <ide> import org.jetbrains.annotations.NotNull; <ide> import org.jetbrains.plugins.javaFX.fxml.codeInsight.inspections.JavaFxUnresolvedFxIdReferenceInspection; <ide> import org.jetbrains.plugins.javaFX.fxml.codeInsight.intentions.JavaFxInjectPageLanguageIntention; <add>import org.junit.Assume; <ide> <ide> import java.util.Set; <ide> <ide> myFixture.enableInspections(new JavaFxUnresolvedFxIdReferenceInspection()); <ide> } <ide> <add> @Override <add> protected void runTest() throws Throwable { <add> Assume.assumeFalse(SystemInfo.isMac); <add> super.runTest(); <add> } <add> <ide> @NotNull <ide> @Override <ide> protected String getTestDataPath() {
JavaScript
agpl-3.0
6e2f924d9b76a1de1483bc469178d219c0dc5cb2
0
alisman/cbioportal-frontend,alisman/cbioportal-frontend,cBioPortal/cbioportal-frontend,alisman/cbioportal-frontend,cBioPortal/cbioportal-frontend,cBioPortal/cbioportal-frontend,alisman/cbioportal-frontend,cBioPortal/cbioportal-frontend,cBioPortal/cbioportal-frontend,alisman/cbioportal-frontend,cBioPortal/cbioportal-frontend,alisman/cbioportal-frontend
var ExtractTextPlugin = require('extract-text-webpack-plugin'); var HtmlWebpackPlugin = require('html-webpack-plugin'); var WebpackFailPlugin = require('webpack-fail-plugin'); var ProgressBarPlugin = require('progress-bar-webpack-plugin'); var CopyWebpackPlugin = require('copy-webpack-plugin'); var ForkTsCheckerWebpackPlugin = require('fork-ts-checker-webpack-plugin'); var commit = '"unknown"'; var version = '"unknown"'; // Don't show COMMIT/VERSION on Heroku (crashes, because no git dir) if (process.env.PATH.indexOf('heroku') === -1) { // show full git version var GitRevisionPlugin = require('git-revision-webpack-plugin'); var gitRevisionPlugin = new GitRevisionPlugin({ versionCommand: 'describe --always --tags --dirty' }); commit = JSON.stringify(gitRevisionPlugin.commithash()) version = JSON.stringify(gitRevisionPlugin.version()) } function cleanAndValidateUrl(url) { if (typeof url === 'string') { // we need to support legacy configuration values console.log(url); if (/^http/.test(url) === false) { throw("URLS MUST START WITH PROTOCOL"); } let cleanAndValidateUrl = url.replace(/\/$/,""); // get rid of trailing slashes return cleanAndValidateUrl; } else { throw `Not a url: ${url}` } } const NODE_ENV = process.env.NODE_ENV || 'development'; var jsonFN = require('json-fn'); const dotenv = require('dotenv'); const webpack = require('webpack'); const path = require('path'); const join = path.join; const resolve = path.resolve; const isDev = NODE_ENV === 'development'; const isTest = NODE_ENV === 'test'; // devServer config const devHost = process.env.HOST || 'localhost'; const devPort = process.env.PORT || 3000; const root = resolve(__dirname); const src = join(root, 'src'); const modules = join(root, 'node_modules'); const dest = join(root, 'dist'); const css = join(src, 'styles'); const fontPath = 'reactapp/[hash].[ext]'; const imgPath = 'reactapp/images/[hash].[ext]'; var routeComponentRegex = /routes\/([^\/]+\/?[^\/]+).js$/; var sassResourcesLoader = { 
loader:'sass-resources-loader', options: { resources:[ path.resolve(__dirname, 'node_modules/bootstrap-sass/assets/stylesheets/bootstrap/_variables.scss'), path.resolve(__dirname, 'node_modules/bootstrap-sass/assets/stylesheets/bootstrap/_mixins'), './src/globalStyles/variables.scss' ] } }; var config = { entry: [ `babel-polyfill`, `${path.join(src, 'appBootstrapper.jsx')}` ], output: { path: path.resolve(__dirname, 'dist'), filename: 'reactapp/[name].app.js', chunkFilename: 'reactapp/[name].[chunkhash].chunk.js', // cssFilename: 'reactapp/app.css', // hash: false, publicPath: '/', }, resolve: { 'extensions': [ '.js', '.jsx', '.json', '.ts', '.tsx', ], alias: { css: join(src, 'styles'), containers: join(src, 'containers'), components: join(src, 'components'), utils: join(src, 'utils'), styles: join(src, 'styles'), pages: join(src, 'pages'), shared: join(src, 'shared'), appConfig: path.join(__dirname + '/src', 'config', ((process.env.NODE_ENV === 'test')? 'test.' : '') + 'config') } }, resolveLoader: { modules: [ path.resolve(__dirname, 'loaders'), path.join(process.cwd(), 'node_modules') ] }, plugins: [ new webpack.DefinePlugin({ 'VERSION': version, 'COMMIT': commit, 'IS_DEV_MODE': isDev, 'ENV_CBIOPORTAL_URL': isDev && process.env.CBIOPORTAL_URL? JSON.stringify(cleanAndValidateUrl(process.env.CBIOPORTAL_URL)) : '"replace_me_env_cbioportal_url"', 'ENV_GENOME_NEXUS_URL': isDev && process.env.GENOME_NEXUS_URL? 
JSON.stringify(cleanAndValidateUrl(process.env.GENOME_NEXUS_URL)) : '"replace_me_env_genome_nexus_url"', }), new HtmlWebpackPlugin({cache: false, template: 'my-index.ejs'}), WebpackFailPlugin, new ProgressBarPlugin(), new webpack.DllReferencePlugin({ context: '.', manifest: require('./common-dist/common-manifest.json') }), new CopyWebpackPlugin([ {from: './common-dist', to: 'reactapp'}, {from: './src/rootImages', to: 'images'}, {from: './src/pages/resultsView/network', to: 'reactapp/network'}, {from: './src/globalStyles/prefixed-bootstrap.min.css', to: 'reactapp/prefixed-bootstrap.min.css'}, {from: './src/shared/legacy/igv.min.js', to: 'reactapp/igv.min.js'}, {from: './src/shared/legacy/igv.css', to: 'reactapp/igv.css'}, {from: './src/globalStyles/prefixed-bootstrap.min.css.map', to: 'reactapp/prefixed-bootstrap.min.css.map'} ]) // destination is relative to dist directory ], module: { rules: [ { test: /\.tsx?$/, use: [ { loader: "babel-loader" }, { loader: "ts-loader", options:{ transpileOnly: (isDev || isTest) } } ] }, { test: /\.(js|jsx|babel)$/, use: [{ loader: "babel-loader" }], exclude: function(modulePath) { return /node_modules/.test(modulePath) && !/node_modules\/igv\/dist/.test(modulePath); } }, { test: /\.otf(\?\S*)?$/, use: [{ loader: `url-loader`, options: { name:fontPath, limit:10000 } }] }, { test: /\.eot(\?\S*)?$/, use: [{ loader: `url-loader`, options: { name:fontPath, limit: 10000 } }], }, { test: /\.svg(\?\S*)?$/, use: [ { loader: `url-loader`, options: { name:fontPath, mimetype:'image/svg+xml', limit:10000 } } ], }, { test: /\.ttf(\?\S*)?$/, use: [{ loader: `url-loader`, options: { name:fontPath, mimetype:'application/octet-stream', limit:10000 } }], }, { test: /\.woff2?(\?\S*)?$/, use: [{ loader: `url-loader`, options: { name:fontPath, mimetype:'application/font-woff', limit:10000 } }], }, { test: /\.(jpe?g|png|gif)$/, use: [{ loader: `url-loader`, options:{ name:imgPath, limit:10000 } }], }, { test: /\.swf$/, use: [ { loader: `file-loader`, 
options:{ name:imgPath } } ], }, { test: /\.pdf$/, use: [ { loader: `url-loader`, options:{ name:imgPath, limit:1 } } ], }, { test: /lodash/, use: [ {loader: 'imports-loader?define=>false'} ] }, { test: /\.js$/, enforce:"pre", use: [{ loader: "source-map-loader", }], exclude: [ /node_modules\/igv\//g, /node_modules\/svg2pdf.js\//g ] } ], noParse: [/3Dmol-nojquery.js/, /jspdf/], }, devServer: { contentBase: './dist', hot: true, historyApiFallback:true, noInfo:false, quiet:false, lazy:false, publicPath:'/', https:false, host:'localhost', headers: {"Access-Control-Allow-Origin": "*"}, stats:'errors-only' }, }; // ENV variables const dotEnvVars = dotenv.config(); const environmentEnv = dotenv.config({ path: join(root, 'config', `${NODE_ENV}.config.js`), silent: true }); const envVariables = Object.assign({}, dotEnvVars, environmentEnv); const defines = Object.keys(envVariables) .reduce((memo, key) => { const val = JSON.stringify(envVariables[key]); memo[`__${key.toUpperCase()}__`] = val; return memo; }, { __NODE_ENV__: JSON.stringify(NODE_ENV), __DEBUG__: isDev }); config.plugins = [ new webpack.DefinePlugin(defines), new ExtractTextPlugin({ filename:'reactapp/styles.css', allChunks: true }), new webpack.ProvidePlugin({ $: "jquery", jQuery: "jquery" }) ].concat(config.plugins); // END ENV variables //config.module.loaders.push.apply(this); // include jquery when we load boostrap-sass config.module.rules.push( { test: /bootstrap-sass[\/\\]assets[\/\\]javascripts[\/\\]/, use: [{ loader:'imports-loader', options:{ 'jQuery':'jquery' } }] } ); if (isDev) { //add for testwriter config.module.rules.push( { test: /\.ts|tsx/, use:[{loader: 'testwriter'}] } ); config.entry.push(`${path.join(src, 'testWriter.js')}`); config.plugins.push( new webpack.NamedModulesPlugin(), new webpack.HotModuleReplacementPlugin() ); } if (isDev || isTest) { config.devtool = 'source-map'; // in dev we don't want to load the twitter widget b/c it can block load of site 
config.resolve.alias['react-twitter-widgets'] = join(src, 'shared/Empty.tsx'); config.plugins.push(new ForkTsCheckerWebpackPlugin()); // css modules for any scss matching test config.module.rules.push( { test: /\.module\.scss$/, use:[ 'style-loader', { loader:'css-loader', options: { modules:true, importLoaders:2, localIdentName:'[name]__[local]__[hash:base64:5]' } }, 'sass-loader', sassResourcesLoader ] } ); // IN DEV WE WANT TO LOAD CSS AND SCSS BUT NOT USE EXTRACT TEXT PLUGIN // STYLES WILL BE IN JS BUNDLE AND APPENDED TO DOM IN <STYLE> TAGS config.module.rules.push( { test: /\.css$/, use: ['style-loader','css-loader'] } ); config.module.rules.push( { test: /\.scss$/, exclude: /\.module\.scss/, use:[ 'style-loader', 'css-loader', 'sass-loader', sassResourcesLoader ] } ); config.devServer.port = devPort; //config.devServer.hostname = devHost; // force hot module reloader to hit absolute path so it can load // from dev server config.output.publicPath = '//localhost:3000/'; } else { config.devtool = 'source-map', config.output.publicPath = '/'; // css modules for any scss matching test config.module.rules.push( { test: /\.module\.scss$/, use: ExtractTextPlugin.extract({ fallback:'style-loader', use:[ { loader: 'css-loader', options:{ modules:true, importLoaders:2, localIdentName:'[name]__[local]__[hash:base64:5]' } }, 'sass-loader', sassResourcesLoader ] }) } ); config.module.rules.push( { test: /\.scss$/, exclude: /\.module\.scss/, use: ExtractTextPlugin.extract({ fallback:'style-loader', use:[ 'css-loader', 'sass-loader', sassResourcesLoader ] }) } ); config.module.rules.push( { test: /\.css/, loader: ExtractTextPlugin.extract({ fallback:'style-loader', use:'css-loader' }) } ); config.plugins.push( new webpack.DefinePlugin({ 'process.env': { 'NODE_ENV': `"${process.env.NODE_ENV || 'production'}"` } }), new webpack.optimize.UglifyJsPlugin({ compress: { warnings: false }, sourceMap: true, comments: false }) ); } //config.entry.push('bootstrap-loader'); // END 
BOOTSTRAP LOADER config.entry.push('font-awesome-webpack'); // Roots config.resolve.modules = [ src, modules ]; // end Roots // Testing if (isTest) { config.externals = { 'react/addons': true, 'react/lib/ReactContext': true, 'react/lib/ExecutionEnvironment': true }; config.module.noParse = /[/\\]sinon\.js/; config.resolve.alias.sinon = 'sinon/pkg/sinon'; } // End Testing module.exports = config;
webpack.config.js
var ExtractTextPlugin = require('extract-text-webpack-plugin'); var HtmlWebpackPlugin = require('html-webpack-plugin'); var WebpackFailPlugin = require('webpack-fail-plugin'); var ProgressBarPlugin = require('progress-bar-webpack-plugin'); var CopyWebpackPlugin = require('copy-webpack-plugin'); var ForkTsCheckerWebpackPlugin = require('fork-ts-checker-webpack-plugin'); var commit = '"unknown"'; var version = '"unknown"'; // Don't show COMMIT/VERSION on Heroku (crashes, because no git dir) if (process.env.PATH.indexOf('heroku') === -1) { // show full git version var GitRevisionPlugin = require('git-revision-webpack-plugin'); var gitRevisionPlugin = new GitRevisionPlugin({ versionCommand: 'describe --always --tags --dirty' }); commit = JSON.stringify(gitRevisionPlugin.commithash()) version = JSON.stringify(gitRevisionPlugin.version()) } function cleanAndValidateUrl(url) { if (typeof url === 'string') { // we need to support legacy configuration values console.log(url); if (/^http/.test(url) === false) { throw("URLS MUST START WITH PROTOCOL"); } let cleanAndValidateUrl = url.replace(/\/$/,""); // get rid of trailing slashes return cleanAndValidateUrl; } else { throw `Not a url: ${url}` } } const NODE_ENV = process.env.NODE_ENV || 'development'; var jsonFN = require('json-fn'); const dotenv = require('dotenv'); const webpack = require('webpack'); const path = require('path'); const join = path.join; const resolve = path.resolve; const isDev = NODE_ENV === 'development'; const isTest = NODE_ENV === 'test'; // devServer config const devHost = process.env.HOST || 'localhost'; const devPort = process.env.PORT || 3000; const root = resolve(__dirname); const src = join(root, 'src'); const modules = join(root, 'node_modules'); const dest = join(root, 'dist'); const css = join(src, 'styles'); const fontPath = 'reactapp/[hash].[ext]'; const imgPath = 'reactapp/images/[hash].[ext]'; var routeComponentRegex = /routes\/([^\/]+\/?[^\/]+).js$/; var sassResourcesLoader = { 
loader:'sass-resources-loader', options: { resources:[ path.resolve(__dirname, 'node_modules/bootstrap-sass/assets/stylesheets/bootstrap/_variables.scss'), path.resolve(__dirname, 'node_modules/bootstrap-sass/assets/stylesheets/bootstrap/_mixins'), './src/globalStyles/variables.scss' ] } }; var config = { entry: [ `babel-polyfill`, `${path.join(src, 'appBootstrapper.jsx')}` ], output: { path: path.resolve(__dirname, 'dist'), filename: 'reactapp/[name].app.js', chunkFilename: 'reactapp/[name].[chunkhash].chunk.js', // cssFilename: 'reactapp/app.css', // hash: false, publicPath: '/', }, resolve: { 'extensions': [ '.js', '.jsx', '.json', '.ts', '.tsx', ], alias: { css: join(src, 'styles'), containers: join(src, 'containers'), components: join(src, 'components'), utils: join(src, 'utils'), styles: join(src, 'styles'), pages: join(src, 'pages'), shared: join(src, 'shared'), appConfig: path.join(__dirname + '/src', 'config', ((process.env.NODE_ENV === 'test')? 'test.' : '') + 'config') } }, resolveLoader: { modules: [ path.resolve(__dirname, 'loaders'), path.join(process.cwd(), 'node_modules') ] }, plugins: [ new webpack.DefinePlugin({ 'VERSION': version, 'COMMIT': commit, 'IS_DEV_MODE': isDev, 'ENV_CBIOPORTAL_URL': isDev && process.env.CBIOPORTAL_URL? JSON.stringify(cleanAndValidateUrl(process.env.CBIOPORTAL_URL)) : '"replace_me_env_cbioportal_url"', 'ENV_GENOME_NEXUS_URL': isDev && process.env.GENOME_NEXUS_URL? 
JSON.stringify(cleanAndValidateUrl(process.env.GENOME_NEXUS_URL)) : '"replace_me_env_genome_nexus_url"', }), new HtmlWebpackPlugin({cache: false, template: 'my-index.ejs'}), WebpackFailPlugin, new ProgressBarPlugin(), new webpack.DllReferencePlugin({ context: '.', manifest: require('./common-dist/common-manifest.json') }), new CopyWebpackPlugin([ {from: './common-dist', to: 'reactapp'}, {from: './src/rootImages', to: 'images'}, {from: './src/pages/resultsView/network', to: 'reactapp/network'}, {from: './src/globalStyles/prefixed-bootstrap.min.css', to: 'reactapp/prefixed-bootstrap.min.css'}, {from: './src/shared/legacy/igv.min.js', to: 'reactapp/igv.min.js'}, {from: './src/shared/legacy/igv.css', to: 'reactapp/igv.css'}, {from: './src/globalStyles/prefixed-bootstrap.min.css.map', to: 'reactapp/prefixed-bootstrap.min.css.map'} ]) // destination is relative to dist directory ], module: { rules: [ { test: /\.tsx?$/, use: [ { loader: "babel-loader" }, { loader: "ts-loader", options:{ transpileOnly: (isDev || isTest) } } ] }, { test: /\.(js|jsx|babel)$/, use: [{ loader: "babel-loader" }], exclude: function(modulePath) { return /node_modules/.test(modulePath) && !/node_modules\/igv\/dist/.test(modulePath); } }, { test: /\.otf(\?\S*)?$/, use: [{ loader: `url-loader`, options: { name:fontPath, limit:10000 } }] }, { test: /\.eot(\?\S*)?$/, use: [{ loader: `url-loader`, options: { name:fontPath, limit: 10000 } }], }, { test: /\.svg(\?\S*)?$/, use: [ { loader: `url-loader`, options: { name:fontPath, mimetype:'image/svg+xml', limit:10000 } } ], }, { test: /\.ttf(\?\S*)?$/, use: [{ loader: `url-loader`, options: { name:fontPath, mimetype:'application/octet-stream', limit:10000 } }], }, { test: /\.woff2?(\?\S*)?$/, use: [{ loader: `url-loader`, options: { name:fontPath, mimetype:'application/font-woff', limit:10000 } }], }, { test: /\.(jpe?g|png|gif)$/, use: [{ loader: `url-loader`, options:{ name:imgPath, limit:10000 } }], }, { test: /\.swf$/, use: [ { loader: `file-loader`, 
options:{ name:imgPath } } ], }, { test: /\.pdf$/, use: [ { loader: `url-loader`, options:{ name:imgPath, limit:1 } } ], }, { test: /lodash/, use: [ {loader: 'imports-loader?define=>false'} ] }, { test: /\.js$/, enforce:"pre", use: [{ loader: "source-map-loader", }], exclude: [ /node_modules\/igv\//g, /node_modules\/svg2pdf.js\//g ] } ], noParse: [/3Dmol-nojquery.js/, /jspdf/], }, devServer: { contentBase: './dist', hot: true, historyApiFallback:true, noInfo:false, quiet:false, lazy:false, publicPath:'/', https:false, host:'localhost', headers: {"Access-Control-Allow-Origin": "*"}, stats:'errors-only' }, }; // ENV variables const dotEnvVars = dotenv.config(); const environmentEnv = dotenv.config({ path: join(root, 'config', `${NODE_ENV}.config.js`), silent: true }); const envVariables = Object.assign({}, dotEnvVars, environmentEnv); const defines = Object.keys(envVariables) .reduce((memo, key) => { const val = JSON.stringify(envVariables[key]); memo[`__${key.toUpperCase()}__`] = val; return memo; }, { __NODE_ENV__: JSON.stringify(NODE_ENV), __DEBUG__: isDev }); config.plugins = [ new webpack.DefinePlugin(defines), new ExtractTextPlugin({ filename:'reactapp/styles.css', allChunks: true }), new webpack.ProvidePlugin({ $: "jquery", jQuery: "jquery" }) ].concat(config.plugins); // END ENV variables //config.module.loaders.push.apply(this); // include jquery when we load boostrap-sass config.module.rules.push( { test: /bootstrap-sass[\/\\]assets[\/\\]javascripts[\/\\]/, use: [{ loader:'imports-loader', options:{ 'jQuery':'jquery' } }] } ); if (isDev) { //add for testwriter config.module.rules.push( { test: /\.ts|tsx/, use:[{loader: 'testwriter'}] } ); config.entry.push(`${path.join(src, 'testWriter.js')}`); config.plugins.push( new webpack.NamedModulesPlugin(), new webpack.HotModuleReplacementPlugin() ); } if (isDev || isTest) { config.devtool = 'source-map'; // in dev we don't want to load the twitter widget b/c it can block load of site 
config.resolve.alias['react-twitter-widgets'] = join(src, 'shared/Empty.tsx'); config.plugins.push(new ForkTsCheckerWebpackPlugin()); // css modules for any scss matching test config.module.rules.push( { test: /\.module\.scss$/, use:[ 'style-loader', { loader:'css-loader', options: { modules:true, importLoaders:2, localIdentName:'[name]__[local]__[hash:base64:5]' } }, 'sass-loader', sassResourcesLoader ] } ); // IN DEV WE WANT TO LOAD CSS AND SCSS BUT NOT USE EXTRACT TEXT PLUGIN // STYLES WILL BE IN JS BUNDLE AND APPENDED TO DOM IN <STYLE> TAGS config.module.rules.push( { test: /\.css$/, use: ['style-loader','css-loader'] } ); config.module.rules.push( { test: /\.scss$/, exclude: /\.module\.scss/, use:[ 'style-loader', 'css-loader', 'sass-loader', sassResourcesLoader ] } ); config.devServer.port = devPort; //config.devServer.hostname = devHost; // force hot module reloader to hit absolute path so it can load // from dev server config.output.publicPath = '//localhost:3000/'; } else { config.devtool = 'source-map', config.output.publicPath = '/'; // css modules for any scss matching test config.module.rules.push( { test: /\.module\.scss$/, use: ExtractTextPlugin.extract({ fallback:'style-loader', use:[ { loader: 'css-loader', options:{ modules:true, importLoaders:2, localIdentName:'[name]__[local]__[hash:base64:5]' } }, 'sass-loader', sassResourcesLoader ] }) } ); config.module.rules.push( { test: /\.scss$/, exclude: /\.module\.scss/, use: ExtractTextPlugin.extract({ fallback:'style-loader', use:[ 'css-loader', 'sass-loader', sassResourcesLoader ] }) } ); config.module.rules.push( { test: /\.css/, loader: ExtractTextPlugin.extract({ fallback:'style-loader', use:'css-loader' }) } ); config.plugins.push( new webpack.DefinePlugin({ 'process.env': { 'NODE_ENV': `"${process.env.NODE_ENV || 'production'}"` } }) ); } //config.entry.push('bootstrap-loader'); // END BOOTSTRAP LOADER config.entry.push('font-awesome-webpack'); // Roots config.resolve.modules = [ src, modules ]; 
// end Roots // Testing if (isTest) { config.externals = { 'react/addons': true, 'react/lib/ReactContext': true, 'react/lib/ExecutionEnvironment': true }; config.module.noParse = /[/\\]sinon\.js/; config.resolve.alias.sinon = 'sinon/pkg/sinon'; } // End Testing module.exports = config;
Readd Uglify plugin [PR feedback Aaron] Former-commit-id: 1e5473c22bb80fad31f3eec04015fa4244917d3e
webpack.config.js
Readd Uglify plugin [PR feedback Aaron]
<ide><path>ebpack.config.js <ide> 'process.env': { <ide> 'NODE_ENV': `"${process.env.NODE_ENV || 'production'}"` <ide> } <add> }), <add> new webpack.optimize.UglifyJsPlugin({ <add> compress: { <add> warnings: false <add> }, <add> sourceMap: true, <add> comments: false <ide> }) <ide> ); <ide>
Java
mit
d41908d5a63260e627adbbe612dd26adb409b309
0
tennaito/entity-services
/* * The MIT License * * Copyright 2015 Antonio Rabelo. * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/ package com.github.tennaito.test.jpa; import static junit.framework.Assert.assertEquals; import static junit.framework.Assert.assertTrue; import static junit.framework.Assert.assertFalse; import java.lang.reflect.Field; import java.util.Collection; import java.util.Set; import javax.persistence.EntityManager; import org.junit.Test; import com.github.tennaito.entity.service.EntityQueryService; import com.github.tennaito.entity.service.data.DefaultEntityStateConverter; import com.github.tennaito.entity.service.data.EntityState; import com.github.tennaito.entity.service.data.EntityStateConverter; import com.github.tennaito.entity.service.impl.DefaultEntityQueryService; import com.github.tennaito.test.jpa.entity.InvoiceList; import com.github.tennaito.test.jpa.entity.Item; /** * @author Antonio Rabelo * */ public class EntityStateConverterTest extends AbstractEntityServicesTest { @Test public void testCreateState() { EntityState state = createState(); assertEquals("InvoiceList", state.getName()); assertEquals(1, state.get("id")); assertEquals("Fruits", state.get("description")); assertEquals(3, state.<Set<EntityState>>get("items").size()); } @Test public void testCreateStateList() { EntityManager manager = EntityManagerFactoryInitializer.getEntityManagerFactory().createEntityManager(); EntityQueryService<InvoiceList> service = new DefaultEntityQueryService<InvoiceList>(manager); InvoiceList invoice = service.querySingle(InvoiceList.class, "id==1"); EntityStateConverter<Item> converter = new DefaultEntityStateConverter<Item>(); Collection<EntityState> states = converter.createStateList(invoice.getItems()); assertEquals(3, states.size()); } @Test public void testCreateEntityList() { EntityState state = createState(); EntityStateConverter<Item> converter = new DefaultEntityStateConverter<Item>(); Collection<Item> states = converter.createEntityList(state.<Set<EntityState>>get("items")); assertEquals(3, states.size()); } @Test public void testStateEquals() throws 
Exception { EntityState state1 = createState(); EntityState state2 = createState(); // same reference assertTrue(state1.equals(state1)); // same object assertTrue(state1.equals(state2)); // not the same... assertFalse(state1.equals(null)); assertFalse(state1.equals("teste")); String name = state1.getName(); Field f = state1.getClass().getDeclaredField("name"); f.setAccessible(true); f.set(state1, null); assertFalse(state1.equals(state2)); f.set(state1, name); f.setAccessible(false); String description = state1.get("description"); state1.put("description", "fail"); assertFalse(state1.equals(state2)); state1.put("description", description); Field p = state1.getClass().getDeclaredField("properties"); p.setAccessible(true); Object value = p.get(state1); p.set(state1, null); assertFalse(state1.equals(state2)); p.set(state1, value); p.setAccessible(false); EntityState otherState = createStateFromId1(Item.class); assertFalse(state1.equals(otherState)); } @Test public void testCreateEntity() { EntityStateConverter<InvoiceList> converter = new DefaultEntityStateConverter<InvoiceList>(); EntityState state = createState(); InvoiceList entity = converter.createEntity(state); assertEquals(1, (Object) entity.getId()); assertEquals("Fruits", entity.getDescription()); assertEquals(3, entity.getItems().size()); for (Object element : entity.getItems()) { assertTrue(Item.class.isAssignableFrom(element.getClass())); } } @Test(expected=IllegalArgumentException.class) public void testInvalidCreateEntity() { EntityStateConverter<InvoiceList> converter = new DefaultEntityStateConverter<InvoiceList>(); converter.createEntity(null); } @Test(expected=IllegalArgumentException.class) public void testInvalidCreateState() { EntityStateConverter<InvoiceList> converter = new DefaultEntityStateConverter<InvoiceList>(); converter.createState(null); } @Test(expected=IllegalArgumentException.class) public void testInvalidDepth() { new DefaultEntityStateConverter<InvoiceList>(-1); } 
@Test(expected=IllegalArgumentException.class) public void testEntityStateInvalidProperty() { EntityState state = createState(); state.get("invalid"); } private EntityState createState() { return createStateFromId1(InvoiceList.class); } private <T> EntityState createStateFromId1(Class<T> type) { EntityManager manager = EntityManagerFactoryInitializer.getEntityManagerFactory().createEntityManager(); EntityQueryService<T> service = new DefaultEntityQueryService<T>(manager); T object = service.querySingle(type, "id==1"); EntityStateConverter<T> converter = new DefaultEntityStateConverter<T>(); EntityState state = converter.createState(object); return state; } }
src/test/java/com/github/tennaito/test/jpa/EntityStateConverterTest.java
/* * The MIT License * * Copyright 2015 Antonio Rabelo. * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/ package com.github.tennaito.test.jpa; import static junit.framework.Assert.assertEquals; import static junit.framework.Assert.assertTrue; import java.util.Collection; import java.util.Set; import javax.persistence.EntityManager; import org.junit.Test; import com.github.tennaito.entity.service.EntityQueryService; import com.github.tennaito.entity.service.data.DefaultEntityStateConverter; import com.github.tennaito.entity.service.data.EntityState; import com.github.tennaito.entity.service.data.EntityStateConverter; import com.github.tennaito.entity.service.impl.DefaultEntityQueryService; import com.github.tennaito.test.jpa.entity.InvoiceList; import com.github.tennaito.test.jpa.entity.Item; /** * @author Antonio Rabelo * */ public class EntityStateConverterTest extends AbstractEntityServicesTest { @Test public void testCreateState() { EntityState state = createState(); assertEquals("InvoiceList", state.getName()); assertEquals(1, state.get("id")); assertEquals("Fruits", state.get("description")); assertEquals(3, state.<Set<EntityState>>get("items").size()); } @Test public void testCreateStateList() { EntityManager manager = EntityManagerFactoryInitializer.getEntityManagerFactory().createEntityManager(); EntityQueryService<InvoiceList> service = new DefaultEntityQueryService<InvoiceList>(manager); InvoiceList invoice = service.querySingle(InvoiceList.class, "id==1"); EntityStateConverter<Item> converter = new DefaultEntityStateConverter<Item>(); Collection<EntityState> states = converter.createStateList(invoice.getItems()); assertEquals(3, states.size()); } @Test public void testCreateEntityList() { EntityState state = createState(); EntityStateConverter<Item> converter = new DefaultEntityStateConverter<Item>(); Collection<Item> states = converter.createEntityList(state.<Set<EntityState>>get("items")); assertEquals(3, states.size()); } @Test public void testStateEquals() { EntityState state1 = createState(); EntityState state2 = createState(); 
assertTrue(state1.equals(state2)); } @Test public void testCreateEntity() { EntityStateConverter<InvoiceList> converter = new DefaultEntityStateConverter<InvoiceList>(); EntityState state = createState(); InvoiceList entity = converter.createEntity(state); assertEquals(1, (Object) entity.getId()); assertEquals("Fruits", entity.getDescription()); assertEquals(3, entity.getItems().size()); for (Object element : entity.getItems()) { assertTrue(Item.class.isAssignableFrom(element.getClass())); } } @Test(expected=IllegalArgumentException.class) public void testInvalidCreateEntity() { EntityStateConverter<InvoiceList> converter = new DefaultEntityStateConverter<InvoiceList>(); converter.createEntity(null); } @Test(expected=IllegalArgumentException.class) public void testInvalidCreateState() { EntityStateConverter<InvoiceList> converter = new DefaultEntityStateConverter<InvoiceList>(); converter.createState(null); } @Test(expected=IllegalArgumentException.class) public void testInvalidDepth() { new DefaultEntityStateConverter<InvoiceList>(-1); } @Test(expected=IllegalArgumentException.class) public void testEntityStateInvalidProperty() { EntityState state = createState(); state.get("invalid"); } private EntityState createState() { EntityManager manager = EntityManagerFactoryInitializer.getEntityManagerFactory().createEntityManager(); EntityQueryService<InvoiceList> service = new DefaultEntityQueryService<InvoiceList>(manager); InvoiceList invoice = service.querySingle(InvoiceList.class, "id==1"); EntityStateConverter<InvoiceList> converter = new DefaultEntityStateConverter<InvoiceList>(); EntityState state = converter.createState(invoice); return state; } }
Adjustments.
src/test/java/com/github/tennaito/test/jpa/EntityStateConverterTest.java
Adjustments.
<ide><path>rc/test/java/com/github/tennaito/test/jpa/EntityStateConverterTest.java <ide> <ide> import static junit.framework.Assert.assertEquals; <ide> import static junit.framework.Assert.assertTrue; <add>import static junit.framework.Assert.assertFalse; <ide> <add>import java.lang.reflect.Field; <ide> import java.util.Collection; <ide> import java.util.Set; <ide> <ide> <ide> <ide> @Test <del> public void testStateEquals() { <add> public void testStateEquals() throws Exception { <ide> EntityState state1 = createState(); <ide> EntityState state2 = createState(); <ide> <add> // same reference <add> assertTrue(state1.equals(state1)); <add> // same object <ide> assertTrue(state1.equals(state2)); <add> // not the same... <add> assertFalse(state1.equals(null)); <add> assertFalse(state1.equals("teste")); <add> <add> String name = state1.getName(); <add> Field f = state1.getClass().getDeclaredField("name"); <add> f.setAccessible(true); <add> f.set(state1, null); <add> <add> assertFalse(state1.equals(state2)); <add> f.set(state1, name); <add> f.setAccessible(false); <add> <add> String description = state1.get("description"); <add> state1.put("description", "fail"); <add> <add> assertFalse(state1.equals(state2)); <add> <add> state1.put("description", description); <add> <add> Field p = state1.getClass().getDeclaredField("properties"); <add> p.setAccessible(true); <add> Object value = p.get(state1); <add> p.set(state1, null); <add> <add> assertFalse(state1.equals(state2)); <add> p.set(state1, value); <add> p.setAccessible(false); <add> <add> EntityState otherState = createStateFromId1(Item.class); <add> assertFalse(state1.equals(otherState)); <add> <ide> } <ide> <ide> @Test <ide> } <ide> <ide> private EntityState createState() { <add> return createStateFromId1(InvoiceList.class); <add> } <add> <add> private <T> EntityState createStateFromId1(Class<T> type) { <ide> EntityManager manager = EntityManagerFactoryInitializer.getEntityManagerFactory().createEntityManager(); <del> 
EntityQueryService<InvoiceList> service = new DefaultEntityQueryService<InvoiceList>(manager); <del> InvoiceList invoice = service.querySingle(InvoiceList.class, "id==1"); <add> EntityQueryService<T> service = new DefaultEntityQueryService<T>(manager); <add> T object = service.querySingle(type, "id==1"); <ide> <del> EntityStateConverter<InvoiceList> converter = new DefaultEntityStateConverter<InvoiceList>(); <del> EntityState state = converter.createState(invoice); <add> EntityStateConverter<T> converter = new DefaultEntityStateConverter<T>(); <add> EntityState state = converter.createState(object); <ide> return state; <del> } <add> } <ide> }
JavaScript
mit
b44044a3872d9df1e9e995b59c6faeb2f9f00c63
0
CoughDrop/gazelinger
(function() { var eyex; var eyetribe; var mygaze; try { eyex = require('eyex'); } catch(e) { } try { eyetribe = require('eyetribe'); } catch(e) { } try { mygaze = require('mygaze'); } catch(e) { } if(eyex) { try { eyex.setup(); } catch(e) { eyex = null; } } if(eyetribe) { // TODO... } if(mygaze) { // TODO... } var poll = null; var last_linger_ts = null; var last_gaze_ts = null; var gaze_history = []; var callbacks = []; var run_callbacks = function(message) { callbacks.forEach(function(cb) { cb(message); }); }; var gazelinger = { listen: function(callback) { if(!poll) { poll = window.setInterval(function() { if(callbacks.length == 0) { return; } var data = null; if(eyex && eyex.ping) { data = eyex.ping() } else if(eyetribe && eyetribe.ping) { // TODO ... } else if(mygaze && mygaze.ping) { // TODO ... } var message = { raw: data }; message.ts = (new Date()).getTime(); if(data.end_ts && data.begin_ts && data.end_ts > data.begin_ts && data.end_ts != last_linger_ts) { // console.log("linger duration " + (data.end_ts - data.begin_ts)); // console.log("time since last " + (data.end_ts - last_linger_ts)); last_linger_ts = data.end_ts; message = { raw: data}; message.ts = (new Date()).getTime(); message.screenX = data.data_x; //(data.data_x / ratio) - (window.screenInnerOffsetX || window.screenX); message.screenY = data.data_y; // = (data.data_y / ratio) - (window.screenInnerOffsetY || window.screenY); message.duration = (data.end_ts - data.begin_ts); message.type = 'linger'; run_callbacks(message); } if(data.gaze_ts && data.gaze_ts != last_gaze_ts) { last_gaze_ts = data.gaze_ts; message = { raw: data}; message.ts = (new Date()).getTime(); message.screenX = data.gaze_x; //(data.gaze_x / ratio) - (window.screenInnerOffsetX || window.screenX); message.screenY = data.gaze_y; //(data.gaze_y / ratio) - (window.screenInnerOffsetY || window.screenY); message.type = 'over'; message.ts = data.gaze_ts; run_callbacks(message); gaze_history.push({ x: message.screenX, y: 
message.screenY, ts: message.ts }); // prune based on distance from latest timestamp var new_history = []; gaze_history.forEach(function (e) { if (last_gaze_ts - e.ts < 200) { new_history.push(e); } }); gaze_history = new_history; // find a quick median var xs = gaze_history.sort(function (a, b) { return b.x - a.x }); var midx = xs[Math.ceil(xs.length / 2)]; var ys = gaze_history.sort(function (a, b) { return b.y - a.y }); var midy = ys[Math.ceil(ys.length / 2)]; // temporarily remove outliers if (midx && midy) { midx = midx.x; midy = midy.y; var filtered_history = gaze_history.filter(function (e) { return (Math.abs(e.x - midx) < 50) && (Math.abs(e.y - midy) < 50); }); if (filtered_history.length > 0) { var biggest_dist = 0; last_history_ts = gaze_history[0].ts; filtered_history.forEach(function (e) { biggest_dist = Math.max(biggest_dist, e.ts - last_history_ts); last_history_ts - e.ts; }); // if there are no significant time gaps, compute a new middle and trigger a linger event if (biggest_dist <= 50) { var mean_x = 0; var mean_y = 0; filtered_history.forEach(function (e) { mean_x = mean_x + e.x; mean_y = mean_y + e.y; }); mean_x = mean_x / filtered_history.length; mean_y = mean_y / filtered_history.length; message = { raw: data}; message.ts = (new Date()).getTime(); message.screenX = mean_x; message.screenY = mean_y; message.type = 'linger'; message.duration = filtered_history[filtered_history.length - 1].ts - filtered_history[0].ts; run_callbacks(message); gaze_history = gaze_history.slice(4, 50); } } } } }, 20); } callbacks.push(callback); }, stop_listening: function() { // TODO: support multiple listeners callbacks = []; window.clearInterval(poll); } }; module.exports = gazelinger; })();
gazelinger.js
(function() { let eyex; let eyetribe; let mygaze; try { eyex = require('eyex'); } catch(e) { } try { eyetribe = require('eyetribe'); } catch(e) { } try { mygaze = require('mygaze'); } catch(e) { } if(eyex) { try { eyex.setup(); } catch(e) { eyex = null; } } if(eyetribe) { // TODO... } if(mygaze) { // TODO... } var poll = null; var last_linger_ts = null; var last_gaze_ts = null; var gaze_history = []; var callbacks = []; var run_callbacks = function(message) { callbacks.forEach(function(cb) { cb(message); }); }; var gazelinger = { listen: function(callback) { if(!poll) { poll = window.setInterval(function() { if(callbacks.length == 0) { return; } var data = null; if(eyex && eyex.ping) { data = eyex.ping() } else if(eyetribe && eyetribe.ping) { } else if(mygaze && mygaze.ping) { } var message = { raw: data }; message.ts = (new Date()).getTime(); if(data.end_ts && data.begin_ts && data.end_ts > data.begin_ts && data.end_ts != last_linger_ts) { console.log("linger duration " + (data.end_ts - data.begin_ts)); console.log("time since last " + (data.end_ts - last_linger_ts)); last_linger_ts = data.end_ts; message = { raw: data}; message.ts = (new Date()).getTime(); message.screenX = data.data_x; //(data.data_x / ratio) - (window.screenInnerOffsetX || window.screenX); message.screenY = data.data_y; // = (data.data_y / ratio) - (window.screenInnerOffsetY || window.screenY); message.duration = (data.end_ts - data.begin_ts); message.type = 'linger'; run_callbacks(message); } if(data.gaze_ts && data.gaze_ts != last_gaze_ts) { last_gaze_ts = data.gaze_ts; message = { raw: data}; message.ts = (new Date()).getTime(); message.screenX = data.gaze_x; //(data.gaze_x / ratio) - (window.screenInnerOffsetX || window.screenX); message.screenY = data.gaze_y; //(data.gaze_y / ratio) - (window.screenInnerOffsetY || window.screenY); message.type = 'over'; message.ts = data.gaze_ts; run_callbacks(message); gaze_history.push({ x: message.x, y: message.y, ts: message.ts }); // prune based on 
distance from latest timestamp var new_history = []; gaze_history.forEach(function (e) { if (last_gaze_ts - e.ts < 200) { new_history.push(e); } }); gaze_history = new_history; // find a quick median var xs = gaze_history.sort(function (a, b) { return b.x - a.x }); var midx = xs[Math.ceil(xs.length / 2)]; var ys = gaze_history.sort(function (a, b) { return b.y - a.y }); var midy = ys[Math.ceil(ys.length / 2)]; // temporarily remove outliers if (midx && midy) { midx = midx.x; midy = midy.y; var filtered_history = gaze_history.filter(function (e) { return (Math.abs(e.x - midx) < 50) && (Math.abs(e.y - midy) < 50); }); if (filtered_history.length > 0) { var biggest_dist = 0; last_history_ts = gaze_history[0].ts; filtered_history.forEach(function (e) { biggest_dist = Math.max(biggest_dist, e.ts - last_history_ts); last_history_ts - e.ts; }); // if there are no significant time gaps, compute a new middle and trigger a linger event if (biggest_dist <= 50) { var mean_x = 0; var mean_y = 0; filtered_history.forEach(function (e) { mean_x = mean_x + e.x; mean_y = mean_y + e.y; }); mean_x = mean_x / filtered_history.length; mean_y = mean_y / filtered_history.length; message = { raw: data}; message.ts = (new Date()).getTime(); message.screenX = mean_x; message.screenY = mean_y; message.type = 'linger'; message.duration = filtered_history[filtered_history.length - 1].ts - filtered_history[0].ts; run_callbacks(message); gaze_history = gaze_history.slice(4, 50); } } } } }, 20); } callbacks.push(callback); }, stop_listening: function() { window.clearInterval(poll); } }; module.exports = gazelinger; })();
fix screen calculation
gazelinger.js
fix screen calculation
<ide><path>azelinger.js <ide> (function() { <del> let eyex; <del> let eyetribe; <del> let mygaze; <add> var eyex; <add> var eyetribe; <add> var mygaze; <ide> <ide> try { <ide> eyex = require('eyex'); <ide> if(eyex && eyex.ping) { <ide> data = eyex.ping() <ide> } else if(eyetribe && eyetribe.ping) { <add> // TODO ... <ide> } else if(mygaze && mygaze.ping) { <add> // TODO ... <ide> } <ide> <ide> var message = { raw: data }; <ide> message.ts = (new Date()).getTime(); <ide> <ide> if(data.end_ts && data.begin_ts && data.end_ts > data.begin_ts && data.end_ts != last_linger_ts) { <del> console.log("linger duration " + (data.end_ts - data.begin_ts)); <del> console.log("time since last " + (data.end_ts - last_linger_ts)); <add>// console.log("linger duration " + (data.end_ts - data.begin_ts)); <add>// console.log("time since last " + (data.end_ts - last_linger_ts)); <ide> last_linger_ts = data.end_ts; <ide> <ide> message = { raw: data}; <ide> run_callbacks(message); <ide> <ide> gaze_history.push({ <del> x: message.x, <del> y: message.y, <add> x: message.screenX, <add> y: message.screenY, <ide> ts: message.ts <ide> }); <ide> <ide> callbacks.push(callback); <ide> }, <ide> stop_listening: function() { <add> // TODO: support multiple listeners <add> callbacks = []; <ide> window.clearInterval(poll); <ide> } <ide> };
JavaScript
mit
7fcb12abd5e394b306746b1248742025e81801c5
0
HsuTing/generator-cat,HsuTing/generator-cat
'use strict'; import assert from 'yeoman-assert'; export default () => { it('src/__tests__/Index.js', () => { assert.fileContent( 'src/__tests__/Index.js', 'import Index from \'components/Index\'' ); assert.fileContent( 'src/__tests__/Index.js', 'it(\'Index\', () => {' ); assert.fileContent( 'src/__tests__/Index.js', '<Index />' ); assert.fileContent( 'src/__tests__/Index.js', '<div>This is Index!</div>' ); }); };
__tests__/files/add/jest/component.js
'use strict'; import assert from 'yeoman-assert'; export default () => { it('src/__tests__/Index.js', () => { assert.fileContent( 'src/__tests__/Index.js', 'import Index from \'components/Index\'' ); assert.fileContent( 'src/__tests__/Index.js', 'it(\'Index\', () => {' ); assert.fileContent( 'src/__tests__/Index.js', '<Index />' ); assert.fileContent( 'src/__tests__/Index.js', 'expect(wrapper.html()).toBe(\'This is Index!\');' ); }); };
fix test for component test
__tests__/files/add/jest/component.js
fix test for component test
<ide><path>_tests__/files/add/jest/component.js <ide> ); <ide> assert.fileContent( <ide> 'src/__tests__/Index.js', <del> 'expect(wrapper.html()).toBe(\'This is Index!\');' <add> '<div>This is Index!</div>' <ide> ); <ide> }); <ide> };
Java
mit
5efe248c72a81c4c5939c0c8bce432039f1f5db5
0
m-matsubara/sort
/* * Many Pivot Sort * * メニー・ピボット・ソート * 事前にピボット値をたくさん確定することで高速化を図った改良版クイックソート * * Copyright (c) 2015 masakazu matsubara * Released under the MIT license * https://github.com/m-matsubara/sort/blob/master/LICENSE.txt */ package mmsort; import java.util.Comparator; public class ManyPivotSort implements ISortAlgorithm { protected static final int PIVOTS_SIZE = 127; // ピボットリストのサイズ。大きすぎなければ何でもよいが、2のベぎ乗 - 1が無駄がなくてよい。 protected static final int SWITCH_SIZE = 3000; // Quick Sort (Median of 3) に切り替えるサイズ /** * Many pivot sort * * メニー・ピボット・ソート * * internal method (Added argument the pivot array) * 内部的に呼び出される。ピボットの配列(ピボット候補)を引数にもつ * * @param array sort target / ソート対象 * @param from index of first element / ソート対象の開始位置 * @param to index of last element (exclusive) / ソート対象の終了位置 + 1 * @param pivots array of pivot / ピボットの配列 * @param fromPivots from index of pivots / 使用対象となる pivots 配列の添え字の最小値 * @param toPivots to index of pivots (last element of exclusive) / 使用対象となる pivots 配列の添え字の最大値 + 1 * @param comparator comparator of array element / 比較器 */ protected static final <T> void mpSort(final T[] array, final int from, final int to, final T[] pivots, final int fromPivots, final int toPivots, final Comparator<? 
super T> comparator) { final int pivotIdx = fromPivots + (toPivots - fromPivots) / 2; // using index from pivots (center position) / pivots配列の中で、今回使うべき要素の添え字 final T pivot = pivots[pivotIdx]; // pivot value / ピボット値 int curFrom = from; // min index / 現在処理中位置の小さい方の位置 int curTo = to - 1; // max index / 現在処理中位置の大きい方の位置 while (true) { while (comparator.compare(array[curFrom], pivot) < 0) curFrom++; while (comparator.compare(pivot, array[curTo]) < 0) curTo--; if (curTo < curFrom) break; final T work = array[curFrom]; array[curFrom] = array[curTo]; array[curTo] = work; curFrom++; curTo--; } if (from < curTo) { if (fromPivots >= pivotIdx - 3) // pivotsの残りが3つを切ったらpivotsを作り直す。(最後まで使い切らないのは、最後の1個は範囲内の中間値に近いとは言えないので) mpSort(array, from, curTo + 1, comparator); else mpSort(array, from, curTo + 1, pivots, fromPivots, pivotIdx, comparator); } if (curFrom < to - 1) { if (pivotIdx + 1 >= toPivots - 3) // pivotsの残りが3つを切ったらpivotsを作り直す。(最後まで使い切らないのは、最後の1個は範囲内の中間値に近いとは言えないので) mpSort(array, curFrom, to, comparator); else mpSort(array, curFrom, to, pivots, pivotIdx + 1, toPivots, comparator); } } /** * Many pivot sort * * メニー・ピボット・ソート * @param array sort target / ソート対象 * @param from index of first element / ソート対象の開始位置 * @param to index of last element (exclusive) / ソート対象の終了位置 + 1 * @param comparator comparator of array element / 比較器 */ public static final <T> void mpSort(final T[] array, final int from, final int to, final Comparator<? 
super T> comparator) { final int range = to - from; // sort range / ソート範囲サイズ // ソート対象配列サイズが3以下のときは特別扱い if (range < SWITCH_SIZE) { // しきい値以下ではクイックソート(3つのメディアン)に切り替える。 QuickSortM3.quickSortMedian3(array, from, to, comparator); return; } int pivotsSize = PIVOTS_SIZE; /* if (range >= 1000000) pivotsSize = 2048 - 1; else if (range >= 500000) pivotsSize = 1024 - 1; else if (range >= 250000) pivotsSize = 512 - 1; else if (range >= 120000) pivotsSize = 256 - 1; else if (range >= 60000) pivotsSize = 128 - 1; else if (range >= 30000) pivotsSize = 64 - 1; else pivotsSize = 32 - 1; */ @SuppressWarnings("unchecked") final T[] pivots = (T[])new Object[pivotsSize]; // pivot candidates / ピボット候補の配列 // Selection of the pivot values (Binary insertion sort ish processing). // ピボット(複数)の選出 for (int i = 0; i < pivots.length; i++) { pivots[i] = array[(int)(from + (long)range * i / pivots.length + range / 2 / pivots.length)]; } // sort of pivot candidates / ピボット値のみをソート BinInsertionSort.binInsertionSort(pivots, 0, pivots.length, comparator); // sort of array / ソート対象本体のソート mpSort(array, from, to, pivots, 0, pivots.length, comparator); } @Override public <T> void sort(final T[] array, final int from, final int to, final Comparator<? super T> comparator) { mpSort(array, from, to, comparator); } @Override public boolean isStable() { return false; } @Override public String getName() { return "Many Pivot Sort"; } }
src/mmsort/ManyPivotSort.java
/* * Many Pivot Sort * * メニー・ピボット・ソート * 事前にピボット値をたくさん確定することで高速化を図った改良版クイックソート * * Copyright (c) 2015 masakazu matsubara * Released under the MIT license * https://github.com/m-matsubara/sort/blob/master/LICENSE.txt */ package mmsort; import java.util.Comparator; public class ManyPivotSort implements ISortAlgorithm { protected static final int PIVOTS_SIZE = 127; // ピボットリストのサイズ。大きすぎなければ何でもよいが、2のベぎ乗 - 1が無駄がなくてよい。 protected static final int SWITCH_SIZE = 3000; // Quick Sort (Median of 3) に切り替えるサイズ /** * Many pivot sort * * メニー・ピボット・ソート * * internal method (Added argument the pivot array) * 内部的に呼び出される。ピボットの配列(ピボット候補)を引数にもつ * * @param array sort target / ソート対象 * @param from index of first element / ソート対象の開始位置 * @param to index of last element (exclusive) / ソート対象の終了位置 + 1 * @param pivots array of pivot / ピボットの配列 * @param fromPivots from index of pivots / 使用対象となる pivots 配列の添え字の最小値 * @param toPivots to index of pivots (last element of exclusive) / 使用対象となる pivots 配列の添え字の最大値 + 1 * @param comparator comparator of array element / 比較器 */ protected static final <T> void mpSort(final T[] array, final int from, final int to, final T[] pivots, final int fromPivots, final int toPivots, final Comparator<? 
super T> comparator) { final int pivotIdx = fromPivots + (toPivots - fromPivots) / 2; // using index from pivots (center position) / pivots配列の中で、今回使うべき要素の添え字 final T pivot = pivots[pivotIdx]; // pivot value / ピボット値 int curFrom = from; // min index / 現在処理中位置の小さい方の位置 int curTo = to - 1; // max index / 現在処理中位置の大きい方の位置 while (true) { while (comparator.compare(array[curFrom], pivot) < 0) curFrom++; while (comparator.compare(pivot, array[curTo]) < 0) curTo--; if (curTo < curFrom) break; final T work = array[curFrom]; array[curFrom] = array[curTo]; array[curTo] = work; curFrom++; curTo--; } if (from < curTo) { if (fromPivots >= pivotIdx - 3) // pivotsの残りが3つを切ったらpivotsを作り直す。(最後まで使い切らないのは、最後の1個は範囲内の中間値に近いとは言えないので) mpSort(array, from, curTo + 1, comparator); else mpSort(array, from, curTo + 1, pivots, fromPivots, pivotIdx, comparator); } if (curFrom < to - 1) { if (pivotIdx + 1 >= toPivots - 3) // pivotsの残りが3つを切ったらpivotsを作り直す。(最後まで使い切らないのは、最後の1個は範囲内の中間値に近いとは言えないので) mpSort(array, curFrom, to, comparator); else mpSort(array, curFrom, to, pivots, pivotIdx + 1, toPivots, comparator); } } /** * Many pivot sort * * メニー・ピボット・ソート * @param array sort target / ソート対象 * @param from index of first element / ソート対象の開始位置 * @param to index of last element (exclusive) / ソート対象の終了位置 + 1 * @param comparator comparator of array element / 比較器 */ public static final <T> void mpSort(final T[] array, final int from, final int to, final Comparator<? 
super T> comparator) { final int range = to - from; // sort range / ソート範囲サイズ // ソート対象配列サイズが3以下のときは特別扱い if (range <= 1) { return; } else if (range == 2) { if (comparator.compare(array[from + 1], array[from]) < 0) { T work = array[from]; array[from] = array[from + 1]; array[from + 1] = work; } return; } else if (range == 3) { if (comparator.compare(array[from + 1], array[from]) < 0) { T work = array[from]; array[from] = array[from + 1]; array[from + 1] = work; } if (comparator.compare(array[from + 2], array[from + 1]) < 0) { T work = array[from + 1]; array[from + 1] = array[from + 2]; array[from + 2] = work; if (comparator.compare(array[from + 1], array[from]) < 0) { work = array[from]; array[from] = array[from + 1]; array[from + 1] = work; } } return; } if (range < SWITCH_SIZE) { // しきい値以下ではクイックソート(3つのメディアン)に切り替える。 QuickSortM3.quickSortMedian3(array, from, to, comparator); return; } int pivotsSize = PIVOTS_SIZE; /* if (range >= 1000000) pivotsSize = 2048 - 1; else if (range >= 500000) pivotsSize = 1024 - 1; else if (range >= 250000) pivotsSize = 512 - 1; else if (range >= 120000) pivotsSize = 256 - 1; else if (range >= 60000) pivotsSize = 128 - 1; else if (range >= 30000) pivotsSize = 64 - 1; else pivotsSize = 32 - 1; */ @SuppressWarnings("unchecked") final T[] pivots = (T[])new Object[pivotsSize]; // pivot candidates / ピボット候補の配列 // ピボット(複数)の選出 for (int i = 0; i < pivots.length; i++) { pivots[i] = array[(int)(from + (long)range * i / pivots.length + range / 2 / pivots.length)]; } // sort of pivot candidates / ピボット値のみをソート BinInsertionSort.binInsertionSort(pivots, 0, pivots.length, comparator); // sort of array / ソート対象本体のソート mpSort(array, from, to, pivots, 0, pivots.length, comparator); } @Override public <T> void sort(final T[] array, final int from, final int to, final Comparator<? super T> comparator) { mpSort(array, from, to, comparator); } @Override public boolean isStable() { return false; } @Override public String getName() { return "Many Pivot Sort"; } }
不要ロジック削除
src/mmsort/ManyPivotSort.java
不要ロジック削除
<ide><path>rc/mmsort/ManyPivotSort.java <ide> final int range = to - from; // sort range / ソート範囲サイズ <ide> <ide> // ソート対象配列サイズが3以下のときは特別扱い <del> if (range <= 1) { <del> return; <del> } else if (range == 2) { <del> if (comparator.compare(array[from + 1], array[from]) < 0) { <del> T work = array[from]; <del> array[from] = array[from + 1]; <del> array[from + 1] = work; <del> } <del> return; <del> } else if (range == 3) { <del> if (comparator.compare(array[from + 1], array[from]) < 0) { <del> T work = array[from]; <del> array[from] = array[from + 1]; <del> array[from + 1] = work; <del> } <del> if (comparator.compare(array[from + 2], array[from + 1]) < 0) { <del> T work = array[from + 1]; <del> array[from + 1] = array[from + 2]; <del> array[from + 2] = work; <del> if (comparator.compare(array[from + 1], array[from]) < 0) { <del> work = array[from]; <del> array[from] = array[from + 1]; <del> array[from + 1] = work; <del> } <del> } <del> return; <del> } <del> <ide> if (range < SWITCH_SIZE) { <ide> // しきい値以下ではクイックソート(3つのメディアン)に切り替える。 <ide> QuickSortM3.quickSortMedian3(array, from, to, comparator); <ide> @SuppressWarnings("unchecked") <ide> final T[] pivots = (T[])new Object[pivotsSize]; // pivot candidates / ピボット候補の配列 <ide> <del> // ピボット(複数)の選出 <add> // Selection of the pivot values (Binary insertion sort ish processing). <add> // ピボット(複数)の選出 <ide> for (int i = 0; i < pivots.length; i++) { <ide> pivots[i] = array[(int)(from + (long)range * i / pivots.length + range / 2 / pivots.length)]; <ide> } <del> // sort of pivot candidates / ピボット値のみをソート <add> // sort of pivot candidates / ピボット値のみをソート <ide> BinInsertionSort.binInsertionSort(pivots, 0, pivots.length, comparator); <del> // sort of array / ソート対象本体のソート <add> // sort of array / ソート対象本体のソート <ide> mpSort(array, from, to, pivots, 0, pivots.length, comparator); <ide> } <ide>
Java
apache-2.0
2ffa743d7745550d0e2e59681097171717a532ad
0
marssa/footprint
/** * Copyright 2012 MARSEC-XL International Limited * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package mise.marssa.footprint.datatypes.decimal; import static javax.measure.unit.NonSI.MILE; import java.math.MathContext; import javax.persistence.Entity; import javax.xml.bind.annotation.XmlType; import mise.marssa.footprint.datatypes.MString; import mise.marssa.footprint.exceptions.OutOfRange; import flexjson.JSONSerializer; /** * @author Alan Grech * @version 1.0 * @created 08-Jul-2011 09:53:24 */ @XmlType(name = "DegreesDecimal") @Entity public class DegreesDecimal extends MDecimal { /** * */ private static final long serialVersionUID = -6725449434062082433L; private DegreesDecimal() { super(0); } public DegreesDecimal(double value) { super(value); } public DegreesDecimal(double value,MathContext mc) { super(value,mc); } public MString toJSON() { MString JSON = new MString(new JSONSerializer().exclude("value") .deepSerialize(this)); return JSON; } }
src/main/java/mise/marssa/footprint/datatypes/decimal/DegreesDecimal.java
/** * Copyright 2012 MARSEC-XL International Limited * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package mise.marssa.footprint.datatypes.decimal; import javax.persistence.Entity; import javax.xml.bind.annotation.XmlType; import mise.marssa.footprint.datatypes.MString; import flexjson.JSONSerializer; /** * @author Alan Grech * @version 1.0 * @created 08-Jul-2011 09:53:24 */ @XmlType(name = "DegreesDecimal") @Entity public class DegreesDecimal extends MDecimal { /** * */ private static final long serialVersionUID = -6725449434062082433L; private DegreesDecimal() { super(0); } public DegreesDecimal(double value) { super(value); } public MString toJSON() { MString JSON = new MString(new JSONSerializer().exclude("value") .deepSerialize(this)); return JSON; } }
a new constructor taking MathContext as a parameter
src/main/java/mise/marssa/footprint/datatypes/decimal/DegreesDecimal.java
a new constructor taking MathContext as a parameter
<ide><path>rc/main/java/mise/marssa/footprint/datatypes/decimal/DegreesDecimal.java <ide> */ <ide> package mise.marssa.footprint.datatypes.decimal; <ide> <add>import static javax.measure.unit.NonSI.MILE; <add> <add>import java.math.MathContext; <add> <ide> import javax.persistence.Entity; <ide> import javax.xml.bind.annotation.XmlType; <ide> <ide> import mise.marssa.footprint.datatypes.MString; <add>import mise.marssa.footprint.exceptions.OutOfRange; <ide> import flexjson.JSONSerializer; <ide> <ide> /** <ide> super(value); <ide> } <ide> <add> public DegreesDecimal(double value,MathContext mc) { <add> super(value,mc); <add> } <add> <ide> public MString toJSON() { <ide> MString JSON = new MString(new JSONSerializer().exclude("value") <ide> .deepSerialize(this));
Java
apache-2.0
9f0bac50be2a19ffdc67db6f996faab7274e20cf
0
rdblue/kite,bbrownz/kite,prazanna/kite,gabrielreid/kite,kite-sdk/kite,bbrownz/kite,joey/kite,mkwhitacre/kite,joey/kite,prazanna/kite,gabrielreid/kite,scalingdata/cdk,andrewrothstein/kite,ronanstokes/kite,scalingdata/cdk,andrewrothstein/kite,dlanza1/kite,scalingdata/cdk,megnataraj/kite,tinkujohn/kite,bbrownz/kite,rdblue/kite,StephanieMak/kite,ronanstokes/kite,rbrush/kite,rbrush/kite,tinkujohn/kite,megnataraj/kite,StephanieMak/kite,EdwardSkoviak/kite,kite-sdk/kite,mkwhitacre/kite,bbaugher/kite,gabrielreid/kite,bbaugher/kite,StephanieMak/kite,megnataraj/kite,rdblue/kite,bbaugher/kite,joey/kite,kite-sdk/kite,ronanstokes/kite,EdwardSkoviak/kite,tinkujohn/kite,busbey/kite,EdwardSkoviak/kite,andrewrothstein/kite,rbrush/kite,busbey/kite,mkwhitacre/kite,dlanza1/kite,busbey/kite,prazanna/kite
/** * Copyright 2014 Cloudera Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kitesdk.minicluster.cli; import com.beust.jcommander.DynamicParameter; import com.beust.jcommander.Parameter; import com.beust.jcommander.Parameters; import com.google.common.base.Preconditions; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.concurrent.CountDownLatch; import org.apache.hadoop.conf.Configuration; import org.kitesdk.minicluster.MiniCluster; import org.kitesdk.minicluster.Service; import org.slf4j.Logger; @Parameters(commandDescription = "Run a minicluster of services. 
The following" + " service short names exist: hdfs, zk, hbase, hive, flume.") public class RunCommand implements Command { private static final Map<String, String> simpleServiceNameMap = Maps .newHashMap(); static { simpleServiceNameMap.put("hdfs", "org.kitesdk.minicluster.HdfsService"); simpleServiceNameMap.put("zk", "org.kitesdk.minicluster.ZookeeperService"); simpleServiceNameMap.put("flume", "org.kitesdk.minicluster.FlumeService"); simpleServiceNameMap.put("hbase", "org.kitesdk.minicluster.HBaseService"); simpleServiceNameMap.put("hive", "org.kitesdk.minicluster.HiveService"); } private final Logger console; private final Configuration conf; @Parameter(description = "<service names>") public List<String> services = new ArrayList<String>(); @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "UWF_NULL_FIELD", justification = "Field set by JCommander") @Parameter(names = { "-d", "--work-dir" }, description = "The base directory to store mini cluster data in. Defaults to /tmp/kite-minicluster.") String workDir = "/tmp/kite-minicluster"; @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "UWF_NULL_FIELD", justification = "Field set by JCommander") @Parameter(names = { "-b", "--bind" }, description = "The IP address for all mini cluster services to bind to. Defaults to 127.0.0.1.") String bindIP = "127.0.0.1"; @Parameter(names = { "-c", "--clean" }, description = "Clean the mini cluster data directory before starting.") boolean clean = false; @Parameter(names = "--hdfs-namenode-rpc-port", description = "The namenode RPC port. Defaults to 8020.") int namenodeRpcPort = 8020; @Parameter(names = "--zk-port", description = "The zookeeper port. Defaults to 2828.") int zkPort = 2828; @Parameter(names = "--hive-metastore-port", description = "The Hive Metastore port. Defaults to 9083.") int hiveMetastorePort = 9083; @Parameter(names = "--hive-server-port", description = "The Hive Server port. 
Defaults to 10000.") int hiveServerPort = 10000; @Parameter(names = "--flume-configuration" , description = "The Flume configuration file.") String flumeConfiguration; @Parameter(names = "--flume-agent-name" , description = "The name of the Flume agent.") String flumeAgentName; @DynamicParameter(names = "-D", description = "Service specific configs go here. These configs are passed through " + "to the ServiceConfig using the key/value specified here.") private Map<String, String> serviceParams = new HashMap<String, String>(); public RunCommand(Logger console, Configuration conf) { this.console = console; this.conf = conf; } @SuppressWarnings("unchecked") @Override public int run() throws IOException { Preconditions.checkArgument(services.size() > 0, "At least one service must be specified."); Preconditions.checkArgument(workDir != null, "A valid work dir is required."); MiniCluster.Builder builder = new MiniCluster.Builder().workDir(workDir) .clean(clean).hadoopConf(conf).bindIP(bindIP) .namenodeRpcPort(namenodeRpcPort) .zkPort(zkPort) .hiveMetastorePort(hiveMetastorePort).hiveServerPort(hiveServerPort); if (flumeConfiguration != null) { builder.flumeConfiguration(flumeConfiguration); } if (flumeAgentName != null) { builder.flumeAgentName(flumeAgentName); } for (String serviceName : services) { if (simpleServiceNameMap.containsKey(serviceName)) { serviceName = simpleServiceNameMap.get(serviceName); } try { builder.addService((Class<? extends Service>) Class .forName(serviceName)); } catch (ClassNotFoundException e) { console.error("Unknown service class specified: " + serviceName); throw new RuntimeException(e); } } for (Entry<String, String> serviceParam : serviceParams.entrySet()) { builder.setServiceConfig(serviceParam.getKey(), serviceParam.getValue()); } final MiniCluster miniCluster = builder.build(); // Create an exit thread that listens for a kill command, and notifies the // main thread to exit. 
final CountDownLatch doneSignal = new CountDownLatch(1); Thread exitThread = new Thread() { @Override public void run() { try { miniCluster.stop(); } catch (Throwable e) { console.error("Error stopping mini cluster. Exiting anyways...", e); } doneSignal.countDown(); } }; Runtime.getRuntime().addShutdownHook(exitThread); // Start the mini cluster, and wait for the exit notification. try { miniCluster.start(); doneSignal.await(); } catch (InterruptedException e) { Thread.currentThread().interrupt(); return 1; } return 0; } @Override public List<String> getExamples() { return Lists .newArrayList( "# Run a mini HDFS cluster:", "hdfs", "# Run an HBase cluster that forces everything to listen on IP 10.0.0.1:", "hdfs zk hbase -b 10.0.0.1", "# Run an HBase cluster, cleaning out any data from previous cluster runs:", "hdfs zk hbase -d /tmp/kite-minicluster -c", "# Run an HBase cluster with custom ports using the service configs:", "hdfs zk hbase -Dhbase-master-port=63000 -Dhbase-regionserver-port=63020"); } }
kite-minicluster/src/main/java/org/kitesdk/minicluster/cli/RunCommand.java
/** * Copyright 2014 Cloudera Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kitesdk.minicluster.cli; import com.beust.jcommander.DynamicParameter; import com.beust.jcommander.Parameter; import com.beust.jcommander.Parameters; import com.google.common.base.Preconditions; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.concurrent.CountDownLatch; import org.apache.hadoop.conf.Configuration; import org.kitesdk.minicluster.MiniCluster; import org.kitesdk.minicluster.Service; import org.slf4j.Logger; @Parameters(commandDescription = "Run a minicluster of services. 
The following" + " service short names exist: hdfs, zk, hbase, hive, flume.") public class RunCommand implements Command { private static final Map<String, String> simpleServiceNameMap = Maps .newHashMap(); static { simpleServiceNameMap.put("hdfs", "org.kitesdk.minicluster.HdfsService"); simpleServiceNameMap.put("zk", "org.kitesdk.minicluster.ZookeeperService"); simpleServiceNameMap.put("flume", "org.kitesdk.minicluster.FlumeService"); simpleServiceNameMap.put("hbase", "org.kitesdk.minicluster.HBaseService"); simpleServiceNameMap.put("hive", "org.kitesdk.minicluster.HiveService"); } private final Logger console; private final Configuration conf; @Parameter(description = "<service names>") public List<String> services = new ArrayList<String>(); @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "UWF_NULL_FIELD", justification = "Field set by JCommander") @Parameter(names = { "-d", "--work-dir" }, description = "The base directory to store mini cluster data in. Defaults to /tmp/kite-minicluster.") String workDir = "/tmp/kite-minicluster"; @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "UWF_NULL_FIELD", justification = "Field set by JCommander") @Parameter(names = { "-b", "--bind" }, description = "The IP address for all mini cluster services to bind to. Defaults to 127.0.0.1.") String bindIP = "127.0.0.1"; @Parameter(names = { "-c", "--clean" }, description = "Clean the mini cluster data directory before starting.") boolean clean = false; @Parameter(names = "--hdfs-namenode-rpc-port", description = "The namenode RPC port. Defaults to 8020.") int namenodeRpcPort = 8020; @Parameter(names = "--zk-port", description = "The zookeeper port. Defaults to 2828.") int zkPort = 2828; @Parameter(names = "--hive-metastore-port", description = "The Hive Metastore port. Defaults to 9083.") int hiveMetastorePort = 9083; @Parameter(names = "--hive-server-port", description = "The Hive Server port. 
Defaults to 10000.") int hiveServerPort = 10000; @Parameter(names = "--flume-configuration" , description = "The Flume configuration file.") String flumeConfiguration; @Parameter(names = "--flume-agent-name" , description = "The name of the Flume agent.") String flumeAgentName; @DynamicParameter(names = "-D", description = "Service specific configs go here. These configs are passed through " + "to the ServiceConfig using the key/value specified here.") private Map<String, String> serviceParams = new HashMap<String, String>(); public RunCommand(Logger console, Configuration conf) { this.console = console; this.conf = conf; } @SuppressWarnings("unchecked") @Override public int run() throws IOException { Preconditions.checkArgument(services.size() > 0, "At least one service must be specified."); Preconditions.checkArgument(workDir != null, "A valid work dir is required."); MiniCluster.Builder builder = new MiniCluster.Builder().workDir(workDir) .clean(clean).hadoopConf(conf).bindIP(bindIP) .namenodeRpcPort(namenodeRpcPort) .zkPort(zkPort) .hiveMetastorePort(hiveMetastorePort).hiveServerPort(hiveServerPort) .flumeConfiguration(flumeConfiguration).flumeAgentName(flumeAgentName); for (String serviceName : services) { if (simpleServiceNameMap.containsKey(serviceName)) { serviceName = simpleServiceNameMap.get(serviceName); } try { builder.addService((Class<? extends Service>) Class .forName(serviceName)); } catch (ClassNotFoundException e) { console.error("Unknown service class specified: " + serviceName); throw new RuntimeException(e); } } for (Entry<String, String> serviceParam : serviceParams.entrySet()) { builder.setServiceConfig(serviceParam.getKey(), serviceParam.getValue()); } final MiniCluster miniCluster = builder.build(); // Create an exit thread that listens for a kill command, and notifies the // main thread to exit. 
final CountDownLatch doneSignal = new CountDownLatch(1); Thread exitThread = new Thread() { @Override public void run() { try { miniCluster.stop(); } catch (Throwable e) { console.error("Error stopping mini cluster. Exiting anyways...", e); } doneSignal.countDown(); } }; Runtime.getRuntime().addShutdownHook(exitThread); // Start the mini cluster, and wait for the exit notification. try { miniCluster.start(); doneSignal.await(); } catch (InterruptedException e) { Thread.currentThread().interrupt(); return 1; } return 0; } @Override public List<String> getExamples() { return Lists .newArrayList( "# Run a mini HDFS cluster:", "hdfs", "# Run an HBase cluster that forces everything to listen on IP 10.0.0.1:", "hdfs zk hbase -b 10.0.0.1", "# Run an HBase cluster, cleaning out any data from previous cluster runs:", "hdfs zk hbase -d /tmp/kite-minicluster -c", "# Run an HBase cluster with custom ports using the service configs:", "hdfs zk hbase -Dhbase-master-port=63000 -Dhbase-regionserver-port=63020"); } }
Fix NPE in minicluster RunCommand.
kite-minicluster/src/main/java/org/kitesdk/minicluster/cli/RunCommand.java
Fix NPE in minicluster RunCommand.
<ide><path>ite-minicluster/src/main/java/org/kitesdk/minicluster/cli/RunCommand.java <ide> .clean(clean).hadoopConf(conf).bindIP(bindIP) <ide> .namenodeRpcPort(namenodeRpcPort) <ide> .zkPort(zkPort) <del> .hiveMetastorePort(hiveMetastorePort).hiveServerPort(hiveServerPort) <del> .flumeConfiguration(flumeConfiguration).flumeAgentName(flumeAgentName); <add> .hiveMetastorePort(hiveMetastorePort).hiveServerPort(hiveServerPort); <add> if (flumeConfiguration != null) { <add> builder.flumeConfiguration(flumeConfiguration); <add> } <add> if (flumeAgentName != null) { <add> builder.flumeAgentName(flumeAgentName); <add> } <ide> for (String serviceName : services) { <ide> if (simpleServiceNameMap.containsKey(serviceName)) { <ide> serviceName = simpleServiceNameMap.get(serviceName);
Java
mit
6a607166bae2d7c53e950dcb8fd391be397ab11c
0
JCThePants/NucleusFramework,JCThePants/NucleusFramework
/* This file is part of GenericsLib for Bukkit, licensed under the MIT License (MIT). * * Copyright (c) JCThePants (www.jcwhatever.com) * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package com.jcwhatever.bukkit.generic.events; import com.jcwhatever.bukkit.generic.events.exceptions.EventManagerDisposedException; import com.jcwhatever.bukkit.generic.events.exceptions.HandlerAlreadyRegisteredException; import com.jcwhatever.bukkit.generic.events.exceptions.ListenerAlreadyRegisteredException; import com.jcwhatever.bukkit.generic.utils.PreCon; import javax.annotation.Nullable; import java.lang.reflect.Method; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Set; /** * Generics event manager. */ public class GenericsEventManager { private static GenericsEventManager _globalInstance; /** * Get the global event manager. 
*/ public static GenericsEventManager getGlobal() { if (_globalInstance == null) _globalInstance = new GenericsEventManager(true); return _globalInstance; } private final Map<Class<?>, EventHandlerCollection> _handlerMap = new HashMap<>(100); private final Map<GenericsEventListener, ListenerContainer> _listeners = new HashMap<>(100); private final GenericsEventManager _parent; private final boolean _isGlobal; private boolean _isDisposed; /** * Constructor. * * <p>Create a new event manager using the global event manager as parent.</p> */ public GenericsEventManager() { this(getGlobal()); } /** * Constructor. * * @param parent The parent event manager. The parent receives all * event calls the child receives. */ public GenericsEventManager(@Nullable GenericsEventManager parent) { _parent = parent; _isGlobal = false; } /** * Private constructor for global event manager. */ private GenericsEventManager(boolean isGlobal) { _parent = null; _isGlobal = isGlobal; } /** * Register an event handler for the specified event. * * @param eventClass The event class. * @param priority The event priority. * @param handler The event handler. */ public void register(Class<? extends AbstractGenericsEvent> eventClass, GenericsEventPriority priority, EventHandler handler) { PreCon.notNull(eventClass); PreCon.notNull(priority); PreCon.notNull(handler); // cannot use a disposed event manager if (_isDisposed) throw new EventManagerDisposedException(); // get event handler collection for the event EventHandlerCollection handlers =_handlerMap.get(eventClass); // add an event handler collection if one does not exist if (handlers == null) { handlers = new EventHandlerCollection(); _handlerMap.put(eventClass, handlers); } // add the handler to the handler collection if (!handlers.add(handler, priority)) { throw new HandlerAlreadyRegisteredException(handler); } } /** * Register an event listener. * * @param eventListener The event listener. 
*/ public void register(GenericsEventListener eventListener) { PreCon.notNull(eventListener); // cannot use a disposed event manager if (_isDisposed) throw new EventManagerDisposedException(); // listeners can only be registered once. if (_listeners.containsKey(eventListener)) { throw new ListenerAlreadyRegisteredException(eventListener); } // create a listener container ListenerContainer listener = new ListenerContainer(eventListener); _listeners.put(eventListener, listener); // get all methods from listener so we can filter out the event handlers Method[] methods = eventListener.getClass().getDeclaredMethods(); // filter out the event handlers for (Method method : methods) { // event handlers must have a special annotation GenericsEventHandler annotation = method.getAnnotation(GenericsEventHandler.class); if (annotation == null) continue; // event handlers must have exactly one parameter Class<?>[] paramTypes = method.getParameterTypes(); if (paramTypes == null || paramTypes.length != 1) continue; // event handler parameter must be a type that extends AbstractGenericsEvent Class<?> eventClass = paramTypes[0]; if (!AbstractGenericsEvent.class.isAssignableFrom(eventClass)) continue; // get the event handler collection for the event EventHandlerCollection handlers = _handlerMap.get(eventClass); // create a new event handler collection if one is not present if (handlers == null) { handlers = new EventHandlerCollection(); _handlerMap.put(eventClass, handlers); } // add the event handler to the handlers collection try { handlers.add(eventListener, eventClass, method, annotation); } catch (IllegalAccessException e) { e.printStackTrace(); continue; } // add the handler to the listener container listener.addHandlers(handlers); } } /** * Unregister an event listener * * @param eventListener The event listener to unregister. */ public void unregister(GenericsEventListener eventListener) { PreCon.notNull(eventListener); // cannot use a disposed event manager. 
if (_isDisposed) return; // get the listener container. ListenerContainer listener = _listeners.remove(eventListener); if (listener == null) return; // unregister listener.unregister(); } /** * Unregister an event handler from the specified event. * * @param eventClass The event class. * @param handler The event handler to unregister. */ public void unregister(Class<? extends AbstractGenericsEvent> eventClass, EventHandler handler) { PreCon.notNull(eventClass); PreCon.notNull(handler); // cannot use a disposed event manager. if (_isDisposed) return; // get the handlers collection for the event EventHandlerCollection handlers =_handlerMap.get(eventClass); if (handlers == null) { return; } // remove the handler handlers.removeHandler(handler); } /** * Call an event. * * @param event The event to call. * @param <T> The event type which must extend {@code AbstractGenericsEvent} */ public <T extends AbstractGenericsEvent> T call(T event) { PreCon.notNull(event); // cannot use a disposed event manager if (_isDisposed) throw new EventManagerDisposedException(); // call event on parent first. if (_parent != null) { _parent.call(event); } // get event handler collection EventHandlerCollection handlers = _handlerMap.get(event.getClass()); if (handlers == null) return event; // call event on handlers. handlers.call(event); return event; } /** * Dispose the event manager. */ public void dispose() { // The global manager cannot be disposed. if (_isGlobal) throw new RuntimeException("Cannot dispose the global event manager."); _isDisposed = true; // clear event handlers on all handler collections for (EventHandlerCollection handlers : _handlerMap.values()) { handlers.clear(); } _handlerMap.clear(); // unregister all listeners for (ListenerContainer listener : _listeners.values()) { listener.unregister(); } _listeners.clear(); } /** * A container for a generics listener that contains the * event handler collections which contain the listeners * event handlers. 
*/ private static class ListenerContainer { private GenericsEventListener _listener; private Set<EventHandlerCollection> _handlers = new HashSet<>(50); /** * Constructor. * * @param listener The listener to encapsulate. */ ListenerContainer(GenericsEventListener listener) { _listener = listener; } /** * Add an event handlers collection that one of the * listeners event handlers have been added to so * it will have a means to unregister from the handlers * collection. * * @param handlers The handler collection to add. */ public void addHandlers(EventHandlerCollection handlers) { _handlers.add(handlers); } /** * Unregister the listener from the handler collections. */ public void unregister() { for (EventHandlerCollection handlers : _handlers) { handlers.removeListener(_listener); } _handlers.clear(); } } }
src/com/jcwhatever/bukkit/generic/events/GenericsEventManager.java
/* This file is part of GenericsLib for Bukkit, licensed under the MIT License (MIT). * * Copyright (c) JCThePants (www.jcwhatever.com) * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package com.jcwhatever.bukkit.generic.events; import com.jcwhatever.bukkit.generic.events.exceptions.EventManagerDisposedException; import com.jcwhatever.bukkit.generic.events.exceptions.HandlerAlreadyRegisteredException; import com.jcwhatever.bukkit.generic.events.exceptions.ListenerAlreadyRegisteredException; import com.jcwhatever.bukkit.generic.utils.PreCon; import javax.annotation.Nullable; import java.lang.reflect.Method; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Set; /** * Generics event manager. */ public class GenericsEventManager { private static GenericsEventManager _globalInstance; /** * Get the global event manager. 
*/ public static GenericsEventManager getGlobal() { if (_globalInstance == null) _globalInstance = new GenericsEventManager(true); return _globalInstance; } private final Map<Class<?>, EventHandlerCollection> _handlerMap = new HashMap<>(100); private final Map<GenericsEventListener, ListenerContainer> _listeners = new HashMap<>(100); private final GenericsEventManager _parent; private final boolean _isGlobal; private boolean _isDisposed; /** * Constructor. * * <p>Create a new event manager using the global event manager as parent.</p> */ public GenericsEventManager() { this(getGlobal()); } /** * Constructor. * * @param parent The parent event manager. */ public GenericsEventManager(@Nullable GenericsEventManager parent) { _parent = parent; _isGlobal = false; } /** * Private constructor for global event manager. */ private GenericsEventManager(boolean isGlobal) { _parent = null; _isGlobal = isGlobal; } /** * Register an event handler for the specified event. * * @param eventClass The event class. * @param priority The event priority. * @param handler The event handler. */ public void register(Class<? extends AbstractGenericsEvent> eventClass, GenericsEventPriority priority, EventHandler handler) { PreCon.notNull(eventClass); PreCon.notNull(priority); PreCon.notNull(handler); // cannot use a disposed event manager if (_isDisposed) throw new EventManagerDisposedException(); // get event handler collection for the event EventHandlerCollection handlers =_handlerMap.get(eventClass); // add an event handler collection if one does not exist if (handlers == null) { handlers = new EventHandlerCollection(); _handlerMap.put(eventClass, handlers); } // add the handler to the handler collection if (!handlers.add(handler, priority)) { throw new HandlerAlreadyRegisteredException(handler); } } /** * Register an event listener. * * @param eventListener The event listener. 
*/ public void register(GenericsEventListener eventListener) { PreCon.notNull(eventListener); // cannot use a disposed event manager if (_isDisposed) throw new EventManagerDisposedException(); // listeners can only be registered once. if (_listeners.containsKey(eventListener)) { throw new ListenerAlreadyRegisteredException(eventListener); } // create a listener container ListenerContainer listener = new ListenerContainer(eventListener); _listeners.put(eventListener, listener); // get all methods from listener so we can filter out the event handlers Method[] methods = eventListener.getClass().getDeclaredMethods(); // filter out the event handlers for (Method method : methods) { // event handlers must have a special annotation GenericsEventHandler annotation = method.getAnnotation(GenericsEventHandler.class); if (annotation == null) continue; // event handlers must have exactly one parameter Class<?>[] paramTypes = method.getParameterTypes(); if (paramTypes == null || paramTypes.length != 1) continue; // event handler parameter must be a type that extends AbstractGenericsEvent Class<?> eventClass = paramTypes[0]; if (!AbstractGenericsEvent.class.isAssignableFrom(eventClass)) continue; // get the event handler collection for the event EventHandlerCollection handlers = _handlerMap.get(eventClass); // create a new event handler collection if one is not present if (handlers == null) { handlers = new EventHandlerCollection(); _handlerMap.put(eventClass, handlers); } // add the event handler to the handlers collection try { handlers.add(eventListener, eventClass, method, annotation); } catch (IllegalAccessException e) { e.printStackTrace(); continue; } // add the handler to the listener container listener.addHandlers(handlers); } } /** * Unregister an event listener * * @param eventListener The event listener to unregister. */ public void unregister(GenericsEventListener eventListener) { PreCon.notNull(eventListener); // cannot use a disposed event manager. 
if (_isDisposed) return; // get the listener container. ListenerContainer listener = _listeners.remove(eventListener); if (listener == null) return; // unregister listener.unregister(); } /** * Unregister an event handler from the specified event. * * @param eventClass The event class. * @param handler The event handler to unregister. */ public void unregister(Class<? extends AbstractGenericsEvent> eventClass, EventHandler handler) { PreCon.notNull(eventClass); PreCon.notNull(handler); // cannot use a disposed event manager. if (_isDisposed) return; // get the handlers collection for the event EventHandlerCollection handlers =_handlerMap.get(eventClass); if (handlers == null) { return; } // remove the handler handlers.removeHandler(handler); } /** * Call an event. * * @param event The event to call. * @param <T> The event type which must extend {@code AbstractGenericsEvent} */ public <T extends AbstractGenericsEvent> T call(T event) { PreCon.notNull(event); // cannot use a disposed event manager if (_isDisposed) throw new EventManagerDisposedException(); // call event on parent first. if (_parent != null) { _parent.call(event); } // get event handler collection EventHandlerCollection handlers = _handlerMap.get(event.getClass()); if (handlers == null) return event; // call event on handlers. handlers.call(event); return event; } /** * Dispose the event manager. */ public void dispose() { // The global manager cannot be disposed. if (_isGlobal) throw new RuntimeException("Cannot dispose the global event manager."); _isDisposed = true; // clear event handlers on all handler collections for (EventHandlerCollection handlers : _handlerMap.values()) { handlers.clear(); } _handlerMap.clear(); // unregister all listeners for (ListenerContainer listener : _listeners.values()) { listener.unregister(); } _listeners.clear(); } /** * A container for a generics listener that contains the * event handler collections which contain the listeners * event handlers. 
*/ private static class ListenerContainer { private GenericsEventListener _listener; private Set<EventHandlerCollection> _handlers = new HashSet<>(50); /** * Constructor. * * @param listener The listener to encapsulate. */ ListenerContainer(GenericsEventListener listener) { _listener = listener; } /** * Add an event handlers collection that one of the * listeners event handlers have been added to so * it will have a means to unregister from the handlers * collection. * * @param handlers The handler collection to add. */ public void addHandlers(EventHandlerCollection handlers) { _handlers.add(handlers); } /** * Unregister the listener from the handler collections. */ public void unregister() { for (EventHandlerCollection handlers : _handlers) { handlers.removeListener(_listener); } _handlers.clear(); } } }
comment fix
src/com/jcwhatever/bukkit/generic/events/GenericsEventManager.java
comment fix
<ide><path>rc/com/jcwhatever/bukkit/generic/events/GenericsEventManager.java <ide> /** <ide> * Constructor. <ide> * <del> * @param parent The parent event manager. <add> * @param parent The parent event manager. The parent receives all <add> * event calls the child receives. <ide> */ <ide> public GenericsEventManager(@Nullable GenericsEventManager parent) { <ide> _parent = parent;
Java
apache-2.0
4f08993a03da9891f8065444ada9e1a33977d5a5
0
BD2K-DDI/ddi-annotation
package uk.ac.ebi.ddi.extservices.entrez.client.taxonomy; import com.google.common.collect.Lists; import org.apache.commons.lang3.ArrayUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.web.util.UriComponentsBuilder; import uk.ac.ebi.ddi.extservices.ebiprotein.utils.EBITaxonomyUtils; import uk.ac.ebi.ddi.extservices.entrez.config.TaxWsConfigProd; import uk.ac.ebi.ddi.extservices.entrez.ncbiresult.NCBIEResult; import uk.ac.ebi.ddi.extservices.entrez.ncbiresult.NCBITaxResult; import uk.ac.ebi.ddi.extservices.entrez.ncbiresult.NCBITaxonomyEntry; import uk.ac.ebi.ddi.extservices.entrez.ncbiresult.NCBITaxonomyEntrySet; import java.net.URI; import java.util.ArrayList; import java.util.List; import java.util.Set; /** * @author Yasset Perez-Riverol ypriverol */ public class TaxonomyWsClient extends WsClient { private static final Logger LOGGER = LoggerFactory.getLogger(TaxonomyWsClient.class); private static final int MAX_TAX_PER_REQUEST = 30; /** * Default constructor for Ws clients * * @param config */ public TaxonomyWsClient(TaxWsConfigProd config) { super(config); } public NCBITaxResult getNCBITax(String term) { if (term != null && term.length() > 0) { UriComponentsBuilder uriComponentsBuilder = UriComponentsBuilder.newInstance() .scheme(config.getProtocol()) .host(config.getHostName()) .path("/entrez/eutils/esearch.fcgi") .queryParam("db", "taxonomy") .queryParam("term", term) .queryParam("retmode", "JSON"); URI uri = uriComponentsBuilder.build().encode().toUri(); return getRetryTemplate().execute(context -> restTemplate.getForObject(uri, NCBITaxResult.class)); } return null; } public NCBITaxResult getNCBITax(Set<String> terms) { if (terms == null || terms.size() == 0) { return null; } List<List<String>> partitions = Lists.partition(new ArrayList<>(terms), MAX_TAX_PER_REQUEST); NCBITaxResult ncbiTaxResult = null; for (List<String> partition : partitions) { String query = String.join("+OR+", partition); if (ncbiTaxResult == null) 
{ ncbiTaxResult = getNCBITax(query); } else { NCBITaxResult tmp = getNCBITax(query); NCBIEResult oldResult = ncbiTaxResult.getResult(); oldResult.setCount(tmp.getResult().getCount() + oldResult.getCount()); oldResult.setIdList(ArrayUtils.addAll(oldResult.getIdList(), tmp.getResult().getIdList())); ncbiTaxResult.setResult(oldResult); } } return ncbiTaxResult; } public NCBITaxonomyEntrySet getTaxonomyEntryById(String id) { if (id != null && id.length() > 0) { UriComponentsBuilder uriComponentsBuilder = UriComponentsBuilder.newInstance() .scheme(config.getProtocol()) .host(config.getHostName()) .path("/entrez/eutils/efetch.fcgi") .queryParam("db", "taxonomy") .queryParam("id", id); URI uri = uriComponentsBuilder.build().encode().toUri(); return getRetryTemplate().execute(context -> restTemplate.getForObject(uri, NCBITaxonomyEntrySet.class)); } return null; } /** * Get the parent Entry for a current Entry * @param entry entry to search * @return Parent Entry */ public NCBITaxonomyEntrySet getParentByEntry(NCBITaxonomyEntry entry) { String url = entry.getParentTaxId(); //Todo: Needs to be removed in the future, this is for debugging LOGGER.debug(url); return this.restTemplate.getForObject(url, NCBITaxonomyEntrySet.class); } /** * Check if the Entry is a Non Rank species and return the parent term if is an Specie * or a Genues. 
See the NCBI Taxonomy Documentation https://www.ncbi.nlm.nih.gov/taxonomy * @param id of the Taxonomy * @return the Taxonomy of the NonRan parent Taxonomy */ public NCBITaxonomyEntrySet getParentForNonRanSpecie(String id) { NCBITaxonomyEntrySet entry = getTaxonomyEntryById(id); if ((entry != null) && (entry.getTaxSet() != null) && (entry.getTaxSet().length == 1) && entry.getTaxSet()[0].getRank().equalsIgnoreCase(EBITaxonomyUtils.EbiTaxRank.NO_RANK.getName())) { return entry; } if (entry != null && entry.getTaxSet() != null && entry.getTaxSet().length > 0) { return getParentSpecieOrGenuesTaxonomy(entry.getTaxSet()[0].getTaxId()); } NCBITaxonomyEntry ncbiTaxonomyEntry = new NCBITaxonomyEntry(); return getParentSpecieOrGenuesTaxonomy(ncbiTaxonomyEntry.getParentTaxId()); } public NCBITaxonomyEntrySet getParentSpecieOrGenuesTaxonomy(String id) { NCBITaxonomyEntrySet parent = getTaxonomyEntryById(id); if ((parent != null) && (parent.getTaxSet() != null) && (parent.getTaxSet().length == 1) && (EBITaxonomyUtils.EbiTaxRank.isSpeciesOrGenues(parent.getTaxSet()[0].getRank()))) { return parent; } if (parent != null && parent.getTaxSet() != null && parent.getTaxSet().length > 0) { return getTaxonomyEntryById(parent.getTaxSet()[0].getTaxId()); } NCBITaxonomyEntry ncbiTaxonomyEntry = new NCBITaxonomyEntry(); return getParentSpecieOrGenuesTaxonomy(ncbiTaxonomyEntry.getParentTaxId()); } }
src/main/java/uk/ac/ebi/ddi/extservices/entrez/client/taxonomy/TaxonomyWsClient.java
package uk.ac.ebi.ddi.extservices.entrez.client.taxonomy; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.web.util.UriComponentsBuilder; import uk.ac.ebi.ddi.extservices.ebiprotein.utils.EBITaxonomyUtils; import uk.ac.ebi.ddi.extservices.entrez.config.TaxWsConfigProd; import uk.ac.ebi.ddi.extservices.entrez.ncbiresult.NCBITaxResult; import uk.ac.ebi.ddi.extservices.entrez.ncbiresult.NCBITaxonomyEntry; import uk.ac.ebi.ddi.extservices.entrez.ncbiresult.NCBITaxonomyEntrySet; import java.net.URI; import java.util.Set; /** * @author Yasset Perez-Riverol ypriverol */ public class TaxonomyWsClient extends WsClient { private static final Logger LOGGER = LoggerFactory.getLogger(TaxonomyWsClient.class); /** * Default constructor for Ws clients * * @param config */ public TaxonomyWsClient(TaxWsConfigProd config) { super(config); } public NCBITaxResult getNCBITax(String term) { if (term != null && term.length() > 0) { UriComponentsBuilder uriComponentsBuilder = UriComponentsBuilder.newInstance() .scheme(config.getProtocol()) .host(config.getHostName()) .path("/entrez/eutils/esearch.fcgi") .queryParam("db", "taxonomy") .queryParam("term", term) .queryParam("retmode", "JSON"); URI uri = uriComponentsBuilder.build().encode().toUri(); return getRetryTemplate().execute(context -> restTemplate.getForObject(uri, NCBITaxResult.class)); } return null; } public NCBITaxResult getNCBITax(Set<String> terms) { StringBuilder query = new StringBuilder(); if (terms != null && terms.size() > 0) { for (String term : terms) { query.append("+OR+").append(term); } query = new StringBuilder(query.toString().replaceFirst("\\+OR\\+", "")); return getNCBITax(query.toString()); } return null; } public NCBITaxonomyEntrySet getTaxonomyEntryById(String id) { if (id != null && id.length() > 0) { UriComponentsBuilder uriComponentsBuilder = UriComponentsBuilder.newInstance() .scheme(config.getProtocol()) .host(config.getHostName()) .path("/entrez/eutils/efetch.fcgi") 
.queryParam("db", "taxonomy") .queryParam("id", id); URI uri = uriComponentsBuilder.build().encode().toUri(); return getRetryTemplate().execute(context -> restTemplate.getForObject(uri, NCBITaxonomyEntrySet.class)); } return null; } /** * Get the parent Entry for a current Entry * @param entry entry to search * @return Parent Entry */ public NCBITaxonomyEntrySet getParentByEntry(NCBITaxonomyEntry entry) { String url = entry.getParentTaxId(); //Todo: Needs to be removed in the future, this is for debugging LOGGER.debug(url); return this.restTemplate.getForObject(url, NCBITaxonomyEntrySet.class); } /** * Check if the Entry is a Non Rank species and return the parent term if is an Specie * or a Genues. See the NCBI Taxonomy Documentation https://www.ncbi.nlm.nih.gov/taxonomy * @param id of the Taxonomy * @return the Taxonomy of the NonRan parent Taxonomy */ public NCBITaxonomyEntrySet getParentForNonRanSpecie(String id) { NCBITaxonomyEntrySet entry = getTaxonomyEntryById(id); if ((entry != null) && (entry.getTaxSet() != null) && (entry.getTaxSet().length == 1) && entry.getTaxSet()[0].getRank().equalsIgnoreCase(EBITaxonomyUtils.EbiTaxRank.NO_RANK.getName())) { return entry; } if (entry != null && entry.getTaxSet() != null && entry.getTaxSet().length > 0) { return getParentSpecieOrGenuesTaxonomy(entry.getTaxSet()[0].getTaxId()); } NCBITaxonomyEntry ncbiTaxonomyEntry = new NCBITaxonomyEntry(); return getParentSpecieOrGenuesTaxonomy(ncbiTaxonomyEntry.getParentTaxId()); } public NCBITaxonomyEntrySet getParentSpecieOrGenuesTaxonomy(String id) { NCBITaxonomyEntrySet parent = getTaxonomyEntryById(id); if ((parent != null) && (parent.getTaxSet() != null) && (parent.getTaxSet().length == 1) && (EBITaxonomyUtils.EbiTaxRank.isSpeciesOrGenues(parent.getTaxSet()[0].getRank()))) { return parent; } if (parent != null && parent.getTaxSet() != null && parent.getTaxSet().length > 0) { return getTaxonomyEntryById(parent.getTaxSet()[0].getTaxId()); } NCBITaxonomyEntry ncbiTaxonomyEntry = 
new NCBITaxonomyEntry(); return getParentSpecieOrGenuesTaxonomy(ncbiTaxonomyEntry.getParentTaxId()); } }
Fixed 414 Request-URI Too Long problem
src/main/java/uk/ac/ebi/ddi/extservices/entrez/client/taxonomy/TaxonomyWsClient.java
Fixed 414 Request-URI Too Long problem
<ide><path>rc/main/java/uk/ac/ebi/ddi/extservices/entrez/client/taxonomy/TaxonomyWsClient.java <ide> package uk.ac.ebi.ddi.extservices.entrez.client.taxonomy; <ide> <add>import com.google.common.collect.Lists; <add>import org.apache.commons.lang3.ArrayUtils; <ide> import org.slf4j.Logger; <ide> import org.slf4j.LoggerFactory; <ide> import org.springframework.web.util.UriComponentsBuilder; <ide> import uk.ac.ebi.ddi.extservices.ebiprotein.utils.EBITaxonomyUtils; <ide> import uk.ac.ebi.ddi.extservices.entrez.config.TaxWsConfigProd; <add>import uk.ac.ebi.ddi.extservices.entrez.ncbiresult.NCBIEResult; <ide> import uk.ac.ebi.ddi.extservices.entrez.ncbiresult.NCBITaxResult; <ide> import uk.ac.ebi.ddi.extservices.entrez.ncbiresult.NCBITaxonomyEntry; <ide> import uk.ac.ebi.ddi.extservices.entrez.ncbiresult.NCBITaxonomyEntrySet; <ide> <ide> import java.net.URI; <add>import java.util.ArrayList; <add>import java.util.List; <ide> import java.util.Set; <ide> <ide> <ide> public class TaxonomyWsClient extends WsClient { <ide> <ide> private static final Logger LOGGER = LoggerFactory.getLogger(TaxonomyWsClient.class); <add> <add> private static final int MAX_TAX_PER_REQUEST = 30; <ide> <ide> /** <ide> * Default constructor for Ws clients <ide> } <ide> <ide> public NCBITaxResult getNCBITax(Set<String> terms) { <del> <del> StringBuilder query = new StringBuilder(); <del> if (terms != null && terms.size() > 0) { <del> for (String term : terms) { <del> query.append("+OR+").append(term); <add> if (terms == null || terms.size() == 0) { <add> return null; <add> } <add> List<List<String>> partitions = Lists.partition(new ArrayList<>(terms), MAX_TAX_PER_REQUEST); <add> NCBITaxResult ncbiTaxResult = null; <add> for (List<String> partition : partitions) { <add> String query = String.join("+OR+", partition); <add> if (ncbiTaxResult == null) { <add> ncbiTaxResult = getNCBITax(query); <add> } else { <add> NCBITaxResult tmp = getNCBITax(query); <add> NCBIEResult oldResult = 
ncbiTaxResult.getResult(); <add> oldResult.setCount(tmp.getResult().getCount() + oldResult.getCount()); <add> oldResult.setIdList(ArrayUtils.addAll(oldResult.getIdList(), tmp.getResult().getIdList())); <add> ncbiTaxResult.setResult(oldResult); <ide> } <del> query = new StringBuilder(query.toString().replaceFirst("\\+OR\\+", "")); <del> return getNCBITax(query.toString()); <ide> } <del> return null; <add> return ncbiTaxResult; <ide> } <ide> <ide> public NCBITaxonomyEntrySet getTaxonomyEntryById(String id) {
Java
apache-2.0
59ada1c7ad3317938054f82c8f4e58fbfd67619d
0
ahmadshahwan/cohorte-runtime,isandlaTech/cohorte-runtime,isandlaTech/cohorte-runtime,ahmadshahwan/cohorte-runtime,isandlaTech/cohorte-runtime,ahmadshahwan/cohorte-runtime,isandlaTech/cohorte-runtime,isandlaTech/cohorte-runtime,ahmadshahwan/cohorte-runtime,ahmadshahwan/cohorte-runtime,ahmadshahwan/cohorte-runtime,isandlaTech/cohorte-runtime
/** * File: CFileFinderSvc.java * Author: Thomas Calmant * Date: 6 sept. 2011 */ package org.psem2m.isolates.base.dirs.impl; import java.io.File; import java.util.ArrayList; import java.util.LinkedHashSet; import java.util.List; import java.util.Set; import org.psem2m.isolates.services.dirs.IFileFinderSvc; import org.psem2m.isolates.services.dirs.IPlatformDirsSvc; /** * Simple file finder : tries to find the given file in the platform main * directories. * * @author Thomas Calmant */ public class CFileFinderSvc implements IFileFinderSvc { /** Platform directories service */ private final IPlatformDirsSvc pPlatformDirs; /** * Constructor without injection * * @param aPlatformDirs * Platform directory service instance */ public CFileFinderSvc(final IPlatformDirsSvc aPlatformDirs) { pPlatformDirs = aPlatformDirs; } /** * Tries to extract a platform root path from the given. Non-null result * indicates that the given path is a root sub-path. * * @param aPath * Path to be transformed * @return The root-path if any, else null */ protected String extractPlatformPath(final String aPath) { if (aPath == null || aPath.isEmpty()) { return null; } for (final File rootDir : pPlatformDirs.getPlatformRootDirs()) { // Test if the path starts with the root path if (aPath.startsWith(rootDir.getPath())) { return aPath.substring(rootDir.getPath().length()); } } return null; } /* * (non-Javadoc) * * @see org.psem2m.isolates.base.dirs.IFileFinderSvc#find(java.io.File, * java.lang.String) */ @Override public File[] find(final File aBaseFile, final String aFileName) { // Use a set to avoid duplicates final Set<File> foundFiles = new LinkedHashSet<File>(); if (aBaseFile != null) { // Try to be relative to the parent, if the base file is a file File baseDir = null; if (aBaseFile.isFile()) { // Base file is a file : get its parent directory baseDir = aBaseFile.getParentFile(); } else if (aBaseFile.isDirectory()) { // Use the directory baseDir = aBaseFile; } if (baseDir != null) { // We have a 
valid base final File testRelFile = new File(baseDir, aFileName); if (testRelFile.exists()) { foundFiles.add(testRelFile); } /* * If the base file path begins with a platform root, remove it. * Allows cross conf/ repo/ references. */ final String platformSubDir = extractPlatformPath(baseDir .getPath()); if (platformSubDir != null) { foundFiles.addAll(internalFind(platformSubDir + File.separator + aFileName)); } } else { // Test the path directly in the platform dirs foundFiles.addAll(internalFind(aBaseFile.getPath() + File.separator + aFileName)); } } // In any case, try using only the file name foundFiles.addAll(internalFind(aFileName)); return foundFiles.toArray(new File[0]); } /* * (non-Javadoc) * * @see org.psem2m.isolates.base.dirs.IFileFinderSvc#find(java.lang.String) */ @Override public File[] find(final String aFileName) { final List<File> foundFiles = internalFind(aFileName); if (foundFiles.isEmpty()) { // Return null if no file was found return null; } return foundFiles.toArray(new File[0]); } /** * Tries to find the given file in the platform directories. Never returns * null. * * @param aFileName * Name of the file to search for * @return The list of the corresponding files (never null, can be empty) */ protected List<File> internalFind(final String aFileName) { final List<File> foundFiles = new ArrayList<File>(); // Test on each PSEM2M root directory for (final File rootDir : pPlatformDirs.getPlatformRootDirs()) { final File testFile = new File(rootDir, aFileName); if (testFile.exists()) { foundFiles.add(testFile); } } // Test as an absolute file path final File testFile = new File(aFileName); if (testFile.exists()) { foundFiles.add(testFile); } return foundFiles; } }
trunk/org.psem2m.isolates.base/src/org/psem2m/isolates/base/dirs/impl/CFileFinderSvc.java
/** * File: CFileFinderSvc.java * Author: Thomas Calmant * Date: 6 sept. 2011 */ package org.psem2m.isolates.base.dirs.impl; import java.io.File; import java.util.ArrayList; import java.util.LinkedHashSet; import java.util.List; import java.util.Set; import org.psem2m.isolates.services.dirs.IFileFinderSvc; import org.psem2m.isolates.services.dirs.IPlatformDirsSvc; /** * Simple file finder : tries to find the given file in the platform main * directories. * * @author Thomas Calmant */ public class CFileFinderSvc implements IFileFinderSvc { /** Platform directories service */ private IPlatformDirsSvc pPlatformDirs; /** * Constructor without injection * * @param aPlatformDirs * Platform directory service instance */ public CFileFinderSvc(final IPlatformDirsSvc aPlatformDirs) { pPlatformDirs = aPlatformDirs; } /** * Tries to extract a platform root path from the given. Non-null result * indicates that the given path is a root sub-path. * * @param aPath * Path to be transformed * @return The root-path if any, else null */ protected String extractPlatformPath(final String aPath) { if (aPath == null || aPath.isEmpty()) { return null; } for (File rootDir : pPlatformDirs.getPlatformRootDirs()) { // Test if the path starts with the root path if (aPath.startsWith(rootDir.getPath())) { return aPath.substring(rootDir.getPath().length()); } } return null; } /* * (non-Javadoc) * * @see org.psem2m.isolates.base.dirs.IFileFinderSvc#find(java.io.File, * java.lang.String) */ @Override public File[] find(final File aBaseFile, final String aFileName) { // Use a set to avoid duplicates final Set<File> foundFiles = new LinkedHashSet<File>(); if (aBaseFile != null) { // Try to be relative to the parent, if the base file is a file File baseDir = null; if (aBaseFile.isFile()) { // Base file is a file : get its parent directory baseDir = aBaseFile.getParentFile(); } else if (aBaseFile.isDirectory()) { // Use the directory baseDir = aBaseFile; } if (baseDir != null) { // We have a valid base 
final File testRelFile = new File(baseDir, aFileName); if (testRelFile.exists()) { foundFiles.add(testRelFile); } /* * If the base file path begins with a platform root, remove it. * Allows cross conf/ repo/ references. */ final String platformSubDir = extractPlatformPath(baseDir .getPath()); if (platformSubDir != null) { foundFiles.addAll(internalFind(platformSubDir + File.separator + aFileName)); } } else { // Test the path directly in the platform dirs foundFiles.addAll(internalFind(aBaseFile.getPath() + File.separator + aFileName)); } } // In any case, try using only the file name foundFiles.addAll(internalFind(aFileName)); return foundFiles.toArray(new File[0]); } /* * (non-Javadoc) * * @see org.psem2m.isolates.base.dirs.IFileFinderSvc#find(java.lang.String) */ @Override public File[] find(final String aFileName) { final List<File> foundFiles = internalFind(aFileName); if (foundFiles.isEmpty()) { // Return null if no file was found return null; } return foundFiles.toArray(new File[0]); } /** * Tries to find the given file in the platform directories. Never returns * null. * * @param aFileName * Name of the file to search for * @return The list of the corresponding files (never null, can be empty) */ protected List<File> internalFind(final String aFileName) { final List<File> foundFiles = new ArrayList<File>(); // Test on each PSEM2M root directory for (File rootDir : pPlatformDirs.getPlatformRootDirs()) { final File testFile = new File(rootDir, aFileName); if (testFile.exists()) { foundFiles.add(testFile); } } return foundFiles; } }
Correction du FileFinder pour tester le chemin complet Le dernier chemin cherché est le nom donné directement, pour gérer les noms de fichiers complets
trunk/org.psem2m.isolates.base/src/org/psem2m/isolates/base/dirs/impl/CFileFinderSvc.java
Correction du FileFinder pour tester le chemin complet
<ide><path>runk/org.psem2m.isolates.base/src/org/psem2m/isolates/base/dirs/impl/CFileFinderSvc.java <ide> */ <ide> public class CFileFinderSvc implements IFileFinderSvc { <ide> <del> /** Platform directories service */ <del> private IPlatformDirsSvc pPlatformDirs; <add> /** Platform directories service */ <add> private final IPlatformDirsSvc pPlatformDirs; <ide> <del> /** <del> * Constructor without injection <del> * <del> * @param aPlatformDirs <del> * Platform directory service instance <del> */ <del> public CFileFinderSvc(final IPlatformDirsSvc aPlatformDirs) { <add> /** <add> * Constructor without injection <add> * <add> * @param aPlatformDirs <add> * Platform directory service instance <add> */ <add> public CFileFinderSvc(final IPlatformDirsSvc aPlatformDirs) { <ide> <del> pPlatformDirs = aPlatformDirs; <del> } <add> pPlatformDirs = aPlatformDirs; <add> } <ide> <del> /** <del> * Tries to extract a platform root path from the given. Non-null result <del> * indicates that the given path is a root sub-path. <del> * <del> * @param aPath <del> * Path to be transformed <del> * @return The root-path if any, else null <del> */ <del> protected String extractPlatformPath(final String aPath) { <add> /** <add> * Tries to extract a platform root path from the given. Non-null result <add> * indicates that the given path is a root sub-path. 
<add> * <add> * @param aPath <add> * Path to be transformed <add> * @return The root-path if any, else null <add> */ <add> protected String extractPlatformPath(final String aPath) { <ide> <del> if (aPath == null || aPath.isEmpty()) { <del> return null; <del> } <add> if (aPath == null || aPath.isEmpty()) { <add> return null; <add> } <ide> <del> for (File rootDir : pPlatformDirs.getPlatformRootDirs()) { <del> // Test if the path starts with the root path <del> if (aPath.startsWith(rootDir.getPath())) { <del> return aPath.substring(rootDir.getPath().length()); <del> } <del> } <add> for (final File rootDir : pPlatformDirs.getPlatformRootDirs()) { <add> // Test if the path starts with the root path <add> if (aPath.startsWith(rootDir.getPath())) { <add> return aPath.substring(rootDir.getPath().length()); <add> } <add> } <ide> <del> return null; <del> } <add> return null; <add> } <ide> <del> /* <del> * (non-Javadoc) <del> * <del> * @see org.psem2m.isolates.base.dirs.IFileFinderSvc#find(java.io.File, <del> * java.lang.String) <del> */ <del> @Override <del> public File[] find(final File aBaseFile, final String aFileName) { <add> /* <add> * (non-Javadoc) <add> * <add> * @see org.psem2m.isolates.base.dirs.IFileFinderSvc#find(java.io.File, <add> * java.lang.String) <add> */ <add> @Override <add> public File[] find(final File aBaseFile, final String aFileName) { <ide> <del> // Use a set to avoid duplicates <del> final Set<File> foundFiles = new LinkedHashSet<File>(); <add> // Use a set to avoid duplicates <add> final Set<File> foundFiles = new LinkedHashSet<File>(); <ide> <del> if (aBaseFile != null) { <del> // Try to be relative to the parent, if the base file is a file <del> File baseDir = null; <add> if (aBaseFile != null) { <add> // Try to be relative to the parent, if the base file is a file <add> File baseDir = null; <ide> <del> if (aBaseFile.isFile()) { <del> // Base file is a file : get its parent directory <del> baseDir = aBaseFile.getParentFile(); <add> if 
(aBaseFile.isFile()) { <add> // Base file is a file : get its parent directory <add> baseDir = aBaseFile.getParentFile(); <ide> <del> } else if (aBaseFile.isDirectory()) { <del> // Use the directory <del> baseDir = aBaseFile; <del> } <add> } else if (aBaseFile.isDirectory()) { <add> // Use the directory <add> baseDir = aBaseFile; <add> } <ide> <del> if (baseDir != null) { <del> // We have a valid base <del> final File testRelFile = new File(baseDir, aFileName); <del> if (testRelFile.exists()) { <del> foundFiles.add(testRelFile); <del> } <add> if (baseDir != null) { <add> // We have a valid base <add> final File testRelFile = new File(baseDir, aFileName); <add> if (testRelFile.exists()) { <add> foundFiles.add(testRelFile); <add> } <ide> <del> /* <del> * If the base file path begins with a platform root, remove it. <del> * Allows cross conf/ repo/ references. <del> */ <del> final String platformSubDir = extractPlatformPath(baseDir <del> .getPath()); <del> if (platformSubDir != null) { <del> foundFiles.addAll(internalFind(platformSubDir <del> + File.separator + aFileName)); <add> /* <add> * If the base file path begins with a platform root, remove it. <add> * Allows cross conf/ repo/ references. 
<add> */ <add> final String platformSubDir = extractPlatformPath(baseDir <add> .getPath()); <add> if (platformSubDir != null) { <add> foundFiles.addAll(internalFind(platformSubDir <add> + File.separator + aFileName)); <ide> <del> } <add> } <ide> <del> } else { <del> // Test the path directly in the platform dirs <del> foundFiles.addAll(internalFind(aBaseFile.getPath() <del> + File.separator + aFileName)); <del> } <del> } <add> } else { <add> // Test the path directly in the platform dirs <add> foundFiles.addAll(internalFind(aBaseFile.getPath() <add> + File.separator + aFileName)); <add> } <add> } <ide> <del> // In any case, try using only the file name <del> foundFiles.addAll(internalFind(aFileName)); <add> // In any case, try using only the file name <add> foundFiles.addAll(internalFind(aFileName)); <ide> <del> return foundFiles.toArray(new File[0]); <del> } <add> return foundFiles.toArray(new File[0]); <add> } <ide> <del> /* <del> * (non-Javadoc) <del> * <del> * @see org.psem2m.isolates.base.dirs.IFileFinderSvc#find(java.lang.String) <del> */ <del> @Override <del> public File[] find(final String aFileName) { <add> /* <add> * (non-Javadoc) <add> * <add> * @see org.psem2m.isolates.base.dirs.IFileFinderSvc#find(java.lang.String) <add> */ <add> @Override <add> public File[] find(final String aFileName) { <ide> <del> final List<File> foundFiles = internalFind(aFileName); <del> if (foundFiles.isEmpty()) { <del> // Return null if no file was found <del> return null; <del> } <add> final List<File> foundFiles = internalFind(aFileName); <add> if (foundFiles.isEmpty()) { <add> // Return null if no file was found <add> return null; <add> } <ide> <del> return foundFiles.toArray(new File[0]); <del> } <add> return foundFiles.toArray(new File[0]); <add> } <ide> <del> /** <del> * Tries to find the given file in the platform directories. Never returns <del> * null. 
<del> * <del> * @param aFileName <del> * Name of the file to search for <del> * @return The list of the corresponding files (never null, can be empty) <del> */ <del> protected List<File> internalFind(final String aFileName) { <add> /** <add> * Tries to find the given file in the platform directories. Never returns <add> * null. <add> * <add> * @param aFileName <add> * Name of the file to search for <add> * @return The list of the corresponding files (never null, can be empty) <add> */ <add> protected List<File> internalFind(final String aFileName) { <ide> <del> final List<File> foundFiles = new ArrayList<File>(); <add> final List<File> foundFiles = new ArrayList<File>(); <ide> <del> // Test on each PSEM2M root directory <del> for (File rootDir : pPlatformDirs.getPlatformRootDirs()) { <add> // Test on each PSEM2M root directory <add> for (final File rootDir : pPlatformDirs.getPlatformRootDirs()) { <ide> <del> final File testFile = new File(rootDir, aFileName); <del> if (testFile.exists()) { <del> foundFiles.add(testFile); <del> } <del> } <add> final File testFile = new File(rootDir, aFileName); <add> if (testFile.exists()) { <add> foundFiles.add(testFile); <add> } <add> } <ide> <del> return foundFiles; <del> } <add> // Test as an absolute file path <add> final File testFile = new File(aFileName); <add> if (testFile.exists()) { <add> foundFiles.add(testFile); <add> } <add> <add> return foundFiles; <add> } <ide> }
Java
apache-2.0
e389daea4b9cfc417d6ddee6125465e45e326254
0
kalaspuffar/pdfbox,kalaspuffar/pdfbox,apache/pdfbox,apache/pdfbox
/* * Copyright 2015 The Apache Software Foundation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.pdfbox.examples.pdfa; import junit.framework.TestCase; import java.io.File; import java.io.FileInputStream; import java.security.KeyStore; import org.apache.pdfbox.examples.pdmodel.CreatePDFA; import org.apache.pdfbox.examples.signature.CreateSignature; import org.apache.pdfbox.pdmodel.PDDocument; import org.apache.pdfbox.pdmodel.PDDocumentCatalog; import org.apache.pdfbox.pdmodel.common.PDMetadata; import org.apache.pdfbox.preflight.PreflightDocument; import org.apache.pdfbox.preflight.ValidationResult; import org.apache.pdfbox.preflight.ValidationResult.ValidationError; import org.apache.pdfbox.preflight.parser.PreflightParser; import org.apache.xmpbox.XMPMetadata; import org.apache.xmpbox.schema.DublinCoreSchema; import org.apache.xmpbox.xml.DomXmpParser; /** * * @author Tilman Hausherr */ public class CreatePDFATest extends TestCase { private final String outDir = "target/test-output"; @Override protected void setUp() throws Exception { super.setUp(); new File(outDir).mkdirs(); } /** * Test of doIt method of class CreatePDFA. 
*/ public void testCreatePDFA() throws Exception { System.out.println("testCreatePDFA"); String pdfaFilename = outDir + "/PDFA.pdf"; String signedPdfaFilename = outDir + "/PDFA_signed.pdf"; String keystorePath = "src/test/resources/org/apache/pdfbox/examples/signature/keystore.p12"; String message = "The quick brown fox jumps over the lazy dog äöüÄÖÜß @°^²³ {[]}"; String dir = "../pdfbox/src/main/resources/org/apache/pdfbox/resources/ttf/"; String fontfile = dir + "LiberationSans-Regular.ttf"; CreatePDFA.main(new String[] { pdfaFilename, message, fontfile }); // sign PDF - because we want to make sure that the signed PDF is also PDF/A-1b KeyStore keystore = KeyStore.getInstance("PKCS12"); keystore.load(new FileInputStream(keystorePath), "123456".toCharArray()); CreateSignature signing = new CreateSignature(keystore, "123456".toCharArray()); signing.signDetached(new File(pdfaFilename), new File(signedPdfaFilename)); // Verify that it is PDF/A-1b PreflightParser preflightParser = new PreflightParser(new File(signedPdfaFilename)); preflightParser.parse(); try (PreflightDocument preflightDocument = preflightParser.getPreflightDocument()) { preflightDocument.validate(); ValidationResult result = preflightDocument.getResult(); for (ValidationError ve : result.getErrorsList()) { System.err.println(ve.getErrorCode() + ": " + ve.getDetails()); } assertTrue("PDF file created with CreatePDFA is not valid PDF/A-1b", result.isValid()); } // check the XMP metadata try (PDDocument document = PDDocument.load(new File(pdfaFilename))) { PDDocumentCatalog catalog = document.getDocumentCatalog(); PDMetadata meta = catalog.getMetadata(); DomXmpParser xmpParser = new DomXmpParser(); XMPMetadata metadata = xmpParser.parse(meta.createInputStream()); DublinCoreSchema dc = metadata.getDublinCoreSchema(); assertEquals(pdfaFilename, dc.getTitle()); } } }
examples/src/test/java/org/apache/pdfbox/examples/pdfa/CreatePDFATest.java
/* * Copyright 2015 The Apache Software Foundation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.pdfbox.examples.pdfa; import junit.framework.TestCase; import java.io.File; import org.apache.pdfbox.examples.pdmodel.CreatePDFA; import org.apache.pdfbox.pdmodel.PDDocument; import org.apache.pdfbox.pdmodel.PDDocumentCatalog; import org.apache.pdfbox.pdmodel.common.PDMetadata; import org.apache.pdfbox.preflight.PreflightDocument; import org.apache.pdfbox.preflight.ValidationResult; import org.apache.pdfbox.preflight.ValidationResult.ValidationError; import org.apache.pdfbox.preflight.parser.PreflightParser; import org.apache.xmpbox.XMPMetadata; import org.apache.xmpbox.schema.DublinCoreSchema; import org.apache.xmpbox.xml.DomXmpParser; /** * * @author Tilman Hausherr */ public class CreatePDFATest extends TestCase { private final String outDir = "target/test-output"; @Override protected void setUp() throws Exception { super.setUp(); new File(outDir).mkdirs(); } /** * Test of doIt method of class CreatePDFA. 
*/ public void testCreatePDFA() throws Exception { System.out.println("testCreatePDFA"); String pdfaFilename = outDir + "/PDFA.pdf"; String message = "The quick brown fox jumps over the lazy dog äöüÄÖÜß @°^²³ {[]}"; String dir = "../pdfbox/src/main/resources/org/apache/pdfbox/resources/ttf/"; String fontfile = dir + "LiberationSans-Regular.ttf"; CreatePDFA.main(new String[] { pdfaFilename, message, fontfile }); PreflightParser preflightParser = new PreflightParser(new File(pdfaFilename)); preflightParser.parse(); try (PreflightDocument preflightDocument = preflightParser.getPreflightDocument()) { preflightDocument.validate(); ValidationResult result = preflightDocument.getResult(); for (ValidationError ve : result.getErrorsList()) { System.err.println(ve.getErrorCode() + ": " + ve.getDetails()); } assertTrue("PDF file created with CreatePDFA is not valid PDF/A-1b", result.isValid()); } // check the XMP metadata try (PDDocument document = PDDocument.load(new File(pdfaFilename))) { PDDocumentCatalog catalog = document.getDocumentCatalog(); PDMetadata meta = catalog.getMetadata(); DomXmpParser xmpParser = new DomXmpParser(); XMPMetadata metadata = xmpParser.parse(meta.createInputStream()); DublinCoreSchema dc = metadata.getDublinCoreSchema(); assertEquals(pdfaFilename, dc.getTitle()); } } }
PDFBOX-4689: add test for preflight to make sure that signed PDF/A-1b is still PDF/A-1b git-svn-id: c3ad59981690829a43dc34c293c4e2cd04bcd994@1869992 13f79535-47bb-0310-9956-ffa450edef68
examples/src/test/java/org/apache/pdfbox/examples/pdfa/CreatePDFATest.java
PDFBOX-4689: add test for preflight to make sure that signed PDF/A-1b is still PDF/A-1b
<ide><path>xamples/src/test/java/org/apache/pdfbox/examples/pdfa/CreatePDFATest.java <ide> import junit.framework.TestCase; <ide> <ide> import java.io.File; <add>import java.io.FileInputStream; <add>import java.security.KeyStore; <ide> import org.apache.pdfbox.examples.pdmodel.CreatePDFA; <add>import org.apache.pdfbox.examples.signature.CreateSignature; <ide> import org.apache.pdfbox.pdmodel.PDDocument; <ide> import org.apache.pdfbox.pdmodel.PDDocumentCatalog; <ide> import org.apache.pdfbox.pdmodel.common.PDMetadata; <ide> { <ide> System.out.println("testCreatePDFA"); <ide> String pdfaFilename = outDir + "/PDFA.pdf"; <add> String signedPdfaFilename = outDir + "/PDFA_signed.pdf"; <add> String keystorePath = "src/test/resources/org/apache/pdfbox/examples/signature/keystore.p12"; <ide> String message = "The quick brown fox jumps over the lazy dog äöüÄÖÜß @°^²³ {[]}"; <ide> String dir = "../pdfbox/src/main/resources/org/apache/pdfbox/resources/ttf/"; <ide> String fontfile = dir + "LiberationSans-Regular.ttf"; <ide> CreatePDFA.main(new String[] { pdfaFilename, message, fontfile }); <del> <del> PreflightParser preflightParser = new PreflightParser(new File(pdfaFilename)); <add> <add> // sign PDF - because we want to make sure that the signed PDF is also PDF/A-1b <add> KeyStore keystore = KeyStore.getInstance("PKCS12"); <add> keystore.load(new FileInputStream(keystorePath), "123456".toCharArray()); <add> CreateSignature signing = new CreateSignature(keystore, "123456".toCharArray()); <add> signing.signDetached(new File(pdfaFilename), new File(signedPdfaFilename)); <add> <add> // Verify that it is PDF/A-1b <add> PreflightParser preflightParser = new PreflightParser(new File(signedPdfaFilename)); <ide> preflightParser.parse(); <ide> try (PreflightDocument preflightDocument = preflightParser.getPreflightDocument()) <ide> {
JavaScript
apache-2.0
a56597b51cebf87c80dbe0c9d4cc0048311de6e0
0
mitmedialab/MediaCloud-Web-Tools,mitmedialab/MediaCloud-Web-Tools,mitmedialab/MediaCloud-Web-Tools,mitmedialab/MediaCloud-Web-Tools
import { setAppName } from './config'; import { setAppColors } from './styles/colors'; import routes from './routes/sourceRoutes'; import initializeApp from './index'; /** * This serves as the primary entry point to the Media Cloud Source Manager app. */ setAppName('sources'); setAppColors({ light: '#4b9fcb', dark: '#3c97bd', darker: '#1c779d', }); initializeApp(routes);
src/sourcesIndex.js
import { setAppName } from './config'; import { setAppColors } from './styles/colors'; import routes from './routes/sourceRoutes'; import initializeApp from './index'; /** * This serves as the primary entry point to the Media Cloud Source Manager app. */ setAppName('sources'); setAppColors({ light: '#4b9fcb', dark: '#3c97bd', }); initializeApp(routes);
using #1c779d for sources rollover color for now (#45)
src/sourcesIndex.js
using #1c779d for sources rollover color for now (#45)
<ide><path>rc/sourcesIndex.js <ide> setAppColors({ <ide> light: '#4b9fcb', <ide> dark: '#3c97bd', <add> darker: '#1c779d', <ide> }); <ide> <ide> initializeApp(routes);
Java
apache-2.0
7be6cfd81362fb927666e93f4d20590ab4017910
0
ecsec/open-ecard,ecsec/open-ecard,ecsec/open-ecard
/**************************************************************************** * Copyright (C) 2012-2014 ecsec GmbH. * All rights reserved. * Contact: ecsec GmbH ([email protected]) * * This file is part of the Open eCard App. * * GNU General Public License Usage * This file may be used under the terms of the GNU General Public * License version 3.0 as published by the Free Software Foundation * and appearing in the file LICENSE.GPL included in the packaging of * this file. Please review the following information to ensure the * GNU General Public License version 3.0 requirements will be met: * http://www.gnu.org/copyleft/gpl.html. * * Other Usage * Alternatively, this file may be used in accordance with the terms * and conditions contained in a signed written agreement between * you and ecsec GmbH. * ***************************************************************************/ package org.openecard.sal.protocol.eac; import iso.std.iso_iec._24727.tech.schema.ConnectionHandleType; import iso.std.iso_iec._24727.tech.schema.DIDAuthenticate; import iso.std.iso_iec._24727.tech.schema.DIDAuthenticateResponse; import iso.std.iso_iec._24727.tech.schema.DIDAuthenticationDataType; import iso.std.iso_iec._24727.tech.schema.DIDStructureType; import iso.std.iso_iec._24727.tech.schema.GetIFDCapabilities; import iso.std.iso_iec._24727.tech.schema.GetIFDCapabilitiesResponse; import iso.std.iso_iec._24727.tech.schema.InputAPDUInfoType; import iso.std.iso_iec._24727.tech.schema.SlotCapabilityType; import iso.std.iso_iec._24727.tech.schema.Transmit; import iso.std.iso_iec._24727.tech.schema.TransmitResponse; import java.net.MalformedURLException; import java.net.URL; import java.security.cert.CertificateException; import java.util.Arrays; import java.util.List; import java.util.Map; import javax.smartcardio.ResponseAPDU; import javax.xml.bind.JAXBException; import oasis.names.tc.dss._1_0.core.schema.Result; import org.openecard.addon.sal.FunctionType; import 
org.openecard.addon.sal.ProtocolStep; import org.openecard.binding.tctoken.TR03112Keys; import org.openecard.bouncycastle.crypto.tls.Certificate; import org.openecard.common.DynamicContext; import org.openecard.common.ECardConstants; import org.openecard.common.I18n; import org.openecard.common.WSHelper; import org.openecard.common.anytype.AuthDataMap; import org.openecard.common.ifd.PACECapabilities; import org.openecard.common.interfaces.Dispatcher; import org.openecard.common.interfaces.EventManager; import org.openecard.common.sal.state.CardStateEntry; import org.openecard.common.util.ByteUtils; import org.openecard.common.util.JAXBSchemaValidator; import org.openecard.common.util.Pair; import org.openecard.common.util.Promise; import org.openecard.common.util.TR03112Utils; import org.openecard.crypto.common.asn1.cvc.CHAT; import org.openecard.crypto.common.asn1.cvc.CHATVerifier; import org.openecard.crypto.common.asn1.cvc.CardVerifiableCertificate; import org.openecard.crypto.common.asn1.cvc.CardVerifiableCertificateChain; import org.openecard.crypto.common.asn1.cvc.CardVerifiableCertificateVerifier; import org.openecard.crypto.common.asn1.cvc.CertificateDescription; import org.openecard.crypto.common.asn1.eac.AuthenticatedAuxiliaryData; import org.openecard.crypto.common.asn1.eac.SecurityInfos; import org.openecard.gui.ResultStatus; import org.openecard.gui.UserConsent; import org.openecard.gui.UserConsentNavigator; import org.openecard.gui.definition.UserConsentDescription; import org.openecard.gui.executor.ExecutionEngine; import org.openecard.sal.protocol.eac.anytype.EAC1InputType; import org.openecard.sal.protocol.eac.anytype.EAC1OutputType; import org.openecard.sal.protocol.eac.anytype.PACEMarkerType; import org.openecard.sal.protocol.eac.anytype.PACEOutputType; import org.openecard.sal.protocol.eac.anytype.PasswordID; import org.openecard.sal.protocol.eac.gui.CHATStep; import org.openecard.sal.protocol.eac.gui.CVCStep; import 
org.openecard.sal.protocol.eac.gui.CVCStepAction; import org.openecard.sal.protocol.eac.gui.CardMonitor; import org.openecard.sal.protocol.eac.gui.CardRemovedFilter; import org.openecard.sal.protocol.eac.gui.ErrorStep; import org.openecard.sal.protocol.eac.gui.PINStep; import org.openecard.sal.protocol.eac.gui.ProcessingStep; import org.openecard.sal.protocol.eac.gui.ProcessingStepAction; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Implements PACE protocol step according to BSI TR-03112-7. * * @see "BSI-TR-03112, version 1.1.2., part 7, section 4.6.5." * @author Tobias Wich * @author Moritz Horsch * @author Dirk Petrautzki */ public class PACEStep implements ProtocolStep<DIDAuthenticate, DIDAuthenticateResponse> { private static final Logger logger = LoggerFactory.getLogger(PACEStep.class.getName()); private static final I18n lang = I18n.getTranslation("eac"); private static final I18n langPace = I18n.getTranslation("pace"); // GUI translation constants private static final String TITLE = "eac_user_consent_title"; private final Dispatcher dispatcher; private final UserConsent gui; private final EventManager eventManager; /** * Creates a new PACE protocol step. 
* * @param dispatcher Dispatcher * @param gui GUI * @param eventManager */ public PACEStep(Dispatcher dispatcher, UserConsent gui, EventManager eventManager) { this.dispatcher = dispatcher; this.gui = gui; this.eventManager = eventManager; } @Override public FunctionType getFunctionType() { return FunctionType.DIDAuthenticate; } @Override public DIDAuthenticateResponse perform(DIDAuthenticate request, Map<String, Object> internalData) { // get context to save values in DynamicContext dynCtx = DynamicContext.getInstance(TR03112Keys.INSTANCE_KEY); DIDAuthenticate didAuthenticate = request; DIDAuthenticateResponse response = new DIDAuthenticateResponse(); ConnectionHandleType conHandle = (ConnectionHandleType) dynCtx.get(TR03112Keys.CONNECTION_HANDLE); try { JAXBSchemaValidator valid = (JAXBSchemaValidator) dynCtx.getPromise(EACProtocol.SCHEMA_VALIDATOR).deref(); boolean messageValid = valid.validateObject(request); if (! messageValid) { String msg = "Validation of the EAC1InputType message failed."; logger.error(msg); dynCtx.put(EACProtocol.AUTHENTICATION_FAILED, true); response.setResult(WSHelper.makeResultError(ECardConstants.Minor.App.INCORRECT_PARM, msg)); return response; } } catch (JAXBException ex) { String msg = "Validation of the EAC1InputType message failed due to invalid input data."; logger.error(msg, ex); dynCtx.put(EACProtocol.AUTHENTICATION_FAILED, true); response.setResult(WSHelper.makeResultError(ECardConstants.Minor.App.INT_ERROR, msg)); return response; } catch (InterruptedException ex) { String msg = "Thread interrupted while waiting for schema validator instance."; logger.error(msg, ex); dynCtx.put(EACProtocol.AUTHENTICATION_FAILED, true); response.setResult(WSHelper.makeResultError(ECardConstants.Minor.App.INT_ERROR, msg)); return response; } if (! 
ByteUtils.compare(conHandle.getSlotHandle(), didAuthenticate.getConnectionHandle().getSlotHandle())) { String msg = "Invalid connection handle given in DIDAuthenticate message."; Result r = WSHelper.makeResultError(ECardConstants.Minor.SAL.UNKNOWN_HANDLE, msg); response.setResult(r); dynCtx.put(EACProtocol.AUTHENTICATION_FAILED, true); return response; } byte[] slotHandle = conHandle.getSlotHandle(); dynCtx.put(EACProtocol.SLOT_HANDLE, slotHandle); dynCtx.put(EACProtocol.DISPATCHER, dispatcher); try { EAC1InputType eac1Input = new EAC1InputType(didAuthenticate.getAuthenticationProtocolData()); EAC1OutputType eac1Output = eac1Input.getOutputType(); AuthenticatedAuxiliaryData aad = new AuthenticatedAuxiliaryData(eac1Input.getAuthenticatedAuxiliaryData()); byte pinID = PasswordID.valueOf(didAuthenticate.getDIDName()).getByte(); final String passwordType = PasswordID.parse(pinID).getString(); // determine PACE capabilities of the terminal boolean nativePace = genericPACESupport(conHandle); dynCtx.put(EACProtocol.IS_NATIVE_PACE, nativePace); // Certificate chain CardVerifiableCertificateChain certChain = new CardVerifiableCertificateChain(eac1Input.getCertificates()); byte[] rawCertificateDescription = eac1Input.getCertificateDescription(); CertificateDescription certDescription = CertificateDescription.getInstance(rawCertificateDescription); // put CertificateDescription into DynamicContext which is needed for later checks dynCtx.put(TR03112Keys.ESERVICE_CERTIFICATE_DESC, certDescription); // according to BSI-INSTANCE_KEY-7 we MUST perform some checks immediately after receiving the eService cert Result activationChecksResult = performChecks(certDescription, dynCtx); if (! 
ECardConstants.Major.OK.equals(activationChecksResult.getResultMajor())) { response.setResult(activationChecksResult); dynCtx.put(EACProtocol.AUTHENTICATION_FAILED, true); return response; } CHAT requiredCHAT = new CHAT(eac1Input.getRequiredCHAT()); CHAT optionalCHAT = new CHAT(eac1Input.getOptionalCHAT()); // get the PACEMarker CardStateEntry cardState = (CardStateEntry) internalData.get(EACConstants.IDATA_CARD_STATE_ENTRY); PACEMarkerType paceMarker = getPaceMarker(cardState, passwordType); dynCtx.put(EACProtocol.PACE_MARKER, paceMarker); // Verify that the certificate description matches the terminal certificate CardVerifiableCertificate taCert = certChain.getTerminalCertificate(); CardVerifiableCertificateVerifier.verify(taCert, certDescription); // Verify that the required CHAT matches the terminal certificate's CHAT CHAT taCHAT = taCert.getCHAT(); // Check that we got an authentication terminal terminal certificate. We abort the process in case there is // an other role. if (taCHAT.getRole() != CHAT.Role.AUTHENTICATION_TERMINAL) { String msg = "Unsupported terminal type in Terminal Certificate referenced. 
Refernced terminal type is " + taCHAT.getRole().toString() + "."; response.setResult(WSHelper.makeResultError(ECardConstants.Minor.App.PARM_ERROR, msg)); dynCtx.put(EACProtocol.AUTHENTICATION_FAILED, true); return response; } CHATVerifier.verfiy(taCHAT, requiredCHAT); // remove overlapping values from optional chat optionalCHAT.restrictAccessRights(taCHAT); // Prepare data in DIDAuthenticate for GUI final EACData eacData = new EACData(); eacData.didRequest = didAuthenticate; eacData.certificate = certChain.getTerminalCertificate(); eacData.certificateDescription = certDescription; eacData.rawCertificateDescription = rawCertificateDescription; eacData.transactionInfo = eac1Input.getTransactionInfo(); eacData.requiredCHAT = requiredCHAT; eacData.optionalCHAT = optionalCHAT; eacData.selectedCHAT = requiredCHAT; eacData.aad = aad; eacData.pinID = pinID; eacData.passwordType = passwordType; dynCtx.put(EACProtocol.EAC_DATA, eacData); // get initial pin status InputAPDUInfoType input = new InputAPDUInfoType(); input.setInputAPDU(new byte[] {(byte) 0x00, (byte) 0x22, (byte) 0xC1, (byte) 0xA4, (byte) 0x0F, (byte) 0x80, (byte) 0x0A, (byte) 0x04, (byte) 0x00, (byte) 0x7F, (byte) 0x00, (byte) 0x07, (byte) 0x02, (byte) 0x02, (byte) 0x04, (byte) 0x02, (byte) 0x02, (byte) 0x83, (byte) 0x01, (byte) 0x03}); input.getAcceptableStatusCode().add(new byte[] {(byte) 0x90, (byte) 0x00}); // pin activated 3 tries left input.getAcceptableStatusCode().add(new byte[] {(byte) 0x63, (byte) 0xC2}); // pin activated 2 tries left input.getAcceptableStatusCode().add(new byte[] {(byte) 0x63, (byte) 0xC1}); // pin suspended 1 try left CAN // needs to be entered input.getAcceptableStatusCode().add(new byte[] {(byte) 0x63, (byte) 0xC0}); // pin blocked 0 tries left input.getAcceptableStatusCode().add(new byte[] {(byte) 0x62, (byte) 0x83}); // pin deaktivated Transmit transmit = new Transmit(); transmit.setSlotHandle(slotHandle); transmit.getInputAPDUInfo().add(input); TransmitResponse pinCheckResponse 
= (TransmitResponse) dispatcher.deliver(transmit); byte[] output = pinCheckResponse.getOutputAPDU().get(0); ResponseAPDU outputApdu = new ResponseAPDU(output); byte[] status = {(byte) outputApdu.getSW1(), (byte) outputApdu.getSW2()}; dynCtx.put(EACProtocol.PIN_STATUS_BYTES, status); boolean pinUsable = ! Arrays.equals(status, new byte[]{(byte) 0x63, (byte) 0xC0}); // define GUI depending on the PIN status final UserConsentDescription uc = new UserConsentDescription(lang.translationForKey(TITLE)); if (pinUsable) { // create GUI and init executor CardMonitor cardMon = new CardMonitor(); CardRemovedFilter filter = new CardRemovedFilter(conHandle.getIFDName(), conHandle.getSlotIndex()); eventManager.register(cardMon, filter); CVCStep cvcStep = new CVCStep(eacData); cvcStep.setBackgroundTask(cardMon); CVCStepAction cvcStepAction = new CVCStepAction(cvcStep); cvcStep.setAction(cvcStepAction); uc.getSteps().add(cvcStep); uc.getSteps().add(CHATStep.createDummy()); uc.getSteps().add(PINStep.createDummy(passwordType)); ProcessingStep procStep = new ProcessingStep(); ProcessingStepAction procStepAction = new ProcessingStepAction(procStep); procStep.setAction(procStepAction); uc.getSteps().add(procStep); } else { String pin = langPace.translationForKey("pin"); String puk = langPace.translationForKey("puk"); String title = langPace.translationForKey("step_error_title_blocked", pin); String errorMsg = langPace.translationForKey("step_error_pin_blocked", pin, pin, puk, pin); ErrorStep eStep = new ErrorStep(title, errorMsg); uc.getSteps().add(eStep); dynCtx.put(EACProtocol.PACE_SUCCESSFUL, false); } Thread guiThread = new Thread(new Runnable() { @Override public void run() { // get context here because it is thread local DynamicContext dynCtx = DynamicContext.getInstance(TR03112Keys.INSTANCE_KEY); UserConsentNavigator navigator = gui.obtainNavigator(uc); dynCtx.put(TR03112Keys.OPEN_USER_CONSENT_NAVIGATOR, navigator); ExecutionEngine exec = new ExecutionEngine(navigator); 
ResultStatus guiResult = exec.process(); dynCtx.put(EACProtocol.GUI_RESULT, guiResult); if (guiResult == ResultStatus.CANCEL) { Promise<Object> pPaceSuccessful = dynCtx.getPromise(EACProtocol.PACE_SUCCESSFUL); if (! pPaceSuccessful.isDelivered()) { pPaceSuccessful.deliver(false); } } } }, "EAC-GUI"); guiThread.start(); // wait for PACE to finish Promise<Object> pPaceSuccessful = dynCtx.getPromise(EACProtocol.PACE_SUCCESSFUL); boolean paceSuccessful = (boolean) pPaceSuccessful.deref(); if (! paceSuccessful) { // TODO: differentiate between cancel and pin error String msg = "Failure in PACE authentication."; Result r = WSHelper.makeResultError(ECardConstants.Minor.SAL.CANCELLATION_BY_USER, msg); response.setResult(r); dynCtx.put(EACProtocol.AUTHENTICATION_FAILED, true); return response; } // get challenge from card TerminalAuthentication ta = new TerminalAuthentication(dispatcher, slotHandle); byte[] challenge = ta.getChallenge(); // prepare DIDAuthenticationResponse DIDAuthenticationDataType data = eacData.paceResponse.getAuthenticationProtocolData(); AuthDataMap paceOutputMap = new AuthDataMap(data); //int retryCounter = Integer.valueOf(paceOutputMap.getContentAsString(PACEOutputType.RETRY_COUNTER)); byte[] efCardAccess = paceOutputMap.getContentAsBytes(PACEOutputType.EF_CARD_ACCESS); byte[] currentCAR = paceOutputMap.getContentAsBytes(PACEOutputType.CURRENT_CAR); byte[] previousCAR = paceOutputMap.getContentAsBytes(PACEOutputType.PREVIOUS_CAR); byte[] idpicc = paceOutputMap.getContentAsBytes(PACEOutputType.ID_PICC); // Store SecurityInfos SecurityInfos securityInfos = SecurityInfos.getInstance(efCardAccess); internalData.put(EACConstants.IDATA_SECURITY_INFOS, securityInfos); // Store additional data internalData.put(EACConstants.IDATA_AUTHENTICATED_AUXILIARY_DATA, aad); internalData.put(EACConstants.IDATA_CERTIFICATES, certChain); internalData.put(EACConstants.IDATA_CURRENT_CAR, currentCAR); internalData.put(EACConstants.IDATA_CHALLENGE, challenge); // Create 
response //eac1Output.setRetryCounter(retryCounter); eac1Output.setCHAT(eacData.selectedCHAT.toByteArray()); eac1Output.setCurrentCAR(currentCAR); eac1Output.setPreviousCAR(previousCAR); eac1Output.setEFCardAccess(efCardAccess); eac1Output.setIDPICC(idpicc); eac1Output.setChallenge(challenge); response.setResult(WSHelper.makeResultOK()); response.setAuthenticationProtocolData(eac1Output.getAuthDataType()); } catch (CertificateException ex) { logger.error(ex.getMessage(), ex); String msg = ex.getMessage(); response.setResult(WSHelper.makeResultError(ECardConstants.Minor.SAL.EAC.DOC_VALID_FAILED, msg)); dynCtx.put(EACProtocol.AUTHENTICATION_FAILED, true); } catch (WSHelper.WSException e) { logger.error(e.getMessage(), e); response.setResult(e.getResult()); dynCtx.put(EACProtocol.AUTHENTICATION_FAILED, true); } catch (Exception e) { logger.error(e.getMessage(), e); response.setResult(WSHelper.makeResultUnknownError(e.getMessage())); dynCtx.put(EACProtocol.AUTHENTICATION_FAILED, true); } return response; } private PACEMarkerType getPaceMarker(CardStateEntry cardState, String pinType) { // TODO: replace with DIDGet call byte[] applicationIdentifier = cardState.getCurrentCardApplication().getApplicationIdentifier(); DIDStructureType didStructure = cardState.getDIDStructure(pinType, applicationIdentifier); iso.std.iso_iec._24727.tech.schema.PACEMarkerType didMarker; didMarker = (iso.std.iso_iec._24727.tech.schema.PACEMarkerType) didStructure.getDIDMarker(); return new PACEMarkerType(didMarker); } private boolean convertToBoolean(Object o) { if (o instanceof Boolean) { return ((Boolean) o); } else { return false; } } /** * Perform all checks as described in BSI TR-03112-7 3.4.4. 
* * @param certDescription CertificateDescription of the eService Certificate * @param dynCtx Dynamic Context * @return a {@link Result} set according to the results of the checks */ private Result performChecks(CertificateDescription certDescription, DynamicContext dynCtx) { Object objectActivation = dynCtx.get(TR03112Keys.OBJECT_ACTIVATION); Object tokenChecks = dynCtx.get(TR03112Keys.TCTOKEN_CHECKS); boolean checkPassed; // omit these checks if explicitly disabled if (convertToBoolean(tokenChecks)) { checkPassed = checkEserviceCertificate(certDescription, dynCtx); if (! checkPassed) { String msg = "Hash of eService certificate is NOT contained in the CertificateDescription."; // TODO check for the correct minor type Result r = WSHelper.makeResultError(ECardConstants.Minor.App.UNKNOWN_ERROR, msg); return r; } // only perform the following checks if new activation is used if (! convertToBoolean(objectActivation)) { checkPassed = checkTCTokenServerCertificates(certDescription, dynCtx); if (! checkPassed) { String msg = "Hash of the TCToken server certificate is NOT contained in the CertificateDescription."; // TODO check for the correct minor type Result r = WSHelper.makeResultError(ECardConstants.Minor.App.UNKNOWN_ERROR, msg); return r; } checkPassed = checkTCTokenAndSubjectURL(certDescription, dynCtx); if (! 
checkPassed) { String msg = "TCToken does not come from the server to which the authorization certificate was issued."; // TODO check for the correct minor type Result r = WSHelper.makeResultError(ECardConstants.Minor.App.UNKNOWN_ERROR, msg); return r; } } else { logger.warn("Checks according to BSI TR03112 3.4.4 (TCToken specific) skipped."); } } else { logger.warn("Checks according to BSI TR03112 3.4.4 skipped."); } // all checks passed return WSHelper.makeResultOK(); } private boolean checkTCTokenAndSubjectURL(CertificateDescription certDescription, DynamicContext dynCtx) { Object o = dynCtx.get(TR03112Keys.TCTOKEN_URL); if (o instanceof URL) { URL tcTokenURL = (URL) o; try { URL subjectURL = new URL(certDescription.getSubjectURL()); return TR03112Utils.checkSameOriginPolicy(tcTokenURL, subjectURL); } catch (MalformedURLException e) { logger.error("SubjectURL in CertificateDescription is not a well formed URL."); return false; } } else { logger.error("No TC Token URL set in Dynamic Context."); return false; } } private boolean checkEserviceCertificate(CertificateDescription certDescription, DynamicContext dynCtx) { Object o = dynCtx.get(TR03112Keys.ESERVICE_CERTIFICATE); if (o instanceof Certificate) { Certificate certificate = (Certificate) o; return TR03112Utils.isInCommCertificates(certificate, certDescription.getCommCertificates()); } else { logger.error("No eService TLS Certificate set in Dynamic Context."); return false; } } private boolean checkTCTokenServerCertificates(CertificateDescription certDescription, DynamicContext dynCtx) { Object o = dynCtx.get(TR03112Keys.TCTOKEN_SERVER_CERTIFICATES); if (o instanceof List) { List<?> certificates = (List<?>) o; for (Object cert : certificates) { if (cert instanceof Pair) { Pair<?, ?> p = (Pair<?, ?>) cert; if (p.p2 instanceof Certificate) { Certificate bcCert = (Certificate) p.p2; if (! 
TR03112Utils.isInCommCertificates(bcCert, certDescription.getCommCertificates())) { return false; } } } } return true; } else { logger.error("No TC Token server certificates set in Dynamic Context."); return false; } } /** * Check if the selected card reader supports PACE. * In that case, the reader is a standard or comfort reader. * * @param connectionHandle Handle describing the IFD and reader. * @return true when card reader supports genericPACE, false otherwise. * @throws Exception */ private boolean genericPACESupport(ConnectionHandleType connectionHandle) throws Exception { // Request terminal capabilities GetIFDCapabilities capabilitiesRequest = new GetIFDCapabilities(); capabilitiesRequest.setContextHandle(connectionHandle.getContextHandle()); capabilitiesRequest.setIFDName(connectionHandle.getIFDName()); GetIFDCapabilitiesResponse capabilitiesResponse = (GetIFDCapabilitiesResponse) dispatcher.deliver(capabilitiesRequest); WSHelper.checkResult(capabilitiesResponse); if (capabilitiesResponse.getIFDCapabilities() != null) { List<SlotCapabilityType> capabilities = capabilitiesResponse.getIFDCapabilities().getSlotCapability(); // Check all capabilities for generic PACE final String genericPACE = PACECapabilities.PACECapability.GenericPACE.getProtocol(); for (SlotCapabilityType capability : capabilities) { if (capability.getIndex().equals(connectionHandle.getSlotIndex())) { for (String protocol : capability.getProtocol()) { if (protocol.equals(genericPACE)) { return true; } } } } } // No PACE capability found return false; } }
addons/tr03112/src/main/java/org/openecard/sal/protocol/eac/PACEStep.java
/**************************************************************************** * Copyright (C) 2012-2014 ecsec GmbH. * All rights reserved. * Contact: ecsec GmbH ([email protected]) * * This file is part of the Open eCard App. * * GNU General Public License Usage * This file may be used under the terms of the GNU General Public * License version 3.0 as published by the Free Software Foundation * and appearing in the file LICENSE.GPL included in the packaging of * this file. Please review the following information to ensure the * GNU General Public License version 3.0 requirements will be met: * http://www.gnu.org/copyleft/gpl.html. * * Other Usage * Alternatively, this file may be used in accordance with the terms * and conditions contained in a signed written agreement between * you and ecsec GmbH. * ***************************************************************************/ package org.openecard.sal.protocol.eac; import iso.std.iso_iec._24727.tech.schema.ConnectionHandleType; import iso.std.iso_iec._24727.tech.schema.DIDAuthenticate; import iso.std.iso_iec._24727.tech.schema.DIDAuthenticateResponse; import iso.std.iso_iec._24727.tech.schema.DIDAuthenticationDataType; import iso.std.iso_iec._24727.tech.schema.DIDStructureType; import iso.std.iso_iec._24727.tech.schema.GetIFDCapabilities; import iso.std.iso_iec._24727.tech.schema.GetIFDCapabilitiesResponse; import iso.std.iso_iec._24727.tech.schema.InputAPDUInfoType; import iso.std.iso_iec._24727.tech.schema.SlotCapabilityType; import iso.std.iso_iec._24727.tech.schema.Transmit; import iso.std.iso_iec._24727.tech.schema.TransmitResponse; import java.net.MalformedURLException; import java.net.URL; import java.security.cert.CertificateException; import java.util.Arrays; import java.util.List; import java.util.Map; import javax.smartcardio.ResponseAPDU; import javax.xml.bind.JAXBException; import oasis.names.tc.dss._1_0.core.schema.Result; import org.openecard.addon.sal.FunctionType; import 
org.openecard.addon.sal.ProtocolStep; import org.openecard.binding.tctoken.TR03112Keys; import org.openecard.bouncycastle.crypto.tls.Certificate; import org.openecard.common.DynamicContext; import org.openecard.common.ECardConstants; import org.openecard.common.I18n; import org.openecard.common.WSHelper; import org.openecard.common.anytype.AuthDataMap; import org.openecard.common.ifd.PACECapabilities; import org.openecard.common.interfaces.Dispatcher; import org.openecard.common.interfaces.EventManager; import org.openecard.common.sal.state.CardStateEntry; import org.openecard.common.util.ByteUtils; import org.openecard.common.util.JAXBSchemaValidator; import org.openecard.common.util.Pair; import org.openecard.common.util.Promise; import org.openecard.common.util.TR03112Utils; import org.openecard.crypto.common.asn1.cvc.CHAT; import org.openecard.crypto.common.asn1.cvc.CHATVerifier; import org.openecard.crypto.common.asn1.cvc.CardVerifiableCertificate; import org.openecard.crypto.common.asn1.cvc.CardVerifiableCertificateChain; import org.openecard.crypto.common.asn1.cvc.CardVerifiableCertificateVerifier; import org.openecard.crypto.common.asn1.cvc.CertificateDescription; import org.openecard.crypto.common.asn1.eac.AuthenticatedAuxiliaryData; import org.openecard.crypto.common.asn1.eac.SecurityInfos; import org.openecard.gui.ResultStatus; import org.openecard.gui.UserConsent; import org.openecard.gui.UserConsentNavigator; import org.openecard.gui.definition.UserConsentDescription; import org.openecard.gui.executor.ExecutionEngine; import org.openecard.sal.protocol.eac.anytype.EAC1InputType; import org.openecard.sal.protocol.eac.anytype.EAC1OutputType; import org.openecard.sal.protocol.eac.anytype.PACEMarkerType; import org.openecard.sal.protocol.eac.anytype.PACEOutputType; import org.openecard.sal.protocol.eac.anytype.PasswordID; import org.openecard.sal.protocol.eac.gui.CHATStep; import org.openecard.sal.protocol.eac.gui.CVCStep; import 
org.openecard.sal.protocol.eac.gui.CVCStepAction; import org.openecard.sal.protocol.eac.gui.CardMonitor; import org.openecard.sal.protocol.eac.gui.CardRemovedFilter; import org.openecard.sal.protocol.eac.gui.ErrorStep; import org.openecard.sal.protocol.eac.gui.PINStep; import org.openecard.sal.protocol.eac.gui.ProcessingStep; import org.openecard.sal.protocol.eac.gui.ProcessingStepAction; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Implements PACE protocol step according to BSI TR-03112-7. * * @see "BSI-TR-03112, version 1.1.2., part 7, section 4.6.5." * @author Tobias Wich * @author Moritz Horsch * @author Dirk Petrautzki */ public class PACEStep implements ProtocolStep<DIDAuthenticate, DIDAuthenticateResponse> { private static final Logger logger = LoggerFactory.getLogger(PACEStep.class.getName()); private static final I18n lang = I18n.getTranslation("eac"); private static final I18n langPace = I18n.getTranslation("pace"); // GUI translation constants private static final String TITLE = "eac_user_consent_title"; private final Dispatcher dispatcher; private final UserConsent gui; private final EventManager eventManager; /** * Creates a new PACE protocol step. 
* * @param dispatcher Dispatcher * @param gui GUI * @param eventManager */ public PACEStep(Dispatcher dispatcher, UserConsent gui, EventManager eventManager) { this.dispatcher = dispatcher; this.gui = gui; this.eventManager = eventManager; } @Override public FunctionType getFunctionType() { return FunctionType.DIDAuthenticate; } @Override public DIDAuthenticateResponse perform(DIDAuthenticate request, Map<String, Object> internalData) { // get context to save values in DynamicContext dynCtx = DynamicContext.getInstance(TR03112Keys.INSTANCE_KEY); DIDAuthenticate didAuthenticate = request; DIDAuthenticateResponse response = new DIDAuthenticateResponse(); ConnectionHandleType conHandle = (ConnectionHandleType) dynCtx.get(TR03112Keys.CONNECTION_HANDLE); try { JAXBSchemaValidator valid = (JAXBSchemaValidator) dynCtx.getPromise(EACProtocol.SCHEMA_VALIDATOR).deref(); boolean messageValid = valid.validateObject(request); if (! messageValid) { String msg = "Validation of the EAC1InputType message failed."; logger.error(msg); dynCtx.put(EACProtocol.AUTHENTICATION_FAILED, true); response.setResult(WSHelper.makeResultError(ECardConstants.Minor.App.INCORRECT_PARM, msg)); return response; } } catch (JAXBException ex) { String msg = "Validation of the EAC1InputType message failed due to invalid input data."; logger.error(msg, ex); dynCtx.put(EACProtocol.AUTHENTICATION_FAILED, true); response.setResult(WSHelper.makeResultError(ECardConstants.Minor.App.INT_ERROR, msg)); return response; } catch (InterruptedException ex) { String msg = "Thread interrupted while waiting for schema validator instance."; logger.error(msg, ex); dynCtx.put(EACProtocol.AUTHENTICATION_FAILED, true); response.setResult(WSHelper.makeResultError(ECardConstants.Minor.App.INT_ERROR, msg)); return response; } if (! 
ByteUtils.compare(conHandle.getSlotHandle(), didAuthenticate.getConnectionHandle().getSlotHandle())) { String msg = "Invalid connection handle given in DIDAuthenticate message."; Result r = WSHelper.makeResultError(ECardConstants.Minor.SAL.UNKNOWN_HANDLE, msg); response.setResult(r); dynCtx.put(EACProtocol.AUTHENTICATION_FAILED, true); return response; } byte[] slotHandle = conHandle.getSlotHandle(); dynCtx.put(EACProtocol.SLOT_HANDLE, slotHandle); dynCtx.put(EACProtocol.DISPATCHER, dispatcher); try { EAC1InputType eac1Input = new EAC1InputType(didAuthenticate.getAuthenticationProtocolData()); EAC1OutputType eac1Output = eac1Input.getOutputType(); AuthenticatedAuxiliaryData aad = new AuthenticatedAuxiliaryData(eac1Input.getAuthenticatedAuxiliaryData()); byte pinID = PasswordID.valueOf(didAuthenticate.getDIDName()).getByte(); final String passwordType = PasswordID.parse(pinID).getString(); // determine PACE capabilities of the terminal boolean nativePace = genericPACESupport(conHandle); dynCtx.put(EACProtocol.IS_NATIVE_PACE, nativePace); // Certificate chain CardVerifiableCertificateChain certChain = new CardVerifiableCertificateChain(eac1Input.getCertificates()); byte[] rawCertificateDescription = eac1Input.getCertificateDescription(); CertificateDescription certDescription = CertificateDescription.getInstance(rawCertificateDescription); // put CertificateDescription into DynamicContext which is needed for later checks dynCtx.put(TR03112Keys.ESERVICE_CERTIFICATE_DESC, certDescription); // according to BSI-INSTANCE_KEY-7 we MUST perform some checks immediately after receiving the eService cert Result activationChecksResult = performChecks(certDescription, dynCtx); if (! 
ECardConstants.Major.OK.equals(activationChecksResult.getResultMajor())) { response.setResult(activationChecksResult); dynCtx.put(EACProtocol.AUTHENTICATION_FAILED, true); return response; } CHAT requiredCHAT = new CHAT(eac1Input.getRequiredCHAT()); CHAT optionalCHAT = new CHAT(eac1Input.getOptionalCHAT()); // get the PACEMarker CardStateEntry cardState = (CardStateEntry) internalData.get(EACConstants.IDATA_CARD_STATE_ENTRY); PACEMarkerType paceMarker = getPaceMarker(cardState, passwordType); dynCtx.put(EACProtocol.PACE_MARKER, paceMarker); // Verify that the certificate description matches the terminal certificate CardVerifiableCertificate taCert = certChain.getTerminalCertificate(); CardVerifiableCertificateVerifier.verify(taCert, certDescription); // Verify that the required CHAT matches the terminal certificate's CHAT CHAT taCHAT = taCert.getCHAT(); CHATVerifier.verfiy(taCHAT, requiredCHAT); // remove overlapping values from optional chat optionalCHAT.restrictAccessRights(taCHAT); // Prepare data in DIDAuthenticate for GUI final EACData eacData = new EACData(); eacData.didRequest = didAuthenticate; eacData.certificate = certChain.getTerminalCertificate(); eacData.certificateDescription = certDescription; eacData.rawCertificateDescription = rawCertificateDescription; eacData.transactionInfo = eac1Input.getTransactionInfo(); eacData.requiredCHAT = requiredCHAT; eacData.optionalCHAT = optionalCHAT; eacData.selectedCHAT = requiredCHAT; eacData.aad = aad; eacData.pinID = pinID; eacData.passwordType = passwordType; dynCtx.put(EACProtocol.EAC_DATA, eacData); // get initial pin status InputAPDUInfoType input = new InputAPDUInfoType(); input.setInputAPDU(new byte[] {(byte) 0x00, (byte) 0x22, (byte) 0xC1, (byte) 0xA4, (byte) 0x0F, (byte) 0x80, (byte) 0x0A, (byte) 0x04, (byte) 0x00, (byte) 0x7F, (byte) 0x00, (byte) 0x07, (byte) 0x02, (byte) 0x02, (byte) 0x04, (byte) 0x02, (byte) 0x02, (byte) 0x83, (byte) 0x01, (byte) 0x03}); input.getAcceptableStatusCode().add(new byte[] 
{(byte) 0x90, (byte) 0x00}); // pin activated 3 tries left input.getAcceptableStatusCode().add(new byte[] {(byte) 0x63, (byte) 0xC2}); // pin activated 2 tries left input.getAcceptableStatusCode().add(new byte[] {(byte) 0x63, (byte) 0xC1}); // pin suspended 1 try left CAN // needs to be entered input.getAcceptableStatusCode().add(new byte[] {(byte) 0x63, (byte) 0xC0}); // pin blocked 0 tries left input.getAcceptableStatusCode().add(new byte[] {(byte) 0x62, (byte) 0x83}); // pin deaktivated Transmit transmit = new Transmit(); transmit.setSlotHandle(slotHandle); transmit.getInputAPDUInfo().add(input); TransmitResponse pinCheckResponse = (TransmitResponse) dispatcher.deliver(transmit); byte[] output = pinCheckResponse.getOutputAPDU().get(0); ResponseAPDU outputApdu = new ResponseAPDU(output); byte[] status = {(byte) outputApdu.getSW1(), (byte) outputApdu.getSW2()}; dynCtx.put(EACProtocol.PIN_STATUS_BYTES, status); boolean pinUsable = ! Arrays.equals(status, new byte[]{(byte) 0x63, (byte) 0xC0}); // define GUI depending on the PIN status final UserConsentDescription uc = new UserConsentDescription(lang.translationForKey(TITLE)); if (pinUsable) { // create GUI and init executor CardMonitor cardMon = new CardMonitor(); CardRemovedFilter filter = new CardRemovedFilter(conHandle.getIFDName(), conHandle.getSlotIndex()); eventManager.register(cardMon, filter); CVCStep cvcStep = new CVCStep(eacData); cvcStep.setBackgroundTask(cardMon); CVCStepAction cvcStepAction = new CVCStepAction(cvcStep); cvcStep.setAction(cvcStepAction); uc.getSteps().add(cvcStep); uc.getSteps().add(CHATStep.createDummy()); uc.getSteps().add(PINStep.createDummy(passwordType)); ProcessingStep procStep = new ProcessingStep(); ProcessingStepAction procStepAction = new ProcessingStepAction(procStep); procStep.setAction(procStepAction); uc.getSteps().add(procStep); } else { String pin = langPace.translationForKey("pin"); String puk = langPace.translationForKey("puk"); String title = 
langPace.translationForKey("step_error_title_blocked", pin); String errorMsg = langPace.translationForKey("step_error_pin_blocked", pin, pin, puk, pin); ErrorStep eStep = new ErrorStep(title, errorMsg); uc.getSteps().add(eStep); dynCtx.put(EACProtocol.PACE_SUCCESSFUL, false); } Thread guiThread = new Thread(new Runnable() { @Override public void run() { // get context here because it is thread local DynamicContext dynCtx = DynamicContext.getInstance(TR03112Keys.INSTANCE_KEY); UserConsentNavigator navigator = gui.obtainNavigator(uc); dynCtx.put(TR03112Keys.OPEN_USER_CONSENT_NAVIGATOR, navigator); ExecutionEngine exec = new ExecutionEngine(navigator); ResultStatus guiResult = exec.process(); dynCtx.put(EACProtocol.GUI_RESULT, guiResult); if (guiResult == ResultStatus.CANCEL) { Promise<Object> pPaceSuccessful = dynCtx.getPromise(EACProtocol.PACE_SUCCESSFUL); if (! pPaceSuccessful.isDelivered()) { pPaceSuccessful.deliver(false); } } } }, "EAC-GUI"); guiThread.start(); // wait for PACE to finish Promise<Object> pPaceSuccessful = dynCtx.getPromise(EACProtocol.PACE_SUCCESSFUL); boolean paceSuccessful = (boolean) pPaceSuccessful.deref(); if (! 
paceSuccessful) { // TODO: differentiate between cancel and pin error String msg = "Failure in PACE authentication."; Result r = WSHelper.makeResultError(ECardConstants.Minor.SAL.CANCELLATION_BY_USER, msg); response.setResult(r); dynCtx.put(EACProtocol.AUTHENTICATION_FAILED, true); return response; } // get challenge from card TerminalAuthentication ta = new TerminalAuthentication(dispatcher, slotHandle); byte[] challenge = ta.getChallenge(); // prepare DIDAuthenticationResponse DIDAuthenticationDataType data = eacData.paceResponse.getAuthenticationProtocolData(); AuthDataMap paceOutputMap = new AuthDataMap(data); //int retryCounter = Integer.valueOf(paceOutputMap.getContentAsString(PACEOutputType.RETRY_COUNTER)); byte[] efCardAccess = paceOutputMap.getContentAsBytes(PACEOutputType.EF_CARD_ACCESS); byte[] currentCAR = paceOutputMap.getContentAsBytes(PACEOutputType.CURRENT_CAR); byte[] previousCAR = paceOutputMap.getContentAsBytes(PACEOutputType.PREVIOUS_CAR); byte[] idpicc = paceOutputMap.getContentAsBytes(PACEOutputType.ID_PICC); // Store SecurityInfos SecurityInfos securityInfos = SecurityInfos.getInstance(efCardAccess); internalData.put(EACConstants.IDATA_SECURITY_INFOS, securityInfos); // Store additional data internalData.put(EACConstants.IDATA_AUTHENTICATED_AUXILIARY_DATA, aad); internalData.put(EACConstants.IDATA_CERTIFICATES, certChain); internalData.put(EACConstants.IDATA_CURRENT_CAR, currentCAR); internalData.put(EACConstants.IDATA_CHALLENGE, challenge); // Create response //eac1Output.setRetryCounter(retryCounter); eac1Output.setCHAT(eacData.selectedCHAT.toByteArray()); eac1Output.setCurrentCAR(currentCAR); eac1Output.setPreviousCAR(previousCAR); eac1Output.setEFCardAccess(efCardAccess); eac1Output.setIDPICC(idpicc); eac1Output.setChallenge(challenge); response.setResult(WSHelper.makeResultOK()); response.setAuthenticationProtocolData(eac1Output.getAuthDataType()); } catch (CertificateException ex) { logger.error(ex.getMessage(), ex); String msg = 
ex.getMessage(); response.setResult(WSHelper.makeResultError(ECardConstants.Minor.SAL.EAC.DOC_VALID_FAILED, msg)); dynCtx.put(EACProtocol.AUTHENTICATION_FAILED, true); } catch (WSHelper.WSException e) { logger.error(e.getMessage(), e); response.setResult(e.getResult()); dynCtx.put(EACProtocol.AUTHENTICATION_FAILED, true); } catch (Exception e) { logger.error(e.getMessage(), e); response.setResult(WSHelper.makeResultUnknownError(e.getMessage())); dynCtx.put(EACProtocol.AUTHENTICATION_FAILED, true); } return response; } private PACEMarkerType getPaceMarker(CardStateEntry cardState, String pinType) { // TODO: replace with DIDGet call byte[] applicationIdentifier = cardState.getCurrentCardApplication().getApplicationIdentifier(); DIDStructureType didStructure = cardState.getDIDStructure(pinType, applicationIdentifier); iso.std.iso_iec._24727.tech.schema.PACEMarkerType didMarker; didMarker = (iso.std.iso_iec._24727.tech.schema.PACEMarkerType) didStructure.getDIDMarker(); return new PACEMarkerType(didMarker); } private boolean convertToBoolean(Object o) { if (o instanceof Boolean) { return ((Boolean) o); } else { return false; } } /** * Perform all checks as described in BSI TR-03112-7 3.4.4. * * @param certDescription CertificateDescription of the eService Certificate * @param dynCtx Dynamic Context * @return a {@link Result} set according to the results of the checks */ private Result performChecks(CertificateDescription certDescription, DynamicContext dynCtx) { Object objectActivation = dynCtx.get(TR03112Keys.OBJECT_ACTIVATION); Object tokenChecks = dynCtx.get(TR03112Keys.TCTOKEN_CHECKS); boolean checkPassed; // omit these checks if explicitly disabled if (convertToBoolean(tokenChecks)) { checkPassed = checkEserviceCertificate(certDescription, dynCtx); if (! 
checkPassed) { String msg = "Hash of eService certificate is NOT contained in the CertificateDescription."; // TODO check for the correct minor type Result r = WSHelper.makeResultError(ECardConstants.Minor.App.UNKNOWN_ERROR, msg); return r; } // only perform the following checks if new activation is used if (! convertToBoolean(objectActivation)) { checkPassed = checkTCTokenServerCertificates(certDescription, dynCtx); if (! checkPassed) { String msg = "Hash of the TCToken server certificate is NOT contained in the CertificateDescription."; // TODO check for the correct minor type Result r = WSHelper.makeResultError(ECardConstants.Minor.App.UNKNOWN_ERROR, msg); return r; } checkPassed = checkTCTokenAndSubjectURL(certDescription, dynCtx); if (! checkPassed) { String msg = "TCToken does not come from the server to which the authorization certificate was issued."; // TODO check for the correct minor type Result r = WSHelper.makeResultError(ECardConstants.Minor.App.UNKNOWN_ERROR, msg); return r; } } else { logger.warn("Checks according to BSI TR03112 3.4.4 (TCToken specific) skipped."); } } else { logger.warn("Checks according to BSI TR03112 3.4.4 skipped."); } // all checks passed return WSHelper.makeResultOK(); } private boolean checkTCTokenAndSubjectURL(CertificateDescription certDescription, DynamicContext dynCtx) { Object o = dynCtx.get(TR03112Keys.TCTOKEN_URL); if (o instanceof URL) { URL tcTokenURL = (URL) o; try { URL subjectURL = new URL(certDescription.getSubjectURL()); return TR03112Utils.checkSameOriginPolicy(tcTokenURL, subjectURL); } catch (MalformedURLException e) { logger.error("SubjectURL in CertificateDescription is not a well formed URL."); return false; } } else { logger.error("No TC Token URL set in Dynamic Context."); return false; } } private boolean checkEserviceCertificate(CertificateDescription certDescription, DynamicContext dynCtx) { Object o = dynCtx.get(TR03112Keys.ESERVICE_CERTIFICATE); if (o instanceof Certificate) { Certificate 
certificate = (Certificate) o; return TR03112Utils.isInCommCertificates(certificate, certDescription.getCommCertificates()); } else { logger.error("No eService TLS Certificate set in Dynamic Context."); return false; } } private boolean checkTCTokenServerCertificates(CertificateDescription certDescription, DynamicContext dynCtx) { Object o = dynCtx.get(TR03112Keys.TCTOKEN_SERVER_CERTIFICATES); if (o instanceof List) { List<?> certificates = (List<?>) o; for (Object cert : certificates) { if (cert instanceof Pair) { Pair<?, ?> p = (Pair<?, ?>) cert; if (p.p2 instanceof Certificate) { Certificate bcCert = (Certificate) p.p2; if (! TR03112Utils.isInCommCertificates(bcCert, certDescription.getCommCertificates())) { return false; } } } } return true; } else { logger.error("No TC Token server certificates set in Dynamic Context."); return false; } } /** * Check if the selected card reader supports PACE. * In that case, the reader is a standard or comfort reader. * * @param connectionHandle Handle describing the IFD and reader. * @return true when card reader supports genericPACE, false otherwise. 
* @throws Exception */ private boolean genericPACESupport(ConnectionHandleType connectionHandle) throws Exception { // Request terminal capabilities GetIFDCapabilities capabilitiesRequest = new GetIFDCapabilities(); capabilitiesRequest.setContextHandle(connectionHandle.getContextHandle()); capabilitiesRequest.setIFDName(connectionHandle.getIFDName()); GetIFDCapabilitiesResponse capabilitiesResponse = (GetIFDCapabilitiesResponse) dispatcher.deliver(capabilitiesRequest); WSHelper.checkResult(capabilitiesResponse); if (capabilitiesResponse.getIFDCapabilities() != null) { List<SlotCapabilityType> capabilities = capabilitiesResponse.getIFDCapabilities().getSlotCapability(); // Check all capabilities for generic PACE final String genericPACE = PACECapabilities.PACECapability.GenericPACE.getProtocol(); for (SlotCapabilityType capability : capabilities) { if (capability.getIndex().equals(connectionHandle.getSlotIndex())) { for (String protocol : capability.getProtocol()) { if (protocol.equals(genericPACE)) { return true; } } } } } // No PACE capability found return false; } }
Don't allow other terminal types than AUTHENTICATION_TERMINAL.
addons/tr03112/src/main/java/org/openecard/sal/protocol/eac/PACEStep.java
Don't allow other terminal types than AUTHENTICATION_TERMINAL.
<ide><path>ddons/tr03112/src/main/java/org/openecard/sal/protocol/eac/PACEStep.java <ide> CardVerifiableCertificateVerifier.verify(taCert, certDescription); <ide> // Verify that the required CHAT matches the terminal certificate's CHAT <ide> CHAT taCHAT = taCert.getCHAT(); <add> <add> // Check that we got an authentication terminal terminal certificate. We abort the process in case there is <add> // an other role. <add> if (taCHAT.getRole() != CHAT.Role.AUTHENTICATION_TERMINAL) { <add> String msg = "Unsupported terminal type in Terminal Certificate referenced. Refernced terminal type is " + <add> taCHAT.getRole().toString() + "."; <add> response.setResult(WSHelper.makeResultError(ECardConstants.Minor.App.PARM_ERROR, msg)); <add> dynCtx.put(EACProtocol.AUTHENTICATION_FAILED, true); <add> return response; <add> } <add> <ide> CHATVerifier.verfiy(taCHAT, requiredCHAT); <ide> // remove overlapping values from optional chat <ide> optionalCHAT.restrictAccessRights(taCHAT);
Java
apache-2.0
bfead2ec979c95f906595ab9f82bafa2516740bd
0
chtyim/cdap,hsaputra/cdap,caskdata/cdap,chtyim/cdap,caskdata/cdap,caskdata/cdap,anthcp/cdap,anthcp/cdap,caskdata/cdap,anthcp/cdap,chtyim/cdap,mpouttuclarke/cdap,anthcp/cdap,hsaputra/cdap,hsaputra/cdap,anthcp/cdap,caskdata/cdap,hsaputra/cdap,chtyim/cdap,hsaputra/cdap,caskdata/cdap,mpouttuclarke/cdap,chtyim/cdap,mpouttuclarke/cdap,mpouttuclarke/cdap,chtyim/cdap,mpouttuclarke/cdap
/* * Copyright 2014 Cask, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package co.cask.cdap.internal.app.services; import co.cask.cdap.api.service.Service; import co.cask.cdap.api.service.ServiceConfigurer; import co.cask.cdap.api.service.ServiceWorker; import co.cask.cdap.api.service.http.HttpServiceHandler; import org.apache.twill.api.TwillApplication; import org.apache.twill.api.TwillSpecification; import java.security.InvalidParameterException; import java.util.List; /** * TwillApplication to run a {@link Service}. */ public class ServiceTwillApplication implements TwillApplication { private final Service service; /** * Create a TwillApplication from a {@link Service}. * @param service */ public ServiceTwillApplication(Service service) { this.service = service; } @Override public TwillSpecification configure() { ServiceConfigurer configurer = new DefaultServiceConfigurer(); service.configure(configurer); List<? extends HttpServiceHandler> serviceHandlers = configurer.getHandlers(); if (serviceHandlers.size() == 0) { throw new InvalidParameterException("No handlers provided. 
Add handlers using configurer."); } TwillSpecification.Builder.RunnableSetter runnableSetter = TwillSpecification.Builder.with() .setName(configurer.getName()) .withRunnable() .add(new HttpServiceTwillRunnable(configurer.getName(), serviceHandlers)) .noLocalFiles(); for (ServiceWorker worker : configurer.getWorkers()) { ServiceWorkerTwillRunnable runnable = new ServiceWorkerTwillRunnable(worker); runnableSetter = runnableSetter.add(runnable).noLocalFiles(); } return runnableSetter.anyOrder().build(); } }
app-fabric/src/main/java/co/cask/cdap/internal/app/services/ServiceTwillApplication.java
/* * Copyright 2014 Cask, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package co.cask.cdap.internal.app.services; import co.cask.cdap.api.service.Service; import co.cask.cdap.api.service.ServiceConfigurer; import co.cask.cdap.api.service.ServiceWorker; import co.cask.cdap.api.service.http.HttpServiceHandler; import org.apache.twill.api.TwillApplication; import org.apache.twill.api.TwillSpecification; import java.security.InvalidParameterException; import java.util.List; /** * TwillApplication to run a {@link Service}. */ public class ServiceTwillApplication implements TwillApplication { private final Service service; /** * Create a TwillApplication from a {@link Service}. * @param service */ public ServiceTwillApplication(Service service) { this.service = service; } @Override public TwillSpecification configure() { ServiceConfigurer configurer = new DefaultServiceConfigurer(); service.configure(new DefaultServiceConfigurer()); List<? extends HttpServiceHandler> serviceHandlers = configurer.getHandlers(); if (serviceHandlers.size() == 0) { throw new InvalidParameterException("No handlers provided. 
Add handlers using configurer."); } TwillSpecification.Builder.RunnableSetter runnableSetter = TwillSpecification.Builder.with() .setName(configurer.getName()) .withRunnable() .add(new HttpServiceTwillRunnable(configurer.getName(), serviceHandlers)) .noLocalFiles(); for (ServiceWorker worker : configurer.getWorkers()) { ServiceWorkerTwillRunnable runnable = new ServiceWorkerTwillRunnable(worker); runnableSetter = runnableSetter.add(runnable).noLocalFiles(); } return runnableSetter.anyOrder().build(); } }
Pass correct configurer
app-fabric/src/main/java/co/cask/cdap/internal/app/services/ServiceTwillApplication.java
Pass correct configurer
<ide><path>pp-fabric/src/main/java/co/cask/cdap/internal/app/services/ServiceTwillApplication.java <ide> @Override <ide> public TwillSpecification configure() { <ide> ServiceConfigurer configurer = new DefaultServiceConfigurer(); <del> service.configure(new DefaultServiceConfigurer()); <add> service.configure(configurer); <ide> List<? extends HttpServiceHandler> serviceHandlers = configurer.getHandlers(); <ide> if (serviceHandlers.size() == 0) { <ide> throw new InvalidParameterException("No handlers provided. Add handlers using configurer.");
Java
apache-2.0
0452dcd2e62dbbf09e5117ceb440f5ab6dc78a4c
0
gxa/atlas,gxa/atlas,gxa/atlas,gxa/atlas,gxa/atlas
/* * Copyright 2008-2013 Microarray Informatics Team, EMBL-European Bioinformatics Institute * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * * For further details of the Gene Expression Atlas project, including source code, * downloads and documentation, please see: * * http://gxa.github.com/gxa */ package uk.ac.ebi.atlas.acceptance.selenium.pages; import com.google.common.collect.Lists; import org.apache.commons.lang.StringUtils; import org.openqa.selenium.By; import org.openqa.selenium.NoSuchElementException; import org.openqa.selenium.WebDriver; import org.openqa.selenium.WebElement; import org.openqa.selenium.interactions.Action; import org.openqa.selenium.interactions.Actions; import org.openqa.selenium.support.FindBy; import org.openqa.selenium.support.ui.ExpectedConditions; import org.openqa.selenium.support.ui.FluentWait; import org.openqa.selenium.support.ui.WebDriverWait; import uk.ac.ebi.atlas.model.ExperimentType; import java.util.ArrayList; import java.util.List; import java.util.concurrent.TimeUnit; public class HeatmapTablePage extends TablePage { private static final String PAGE_LOCATION = "/gxa/experiments/"; @FindBy(id = "arrayDesignAccession") private WebElement arrayDesignAccession; @FindBy(id = "heatmap-table") private WebElement heatmapTable; @FindBy(id = "geneCount") private WebElement geneCount; @FindBy(id = "display-experiment") private WebElement displayExperimentLink; @FindBy(id = "display-experiment-design") private WebElement 
displayExperimentDesignLink; @FindBy(id = "display-analysis-methods") private WebElement displayAnalysisLink; @FindBy(id = "download-profiles-link") private WebElement downloadExpressionProfilesLink; @FindBy(id = "download-raw") private WebElement downloadRawCountsLink; @FindBy(id = "download-analytics") private WebElement downloadAnalyticsLink; @FindBy(id = "download-normalized") private WebElement downloadNormalizedLink; @FindBy(id = "download-logFold") private WebElement downloadLogFoldLink; @FindBy(className = "gradient-level-min") private WebElement gradientLevelsMin; @FindBy(className = "gradient-level-max") private WebElement gradientLevelsMax; @FindBy(className = "genename") private List<WebElement> geneNames; @FindBy(xpath = "//label[@for='queryFactorValues']") private WebElement queryFactorLabel; @FindBy(id = "anatomogram") private WebElement anatomogram; @FindBy(id = "diff-heatmap-legend") private WebElement diffHeatmapTableLegend; @FindBy(xpath = "//thead/tr/td[1]") private WebElement geneColumnHeader; @FindBy(xpath = "//thead/tr/td[2]") private WebElement designElementHeader; private String experimentAccession; public HeatmapTablePage(WebDriver driver, String experimentAccession) { this(driver, experimentAccession, null); } public HeatmapTablePage(WebDriver driver, String experimentAccession, String httpParameters) { super(driver, httpParameters); this.experimentAccession = experimentAccession; } protected String getExperimentAccession() { return experimentAccession; } protected int getGeneExpressionStartingRowIndex() { try { driver.findElement(By.id("arrayDesignAccession")).getAttribute("value"); return 2; //MicroarrayExperiment, we have two columns before expression level cells } catch (NoSuchElementException e) { return 1; //Other experiment types, we have one column before expression level cells } } protected WebElement getHeatmapTable() { return heatmapTable; } protected void setHeatmapTable(WebElement heatmapTable) { this.heatmapTable = 
heatmapTable; } public List<String> getFactorValueHeaders() { List<String> queryFactorValues = getTableHeaders(getHeatmapTable()); //and we need to remove the last header value, because is related to the organism part column return queryFactorValues.subList(getGeneExpressionStartingRowIndex(), queryFactorValues.size()); } public List<String> getSelectedProfiles() { return getFirstColumnValues(getHeatmapTable()); } public String getQueryFactorLabel() { return queryFactorLabel.getText(); } public List<String> getDiffGradientMinLabels() { List<String> result = new ArrayList<>(); for (WebElement element : diffHeatmapTableLegend.findElements(By.className("gradient-level-min"))) { result.add(element.getText()); } return result; } public List<String> getDiffGradientMaxLabels() { List<String> result = new ArrayList<>(); for (WebElement element : diffHeatmapTableLegend.findElements(By.className("gradient-level-max"))) { result.add(element.getText()); } return result; } public List<String> getDiffGradientColors() { WebElement element = diffHeatmapTableLegend.findElement(By.className("color-gradient")); String style = element.getCssValue("background-image"); return Lists.newArrayList(StringUtils.substringsBetween(style,"rgb(",")")); } public String getDisplayExperimentLink() { return displayExperimentLink.getAttribute("href"); } public String getDisplayExperimentDesignLink() { return displayExperimentDesignLink.getAttribute("href"); } public String getDisplayExperimentAnalysisLink() { return displayAnalysisLink.getAttribute("href"); } public String getDownloadExpressionProfilesLink() { return downloadExpressionProfilesLink.getAttribute("href"); } public String getDownloadRawCountsLink() { return downloadRawCountsLink.getAttribute("href"); } public String getDownloadAnalyticsLink() { return downloadAnalyticsLink.getAttribute("href"); } public String getDownloadNormalizedLink() { return downloadNormalizedLink.getAttribute("href"); } public String getDownloadLogFoldLink() { 
return downloadLogFoldLink.getAttribute("href"); } @Override protected String getPageURI() { return PAGE_LOCATION + experimentAccession; } public List<String> getFirstGeneProfile() { List<String> firstTableRow = getRowValues(getHeatmapTable(), 1); return firstTableRow.subList(getGeneExpressionStartingRowIndex(), firstTableRow.size()); } public List<String> getGeneProfile(int zeroBasedRowIndex) { List<String> rowValues = getRowValues(getHeatmapTable(), zeroBasedRowIndex); return rowValues.subList(getGeneExpressionStartingRowIndex(), rowValues.size()); } public List<String> getLastGeneProfile() { List<String> firstTableRow = getLastRowValues(getHeatmapTable()); return firstTableRow.subList(getGeneExpressionStartingRowIndex(), firstTableRow.size()); } public String getGeneCount() { return geneCount.getText(); } public WebElement getDisplayLevelsButton() { new FluentWait<>(driver) .withTimeout(200, TimeUnit.SECONDS) .pollingEvery(200, TimeUnit.MILLISECONDS) .until(ExpectedConditions.visibilityOfElementLocated(By.id("display-levels"))); return driver.findElement(By.id("display-levels")); } public void clickDisplayLevelsButton() { WebElement displayLevelsButton = getDisplayLevelsButton(); displayLevelsButton.click(); } public String getDisplayLevelsButtonValue() { return getDisplayLevelsButton().getText(); } public boolean areGradientLevelsHidden() { String style = gradientLevelsMin.getAttribute("style"); String style2 = gradientLevelsMax.getAttribute("style"); return style.contains("display") && style.contains("none") && style2.contains("display") && style2.contains("none"); } public Boolean areExpressionLevelsHidden() { //we get the cell at index 1 because at index 0 we have the gene name WebElement firstExpressionLevelCell = getNonEmptyCellsFromFirstTableRow(getHeatmapTable()).get(getGeneExpressionStartingRowIndex()); WebElement div = firstExpressionLevelCell.findElement(By.tagName("div")); return div.getAttribute("class").contains("hide_cell"); } public double 
getAverageFpkm(int rowIndex) { List<String> stringValues = getGeneProfile(rowIndex); double averageFpkm = 0D; for (String stringValue : stringValues) { if (StringUtils.isNotBlank(stringValue)) { averageFpkm += Double.parseDouble(stringValue); } } return averageFpkm / stringValues.size(); } public double getMaxExpressionLevel(int rowIndex) { List<String> stringValues = getGeneProfile(rowIndex); double maxFpkm = 0D; for (String stringValue : stringValues) { if (StringUtils.isNotBlank(stringValue)) { double fpkmValue = Double.parseDouble(stringValue); if (fpkmValue > maxFpkm) { maxFpkm = fpkmValue; } } } return maxFpkm; } public String getGeneThatRanksAt(int rowIndex) { return getSelectedProfiles().get(rowIndex - 1); } public WebElement getAnatomogram() { return anatomogram; } protected WebElement getGeneProfileCell(int profileIndex, int expressionIndex) { return getCell(getHeatmapTable(), profileIndex + 1, expressionIndex + getGeneExpressionStartingRowIndex() + 1); } protected WebElement getGeneAnchor(int profileIndex) { return getGeneAnchor(getHeatmapTable(), profileIndex + 1); } public String getDifferentialExperimentTooltipTableHeader(int zeroBasedProfileIndex, int zeroBasedExpressionLevelIndex, int zeroBasedTooltipTableHeaderIndex, ExperimentType experimentType) { WebElement firstGeneProfileCell = getGeneProfileCell(zeroBasedProfileIndex, zeroBasedExpressionLevelIndex); hoverOnElement(firstGeneProfileCell); By byTooltipClass = By.xpath("//div[@class='ui-tooltip-content']//th[" + (zeroBasedTooltipTableHeaderIndex + 1) + "]"); FluentWait wait = new WebDriverWait(driver, 4L).pollingEvery(1, TimeUnit.SECONDS); wait.until(ExpectedConditions.visibilityOfElementLocated(byTooltipClass)); return driver.findElement(byTooltipClass).getText(); } public String getDifferentialExperimentTooltipTableCell(int zeroBasedProfileIndex, int zeroBasedExpressionLevelIndex, int zeroBasedTooltipTableCellIndex, ExperimentType experimentType) { hoverOnElement(getGeneProfileCell(0, 
zeroBasedExpressionLevelIndex)); By byTooltipClass = By.xpath("//div[@class='ui-tooltip-content']//td[" + (zeroBasedTooltipTableCellIndex + 1) + "]"); FluentWait wait = new WebDriverWait(driver, 4L).pollingEvery(1, TimeUnit.SECONDS); wait.until(ExpectedConditions.visibilityOfElementLocated(byTooltipClass)); return driver.findElement(byTooltipClass).getText(); } public String getGenePropertyTooltipContent(int zeroBasedProfileIndex) { WebElement geneProfileHeaderCell = getGeneAnchor(zeroBasedProfileIndex); hoverOnElement(geneProfileHeaderCell); By byTooltipClass = By.className("genename-tooltip"); WebDriverWait wait = new WebDriverWait(driver, 2L); wait.until(ExpectedConditions.visibilityOfElementLocated(byTooltipClass)); return driver.findElement(byTooltipClass).getText(); } public List<String> getGenePropertyTooltipHighlightedTerms(int zeroBasedProfileIndex) { By byTooltipClass = By.xpath("//div[@class='ui-tooltip-content']//span[@class='highlight']"); WebDriverWait wait = new WebDriverWait(driver, 3L); wait.until(ExpectedConditions.visibilityOfElementLocated(byTooltipClass)); List<WebElement> highlightedTermElements = driver.findElements(byTooltipClass); List<String> highlightedTerms = Lists.newArrayList(); for (WebElement highlightedTermElement : highlightedTermElements) { highlightedTerms.add(highlightedTermElement.getText()); } return highlightedTerms; } protected void hoverOnElement(WebElement webElement) { Action builder; Actions hover = new Actions(driver); hover.moveToElement(webElement); builder = hover.build(); builder.perform(); } public String getGeneColumnHeader() { return geneColumnHeader.getText(); } public String getDesignElementHeader() { return designElementHeader.getText(); } public BioEntityPage clickGeneName(int zeroBasedGeneNameIndex) { geneNames.get(zeroBasedGeneNameIndex).click(); WebDriverWait wait = new WebDriverWait(driver, 10L); wait.until(ExpectedConditions.presenceOfAllElementsLocatedBy(By.id("accordion"))); return new 
BioEntityPage(driver); } }
web/src/test/java/uk/ac/ebi/atlas/acceptance/selenium/pages/HeatmapTablePage.java
/* * Copyright 2008-2013 Microarray Informatics Team, EMBL-European Bioinformatics Institute * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * * For further details of the Gene Expression Atlas project, including source code, * downloads and documentation, please see: * * http://gxa.github.com/gxa */ package uk.ac.ebi.atlas.acceptance.selenium.pages; import com.google.common.collect.Lists; import org.apache.commons.lang.StringUtils; import org.openqa.selenium.By; import org.openqa.selenium.NoSuchElementException; import org.openqa.selenium.WebDriver; import org.openqa.selenium.WebElement; import org.openqa.selenium.interactions.Action; import org.openqa.selenium.interactions.Actions; import org.openqa.selenium.support.FindBy; import org.openqa.selenium.support.ui.ExpectedConditions; import org.openqa.selenium.support.ui.FluentWait; import org.openqa.selenium.support.ui.WebDriverWait; import uk.ac.ebi.atlas.model.ExperimentType; import java.util.ArrayList; import java.util.List; import java.util.concurrent.TimeUnit; public class HeatmapTablePage extends TablePage { private static final String PAGE_LOCATION = "/gxa/experiments/"; @FindBy(id = "arrayDesignAccession") private WebElement arrayDesignAccession; @FindBy(id = "heatmap-table") private WebElement heatmapTable; @FindBy(id = "geneCount") private WebElement geneCount; @FindBy(id = "display-experiment") private WebElement displayExperimentLink; @FindBy(id = "display-experiment-design") private WebElement 
displayExperimentDesignLink; @FindBy(id = "display-analysis-methods") private WebElement displayAnalysisLink; @FindBy(id = "download-profiles-link") private WebElement downloadExpressionProfilesLink; @FindBy(id = "download-raw") private WebElement downloadRawCountsLink; @FindBy(id = "download-analytics") private WebElement downloadAnalyticsLink; @FindBy(id = "download-normalized") private WebElement downloadNormalizedLink; @FindBy(id = "download-logFold") private WebElement downloadLogFoldLink; @FindBy(className = "gradient-level-min") private WebElement gradientLevelsMin; @FindBy(className = "gradient-level-max") private WebElement gradientLevelsMax; @FindBy(className = "genename") private List<WebElement> geneNames; @FindBy(xpath = "//label[@for='queryFactorValues']") private WebElement queryFactorLabel; @FindBy(id = "anatomogram") private WebElement anatomogram; @FindBy(id = "diff-heatmap-legend") private WebElement diffHeatmapTableLegend; @FindBy(xpath = "//thead/tr/td[1]") private WebElement geneColumnHeader; @FindBy(xpath = "//thead/tr/td[2]") private WebElement designElementHeader; private String experimentAccession; public HeatmapTablePage(WebDriver driver, String experimentAccession) { this(driver, experimentAccession, null); } public HeatmapTablePage(WebDriver driver, String experimentAccession, String httpParameters) { super(driver, httpParameters); this.experimentAccession = experimentAccession; } protected String getExperimentAccession() { return experimentAccession; } protected int getGeneExpressionStartingRowIndex() { try { driver.findElement(By.id("arrayDesignAccession")).getAttribute("value"); return 2; //MicroarrayExperiment, we have two columns before expression level cells } catch (NoSuchElementException e) { return 1; //Other experiment types, we have one column before expression level cells } } protected WebElement getHeatmapTable() { return heatmapTable; } protected void setHeatmapTable(WebElement heatmapTable) { this.heatmapTable = 
heatmapTable; } public List<String> getFactorValueHeaders() { List<String> queryFactorValues = getTableHeaders(getHeatmapTable()); //and we need to remove the last header value, because is related to the organism part column return queryFactorValues.subList(getGeneExpressionStartingRowIndex(), queryFactorValues.size()); } public List<String> getSelectedProfiles() { return getFirstColumnValues(getHeatmapTable()); } public String getQueryFactorLabel() { return queryFactorLabel.getText(); } public List<String> getDiffGradientMinLabels() { List<String> result = new ArrayList<>(); for (WebElement element : diffHeatmapTableLegend.findElements(By.className("gradient-level-min"))) { result.add(element.getText()); } return result; } public List<String> getDiffGradientMaxLabels() { List<String> result = new ArrayList<>(); for (WebElement element : diffHeatmapTableLegend.findElements(By.className("gradient-level-max"))) { result.add(element.getText()); } return result; } public List<String> getDiffGradientColors() { WebElement element = diffHeatmapTableLegend.findElement(By.className("color-gradient")); String style = element.getCssValue("background-image"); return Lists.newArrayList(StringUtils.substringsBetween(style,"rgb(",")")); } public String getDisplayExperimentLink() { return displayExperimentLink.getAttribute("href"); } public String getDisplayExperimentDesignLink() { return displayExperimentDesignLink.getAttribute("href"); } public String getDisplayExperimentAnalysisLink() { return displayAnalysisLink.getAttribute("href"); } public String getDownloadExpressionProfilesLink() { return downloadExpressionProfilesLink.getAttribute("href"); } public String getDownloadRawCountsLink() { return downloadRawCountsLink.getAttribute("href"); } public String getDownloadAnalyticsLink() { return downloadAnalyticsLink.getAttribute("href"); } public String getDownloadNormalizedLink() { return downloadNormalizedLink.getAttribute("href"); } public String getDownloadLogFoldLink() { 
return downloadLogFoldLink.getAttribute("href"); } @Override protected String getPageURI() { return PAGE_LOCATION + experimentAccession; } public List<String> getFirstGeneProfile() { List<String> firstTableRow = getRowValues(getHeatmapTable(), 1); return firstTableRow.subList(getGeneExpressionStartingRowIndex(), firstTableRow.size()); } public List<String> getGeneProfile(int zeroBasedRowIndex) { List<String> rowValues = getRowValues(getHeatmapTable(), zeroBasedRowIndex); return rowValues.subList(getGeneExpressionStartingRowIndex(), rowValues.size()); } public List<String> getLastGeneProfile() { List<String> firstTableRow = getLastRowValues(getHeatmapTable()); return firstTableRow.subList(getGeneExpressionStartingRowIndex(), firstTableRow.size()); } public String getGeneCount() { return geneCount.getText(); } public WebElement getDisplayLevelsButton() { new FluentWait<>(driver) .withTimeout(80, TimeUnit.SECONDS) .pollingEvery(200, TimeUnit.MILLISECONDS) .until(ExpectedConditions.visibilityOfElementLocated(By.id("display-levels"))); return driver.findElement(By.id("display-levels")); } public void clickDisplayLevelsButton() { WebElement displayLevelsButton = getDisplayLevelsButton(); displayLevelsButton.click(); } public String getDisplayLevelsButtonValue() { return getDisplayLevelsButton().getText(); } public boolean areGradientLevelsHidden() { String style = gradientLevelsMin.getAttribute("style"); String style2 = gradientLevelsMax.getAttribute("style"); return style.contains("display") && style.contains("none") && style2.contains("display") && style2.contains("none"); } public Boolean areExpressionLevelsHidden() { //we get the cell at index 1 because at index 0 we have the gene name WebElement firstExpressionLevelCell = getNonEmptyCellsFromFirstTableRow(getHeatmapTable()).get(getGeneExpressionStartingRowIndex()); WebElement div = firstExpressionLevelCell.findElement(By.tagName("div")); return div.getAttribute("class").contains("hide_cell"); } public double 
getAverageFpkm(int rowIndex) { List<String> stringValues = getGeneProfile(rowIndex); double averageFpkm = 0D; for (String stringValue : stringValues) { if (StringUtils.isNotBlank(stringValue)) { averageFpkm += Double.parseDouble(stringValue); } } return averageFpkm / stringValues.size(); } public double getMaxExpressionLevel(int rowIndex) { List<String> stringValues = getGeneProfile(rowIndex); double maxFpkm = 0D; for (String stringValue : stringValues) { if (StringUtils.isNotBlank(stringValue)) { double fpkmValue = Double.parseDouble(stringValue); if (fpkmValue > maxFpkm) { maxFpkm = fpkmValue; } } } return maxFpkm; } public String getGeneThatRanksAt(int rowIndex) { return getSelectedProfiles().get(rowIndex - 1); } public WebElement getAnatomogram() { return anatomogram; } protected WebElement getGeneProfileCell(int profileIndex, int expressionIndex) { return getCell(getHeatmapTable(), profileIndex + 1, expressionIndex + getGeneExpressionStartingRowIndex() + 1); } protected WebElement getGeneAnchor(int profileIndex) { return getGeneAnchor(getHeatmapTable(), profileIndex + 1); } public String getDifferentialExperimentTooltipTableHeader(int zeroBasedProfileIndex, int zeroBasedExpressionLevelIndex, int zeroBasedTooltipTableHeaderIndex, ExperimentType experimentType) { WebElement firstGeneProfileCell = getGeneProfileCell(zeroBasedProfileIndex, zeroBasedExpressionLevelIndex); hoverOnElement(firstGeneProfileCell); By byTooltipClass = By.xpath("//div[@class='ui-tooltip-content']//th[" + (zeroBasedTooltipTableHeaderIndex + 1) + "]"); FluentWait wait = new WebDriverWait(driver, 4L).pollingEvery(1, TimeUnit.SECONDS); wait.until(ExpectedConditions.visibilityOfElementLocated(byTooltipClass)); return driver.findElement(byTooltipClass).getText(); } public String getDifferentialExperimentTooltipTableCell(int zeroBasedProfileIndex, int zeroBasedExpressionLevelIndex, int zeroBasedTooltipTableCellIndex, ExperimentType experimentType) { hoverOnElement(getGeneProfileCell(0, 
zeroBasedExpressionLevelIndex)); By byTooltipClass = By.xpath("//div[@class='ui-tooltip-content']//td[" + (zeroBasedTooltipTableCellIndex + 1) + "]"); FluentWait wait = new WebDriverWait(driver, 4L).pollingEvery(1, TimeUnit.SECONDS); wait.until(ExpectedConditions.visibilityOfElementLocated(byTooltipClass)); return driver.findElement(byTooltipClass).getText(); } public String getGenePropertyTooltipContent(int zeroBasedProfileIndex) { WebElement geneProfileHeaderCell = getGeneAnchor(zeroBasedProfileIndex); hoverOnElement(geneProfileHeaderCell); By byTooltipClass = By.className("genename-tooltip"); WebDriverWait wait = new WebDriverWait(driver, 2L); wait.until(ExpectedConditions.visibilityOfElementLocated(byTooltipClass)); return driver.findElement(byTooltipClass).getText(); } public List<String> getGenePropertyTooltipHighlightedTerms(int zeroBasedProfileIndex) { By byTooltipClass = By.xpath("//div[@class='ui-tooltip-content']//span[@class='highlight']"); WebDriverWait wait = new WebDriverWait(driver, 3L); wait.until(ExpectedConditions.visibilityOfElementLocated(byTooltipClass)); List<WebElement> highlightedTermElements = driver.findElements(byTooltipClass); List<String> highlightedTerms = Lists.newArrayList(); for (WebElement highlightedTermElement : highlightedTermElements) { highlightedTerms.add(highlightedTermElement.getText()); } return highlightedTerms; } protected void hoverOnElement(WebElement webElement) { Action builder; Actions hover = new Actions(driver); hover.moveToElement(webElement); builder = hover.build(); builder.perform(); } public String getGeneColumnHeader() { return geneColumnHeader.getText(); } public String getDesignElementHeader() { return designElementHeader.getText(); } public BioEntityPage clickGeneName(int zeroBasedGeneNameIndex) { geneNames.get(zeroBasedGeneNameIndex).click(); WebDriverWait wait = new WebDriverWait(driver, 10L); wait.until(ExpectedConditions.presenceOfAllElementsLocatedBy(By.id("accordion"))); return new 
BioEntityPage(driver); } }
increased waitTimeout in selenium test of ajax component
web/src/test/java/uk/ac/ebi/atlas/acceptance/selenium/pages/HeatmapTablePage.java
increased waitTimeout in selenium test of ajax component
<ide><path>eb/src/test/java/uk/ac/ebi/atlas/acceptance/selenium/pages/HeatmapTablePage.java <ide> <ide> public WebElement getDisplayLevelsButton() { <ide> new FluentWait<>(driver) <del> .withTimeout(80, TimeUnit.SECONDS) <add> .withTimeout(200, TimeUnit.SECONDS) <ide> .pollingEvery(200, TimeUnit.MILLISECONDS) <ide> .until(ExpectedConditions.visibilityOfElementLocated(By.id("display-levels"))); <ide>
Java
mit
4bb671cecc4b8ea599d748b839b69c119c997e87
0
dozedoff/commonj
package com.github.dozedoff.commonj.hash; /* * pHash-like image hash. * Author: Elliot Shepherd ([email protected]) * Based On: http://www.hackerfactor.com/blog/index.php?/archives/432-Looks-Like-It.html * * Original Source: http://pastebin.com/Pj9d8jt5# */ import java.awt.Graphics2D; import java.awt.color.ColorSpace; import java.awt.image.BufferedImage; import java.awt.image.ColorConvertOp; import java.io.IOException; import java.io.InputStream; import javax.imageio.ImageIO; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class ImagePHash { private static final int DEFAULT_RESIZED_IMAGE_SIZE = 32; private static final int DEFAULT_DCT_MATRIX_SIZE = 8; private int resizedImageSize = 0; private int dctMatrixSize = 0; private double[] dctCoefficients; private static int resizeType = BufferedImage.TYPE_INT_ARGB_PRE; private static final ColorConvertOp colorConverter = new ColorConvertOp(ColorSpace.getInstance(ColorSpace.CS_GRAY), null); private static final Logger logger = LoggerFactory.getLogger(ImagePHash.class); static { resizeType = getResizeImageType(); } public ImagePHash() { this(DEFAULT_RESIZED_IMAGE_SIZE, DEFAULT_DCT_MATRIX_SIZE); } public ImagePHash(int resizedImageSize, int dctMatrixSize) { this.resizedImageSize = resizedImageSize; this.dctMatrixSize = dctMatrixSize; // TODO validate parameters initCoefficients(); ImageIO.setUseCache(false); } public int distance(String s1, String s2) { int counter = 0; for (int k = 0; k < s1.length(); k++) { if (s1.charAt(k) != s2.charAt(k)) { counter++; } } return counter; } /** * * @param is * file to hash * @return hash in as long * @throws IOException */ public long getLongHash(InputStream is) throws Exception { return getLongHash(readImage(is)); } public long getLongHash(BufferedImage img) throws Exception { double[][] dct = calculateDctMap(img); double dctAvg = calcDctAverage(dct); long hash = convertToLong(dct, dctAvg); return hash; } /** * Use {@link ImagePHash#getLongHash(BufferedImage)} instead. 
*/ @Deprecated public long getLongHashScaledImage(BufferedImage img) throws Exception { double[][] dct = calculateDctMapScaledDown(img); double dctAvg = calcDctAverage(dct); long hash = convertToLong(dct, dctAvg); return hash; } /** * * @param is * file to hash * @return a 'binary string' (like. 001010111011100010) which is easy to do a hamming distance on. * @throws Exception */ public String getStringHash(InputStream is) throws Exception { /* * 6. Further reduce the DCT. This is the magic step. Set the 64 hash bits to 0 or 1 depending on whether each of the 64 DCT values * is above or below the average value. The result doesn't tell us the actual low frequencies; it just tells us the very-rough * relative scale of the frequencies to the mean. The result will not vary as long as the overall structure of the image remains the * same; this can survive gamma and color histogram adjustments without a problem. */ long hash = getLongHash(is); hash = Long.rotateRight(hash, 1); return Long.toBinaryString(hash); } /** * Use {@link ImagePHash#getStringHash(InputStream)} instead. * * @param is * @return * @throws Exception */ @Deprecated public String getHash(InputStream is) throws Exception { return getStringHash(is); } private static BufferedImage readImage(InputStream is) throws IOException { return ImageIO.read(is); } private double[][] reduceColor(BufferedImage img) { double reducedValues[][] = new double[resizedImageSize][resizedImageSize]; for (int x = 0; x < img.getWidth(); x++) { for (int y = 0; y < img.getHeight(); y++) { reducedValues[x][y] = getBlue(img, x, y); } } return reducedValues; } public double[][] calculateDctMap(InputStream is) throws IOException { BufferedImage img = readImage(is); return calculateDctMap(img); } public double[][] calculateDctMap(BufferedImage img) throws IOException { /* * 1. Reduce size. Like Average Hash, pHash starts with a small image. However, the image is larger than 8x8; 32x32 is a good size. 
* This is really done to simplify the DCT computation and not because it is needed to reduce the high frequencies. */ img = resize(img, resizedImageSize, resizedImageSize); return calculateDctMapScaledDown(img); } public double[][] calculateDctMapScaledDown(BufferedImage img) throws IOException { /* * 2. Reduce color. The image is reduced to a grayscale just to further simplify the number of computations. */ BufferedImage grayscaleImage = grayscale(img); double[][] reducedColorValues = reduceColor(grayscaleImage); /* * 3. Compute the DCT. The DCT separates the image into a collection of frequencies and scalars. While JPEG uses an 8x8 DCT, this * algorithm uses a 32x32 DCT. */ double[][] dctMap = applyDCT(reducedColorValues); return dctMap; } private long convertToLong(double[][] dctVals, double avg) { if (dctMatrixSize > 9) { throw new IllegalArgumentException("The selected smallerSize value is to big for the long datatype"); } long hash = 0; for (int x = 0; x < dctMatrixSize; x++) { for (int y = 0; y < dctMatrixSize; y++) { hash += (dctVals[x][y] > avg ? 1 : 0); hash = Long.rotateLeft(hash, 1); } } return hash; } private double calcDctAverage(double[][] dctMap) { /* * 4. Reduce the DCT. This is the magic step. While the DCT is 32x32, just keep the top-left 8x8. Those represent the lowest * frequencies in the picture. */ /* * 5. Compute the average value. Like the Average Hash, compute the mean DCT value (using only the 8x8 DCT low-frequency values and * excluding the first term since the DC coefficient can be significantly different from the other values and will throw off the * average). 
*/ double sum = 0; for (int x = 0; x < dctMatrixSize; x++) { for (int y = 0; y < dctMatrixSize; y++) { sum += dctMap[x][y]; } } sum -= dctMap[0][0]; double average = sum / (double) ((dctMatrixSize * dctMatrixSize) - 1); return average; } public static BufferedImage resize(BufferedImage image, int width, int height) { BufferedImage resizedImage = new BufferedImage(width, height, resizeType); Graphics2D g = resizedImage.createGraphics(); g.drawImage(image, 0, 0, width, height, null); g.dispose(); return resizedImage; } private static BufferedImage grayscale(BufferedImage img) { colorConverter.filter(img, img); return img; } private static int getBlue(BufferedImage img, int x, int y) { return (img.getRGB(x, y)) & 0xff; } // DCT function stolen from // http://stackoverflow.com/questions/4240490/problems-with-dct-and-idct-algorithm-in-java private void initCoefficients() { dctCoefficients = new double[resizedImageSize]; for (int i = 1; i < resizedImageSize; i++) { dctCoefficients[i] = 1; } dctCoefficients[0] = 1 / Math.sqrt(2.0); } private double[][] applyDCT(double[][] reducedColorValues) { int N = resizedImageSize; double[][] F = new double[N][N]; for (int u = 0; u < N; u++) { for (int v = 0; v < N; v++) { double sum = 0.0; for (int i = 0; i < N; i++) { for (int j = 0; j < N; j++) { sum += Math.cos(((2 * i + 1) / (2.0 * N)) * u * Math.PI) * Math.cos(((2 * j + 1) / (2.0 * N)) * v * Math.PI) * (reducedColorValues[i][j]); } } sum *= ((dctCoefficients[u] * dctCoefficients[v]) / 4.0); F[u][v] = sum; } } return F; } private static int getResizeImageType() { logger.debug("Java version: {}, {}, {}", System.getProperty("java.vendor"), System.getProperty("java.vm.name"), System.getProperty("java.version")); if ((!System.getProperty("java.vm.name").startsWith("OpenJDK")) && System.getProperty("java.version").startsWith("1.7")) { logger.debug("Selected TYPE_INT_ARGB, value: ({})", BufferedImage.TYPE_INT_ARGB); logger.debug("You should only see this if you are running Oracle 
JRE/JDK 7"); return BufferedImage.TYPE_INT_ARGB; } logger.debug("Selected TYPE_INT_ARGB_PRE, value: ({})", BufferedImage.TYPE_INT_ARGB_PRE); return BufferedImage.TYPE_INT_ARGB_PRE; } }
src/main/java/com/github/dozedoff/commonj/hash/ImagePHash.java
package com.github.dozedoff.commonj.hash; /* * pHash-like image hash. * Author: Elliot Shepherd ([email protected]) * Based On: http://www.hackerfactor.com/blog/index.php?/archives/432-Looks-Like-It.html * * Original Source: http://pastebin.com/Pj9d8jt5# */ import java.awt.Graphics2D; import java.awt.color.ColorSpace; import java.awt.image.BufferedImage; import java.awt.image.ColorConvertOp; import java.io.IOException; import java.io.InputStream; import javax.imageio.ImageIO; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class ImagePHash { private static final int DEFAULT_RESIZED_IMAGE_SIZE = 32; private static final int DEFAULT_DCT_MATRIX_SIZE = 8; private int resizedImageSize = 0; private int dctMatrixSize = 0; private double[] dctCoefficients; private static int resizeType = BufferedImage.TYPE_INT_ARGB_PRE; private static final ColorConvertOp colorConverter = new ColorConvertOp(ColorSpace.getInstance(ColorSpace.CS_GRAY), null); private static final Logger logger = LoggerFactory.getLogger(ImagePHash.class); static { resizeType = getResizeImageType(); } public ImagePHash() { this(DEFAULT_RESIZED_IMAGE_SIZE, DEFAULT_DCT_MATRIX_SIZE); } public ImagePHash(int resizedImageSize, int dctMatrixSize) { this.resizedImageSize = resizedImageSize; this.dctMatrixSize = dctMatrixSize; // TODO validate parameters initCoefficients(); ImageIO.setUseCache(false); } public int distance(String s1, String s2) { int counter = 0; for (int k = 0; k < s1.length(); k++) { if (s1.charAt(k) != s2.charAt(k)) { counter++; } } return counter; } /** * * @param is * file to hash * @return hash in as long * @throws IOException */ public long getLongHash(InputStream is) throws Exception { return getLongHash(readImage(is)); } public long getLongHash(BufferedImage img) throws Exception { double[][] dct = calculateDctMap(img); double dctAvg = calcDctAverage(dct); long hash = convertToLong(dct, dctAvg); return hash; } /** * Use {@link ImagePHash#getLongHash(BufferedImage)} instead. 
*/ @Deprecated public long getLongHashScaledImage(BufferedImage img) throws Exception { double[][] dct = calculateDctMapScaledDown(img); double dctAvg = calcDctAverage(dct); long hash = convertToLong(dct, dctAvg); return hash; } /** * * @param is * file to hash * @return a 'binary string' (like. 001010111011100010) which is easy to do a hamming distance on. * @throws IOException */ public String getStringHash(InputStream is) throws IOException { String hash; double[][] dct = calculateDctMap(is); double dctAvg = calcDctAverage(dct); hash = convertToBitString(dct, dctAvg); return hash; } /** * Use {@link ImagePHash#getStringHash(InputStream)} instead. * * @param is * @return * @throws IOException */ @Deprecated public String getHash(InputStream is) throws IOException { return getStringHash(is); } private static BufferedImage readImage(InputStream is) throws IOException { return ImageIO.read(is); } private double[][] reduceColor(BufferedImage img) { double reducedValues[][] = new double[resizedImageSize][resizedImageSize]; for (int x = 0; x < img.getWidth(); x++) { for (int y = 0; y < img.getHeight(); y++) { reducedValues[x][y] = getBlue(img, x, y); } } return reducedValues; } public double[][] calculateDctMap(InputStream is) throws IOException { BufferedImage img = readImage(is); return calculateDctMap(img); } public double[][] calculateDctMap(BufferedImage img) throws IOException { /* * 1. Reduce size. Like Average Hash, pHash starts with a small image. However, the image is larger than 8x8; 32x32 is a good size. * This is really done to simplify the DCT computation and not because it is needed to reduce the high frequencies. */ img = resize(img, resizedImageSize, resizedImageSize); return calculateDctMapScaledDown(img); } public double[][] calculateDctMapScaledDown(BufferedImage img) throws IOException { /* * 2. Reduce color. The image is reduced to a grayscale just to further simplify the number of computations. 
*/ BufferedImage grayscaleImage = grayscale(img); double[][] reducedColorValues = reduceColor(grayscaleImage); /* * 3. Compute the DCT. The DCT separates the image into a collection of frequencies and scalars. While JPEG uses an 8x8 DCT, this * algorithm uses a 32x32 DCT. */ double[][] dctMap = applyDCT(reducedColorValues); return dctMap; } private String convertToBitString(double[][] dctVals, double avg) { /* * 6. Further reduce the DCT. This is the magic step. Set the 64 hash bits to 0 or 1 depending on whether each of the 64 DCT values * is above or below the average value. The result doesn't tell us the actual low frequencies; it just tells us the very-rough * relative scale of the frequencies to the mean. The result will not vary as long as the overall structure of the image remains the * same; this can survive gamma and color histogram adjustments without a problem. */ StringBuilder hash = new StringBuilder(64); for (int x = 0; x < dctMatrixSize; x++) { for (int y = 0; y < dctMatrixSize; y++) { hash.append(dctVals[x][y] > avg ? "1" : "0"); } } return hash.toString(); } private long convertToLong(double[][] dctVals, double avg) { if (dctMatrixSize > 9) { throw new IllegalArgumentException("The selected smallerSize value is to big for the long datatype"); } long hash = 0; for (int x = 0; x < dctMatrixSize; x++) { for (int y = 0; y < dctMatrixSize; y++) { hash += (dctVals[x][y] > avg ? 1 : 0); hash = Long.rotateLeft(hash, 1); } } return hash; } private double calcDctAverage(double[][] dctMap) { /* * 4. Reduce the DCT. This is the magic step. While the DCT is 32x32, just keep the top-left 8x8. Those represent the lowest * frequencies in the picture. */ /* * 5. Compute the average value. Like the Average Hash, compute the mean DCT value (using only the 8x8 DCT low-frequency values and * excluding the first term since the DC coefficient can be significantly different from the other values and will throw off the * average). 
*/ double sum = 0; for (int x = 0; x < dctMatrixSize; x++) { for (int y = 0; y < dctMatrixSize; y++) { sum += dctMap[x][y]; } } sum -= dctMap[0][0]; double average = sum / (double) ((dctMatrixSize * dctMatrixSize) - 1); return average; } public static BufferedImage resize(BufferedImage image, int width, int height) { BufferedImage resizedImage = new BufferedImage(width, height, resizeType); Graphics2D g = resizedImage.createGraphics(); g.drawImage(image, 0, 0, width, height, null); g.dispose(); return resizedImage; } private static BufferedImage grayscale(BufferedImage img) { colorConverter.filter(img, img); return img; } private static int getBlue(BufferedImage img, int x, int y) { return (img.getRGB(x, y)) & 0xff; } // DCT function stolen from // http://stackoverflow.com/questions/4240490/problems-with-dct-and-idct-algorithm-in-java private void initCoefficients() { dctCoefficients = new double[resizedImageSize]; for (int i = 1; i < resizedImageSize; i++) { dctCoefficients[i] = 1; } dctCoefficients[0] = 1 / Math.sqrt(2.0); } private double[][] applyDCT(double[][] reducedColorValues) { int N = resizedImageSize; double[][] F = new double[N][N]; for (int u = 0; u < N; u++) { for (int v = 0; v < N; v++) { double sum = 0.0; for (int i = 0; i < N; i++) { for (int j = 0; j < N; j++) { sum += Math.cos(((2 * i + 1) / (2.0 * N)) * u * Math.PI) * Math.cos(((2 * j + 1) / (2.0 * N)) * v * Math.PI) * (reducedColorValues[i][j]); } } sum *= ((dctCoefficients[u] * dctCoefficients[v]) / 4.0); F[u][v] = sum; } } return F; } private static int getResizeImageType() { logger.debug("Java version: {}, {}, {}", System.getProperty("java.vendor"), System.getProperty("java.vm.name"), System.getProperty("java.version")); if ((!System.getProperty("java.vm.name").startsWith("OpenJDK")) && System.getProperty("java.version").startsWith("1.7")) { logger.debug("Selected TYPE_INT_ARGB, value: ({})", BufferedImage.TYPE_INT_ARGB); logger.debug("You should only see this if you are running Oracle 
JRE/JDK 7"); return BufferedImage.TYPE_INT_ARGB; } logger.debug("Selected TYPE_INT_ARGB_PRE, value: ({})", BufferedImage.TYPE_INT_ARGB_PRE); return BufferedImage.TYPE_INT_ARGB_PRE; } }
REFACTOR simplified getStringHash
src/main/java/com/github/dozedoff/commonj/hash/ImagePHash.java
REFACTOR simplified getStringHash
<ide><path>rc/main/java/com/github/dozedoff/commonj/hash/ImagePHash.java <ide> * @param is <ide> * file to hash <ide> * @return a 'binary string' (like. 001010111011100010) which is easy to do a hamming distance on. <del> * @throws IOException <del> */ <del> public String getStringHash(InputStream is) throws IOException { <del> String hash; <del> double[][] dct = calculateDctMap(is); <del> double dctAvg = calcDctAverage(dct); <del> hash = convertToBitString(dct, dctAvg); <del> <del> return hash; <add> * @throws Exception <add> */ <add> public String getStringHash(InputStream is) throws Exception { <add> /* <add> * 6. Further reduce the DCT. This is the magic step. Set the 64 hash bits to 0 or 1 depending on whether each of the 64 DCT values <add> * is above or below the average value. The result doesn't tell us the actual low frequencies; it just tells us the very-rough <add> * relative scale of the frequencies to the mean. The result will not vary as long as the overall structure of the image remains the <add> * same; this can survive gamma and color histogram adjustments without a problem. <add> */ <add> <add> long hash = getLongHash(is); <add> hash = Long.rotateRight(hash, 1); <add> return Long.toBinaryString(hash); <ide> } <ide> <ide> /** <ide> * <ide> * @param is <ide> * @return <del> * @throws IOException <add> * @throws Exception <ide> */ <ide> @Deprecated <del> public String getHash(InputStream is) throws IOException { <add> public String getHash(InputStream is) throws Exception { <ide> return getStringHash(is); <ide> } <ide> <ide> double[][] dctMap = applyDCT(reducedColorValues); <ide> <ide> return dctMap; <del> } <del> <del> private String convertToBitString(double[][] dctVals, double avg) { <del> /* <del> * 6. Further reduce the DCT. This is the magic step. Set the 64 hash bits to 0 or 1 depending on whether each of the 64 DCT values <del> * is above or below the average value. 
The result doesn't tell us the actual low frequencies; it just tells us the very-rough <del> * relative scale of the frequencies to the mean. The result will not vary as long as the overall structure of the image remains the <del> * same; this can survive gamma and color histogram adjustments without a problem. <del> */ <del> <del> StringBuilder hash = new StringBuilder(64); <del> <del> for (int x = 0; x < dctMatrixSize; x++) { <del> for (int y = 0; y < dctMatrixSize; y++) { <del> hash.append(dctVals[x][y] > avg ? "1" : "0"); <del> } <del> } <del> <del> return hash.toString(); <ide> } <ide> <ide> private long convertToLong(double[][] dctVals, double avg) {
Java
apache-2.0
9fd97198fd3e69e0e884e226f8d612652585fd02
0
bkirschn/sakai,noondaysun/sakai,willkara/sakai,introp-software/sakai,liubo404/sakai,hackbuteer59/sakai,colczr/sakai,ktakacs/sakai,puramshetty/sakai,whumph/sakai,kwedoff1/sakai,zqian/sakai,colczr/sakai,colczr/sakai,conder/sakai,joserabal/sakai,joserabal/sakai,willkara/sakai,bzhouduke123/sakai,lorenamgUMU/sakai,kingmook/sakai,lorenamgUMU/sakai,conder/sakai,udayg/sakai,whumph/sakai,kwedoff1/sakai,kingmook/sakai,frasese/sakai,conder/sakai,zqian/sakai,Fudan-University/sakai,liubo404/sakai,tl-its-umich-edu/sakai,frasese/sakai,clhedrick/sakai,frasese/sakai,frasese/sakai,Fudan-University/sakai,puramshetty/sakai,tl-its-umich-edu/sakai,surya-janani/sakai,rodriguezdevera/sakai,noondaysun/sakai,surya-janani/sakai,duke-compsci290-spring2016/sakai,rodriguezdevera/sakai,frasese/sakai,introp-software/sakai,bkirschn/sakai,zqian/sakai,pushyamig/sakai,duke-compsci290-spring2016/sakai,hackbuteer59/sakai,frasese/sakai,puramshetty/sakai,lorenamgUMU/sakai,willkara/sakai,ktakacs/sakai,bkirschn/sakai,pushyamig/sakai,willkara/sakai,willkara/sakai,wfuedu/sakai,colczr/sakai,kwedoff1/sakai,bkirschn/sakai,joserabal/sakai,ouit0408/sakai,bzhouduke123/sakai,buckett/sakai-gitflow,surya-janani/sakai,joserabal/sakai,rodriguezdevera/sakai,zqian/sakai,tl-its-umich-edu/sakai,colczr/sakai,clhedrick/sakai,wfuedu/sakai,lorenamgUMU/sakai,udayg/sakai,duke-compsci290-spring2016/sakai,buckett/sakai-gitflow,pushyamig/sakai,ouit0408/sakai,lorenamgUMU/sakai,tl-its-umich-edu/sakai,rodriguezdevera/sakai,OpenCollabZA/sakai,pushyamig/sakai,puramshetty/sakai,zqian/sakai,lorenamgUMU/sakai,udayg/sakai,udayg/sakai,hackbuteer59/sakai,introp-software/sakai,introp-software/sakai,zqian/sakai,bzhouduke123/sakai,udayg/sakai,duke-compsci290-spring2016/sakai,rodriguezdevera/sakai,surya-janani/sakai,buckett/sakai-gitflow,bzhouduke123/sakai,kwedoff1/sakai,joserabal/sakai,ktakacs/sakai,zqian/sakai,kingmook/sakai,tl-its-umich-edu/sakai,willkara/sakai,liubo404/sakai,pushyamig/sakai,buckett/sakai-gitflow,pushyamig/sakai,ouit0408/sakai,
lorenamgUMU/sakai,zqian/sakai,bzhouduke123/sakai,puramshetty/sakai,noondaysun/sakai,frasese/sakai,conder/sakai,lorenamgUMU/sakai,wfuedu/sakai,bzhouduke123/sakai,introp-software/sakai,kwedoff1/sakai,clhedrick/sakai,whumph/sakai,buckett/sakai-gitflow,Fudan-University/sakai,Fudan-University/sakai,clhedrick/sakai,bzhouduke123/sakai,kingmook/sakai,liubo404/sakai,udayg/sakai,colczr/sakai,bkirschn/sakai,clhedrick/sakai,surya-janani/sakai,colczr/sakai,bzhouduke123/sakai,ktakacs/sakai,buckett/sakai-gitflow,duke-compsci290-spring2016/sakai,ktakacs/sakai,liubo404/sakai,puramshetty/sakai,noondaysun/sakai,conder/sakai,clhedrick/sakai,udayg/sakai,clhedrick/sakai,surya-janani/sakai,noondaysun/sakai,bkirschn/sakai,Fudan-University/sakai,buckett/sakai-gitflow,kingmook/sakai,wfuedu/sakai,colczr/sakai,OpenCollabZA/sakai,conder/sakai,introp-software/sakai,ktakacs/sakai,clhedrick/sakai,hackbuteer59/sakai,conder/sakai,ouit0408/sakai,wfuedu/sakai,surya-janani/sakai,willkara/sakai,ouit0408/sakai,kwedoff1/sakai,noondaysun/sakai,liubo404/sakai,kingmook/sakai,ktakacs/sakai,joserabal/sakai,rodriguezdevera/sakai,wfuedu/sakai,tl-its-umich-edu/sakai,whumph/sakai,wfuedu/sakai,noondaysun/sakai,kwedoff1/sakai,hackbuteer59/sakai,pushyamig/sakai,introp-software/sakai,introp-software/sakai,hackbuteer59/sakai,hackbuteer59/sakai,buckett/sakai-gitflow,OpenCollabZA/sakai,duke-compsci290-spring2016/sakai,pushyamig/sakai,tl-its-umich-edu/sakai,willkara/sakai,puramshetty/sakai,Fudan-University/sakai,whumph/sakai,ouit0408/sakai,OpenCollabZA/sakai,Fudan-University/sakai,hackbuteer59/sakai,surya-janani/sakai,OpenCollabZA/sakai,Fudan-University/sakai,udayg/sakai,bkirschn/sakai,duke-compsci290-spring2016/sakai,joserabal/sakai,duke-compsci290-spring2016/sakai,OpenCollabZA/sakai,liubo404/sakai,whumph/sakai,conder/sakai,puramshetty/sakai,joserabal/sakai,ouit0408/sakai,tl-its-umich-edu/sakai,wfuedu/sakai,rodriguezdevera/sakai,noondaysun/sakai,frasese/sakai,bkirschn/sakai,kingmook/sakai,OpenCollabZA/sakai,ouit0408/saka
i,kingmook/sakai,rodriguezdevera/sakai,whumph/sakai,whumph/sakai,ktakacs/sakai,OpenCollabZA/sakai,kwedoff1/sakai,liubo404/sakai
/********************************************************************************** * $URL$ * $Id$ *********************************************************************************** * * Copyright (c) 2004, 2005, 2006, 2007, 2008 The Sakai Foundation * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.osedu.org/licenses/ECL-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * **********************************************************************************/ package org.sakaiproject.tool.assessment.ui.bean.author; import java.io.Serializable; import org.sakaiproject.util.ResourceLoader; import org.sakaiproject.tool.assessment.ui.listener.util.ContextUtil; import org.sakaiproject.util.ResourceLoader; import javax.faces.application.FacesMessage; import javax.faces.context.FacesContext; import javax.faces.component.UIComponent; import javax.faces.component.UIInput; import javax.print.attribute.standard.Severity; public class AnswerBean implements Serializable{ private static final long serialVersionUID = 7526471155622776147L; private String text; private Long sequence; private String label; private String feedback; private Boolean isCorrect; private String partialCredit = "0"; //to incorporate partial credit private static ResourceLoader rb = new ResourceLoader("org.sakaiproject.tool.assessment.bundle.AuthorMessages"); public static final String choiceLabels = rb.getString("choice_labels"); public AnswerBean() {} public AnswerBean(String ptext, Long pseq, String plabel, String pfdbk, Boolean pcorr, String pgrade , Float pscore) { this.text = ptext; 
this.sequence = pseq; this.label = plabel; this.feedback= pfdbk; this.isCorrect = pcorr; } public String getText() { if (text == null) { return text; } String status; if (text.equalsIgnoreCase("Agree")) status = "st_agree"; else if (text.equalsIgnoreCase("Disagree")) status = "st_disagree"; else if (text.equalsIgnoreCase("Undecided")) status = "st_undecided"; else if (text.equalsIgnoreCase("Below Average")) status = "st_below_avg"; else if (text.equalsIgnoreCase("Average")) status = "st_avg"; else if (text.equalsIgnoreCase("Above Average")) status = "st_above_avg"; else if (text.equalsIgnoreCase("Strongly Disagree")) status = "st_strong_disagree"; else if (text.equalsIgnoreCase("Strongly agree")) status = "st_strong_agree"; else if (text.equalsIgnoreCase("Unacceptable")) status = "st_unacceptable"; else if (text.equalsIgnoreCase("Excellent")) status = "st_excellent"; else status = text; String str = rb.getString(status); if (str.indexOf("missing key")!=-1) return text; else return str; } public void setText(String text) { this.text = text; } public Long getSequence() { return sequence; } public void setSequence(Long sequence) { this.sequence = sequence; } public String getLabel() { return label; } public void setLabel(String label) { this.label = label; } public String getFeedback() { return feedback; } public void setFeedback(String feedback) { this.feedback= feedback; } public Boolean getIsCorrect() { return isCorrect; } public void setIsCorrect(Boolean isCorrect) { this.isCorrect = isCorrect; } public static String[] getChoiceLabels() { String[] lables = choiceLabels.split(":"); return lables; } // additional constroctor for partial credit public AnswerBean(String ptext, Long pseq, String plabel, String pfdbk, Boolean pcorr, String pgrade, Float pscore, String pCredit) { this.text = ptext; this.sequence = pseq; this.label = plabel; this.feedback = pfdbk; this.isCorrect = pcorr; this.partialCredit = pCredit; } // --mustansar for partial credit public String 
getPartialCredit() { return partialCredit; } public void setPartialCredit(String pCredit) { this.partialCredit = pCredit; } public void validatePartialCredit(FacesContext context, UIComponent toValidate,Object value){ Integer pCredit = null; boolean isValid = true; if ("0.0".equals(value.toString())) { pCredit = 0; } else { try { pCredit = Integer.parseInt(value.toString()); } catch (NumberFormatException e) { isValid = false; } } if(isValid && (pCredit==null || pCredit<0 || pCredit>99 )){ isValid = false; } if (!isValid) { ((UIInput)toValidate).setValid(false); FacesMessage message=new FacesMessage(); message.setSeverity(FacesMessage.SEVERITY_ERROR); String summary=ContextUtil.getLocalizedString("org.sakaiproject.tool.assessment.bundle.AuthorMessages","partial_credit_limit_summary"); String detail =ContextUtil.getLocalizedString("org.sakaiproject.tool.assessment.bundle.AuthorMessages","partial_credit_limit_detail"); message.setSummary(summary) ; message.setDetail(detail); context.addMessage(toValidate.getClientId(context), message); } } }
samigo/samigo-app/src/java/org/sakaiproject/tool/assessment/ui/bean/author/AnswerBean.java
/********************************************************************************** * $URL$ * $Id$ *********************************************************************************** * * Copyright (c) 2004, 2005, 2006, 2007, 2008 The Sakai Foundation * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.osedu.org/licenses/ECL-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * **********************************************************************************/ package org.sakaiproject.tool.assessment.ui.bean.author; import java.io.Serializable; import org.sakaiproject.util.ResourceLoader; import org.sakaiproject.tool.assessment.ui.listener.util.ContextUtil; import org.sakaiproject.util.ResourceLoader; import javax.faces.application.FacesMessage; import javax.faces.context.FacesContext; import javax.faces.component.UIComponent; import javax.faces.component.UIInput; import javax.print.attribute.standard.Severity; public class AnswerBean implements Serializable{ private static final long serialVersionUID = 7526471155622776147L; private String text; private Long sequence; private String label; private String feedback; private Boolean isCorrect; private String partialCredit = "0"; //to incorporate partial credit private static ResourceLoader rb = new ResourceLoader("org.sakaiproject.tool.assessment.bundle.AuthorMessages"); public static final String choiceLabels = rb.getString("choice_labels"); public AnswerBean() {} public AnswerBean(String ptext, Long pseq, String plabel, String pfdbk, Boolean pcorr, String pgrade , Float pscore) { this.text = ptext; 
this.sequence = pseq; this.label = plabel; this.feedback= pfdbk; this.isCorrect = pcorr; } public String getText() { String status; if (text.equalsIgnoreCase("Agree")) status = "st_agree"; else if (text.equalsIgnoreCase("Disagree")) status = "st_disagree"; else if (text.equalsIgnoreCase("Undecided")) status = "st_undecided"; else if (text.equalsIgnoreCase("Below Average")) status = "st_below_avg"; else if (text.equalsIgnoreCase("Average")) status = "st_avg"; else if (text.equalsIgnoreCase("Above Average")) status = "st_above_avg"; else if (text.equalsIgnoreCase("Strongly Disagree")) status = "st_strong_disagree"; else if (text.equalsIgnoreCase("Strongly agree")) status = "st_strong_agree"; else if (text.equalsIgnoreCase("Unacceptable")) status = "st_unacceptable"; else if (text.equalsIgnoreCase("Excellent")) status = "st_excellent"; else status = text; String str = rb.getString(status); if (str.indexOf("missing key")!=-1) return text; else return str; } public void setText(String text) { this.text = text; } public Long getSequence() { return sequence; } public void setSequence(Long sequence) { this.sequence = sequence; } public String getLabel() { return label; } public void setLabel(String label) { this.label = label; } public String getFeedback() { return feedback; } public void setFeedback(String feedback) { this.feedback= feedback; } public Boolean getIsCorrect() { return isCorrect; } public void setIsCorrect(Boolean isCorrect) { this.isCorrect = isCorrect; } public static String[] getChoiceLabels() { String[] lables = choiceLabels.split(":"); return lables; } // additional constroctor for partial credit public AnswerBean(String ptext, Long pseq, String plabel, String pfdbk, Boolean pcorr, String pgrade, Float pscore, String pCredit) { this.text = ptext; this.sequence = pseq; this.label = plabel; this.feedback = pfdbk; this.isCorrect = pcorr; this.partialCredit = pCredit; } // --mustansar for partial credit public String getPartialCredit() { return 
partialCredit; } public void setPartialCredit(String pCredit) { this.partialCredit = pCredit; } public void validatePartialCredit(FacesContext context, UIComponent toValidate,Object value){ Integer pCredit = null; boolean isValid = true; if ("0.0".equals(value.toString())) { pCredit = 0; } else { try { pCredit = Integer.parseInt(value.toString()); } catch (NumberFormatException e) { isValid = false; } } if(isValid && (pCredit==null || pCredit<0 || pCredit>99 )){ isValid = false; } if (!isValid) { ((UIInput)toValidate).setValid(false); FacesMessage message=new FacesMessage(); message.setSeverity(FacesMessage.SEVERITY_ERROR); String summary=ContextUtil.getLocalizedString("org.sakaiproject.tool.assessment.bundle.AuthorMessages","partial_credit_limit_summary"); String detail =ContextUtil.getLocalizedString("org.sakaiproject.tool.assessment.bundle.AuthorMessages","partial_credit_limit_detail"); message.setSummary(summary) ; message.setDetail(detail); context.addMessage(toValidate.getClientId(context), message); } } }
SAM-787 git-svn-id: 574bb14f304dbe16c01253ed6697ea749724087f@92377 66ffb92e-73f9-0310-93c1-f5514f145a0a
samigo/samigo-app/src/java/org/sakaiproject/tool/assessment/ui/bean/author/AnswerBean.java
SAM-787
<ide><path>amigo/samigo-app/src/java/org/sakaiproject/tool/assessment/ui/bean/author/AnswerBean.java <ide> } <ide> <ide> public String getText() { <add> if (text == null) { <add> return text; <add> } <ide> String status; <ide> if (text.equalsIgnoreCase("Agree")) <ide> status = "st_agree";
JavaScript
mit
eb1cf4a2a6fdbfa9fb45c1d8a25862333b7902a2
0
ecomfe/veui,ecomfe/veui,ecomfe/veui,ecomfe/veui
import { isObject, startsWith, forEach } from 'lodash' import Vue from 'vue' import i18n from './i18n' export class ConfigManager { store = new Vue({ data: { store: {}, watchers: {} }, methods: { setConfig (obj, key, val, ns, override) { if (isObject(key)) { ns = val val = key Object.keys(val).forEach(k => { this.setConfig(obj, k, val[k], ns, override) }) return } if (typeof key !== 'string') { throw new Error('`Config key must be a string value.') } let k = ns ? `${ns}.${key}` : key if (!(k in obj) || override) { this.setConfigItem(obj, k, val) } }, setConfigItem (obj, key, val) { this.$set(obj, key, val) let relatedWatcherKeys = Object.keys(this.watchers).filter(k => startsWith(k, key) ) relatedWatcherKeys.forEach(watcherKey => this.unwatch(watcherKey)) this.transformValue(obj, key, null) }, transformValue (context, key, path) { if (context && (context._isVue || context._Ctor)) { return } let watcherKey = path ? `${path}.${key}` : key let val = context[key] let watcher = this.watchers[watcherKey] if (typeof val === 'string') { if (startsWith(val, '@@')) { let i18nKey = val.substring(2) if (watcher && watcher.key !== i18nKey) { // already watched another i18n key before, unwatch it watcher.unwatch() } this.watchers[watcherKey] = { key: i18nKey, unwatch: i18n.watch(i18nKey, val => { context[key] = val }) } context[key] = i18n.get(i18nKey) } } else if (isObject(val) || Array.isArray(val)) { // recursively replace pointers forEach(val, (_, k) => { this.transformValue(val, k, watcherKey) }) } }, unwatch (key) { let watcher = this.watchers[key] if (watcher) { watcher.unwatch() delete this.watchers[key] } }, set (key, val, ns) { this.setConfig(this.store, key, val, ns, true) }, defaults (key, val, ns) { this.setConfig(this.store, key, val, ns, false) }, get (key) { return this.store[key] } } }) set (key, val, ns) { this.store.set(key, val, ns) } defaults (key, val, ns) { this.store.defaults(key, val, ns) } get (path) { return this.store.get(path) } } export default new 
ConfigManager()
packages/veui/src/managers/config.js
import { isObject, startsWith, forEach } from 'lodash' import Vue from 'vue' import i18n from './i18n' export class ConfigManager { store = new Vue({ data: { store: {}, watchers: {} }, methods: { setConfig (obj, key, val, ns, override) { if (isObject(key)) { ns = val val = key Object.keys(val).forEach(k => { this.setConfig(obj, k, val[k], ns, override) }) return } if (typeof key !== 'string') { throw new Error('`Config key must be a string value.') } let k = ns ? `${ns}.${key}` : key if (!(k in obj) || override) { this.setConfigItem(obj, k, val) } }, setConfigItem (obj, key, val) { this.$set(obj, key, val) let relatedWatcherKeys = Object.keys(this.watchers).filter(k => startsWith(k, key) ) relatedWatcherKeys.forEach(watcherKey => this.unwatch(watcherKey)) this.transformValue(obj, key, null) }, transformValue (context, key, path) { let watcherKey = path ? `${path}.${key}` : key let val = context[key] let watcher = this.watchers[watcherKey] if (typeof val === 'string') { if (startsWith(val, '@@')) { let i18nKey = val.substring(2) if (watcher && watcher.key !== i18nKey) { // already watched another i18n key before, unwatch it watcher.unwatch() } this.watchers[watcherKey] = { key: i18nKey, unwatch: i18n.watch(i18nKey, val => { context[key] = val }) } context[key] = i18n.get(i18nKey) } } else if (isObject(val) || Array.isArray(val)) { // recursively replace pointers forEach(val, (_, k) => { this.transformValue(val, k, watcherKey) }) } }, unwatch (key) { let watcher = this.watchers[key] if (watcher) { watcher.unwatch() delete this.watchers[key] } }, set (key, val, ns) { this.setConfig(this.store, key, val, ns, true) }, defaults (key, val, ns) { this.setConfig(this.store, key, val, ns, false) }, get (key) { return this.store[key] } } }) set (key, val, ns) { this.store.set(key, val, ns) } defaults (key, val, ns) { this.store.defaults(key, val, ns) } get (path) { return this.store.get(path) } } export default new ConfigManager()
fix: quick fix for potential infinite recursion (when config data contains components)
packages/veui/src/managers/config.js
fix: quick fix for potential infinite recursion (when config data contains components)
<ide><path>ackages/veui/src/managers/config.js <ide> this.transformValue(obj, key, null) <ide> }, <ide> transformValue (context, key, path) { <add> if (context && (context._isVue || context._Ctor)) { <add> return <add> } <add> <ide> let watcherKey = path ? `${path}.${key}` : key <ide> let val = context[key] <ide>
Java
apache-2.0
e978837c23b96e2fdca9ae118b96f0853d44c50e
0
joukojo/cache-gcfilter
package net.yogocodes.gclistener.ehcache; import java.lang.management.ManagementFactory; import java.util.List; import javax.management.InstanceNotFoundException; import javax.management.MBeanServer; import javax.management.MalformedObjectNameException; import javax.management.Notification; import javax.management.NotificationListener; import javax.management.ObjectName; import javax.management.openmbean.CompositeData; import com.sun.management.GarbageCollectionNotificationInfo; import net.sf.ehcache.Cache; import net.sf.ehcache.CacheManager; import org.slf4j.Logger; import org.slf4j.LoggerFactory; // net.yogocodes.gclistener.ehcache.EhCacheGCListener public class EhCacheGCListener implements NotificationListener { private final Logger logger = LoggerFactory .getLogger(EhCacheGCListener.class); private boolean analyzeMemoryConsumption = false; public void handleNotification(Notification notification, Object handback) { String notificationType = notification.getType(); logger.trace("notificationType: {}", notificationType); if ("com.sun.management.gc.notification".equals(notificationType)) { // retrieve the garbage collection notification information final CompositeData cd = (CompositeData) notification.getUserData(); final GarbageCollectionNotificationInfo info = GarbageCollectionNotificationInfo .from(cd); logger.trace("{} : {} : {}", info.getGcName(), info.getGcAction(), info.getGcCause()); if ("end of major GC".equals(info.getGcAction())) { List<CacheManager> cacheManagers = CacheManager.ALL_CACHE_MANAGERS; for (final CacheManager cacheManager : cacheManagers) { final String[] cacheNames = cacheManager.getCacheNames(); for (final String cacheName : cacheNames) { final Cache cache = cacheManager.getCache(cacheName); if (isAnalyzeMemoryConsumption()) { long calculateInMemorySize = cache .calculateInMemorySize(); logger.info("{} : {} bytes [{}Mb]", cacheName, calculateInMemorySize, (calculateInMemorySize/1024/1024)); } else { logger.info(cache.toString()); } } } } 
} } public static void register() throws MalformedObjectNameException, InstanceNotFoundException { boolean isListeningDisabled = "true".equals(System.getProperty("gc.listener.disable")); if( isListeningDisabled ) { return; } EhCacheGCListener listener = new EhCacheGCListener(); boolean analyzeMemoryConsumption = System.getProperty("gc.listener.analyze.memory") != null; listener.setAnalyzeMemoryConsumption(analyzeMemoryConsumption); ObjectName gcName = new ObjectName( ManagementFactory.GARBAGE_COLLECTOR_MXBEAN_DOMAIN_TYPE + ",*"); MBeanServer server = ManagementFactory.getPlatformMBeanServer(); for (ObjectName name : server.queryNames(gcName, null)){ server.addNotificationListener(name, listener, null, null); } } public boolean isAnalyzeMemoryConsumption() { return analyzeMemoryConsumption; } public void setAnalyzeMemoryConsumption(boolean analyzeMemoryConsumption) { this.analyzeMemoryConsumption = analyzeMemoryConsumption; } }
ehcache-gc-listener/src/main/java/net/yogocodes/gclistener/ehcache/EhCacheGCListener.java
package net.yogocodes.gclistener.ehcache; import java.lang.management.ManagementFactory; import java.util.List; import javax.management.InstanceNotFoundException; import javax.management.MBeanServer; import javax.management.MalformedObjectNameException; import javax.management.Notification; import javax.management.NotificationListener; import javax.management.ObjectName; import javax.management.openmbean.CompositeData; import com.sun.management.GarbageCollectionNotificationInfo; import net.sf.ehcache.Cache; import net.sf.ehcache.CacheManager; import org.slf4j.Logger; import org.slf4j.LoggerFactory; // net.yogocodes.gclistener.ehcache.EhCacheGCListener public class EhCacheGCListener implements NotificationListener { private final Logger logger = LoggerFactory .getLogger(EhCacheGCListener.class); private boolean analyzeMemoryConsumption = false; public void handleNotification(Notification notification, Object handback) { String notificationType = notification.getType(); logger.trace("notificationType: {}", notificationType); if ("com.sun.management.gc.notification".equals(notificationType)) { // retrieve the garbage collection notification information final CompositeData cd = (CompositeData) notification.getUserData(); final GarbageCollectionNotificationInfo info = GarbageCollectionNotificationInfo .from(cd); logger.trace("{} : {} : {}", info.getGcName(), info.getGcAction(), info.getGcCause()); if ("end of major GC".equals(info.getGcAction())) { List<CacheManager> cacheManagers = CacheManager.ALL_CACHE_MANAGERS; for (final CacheManager cacheManager : cacheManagers) { final String[] cacheNames = cacheManager.getCacheNames(); for (final String cacheName : cacheNames) { final Cache cache = cacheManager.getCache(cacheName); if (isAnalyzeMemoryConsumption()) { long calculateInMemorySize = cache .calculateInMemorySize(); logger.info("{} : {} bytes [{}Mb]", cacheName, calculateInMemorySize, (calculateInMemorySize/1024/1024)); } else { logger.info(cache.toString()); } } } } 
} } public static void register() throws MalformedObjectNameException, InstanceNotFoundException { EhCacheGCListener listener = new EhCacheGCListener(); boolean analyzeMemoryConsumption = System.getProperty("gc.listener.analyze.memory") != null; listener.setAnalyzeMemoryConsumption(analyzeMemoryConsumption); ObjectName gcName = new ObjectName( ManagementFactory.GARBAGE_COLLECTOR_MXBEAN_DOMAIN_TYPE + ",*"); MBeanServer server = ManagementFactory.getPlatformMBeanServer(); for (ObjectName name : server.queryNames(gcName, null)){ server.addNotificationListener(name, listener, null, null); } } public boolean isAnalyzeMemoryConsumption() { return analyzeMemoryConsumption; } public void setAnalyzeMemoryConsumption(boolean analyzeMemoryConsumption) { this.analyzeMemoryConsumption = analyzeMemoryConsumption; } }
added jvm option to disable the listening
ehcache-gc-listener/src/main/java/net/yogocodes/gclistener/ehcache/EhCacheGCListener.java
added jvm option to disable the listening
<ide><path>hcache-gc-listener/src/main/java/net/yogocodes/gclistener/ehcache/EhCacheGCListener.java <ide> <ide> public static void register() throws MalformedObjectNameException, <ide> InstanceNotFoundException { <add> <add> boolean isListeningDisabled = "true".equals(System.getProperty("gc.listener.disable")); <add> if( isListeningDisabled ) { <add> return; <add> } <add> <ide> EhCacheGCListener listener = new EhCacheGCListener(); <del> <ide> boolean analyzeMemoryConsumption = System.getProperty("gc.listener.analyze.memory") != null; <ide> listener.setAnalyzeMemoryConsumption(analyzeMemoryConsumption); <ide>
Java
apache-2.0
1b310f58d683c809a942c827b2cd8e8eb4005bdd
0
macula-projects/macula-framework,macula-projects/macula-framework
/* * Copyright 2004-2019 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.maculaframework.boot.web.config.security; import org.maculaframework.boot.web.config.WebConfigProperties; import org.maculaframework.boot.web.filter.KaptchaAuthenticationFilter; import org.maculaframework.boot.web.security.access.vote.MaculaRoleVoter; import org.maculaframework.boot.web.security.web.access.interceptor.ActionFilterInvocationSecurityMetadataSource; import org.maculaframework.boot.web.security.web.access.interceptor.DelegatingFilterInvocationSecurityMetadataSource; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.security.access.vote.AbstractAccessDecisionManager; import org.springframework.security.config.annotation.ObjectPostProcessor; import org.springframework.security.config.annotation.web.builders.HttpSecurity; import org.springframework.security.config.annotation.web.builders.WebSecurity; import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity; import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter; import org.springframework.security.core.session.SessionRegistry; import org.springframework.security.web.access.intercept.FilterInvocationSecurityMetadataSource; import 
org.springframework.security.web.access.intercept.FilterSecurityInterceptor; import org.springframework.security.web.authentication.AuthenticationFailureHandler; import org.springframework.security.web.authentication.SimpleUrlAuthenticationFailureHandler; import org.springframework.security.web.authentication.UsernamePasswordAuthenticationFilter; import org.springframework.session.data.redis.RedisIndexedSessionRepository; import org.springframework.session.security.SpringSessionBackedSessionRegistry; /** * <p> * <b>WebSecurityConfig</b> Web Security配置文件 * </p> * * @author Rain * @since 2019-06-20 */ @Configuration @EnableWebSecurity public class WebSecurityConfig extends WebSecurityConfigurerAdapter { @Autowired private WebConfigProperties webConfigProperties; @Override public void configure(WebSecurity web) { web.ignoring() .regexMatchers(webConfigProperties.getIgnoringRegexPattern()); } @Override protected void configure(HttpSecurity http) throws Exception { http. authorizeRequests(authorizeRequests -> authorizeRequests // TODO 测试代码,未来需要删除 .mvcMatchers("/admin/index").hasRole("ADMIN") .withObjectPostProcessor(new ObjectPostProcessor<FilterSecurityInterceptor>() { @Override public <O extends FilterSecurityInterceptor> O postProcess(O o) { // 保留原来基于表达式的MetadataSource FilterInvocationSecurityMetadataSource expressionUrlMetadataSource = o.getSecurityMetadataSource(); o.setSecurityMetadataSource(new DelegatingFilterInvocationSecurityMetadataSource( actionFilterInvocationSecurityMetadataSource(), expressionUrlMetadataSource)); // 添加动态URL Voter AbstractAccessDecisionManager accessDecisionManager = (AbstractAccessDecisionManager) o.getAccessDecisionManager(); accessDecisionManager.getDecisionVoters().add(0, new MaculaRoleVoter()); return o; } }) ) .csrf(csrf -> csrf.ignoringRequestMatchers(request -> "XMLHttpRequest".equals(request.getHeader("X-Requested-With"))) ) .sessionManagement(sessionManagement -> sessionManagement 
.sessionAuthenticationFailureHandler(authenticationFailureHandler()) .maximumSessions(webConfigProperties.getMaximumSessions()) .sessionRegistry(getApplicationContext().getBean(SessionRegistry.class)) .expiredUrl(webConfigProperties.getExpiredUrl()) ) .logout(logout -> logout.permitAll() ) .formLogin(formLogin -> formLogin.permitAll().failureHandler(authenticationFailureHandler()) ) .oauth2ResourceServer(oauth2Rs -> oauth2Rs.opaqueToken() ) .addFilterBefore( new KaptchaAuthenticationFilter("/login", authenticationFailureHandler()), UsernamePasswordAuthenticationFilter.class ); } @Bean public FilterInvocationSecurityMetadataSource actionFilterInvocationSecurityMetadataSource() { return new ActionFilterInvocationSecurityMetadataSource(); } @Bean public SessionRegistry springSessionBackedSessionRegistry(RedisIndexedSessionRepository sessionRepository) { return new SpringSessionBackedSessionRegistry(sessionRepository); } @Bean public AuthenticationFailureHandler authenticationFailureHandler() { return new SimpleUrlAuthenticationFailureHandler(webConfigProperties.getFailureUrl()); } }
macula-boot/macula-boot-web/src/main/java/org/maculaframework/boot/web/config/security/WebSecurityConfig.java
/* * Copyright 2004-2019 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.maculaframework.boot.web.config.security; import org.maculaframework.boot.web.config.WebConfigProperties; import org.maculaframework.boot.web.filter.KaptchaAuthenticationFilter; import org.maculaframework.boot.web.security.access.vote.MaculaRoleVoter; import org.maculaframework.boot.web.security.web.access.interceptor.ActionFilterInvocationSecurityMetadataSource; import org.maculaframework.boot.web.security.web.access.interceptor.DelegatingFilterInvocationSecurityMetadataSource; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.security.access.vote.AbstractAccessDecisionManager; import org.springframework.security.config.annotation.ObjectPostProcessor; import org.springframework.security.config.annotation.web.builders.HttpSecurity; import org.springframework.security.config.annotation.web.builders.WebSecurity; import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity; import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter; import org.springframework.security.core.session.SessionRegistry; import org.springframework.security.web.access.intercept.FilterInvocationSecurityMetadataSource; import 
org.springframework.security.web.access.intercept.FilterSecurityInterceptor; import org.springframework.security.web.authentication.AuthenticationFailureHandler; import org.springframework.security.web.authentication.SimpleUrlAuthenticationFailureHandler; import org.springframework.security.web.authentication.UsernamePasswordAuthenticationFilter; import org.springframework.session.data.redis.RedisIndexedSessionRepository; import org.springframework.session.security.SpringSessionBackedSessionRegistry; /** * <p> * <b>WebSecurityConfig</b> Web Security配置文件 * </p> * * @author Rain * @since 2019-06-20 */ @Configuration @EnableWebSecurity public class WebSecurityConfig extends WebSecurityConfigurerAdapter { @Autowired private WebConfigProperties webConfigProperties; @Override public void configure(WebSecurity web) { web.ignoring() .regexMatchers(webConfigProperties.getIgnoringRegexPattern()); } @Override protected void configure(HttpSecurity http) throws Exception { http.authorizeRequests() // TODO 测试代码,未来需要删除 .mvcMatchers("/admin/index").hasRole("ADMIN") .withObjectPostProcessor(new ObjectPostProcessor<FilterSecurityInterceptor>() { @Override public <O extends FilterSecurityInterceptor> O postProcess(O o) { // 保留原来基于表达式的MetadataSource FilterInvocationSecurityMetadataSource expressionUrlMetadataSource = o.getSecurityMetadataSource(); o.setSecurityMetadataSource(new DelegatingFilterInvocationSecurityMetadataSource( actionFilterInvocationSecurityMetadataSource(), expressionUrlMetadataSource)); // 添加动态URL Voter AbstractAccessDecisionManager accessDecisionManager = (AbstractAccessDecisionManager)o.getAccessDecisionManager(); accessDecisionManager.getDecisionVoters().add(0, new MaculaRoleVoter()); return o; } }) .and() .csrf() .ignoringRequestMatchers(request -> "XMLHttpRequest".equals(request.getHeader("X-Requested-With"))) .and() .sessionManagement() .sessionAuthenticationFailureHandler(authenticationFailureHandler()) 
.maximumSessions(webConfigProperties.getMaximumSessions()) .sessionRegistry(getApplicationContext().getBean(SessionRegistry.class)) .expiredUrl(webConfigProperties.getExpiredUrl()).and() .and() .logout().permitAll() .and() .formLogin().permitAll() .failureHandler(authenticationFailureHandler()) .and() .addFilterBefore(new KaptchaAuthenticationFilter("/login", authenticationFailureHandler()), UsernamePasswordAuthenticationFilter.class); } @Bean public FilterInvocationSecurityMetadataSource actionFilterInvocationSecurityMetadataSource() { return new ActionFilterInvocationSecurityMetadataSource(); } @Bean public SessionRegistry springSessionBackedSessionRegistry(RedisIndexedSessionRepository sessionRepository) { return new SpringSessionBackedSessionRegistry(sessionRepository); } @Bean public AuthenticationFailureHandler authenticationFailureHandler() { return new SimpleUrlAuthenticationFailureHandler(webConfigProperties.getFailureUrl()); } }
升级到spring boot 2.2.0
macula-boot/macula-boot-web/src/main/java/org/maculaframework/boot/web/config/security/WebSecurityConfig.java
升级到spring boot 2.2.0
<ide><path>acula-boot/macula-boot-web/src/main/java/org/maculaframework/boot/web/config/security/WebSecurityConfig.java <ide> @Override <ide> public void configure(WebSecurity web) { <ide> web.ignoring() <del> .regexMatchers(webConfigProperties.getIgnoringRegexPattern()); <add> .regexMatchers(webConfigProperties.getIgnoringRegexPattern()); <ide> } <ide> <ide> @Override <ide> protected void configure(HttpSecurity http) throws Exception { <del> http.authorizeRequests() <del> // TODO 测试代码,未来需要删除 <del> .mvcMatchers("/admin/index").hasRole("ADMIN") <del> .withObjectPostProcessor(new ObjectPostProcessor<FilterSecurityInterceptor>() { <del> @Override <del> public <O extends FilterSecurityInterceptor> O postProcess(O o) { <add> http. <add> authorizeRequests(authorizeRequests -> <add> authorizeRequests <add> // TODO 测试代码,未来需要删除 <add> .mvcMatchers("/admin/index").hasRole("ADMIN") <add> .withObjectPostProcessor(new ObjectPostProcessor<FilterSecurityInterceptor>() { <add> @Override <add> public <O extends FilterSecurityInterceptor> O postProcess(O o) { <ide> <del> // 保留原来基于表达式的MetadataSource <del> FilterInvocationSecurityMetadataSource expressionUrlMetadataSource = o.getSecurityMetadataSource(); <add> // 保留原来基于表达式的MetadataSource <add> FilterInvocationSecurityMetadataSource expressionUrlMetadataSource = o.getSecurityMetadataSource(); <ide> <del> o.setSecurityMetadataSource(new DelegatingFilterInvocationSecurityMetadataSource( <del> actionFilterInvocationSecurityMetadataSource(), expressionUrlMetadataSource)); <add> o.setSecurityMetadataSource(new DelegatingFilterInvocationSecurityMetadataSource( <add> actionFilterInvocationSecurityMetadataSource(), expressionUrlMetadataSource)); <ide> <del> // 添加动态URL Voter <del> AbstractAccessDecisionManager accessDecisionManager = (AbstractAccessDecisionManager)o.getAccessDecisionManager(); <del> accessDecisionManager.getDecisionVoters().add(0, new MaculaRoleVoter()); <add> // 添加动态URL Voter <add> AbstractAccessDecisionManager 
accessDecisionManager = (AbstractAccessDecisionManager) o.getAccessDecisionManager(); <add> accessDecisionManager.getDecisionVoters().add(0, new MaculaRoleVoter()); <ide> <del> return o; <del> } <del> }) <del> .and() <del> .csrf() <del> .ignoringRequestMatchers(request -> "XMLHttpRequest".equals(request.getHeader("X-Requested-With"))) <del> .and() <del> .sessionManagement() <del> .sessionAuthenticationFailureHandler(authenticationFailureHandler()) <del> .maximumSessions(webConfigProperties.getMaximumSessions()) <del> .sessionRegistry(getApplicationContext().getBean(SessionRegistry.class)) <del> .expiredUrl(webConfigProperties.getExpiredUrl()).and() <del> .and() <del> .logout().permitAll() <del> .and() <del> .formLogin().permitAll() <del> .failureHandler(authenticationFailureHandler()) <del> .and() <del> .addFilterBefore(new KaptchaAuthenticationFilter("/login", authenticationFailureHandler()), <del> UsernamePasswordAuthenticationFilter.class); <add> return o; <add> } <add> }) <add> ) <add> .csrf(csrf -> <add> csrf.ignoringRequestMatchers(request -> "XMLHttpRequest".equals(request.getHeader("X-Requested-With"))) <add> ) <add> .sessionManagement(sessionManagement -> <add> sessionManagement <add> .sessionAuthenticationFailureHandler(authenticationFailureHandler()) <add> .maximumSessions(webConfigProperties.getMaximumSessions()) <add> .sessionRegistry(getApplicationContext().getBean(SessionRegistry.class)) <add> .expiredUrl(webConfigProperties.getExpiredUrl()) <add> ) <add> .logout(logout -> <add> logout.permitAll() <add> ) <add> .formLogin(formLogin -> <add> formLogin.permitAll().failureHandler(authenticationFailureHandler()) <add> ) <add> .oauth2ResourceServer(oauth2Rs -> <add> oauth2Rs.opaqueToken() <add> ) <add> .addFilterBefore( <add> new KaptchaAuthenticationFilter("/login", authenticationFailureHandler()), UsernamePasswordAuthenticationFilter.class <add> ); <ide> <ide> <ide> }
Java
bsd-3-clause
54e95e0fe986ed78dd643031a757e0b2eb8b9029
0
reines/oversim-manager,reines/oversim-manager
package com.jamierf.oversim.manager; import com.jamierf.oversim.manager.runnable.SimulationData; import com.jamierf.oversim.manager.runnable.SimulationRun; import com.jamierf.oversim.manager.util.DirectoryArchiver; import org.apache.commons.configuration.Configuration; import org.apache.commons.configuration.ConfigurationException; import org.apache.commons.configuration.ConversionException; import org.apache.commons.lang.StringUtils; import java.io.*; import java.util.*; import java.util.regex.Matcher; import java.util.regex.Pattern; public class Manager { protected final File overSim; protected final File workingDir; protected final File resultRootDir; protected final String configFile; protected final List<SimulationConfig> configs; protected final Map<String, String> globalParameters; protected final List<SimulationThread> threads; protected final List<Runnable> queue; protected final String[] wantedScalars; protected final boolean deleteData; protected final StringBuilder buffer; protected final boolean shuffle; protected boolean paused; protected DirectoryArchiver archiver; protected long startTime; protected boolean finished; protected int pendingRuns; public Manager(Configuration config) throws IOException, ConfigurationException { // Count how many available cores we should use (max) int maxThreads = Runtime.getRuntime().availableProcessors(); if (config.containsKey("simulation.max-threads")) { try { int overrideCoreCount = config.getInt("simulation.max-threads"); if (overrideCoreCount > maxThreads) throw new RuntimeException("max threads (" + overrideCoreCount + ") in config is higher than phyiscal core count (" + maxThreads + "). This is a bad idea!"); maxThreads = overrideCoreCount; } catch (ConversionException e) { throw new ConfigurationException("Malformed configuration, simulation.max-threads must be an integer!"); } } // Should we shuffle runs after adding them? 
shuffle = config.getBoolean("simulation.shuffle-runs", true); buffer = new StringBuilder(); configs = new LinkedList<SimulationConfig>(); // Fetch a list of scalars that we care about, then quote them so they are usable inside a regex wantedScalars = config.getStringArray("data.scalar"); for (int i = 0;i < wantedScalars.length;i++) wantedScalars[i] = Pattern.quote(wantedScalars[i]); // TODO: Load in any override parameters globalParameters = new HashMap<String, String>(); // Set the working directory and config file workingDir = new File(config.getString("simulation.working-dir", ".")); configFile = config.getString("simulation.config-file", "omnetpp.ini"); archiver = null; if (config.getBoolean("data.compress", false)) { try { DirectoryArchiver.ArchiveType type = DirectoryArchiver.ArchiveType.valueOf(config.getString("data.compression-type", "TAR_GZIP")); archiver = new DirectoryArchiver(type); } catch (IllegalArgumentException e) { throw new ConfigurationException("Malformed configuration, data.compression-type must be one of: " + StringUtils.join(DirectoryArchiver.ArchiveType.values(), ", ") + "."); } } deleteData = config.getBoolean("data.delete", false); finished = false; resultRootDir = new File(workingDir, globalParameters.containsKey("result-dir") ? 
globalParameters.get("result-dir") : "results"); if (!resultRootDir.isDirectory()) throw new ConfigurationException("Invalid result directory: " + resultRootDir.getCanonicalPath()); final String executableName = config.getString("simulation.executable-name", "OverSim"); // Find OverSim - attempt to use the RELEASE version by default final File release = new File(workingDir, "../out/gcc-release/src/" + executableName); final File debug = new File(workingDir, "../out/gcc-debug/src/" + executableName); final File link = new File(workingDir, "../src/" + executableName); if (release.exists()) { this.println("Using OverSim in RELEASE mode."); overSim = release; } else if (debug.exists()) { this.println("Using OverSim in DEBUG mode."); overSim = debug; } else if (link.exists()) overSim = link; else throw new FileNotFoundException("Unable to locate OverSim executable."); threads = new ArrayList<SimulationThread>(maxThreads); queue = new ArrayList<Runnable>(); pendingRuns = 0; paused = false; // Create threads for (int i = 0;i < maxThreads;i++) { final SimulationThread thread = new SimulationThread(this); threads.add(thread); } this.println("Initialized " + threads.size() + " threads."); } public synchronized void setPaused(boolean paused) { this.paused = paused; } public boolean isPaused() { return paused; } public synchronized void addRunConfig(String configName, String id) throws IOException { final int totalRunCount = this.countRuns(configName); // Fetch the total run count if (totalRunCount == 0) throw new RuntimeException("Invalid config name, 0 runs found."); final SimulationConfig config = new SimulationConfig(configFile, configName, resultRootDir, id, globalParameters, totalRunCount); pendingRuns += totalRunCount; // Create the queue of simulation runs for (int i = 0;i < totalRunCount;i++) { final SimulationRun run = new SimulationRun(i, workingDir, overSim, config); if (run.resultsExist()) { this.completed(run); } // Only queue the run if the results don't already 
exist else { queue.add(run); } } // Shuffle the queue to help prevent bunching of memory intensive configurations if (shuffle) { Collections.shuffle(queue); } this.println("Added configuration: " + config + " with " + totalRunCount + " runs"); this.println("Result dir: " + config.getResultDir().getCanonicalPath()); this.println("Pending: " + pendingRuns + " runs"); this.println("Queue size: " + queue.size()); configs.add(config); this.notifyAll(); } public synchronized void addDataConfig(String configName, String id) throws IOException { final SimulationConfig config = new SimulationConfig(configFile, configName, resultRootDir, id); final FilenameFilter filter = new FilenameFilter() { @Override public boolean accept(File dir, String name) { return name.endsWith(".sca"); } }; final String[] files = config.getResultDir().list(filter); final Pattern pattern = Pattern.compile("^" + Pattern.quote(configName) + "-(\\d+)\\.sca$"); // Create the queue of simulation data for (String file : files) { final Matcher m = pattern.matcher(file); if (!m.matches()) { continue; } final int i = Integer.parseInt(m.group(1)); final SimulationData data = new SimulationData(i, wantedScalars, config); queue.add(data); config.pendingRuns++; } pendingRuns += config.pendingRuns; // Shuffle the queue to help prevent bunching of memory intensive configurations if (shuffle) { Collections.shuffle(queue); } this.println("Added result: " + config + " with " + config.completedRuns + " results."); this.println("Result dir: " + config.getResultDir().getCanonicalPath()); this.println("Queue size now: " + queue.size()); configs.add(config); this.notifyAll(); } protected int countRuns(String configName) throws IOException { final List<String> command = new LinkedList<String>(); command.add(overSim.getCanonicalPath()); command.add("-f" + configFile); command.add("-x" + configName); final Process process = new ProcessBuilder(command).directory(workingDir).start(); BufferedReader in = null; int runs = 0; 
try { in = new BufferedReader(new InputStreamReader(process.getInputStream())); final Pattern p = Pattern.compile("Number of runs: (\\d+)"); for (String line;(line = in.readLine()) != null;) { final Matcher m = p.matcher(line); if (m.find()) { runs = Integer.parseInt(m.group(1)); break; } } } finally { if (in != null) in.close(); } return runs; } public synchronized void start() throws IOException, InterruptedException { if (startTime > 0) throw new RuntimeException("This manager has already been started."); if (queue.isEmpty()) throw new RuntimeException("Queue is empty, nothing to do."); startTime = System.currentTimeMillis(); // Start all our threads for (SimulationThread thread : threads) { thread.start(); } while (!finished) { this.wait(); } this.println("All runs completed, terminating."); this.shutdown(); } public synchronized Runnable poll() throws InterruptedException { while (queue.isEmpty()) this.wait(); return queue.remove(0); } public synchronized void started(SimulationThread thread, Runnable runnable) { this.println(thread + " starting " + runnable + ". 
(" + queue.size() + " left in queue)"); } public synchronized void completed(Runnable runnable) { SimulationConfig config = null; if (runnable instanceof SimulationRun) { final SimulationRun run = (SimulationRun) runnable; config = run.getConfig(); config.pendingRuns--; pendingRuns--; config.completedRuns++; // Queue a data processing instance for this run queue.add(new SimulationData(run.getRunId(), wantedScalars, run.getConfig())); this.notifyAll(); config.pendingRuns++; pendingRuns++; } else if (runnable instanceof SimulationData) { config = ((SimulationData) runnable).getConfig(); config.pendingRuns--; pendingRuns--; } if (config != null) { checkForCompletion(config); } } public synchronized void failed(SimulationThread thread, Runnable runnable) { this.println(thread + " failed " + runnable); SimulationConfig config = null; if (runnable instanceof SimulationRun) { final SimulationRun run = (SimulationRun) runnable; config = run.getConfig(); config.pendingRuns--; pendingRuns--; config.failedRuns++; } else if (runnable instanceof SimulationData) { config = ((SimulationData) runnable).getConfig(); config.pendingRuns--; pendingRuns--; } if (config != null) checkForCompletion(config); } protected synchronized void checkForCompletion(SimulationConfig config) { if (config.pendingRuns == 0) { try { config.processData(this, resultRootDir, archiver, deleteData); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } } if (pendingRuns == 0) { finished = true; this.notifyAll(); } } public String getBuffer() { return buffer.toString().trim(); } public synchronized final void println(Object o) { final String line = o.toString(); System.out.println(line); buffer.append(line); buffer.append('\n'); } public synchronized void shutdown() { this.println("Shutdown requested."); System.exit(0); } }
src/main/java/com/jamierf/oversim/manager/Manager.java
package com.jamierf.oversim.manager; import com.jamierf.oversim.manager.runnable.SimulationData; import com.jamierf.oversim.manager.runnable.SimulationRun; import com.jamierf.oversim.manager.util.DirectoryArchiver; import org.apache.commons.configuration.Configuration; import org.apache.commons.configuration.ConfigurationException; import org.apache.commons.configuration.ConversionException; import org.apache.commons.lang.StringUtils; import java.io.*; import java.util.*; import java.util.regex.Matcher; import java.util.regex.Pattern; public class Manager { protected final File overSim; protected final File workingDir; protected final File resultRootDir; protected final String configFile; protected final List<SimulationConfig> configs; protected final Map<String, String> globalParameters; protected final List<SimulationThread> threads; protected final List<Runnable> queue; protected final String[] wantedScalars; protected final boolean deleteData; protected final StringBuilder buffer; protected final boolean shuffle; protected boolean paused; protected DirectoryArchiver archiver; protected long startTime; protected boolean finished; protected int pendingRuns; public Manager(Configuration config) throws IOException, ConfigurationException { // Count how many available cores we should use (max) int maxThreads = Runtime.getRuntime().availableProcessors(); if (config.containsKey("simulation.max-threads")) { try { int overrideCoreCount = config.getInt("simulation.max-threads"); if (overrideCoreCount > maxThreads) throw new RuntimeException("max threads (" + overrideCoreCount + ") in config is higher than phyiscal core count (" + maxThreads + "). This is a bad idea!"); maxThreads = overrideCoreCount; } catch (ConversionException e) { throw new ConfigurationException("Malformed configuration, simulation.max-threads must be an integer!"); } } // Should we shuffle runs after adding them? 
shuffle = config.getBoolean("simulation.shuffle-runs", true); buffer = new StringBuilder(); configs = new LinkedList<SimulationConfig>(); // Fetch a list of scalars that we care about, then quote them so they are usable inside a regex wantedScalars = config.getStringArray("data.scalar"); for (int i = 0;i < wantedScalars.length;i++) wantedScalars[i] = Pattern.quote(wantedScalars[i]); // TODO: Load in any override parameters globalParameters = new HashMap<String, String>(); // Set the working directory and config file workingDir = new File(config.getString("simulation.working-dir", ".")); configFile = config.getString("simulation.config-file", "omnetpp.ini"); archiver = null; if (config.getBoolean("data.compress", false)) { try { DirectoryArchiver.ArchiveType type = DirectoryArchiver.ArchiveType.valueOf(config.getString("data.compression-type", "TAR_GZIP")); archiver = new DirectoryArchiver(type); } catch (IllegalArgumentException e) { throw new ConfigurationException("Malformed configuration, data.compression-type must be one of: " + StringUtils.join(DirectoryArchiver.ArchiveType.values(), ", ") + "."); } } deleteData = config.getBoolean("data.delete", false); finished = false; resultRootDir = new File(workingDir, globalParameters.containsKey("result-dir") ? 
globalParameters.get("result-dir") : "results"); if (!resultRootDir.isDirectory()) throw new ConfigurationException("Invalid result directory: " + resultRootDir.getCanonicalPath()); final String executableName = config.getString("simulation.executable-name", "OverSim"); // Find OverSim - attempt to use the RELEASE version by default final File release = new File(workingDir, "../out/gcc-release/src/" + executableName); final File debug = new File(workingDir, "../out/gcc-debug/src/" + executableName); final File link = new File(workingDir, "../src/" + executableName); if (release.exists()) { this.println("Using OverSim in RELEASE mode."); overSim = release; } else if (debug.exists()) { this.println("Using OverSim in DEBUG mode."); overSim = debug; } else if (link.exists()) overSim = link; else throw new FileNotFoundException("Unable to locate OverSim executable."); threads = new ArrayList<SimulationThread>(maxThreads); queue = new ArrayList<Runnable>(); pendingRuns = 0; paused = false; // Create threads for (int i = 0;i < maxThreads;i++) { final SimulationThread thread = new SimulationThread(this); threads.add(thread); } this.println("Initialized " + threads.size() + " threads."); } public synchronized void setPaused(boolean paused) { this.paused = paused; } public boolean isPaused() { return paused; } public synchronized void addRunConfig(String configName, String id) throws IOException { final int totalRunCount = this.countRuns(configName); // Fetch the total run count if (totalRunCount == 0) throw new RuntimeException("Invalid config name, 0 runs found."); final SimulationConfig config = new SimulationConfig(configFile, configName, resultRootDir, id, globalParameters, totalRunCount); pendingRuns += totalRunCount; // Create the queue of simulation runs for (int i = 0;i < totalRunCount;i++) { final SimulationRun run = new SimulationRun(i, workingDir, overSim, config); if (run.resultsExist()) { this.completed(run); } // Only queue the run if the results don't already 
exist else { queue.add(run); } } // Shuffle the queue to help prevent bunching of memory intensive configurations if (shuffle) { Collections.shuffle(queue); } this.println("Added configuration: " + config + " with " + totalRunCount + " runs"); this.println("Result dir: " + config.getResultDir().getCanonicalPath()); this.println("Pending: " + pendingRuns + " runs"); this.println("Queue size: " + queue.size()); configs.add(config); this.notifyAll(); } public synchronized void addDataConfig(String configName, String id) throws IOException { final SimulationConfig config = new SimulationConfig(configFile, configName, resultRootDir, id); final FilenameFilter filter = new FilenameFilter() { @Override public boolean accept(File dir, String name) { return name.endsWith(".sca"); } }; final String[] files = config.getResultDir().list(filter); final Pattern pattern = Pattern.compile("^" + Pattern.quote(configName) + "-(\\d+)\\.sca$"); // Create the queue of simulation data for (String file : files) { final Matcher m = pattern.matcher(file); if (!m.matches()) { continue; } final int i = Integer.parseInt(m.group(1)); final SimulationData data = new SimulationData(i, wantedScalars, config); queue.add(data); config.pendingRuns++; } pendingRuns += config.pendingRuns; // Shuffle the queue to help prevent bunching of memory intensive configurations if (shuffle) { Collections.shuffle(queue); } this.println("Added result: " + config + " with " + config.completedRuns + " results."); this.println("Result dir: " + config.getResultDir().getCanonicalPath()); this.println("Queue size now: " + queue.size()); configs.add(config); this.notifyAll(); } protected int countRuns(String configName) throws IOException { final List<String> command = new LinkedList<String>(); command.add(overSim.getCanonicalPath()); command.add("-f" + configFile); command.add("-x" + configName); final Process process = new ProcessBuilder(command).directory(workingDir).start(); BufferedReader in = null; int runs = 0; 
try { in = new BufferedReader(new InputStreamReader(process.getInputStream())); final Pattern p = Pattern.compile("Number of runs: (\\d+)"); for (String line;(line = in.readLine()) != null;) { final Matcher m = p.matcher(line); if (m.find()) { runs = Integer.parseInt(m.group(1)); break; } } } finally { if (in != null) in.close(); } return runs; } public synchronized void start() throws IOException, InterruptedException { if (startTime > 0) throw new RuntimeException("This manager has already been started."); if (queue.isEmpty()) throw new RuntimeException("Queue is empty, nothing to do."); startTime = System.currentTimeMillis(); // Start all our threads for (SimulationThread thread : threads) { thread.start(); } while (!finished) { this.wait(); } this.println("All runs completed, terminating."); this.shutdown(); } public synchronized Runnable poll() throws InterruptedException { while (queue.isEmpty()) this.wait(); return queue.remove(0); } public synchronized void started(SimulationThread thread, Runnable runnable) { this.println(thread + " starting " + runnable + "."); } public synchronized void completed(Runnable runnable) { SimulationConfig config = null; if (runnable instanceof SimulationRun) { final SimulationRun run = (SimulationRun) runnable; config = run.getConfig(); config.pendingRuns--; pendingRuns--; config.completedRuns++; // Queue a data processing instance for this run queue.add(new SimulationData(run.getRunId(), wantedScalars, run.getConfig())); this.notifyAll(); config.pendingRuns++; pendingRuns++; } else if (runnable instanceof SimulationData) { config = ((SimulationData) runnable).getConfig(); config.pendingRuns--; pendingRuns--; } if (config != null) { checkForCompletion(config); } } public synchronized void failed(SimulationThread thread, Runnable runnable) { this.println(thread + " failed " + runnable); SimulationConfig config = null; if (runnable instanceof SimulationRun) { final SimulationRun run = (SimulationRun) runnable; config = 
run.getConfig(); config.pendingRuns--; pendingRuns--; config.failedRuns++; } else if (runnable instanceof SimulationData) { config = ((SimulationData) runnable).getConfig(); config.pendingRuns--; pendingRuns--; } if (config != null) checkForCompletion(config); } protected synchronized void checkForCompletion(SimulationConfig config) { if (config.pendingRuns == 0) { try { config.processData(this, resultRootDir, archiver, deleteData); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } } if (pendingRuns == 0) { finished = true; this.notifyAll(); } } public String getBuffer() { return buffer.toString().trim(); } public synchronized final void println(Object o) { final String line = o.toString(); System.out.println(line); buffer.append(line); buffer.append('\n'); } public synchronized void shutdown() { this.println("Shutdown requested."); System.exit(0); } }
Adding queue size debugging
src/main/java/com/jamierf/oversim/manager/Manager.java
Adding queue size debugging
<ide><path>rc/main/java/com/jamierf/oversim/manager/Manager.java <ide> } <ide> <ide> public synchronized void started(SimulationThread thread, Runnable runnable) { <del> this.println(thread + " starting " + runnable + "."); <add> this.println(thread + " starting " + runnable + ". (" + queue.size() + " left in queue)"); <ide> } <ide> <ide> public synchronized void completed(Runnable runnable) {
JavaScript
bsd-3-clause
af5c7a9c3b44da1a64517bd8d5e4a07df32d20ba
0
leanprover/ace,JaroslavMoravec/ace,AaronZhangL/ace,vgrish/ace,ggalancs/ace,patope/ace,multiarc/ace,mrlong/ace,skylarkcob/ace,Eynaliyev/ace,brandonb927/ace,erezarnon/ace,JanvanCasteren/ace,jjatria/ace,mrlong/ace,multiarc/ace,skynetbot/ace,ylian/ace,shidao-fm/ace,mrlong/ace,AaronZhangL/ace,patope/ace,STRd6/ace,bcuff/ace,paladox/ace,sekcheong/ace,ThiagoGarciaAlves/ace,mlajtos/ace,ektx/ace,patope/ace,brandonb927/ace,EthanK28/ace,luvegt/ace,Amrit01/ace,likitha/ace,cintiamh/ace,thsunmy/ace,Amrit01/ace,makelivedotnet/ace,MarkBandilla/ace,itsff/ace,animack/ace,397239396/ace,brandonb927/ace,cintiamh/ace,thsunmy/ace,tbutter/ace,derekja/ace,tobyreynold/ace,JanvanCasteren/ace,XCage15/ace,Stackato-Apps/ace,makelivedotnet/ace,tbutter/ace,ggalancs/ace,tobyreynold/ace,eranwitkon/ace,Amrit01/ace,skylarkcob/ace,ggalancs/ace,PatrickTo/ace,leanprover/ace,2947721120/ACE,ggalancs/ace,ylian/ace,thdoan/ace,luvegt/ace,dudb/ace,thsunmy/ace,multiarc/ace,derekja/ace,vgrish/ace,AaronZhangL/ace,makelivedotnet/ace,kevinkmp/ace,Stackato-Apps/ace,shamim8888/ace,kevinkmp/ace,leanprover/ace,WigWagCo/ace,JanvanCasteren/ace,PatrickTo/ace,fjakobs/ace,patope/ace,tobyreynold/ace,dudb/ace,ThiagoGarciaAlves/ace,bcuff/ace,STRd6/ace,cintiamh/ace,qweasd1/ace,tbutter/ace,mrlong/ace,cintiamh/ace,jabgibson/ace,mlajtos/ace,kevinkmp/ace,Eynaliyev/ace,patope/ace,paladox/ace,animack/ace,brandonb927/ace,durai145/ace,WigWagCo/ace,luvegt/ace,patope/ace,zpzgone/ace,STRd6/ace,Mabbu9/ace,skylarkcob/ace,jabgibson/ace,Amrit01/ace,tbutter/ace,ThiagoGarciaAlves/ace,qweasd1/ace,erezarnon/ace,JaroslavMoravec/ace,shamim8888/ace,kevinkmp/ace,paladox/ace,thdoan/ace,CAAL/ace,paladox/ace,sbusso/ace,tranch-xiao/ace,brandonb927/ace,mkosieradzki/ace,thdoan/ace,IChocolateKapa/ace,Nicholas-Westley/ace,thdoan/ace,qweasd1/ace,eranwitkon/ace,sekcheong/ace,mlajtos/ace,thdoan/ace,STRd6/ace,thdoan/ace,WigWagCo/ace,dudb/ace,bradparks/ace,skynetbot/ace,thdoan/ace,likitha/ace,Mabbu9/ace,ylian/ace,enricoberti/ace,EthanK28/ace,shamim8888/ace,tobyrey
nold/ace,mlajtos/ace,skynetbot/ace,brandonb927/ace,qweasd1/ace,tranch-xiao/ace,sbusso/ace,derekja/ace,397239396/ace,sbusso/ace,taylorleh/ace,fjakobs/ace,itsff/ace,ggalancs/ace,vgrish/ace,thdoan/ace,AaronZhangL/ace,Eynaliyev/ace,jabgibson/ace,kevinkmp/ace,thdoan/ace,ThiagoGarciaAlves/ace,Gottox/ace,ThiagoGarciaAlves/ace,enricoberti/ace,erezarnon/ace,dudb/ace,likitha/ace,shidao-fm/ace,bradparks/ace,XCage15/ace,paladox/ace,vgrish/ace,zpzgone/ace,fjakobs/ace,likitha/ace,397239396/ace,sbusso/ace,itsff/ace,jjatria/ace,Stackato-Apps/ace,likitha/ace,Mabbu9/ace,durai145/ace,thsunmy/ace,tobyreynold/ace,bradparks/ace,bradparks/ace,thdoan/ace,ThiagoGarciaAlves/ace,2947721120/ACE,AaronZhangL/ace,likitha/ace,leanprover/ace,2947721120/ACE,itsff/ace,JaroslavMoravec/ace,397239396/ace,soonhokong/lean-ace,397239396/ace,itsff/ace,sekcheong/ace,2947721120/ACE,STRd6/ace,vgrish/ace,shidao-fm/ace,mdinger/ace,jjatria/ace,CAAL/ace,Nicholas-Westley/ace,erezarnon/ace,likitha/ace,jabgibson/ace,mdinger/ace,itsff/ace,zpzgone/ace,durai145/ace,durai145/ace,ThiagoGarciaAlves/ace,bcuff/ace,Mabbu9/ace,EthanK28/ace,Eynaliyev/ace,kevinkmp/ace,multiarc/ace,WigWagCo/ace,thsunmy/ace,IChocolateKapa/ace,sekcheong/ace,sekcheong/ace,patope/ace,2947721120/ACE,ylian/ace,alexbarnsley/ace,patope/ace,IChocolateKapa/ace,Amrit01/ace,tranch-xiao/ace,erezarnon/ace,mkosieradzki/ace,2947721120/ACE,eranwitkon/ace,brandonb927/ace,mkosieradzki/ace,bcuff/ace,brandonb927/ace,Stackato-Apps/ace,XCage15/ace,dudb/ace,JanvanCasteren/ace,XCage15/ace,EthanK28/ace,itsff/ace,JanvanCasteren/ace,kevinkmp/ace,luvegt/ace,JaroslavMoravec/ace,STRd6/ace,vgrish/ace,brandonb927/ace,Amrit01/ace,dudb/ace,ylian/ace,skylarkcob/ace,thsunmy/ace,itsff/ace,makelivedotnet/ace,Mabbu9/ace,2947721120/ACE,mlajtos/ace,eranwitkon/ace,bcuff/ace,qweasd1/ace,leanprover/ace,thsunmy/ace,soonhokong/lean-ace,sbusso/ace,paladox/ace,enricoberti/ace,XCage15/ace,eranwitkon/ace,skynetbot/ace,Eynaliyev/ace,taylorleh/ace,eranwitkon/ace,qweasd1/ace,sekcheong/ace,thdoan/ace
,kevinkmp/ace,EthanK28/ace,fjakobs/ace,STRd6/ace,acanakoglu/ace,itsff/ace,taylorleh/ace,taylorleh/ace,shamim8888/ace,zpzgone/ace,leanprover/ace,shidao-fm/ace,leanprover/ace,erezarnon/ace,derekja/ace,vgrish/ace,Eynaliyev/ace,skynetbot/ace,JanvanCasteren/ace,alexbarnsley/ace,Mabbu9/ace,leanprover/ace,sekcheong/ace,cintiamh/ace,skynetbot/ace,mdinger/ace,qweasd1/ace,skylarkcob/ace,alexbarnsley/ace,MarkBandilla/ace,jabgibson/ace,alexbarnsley/ace,soonhokong/lean-ace,durai145/ace,shamim8888/ace,XCage15/ace,wcandillon/ace,STRd6/ace,Mabbu9/ace,qweasd1/ace,cintiamh/ace,zpzgone/ace,paladox/ace,animack/ace,Gottox/ace,2947721120/ACE,soonhokong/lean-ace,JanvanCasteren/ace,thdoan/ace,fjakobs/ace,luvegt/ace,enricoberti/ace,ggalancs/ace,taylorleh/ace,Gottox/ace,derekja/ace,Stackato-Apps/ace,kevinkmp/ace,blake-regalia/ace-webapp.js,Mabbu9/ace,fjakobs/ace,skynetbot/ace,itsff/ace,Mabbu9/ace,eranwitkon/ace,Mabbu9/ace,bradparks/ace,skynetbot/ace,mkosieradzki/ace,fjakobs/ace,AaronZhangL/ace,Stackato-Apps/ace,durai145/ace,makelivedotnet/ace,taylorleh/ace,soonhokong/lean-ace,erezarnon/ace,Eynaliyev/ace,skynetbot/ace,mdinger/ace,mrlong/ace,ggalancs/ace,kevinkmp/ace,jjatria/ace,jjatria/ace,WigWagCo/ace,zpzgone/ace,mlajtos/ace,animack/ace,wcandillon/ace,bcuff/ace,zpzgone/ace,tranch-xiao/ace,Amrit01/ace,Nicholas-Westley/ace,Stackato-Apps/ace,CAAL/ace,Stackato-Apps/ace,397239396/ace,sbusso/ace,thsunmy/ace,Eynaliyev/ace,soonhokong/lean-ace,zpzgone/ace,itsff/ace,eranwitkon/ace,ggalancs/ace,soonhokong/lean-ace,leanprover/ace,erezarnon/ace,WigWagCo/ace,tranch-xiao/ace,enricoberti/ace,ylian/ace,animack/ace,sbusso/ace,Nicholas-Westley/ace,Nicholas-Westley/ace,WigWagCo/ace,shamim8888/ace,alexbarnsley/ace,luvegt/ace,mlajtos/ace,WigWagCo/ace,enricoberti/ace,derekja/ace,PatrickTo/ace,multiarc/ace,derekja/ace,STRd6/ace,ggalancs/ace,MarkBandilla/ace,leanprover/ace,mrlong/ace,wcandillon/ace,Mabbu9/ace,2947721120/ACE,mkosieradzki/ace,mkosieradzki/ace,JaroslavMoravec/ace,vgrish/ace,Amrit01/ace,patope/ace,Janva
nCasteren/ace,erezarnon/ace,IChocolateKapa/ace,sbusso/ace,durai145/ace,multiarc/ace,tbutter/ace,tranch-xiao/ace,mrlong/ace,shidao-fm/ace,skynetbot/ace,XCage15/ace,durai145/ace,soonhokong/lean-ace,mdinger/ace,PatrickTo/ace,AaronZhangL/ace,makelivedotnet/ace,PatrickTo/ace,alexbarnsley/ace,fjakobs/ace,jabgibson/ace,mlajtos/ace,mdinger/ace,sbusso/ace,erezarnon/ace,Mabbu9/ace,mrlong/ace,mkosieradzki/ace,ylian/ace,CAAL/ace,bcuff/ace,XCage15/ace,soonhokong/lean-ace,EthanK28/ace,ylian/ace,tranch-xiao/ace,makelivedotnet/ace,animack/ace,wcandillon/ace,animack/ace,JaroslavMoravec/ace,skynetbot/ace,2947721120/ACE,MarkBandilla/ace,JaroslavMoravec/ace,tobyreynold/ace,skylarkcob/ace,STRd6/ace,luvegt/ace,tobyreynold/ace,vgrish/ace,bcuff/ace,CAAL/ace,jabgibson/ace,MarkBandilla/ace,sbusso/ace,shamim8888/ace,Stackato-Apps/ace,cintiamh/ace,zpzgone/ace,sbusso/ace,enricoberti/ace,itsff/ace,zpzgone/ace,PatrickTo/ace,ggalancs/ace,derekja/ace,tobyreynold/ace,shamim8888/ace,animack/ace,eranwitkon/ace,brandonb927/ace,taylorleh/ace,patope/ace,XCage15/ace,JaroslavMoravec/ace,mkosieradzki/ace,397239396/ace,jjatria/ace,luvegt/ace,makelivedotnet/ace,Amrit01/ace,itsff/ace,soonhokong/lean-ace,tobyreynold/ace,qweasd1/ace,mdinger/ace,AaronZhangL/ace,2947721120/ACE,wcandillon/ace,likitha/ace,tbutter/ace,alexbarnsley/ace,MarkBandilla/ace,enricoberti/ace,jjatria/ace,AaronZhangL/ace,shamim8888/ace,durai145/ace,eranwitkon/ace,erezarnon/ace,IChocolateKapa/ace,brandonb927/ace,ThiagoGarciaAlves/ace,tranch-xiao/ace,ThiagoGarciaAlves/ace,MarkBandilla/ace,PatrickTo/ace,alexbarnsley/ace,AaronZhangL/ace,wcandillon/ace,EthanK28/ace,tranch-xiao/ace,shidao-fm/ace,dudb/ace,fjakobs/ace,ThiagoGarciaAlves/ace,leanprover/ace,erezarnon/ace,multiarc/ace,brandonb927/ace,tobyreynold/ace,WigWagCo/ace,skynetbot/ace,kevinkmp/ace,ylian/ace,jabgibson/ace,Amrit01/ace,ektx/ace,XCage15/ace,itsff/ace,brandonb927/ace,cintiamh/ace,IChocolateKapa/ace,Gottox/ace,luvegt/ace,ggalancs/ace,tbutter/ace,JanvanCasteren/ace,MarkBandilla/ace,shida
o-fm/ace,thsunmy/ace,397239396/ace,jabgibson/ace,JaroslavMoravec/ace,ThiagoGarciaAlves/ace,qweasd1/ace,mrlong/ace,paladox/ace,MarkBandilla/ace,sbusso/ace,enricoberti/ace,multiarc/ace,skylarkcob/ace,luvegt/ace,sbusso/ace,paladox/ace,soonhokong/lean-ace,tranch-xiao/ace,sbusso/ace,animack/ace,kevinkmp/ace,likitha/ace,paladox/ace,likitha/ace,taylorleh/ace,sekcheong/ace,MarkBandilla/ace,durai145/ace,leanprover/ace,XCage15/ace,taylorleh/ace,eranwitkon/ace,mkosieradzki/ace,qweasd1/ace,2947721120/ACE,Eynaliyev/ace,paladox/ace,mrlong/ace,patope/ace,PatrickTo/ace,derekja/ace,Gottox/ace,zpzgone/ace,patope/ace,Amrit01/ace,Gottox/ace,WigWagCo/ace,brandonb927/ace,Gottox/ace,paladox/ace,Eynaliyev/ace,shamim8888/ace,Nicholas-Westley/ace,CAAL/ace,jabgibson/ace,Nicholas-Westley/ace,shidao-fm/ace,leanprover/ace,ylian/ace,skylarkcob/ace,fjakobs/ace,robottomw/ace,PatrickTo/ace,Mabbu9/ace,Mabbu9/ace,durai145/ace,Gottox/ace,tbutter/ace,bcuff/ace,Gottox/ace,qweasd1/ace,eranwitkon/ace,alexbarnsley/ace,thsunmy/ace,ylian/ace,AaronZhangL/ace,soonhokong/lean-ace,likitha/ace,fjakobs/ace,enricoberti/ace,2947721120/ACE,Gottox/ace,CAAL/ace,tbutter/ace,MarkBandilla/ace,fjakobs/ace,erezarnon/ace,jabgibson/ace,cintiamh/ace,JanvanCasteren/ace,MarkBandilla/ace,ThiagoGarciaAlves/ace,dudb/ace,shamim8888/ace,makelivedotnet/ace,Gottox/ace,wcandillon/ace,soonhokong/lean-ace,397239396/ace,JanvanCasteren/ace,luvegt/ace,ThiagoGarciaAlves/ace,CAAL/ace,jjatria/ace,2947721120/ACE,vgrish/ace,bcuff/ace,derekja/ace,mlajtos/ace,taylorleh/ace,cintiamh/ace,skylarkcob/ace,thdoan/ace,shidao-fm/ace,PatrickTo/ace,EthanK28/ace,erezarnon/ace,tranch-xiao/ace,397239396/ace,PatrickTo/ace,IChocolateKapa/ace,fjakobs/ace,mrlong/ace,taylorleh/ace,ggalancs/ace,animack/ace,eranwitkon/ace,makelivedotnet/ace,alexbarnsley/ace,alexbarnsley/ace,luvegt/ace,Nicholas-Westley/ace,STRd6/ace,Eynaliyev/ace,paladox/ace,sbusso/ace,jjatria/ace,luvegt/ace,STRd6/ace,vgrish/ace,jabgibson/ace,Eynaliyev/ace,fjakobs/ace,jjatria/ace,patope/ace,eranwitkon/a
ce,multiarc/ace,CAAL/ace,IChocolateKapa/ace,enricoberti/ace,wcandillon/ace,likitha/ace,multiarc/ace,mdinger/ace,mdinger/ace,EthanK28/ace,XCage15/ace,Stackato-Apps/ace,EthanK28/ace,jjatria/ace,wcandillon/ace,JaroslavMoravec/ace,patope/ace,JaroslavMoravec/ace,Amrit01/ace,brandonb927/ace,kevinkmp/ace,WigWagCo/ace,mrlong/ace,taylorleh/ace,qweasd1/ace,tranch-xiao/ace,shidao-fm/ace,multiarc/ace,animack/ace,WigWagCo/ace,mlajtos/ace,jabgibson/ace,enricoberti/ace,IChocolateKapa/ace,thdoan/ace,WigWagCo/ace,kevinkmp/ace,bradparks/ace,brandonb927/ace,MarkBandilla/ace,EthanK28/ace,wcandillon/ace,thsunmy/ace,animack/ace,EthanK28/ace,397239396/ace,JaroslavMoravec/ace,bradparks/ace,WigWagCo/ace,luvegt/ace,tobyreynold/ace,jjatria/ace,Gottox/ace,qweasd1/ace,makelivedotnet/ace,sbusso/ace,WigWagCo/ace,sekcheong/ace,ggalancs/ace,STRd6/ace,skylarkcob/ace,dudb/ace,eranwitkon/ace,taylorleh/ace,vgrish/ace,alexbarnsley/ace,shamim8888/ace,makelivedotnet/ace,wcandillon/ace,Amrit01/ace,ggalancs/ace,erezarnon/ace,soonhokong/lean-ace,patope/ace,jabgibson/ace,thsunmy/ace,sekcheong/ace,zpzgone/ace,shamim8888/ace,cintiamh/ace,Nicholas-Westley/ace,zpzgone/ace,zpzgone/ace,bcuff/ace,taylorleh/ace,Nicholas-Westley/ace,jjatria/ace,397239396/ace,sekcheong/ace,derekja/ace,animack/ace,JaroslavMoravec/ace,bcuff/ace,Gottox/ace,jjatria/ace,dudb/ace,bradparks/ace,robottomw/ace,luvegt/ace,CAAL/ace,ylian/ace,tranch-xiao/ace,fjakobs/ace,skynetbot/ace,Gottox/ace,Mabbu9/ace,PatrickTo/ace,shidao-fm/ace,jabgibson/ace,leanprover/ace,mkosieradzki/ace,ThiagoGarciaAlves/ace,mdinger/ace,leanprover/ace,wcandillon/ace,IChocolateKapa/ace,cintiamh/ace,XCage15/ace,wcandillon/ace,derekja/ace,mdinger/ace,fjakobs/ace,durai145/ace,mlajtos/ace,CAAL/ace,animack/ace,EthanK28/ace,wcandillon/ace,bradparks/ace,cintiamh/ace,Amrit01/ace,likitha/ace,multiarc/ace,dudb/ace,zpzgone/ace,brandonb927/ace,zpzgone/ace,Amrit01/ace,mdinger/ace,kevinkmp/ace,itsff/ace,MarkBandilla/ace,2947721120/ACE,tbutter/ace,makelivedotnet/ace,itsff/ace,likitha/ace,
animack/ace,tbutter/ace,STRd6/ace,IChocolateKapa/ace,durai145/ace,JanvanCasteren/ace,qweasd1/ace,animack/ace,derekja/ace,EthanK28/ace,makelivedotnet/ace,makelivedotnet/ace,alexbarnsley/ace,WigWagCo/ace,likitha/ace,tobyreynold/ace,mkosieradzki/ace,skylarkcob/ace,derekja/ace,thdoan/ace,animack/ace,shamim8888/ace,AaronZhangL/ace,PatrickTo/ace,skylarkcob/ace,XCage15/ace,JanvanCasteren/ace,eranwitkon/ace,paladox/ace,mdinger/ace,skynetbot/ace,JaroslavMoravec/ace,IChocolateKapa/ace,jjatria/ace,tranch-xiao/ace,Nicholas-Westley/ace,Nicholas-Westley/ace,ggalancs/ace,2947721120/ACE,cintiamh/ace,WigWagCo/ace,ylian/ace,CAAL/ace,thsunmy/ace,CAAL/ace,CAAL/ace,thdoan/ace,bradparks/ace,mrlong/ace,enricoberti/ace,eranwitkon/ace,AaronZhangL/ace,dudb/ace,jabgibson/ace,ylian/ace,taylorleh/ace,wcandillon/ace,shamim8888/ace,mkosieradzki/ace,CAAL/ace,JanvanCasteren/ace,397239396/ace,Nicholas-Westley/ace,bradparks/ace,vgrish/ace,mlajtos/ace,tranch-xiao/ace,PatrickTo/ace,multiarc/ace,tranch-xiao/ace,Eynaliyev/ace,PatrickTo/ace,ggalancs/ace,mkosieradzki/ace,patope/ace,animack/ace,likitha/ace,XCage15/ace,cintiamh/ace,durai145/ace,brandonb927/ace,mlajtos/ace,shidao-fm/ace,soonhokong/lean-ace,IChocolateKapa/ace,PatrickTo/ace,XCage15/ace,alexbarnsley/ace,bradparks/ace,shidao-fm/ace,Amrit01/ace,JanvanCasteren/ace,itsff/ace,jjatria/ace,taylorleh/ace,durai145/ace,Eynaliyev/ace,paladox/ace,2947721120/ACE,sekcheong/ace,AaronZhangL/ace,Nicholas-Westley/ace,kevinkmp/ace,derekja/ace,JanvanCasteren/ace,397239396/ace,taylorleh/ace,tobyreynold/ace,enricoberti/ace,luvegt/ace,tobyreynold/ace,MarkBandilla/ace,IChocolateKapa/ace,XCage15/ace,kevinkmp/ace,fjakobs/ace,thsunmy/ace,EthanK28/ace,vgrish/ace,alexbarnsley/ace,Stackato-Apps/ace,JanvanCasteren/ace,vgrish/ace,PatrickTo/ace,derekja/ace,jjatria/ace,tbutter/ace,tbutter/ace,IChocolateKapa/ace,bcuff/ace,multiarc/ace,Gottox/ace,zpzgone/ace,erezarnon/ace,shidao-fm/ace,JaroslavMoravec/ace,mdinger/ace,STRd6/ace,bcuff/ace,mkosieradzki/ace,Stackato-Apps/ace,alexbarns
ley/ace,enricoberti/ace,soonhokong/lean-ace,itsff/ace,ggalancs/ace,acanakoglu/ace,vgrish/ace,fjakobs/ace,kevinkmp/ace,IChocolateKapa/ace,vgrish/ace,thsunmy/ace,multiarc/ace,2947721120/ACE,bradparks/ace,mlajtos/ace,Nicholas-Westley/ace,thsunmy/ace,Stackato-Apps/ace,skynetbot/ace,cintiamh/ace,derekja/ace,thsunmy/ace,makelivedotnet/ace,skylarkcob/ace,cintiamh/ace,cintiamh/ace,tobyreynold/ace,Mabbu9/ace,Eynaliyev/ace,mlajtos/ace,397239396/ace,397239396/ace,sekcheong/ace,bradparks/ace,tbutter/ace,leanprover/ace,mlajtos/ace,wcandillon/ace,bcuff/ace,zpzgone/ace,ylian/ace,mkosieradzki/ace,Mabbu9/ace,tbutter/ace,makelivedotnet/ace,wcandillon/ace,XCage15/ace,multiarc/ace,thdoan/ace,tbutter/ace,AaronZhangL/ace,shidao-fm/ace,EthanK28/ace,CAAL/ace,bcuff/ace,mdinger/ace,mkosieradzki/ace,skylarkcob/ace,shamim8888/ace,mrlong/ace,alexbarnsley/ace,JanvanCasteren/ace,durai145/ace,enricoberti/ace,tbutter/ace,tranch-xiao/ace,skylarkcob/ace,bradparks/ace,mkosieradzki/ace,leanprover/ace,sbusso/ace,Eynaliyev/ace,tranch-xiao/ace,shidao-fm/ace,IChocolateKapa/ace,erezarnon/ace,shamim8888/ace,ylian/ace,AaronZhangL/ace,paladox/ace,skynetbot/ace,Amrit01/ace,multiarc/ace,enricoberti/ace,patope/ace,CAAL/ace,MarkBandilla/ace,skylarkcob/ace,dudb/ace,shamim8888/ace,derekja/ace,jabgibson/ace,jjatria/ace,JaroslavMoravec/ace,qweasd1/ace,mrlong/ace,sekcheong/ace,Gottox/ace,sekcheong/ace,mrlong/ace,acanakoglu/ace,ggalancs/ace,ThiagoGarciaAlves/ace,tobyreynold/ace,WigWagCo/ace,MarkBandilla/ace,ThiagoGarciaAlves/ace,PatrickTo/ace,ThiagoGarciaAlves/ace,Stackato-Apps/ace,taylorleh/ace,ylian/ace,jabgibson/ace,mrlong/ace,likitha/ace,Nicholas-Westley/ace,sekcheong/ace,multiarc/ace,thdoan/ace,bradparks/ace,eranwitkon/ace,mlajtos/ace,skynetbot/ace,skylarkcob/ace,dudb/ace,sekcheong/ace,Stackato-Apps/ace,STRd6/ace,qweasd1/ace,makelivedotnet/ace,CAAL/ace,tobyreynold/ace,erezarnon/ace,bradparks/ace,STRd6/ace,bradparks/ace,tobyreynold/ace,luvegt/ace,EthanK28/ace,wcandillon/ace,Eynaliyev/ace,soonhokong/lean-ace,Gottox/a
ce,robottomw/ace,qweasd1/ace,JaroslavMoravec/ace,IChocolateKapa/ace,Mabbu9/ace,shidao-fm/ace,MarkBandilla/ace,tbutter/ace,soonhokong/lean-ace,sbusso/ace,durai145/ace,mlajtos/ace,bcuff/ace,STRd6/ace,bcuff/ace,paladox/ace,enricoberti/ace,AaronZhangL/ace,shidao-fm/ace,sekcheong/ace,ylian/ace,alexbarnsley/ace,Nicholas-Westley/ace,dudb/ace,durai145/ace,mdinger/ace,EthanK28/ace,leanprover/ace,paladox/ace,397239396/ace,skylarkcob/ace,dudb/ace,animack/ace,vgrish/ace,mdinger/ace,Stackato-Apps/ace,mkosieradzki/ace,JaroslavMoravec/ace,patope/ace,ektx/ace,Stackato-Apps/ace,mrlong/ace,Eynaliyev/ace,Stackato-Apps/ace,likitha/ace,Nicholas-Westley/ace,dudb/ace,skynetbot/ace,thsunmy/ace,fjakobs/ace,397239396/ace
"no use strict"; ;(function(window) { if (typeof window.window != "undefined" && window.document) return; if (window.require && window.define) return; window.console = function() { var msgs = Array.prototype.slice.call(arguments, 0); postMessage({type: "log", data: msgs}); }; window.console.error = window.console.warn = window.console.log = window.console.trace = window.console; window.window = window; window.ace = window; window.onerror = function(message, file, line, col, err) { postMessage({type: "error", data: { message: message, data: err.data, file: file, line: line, col: col, stack: err.stack }}); }; window.normalizeModule = function(parentId, moduleName) { // normalize plugin requires if (moduleName.indexOf("!") !== -1) { var chunks = moduleName.split("!"); return window.normalizeModule(parentId, chunks[0]) + "!" + window.normalizeModule(parentId, chunks[1]); } // normalize relative requires if (moduleName.charAt(0) == ".") { var base = parentId.split("/").slice(0, -1).join("/"); moduleName = (base ? 
base + "/" : "") + moduleName; while (moduleName.indexOf(".") !== -1 && previous != moduleName) { var previous = moduleName; moduleName = moduleName.replace(/^\.\//, "").replace(/\/\.\//, "/").replace(/[^\/]+\/\.\.\//, ""); } } return moduleName; }; window.require = function require(parentId, id) { if (!id) { id = parentId; parentId = null; } if (!id.charAt) throw new Error("worker.js require() accepts only (parentId, id) as arguments"); id = window.normalizeModule(parentId, id); var module = window.require.modules[id]; if (module) { if (!module.initialized) { module.initialized = true; module.exports = module.factory().exports; } return module.exports; } if (!window.require.tlns) return console.log("unable to load " + id); var path = resolveModuleId(id, window.require.tlns); if (path.slice(-3) != ".js") path += ".js"; window.require.id = id; window.require.modules[id] = {}; // prevent infinite loop on broken modules importScripts(path); return window.require(parentId, id); }; function resolveModuleId(id, paths) { var testPath = id, tail = ""; while (testPath) { var alias = paths[testPath]; if (typeof alias == "string") { return alias + tail; } else if (alias) { return alias.location.replace(/\/*$/, "/") + (tail || alias.main || alias.name); } else if (alias === false) { return ""; } var i = testPath.lastIndexOf("/"); if (i === -1) break; tail = testPath.substr(i) + tail; testPath = testPath.slice(0, i); } return id; } window.require.modules = {}; window.require.tlns = {}; window.define = function(id, deps, factory) { if (arguments.length == 2) { factory = deps; if (typeof id != "string") { deps = id; id = window.require.id; } } else if (arguments.length == 1) { factory = id; deps = []; id = window.require.id; } if (typeof factory != "function") { window.require.modules[id] = { exports: factory, initialized: true }; return; } if (!deps.length) // If there is no dependencies, we inject "require", "exports" and // "module" as dependencies, to provide CommonJS 
compatibility. deps = ["require", "exports", "module"]; var req = function(childId) { return window.require(id, childId); }; window.require.modules[id] = { exports: {}, factory: function() { var module = this; var returnExports = factory.apply(this, deps.map(function(dep) { switch (dep) { // Because "require", "exports" and "module" aren't actual // dependencies, we must handle them seperately. case "require": return req; case "exports": return module.exports; case "module": return module; // But for all other dependencies, we can just go ahead and // require them. default: return req(dep); } })); if (returnExports) module.exports = returnExports; return module; } }; }; window.define.amd = {}; require.tlns = {}; window.initBaseUrls = function initBaseUrls(topLevelNamespaces) { for (var i in topLevelNamespaces) require.tlns[i] = topLevelNamespaces[i]; }; window.initSender = function initSender() { var EventEmitter = window.require("ace/lib/event_emitter").EventEmitter; var oop = window.require("ace/lib/oop"); var Sender = function() {}; (function() { oop.implement(this, EventEmitter); this.callback = function(data, callbackId) { postMessage({ type: "call", id: callbackId, data: data }); }; this.emit = function(name, data) { postMessage({ type: "event", name: name, data: data }); }; }).call(Sender.prototype); return new Sender(); }; var main = window.main = null; var sender = window.sender = null; window.onmessage = function(e) { var msg = e.data; if (msg.event && sender) { sender._signal(msg.event, msg.data); } else if (msg.command) { if (main[msg.command]) main[msg.command].apply(main, msg.args); else if (window[msg.command]) window[msg.command].apply(window, msg.args); else throw new Error("Unknown command:" + msg.command); } else if (msg.init) { window.initBaseUrls(msg.tlns); require("ace/lib/es5-shim"); sender = window.sender = window.initSender(); var clazz = require(msg.module)[msg.classname]; main = window.main = new clazz(sender); } }; })(this);
lib/ace/worker/worker.js
"no use strict"; ;(function(window) { if (typeof window.window != "undefined" && window.document) { return; } window.console = function() { var msgs = Array.prototype.slice.call(arguments, 0); postMessage({type: "log", data: msgs}); }; window.console.error = window.console.warn = window.console.log = window.console.trace = window.console; window.window = window; window.ace = window; window.onerror = function(message, file, line, col, err) { postMessage({type: "error", data: { message: message, file: file, line: line, col: col, stack: err.stack }}); }; window.normalizeModule = function(parentId, moduleName) { // normalize plugin requires if (moduleName.indexOf("!") !== -1) { var chunks = moduleName.split("!"); return window.normalizeModule(parentId, chunks[0]) + "!" + window.normalizeModule(parentId, chunks[1]); } // normalize relative requires if (moduleName.charAt(0) == ".") { var base = parentId.split("/").slice(0, -1).join("/"); moduleName = (base ? base + "/" : "") + moduleName; while(moduleName.indexOf(".") !== -1 && previous != moduleName) { var previous = moduleName; moduleName = moduleName.replace(/^\.\//, "").replace(/\/\.\//, "/").replace(/[^\/]+\/\.\.\//, ""); } } return moduleName; }; window.require = function(parentId, id) { if (!id) { id = parentId; parentId = null; } if (!id.charAt) throw new Error("worker.js require() accepts only (parentId, id) as arguments"); id = window.normalizeModule(parentId, id); var module = window.require.modules[id]; if (module) { if (!module.initialized) { module.initialized = true; module.exports = module.factory().exports; } return module.exports; } var chunks = id.split("/"); if (!window.require.tlns) return console.log("unable to load " + id); chunks[0] = window.require.tlns[chunks[0]] || chunks[0]; var path = chunks.join("/"); if (path.slice(-3) != ".js") path += ".js"; window.require.id = id; importScripts(path); return window.require(parentId, id); }; window.require.modules = {}; window.require.tlns = {}; 
window.define = function(id, deps, factory) { if (arguments.length == 2) { factory = deps; if (typeof id != "string") { deps = id; id = window.require.id; } } else if (arguments.length == 1) { factory = id; deps = []; id = window.require.id; } if (typeof factory != "function") { window.require.modules[id] = { exports: factory, initialized: true }; return; } if (!deps.length) // If there is no dependencies, we inject 'require', 'exports' and // 'module' as dependencies, to provide CommonJS compatibility. deps = ['require', 'exports', 'module']; var req = function(childId) { return window.require(id, childId); }; window.require.modules[id] = { exports: {}, factory: function() { var module = this; var returnExports = factory.apply(this, deps.map(function(dep) { switch(dep) { // Because 'require', 'exports' and 'module' aren't actual // dependencies, we must handle them seperately. case 'require': return req; case 'exports': return module.exports; case 'module': return module; // But for all other dependencies, we can just go ahead and // require them. 
default: return req(dep); } })); if (returnExports) module.exports = returnExports; return module; } }; }; window.define.amd = {}; window.initBaseUrls = function initBaseUrls(topLevelNamespaces) { require.tlns = topLevelNamespaces; }; window.initSender = function initSender() { var EventEmitter = window.require("ace/lib/event_emitter").EventEmitter; var oop = window.require("ace/lib/oop"); var Sender = function() {}; (function() { oop.implement(this, EventEmitter); this.callback = function(data, callbackId) { postMessage({ type: "call", id: callbackId, data: data }); }; this.emit = function(name, data) { postMessage({ type: "event", name: name, data: data }); }; }).call(Sender.prototype); return new Sender(); }; var main = window.main = null; var sender = window.sender = null; window.onmessage = function(e) { var msg = e.data; if (msg.command) { if (main[msg.command]) main[msg.command].apply(main, msg.args); else throw new Error("Unknown command:" + msg.command); } else if (msg.init) { initBaseUrls(msg.tlns); require("ace/lib/es5-shim"); sender = window.sender = initSender(); var clazz = require(msg.module)[msg.classname]; main = window.main = new clazz(sender); } else if (msg.event && sender) { sender._signal(msg.event, msg.data); } }; })(this);
improve require support in worker
lib/ace/worker/worker.js
improve require support in worker
<ide><path>ib/ace/worker/worker.js <ide> "no use strict"; <ide> ;(function(window) { <del>if (typeof window.window != "undefined" && window.document) { <add>if (typeof window.window != "undefined" && window.document) <ide> return; <del>} <add>if (window.require && window.define) <add> return; <ide> <ide> window.console = function() { <ide> var msgs = Array.prototype.slice.call(arguments, 0); <ide> window.onerror = function(message, file, line, col, err) { <ide> postMessage({type: "error", data: { <ide> message: message, <add> data: err.data, <ide> file: file, <ide> line: line, <ide> col: col, <ide> var base = parentId.split("/").slice(0, -1).join("/"); <ide> moduleName = (base ? base + "/" : "") + moduleName; <ide> <del> while(moduleName.indexOf(".") !== -1 && previous != moduleName) { <add> while (moduleName.indexOf(".") !== -1 && previous != moduleName) { <ide> var previous = moduleName; <ide> moduleName = moduleName.replace(/^\.\//, "").replace(/\/\.\//, "/").replace(/[^\/]+\/\.\.\//, ""); <ide> } <ide> return moduleName; <ide> }; <ide> <del>window.require = function(parentId, id) { <add>window.require = function require(parentId, id) { <ide> if (!id) { <ide> id = parentId; <ide> parentId = null; <ide> } <ide> return module.exports; <ide> } <del> <del> var chunks = id.split("/"); <add> <ide> if (!window.require.tlns) <ide> return console.log("unable to load " + id); <del> chunks[0] = window.require.tlns[chunks[0]] || chunks[0]; <del> var path = chunks.join("/"); <add> <add> var path = resolveModuleId(id, window.require.tlns); <ide> if (path.slice(-3) != ".js") path += ".js"; <ide> <ide> window.require.id = id; <add> window.require.modules[id] = {}; // prevent infinite loop on broken modules <ide> importScripts(path); <ide> return window.require(parentId, id); <ide> }; <add>function resolveModuleId(id, paths) { <add> var testPath = id, tail = ""; <add> while (testPath) { <add> var alias = paths[testPath]; <add> if (typeof alias == "string") { <add> return alias + 
tail; <add> } else if (alias) { <add> return alias.location.replace(/\/*$/, "/") + (tail || alias.main || alias.name); <add> } else if (alias === false) { <add> return ""; <add> } <add> var i = testPath.lastIndexOf("/"); <add> if (i === -1) break; <add> tail = testPath.substr(i) + tail; <add> testPath = testPath.slice(0, i); <add> } <add> return id; <add>} <ide> window.require.modules = {}; <ide> window.require.tlns = {}; <ide> <ide> } <ide> <ide> if (!deps.length) <del> // If there is no dependencies, we inject 'require', 'exports' and <del> // 'module' as dependencies, to provide CommonJS compatibility. <del> deps = ['require', 'exports', 'module']; <add> // If there is no dependencies, we inject "require", "exports" and <add> // "module" as dependencies, to provide CommonJS compatibility. <add> deps = ["require", "exports", "module"]; <ide> <ide> var req = function(childId) { <ide> return window.require(id, childId); <ide> factory: function() { <ide> var module = this; <ide> var returnExports = factory.apply(this, deps.map(function(dep) { <del> switch(dep) { <del> // Because 'require', 'exports' and 'module' aren't actual <del> // dependencies, we must handle them seperately. <del> case 'require': return req; <del> case 'exports': return module.exports; <del> case 'module': return module; <del> // But for all other dependencies, we can just go ahead and <del> // require them. <del> default: return req(dep); <del> } <add> switch (dep) { <add> // Because "require", "exports" and "module" aren't actual <add> // dependencies, we must handle them seperately. <add> case "require": return req; <add> case "exports": return module.exports; <add> case "module": return module; <add> // But for all other dependencies, we can just go ahead and <add> // require them. 
<add> default: return req(dep); <add> } <ide> })); <ide> if (returnExports) <ide> module.exports = returnExports; <ide> }; <ide> }; <ide> window.define.amd = {}; <del> <add>require.tlns = {}; <ide> window.initBaseUrls = function initBaseUrls(topLevelNamespaces) { <del> require.tlns = topLevelNamespaces; <add> for (var i in topLevelNamespaces) <add> require.tlns[i] = topLevelNamespaces[i]; <ide> }; <ide> <ide> window.initSender = function initSender() { <ide> <ide> window.onmessage = function(e) { <ide> var msg = e.data; <del> if (msg.command) { <add> if (msg.event && sender) { <add> sender._signal(msg.event, msg.data); <add> } <add> else if (msg.command) { <ide> if (main[msg.command]) <ide> main[msg.command].apply(main, msg.args); <add> else if (window[msg.command]) <add> window[msg.command].apply(window, msg.args); <ide> else <ide> throw new Error("Unknown command:" + msg.command); <ide> } <del> else if (msg.init) { <del> initBaseUrls(msg.tlns); <add> else if (msg.init) { <add> window.initBaseUrls(msg.tlns); <ide> require("ace/lib/es5-shim"); <del> sender = window.sender = initSender(); <add> sender = window.sender = window.initSender(); <ide> var clazz = require(msg.module)[msg.classname]; <ide> main = window.main = new clazz(sender); <del> } <del> else if (msg.event && sender) { <del> sender._signal(msg.event, msg.data); <ide> } <ide> }; <ide> })(this);
Java
apache-2.0
a68be8199f023dd19210448e192d301e46efca60
0
chenxiuyang/onos,oeeagle/onos,opennetworkinglab/onos,LorenzReinhart/ONOSnew,jinlongliu/onos,VinodKumarS-Huawei/ietf96yang,gkatsikas/onos,gkatsikas/onos,y-higuchi/onos,VinodKumarS-Huawei/ietf96yang,maheshraju-Huawei/actn,osinstom/onos,kuujo/onos,Shashikanth-Huawei/bmp,gkatsikas/onos,jinlongliu/onos,LorenzReinhart/ONOSnew,sdnwiselab/onos,LorenzReinhart/ONOSnew,y-higuchi/onos,gkatsikas/onos,Shashikanth-Huawei/bmp,donNewtonAlpha/onos,mengmoya/onos,mengmoya/onos,opennetworkinglab/onos,sdnwiselab/onos,LorenzReinhart/ONOSnew,chinghanyu/onos,osinstom/onos,rvhub/onos,kkkane/ONOS,packet-tracker/onos,kkkane/ONOS,maheshraju-Huawei/actn,maheshraju-Huawei/actn,LorenzReinhart/ONOSnew,sdnwiselab/onos,opennetworkinglab/onos,sonu283304/onos,planoAccess/clonedONOS,rvhub/onos,rvhub/onos,zsh2938/onos,Shashikanth-Huawei/bmp,opennetworkinglab/onos,sonu283304/onos,osinstom/onos,Shashikanth-Huawei/bmp,y-higuchi/onos,kuujo/onos,chinghanyu/onos,SmartInfrastructures/dreamer,y-higuchi/onos,oplinkoms/onos,opennetworkinglab/onos,castroflavio/onos,oeeagle/onos,mengmoya/onos,jmiserez/onos,sdnwiselab/onos,chinghanyu/onos,mengmoya/onos,jinlongliu/onos,CNlukai/onos-gerrit-test,packet-tracker/onos,y-higuchi/onos,CNlukai/onos-gerrit-test,zsh2938/onos,lsinfo3/onos,donNewtonAlpha/onos,oeeagle/onos,planoAccess/clonedONOS,sonu283304/onos,ravikumaran2015/ravikumaran201504,jinlongliu/onos,osinstom/onos,donNewtonAlpha/onos,kkkane/ONOS,kuujo/onos,gkatsikas/onos,kuangrewawa/onos,sonu283304/onos,oplinkoms/onos,chinghanyu/onos,castroflavio/onos,CNlukai/onos-gerrit-test,chenxiuyang/onos,maheshraju-Huawei/actn,mengmoya/onos,zsh2938/onos,maheshraju-Huawei/actn,oplinkoms/onos,packet-tracker/onos,CNlukai/onos-gerrit-test,Shashikanth-Huawei/bmp,oplinkoms/onos,oplinkoms/onos,packet-tracker/onos,planoAccess/clonedONOS,chenxiuyang/onos,sdnwiselab/onos,donNewtonAlpha/onos,jmiserez/onos,ravikumaran2015/ravikumaran201504,castroflavio/onos,osinstom/onos,jmiserez/onos,oplinkoms/onos,gkatsikas/onos,lsinfo3/onos,kkkane/ONOS,oplin
koms/onos,kuangrewawa/onos,ravikumaran2015/ravikumaran201504,chenxiuyang/onos,kuujo/onos,kuujo/onos,kuujo/onos,SmartInfrastructures/dreamer,opennetworkinglab/onos,jmiserez/onos,kuujo/onos,planoAccess/clonedONOS,lsinfo3/onos,lsinfo3/onos,VinodKumarS-Huawei/ietf96yang,sdnwiselab/onos,kuangrewawa/onos,VinodKumarS-Huawei/ietf96yang,VinodKumarS-Huawei/ietf96yang,SmartInfrastructures/dreamer,donNewtonAlpha/onos,zsh2938/onos,rvhub/onos,oeeagle/onos,castroflavio/onos
/* * Copyright 2014 Open Networking Laboratory * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.onlab.onos.store.trivial.impl; import static org.slf4j.LoggerFactory.getLogger; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; import org.apache.felix.scr.annotations.Activate; import org.apache.felix.scr.annotations.Component; import org.apache.felix.scr.annotations.Deactivate; import org.apache.felix.scr.annotations.Reference; import org.apache.felix.scr.annotations.ReferenceCardinality; import org.apache.felix.scr.annotations.Service; import org.onlab.onos.cluster.ClusterEventListener; import org.onlab.onos.cluster.ClusterService; import org.onlab.onos.cluster.ControllerNode; import org.onlab.onos.cluster.ControllerNode.State; import org.onlab.onos.cluster.DefaultControllerNode; import org.onlab.onos.cluster.NodeId; import org.onlab.onos.cluster.RoleInfo; import org.onlab.onos.mastership.MastershipEvent; import org.onlab.onos.mastership.MastershipStore; import org.onlab.onos.mastership.MastershipStoreDelegate; import org.onlab.onos.mastership.MastershipTerm; import org.onlab.onos.net.DeviceId; import org.onlab.onos.net.MastershipRole; import org.onlab.onos.store.AbstractStore; import org.onlab.packet.IpAddress; import org.slf4j.Logger; import 
com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import static org.onlab.onos.mastership.MastershipEvent.Type.*; /** * Manages inventory of controller mastership over devices using * trivial, non-distributed in-memory structures implementation. */ @Component(immediate = true) @Service public class SimpleMastershipStore extends AbstractStore<MastershipEvent, MastershipStoreDelegate> implements MastershipStore { private final Logger log = getLogger(getClass()); private static final int NOTHING = 0; private static final int INIT = 1; @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY) protected ClusterService clusterService; //devices mapped to their masters, to emulate multiple nodes protected final Map<DeviceId, NodeId> masterMap = new HashMap<>(); //emulate backups with pile of nodes protected final Map<DeviceId, List<NodeId>> backups = new HashMap<>(); //terms protected final Map<DeviceId, AtomicInteger> termMap = new HashMap<>(); @Activate public void activate() { if (clusterService == null) { // just for ease of unit test final ControllerNode instance = new DefaultControllerNode(new NodeId("local"), IpAddress.valueOf("127.0.0.1")); clusterService = new ClusterService() { @Override public ControllerNode getLocalNode() { return instance; } @Override public Set<ControllerNode> getNodes() { return ImmutableSet.of(instance); } @Override public ControllerNode getNode(NodeId nodeId) { if (instance.id().equals(nodeId)) { return instance; } return null; } @Override public State getState(NodeId nodeId) { if (instance.id().equals(nodeId)) { return State.ACTIVE; } else { return State.INACTIVE; } } @Override public void addListener(ClusterEventListener listener) { } @Override public void removeListener(ClusterEventListener listener) { } }; } log.info("Started"); } @Deactivate public void deactivate() { log.info("Stopped"); } @Override public synchronized MastershipEvent setMaster(NodeId nodeId, DeviceId deviceId) { 
MastershipRole role = getRole(nodeId, deviceId); switch (role) { case MASTER: // no-op return null; case STANDBY: case NONE: NodeId prevMaster = masterMap.put(deviceId, nodeId); incrementTerm(deviceId); removeFromBackups(deviceId, nodeId); addToBackup(deviceId, prevMaster); break; default: log.warn("unknown Mastership Role {}", role); return null; } return new MastershipEvent(MASTER_CHANGED, deviceId, getNodes(deviceId)); } @Override public NodeId getMaster(DeviceId deviceId) { return masterMap.get(deviceId); } // synchronized for atomic read @Override public synchronized RoleInfo getNodes(DeviceId deviceId) { return new RoleInfo(masterMap.get(deviceId), backups.getOrDefault(deviceId, ImmutableList.of())); } @Override public Set<DeviceId> getDevices(NodeId nodeId) { Set<DeviceId> ids = new HashSet<>(); for (Map.Entry<DeviceId, NodeId> d : masterMap.entrySet()) { if (Objects.equals(d.getValue(), nodeId)) { ids.add(d.getKey()); } } return ids; } @Override public synchronized MastershipRole requestRole(DeviceId deviceId) { //query+possible reelection NodeId node = clusterService.getLocalNode().id(); MastershipRole role = getRole(node, deviceId); switch (role) { case MASTER: return MastershipRole.MASTER; case STANDBY: if (getMaster(deviceId) == null) { // no master => become master masterMap.put(deviceId, node); incrementTerm(deviceId); // remove from backup list removeFromBackups(deviceId, node); notifyDelegate(new MastershipEvent(MASTER_CHANGED, deviceId, getNodes(deviceId))); return MastershipRole.MASTER; } return MastershipRole.STANDBY; case NONE: if (getMaster(deviceId) == null) { // no master => become master masterMap.put(deviceId, node); incrementTerm(deviceId); notifyDelegate(new MastershipEvent(MASTER_CHANGED, deviceId, getNodes(deviceId))); return MastershipRole.MASTER; } // add to backup list if (addToBackup(deviceId, node)) { notifyDelegate(new MastershipEvent(BACKUPS_CHANGED, deviceId, getNodes(deviceId))); } return MastershipRole.STANDBY; default: 
log.warn("unknown Mastership Role {}", role); } return role; } // add to backup if not there already, silently ignores null node private synchronized boolean addToBackup(DeviceId deviceId, NodeId nodeId) { boolean modified = false; List<NodeId> stbys = backups.getOrDefault(deviceId, new ArrayList<>()); if (nodeId != null && !stbys.contains(nodeId)) { stbys.add(nodeId); modified = true; } backups.put(deviceId, stbys); return modified; } private synchronized boolean removeFromBackups(DeviceId deviceId, NodeId node) { List<NodeId> stbys = backups.getOrDefault(deviceId, new ArrayList<>()); boolean modified = stbys.remove(node); backups.put(deviceId, stbys); return modified; } private synchronized void incrementTerm(DeviceId deviceId) { AtomicInteger term = termMap.getOrDefault(deviceId, new AtomicInteger(NOTHING)); term.incrementAndGet(); termMap.put(deviceId, term); } @Override public MastershipRole getRole(NodeId nodeId, DeviceId deviceId) { //just query NodeId current = masterMap.get(deviceId); MastershipRole role; if (current != null && current.equals(nodeId)) { return MastershipRole.MASTER; } if (backups.getOrDefault(deviceId, Collections.emptyList()).contains(nodeId)) { role = MastershipRole.STANDBY; } else { role = MastershipRole.NONE; } return role; } // synchronized for atomic read @Override public synchronized MastershipTerm getTermFor(DeviceId deviceId) { if ((termMap.get(deviceId) == null)) { return MastershipTerm.of(masterMap.get(deviceId), NOTHING); } return MastershipTerm.of( masterMap.get(deviceId), termMap.get(deviceId).get()); } @Override public synchronized MastershipEvent setStandby(NodeId nodeId, DeviceId deviceId) { MastershipRole role = getRole(nodeId, deviceId); switch (role) { case MASTER: NodeId backup = reelect(deviceId, nodeId); if (backup == null) { // no master alternative masterMap.remove(deviceId); // TODO: Should there be new event type for no MASTER? 
return new MastershipEvent(MASTER_CHANGED, deviceId, getNodes(deviceId)); } else { NodeId prevMaster = masterMap.put(deviceId, backup); incrementTerm(deviceId); addToBackup(deviceId, prevMaster); return new MastershipEvent(MASTER_CHANGED, deviceId, getNodes(deviceId)); } case STANDBY: case NONE: boolean modified = addToBackup(deviceId, nodeId); if (modified) { return new MastershipEvent(BACKUPS_CHANGED, deviceId, getNodes(deviceId)); } break; default: log.warn("unknown Mastership Role {}", role); } return null; } //dumbly selects next-available node that's not the current one //emulate leader election private synchronized NodeId reelect(DeviceId did, NodeId nodeId) { List<NodeId> stbys = backups.getOrDefault(did, Collections.emptyList()); NodeId backup = null; for (NodeId n : stbys) { if (!n.equals(nodeId)) { backup = n; break; } } stbys.remove(backup); return backup; } @Override public synchronized MastershipEvent relinquishRole(NodeId nodeId, DeviceId deviceId) { MastershipRole role = getRole(nodeId, deviceId); switch (role) { case MASTER: NodeId backup = reelect(deviceId, nodeId); masterMap.put(deviceId, backup); incrementTerm(deviceId); return new MastershipEvent(MASTER_CHANGED, deviceId, getNodes(deviceId)); case STANDBY: if (removeFromBackups(deviceId, nodeId)) { return new MastershipEvent(BACKUPS_CHANGED, deviceId, getNodes(deviceId)); } break; case NONE: break; default: log.warn("unknown Mastership Role {}", role); } return null; } }
core/store/trivial/src/main/java/org/onlab/onos/store/trivial/impl/SimpleMastershipStore.java
/* * Copyright 2014 Open Networking Laboratory * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.onlab.onos.store.trivial.impl; import static org.slf4j.LoggerFactory.getLogger; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; import org.apache.felix.scr.annotations.Activate; import org.apache.felix.scr.annotations.Component; import org.apache.felix.scr.annotations.Deactivate; import org.apache.felix.scr.annotations.Reference; import org.apache.felix.scr.annotations.ReferenceCardinality; import org.apache.felix.scr.annotations.Service; import org.onlab.onos.cluster.ClusterEventListener; import org.onlab.onos.cluster.ClusterService; import org.onlab.onos.cluster.ControllerNode; import org.onlab.onos.cluster.ControllerNode.State; import org.onlab.onos.cluster.DefaultControllerNode; import org.onlab.onos.cluster.NodeId; import org.onlab.onos.cluster.RoleInfo; import org.onlab.onos.mastership.MastershipEvent; import org.onlab.onos.mastership.MastershipStore; import org.onlab.onos.mastership.MastershipStoreDelegate; import org.onlab.onos.mastership.MastershipTerm; import org.onlab.onos.net.DeviceId; import org.onlab.onos.net.MastershipRole; import org.onlab.onos.store.AbstractStore; import org.onlab.packet.IpAddress; import org.slf4j.Logger; import com.google.common.collect.ImmutableList; import 
com.google.common.collect.ImmutableSet; import static org.onlab.onos.mastership.MastershipEvent.Type.*; /** * Manages inventory of controller mastership over devices using * trivial, non-distributed in-memory structures implementation. */ @Component(immediate = true) @Service public class SimpleMastershipStore extends AbstractStore<MastershipEvent, MastershipStoreDelegate> implements MastershipStore { private final Logger log = getLogger(getClass()); private static final int NOTHING = 0; private static final int INIT = 1; @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY) protected ClusterService clusterService; //devices mapped to their masters, to emulate multiple nodes protected final Map<DeviceId, NodeId> masterMap = new HashMap<>(); //emulate backups with pile of nodes protected final Map<DeviceId, List<NodeId>> backups = new HashMap<>(); //terms protected final Map<DeviceId, AtomicInteger> termMap = new HashMap<>(); @Activate public void activate() { if (clusterService == null) { // just for ease of unit test final ControllerNode instance = new DefaultControllerNode(new NodeId("local"), IpAddress.valueOf("127.0.0.1")); clusterService = new ClusterService() { @Override public ControllerNode getLocalNode() { return instance; } @Override public Set<ControllerNode> getNodes() { return ImmutableSet.of(instance); } @Override public ControllerNode getNode(NodeId nodeId) { if (instance.id().equals(nodeId)) { return instance; } return null; } @Override public State getState(NodeId nodeId) { if (instance.id().equals(nodeId)) { return State.ACTIVE; } else { return State.INACTIVE; } } @Override public void addListener(ClusterEventListener listener) { } @Override public void removeListener(ClusterEventListener listener) { } }; } log.info("Started"); } @Deactivate public void deactivate() { log.info("Stopped"); } @Override public synchronized MastershipEvent setMaster(NodeId nodeId, DeviceId deviceId) { MastershipRole role = getRole(nodeId, deviceId); switch 
(role) { case MASTER: // no-op return null; case STANDBY: case NONE: NodeId prevMaster = masterMap.put(deviceId, nodeId); incrementTerm(deviceId); removeFromBackups(deviceId, nodeId); addToBackup(deviceId, prevMaster); break; default: log.warn("unknown Mastership Role {}", role); return null; } return new MastershipEvent(MASTER_CHANGED, deviceId, getNodes(deviceId)); } @Override public NodeId getMaster(DeviceId deviceId) { return masterMap.get(deviceId); } // synchronized for atomic read @Override public synchronized RoleInfo getNodes(DeviceId deviceId) { return new RoleInfo(masterMap.get(deviceId), backups.getOrDefault(deviceId, ImmutableList.of())); } @Override public Set<DeviceId> getDevices(NodeId nodeId) { Set<DeviceId> ids = new HashSet<>(); for (Map.Entry<DeviceId, NodeId> d : masterMap.entrySet()) { if (d.getValue().equals(nodeId)) { ids.add(d.getKey()); } } return ids; } @Override public synchronized MastershipRole requestRole(DeviceId deviceId) { //query+possible reelection NodeId node = clusterService.getLocalNode().id(); MastershipRole role = getRole(node, deviceId); switch (role) { case MASTER: return MastershipRole.MASTER; case STANDBY: if (getMaster(deviceId) == null) { // no master => become master masterMap.put(deviceId, node); incrementTerm(deviceId); // remove from backup list removeFromBackups(deviceId, node); notifyDelegate(new MastershipEvent(MASTER_CHANGED, deviceId, getNodes(deviceId))); return MastershipRole.MASTER; } return MastershipRole.STANDBY; case NONE: if (getMaster(deviceId) == null) { // no master => become master masterMap.put(deviceId, node); incrementTerm(deviceId); notifyDelegate(new MastershipEvent(MASTER_CHANGED, deviceId, getNodes(deviceId))); return MastershipRole.MASTER; } // add to backup list if (addToBackup(deviceId, node)) { notifyDelegate(new MastershipEvent(BACKUPS_CHANGED, deviceId, getNodes(deviceId))); } return MastershipRole.STANDBY; default: log.warn("unknown Mastership Role {}", role); } return role; } // add 
to backup if not there already, silently ignores null node private synchronized boolean addToBackup(DeviceId deviceId, NodeId nodeId) { boolean modified = false; List<NodeId> stbys = backups.getOrDefault(deviceId, new ArrayList<>()); if (nodeId != null && !stbys.contains(nodeId)) { stbys.add(nodeId); modified = true; } backups.put(deviceId, stbys); return modified; } private synchronized boolean removeFromBackups(DeviceId deviceId, NodeId node) { List<NodeId> stbys = backups.getOrDefault(deviceId, new ArrayList<>()); boolean modified = stbys.remove(node); backups.put(deviceId, stbys); return modified; } private synchronized void incrementTerm(DeviceId deviceId) { AtomicInteger term = termMap.getOrDefault(deviceId, new AtomicInteger(NOTHING)); term.incrementAndGet(); termMap.put(deviceId, term); } @Override public MastershipRole getRole(NodeId nodeId, DeviceId deviceId) { //just query NodeId current = masterMap.get(deviceId); MastershipRole role; if (current != null && current.equals(nodeId)) { return MastershipRole.MASTER; } if (backups.getOrDefault(deviceId, Collections.emptyList()).contains(nodeId)) { role = MastershipRole.STANDBY; } else { role = MastershipRole.NONE; } return role; } // synchronized for atomic read @Override public synchronized MastershipTerm getTermFor(DeviceId deviceId) { if ((termMap.get(deviceId) == null)) { return MastershipTerm.of(masterMap.get(deviceId), NOTHING); } return MastershipTerm.of( masterMap.get(deviceId), termMap.get(deviceId).get()); } @Override public synchronized MastershipEvent setStandby(NodeId nodeId, DeviceId deviceId) { MastershipRole role = getRole(nodeId, deviceId); switch (role) { case MASTER: NodeId backup = reelect(deviceId, nodeId); if (backup == null) { // no master alternative masterMap.remove(deviceId); // TODO: Should there be new event type for no MASTER? 
return new MastershipEvent(MASTER_CHANGED, deviceId, getNodes(deviceId)); } else { NodeId prevMaster = masterMap.put(deviceId, backup); incrementTerm(deviceId); addToBackup(deviceId, prevMaster); return new MastershipEvent(MASTER_CHANGED, deviceId, getNodes(deviceId)); } case STANDBY: case NONE: boolean modified = addToBackup(deviceId, nodeId); if (modified) { return new MastershipEvent(BACKUPS_CHANGED, deviceId, getNodes(deviceId)); } break; default: log.warn("unknown Mastership Role {}", role); } return null; } //dumbly selects next-available node that's not the current one //emulate leader election private synchronized NodeId reelect(DeviceId did, NodeId nodeId) { List<NodeId> stbys = backups.getOrDefault(did, Collections.emptyList()); NodeId backup = null; for (NodeId n : stbys) { if (!n.equals(nodeId)) { backup = n; break; } } stbys.remove(backup); return backup; } @Override public synchronized MastershipEvent relinquishRole(NodeId nodeId, DeviceId deviceId) { MastershipRole role = getRole(nodeId, deviceId); switch (role) { case MASTER: NodeId backup = reelect(deviceId, nodeId); masterMap.put(deviceId, backup); incrementTerm(deviceId); return new MastershipEvent(MASTER_CHANGED, deviceId, getNodes(deviceId)); case STANDBY: if (removeFromBackups(deviceId, nodeId)) { return new MastershipEvent(BACKUPS_CHANGED, deviceId, getNodes(deviceId)); } break; case NONE: break; default: log.warn("unknown Mastership Role {}", role); } return null; } }
Fixed a null pointer exception in SimpleMastershipStore; need to verify whether same exists in distributed one. Change-Id: I89ad124114586eb69bb9de3702085e8587cb315c
core/store/trivial/src/main/java/org/onlab/onos/store/trivial/impl/SimpleMastershipStore.java
Fixed a null pointer exception in SimpleMastershipStore; need to verify whether same exists in distributed one.
<ide><path>ore/store/trivial/src/main/java/org/onlab/onos/store/trivial/impl/SimpleMastershipStore.java <ide> import java.util.HashSet; <ide> import java.util.List; <ide> import java.util.Map; <add>import java.util.Objects; <ide> import java.util.Set; <ide> import java.util.concurrent.atomic.AtomicInteger; <ide> <ide> public Set<DeviceId> getDevices(NodeId nodeId) { <ide> Set<DeviceId> ids = new HashSet<>(); <ide> for (Map.Entry<DeviceId, NodeId> d : masterMap.entrySet()) { <del> if (d.getValue().equals(nodeId)) { <add> if (Objects.equals(d.getValue(), nodeId)) { <ide> ids.add(d.getKey()); <ide> } <ide> }
Java
apache-2.0
286eb7755edd241b8f334553bbb584caa4950a07
0
asciidoctor/asciidoctor-maven-plugin,asciidoctor/asciidoctor-maven-plugin,NicolasGeraud/asciidoctor-maven-plugin,abelsromero/asciidoctor-maven-plugin,abelsromero/asciidoctor-maven-plugin,abelsromero/asciidoctor-maven-plugin,mojavelinux/asciidoctor-maven-plugin,asciidoctor/asciidoctor-maven-plugin
/* * Licensed under the Apache License, Version 2.0 (the &quot;License&quot;); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an &quot;AS IS&quot; BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.asciidoc.maven; import org.apache.commons.io.FileUtils; import org.apache.commons.io.filefilter.NameFileFilter; import org.apache.commons.io.filefilter.RegexFileFilter; import org.apache.commons.io.filefilter.TrueFileFilter; import org.apache.commons.io.monitor.FileAlterationListener; import org.apache.commons.io.monitor.FileAlterationListenerAdaptor; import org.apache.commons.io.monitor.FileAlterationMonitor; import org.apache.commons.io.monitor.FileAlterationObserver; import org.apache.maven.plugin.MojoExecutionException; import org.apache.maven.plugin.MojoFailureException; import org.apache.maven.plugins.annotations.Mojo; import org.apache.maven.plugins.annotations.Parameter; import org.asciidoctor.Asciidoctor; import java.io.File; import java.util.ArrayList; import java.util.Collection; import java.util.Scanner; import java.util.concurrent.Callable; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; @Mojo(name = "auto-refresh") public class AsciidoctorRefreshMojo extends AsciidoctorMojo { @Parameter(property = "port", required = false) protected int port = 2000; @Parameter(property = "interval", required = false) protected int interval = 2000; // 2s private Future<Asciidoctor> asciidoctor = 
null; private Collection<FileAlterationMonitor> monitors = null; private final AtomicBoolean needsUpdate = new AtomicBoolean(false); private ScheduledExecutorService updaterScheduler = null; @Override public void execute() throws MojoExecutionException, MojoFailureException { // this is long because of JRuby startup createAsciidoctor(); startPolling(); startUpdater(); doWork(); stopUpdater(); stopMonitor(); } private void stopUpdater() { if (updaterScheduler != null) { updaterScheduler.shutdown(); } } private void startUpdater() { updaterScheduler = Executors.newScheduledThreadPool(1); updaterScheduler.scheduleAtFixedRate(new Updater(needsUpdate, this), interval, interval, TimeUnit.MILLISECONDS); } protected void doWork() throws MojoFailureException, MojoExecutionException { getLog().info("Rendered doc in " + executeAndReturnDuration() + "ms"); doWait(); } protected void doWait() { getLog().info("Type [Enter] to exit"); new Scanner(System.in).nextLine(); } private void stopMonitor() throws MojoExecutionException { if (monitors != null) { for (final FileAlterationMonitor monitor : monitors) { try { monitor.stop(); } catch (Exception e) { throw new MojoExecutionException(e.getMessage(), e); } } } } protected synchronized void doExecute() { // delete only content files, resources are synchronized so normally up to date for (final File f : FileUtils.listFiles(outputDirectory, new RegexFileFilter(ASCIIDOC_REG_EXP_EXTENSION), TrueFileFilter.INSTANCE)) { FileUtils.deleteQuietly(f); } try { getLog().info("Re-rendered doc in " + executeAndReturnDuration() + "ms"); } catch (final MojoExecutionException e) { getLog().error(e); } catch (final MojoFailureException e) { getLog().error(e); } } protected long executeAndReturnDuration() throws MojoExecutionException, MojoFailureException { final long start = System.nanoTime(); super.execute(); final long end = System.nanoTime(); return TimeUnit.NANOSECONDS.toMillis(end - start); } private void startPolling() throws 
MojoExecutionException { monitors = new ArrayList<FileAlterationMonitor>(); { // content monitor final FileAlterationObserver observer; if (sourceDirectory != null) { observer = new FileAlterationObserver(sourceDirectory, new RegexFileFilter(ASCIIDOC_REG_EXP_EXTENSION)); } else if (sourceDocumentName != null) { observer = new FileAlterationObserver(sourceDocumentName.getParentFile(), new NameFileFilter(sourceDocumentName.getName())); } else { monitors = null; // no need to start anything because there is no content return; } final FileAlterationMonitor monitor = new FileAlterationMonitor(interval); final FileAlterationListener listener = new FileAlterationListenerAdaptor() { @Override public void onFileCreate(final File file) { getLog().info("File " + file.getAbsolutePath() + " created."); needsUpdate.set(true); } @Override public void onFileChange(final File file) { getLog().info("File " + file.getAbsolutePath() + " updated."); needsUpdate.set(true); } @Override public void onFileDelete(final File file) { getLog().info("File " + file.getAbsolutePath() + " deleted."); needsUpdate.set(true); } }; observer.addListener(listener); monitor.addObserver(observer); monitors.add(monitor); } { // resources monitors if (synchronizations != null) { for (final Synchronization s : synchronizations) { final FileAlterationMonitor monitor = new FileAlterationMonitor(interval); final FileAlterationListener listener = new FileAlterationListenerAdaptor() { @Override public void onFileCreate(final File file) { getLog().info("File " + file.getAbsolutePath() + " created."); synchronize(s); needsUpdate.set(true); } @Override public void onFileChange(final File file) { getLog().info("File " + file.getAbsolutePath() + " updated."); synchronize(s); needsUpdate.set(true); } @Override public void onFileDelete(final File file) { getLog().info("File " + file.getAbsolutePath() + " deleted."); FileUtils.deleteQuietly(file); needsUpdate.set(true); } }; final File source = s.getSource(); final 
FileAlterationObserver observer; if (source.isDirectory()) { observer = new FileAlterationObserver(source); } else { observer = new FileAlterationObserver(source.getParentFile(), new NameFileFilter(source.getName())); } observer.addListener(listener); monitor.addObserver(observer); monitors.add(monitor); } } } for (final FileAlterationMonitor monitor : monitors) { try { monitor.start(); } catch (final Exception e) { throw new MojoExecutionException(e.getMessage(), e); } } } private void createAsciidoctor() { final ExecutorService es = Executors.newSingleThreadExecutor(); asciidoctor = es.submit(new Callable<Asciidoctor>() { @Override public Asciidoctor call() throws Exception { return Asciidoctor.Factory.create(); } }); es.shutdown(); } @Override protected Asciidoctor getAsciidoctorInstance() throws MojoExecutionException { try { return asciidoctor.get(); } catch (final Exception e) { throw new MojoExecutionException(e.getMessage(), e); } } private static class Updater implements Runnable { private final AtomicBoolean run; private final AsciidoctorRefreshMojo mojo; private Updater(final AtomicBoolean run, final AsciidoctorRefreshMojo mojo) { this.run = run; this.mojo = mojo; } @Override public void run() { if (run.get()) { run.set(false); mojo.doExecute(); } } } }
src/main/java/org/asciidoc/maven/AsciidoctorRefreshMojo.java
/* * Licensed under the Apache License, Version 2.0 (the &quot;License&quot;); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an &quot;AS IS&quot; BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.asciidoc.maven; import org.apache.commons.io.FileUtils; import org.apache.commons.io.filefilter.NameFileFilter; import org.apache.commons.io.filefilter.RegexFileFilter; import org.apache.commons.io.filefilter.TrueFileFilter; import org.apache.commons.io.monitor.FileAlterationListener; import org.apache.commons.io.monitor.FileAlterationListenerAdaptor; import org.apache.commons.io.monitor.FileAlterationMonitor; import org.apache.commons.io.monitor.FileAlterationObserver; import org.apache.maven.plugin.MojoExecutionException; import org.apache.maven.plugin.MojoFailureException; import org.apache.maven.plugins.annotations.Mojo; import org.apache.maven.plugins.annotations.Parameter; import org.asciidoctor.Asciidoctor; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; import java.util.Scanner; import java.util.concurrent.Callable; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; @Mojo(name = "auto-refresh") public class AsciidoctorRefreshMojo extends AsciidoctorMojo { @Parameter(property = "port", required = false) protected int port = 2000; @Parameter(property = "interval", required = false) protected int interval = 2000; // 2s private 
Future<Asciidoctor> asciidoctor = null; private Collection<FileAlterationMonitor> monitors = null; private final AtomicBoolean needsUpdate = new AtomicBoolean(false); private ScheduledExecutorService updaterScheduler = null; @Override public void execute() throws MojoExecutionException, MojoFailureException { // this is long because of JRuby startup createAsciidoctor(); startPolling(); startUpdater(); doWork(); stopUpdater(); stopMonitor(); } private void stopUpdater() { if (updaterScheduler != null) { updaterScheduler.shutdown(); } } private void startUpdater() { updaterScheduler = Executors.newScheduledThreadPool(1); updaterScheduler.scheduleAtFixedRate(new Updater(needsUpdate, this), interval, interval, TimeUnit.MILLISECONDS); } protected void doWork() throws MojoFailureException, MojoExecutionException { super.execute(); doWait(); } protected void doWait() { getLog().info("Type [Enter] to exit"); new Scanner(System.in).nextLine(); } private void stopMonitor() throws MojoExecutionException { if (monitors != null) { for (final FileAlterationMonitor monitor : monitors) { try { monitor.stop(); } catch (Exception e) { throw new MojoExecutionException(e.getMessage(), e); } } } } protected synchronized void doExecute() { // delete only content files, resources are synchronized so normally up to date for (final File f : FileUtils.listFiles(outputDirectory, new RegexFileFilter(ASCIIDOC_REG_EXP_EXTENSION), TrueFileFilter.INSTANCE)) { FileUtils.deleteQuietly(f); } try { final long start = System.nanoTime(); super.execute(); final long end = System.nanoTime(); getLog().info("Re-rendered doc in " + TimeUnit.NANOSECONDS.toMillis(end - start) + "ms"); } catch (final MojoExecutionException e) { getLog().error(e); } catch (final MojoFailureException e) { getLog().error(e); } } private void startPolling() throws MojoExecutionException { monitors = new ArrayList<FileAlterationMonitor>(); { // content monitor final FileAlterationObserver observer; if (sourceDirectory != null) { 
observer = new FileAlterationObserver(sourceDirectory, new RegexFileFilter(ASCIIDOC_REG_EXP_EXTENSION)); } else if (sourceDocumentName != null) { observer = new FileAlterationObserver(sourceDocumentName.getParentFile(), new NameFileFilter(sourceDocumentName.getName())); } else { monitors = null; // no need to start anything because there is no content return; } final FileAlterationMonitor monitor = new FileAlterationMonitor(interval); final FileAlterationListener listener = new FileAlterationListenerAdaptor() { @Override public void onFileCreate(final File file) { getLog().info("File " + file.getAbsolutePath() + " created."); needsUpdate.set(true); } @Override public void onFileChange(final File file) { getLog().info("File " + file.getAbsolutePath() + " updated."); needsUpdate.set(true); } @Override public void onFileDelete(final File file) { getLog().info("File " + file.getAbsolutePath() + " deleted."); needsUpdate.set(true); } }; observer.addListener(listener); monitor.addObserver(observer); monitors.add(monitor); } { // resources monitors if (synchronizations != null) { for (final Synchronization s : synchronizations) { final FileAlterationMonitor monitor = new FileAlterationMonitor(interval); final FileAlterationListener listener = new FileAlterationListenerAdaptor() { @Override public void onFileCreate(final File file) { getLog().info("File " + file.getAbsolutePath() + " created."); synchronize(s); needsUpdate.set(true); } @Override public void onFileChange(final File file) { getLog().info("File " + file.getAbsolutePath() + " updated."); synchronize(s); needsUpdate.set(true); } @Override public void onFileDelete(final File file) { getLog().info("File " + file.getAbsolutePath() + " deleted."); FileUtils.deleteQuietly(file); needsUpdate.set(true); } }; final File source = s.getSource(); final FileAlterationObserver observer; if (source.isDirectory()) { observer = new FileAlterationObserver(source); } else { observer = new 
FileAlterationObserver(source.getParentFile(), new NameFileFilter(source.getName())); } observer.addListener(listener); monitor.addObserver(observer); monitors.add(monitor); } } } for (final FileAlterationMonitor monitor : monitors) { try { monitor.start(); } catch (final Exception e) { throw new MojoExecutionException(e.getMessage(), e); } } } private void createAsciidoctor() { final ExecutorService es = Executors.newSingleThreadExecutor(); asciidoctor = es.submit(new Callable<Asciidoctor>() { @Override public Asciidoctor call() throws Exception { return Asciidoctor.Factory.create(); } }); es.shutdown(); } @Override protected Asciidoctor getAsciidoctorInstance() throws MojoExecutionException { try { return asciidoctor.get(); } catch (final Exception e) { throw new MojoExecutionException(e.getMessage(), e); } } private static class Updater implements Runnable { private final AtomicBoolean run; private final AsciidoctorRefreshMojo mojo; private Updater(final AtomicBoolean run, final AsciidoctorRefreshMojo mojo) { this.run = run; this.mojo = mojo; } @Override public void run() { if (run.get()) { run.set(false); mojo.doExecute(); } } } }
better logging for the first rendering
src/main/java/org/asciidoc/maven/AsciidoctorRefreshMojo.java
better logging for the first rendering
<ide><path>rc/main/java/org/asciidoc/maven/AsciidoctorRefreshMojo.java <ide> import org.asciidoctor.Asciidoctor; <ide> <ide> import java.io.File; <del>import java.io.IOException; <ide> import java.util.ArrayList; <ide> import java.util.Collection; <ide> import java.util.Scanner; <ide> } <ide> <ide> protected void doWork() throws MojoFailureException, MojoExecutionException { <del> super.execute(); <add> getLog().info("Rendered doc in " + executeAndReturnDuration() + "ms"); <ide> doWait(); <ide> } <ide> <ide> } <ide> <ide> try { <del> final long start = System.nanoTime(); <del> super.execute(); <del> final long end = System.nanoTime(); <del> getLog().info("Re-rendered doc in " + TimeUnit.NANOSECONDS.toMillis(end - start) + "ms"); <add> getLog().info("Re-rendered doc in " + executeAndReturnDuration() + "ms"); <ide> } catch (final MojoExecutionException e) { <ide> getLog().error(e); <ide> } catch (final MojoFailureException e) { <ide> getLog().error(e); <ide> } <add> } <add> <add> protected long executeAndReturnDuration() throws MojoExecutionException, MojoFailureException { <add> final long start = System.nanoTime(); <add> super.execute(); <add> final long end = System.nanoTime(); <add> return TimeUnit.NANOSECONDS.toMillis(end - start); <ide> } <ide> <ide> private void startPolling() throws MojoExecutionException {
JavaScript
mit
0ddf2d0303d0bc67bf8075c1090e49b4c8ea67b9
0
filipedeschamps/rss-feed-emitter
'use strict'; const FeedParser = require('feedparser'); const request = require('request'); const FeedError = require('./FeedError'); const RESPONSE_CODES = { OK: 200, NOT_FOUND: 404, ISE: 500, }; /** * This module manages automatically how many feed items * it will keep in memory, and basically it will have a * maximum history which is how many items the feed has * multiplied by this number below. So, if the feed have * 10 items, we will keep 30 items max in the history. * @type {Number} */ const historyLengthMultiplier = 3; /** * Default UserAgent string * Since static stuff doesn't work in older versions, keep using global const * @type {String} */ const DEFAULT_UA = 'Node/RssFeedEmitter (https://github.com/filipedeschamps/rss-feed-emitter)'; class Feed { constructor(data) { ({ items: this.items, url: this.url, refresh: this.refresh, userAgent: this.userAgent, eventName: this.eventName, } = data); if (!this.items) { this.items = []; } if (!this.url) { throw new TypeError('missing required field `url`'); } if (!this.refresh) { this.refresh = 60000; } /** * If the user has specified a User Agent * we will use that as the 'user-agent' header when * making requests, otherwise we use the default option. * @type {string} */ if (!this.userAgent) { this.userAgent = DEFAULT_UA; } if (!this.eventName) { this.eventName = 'new-item'; } } /** * Given a feed and item, try to find * it inside the feed item list. We will use * this to see if there's already an item inside * the feed item list. If there is, we know it's * not a new item. 
* @param {Object} item item specitics * @returns {Object} the matched element */ findItem(item) { return this.items.find((entry) => { // if feed is RSS 2.x, check existence of 'guid' if (item.guid) return entry.guid === item.guid; // if feed is Atom 1.x, check existence of 'id' if (item.id) return entry.id === item.id; // default object with 'link' and 'title' return entry.link === item.link && entry.title === item.title; }); } updateHxLength(newItems) { this.maxHistoryLength = newItems.length * historyLengthMultiplier; } addItem(item) { this.items.push(item); this.items = this.items.slice(this.items.length - this.maxHistoryLength, this.items.length); } fetchData() { // eslint-disable-next-line no-async-promise-executor return new Promise(async (resolve) => { const items = []; const feedparser = new FeedParser(); feedparser.on('readable', () => { const item = feedparser.read(); item.meta.link = this.url; items.push(item); }); feedparser.on('error', () => { this.handleError(new FeedError(`Cannot parse ${this.url} XML`, 'invalid_feed', this.url)); }); feedparser.on('end', () => { resolve(items); }); this.get(feedparser); }); } get(feedparser) { request .get({ url: this.url, headers: { 'user-agent': this.userAgent, accept: 'text/html,application/xhtml+xml,application/xml,text/xml', }, }) .on('response', (res) => { if (res.statusCode !== RESPONSE_CODES.OK) { this.handleError(new FeedError(`${this.url} returned a ${res.statusCode} status code`, 'fetch_url_error', this.url)); } }) .on('error', () => { this.handleError(new FeedError(`Cannot connect to ${this.url}`, 'fetch_url_error', this.url)); }) .pipe(feedparser) .on('end', () => {}); } handleError(error) { if (this.handler) { this.handler.handle(error); } else { throw error; } } destroy() { clearInterval(this.interval); delete this.interval; } } module.exports = Feed;
src/Feed.js
'use strict'; const FeedParser = require('feedparser'); const request = require('request'); const FeedError = require('./FeedError'); const RESPONSE_CODES = { OK: 200, NOT_FOUND: 404, ISE: 500, }; /** * This module manages automatically how many feed items * it will keep in memory, and basically it will have a * maximum history which is how many items the feed has * multiplied by this number below. So, if the feed have * 10 items, we will keep 30 items max in the history. * @type {Number} */ const historyLengthMultiplier = 3; /** * Default UserAgent string * Since static stuff doesn't work in older versions, keep using global const * @type {String} */ const DEFAULT_UA = 'Node/RssFeedEmitter (https://github.com/filipedeschamps/rss-feed-emitter)'; class Feed { constructor(data) { ({ items: this.items, url: this.url, refresh: this.refresh, userAgent: this.userAgent, eventName: this.eventName, } = data); if (!this.items) { this.items = []; } if (!this.url) { throw new TypeError('missing required field `url`'); } if (!this.refresh) { this.refresh = 60000; } /** * If the user has specified a User Agent * we will use that as the 'user-agent' header when * making requests, otherwise we use the default option. * @type {string} */ if (!this.userAgent) { this.userAgent = DEFAULT_UA; } if (!this.eventName) { this.eventName = 'new-item'; } } /** * Given a feed and item, try to find * it inside the feed item list. We will use * this to see if there's already an item inside * the feed item list. If there is, we know it's * not a new item. 
* @param {Object} item item specitics * @returns {Object} the matched element */ findItem(item) { return this.items.find((entry) => { // if feed is RSS 2.x, check existence of 'guid' if (item.guid) return entry.guid === item.guid; // if feed is Atom 1.x, check existence of 'id' if (item.id) return entry.id === item.id; // default object with 'link' and 'title' return entry.link === item.link && entry.title === item.title; }); } updateHxLength(newItems) { this.maxHistoryLength = newItems.length * historyLengthMultiplier; } addItem(item) { this.items.push(item); this.items = this.items.slice(this.items.length - this.maxHistoryLength, this.items.length); } fetchData() { // eslint-disable-next-line no-async-promise-executor return new Promise(async (resolve) => { const items = []; const feedparser = new FeedParser(); feedparser.on('readable', () => { const item = feedparser.read(); item.meta.link = this.url; items.push(item); }); feedparser.on('error', () => { this.handleError(new FeedError(`Cannot parse ${this.url} XML`, 'invalid_feed', this.url)); }); feedparser.on('end', () => { resolve(items); }); this.get(feedparser); }); } get(feedparser) { request .get({ url: this.url, headers: { 'user-agent': this.userAgent, accept: 'text/html,application/xhtml+xml,application/xml,text/xml', }, }) .on('response', (res) => { if (res.statusCode !== RESPONSE_CODES.OK) { this.handleError(new FeedError(`This URL returned a ${res.statusCode} status code`, 'fetch_url_error', this.url)); } }) .on('error', () => { this.handleError(new FeedError(`Cannot connect to ${this.url}`, 'fetch_url_error', this.url)); }) .pipe(feedparser) .on('end', () => {}); } handleError(error) { if (this.handler) { this.handler.handle(error); } else { throw error; } } destroy() { clearInterval(this.interval); delete this.interval; } } module.exports = Feed;
improve FeedError message
src/Feed.js
improve FeedError message
<ide><path>rc/Feed.js <ide> }) <ide> .on('response', (res) => { <ide> if (res.statusCode !== RESPONSE_CODES.OK) { <del> this.handleError(new FeedError(`This URL returned a ${res.statusCode} status code`, 'fetch_url_error', this.url)); <add> this.handleError(new FeedError(`${this.url} returned a ${res.statusCode} status code`, 'fetch_url_error', this.url)); <ide> } <ide> }) <ide> .on('error', () => {
Java
lgpl-2.1
error: pathspec 'src/test/java/com/celements/xwikiPatches/ConcurrentCacheTest.java' did not match any file(s) known to git
387877245fb7e63a5d641bdda594c0572868b63d
1
celements/celements-core,celements/celements-core
package com.celements.xwikiPatches; import static com.celements.common.test.CelementsTestUtils.*; import static org.easymock.EasyMock.*; import static org.junit.Assert.*; import java.util.ArrayList; import java.util.concurrent.Callable; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.TimeUnit; import org.junit.Before; import org.junit.Test; import org.xwiki.cache.CacheFactory; import org.xwiki.context.Execution; import org.xwiki.context.ExecutionContext; import org.xwiki.model.reference.DocumentReference; import com.celements.common.test.AbstractComponentTest; import com.xpn.xwiki.XWikiConfig; import com.xpn.xwiki.XWikiContext; import com.xpn.xwiki.doc.XWikiDocument; import com.xpn.xwiki.store.XWikiCacheStore; import com.xpn.xwiki.store.XWikiStoreInterface; import com.xpn.xwiki.web.Utils; public class ConcurrentCacheTest extends AbstractComponentTest { private XWikiCacheStore theCacheStore; private String wikiName = "testWiki"; public String testFullName = "TestSpace.TestDoc"; private XWikiConfig configMock; public DocumentReference testDocRef; @Before public void setUp_ConcurrentCatchTest() throws Exception { getContext().setDatabase(wikiName); testDocRef = new DocumentReference(wikiName, "TestSpace", "TestDoc"); configMock = createMockAndAddToDefault(XWikiConfig.class); expect(configMock.getProperty(eq("xwiki.store.hibernate.path"), eq( "/WEB-INF/hibernate.cfg.xml"))).andReturn("testhibernate.cfg.xml"); expect(getWikiMock().Param(eq("xwiki.store.cache.capacity"))).andReturn(null).anyTimes(); expect(getWikiMock().Param(eq("xwiki.store.cache.pageexistcapacity"))).andReturn( null).anyTimes(); CacheFactory cacheFactory = Utils.getComponent(CacheFactory.class, "jbosscache"); expect(getWikiMock().getCacheFactory()).andReturn(cacheFactory).anyTimes(); } @Test public void test() throws Exception { expect(getWikiMock().getConfig()).andReturn(configMock); 
replayDefault(); XWikiStoreInterface store = Utils.getComponent(XWikiStoreInterface.class); theCacheStore = new XWikiCacheStore(store, getContext()); int cores = Runtime.getRuntime().availableProcessors(); assertTrue("This tests needs real multi core processors, but found " + cores, cores > 1); ScheduledExecutorService theExecutor = Executors.newScheduledThreadPool(cores); ArrayList<ScheduledFuture<Boolean>> futureList = new ArrayList<>(100); for (int i = 1; i < 100; i++) { ScheduledFuture<Boolean> testFuture = theExecutor.schedule(new LoadXWikiDocCommand(), 90, TimeUnit.MILLISECONDS); futureList.add(testFuture); } theExecutor.scheduleAtFixedRate(new ResetCacheEntryCommand(), 100, 100, TimeUnit.MILLISECONDS); try { theExecutor.awaitTermination(10, TimeUnit.SECONDS); } catch (InterruptedException exp) { exp.printStackTrace(); } for (ScheduledFuture<Boolean> testFuture : futureList) { assertTrue(testFuture.isDone()); assertTrue(testFuture.get()); } theExecutor.shutdown(); verifyDefault(); } public class ResetCacheEntryCommand implements Runnable { @Override public void run() { String key = theCacheStore.getKey(wikiName, testFullName, ""); if (theCacheStore.getCache() != null) { theCacheStore.getCache().remove(key); } } } public class LoadXWikiDocCommand implements Callable<Boolean> { private XWikiContext getContext() { Execution execution = Utils.getComponent(Execution.class); ExecutionContext execContext = execution.getContext(); // TODO create ExecutionContext if not exists return (XWikiContext) execContext.getProperty("xwikicontext"); } @Override public Boolean call() throws Exception { XWikiDocument myDoc = new XWikiDocument(testDocRef); XWikiDocument loadedXWikiDoc = theCacheStore.loadXWikiDoc(myDoc, getContext()); assertNotNull(loadedXWikiDoc); // TODO check objects return false; } } }
src/test/java/com/celements/xwikiPatches/ConcurrentCacheTest.java
start implementing concurrent document cache tests
src/test/java/com/celements/xwikiPatches/ConcurrentCacheTest.java
start implementing concurrent document cache tests
<ide><path>rc/test/java/com/celements/xwikiPatches/ConcurrentCacheTest.java <add>package com.celements.xwikiPatches; <add> <add>import static com.celements.common.test.CelementsTestUtils.*; <add>import static org.easymock.EasyMock.*; <add>import static org.junit.Assert.*; <add> <add>import java.util.ArrayList; <add>import java.util.concurrent.Callable; <add>import java.util.concurrent.Executors; <add>import java.util.concurrent.ScheduledExecutorService; <add>import java.util.concurrent.ScheduledFuture; <add>import java.util.concurrent.TimeUnit; <add> <add>import org.junit.Before; <add>import org.junit.Test; <add>import org.xwiki.cache.CacheFactory; <add>import org.xwiki.context.Execution; <add>import org.xwiki.context.ExecutionContext; <add>import org.xwiki.model.reference.DocumentReference; <add> <add>import com.celements.common.test.AbstractComponentTest; <add>import com.xpn.xwiki.XWikiConfig; <add>import com.xpn.xwiki.XWikiContext; <add>import com.xpn.xwiki.doc.XWikiDocument; <add>import com.xpn.xwiki.store.XWikiCacheStore; <add>import com.xpn.xwiki.store.XWikiStoreInterface; <add>import com.xpn.xwiki.web.Utils; <add> <add>public class ConcurrentCacheTest extends AbstractComponentTest { <add> <add> private XWikiCacheStore theCacheStore; <add> private String wikiName = "testWiki"; <add> public String testFullName = "TestSpace.TestDoc"; <add> private XWikiConfig configMock; <add> public DocumentReference testDocRef; <add> <add> @Before <add> public void setUp_ConcurrentCatchTest() throws Exception { <add> getContext().setDatabase(wikiName); <add> testDocRef = new DocumentReference(wikiName, "TestSpace", "TestDoc"); <add> configMock = createMockAndAddToDefault(XWikiConfig.class); <add> expect(configMock.getProperty(eq("xwiki.store.hibernate.path"), eq( <add> "/WEB-INF/hibernate.cfg.xml"))).andReturn("testhibernate.cfg.xml"); <add> expect(getWikiMock().Param(eq("xwiki.store.cache.capacity"))).andReturn(null).anyTimes(); <add> 
expect(getWikiMock().Param(eq("xwiki.store.cache.pageexistcapacity"))).andReturn( <add> null).anyTimes(); <add> CacheFactory cacheFactory = Utils.getComponent(CacheFactory.class, "jbosscache"); <add> expect(getWikiMock().getCacheFactory()).andReturn(cacheFactory).anyTimes(); <add> } <add> <add> @Test <add> public void test() throws Exception { <add> expect(getWikiMock().getConfig()).andReturn(configMock); <add> replayDefault(); <add> XWikiStoreInterface store = Utils.getComponent(XWikiStoreInterface.class); <add> theCacheStore = new XWikiCacheStore(store, getContext()); <add> int cores = Runtime.getRuntime().availableProcessors(); <add> assertTrue("This tests needs real multi core processors, but found " + cores, cores > 1); <add> ScheduledExecutorService theExecutor = Executors.newScheduledThreadPool(cores); <add> ArrayList<ScheduledFuture<Boolean>> futureList = new ArrayList<>(100); <add> for (int i = 1; i < 100; i++) { <add> ScheduledFuture<Boolean> testFuture = theExecutor.schedule(new LoadXWikiDocCommand(), 90, <add> TimeUnit.MILLISECONDS); <add> futureList.add(testFuture); <add> } <add> theExecutor.scheduleAtFixedRate(new ResetCacheEntryCommand(), 100, 100, TimeUnit.MILLISECONDS); <add> try { <add> theExecutor.awaitTermination(10, TimeUnit.SECONDS); <add> } catch (InterruptedException exp) { <add> exp.printStackTrace(); <add> } <add> for (ScheduledFuture<Boolean> testFuture : futureList) { <add> assertTrue(testFuture.isDone()); <add> assertTrue(testFuture.get()); <add> } <add> theExecutor.shutdown(); <add> verifyDefault(); <add> } <add> <add> public class ResetCacheEntryCommand implements Runnable { <add> <add> @Override <add> public void run() { <add> String key = theCacheStore.getKey(wikiName, testFullName, ""); <add> if (theCacheStore.getCache() != null) { <add> theCacheStore.getCache().remove(key); <add> } <add> } <add> <add> } <add> <add> public class LoadXWikiDocCommand implements Callable<Boolean> { <add> <add> private XWikiContext getContext() { <add> 
Execution execution = Utils.getComponent(Execution.class); <add> ExecutionContext execContext = execution.getContext(); <add> // TODO create ExecutionContext if not exists <add> return (XWikiContext) execContext.getProperty("xwikicontext"); <add> } <add> <add> @Override <add> public Boolean call() throws Exception { <add> XWikiDocument myDoc = new XWikiDocument(testDocRef); <add> XWikiDocument loadedXWikiDoc = theCacheStore.loadXWikiDoc(myDoc, getContext()); <add> assertNotNull(loadedXWikiDoc); <add> // TODO check objects <add> return false; <add> } <add> <add> } <add> <add>}
Java
apache-2.0
9b3234a742db9491c8dc965288d9728c828d7432
0
ops4j/org.ops4j.pax.logging,ops4j/org.ops4j.pax.logging
/* * Copyright (c) 2004-2007 QOS.ch * All rights reserved. * * Permission is hereby granted, free of charge, to any person obtaining * a copy of this software and associated documentation files (the * "Software"), to deal in the Software without restriction, including * without limitation the rights to use, copy, modify, merge, publish, * distribute, sublicense, and/or sell copies of the Software, and to * permit persons to whom the Software is furnished to do so, subject to * the following conditions: * * The above copyright notice and this permission notice shall be * included in all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ package org.slf4j.impl; import org.slf4j.IMarkerFactory; import org.slf4j.MarkerFactory; import org.slf4j.helpers.BasicMarkerFactory; import org.slf4j.spi.MarkerFactoryBinder; /** * * The binding of {@link MarkerFactory} class with an actual instance of * {@link IMarkerFactory} is performed using information returned by this class. * * This class is meant to provide a *dummy* StaticMarkerBinder to the slf4j-api module. * Real implementations are found in each SLF4J binding project, e.g. slf4j-nop, * slf4j-log4j12 etc. * * @author Ceki G&uuml;lc&uuml; */ public class StaticMarkerBinder implements MarkerFactoryBinder { /** * The unique instance of this class. */ public static final StaticMarkerBinder SINGLETON = new StaticMarkerBinder(); private StaticMarkerBinder() { } /** * Currently this method always returns an instance of * {@link BasicMarkerFactory}. 
*/ public IMarkerFactory getMarkerFactory() { return new BasicMarkerFactory(); } /** * Currently, this method returns the class name of * {@link BasicMarkerFactory}. */ public String getMarkerFactoryClassStr() { return BasicMarkerFactory.class.getName(); } }
api/src/main/java/org/slf4j/impl/StaticMarkerBinder.java
/* * Copyright (c) 2004-2007 QOS.ch * All rights reserved. * * Permission is hereby granted, free of charge, to any person obtaining * a copy of this software and associated documentation files (the * "Software"), to deal in the Software without restriction, including * without limitation the rights to use, copy, modify, merge, publish, * distribute, sublicense, and/or sell copies of the Software, and to * permit persons to whom the Software is furnished to do so, subject to * the following conditions: * * The above copyright notice and this permission notice shall be * included in all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ package org.slf4j.impl; import org.slf4j.IMarkerFactory; import org.slf4j.MarkerFactory; import org.slf4j.helpers.BasicMarkerFactory; import org.slf4j.spi.MarkerFactoryBinder; /** * * The binding of {@link MarkerFactory} class with an actual instance of * {@link IMarkerFactory} is performed using information returned by this class. * * This class is meant to provide a *dummy* StaticMarkerBinder to the slf4j-api module. * Real implementations are found in each SLF4J binding project, e.g. slf4j-nop, * slf4j-log4j12 etc. * * @author Ceki G&uuml;lc&uuml; */ public class StaticMarkerBinder implements MarkerFactoryBinder { /** * The unique instance of this class. */ public static final StaticMarkerBinder SINGLETON = new StaticMarkerBinder(); private StaticMarkerBinder() { } /** * Currently this method always returns an instance of * {@link BasicMarkerFactory}. 
*/ public IMarkerFactory getMarkerFactory() { return null; } /** * Currently, this method returns the class name of * {@link BasicMarkerFactory}. */ public String getMarkerFactoryClassStr() { return BasicMarkerFactory.class.getName(); } }
PAXLOGGING-43: Incomplete SLF4J Marker support
api/src/main/java/org/slf4j/impl/StaticMarkerBinder.java
PAXLOGGING-43: Incomplete SLF4J Marker support
<ide><path>pi/src/main/java/org/slf4j/impl/StaticMarkerBinder.java <ide> * {@link BasicMarkerFactory}. <ide> */ <ide> public IMarkerFactory getMarkerFactory() { <del> return null; <add> return new BasicMarkerFactory(); <ide> } <ide> <ide> /**
Java
apache-2.0
bb2c1395af3cc99be07cee7476a23c81b81a820a
0
mtransitapps/commons-android,mtransitapps/commons-android,mtransitapps/commons-android
package org.mtransit.android.commons.provider; import org.junit.Before; import org.junit.Test; import org.mtransit.android.commons.TimeUtils; import org.mtransit.android.commons.data.POI; import org.mtransit.android.commons.data.POIStatus; import org.mtransit.android.commons.data.Route; import org.mtransit.android.commons.data.RouteTripStop; import org.mtransit.android.commons.data.Schedule; import org.mtransit.android.commons.data.Stop; import org.mtransit.android.commons.data.Trip; import org.mtransit.android.commons.provider.CaLTCOnlineProvider.JBusTimes; import org.mtransit.android.commons.provider.CaLTCOnlineProvider.JBusTimes.JResult; import org.mtransit.android.commons.provider.CaLTCOnlineProvider.JBusTimes.JResult.JRealTimeResult; import org.mtransit.android.commons.provider.CaLTCOnlineProvider.JBusTimes.JResult.JStopTimeResult; import org.mtransit.android.commons.provider.CaLTCOnlineProvider.JBusTimes.JResult.JStopTimeResult.JLine; import org.mtransit.android.commons.provider.CaLTCOnlineProvider.JBusTimes.JResult.JStopTimeResult.JStopTime; import org.mtransit.commons.CommonsApp; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.concurrent.TimeUnit; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; public class CaLTCOnlineProviderTest { private static final String AUTHORITY = "authority.test"; private static final Route DEFAULT_ROUTE = new Route(1, "1", "route 1", "color"); private static final Trip DEFAULT_TRIP = new Trip(1, Trip.HEADSIGN_TYPE_STRING, "trip 1", 1); private static final Stop DEFAULT_STOP = new Stop(1, "1", "stop 1", 0, 0); private final CaLTCOnlineProvider provider = new CaLTCOnlineProvider(); private RouteTripStop rts; @Before public void setUp() { CommonsApp.setup(false); rts = new RouteTripStop( AUTHORITY, POI.ITEM_VIEW_TYPE_ROUTE_TRIP_STOP, DEFAULT_ROUTE, DEFAULT_TRIP, 
DEFAULT_STOP, false); } @Test public void testParseAgencyJSON() { // Arrange JBusTimes jBusTimes = new JBusTimes(Collections.singletonList( new JResult( Arrays.asList( new JRealTimeResult(52500, 37760, 52725, 32, 999013, true), new JRealTimeResult(52500, 37880, 52770, 32, 999554, true), new JRealTimeResult(52920, 37711, 52770, 32, 998671, true), new JRealTimeResult(53220, 37881, 53490, 32, 999592, true), new JRealTimeResult(54300, 37760, 54225, 32, 999012, true), new JRealTimeResult(54300, 37880, 54495, 32, 999555, true), new JRealTimeResult(54480, 37711, 54975, 32, 998670, true), new JRealTimeResult(54720, 37711, 55215, 32, 998670, true), new JRealTimeResult(55020, 37881, 55215, 32, 999593, true), new JRealTimeResult(55980, 37711, 55980, 32, 998669, true) ), Collections.singletonList( new JStopTimeResult( Arrays.asList( new JLine("EASTBOUND ", "07", 37760, 32), new JLine("WESTBOUND", "02", 37711, 32), new JLine("EASTBOUND ", "17", 37880, 32), new JLine("WESTBOUND", "17", 37881, 32) ), Arrays.asList( new JStopTime("Argyle Mall via York", 52500, 37760, "32", 999013), new JStopTime("Argyle Mall via Oxford", 52500, 37880, "32", 999554), new JStopTime("Natural Science via Dundas", 52920, 37711, "32", 998671), new JStopTime("Byron via Oxford", 53220, 37881, "32", 999592), new JStopTime("Argyle Mall via York", 54300, 37760, "32", 999012), new JStopTime("Argyle Mall via Oxford", 54300, 37880, "32", 999555), new JStopTime("Natural Science via Dundas", 54480, 37711, "32", 998670), new JStopTime("Natural Science via Dundas", 54720, 37711, "32", 998670), new JStopTime("Byron via Oxford", 55020, 37881, "32", 999593), new JStopTime("Natural Science via Dundas", 55980, 37711, "32", 998669) ) ) ) ) )); String _7_E = CaLTCOnlineProvider.getAgencyRouteStopTargetUUID(AUTHORITY, "7", Trip.HEADING_EAST, "32"); String _2_W = CaLTCOnlineProvider.getAgencyRouteStopTargetUUID(AUTHORITY, "2", Trip.HEADING_WEST, "32"); String _17_E = 
CaLTCOnlineProvider.getAgencyRouteStopTargetUUID(AUTHORITY, "17", Trip.HEADING_EAST, "32"); String _17_W = CaLTCOnlineProvider.getAgencyRouteStopTargetUUID(AUTHORITY, "17", Trip.HEADING_WEST, "32"); List<String> expectedTargetUUIDs = Arrays.asList(_7_E, _2_W, _17_E, _17_W); long newLastUpdateInMs = 1544384312000L; // Sun, 09 Dec 2018 14:38:32 GMT-05:00 long beginningOfTodayInMs = 1544331600000L; // Sun, 09 Dec 2018 00:00:00 GMT-05:00 // Act List<POIStatus> result = provider.parseAgencyJSON(jBusTimes, rts, newLastUpdateInMs, beginningOfTodayInMs); // Assert assertEquals(4, result.size()); for (POIStatus poiStatus : result) { assertTrue(poiStatus instanceof Schedule); Schedule schedule = (Schedule) poiStatus; String targetUUID = schedule.getTargetUUID(); assertNotNull(targetUUID); assertTrue(expectedTargetUUIDs.contains(targetUUID)); if (_2_W.equalsIgnoreCase(targetUUID)) { assertEquals(4, schedule.getTimestampsCount()); assertEquals( "Natural Science Via Dundas", schedule.getTimestamps().get(0).getHeading()); assertEquals( TimeUtils.timeToTheTensSecondsMillis(beginningOfTodayInMs + TimeUnit.SECONDS.toMillis(52770)), schedule.getTimestamps().get(0).getT()); assertEquals( TimeUtils.timeToTheTensSecondsMillis(beginningOfTodayInMs + TimeUnit.SECONDS.toMillis(54975)), schedule.getTimestamps().get(1).getT()); assertEquals( TimeUtils.timeToTheTensSecondsMillis(beginningOfTodayInMs + TimeUnit.SECONDS.toMillis(55215)), schedule.getTimestamps().get(2).getT()); assertEquals( TimeUtils.timeToTheTensSecondsMillis(beginningOfTodayInMs + TimeUnit.SECONDS.toMillis(55980)), schedule.getTimestamps().get(3).getT()); } else if (_7_E.equalsIgnoreCase(targetUUID)) { assertEquals( "Argyle Mall Via York", schedule.getTimestamps().get(0).getHeading()); assertEquals(2, schedule.getTimestampsCount()); assertEquals( TimeUtils.timeToTheTensSecondsMillis(beginningOfTodayInMs + TimeUnit.SECONDS.toMillis(52725)), schedule.getTimestamps().get(0).getT()); assertEquals( 
TimeUtils.timeToTheTensSecondsMillis(beginningOfTodayInMs + TimeUnit.SECONDS.toMillis(54225)), schedule.getTimestamps().get(1).getT()); } else if (_17_E.equalsIgnoreCase(targetUUID)) { assertEquals( "Argyle Mall Via Oxford", schedule.getTimestamps().get(0).getHeading()); assertEquals(2, schedule.getTimestampsCount()); assertEquals( TimeUtils.timeToTheTensSecondsMillis(beginningOfTodayInMs + TimeUnit.SECONDS.toMillis(52770)), schedule.getTimestamps().get(0).getT()); assertEquals( TimeUtils.timeToTheTensSecondsMillis(beginningOfTodayInMs + TimeUnit.SECONDS.toMillis(54495)), schedule.getTimestamps().get(1).getT()); } else if (_17_W.equalsIgnoreCase(targetUUID)) { assertEquals( "Byron Via Oxford", schedule.getTimestamps().get(0).getHeading()); assertEquals(2, schedule.getTimestampsCount()); assertEquals( TimeUtils.timeToTheTensSecondsMillis(beginningOfTodayInMs + TimeUnit.SECONDS.toMillis(53490)), schedule.getTimestamps().get(0).getT()); assertEquals( TimeUtils.timeToTheTensSecondsMillis(beginningOfTodayInMs + TimeUnit.SECONDS.toMillis(55215)), schedule.getTimestamps().get(1).getT()); } else { fail("Unexpected target UUID'" + targetUUID + "'!"); } } } }
src/test/java/org/mtransit/android/commons/provider/CaLTCOnlineProviderTest.java
package org.mtransit.android.commons.provider; import org.junit.Before; import org.junit.Test; import org.mtransit.android.commons.TimeUtils; import org.mtransit.android.commons.data.POI; import org.mtransit.android.commons.data.POIStatus; import org.mtransit.android.commons.data.Route; import org.mtransit.android.commons.data.RouteTripStop; import org.mtransit.android.commons.data.Schedule; import org.mtransit.android.commons.data.Stop; import org.mtransit.android.commons.data.Trip; import org.mtransit.android.commons.provider.CaLTCOnlineProvider.JBusTimes; import org.mtransit.android.commons.provider.CaLTCOnlineProvider.JBusTimes.JResult; import org.mtransit.android.commons.provider.CaLTCOnlineProvider.JBusTimes.JResult.JRealTimeResult; import org.mtransit.android.commons.provider.CaLTCOnlineProvider.JBusTimes.JResult.JStopTimeResult; import org.mtransit.android.commons.provider.CaLTCOnlineProvider.JBusTimes.JResult.JStopTimeResult.JLine; import org.mtransit.android.commons.provider.CaLTCOnlineProvider.JBusTimes.JResult.JStopTimeResult.JStopTime; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.concurrent.TimeUnit; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; public class CaLTCOnlineProviderTest { private static final String AUTHORITY = "authority.test"; private static final Route DEFAULT_ROUTE = new Route(1, "1", "route 1", "color"); private static final Trip DEFAULT_TRIP = new Trip(1, Trip.HEADSIGN_TYPE_STRING, "trip 1", 1); private static final Stop DEFAULT_STOP = new Stop(1, "1", "stop 1", 0, 0); private final CaLTCOnlineProvider provider = new CaLTCOnlineProvider(); private RouteTripStop rts; @Before public void setUp() { rts = new RouteTripStop( AUTHORITY, POI.ITEM_VIEW_TYPE_ROUTE_TRIP_STOP, DEFAULT_ROUTE, DEFAULT_TRIP, DEFAULT_STOP, false); } @Test public void testParseAgencyJSON() { // 
Arrange JBusTimes jBusTimes = new JBusTimes(Collections.singletonList( new JResult( Arrays.asList( new JRealTimeResult(52500, 37760, 52725, 32, 999013, true), new JRealTimeResult(52500, 37880, 52770, 32, 999554, true), new JRealTimeResult(52920, 37711, 52770, 32, 998671, true), new JRealTimeResult(53220, 37881, 53490, 32, 999592, true), new JRealTimeResult(54300, 37760, 54225, 32, 999012, true), new JRealTimeResult(54300, 37880, 54495, 32, 999555, true), new JRealTimeResult(54480, 37711, 54975, 32, 998670, true), new JRealTimeResult(54720, 37711, 55215, 32, 998670, true), new JRealTimeResult(55020, 37881, 55215, 32, 999593, true), new JRealTimeResult(55980, 37711, 55980, 32, 998669, true) ), Collections.singletonList( new JStopTimeResult( Arrays.asList( new JLine("EASTBOUND ", "07", 37760, 32), new JLine("WESTBOUND", "02", 37711, 32), new JLine("EASTBOUND ", "17", 37880, 32), new JLine("WESTBOUND", "17", 37881, 32) ), Arrays.asList( new JStopTime("Argyle Mall via York", 52500, 37760, "32", 999013), new JStopTime("Argyle Mall via Oxford", 52500, 37880, "32", 999554), new JStopTime("Natural Science via Dundas", 52920, 37711, "32", 998671), new JStopTime("Byron via Oxford", 53220, 37881, "32", 999592), new JStopTime("Argyle Mall via York", 54300, 37760, "32", 999012), new JStopTime("Argyle Mall via Oxford", 54300, 37880, "32", 999555), new JStopTime("Natural Science via Dundas", 54480, 37711, "32", 998670), new JStopTime("Natural Science via Dundas", 54720, 37711, "32", 998670), new JStopTime("Byron via Oxford", 55020, 37881, "32", 999593), new JStopTime("Natural Science via Dundas", 55980, 37711, "32", 998669) ) ) ) ) )); String _7_E = CaLTCOnlineProvider.getAgencyRouteStopTargetUUID(AUTHORITY, "7", Trip.HEADING_EAST, "32"); String _2_W = CaLTCOnlineProvider.getAgencyRouteStopTargetUUID(AUTHORITY, "2", Trip.HEADING_WEST, "32"); String _17_E = CaLTCOnlineProvider.getAgencyRouteStopTargetUUID(AUTHORITY, "17", Trip.HEADING_EAST, "32"); String _17_W = 
CaLTCOnlineProvider.getAgencyRouteStopTargetUUID(AUTHORITY, "17", Trip.HEADING_WEST, "32"); List<String> expectedTargetUUIDs = Arrays.asList(_7_E, _2_W, _17_E, _17_W); long newLastUpdateInMs = 1544384312000L; // Sun, 09 Dec 2018 14:38:32 GMT-05:00 long beginningOfTodayInMs = 1544331600000L; // Sun, 09 Dec 2018 00:00:00 GMT-05:00 // Act List<POIStatus> result = provider.parseAgencyJSON(jBusTimes, rts, newLastUpdateInMs, beginningOfTodayInMs); // Assert assertEquals(4, result.size()); for (POIStatus poiStatus : result) { assertTrue(poiStatus instanceof Schedule); Schedule schedule = (Schedule) poiStatus; String targetUUID = schedule.getTargetUUID(); assertNotNull(targetUUID); assertTrue(expectedTargetUUIDs.contains(targetUUID)); if (_2_W.equalsIgnoreCase(targetUUID)) { assertEquals(4, schedule.getTimestampsCount()); assertEquals( "Natural Science Via Dundas", schedule.getTimestamps().get(0).getHeading()); assertEquals( TimeUtils.timeToTheTensSecondsMillis(beginningOfTodayInMs + TimeUnit.SECONDS.toMillis(52770)), schedule.getTimestamps().get(0).getT()); assertEquals( TimeUtils.timeToTheTensSecondsMillis(beginningOfTodayInMs + TimeUnit.SECONDS.toMillis(54975)), schedule.getTimestamps().get(1).getT()); assertEquals( TimeUtils.timeToTheTensSecondsMillis(beginningOfTodayInMs + TimeUnit.SECONDS.toMillis(55215)), schedule.getTimestamps().get(2).getT()); assertEquals( TimeUtils.timeToTheTensSecondsMillis(beginningOfTodayInMs + TimeUnit.SECONDS.toMillis(55980)), schedule.getTimestamps().get(3).getT()); } else if (_7_E.equalsIgnoreCase(targetUUID)) { assertEquals( "Argyle Mall Via York", schedule.getTimestamps().get(0).getHeading()); assertEquals(2, schedule.getTimestampsCount()); assertEquals( TimeUtils.timeToTheTensSecondsMillis(beginningOfTodayInMs + TimeUnit.SECONDS.toMillis(52725)), schedule.getTimestamps().get(0).getT()); assertEquals( TimeUtils.timeToTheTensSecondsMillis(beginningOfTodayInMs + TimeUnit.SECONDS.toMillis(54225)), schedule.getTimestamps().get(1).getT()); } 
else if (_17_E.equalsIgnoreCase(targetUUID)) { assertEquals( "Argyle Mall Via Oxford", schedule.getTimestamps().get(0).getHeading()); assertEquals(2, schedule.getTimestampsCount()); assertEquals( TimeUtils.timeToTheTensSecondsMillis(beginningOfTodayInMs + TimeUnit.SECONDS.toMillis(52770)), schedule.getTimestamps().get(0).getT()); assertEquals( TimeUtils.timeToTheTensSecondsMillis(beginningOfTodayInMs + TimeUnit.SECONDS.toMillis(54495)), schedule.getTimestamps().get(1).getT()); } else if (_17_W.equalsIgnoreCase(targetUUID)) { assertEquals( "Byron Via Oxford", schedule.getTimestamps().get(0).getHeading()); assertEquals(2, schedule.getTimestampsCount()); assertEquals( TimeUtils.timeToTheTensSecondsMillis(beginningOfTodayInMs + TimeUnit.SECONDS.toMillis(53490)), schedule.getTimestamps().get(0).getT()); assertEquals( TimeUtils.timeToTheTensSecondsMillis(beginningOfTodayInMs + TimeUnit.SECONDS.toMillis(55215)), schedule.getTimestamps().get(1).getT()); } else { fail("Unexpected target UUID'" + targetUUID + "'!"); } } } }
Fix tests > commons-java: setup()
src/test/java/org/mtransit/android/commons/provider/CaLTCOnlineProviderTest.java
Fix tests > commons-java: setup()
<ide><path>rc/test/java/org/mtransit/android/commons/provider/CaLTCOnlineProviderTest.java <ide> import org.mtransit.android.commons.provider.CaLTCOnlineProvider.JBusTimes.JResult.JStopTimeResult; <ide> import org.mtransit.android.commons.provider.CaLTCOnlineProvider.JBusTimes.JResult.JStopTimeResult.JLine; <ide> import org.mtransit.android.commons.provider.CaLTCOnlineProvider.JBusTimes.JResult.JStopTimeResult.JStopTime; <add>import org.mtransit.commons.CommonsApp; <ide> <ide> import java.util.Arrays; <ide> import java.util.Collections; <ide> <ide> @Before <ide> public void setUp() { <add> CommonsApp.setup(false); <ide> rts = new RouteTripStop( <ide> AUTHORITY, <ide> POI.ITEM_VIEW_TYPE_ROUTE_TRIP_STOP,
Java
apache-2.0
e7b16a5e3fba959cd76c70af002b5935bbcd9e3f
0
hs-jenkins-bot/Singularity,HubSpot/Singularity,HubSpot/Singularity,HubSpot/Singularity,hs-jenkins-bot/Singularity,HubSpot/Singularity,hs-jenkins-bot/Singularity,hs-jenkins-bot/Singularity,hs-jenkins-bot/Singularity,HubSpot/Singularity
package com.hubspot.singularity.executor.cleanup; import java.io.File; import java.io.IOException; import java.nio.file.DirectoryStream; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Objects; import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.stream.Stream; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.base.Optional; import com.google.common.base.Strings; import com.google.common.base.Throwables; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Sets; import com.google.inject.Inject; import com.google.inject.name.Named; import com.hubspot.mesos.JavaUtils; import com.hubspot.mesos.SingularityContainerType; import com.hubspot.mesos.client.MesosClient; import com.hubspot.singularity.MachineState; import com.hubspot.singularity.SingularityClientCredentials; import com.hubspot.singularity.SingularitySlave; import com.hubspot.singularity.SingularityTask; import com.hubspot.singularity.SingularityTaskExecutorData; import com.hubspot.singularity.SingularityTaskHistory; import com.hubspot.singularity.SingularityTaskHistoryUpdate; import com.hubspot.singularity.client.SingularityClient; import com.hubspot.singularity.client.SingularityClientException; import com.hubspot.singularity.client.SingularityClientProvider; import com.hubspot.singularity.executor.SingularityExecutorCleanupStatistics; import com.hubspot.singularity.executor.SingularityExecutorCleanupStatistics.SingularityExecutorCleanupStatisticsBuilder; import com.hubspot.singularity.executor.TemplateManager; import com.hubspot.singularity.executor.cleanup.config.SingularityExecutorCleanupConfiguration; import 
com.hubspot.singularity.executor.config.SingularityExecutorConfiguration; import com.hubspot.singularity.executor.config.SingularityExecutorLogrotateAdditionalFile; import com.hubspot.singularity.executor.task.SingularityExecutorTaskCleanup; import com.hubspot.singularity.executor.task.SingularityExecutorTaskDefinition; import com.hubspot.singularity.executor.task.SingularityExecutorTaskLogManager; import com.hubspot.singularity.executor.task.TaskCleanupResult; import com.hubspot.singularity.executor.utils.DockerUtils; import com.hubspot.singularity.runner.base.config.SingularityRunnerBaseModule; import com.hubspot.singularity.runner.base.configuration.SingularityRunnerBaseConfiguration; import com.hubspot.singularity.runner.base.sentry.SingularityRunnerExceptionNotifier; import com.hubspot.singularity.runner.base.shared.CompressionType; import com.hubspot.singularity.runner.base.shared.JsonObjectFileHelper; import com.hubspot.singularity.runner.base.shared.ProcessFailedException; import com.hubspot.singularity.runner.base.shared.ProcessUtils; import com.hubspot.singularity.runner.base.shared.SimpleProcessManager; import com.spotify.docker.client.messages.Container; import com.spotify.docker.client.messages.ContainerInfo; public class SingularityExecutorCleanup { private static final Logger LOG = LoggerFactory.getLogger(SingularityExecutorCleanup.class); private final JsonObjectFileHelper jsonObjectFileHelper; private final SingularityRunnerBaseConfiguration baseConfiguration; private final SingularityExecutorConfiguration executorConfiguration; private final SingularityClient singularityClient; private final TemplateManager templateManager; private final SingularityExecutorCleanupConfiguration cleanupConfiguration; private final MesosClient mesosClient; private final ProcessUtils processUtils; private final DockerUtils dockerUtils; private final String hostname; private final SingularityRunnerExceptionNotifier exceptionNotifier; @Inject public 
SingularityExecutorCleanup(SingularityClientProvider singularityClientProvider, JsonObjectFileHelper jsonObjectFileHelper, SingularityRunnerBaseConfiguration baseConfiguration, SingularityExecutorConfiguration executorConfiguration, SingularityExecutorCleanupConfiguration cleanupConfiguration, TemplateManager templateManager, MesosClient mesosClient, DockerUtils dockerUtils, @Named(SingularityRunnerBaseModule.HOST_NAME_PROPERTY) String hostname, SingularityRunnerExceptionNotifier exceptionNotifier) { this.jsonObjectFileHelper = jsonObjectFileHelper; this.baseConfiguration = baseConfiguration; this.executorConfiguration = executorConfiguration; this.cleanupConfiguration = cleanupConfiguration; this.templateManager = templateManager; this.mesosClient = mesosClient; this.processUtils = new ProcessUtils(LOG); this.dockerUtils = dockerUtils; this.hostname = hostname; this.exceptionNotifier = exceptionNotifier; Optional<SingularityClientCredentials> maybeCredentials = getClientCredentials(cleanupConfiguration, jsonObjectFileHelper); if (maybeCredentials.isPresent()) { singularityClientProvider.setCredentials(maybeCredentials.get()); } this.singularityClient = singularityClientProvider.setSsl(cleanupConfiguration.isSingularityUseSsl()).get(); } private static Optional<SingularityClientCredentials> getClientCredentials(SingularityExecutorCleanupConfiguration cleanupConfiguration, JsonObjectFileHelper jsonObjectFileHelper) { try { if (cleanupConfiguration.getSingularityClientCredentialsPath().isPresent()) { Optional<SingularityClientCredentials> maybeCredentials = jsonObjectFileHelper.read(new File(cleanupConfiguration.getSingularityClientCredentialsPath().get()).toPath(), LOG, SingularityClientCredentials.class); if (maybeCredentials.isPresent()) { return maybeCredentials; } } if (cleanupConfiguration.getSingularityClientCredentials().isPresent()) { return cleanupConfiguration.getSingularityClientCredentials(); } return Optional.absent(); } catch (Throwable t) { throw new 
RuntimeException(t); } } private Set<String> getRunningTaskIdsSafe(SingularityExecutorCleanupStatisticsBuilder statisticsBldr) { try { return getRunningTaskIds(); } catch (Throwable t) { LOG.error("While fetching running tasks from singularity", t); exceptionNotifier.notify(String.format("Error fetching running tasks (%s)", t.getMessage()), t, Collections.<String, String>emptyMap()); statisticsBldr.setErrorMessage(t.getMessage()); return null; } } public SingularityExecutorCleanupStatistics clean() { final SingularityExecutorCleanupStatisticsBuilder statisticsBldr = new SingularityExecutorCleanupStatisticsBuilder(); final Path directory = Paths.get(executorConfiguration.getGlobalTaskDefinitionDirectory()); Set<String> runningTaskIds = getRunningTaskIdsSafe(statisticsBldr); if (runningTaskIds == null) { return statisticsBldr.build(); } LOG.info("Found {} running tasks from Mesos", runningTaskIds); statisticsBldr.setMesosRunningTasks(runningTaskIds.size()); if (runningTaskIds.isEmpty()) { if (!isDecommissioned()) { if (cleanupConfiguration.isSafeModeWontRunWithNoTasks()) { final String errorMessage = "Running in safe mode and found 0 running tasks - aborting cleanup"; LOG.error(errorMessage); statisticsBldr.setErrorMessage(errorMessage); return statisticsBldr.build(); } else { LOG.warn("Found 0 running tasks - proceeding with cleanup as we are not in safe mode"); } } else { if (!cleanupConfiguration.isCleanTasksWhenDecommissioned()) { return statisticsBldr.build(); } } } if (cleanupConfiguration.isRunDockerCleanup()) { cleanDocker(runningTaskIds); } try { try (Stream<Path> paths = Files.walk(directory, 1)) { paths.forEach((file) -> { if (!Objects.toString(file.getFileName()).endsWith(executorConfiguration.getGlobalTaskDefinitionSuffix())) { LOG.debug("Ignoring file {} that doesn't have suffix {}", file, executorConfiguration.getGlobalTaskDefinitionSuffix()); statisticsBldr.incrInvalidTasks(); return; } statisticsBldr.incrTotalTaskFiles(); try { 
Optional<SingularityExecutorTaskDefinition> maybeTaskDefinition = jsonObjectFileHelper.read(file, LOG, SingularityExecutorTaskDefinition.class); if (!maybeTaskDefinition.isPresent()) { statisticsBldr.incrInvalidTasks(); return; } SingularityExecutorTaskDefinition taskDefinition = withDefaults(maybeTaskDefinition.get()); final String taskId = taskDefinition.getTaskId(); LOG.info("{} - Starting possible cleanup", taskId); if (runningTaskIds.contains(taskId) || executorStillRunning(taskDefinition)) { statisticsBldr.incrRunningTasksIgnored(); return; } Optional<SingularityTaskHistory> taskHistory = null; try { taskHistory = singularityClient.getHistoryForTask(taskId); } catch (SingularityClientException sce) { LOG.error("{} - Failed fetching history", taskId, sce); exceptionNotifier.notify(String.format("Error fetching history (%s)", sce.getMessage()), sce, ImmutableMap.<String, String>of("taskId", taskId)); statisticsBldr.incrErrorTasks(); return; } TaskCleanupResult result = cleanTask(taskDefinition, taskHistory); LOG.info("{} - {}", taskId, result); switch (result) { case ERROR: statisticsBldr.incrErrorTasks(); break; case SUCCESS: statisticsBldr.incrSuccessfullyCleanedTasks(); break; case WAITING: statisticsBldr.incrWaitingTasks(); break; default: break; } } catch (IOException ioe) { LOG.error("Couldn't read file {}", file, ioe); exceptionNotifier.notify(String.format("Error reading file (%s)", ioe.getMessage()), ioe, ImmutableMap.of("file", file.toString())); statisticsBldr.incrIoErrorTasks(); } }); } } catch (IOException ioe) { LOG.error("Couldn't read files", ioe); exceptionNotifier.notify(String.format("Error reading files (%s)", ioe.getMessage()), ioe, Collections.emptyMap()); statisticsBldr.incrIoErrorTasks(); } return statisticsBldr.build(); } private SingularityExecutorTaskDefinition withDefaults(SingularityExecutorTaskDefinition oldDefinition) { return new SingularityExecutorTaskDefinition( oldDefinition.getTaskId(), new SingularityTaskExecutorData( 
oldDefinition.getExecutorData(), oldDefinition.getExecutorData().getS3UploaderAdditionalFiles() == null ? cleanupConfiguration.getS3UploaderAdditionalFiles() : oldDefinition.getExecutorData().getS3UploaderAdditionalFiles(), Strings.isNullOrEmpty(oldDefinition.getExecutorData().getDefaultS3Bucket()) ? cleanupConfiguration.getDefaultS3Bucket() : oldDefinition.getExecutorData().getDefaultS3Bucket(), Strings.isNullOrEmpty(oldDefinition.getExecutorData().getS3UploaderKeyPattern()) ? cleanupConfiguration.getS3KeyFormat(): oldDefinition.getExecutorData().getS3UploaderKeyPattern(), Strings.isNullOrEmpty(oldDefinition.getExecutorData().getServiceLog()) ? cleanupConfiguration.getDefaultServiceLog() : oldDefinition.getExecutorData().getServiceLog(), Strings.isNullOrEmpty(oldDefinition.getExecutorData().getServiceFinishedTailLog()) ? cleanupConfiguration.getDefaultServiceFinishedTailLog() : oldDefinition.getExecutorData().getServiceFinishedTailLog(), oldDefinition.getExecutorData().getRequestGroup(), oldDefinition.getExecutorData().getS3StorageClass(), oldDefinition.getExecutorData().getApplyS3StorageClassAfterBytes(), oldDefinition.getExecutorData().getCpuHardLimit(), Optional.absent() ), oldDefinition.getTaskDirectory(), oldDefinition.getExecutorPid(), Strings.isNullOrEmpty(oldDefinition.getServiceLogFileName()) ? cleanupConfiguration.getDefaultServiceLog() :oldDefinition.getServiceLogFileName(), oldDefinition.getServiceLogOutExtension(), Strings.isNullOrEmpty(oldDefinition.getServiceFinishedTailLogFileName()) ? 
cleanupConfiguration.getDefaultServiceFinishedTailLog() : oldDefinition.getServiceFinishedTailLogFileName(), oldDefinition.getTaskAppDirectory(), oldDefinition.getExecutorBashOut(), oldDefinition.getLogrotateStateFile(), oldDefinition.getSignatureVerifyOut() ); } private boolean isDecommissioned() { Collection<SingularitySlave> slaves = singularityClient.getSlaves(Optional.of(MachineState.DECOMMISSIONED)); boolean decommissioned = false; for (SingularitySlave slave : slaves) { if (slave.getHost().equals(hostname)) { decommissioned = true; } } return decommissioned; } private Set<String> getRunningTaskIds() { final String slaveId = mesosClient.getSlaveState(mesosClient.getSlaveUri(hostname)).getId(); final Collection<SingularityTask> activeTasks = singularityClient.getActiveTasksOnSlave(slaveId); final Set<String> runningTaskIds = Sets.newHashSet(); for (SingularityTask task : activeTasks) { runningTaskIds.add(task.getTaskId().getId()); } return runningTaskIds; } private boolean executorStillRunning(SingularityExecutorTaskDefinition taskDefinition) { Optional<Integer> executorPidSafe = taskDefinition.getExecutorPidSafe(); if (!executorPidSafe.isPresent()) { return false; } return processUtils.doesProcessExist(executorPidSafe.get()); } private TaskCleanupResult cleanTask(SingularityExecutorTaskDefinition taskDefinition, Optional<SingularityTaskHistory> taskHistory) { SingularityExecutorTaskLogManager logManager = new SingularityExecutorTaskLogManager(taskDefinition, templateManager, baseConfiguration, executorConfiguration, LOG, jsonObjectFileHelper, false); SingularityExecutorTaskCleanup taskCleanup = new SingularityExecutorTaskCleanup(logManager, executorConfiguration, taskDefinition, LOG, dockerUtils); boolean cleanupTaskAppDirectory = !taskDefinition.getExecutorData().getPreserveTaskSandboxAfterFinish().or(Boolean.FALSE); if (taskDefinition.shouldLogrotateLogFile()) { checkForUncompressedLogrotatedFile(taskDefinition); } if (taskHistory.isPresent()) { final 
Optional<SingularityTaskHistoryUpdate> lastUpdate = JavaUtils.getLast(taskHistory.get().getTaskUpdates()); if (lastUpdate.isPresent()) { if (lastUpdate.get().getTaskState().isDone() && System.currentTimeMillis() - lastUpdate.get().getTimestamp() > TimeUnit.MINUTES.toMillis(15)) { LOG.info("Task {} is done for > 15 minutes, removing logrotate files"); taskCleanup.cleanUpLogs(); checkForLogrotateAdditionalFilesToDelete(taskDefinition); } if (lastUpdate.get().getTaskState().isFailed()) { final long delta = System.currentTimeMillis() - lastUpdate.get().getTimestamp(); if (delta < cleanupConfiguration.getCleanupAppDirectoryOfFailedTasksAfterMillis()) { LOG.info("Not cleaning up task app directory of {} because only {} has elapsed since it failed (will cleanup after {})", taskDefinition.getTaskId(), JavaUtils.durationFromMillis(delta), JavaUtils.durationFromMillis(cleanupConfiguration.getCleanupAppDirectoryOfFailedTasksAfterMillis())); cleanupTaskAppDirectory = false; } } } } boolean isDocker = (taskHistory.isPresent() && taskHistory.get().getTask().getTaskRequest().getDeploy().getContainerInfo().isPresent() && taskHistory.get().getTask().getTaskRequest().getDeploy().getContainerInfo().get().getType() == SingularityContainerType.DOCKER); return taskCleanup.cleanup(cleanupTaskAppDirectory, isDocker); } private void checkForLogrotateAdditionalFilesToDelete(SingularityExecutorTaskDefinition taskDefinition) { executorConfiguration.getLogrotateAdditionalFiles() .stream() .filter(SingularityExecutorLogrotateAdditionalFile::isDeleteInExecutorCleanup) .forEach(toDelete -> { try (DirectoryStream<Path> stream = Files.newDirectoryStream(taskDefinition.getTaskDirectoryPath(), String.format("glob:%s", toDelete.getFilename()))) { stream.iterator().forEachRemaining(path -> { try { Files.delete(path); } catch (IOException e) { LOG.error("Unable to delete logrotateAdditionalFile {}", path.toString(), e); } }); } catch (IOException e) { LOG.error("Unable to list files while trying to 
files to delete for {}", toDelete); } }); } private Iterator<Path> getUncompressedLogrotatedFileIterator(SingularityExecutorTaskDefinition taskDefinition) { final Path serviceLogOutPath = taskDefinition.getServiceLogOutPath(); final Path parent = serviceLogOutPath.getParent(); if (parent == null) { throw new IllegalStateException("Service log path " + serviceLogOutPath + " has no parent"); } final Path logrotateToPath = parent.resolve(executorConfiguration.getLogrotateToDirectory()); if (!logrotateToPath.toFile().exists() || !logrotateToPath.toFile().isDirectory()) { LOG.warn("Skipping uncompressed logrotated file cleanup for {} -- {} does not exist or is not a directory (task sandbox was probably garbage collected by Mesos)", taskDefinition.getTaskId(), logrotateToPath); return Collections.emptyIterator(); } try { DirectoryStream<Path> dirStream = Files.newDirectoryStream(logrotateToPath, String.format("%s-*", serviceLogOutPath.getFileName())); return dirStream.iterator(); } catch (IOException e) { throw Throwables.propagate(e); } } private void checkForUncompressedLogrotatedFile(SingularityExecutorTaskDefinition taskDefinition) { final Iterator<Path> iterator = getUncompressedLogrotatedFileIterator(taskDefinition); final Set<String> emptyPaths = new HashSet<>(); final List<Path> uncompressedFiles = new ArrayList<>(); // check for matched 0 byte compressed files.. 
and delete/compress them while (iterator.hasNext()) { Path path = iterator.next(); final String fileName = Objects.toString(path.getFileName()); Optional<CompressionType> maybeCompressionType = getFileCompressionType(fileName); if (maybeCompressionType.isPresent()) { try { if (Files.size(path) == 0) { Files.deleteIfExists(path); emptyPaths.add(fileName.substring(0, fileName.length() - maybeCompressionType.get().getExtention().length())); } } catch (IOException ioe) { LOG.error("Failed to handle empty {} file {}", maybeCompressionType.get(), path, ioe); exceptionNotifier.notify(String.format("Error handling empty file (%s)", ioe.getMessage()), ioe, ImmutableMap.of("file", path.toString())); } } else { uncompressedFiles.add(path); } } for (Path path : uncompressedFiles) { if (emptyPaths.contains(Objects.toString(path.getFileName()))) { LOG.info("Compressing abandoned file {}", path); try { new SimpleProcessManager(LOG).runCommand(ImmutableList.<String> of(cleanupConfiguration.getCompressionType().getCommand(), path.toString())); } catch (InterruptedException | ProcessFailedException e) { LOG.error("Failed to {} {}", cleanupConfiguration.getCompressionType(), path, e); exceptionNotifier.notify(String.format("Failed to gzip (%s)", e.getMessage()), e, ImmutableMap.of("file", path.toString())); } } else { LOG.debug("Didn't find matched empty {} file for {}", cleanupConfiguration.getCompressionType(), path); } } } private Optional<CompressionType> getFileCompressionType(String fileName) { if (fileName.endsWith(CompressionType.GZIP.getExtention())) { return Optional.of(CompressionType.GZIP); } else if (fileName.endsWith(CompressionType.BZIP2.getExtention())) { return Optional.of(CompressionType.BZIP2); } else { return Optional.absent(); } } private void cleanDocker(Set<String> runningTaskIds) { try { for (Container container : dockerUtils.listContainers()) { for (String name : container.names()) { if (name.startsWith(executorConfiguration.getDockerPrefix())) { if 
(!runningTaskIds.contains(name.substring(executorConfiguration.getDockerPrefix().length()))) { stopContainer(container); } } } } } catch (Exception e) { LOG.error("Could not get list of Docker containers", e); exceptionNotifier.notify(String.format("Error listing docker containers (%s)", e.getMessage()), e, Collections.<String, String>emptyMap()); } } private void stopContainer(Container container) { try { ContainerInfo containerInfo = dockerUtils.inspectContainer(container.id()); if (containerInfo.state().running()) { dockerUtils.stopContainer(container.id(), executorConfiguration.getDockerStopTimeout()); LOG.debug("Forcefully stopped container {}", container.names()); } dockerUtils.removeContainer(container.id(), true); LOG.debug("Removed container {}", container.names()); } catch (Exception e) { LOG.error("Failed to stop or remove container {}", container.names(), e); exceptionNotifier.notify(String.format("Failed stopping container (%s)", e.getMessage()), e, Collections.<String, String>emptyMap()); } } }
SingularityExecutorCleanup/src/main/java/com/hubspot/singularity/executor/cleanup/SingularityExecutorCleanup.java
package com.hubspot.singularity.executor.cleanup; import java.io.File; import java.io.IOException; import java.nio.file.DirectoryStream; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Objects; import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.stream.Stream; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.base.Optional; import com.google.common.base.Strings; import com.google.common.base.Throwables; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Sets; import com.google.inject.Inject; import com.google.inject.name.Named; import com.hubspot.mesos.JavaUtils; import com.hubspot.mesos.SingularityContainerType; import com.hubspot.mesos.client.MesosClient; import com.hubspot.singularity.MachineState; import com.hubspot.singularity.SingularityClientCredentials; import com.hubspot.singularity.SingularitySlave; import com.hubspot.singularity.SingularityTask; import com.hubspot.singularity.SingularityTaskExecutorData; import com.hubspot.singularity.SingularityTaskHistory; import com.hubspot.singularity.SingularityTaskHistoryUpdate; import com.hubspot.singularity.client.SingularityClient; import com.hubspot.singularity.client.SingularityClientException; import com.hubspot.singularity.client.SingularityClientProvider; import com.hubspot.singularity.executor.SingularityExecutorCleanupStatistics; import com.hubspot.singularity.executor.SingularityExecutorCleanupStatistics.SingularityExecutorCleanupStatisticsBuilder; import com.hubspot.singularity.executor.TemplateManager; import com.hubspot.singularity.executor.cleanup.config.SingularityExecutorCleanupConfiguration; import 
com.hubspot.singularity.executor.config.SingularityExecutorConfiguration; import com.hubspot.singularity.executor.task.SingularityExecutorTaskCleanup; import com.hubspot.singularity.executor.task.SingularityExecutorTaskDefinition; import com.hubspot.singularity.executor.task.SingularityExecutorTaskLogManager; import com.hubspot.singularity.executor.task.TaskCleanupResult; import com.hubspot.singularity.executor.utils.DockerUtils; import com.hubspot.singularity.runner.base.config.SingularityRunnerBaseModule; import com.hubspot.singularity.runner.base.configuration.SingularityRunnerBaseConfiguration; import com.hubspot.singularity.runner.base.sentry.SingularityRunnerExceptionNotifier; import com.hubspot.singularity.runner.base.shared.CompressionType; import com.hubspot.singularity.runner.base.shared.JsonObjectFileHelper; import com.hubspot.singularity.runner.base.shared.ProcessFailedException; import com.hubspot.singularity.runner.base.shared.ProcessUtils; import com.hubspot.singularity.runner.base.shared.SimpleProcessManager; import com.spotify.docker.client.messages.Container; import com.spotify.docker.client.messages.ContainerInfo; public class SingularityExecutorCleanup { private static final Logger LOG = LoggerFactory.getLogger(SingularityExecutorCleanup.class); private final JsonObjectFileHelper jsonObjectFileHelper; private final SingularityRunnerBaseConfiguration baseConfiguration; private final SingularityExecutorConfiguration executorConfiguration; private final SingularityClient singularityClient; private final TemplateManager templateManager; private final SingularityExecutorCleanupConfiguration cleanupConfiguration; private final MesosClient mesosClient; private final ProcessUtils processUtils; private final DockerUtils dockerUtils; private final String hostname; private final SingularityRunnerExceptionNotifier exceptionNotifier; @Inject public SingularityExecutorCleanup(SingularityClientProvider singularityClientProvider, JsonObjectFileHelper 
jsonObjectFileHelper, SingularityRunnerBaseConfiguration baseConfiguration, SingularityExecutorConfiguration executorConfiguration, SingularityExecutorCleanupConfiguration cleanupConfiguration, TemplateManager templateManager, MesosClient mesosClient, DockerUtils dockerUtils, @Named(SingularityRunnerBaseModule.HOST_NAME_PROPERTY) String hostname, SingularityRunnerExceptionNotifier exceptionNotifier) { this.jsonObjectFileHelper = jsonObjectFileHelper; this.baseConfiguration = baseConfiguration; this.executorConfiguration = executorConfiguration; this.cleanupConfiguration = cleanupConfiguration; this.templateManager = templateManager; this.mesosClient = mesosClient; this.processUtils = new ProcessUtils(LOG); this.dockerUtils = dockerUtils; this.hostname = hostname; this.exceptionNotifier = exceptionNotifier; Optional<SingularityClientCredentials> maybeCredentials = getClientCredentials(cleanupConfiguration, jsonObjectFileHelper); if (maybeCredentials.isPresent()) { singularityClientProvider.setCredentials(maybeCredentials.get()); } this.singularityClient = singularityClientProvider.setSsl(cleanupConfiguration.isSingularityUseSsl()).get(); } private static Optional<SingularityClientCredentials> getClientCredentials(SingularityExecutorCleanupConfiguration cleanupConfiguration, JsonObjectFileHelper jsonObjectFileHelper) { try { if (cleanupConfiguration.getSingularityClientCredentialsPath().isPresent()) { Optional<SingularityClientCredentials> maybeCredentials = jsonObjectFileHelper.read(new File(cleanupConfiguration.getSingularityClientCredentialsPath().get()).toPath(), LOG, SingularityClientCredentials.class); if (maybeCredentials.isPresent()) { return maybeCredentials; } } if (cleanupConfiguration.getSingularityClientCredentials().isPresent()) { return cleanupConfiguration.getSingularityClientCredentials(); } return Optional.absent(); } catch (Throwable t) { throw new RuntimeException(t); } } private Set<String> 
getRunningTaskIdsSafe(SingularityExecutorCleanupStatisticsBuilder statisticsBldr) { try { return getRunningTaskIds(); } catch (Throwable t) { LOG.error("While fetching running tasks from singularity", t); exceptionNotifier.notify(String.format("Error fetching running tasks (%s)", t.getMessage()), t, Collections.<String, String>emptyMap()); statisticsBldr.setErrorMessage(t.getMessage()); return null; } } public SingularityExecutorCleanupStatistics clean() { final SingularityExecutorCleanupStatisticsBuilder statisticsBldr = new SingularityExecutorCleanupStatisticsBuilder(); final Path directory = Paths.get(executorConfiguration.getGlobalTaskDefinitionDirectory()); Set<String> runningTaskIds = getRunningTaskIdsSafe(statisticsBldr); if (runningTaskIds == null) { return statisticsBldr.build(); } LOG.info("Found {} running tasks from Mesos", runningTaskIds); statisticsBldr.setMesosRunningTasks(runningTaskIds.size()); if (runningTaskIds.isEmpty()) { if (!isDecommissioned()) { if (cleanupConfiguration.isSafeModeWontRunWithNoTasks()) { final String errorMessage = "Running in safe mode and found 0 running tasks - aborting cleanup"; LOG.error(errorMessage); statisticsBldr.setErrorMessage(errorMessage); return statisticsBldr.build(); } else { LOG.warn("Found 0 running tasks - proceeding with cleanup as we are not in safe mode"); } } else { if (!cleanupConfiguration.isCleanTasksWhenDecommissioned()) { return statisticsBldr.build(); } } } if (cleanupConfiguration.isRunDockerCleanup()) { cleanDocker(runningTaskIds); } try { try (Stream<Path> paths = Files.walk(directory, 1)) { paths.forEach((file) -> { if (!Objects.toString(file.getFileName()).endsWith(executorConfiguration.getGlobalTaskDefinitionSuffix())) { LOG.debug("Ignoring file {} that doesn't have suffix {}", file, executorConfiguration.getGlobalTaskDefinitionSuffix()); statisticsBldr.incrInvalidTasks(); return; } statisticsBldr.incrTotalTaskFiles(); try { Optional<SingularityExecutorTaskDefinition> maybeTaskDefinition = 
jsonObjectFileHelper.read(file, LOG, SingularityExecutorTaskDefinition.class); if (!maybeTaskDefinition.isPresent()) { statisticsBldr.incrInvalidTasks(); return; } SingularityExecutorTaskDefinition taskDefinition = withDefaults(maybeTaskDefinition.get()); final String taskId = taskDefinition.getTaskId(); LOG.info("{} - Starting possible cleanup", taskId); if (runningTaskIds.contains(taskId) || executorStillRunning(taskDefinition)) { statisticsBldr.incrRunningTasksIgnored(); return; } Optional<SingularityTaskHistory> taskHistory = null; try { taskHistory = singularityClient.getHistoryForTask(taskId); } catch (SingularityClientException sce) { LOG.error("{} - Failed fetching history", taskId, sce); exceptionNotifier.notify(String.format("Error fetching history (%s)", sce.getMessage()), sce, ImmutableMap.<String, String>of("taskId", taskId)); statisticsBldr.incrErrorTasks(); return; } TaskCleanupResult result = cleanTask(taskDefinition, taskHistory); LOG.info("{} - {}", taskId, result); switch (result) { case ERROR: statisticsBldr.incrErrorTasks(); break; case SUCCESS: statisticsBldr.incrSuccessfullyCleanedTasks(); break; case WAITING: statisticsBldr.incrWaitingTasks(); break; default: break; } } catch (IOException ioe) { LOG.error("Couldn't read file {}", file, ioe); exceptionNotifier.notify(String.format("Error reading file (%s)", ioe.getMessage()), ioe, ImmutableMap.of("file", file.toString())); statisticsBldr.incrIoErrorTasks(); } }); } } catch (IOException ioe) { LOG.error("Couldn't read files", ioe); exceptionNotifier.notify(String.format("Error reading files (%s)", ioe.getMessage()), ioe, Collections.emptyMap()); statisticsBldr.incrIoErrorTasks(); } return statisticsBldr.build(); } private SingularityExecutorTaskDefinition withDefaults(SingularityExecutorTaskDefinition oldDefinition) { return new SingularityExecutorTaskDefinition( oldDefinition.getTaskId(), new SingularityTaskExecutorData( oldDefinition.getExecutorData(), 
oldDefinition.getExecutorData().getS3UploaderAdditionalFiles() == null ? cleanupConfiguration.getS3UploaderAdditionalFiles() : oldDefinition.getExecutorData().getS3UploaderAdditionalFiles(), Strings.isNullOrEmpty(oldDefinition.getExecutorData().getDefaultS3Bucket()) ? cleanupConfiguration.getDefaultS3Bucket() : oldDefinition.getExecutorData().getDefaultS3Bucket(), Strings.isNullOrEmpty(oldDefinition.getExecutorData().getS3UploaderKeyPattern()) ? cleanupConfiguration.getS3KeyFormat(): oldDefinition.getExecutorData().getS3UploaderKeyPattern(), Strings.isNullOrEmpty(oldDefinition.getExecutorData().getServiceLog()) ? cleanupConfiguration.getDefaultServiceLog() : oldDefinition.getExecutorData().getServiceLog(), Strings.isNullOrEmpty(oldDefinition.getExecutorData().getServiceFinishedTailLog()) ? cleanupConfiguration.getDefaultServiceFinishedTailLog() : oldDefinition.getExecutorData().getServiceFinishedTailLog(), oldDefinition.getExecutorData().getRequestGroup(), oldDefinition.getExecutorData().getS3StorageClass(), oldDefinition.getExecutorData().getApplyS3StorageClassAfterBytes(), oldDefinition.getExecutorData().getCpuHardLimit(), Optional.absent() ), oldDefinition.getTaskDirectory(), oldDefinition.getExecutorPid(), Strings.isNullOrEmpty(oldDefinition.getServiceLogFileName()) ? cleanupConfiguration.getDefaultServiceLog() :oldDefinition.getServiceLogFileName(), oldDefinition.getServiceLogOutExtension(), Strings.isNullOrEmpty(oldDefinition.getServiceFinishedTailLogFileName()) ? 
cleanupConfiguration.getDefaultServiceFinishedTailLog() : oldDefinition.getServiceFinishedTailLogFileName(), oldDefinition.getTaskAppDirectory(), oldDefinition.getExecutorBashOut(), oldDefinition.getLogrotateStateFile(), oldDefinition.getSignatureVerifyOut() ); } private boolean isDecommissioned() { Collection<SingularitySlave> slaves = singularityClient.getSlaves(Optional.of(MachineState.DECOMMISSIONED)); boolean decommissioned = false; for (SingularitySlave slave : slaves) { if (slave.getHost().equals(hostname)) { decommissioned = true; } } return decommissioned; } private Set<String> getRunningTaskIds() { final String slaveId = mesosClient.getSlaveState(mesosClient.getSlaveUri(hostname)).getId(); final Collection<SingularityTask> activeTasks = singularityClient.getActiveTasksOnSlave(slaveId); final Set<String> runningTaskIds = Sets.newHashSet(); for (SingularityTask task : activeTasks) { runningTaskIds.add(task.getTaskId().getId()); } return runningTaskIds; } private boolean executorStillRunning(SingularityExecutorTaskDefinition taskDefinition) { Optional<Integer> executorPidSafe = taskDefinition.getExecutorPidSafe(); if (!executorPidSafe.isPresent()) { return false; } return processUtils.doesProcessExist(executorPidSafe.get()); } private TaskCleanupResult cleanTask(SingularityExecutorTaskDefinition taskDefinition, Optional<SingularityTaskHistory> taskHistory) { SingularityExecutorTaskLogManager logManager = new SingularityExecutorTaskLogManager(taskDefinition, templateManager, baseConfiguration, executorConfiguration, LOG, jsonObjectFileHelper, false); SingularityExecutorTaskCleanup taskCleanup = new SingularityExecutorTaskCleanup(logManager, executorConfiguration, taskDefinition, LOG, dockerUtils); boolean cleanupTaskAppDirectory = !taskDefinition.getExecutorData().getPreserveTaskSandboxAfterFinish().or(Boolean.FALSE); if (taskDefinition.shouldLogrotateLogFile()) { checkForUncompressedLogrotatedFile(taskDefinition); } if (taskHistory.isPresent()) { final 
Optional<SingularityTaskHistoryUpdate> lastUpdate = JavaUtils.getLast(taskHistory.get().getTaskUpdates()); if (lastUpdate.isPresent()) { if (lastUpdate.get().getTaskState().isDone() && System.currentTimeMillis() - lastUpdate.get().getTimestamp() > TimeUnit.MINUTES.toMillis(15)) { LOG.info("Task {} is done for > 15 minutes, removing logrotate files"); taskCleanup.cleanUpLogs(); } if (lastUpdate.get().getTaskState().isFailed()) { final long delta = System.currentTimeMillis() - lastUpdate.get().getTimestamp(); if (delta < cleanupConfiguration.getCleanupAppDirectoryOfFailedTasksAfterMillis()) { LOG.info("Not cleaning up task app directory of {} because only {} has elapsed since it failed (will cleanup after {})", taskDefinition.getTaskId(), JavaUtils.durationFromMillis(delta), JavaUtils.durationFromMillis(cleanupConfiguration.getCleanupAppDirectoryOfFailedTasksAfterMillis())); cleanupTaskAppDirectory = false; } } } } boolean isDocker = (taskHistory.isPresent() && taskHistory.get().getTask().getTaskRequest().getDeploy().getContainerInfo().isPresent() && taskHistory.get().getTask().getTaskRequest().getDeploy().getContainerInfo().get().getType() == SingularityContainerType.DOCKER); return taskCleanup.cleanup(cleanupTaskAppDirectory, isDocker); } private Iterator<Path> getUncompressedLogrotatedFileIterator(SingularityExecutorTaskDefinition taskDefinition) { final Path serviceLogOutPath = taskDefinition.getServiceLogOutPath(); final Path parent = serviceLogOutPath.getParent(); if (parent == null) { throw new IllegalStateException("Service log path " + serviceLogOutPath + " has no parent"); } final Path logrotateToPath = parent.resolve(executorConfiguration.getLogrotateToDirectory()); if (!logrotateToPath.toFile().exists() || !logrotateToPath.toFile().isDirectory()) { LOG.warn("Skipping uncompressed logrotated file cleanup for {} -- {} does not exist or is not a directory (task sandbox was probably garbage collected by Mesos)", taskDefinition.getTaskId(), logrotateToPath); 
return Collections.emptyIterator(); } try { DirectoryStream<Path> dirStream = Files.newDirectoryStream(logrotateToPath, String.format("%s-*", serviceLogOutPath.getFileName())); return dirStream.iterator(); } catch (IOException e) { throw Throwables.propagate(e); } } private void checkForUncompressedLogrotatedFile(SingularityExecutorTaskDefinition taskDefinition) { final Iterator<Path> iterator = getUncompressedLogrotatedFileIterator(taskDefinition); final Set<String> emptyPaths = new HashSet<>(); final List<Path> uncompressedFiles = new ArrayList<>(); // check for matched 0 byte compressed files.. and delete/compress them while (iterator.hasNext()) { Path path = iterator.next(); final String fileName = Objects.toString(path.getFileName()); Optional<CompressionType> maybeCompressionType = getFileCompressionType(fileName); if (maybeCompressionType.isPresent()) { try { if (Files.size(path) == 0) { Files.deleteIfExists(path); emptyPaths.add(fileName.substring(0, fileName.length() - maybeCompressionType.get().getExtention().length())); } } catch (IOException ioe) { LOG.error("Failed to handle empty {} file {}", maybeCompressionType.get(), path, ioe); exceptionNotifier.notify(String.format("Error handling empty file (%s)", ioe.getMessage()), ioe, ImmutableMap.of("file", path.toString())); } } else { uncompressedFiles.add(path); } } for (Path path : uncompressedFiles) { if (emptyPaths.contains(Objects.toString(path.getFileName()))) { LOG.info("Compressing abandoned file {}", path); try { new SimpleProcessManager(LOG).runCommand(ImmutableList.<String> of(cleanupConfiguration.getCompressionType().getCommand(), path.toString())); } catch (InterruptedException | ProcessFailedException e) { LOG.error("Failed to {} {}", cleanupConfiguration.getCompressionType(), path, e); exceptionNotifier.notify(String.format("Failed to gzip (%s)", e.getMessage()), e, ImmutableMap.of("file", path.toString())); } } else { LOG.debug("Didn't find matched empty {} file for {}", 
cleanupConfiguration.getCompressionType(), path); } } } private Optional<CompressionType> getFileCompressionType(String fileName) { if (fileName.endsWith(CompressionType.GZIP.getExtention())) { return Optional.of(CompressionType.GZIP); } else if (fileName.endsWith(CompressionType.BZIP2.getExtention())) { return Optional.of(CompressionType.BZIP2); } else { return Optional.absent(); } } private void cleanDocker(Set<String> runningTaskIds) { try { for (Container container : dockerUtils.listContainers()) { for (String name : container.names()) { if (name.startsWith(executorConfiguration.getDockerPrefix())) { if (!runningTaskIds.contains(name.substring(executorConfiguration.getDockerPrefix().length()))) { stopContainer(container); } } } } } catch (Exception e) { LOG.error("Could not get list of Docker containers", e); exceptionNotifier.notify(String.format("Error listing docker containers (%s)", e.getMessage()), e, Collections.<String, String>emptyMap()); } } private void stopContainer(Container container) { try { ContainerInfo containerInfo = dockerUtils.inspectContainer(container.id()); if (containerInfo.state().running()) { dockerUtils.stopContainer(container.id(), executorConfiguration.getDockerStopTimeout()); LOG.debug("Forcefully stopped container {}", container.names()); } dockerUtils.removeContainer(container.id(), true); LOG.debug("Removed container {}", container.names()); } catch (Exception e) { LOG.error("Failed to stop or remove container {}", container.names(), e); exceptionNotifier.notify(String.format("Failed stopping container (%s)", e.getMessage()), e, Collections.<String, String>emptyMap()); } } }
When configured, delete logrotateAdditionalFiles 15 minutes after the task terminates.
SingularityExecutorCleanup/src/main/java/com/hubspot/singularity/executor/cleanup/SingularityExecutorCleanup.java
When configured, delete logrotateAdditionalFiles 15 minutes after the task terminates.
<ide><path>ingularityExecutorCleanup/src/main/java/com/hubspot/singularity/executor/cleanup/SingularityExecutorCleanup.java <ide> import com.hubspot.singularity.executor.TemplateManager; <ide> import com.hubspot.singularity.executor.cleanup.config.SingularityExecutorCleanupConfiguration; <ide> import com.hubspot.singularity.executor.config.SingularityExecutorConfiguration; <add>import com.hubspot.singularity.executor.config.SingularityExecutorLogrotateAdditionalFile; <ide> import com.hubspot.singularity.executor.task.SingularityExecutorTaskCleanup; <ide> import com.hubspot.singularity.executor.task.SingularityExecutorTaskDefinition; <ide> import com.hubspot.singularity.executor.task.SingularityExecutorTaskLogManager; <ide> if (lastUpdate.get().getTaskState().isDone() && System.currentTimeMillis() - lastUpdate.get().getTimestamp() > TimeUnit.MINUTES.toMillis(15)) { <ide> LOG.info("Task {} is done for > 15 minutes, removing logrotate files"); <ide> taskCleanup.cleanUpLogs(); <add> checkForLogrotateAdditionalFilesToDelete(taskDefinition); <ide> } <ide> if (lastUpdate.get().getTaskState().isFailed()) { <ide> final long delta = System.currentTimeMillis() - lastUpdate.get().getTimestamp(); <ide> && taskHistory.get().getTask().getTaskRequest().getDeploy().getContainerInfo().get().getType() == SingularityContainerType.DOCKER); <ide> <ide> return taskCleanup.cleanup(cleanupTaskAppDirectory, isDocker); <add> } <add> <add> private void checkForLogrotateAdditionalFilesToDelete(SingularityExecutorTaskDefinition taskDefinition) { <add> executorConfiguration.getLogrotateAdditionalFiles() <add> .stream() <add> .filter(SingularityExecutorLogrotateAdditionalFile::isDeleteInExecutorCleanup) <add> .forEach(toDelete -> { <add> try (DirectoryStream<Path> stream = Files.newDirectoryStream(taskDefinition.getTaskDirectoryPath(), String.format("glob:%s", toDelete.getFilename()))) { <add> stream.iterator().forEachRemaining(path -> { <add> try { <add> Files.delete(path); <add> } catch 
(IOException e) { <add> LOG.error("Unable to delete logrotateAdditionalFile {}", path.toString(), e); <add> } <add> }); <add> } catch (IOException e) { <add> LOG.error("Unable to list files while trying to files to delete for {}", toDelete); <add> } <add> }); <ide> } <ide> <ide> private Iterator<Path> getUncompressedLogrotatedFileIterator(SingularityExecutorTaskDefinition taskDefinition) {
JavaScript
mit
22c9f6b1bf4954c852a9ce04bc9b702051d15089
0
Indigo744/jQuery-Mapael,neveldo/jQuery-Mapael,Indigo744/jQuery-Mapael,neveldo/jQuery-Mapael
/*! * * Jquery Mapael - Dynamic maps jQuery plugin (based on raphael.js) * Requires jQuery, raphael.js and jquery.mousewheel * * Version: 2.0.0-dev * * Copyright (c) 2015 Vincent Brouté (http://www.vincentbroute.fr/mapael) * Licensed under the MIT license (http://www.opensource.org/licenses/mit-license.php). * * Thanks to Indigo744 * */ (function (factory) { if (typeof exports === 'object') { // CommonJS module.exports = factory(require('jquery'), require('raphael'), require('mousewheel')); } else if (typeof define === 'function' && define.amd) { // AMD. Register as an anonymous module. define(['jquery', 'raphael', 'mousewheel'], factory); } else { // Browser globals factory(jQuery, Raphael, jQuery.fn.mousewheel); } }(function ($, Raphael, mousewheel, undefined) { "use strict"; // The plugin name (used on several places) var pluginName = "mapael"; // Version number of jQuery Mapael. See http://semver.org/ for more information. var version = "2.0.0-dev"; /* * Mapael constructor * Init instance vars and call init() * @param container the DOM element on which to apply the plugin * @param options the complete options to use */ var Mapael = function (container, options) { var self = this; // the global container (DOM element object) self.container = container; // the global container (jQuery object) self.$container = $(container); // the global options self.options = self.extendDefaultOptions(options); // Save initial HTML content (used by destroy method) self.initialHTMLContent = self.$container.html(); // zoom TimeOut handler (used to set and clear) self.zoomTO = 0; // zoom center coordinate (set at touchstart) self.zoomCenterX = 0; self.zoomCenterY = 0; // Zoom pinch (set at touchstart and touchmove) self.previousPinchDist = 0; // Zoom data self.zoomData = { zoomLevel: 0, zoomX: 0, zoomY: 0, panX: 0, panY: 0 }; // resize TimeOut handler (used to set and clear) self.resizeTO = 0; // Panning: tell if panning action is in progress self.panning = false; // Panning 
TimeOut handler (used to set and clear) self.panningTO = 0; // Animate view box Interval handler (used to set and clear) self.animationIntervalID = null; // Map subcontainer jQuery object self.$map = {}; // The tooltip jQuery object self.$tooltip = {}; // The paper Raphael object self.paper = {}; // The areas object list self.areas = {}; // The plots object list self.plots = {}; // The links object list self.links = {}; // The map configuration object (taken from map file) self.mapConf = {}; // Let's start the initialization self.init(); }; /* * Mapael Prototype * Defines all methods and properties needed by Mapael * Each mapael object inherits their properties and methods from this prototype */ Mapael.prototype = { /* * Version number */ version: version, /* * Initialize the plugin * Called by the constructor */ init: function () { var self = this; // Init check for class existence if (self.options.map.cssClass === "" || $("." + self.options.map.cssClass, self.container).length === 0) { throw new Error("The map class `" + self.options.map.cssClass + "` doesn't exists"); } // Create the tooltip container self.$tooltip = $("<div>").addClass(self.options.map.tooltip.cssClass).css("display", "none"); // Get the map container, empty it then append tooltip self.$map = $("." 
+ self.options.map.cssClass, self.container).empty().append(self.$tooltip); // Get the map from $.mapael or $.fn.mapael (backward compatibility) if ($[pluginName] && $[pluginName].maps && $[pluginName].maps[self.options.map.name]) { // Mapael version >= 2.x self.mapConf = $[pluginName].maps[self.options.map.name]; } else if ($.fn[pluginName] && $.fn[pluginName].maps && $.fn[pluginName].maps[self.options.map.name]) { // Mapael version <= 1.x - DEPRECATED self.mapConf = $.fn[pluginName].maps[self.options.map.name]; if (window.console && window.console.warn) { window.console.warn("Extending $.fn.mapael is deprecated (map '" + self.options.map.name + "')"); } } else { throw new Error("Unknown map '" + self.options.map.name + "'"); } // Create Raphael paper self.paper = new Raphael(self.$map[0], self.mapConf.width, self.mapConf.height); // add plugin class name on element self.$container.addClass(pluginName); if (self.options.map.tooltip.css) self.$tooltip.css(self.options.map.tooltip.css); self.paper.setViewBox(0, 0, self.mapConf.width, self.mapConf.height, false); // Draw map areas $.each(self.mapConf.elems, function (id) { var elemOptions = self.getElemOptions( self.options.map.defaultArea, (self.options.areas[id] ? self.options.areas[id] : {}), self.options.legend.area ); self.areas[id] = {"mapElem": self.paper.path(self.mapConf.elems[id]).attr(elemOptions.attrs)}; }); // Hook that allows to add custom processing on the map if (self.options.map.beforeInit) self.options.map.beforeInit(self.$container, self.paper, self.options); // Init map areas in a second loop (prevent texts to be hidden by map elements) $.each(self.mapConf.elems, function (id) { var elemOptions = self.getElemOptions( self.options.map.defaultArea, (self.options.areas[id] ? 
self.options.areas[id] : {}), self.options.legend.area ); self.initElem(self.areas[id], elemOptions, id); }); // Draw links self.links = self.drawLinksCollection(self.options.links); // Draw plots $.each(self.options.plots, function (id) { self.plots[id] = self.drawPlot(id); }); // Attach zoom event self.$container.on("zoom." + pluginName, function (e, zoomOptions) { self.onZoomEvent(e, zoomOptions); }); if (self.options.map.zoom.enabled) { // Enable zoom self.initZoom(self.mapConf.width, self.mapConf.height, self.options.map.zoom); } // Set initial zoom if (self.options.map.zoom.init !== undefined) { if (self.options.map.zoom.init.animDuration === undefined) { self.options.map.zoom.init.animDuration = 0; } self.$container.trigger("zoom." + pluginName, self.options.map.zoom.init); } // Create the legends for areas self.createLegends("area", self.areas, 1); // Attach update event self.$container.on("update." + pluginName, function (e, opt) { self.onUpdateEvent(e, opt); }); // Attach showElementsInRange event self.$container.on("showElementsInRange." 
+ pluginName, function (e, opt) { self.onShowElementsInRange(e, opt); }); // Handle map size if (self.options.map.width) { // NOT responsive: map has a fixed width self.paper.setSize(self.options.map.width, self.mapConf.height * (self.options.map.width / self.mapConf.width)); // Create the legends for plots taking into account the scale of the map self.createLegends("plot", self.plots, (self.options.map.width / self.mapConf.width)); } else { // Responsive: handle resizing of the map self.handleMapResizing(); } // Hook that allows to add custom processing on the map if (self.options.map.afterInit) self.options.map.afterInit(self.$container, self.paper, self.areas, self.plots, self.options); $(self.paper.desc).append(" and Mapael " + self.version + " (http://www.vincentbroute.fr/mapael/)"); }, /* * Destroy mapael * This function effectively detach mapael from the container * - Set the container back to the way it was before mapael instanciation * - Remove all data associated to it (memory can then be free'ed by browser) * * This method can be call directly by user: * $(".mapcontainer").data("mapael").destroy(); * * This method is also automatically called if the user try to call mapael * on a container already containing a mapael instance */ destroy: function () { var self = this; // Empty the container (this will also detach all event listeners) self.$container.empty(); // Detach the global resize event handler if (self.onResizeEvent) $(window).off("resize." 
+ pluginName, self.onResizeEvent); // Replace initial HTML content self.$container.html(self.initialHTMLContent); // Remove mapael class self.$container.removeClass(pluginName); // Remove the data self.$container.removeData(pluginName); // Remove all internal reference self.container = undefined; self.$container = undefined; self.options = undefined; self.paper = undefined; self.$map = undefined; self.$tooltip = undefined; self.mapConf = undefined; self.areas = undefined; self.plots = undefined; self.links = undefined; }, handleMapResizing: function () { var self = this; // Create the legends for plots taking into account the scale of the map var createPlotLegend = function () { self.createLegends("plot", self.plots, (self.$map.width() / self.mapConf.width)); self.$map.off("resizeEnd." + pluginName, createPlotLegend); }; // onResizeEvent: call when the window element trigger the resize event // We create it inside this function (and not in the prototype) in order to have a closure // Otherwise, in the prototype, 'this' when triggered is *not* the mapael object but the global window self.onResizeEvent = function () { // Clear any previous setTimeout (avoid too much triggering) clearTimeout(self.resizeTO); // setTimeout to wait for the user to finish its resizing self.resizeTO = setTimeout(function () { self.$map.trigger("resizeEnd." + pluginName); }, 150); }; // Attach resize handler $(window).on("resize." + pluginName, self.onResizeEvent); self.$map.on("resizeEnd." + pluginName, function () { var containerWidth = self.$map.width(); if (self.paper.width != containerWidth) { self.paper.setSize(containerWidth, self.mapConf.height * (containerWidth / self.mapConf.width)); } }).on("resizeEnd." + pluginName, createPlotLegend).trigger("resizeEnd." 
+ pluginName); }, /* * Extend the user option with the default one * @param options the user options * @return new options object */ extendDefaultOptions: function (options) { // Extend default options with user options options = $.extend(true, {}, Mapael.prototype.defaultOptions, options); // Extend legend default options $.each(options.legend, function (type) { if ($.isArray(options.legend[type])) { for (var i = 0; i < options.legend[type].length; ++i) options.legend[type][i] = $.extend(true, {}, Mapael.prototype.legendDefaultOptions[type], options.legend[type][i]); } else { options.legend[type] = $.extend(true, {}, Mapael.prototype.legendDefaultOptions[type], options.legend[type]); } }); return options; }, /* * Init the element "elem" on the map (drawing, setting attributes, events, tooltip, ...) */ initElem: function (elem, elemOptions, id) { var self = this; var bbox = {}; var textPosition = {}; if (elemOptions.value !== undefined) elem.value = elemOptions.value; // Init attrsHover self.setHoverOptions(elem.mapElem, elemOptions.attrs, elemOptions.attrsHover); // Init the label related to the element if (elemOptions.text && elemOptions.text.content !== undefined) { // Set a text label in the area bbox = elem.mapElem.getBBox(); textPosition = self.getTextPosition(bbox, elemOptions.text.position, elemOptions.text.margin); elemOptions.text.attrs["text-anchor"] = textPosition.textAnchor; elem.textElem = self.paper.text(textPosition.x, textPosition.y, elemOptions.text.content).attr(elemOptions.text.attrs); self.setHoverOptions(elem.textElem, elemOptions.text.attrs, elemOptions.text.attrsHover); if (elemOptions.eventHandlers) self.setEventHandlers(id, elemOptions, elem.mapElem, elem.textElem); self.setHover(elem.mapElem, elem.textElem); $(elem.textElem.node).attr("data-id", id); } else { if (elemOptions.eventHandlers) self.setEventHandlers(id, elemOptions, elem.mapElem); self.setHover(elem.mapElem); } // Init the tooltip if (elemOptions.tooltip) { 
elem.mapElem.tooltip = elemOptions.tooltip; self.setTooltip(elem.mapElem); if (elemOptions.text && elemOptions.text.content !== undefined) { elem.textElem.tooltip = elemOptions.tooltip; self.setTooltip(elem.textElem); } } // Init the link if (elemOptions.href) { elem.mapElem.href = elemOptions.href; elem.mapElem.target = elemOptions.target; self.setHref(elem.mapElem); if (elemOptions.text && elemOptions.text.content !== undefined) { elem.textElem.href = elemOptions.href; elem.textElem.target = elemOptions.target; self.setHref(elem.textElem); } } $(elem.mapElem.node).attr("data-id", id); }, /* * Init zoom and panning for the map * @param mapWidth * @param mapHeight * @param zoomOptions */ initZoom: function (mapWidth, mapHeight, zoomOptions) { var self = this; var $zoomIn; var $zoomOut; var mousedown = false; var previousX = 0; var previousY = 0; // init Zoom data $.extend(self.zoomData, { zoomLevel: 0, panX: 0, panY: 0 }); // init zoom button $zoomIn = $("<div>").addClass(zoomOptions.zoomInCssClass).html("+"); $zoomOut = $("<div>").addClass(zoomOptions.zoomOutCssClass).html("&#x2212;"); self.$map.append($zoomIn).append($zoomOut); $zoomIn.on("click." + pluginName, function () { self.$container.trigger("zoom." + pluginName, {"level": self.zoomData.zoomLevel + 1}); }); $zoomOut.on("click." + pluginName, function () { self.$container.trigger("zoom." + pluginName, {"level": self.zoomData.zoomLevel - 1}); }); // Update the zoom level of the map on mousewheel if (self.options.map.zoom.mousewheel) { self.$map.on("mousewheel." + pluginName, function (e) { var offset = self.$map.offset(); var initFactor = (self.options.map.width) ? (self.mapConf.width / self.options.map.width) : (self.mapConf.width / self.$map.width()); var zoomLevel = (e.deltaY > 0) ? 
1 : -1; var zoomFactor = 1 / (1 + (self.zoomData.zoomLevel) * self.options.map.zoom.step); var x = zoomFactor * initFactor * (e.clientX + $(window).scrollLeft() - offset.left) + self.zoomData.panX; var y = zoomFactor * initFactor * (e.clientY + $(window).scrollTop() - offset.top) + self.zoomData.panY; self.$container.trigger("zoom." + pluginName, { "fixedCenter": true, "level": self.zoomData.zoomLevel + zoomLevel, "x": x, "y": y }); return false; }); } // Update the zoom level of the map on touch pinch if (self.options.map.zoom.touch) { self.$map.on("touchstart." + pluginName, function (e) { if (e.originalEvent.touches.length === 2) { self.zoomCenterX = (e.originalEvent.touches[0].clientX + e.originalEvent.touches[1].clientX) / 2; self.zoomCenterY = (e.originalEvent.touches[0].clientY + e.originalEvent.touches[1].clientY) / 2; self.previousPinchDist = Math.sqrt(Math.pow((e.originalEvent.touches[1].clientX - e.originalEvent.touches[0].clientX), 2) + Math.pow((e.originalEvent.touches[1].clientY - e.originalEvent.touches[0].clientY), 2)); } }); self.$map.on("touchmove." + pluginName, function (e) { var offset = 0; var initFactor = 0; var zoomFactor = 0; var x = 0; var y = 0; var pinchDist = 0; var zoomLevel = 0; if (e.originalEvent.touches.length === 2) { pinchDist = Math.sqrt(Math.pow((e.originalEvent.touches[1].clientX - e.originalEvent.touches[0].clientX), 2) + Math.pow((e.originalEvent.touches[1].clientY - e.originalEvent.touches[0].clientY), 2)); if (Math.abs(pinchDist - self.previousPinchDist) > 15) { offset = self.$map.offset(); initFactor = (self.options.map.width) ? 
(self.mapConf.width / self.options.map.width) : (self.mapConf.width / self.$map.width()); zoomFactor = 1 / (1 + (self.zoomData.zoomLevel) * self.options.map.zoom.step); x = zoomFactor * initFactor * (self.zoomCenterX + $(window).scrollLeft() - offset.left) + self.zoomData.panX; y = zoomFactor * initFactor * (self.zoomCenterY + $(window).scrollTop() - offset.top) + self.zoomData.panY; zoomLevel = (pinchDist - self.previousPinchDist) / Math.abs(pinchDist - self.previousPinchDist); self.$container.trigger("zoom." + pluginName, { "fixedCenter": true, "level": self.zoomData.zoomLevel + zoomLevel, "x": x, "y": y }); self.previousPinchDist = pinchDist; } return false; } }); } // Panning $("body").on("mouseup." + pluginName + (zoomOptions.touch ? " touchend" : ""), function () { mousedown = false; setTimeout(function () { self.panning = false; }, 50); }); self.$map.on("mousedown." + pluginName + (zoomOptions.touch ? " touchstart" : ""), function (e) { if (e.pageX !== undefined) { mousedown = true; previousX = e.pageX; previousY = e.pageY; } else { if (e.originalEvent.touches.length === 1) { mousedown = true; previousX = e.originalEvent.touches[0].pageX; previousY = e.originalEvent.touches[0].pageY; } } }).on("mousemove." + pluginName + (zoomOptions.touch ? 
" touchmove" : ""), function (e) { var currentLevel = self.zoomData.zoomLevel; var pageX = 0; var pageY = 0; if (e.pageX !== undefined) { pageX = e.pageX; pageY = e.pageY; } else { if (e.originalEvent.touches.length === 1) { pageX = e.originalEvent.touches[0].pageX; pageY = e.originalEvent.touches[0].pageY; } else { mousedown = false; } } if (mousedown && currentLevel !== 0) { var offsetX = (previousX - pageX) / (1 + (currentLevel * zoomOptions.step)) * (mapWidth / self.paper.width); var offsetY = (previousY - pageY) / (1 + (currentLevel * zoomOptions.step)) * (mapHeight / self.paper.height); var panX = Math.min(Math.max(0, self.paper._viewBox[0] + offsetX), (mapWidth - self.paper._viewBox[2])); var panY = Math.min(Math.max(0, self.paper._viewBox[1] + offsetY), (mapHeight - self.paper._viewBox[3])); if (Math.abs(offsetX) > 5 || Math.abs(offsetY) > 5) { $.extend(self.zoomData, { panX: panX, panY: panY, zoomX: panX + self.paper._viewBox[2] / 2, zoomY: panY + self.paper._viewBox[3] / 2 }); self.paper.setViewBox(panX, panY, self.paper._viewBox[2], self.paper._viewBox[3]); clearTimeout(self.panningTO); self.panningTO = setTimeout(function () { self.$map.trigger("afterPanning", { x1: panX, y1: panY, x2: (panX + self.paper._viewBox[2]), y2: (panY + self.paper._viewBox[3]) }); }, 150); previousX = pageX; previousY = pageY; self.panning = true; } return false; } }); }, /* * Zoom on the map at a specific level focused on specific coordinates * If no coordinates are specified, the zoom will be focused on the center of the map * options : * "level" : level of the zoom between 0 and maxLevel * "x" or "latitude" : x coordinate or latitude of the point to focus on * "y" or "longitude" : y coordinate or longitude of the point to focus on * "fixedCenter" : set to true in order to preserve the position of x,y in the canvas when zoomed * "animDuration" : zoom duration */ onZoomEvent: function (e, zoomOptions) { var self = this; var newLevel = Math.min(Math.max(zoomOptions.level, 0), 
self.options.map.zoom.maxLevel); var panX = 0; var panY = 0; var previousZoomLevel = (1 + self.zoomData.zoomLevel * self.options.map.zoom.step); var zoomLevel = (1 + newLevel * self.options.map.zoom.step); var animDuration = (zoomOptions.animDuration !== undefined) ? zoomOptions.animDuration : self.options.map.zoom.animDuration; var offsetX = 0; var offsetY = 0; var coords = {}; if (zoomOptions.latitude !== undefined && zoomOptions.longitude !== undefined) { coords = self.mapConf.getCoords(zoomOptions.latitude, zoomOptions.longitude); zoomOptions.x = coords.x; zoomOptions.y = coords.y; } if (zoomOptions.x === undefined) zoomOptions.x = self.paper._viewBox[0] + self.paper._viewBox[2] / 2; if (zoomOptions.y === undefined) zoomOptions.y = (self.paper._viewBox[1] + self.paper._viewBox[3] / 2); if (newLevel === 0) { panX = 0; panY = 0; } else if (zoomOptions.fixedCenter !== undefined && zoomOptions.fixedCenter === true) { offsetX = self.zoomData.panX + ((zoomOptions.x - self.zoomData.panX) * (zoomLevel - previousZoomLevel)) / zoomLevel; offsetY = self.zoomData.panY + ((zoomOptions.y - self.zoomData.panY) * (zoomLevel - previousZoomLevel)) / zoomLevel; panX = Math.min(Math.max(0, offsetX), (self.mapConf.width - (self.mapConf.width / zoomLevel))); panY = Math.min(Math.max(0, offsetY), (self.mapConf.height - (self.mapConf.height / zoomLevel))); } else { panX = Math.min(Math.max(0, zoomOptions.x - (self.mapConf.width / zoomLevel) / 2), (self.mapConf.width - (self.mapConf.width / zoomLevel))); panY = Math.min(Math.max(0, zoomOptions.y - (self.mapConf.height / zoomLevel) / 2), (self.mapConf.height - (self.mapConf.height / zoomLevel))); } // Update zoom level of the map if (zoomLevel == previousZoomLevel && panX == self.zoomData.panX && panY == self.zoomData.panY) return; if (animDuration > 0) { self.animateViewBox(panX, panY, self.mapConf.width / zoomLevel, self.mapConf.height / zoomLevel, animDuration, self.options.map.zoom.animEasing); } else { self.paper.setViewBox(panX, 
panY, self.mapConf.width / zoomLevel, self.mapConf.height / zoomLevel); clearTimeout(self.zoomTO); self.zoomTO = setTimeout(function () { self.$map.trigger("afterZoom", { x1: panX, y1: panY, x2: (panX + (self.mapConf.width / zoomLevel)), y2: (panY + (self.mapConf.height / zoomLevel)) }); }, 150); } $.extend(self.zoomData, { zoomLevel: newLevel, panX: panX, panY: panY, zoomX: panX + self.paper._viewBox[2] / 2, zoomY: panY + self.paper._viewBox[3] / 2 }); }, /* * Show some element in range defined by user * Triggered by user $(".mapcontainer").trigger("showElementsInRange", [opt]); * * @param opt the options * opt.hiddenOpacity opacity for hidden element (default = 0.3) * opt.animDuration animation duration in ms (default = 0) * opt.afterShowRange callback * opt.ranges the range to show: * Example: * opt.ranges = { * 'plot' : { * 0 : { // valueIndex * 'min': 1000, * 'max': 1200 * }, * 1 : { // valueIndex * 'min': 10, * 'max': 12 * } * }, * 'area' : { * {'min': 10, 'max': 20} // No valueIndex, only an object, use 0 as valueIndex (easy case) * } * } */ onShowElementsInRange: function(e, opt) { var self = this; // set animDuration to default if not defined if (opt.animDuration === undefined) { opt.animDuration = 0; } // set hiddenOpacity to default if not defined if (opt.hiddenOpacity === undefined) { opt.hiddenOpacity = 0.3; } // handle area if (opt.ranges && opt.ranges.area) { self.showElemByRange(opt.ranges.area, self.areas, opt.hiddenOpacity, opt.animDuration); } // handle plot if (opt.ranges && opt.ranges.plot) { self.showElemByRange(opt.ranges.plot, self.plots, opt.hiddenOpacity, opt.animDuration); } // handle link if (opt.ranges && opt.ranges.link) { self.showElemByRange(opt.ranges.link, self.links, opt.hiddenOpacity, opt.animDuration); } // Call user callback if (opt.afterShowRange) opt.afterShowRange(); }, /* * Show some element in range * @param ranges: the ranges * @param elems: list of element on which to check against previous range * @hiddenOpacity: the 
opacity when hidden * @animDuration: the animation duration */
showElemByRange: function(ranges, elems, hiddenOpacity, animDuration) {
    var self = this;
    // Hold the final opacity value for all elements consolidated after applying each ranges
    // This allows to set the opacity only once for each elements
    var elemsFinalOpacity = {};

    // set object with one valueIndex to 0 if we have directly the min/max
    // (easy case: caller passed {min, max} instead of {valueIndex: {min, max}})
    if (ranges.min !== undefined || ranges.max !== undefined) {
        ranges = {0: ranges};
    }

    // Loop through each valueIndex
    $.each(ranges, function (valueIndex) {
        var range = ranges[valueIndex];
        // Check if user defined at least a min or max value
        if (range.min === undefined && range.max === undefined) {
            return true; // skip this iteration (each loop), goto next range
        }
        // Loop through each elements
        $.each(elems, function (id) {
            var elemValue = elems[id].value;
            // set value with one valueIndex to 0 if not object
            if (typeof elemValue !== "object") {
                elemValue = [elemValue];
            }
            // Check existence of this value index
            if (elemValue[valueIndex] === undefined) {
                return true; // skip this iteration (each loop), goto next element
            }
            // Check if in range
            if ((range.min !== undefined && elemValue[valueIndex] < range.min) ||
                (range.max !== undefined && elemValue[valueIndex] > range.max)) {
                // Element not in range
                elemsFinalOpacity[id] = hiddenOpacity;
            } else {
                // Element in range
                elemsFinalOpacity[id] = 1;
            }
        });
    });
    // Now that we looped through all ranges, we can really assign the final opacity
    $.each(elemsFinalOpacity, function (id) {
        self.setElementOpacity(elems[id], elemsFinalOpacity[id], animDuration);
    });
},

/*
 * Set element opacity
 * Handle elem.mapElem and elem.textElem
 * @param elem the element
 * @param opacity the opacity to apply
 * @param animDuration the animation duration to use
 */
setElementOpacity: function(elem, opacity, animDuration) {
    // Ensure no animation is running
    elem.mapElem.stop();
    if (elem.textElem) elem.textElem.stop();

    // If final opacity is not null, ensure element is shown before proceeding
    if (opacity > 0) {
        elem.mapElem.show();
        if (elem.textElem) elem.textElem.show();
    }

    if (animDuration > 0) {
        // Animate attribute
        elem.mapElem.animate({"opacity": opacity}, animDuration, "linear", function () {
            // If final attribute is 0, hide
            if (opacity === 0) elem.mapElem.hide();
        });
        // Handle text element
        if (elem.textElem) {
            // Animate attribute
            elem.textElem.animate({"opacity": opacity}, animDuration, "linear", function () {
                // If final attribute is 0, hide
                if (opacity === 0) elem.textElem.hide();
            });
        }
    } else {
        // Set attribute
        elem.mapElem.attr({"opacity": opacity});
        // For null opacity, hide it
        if (opacity === 0) elem.mapElem.hide();
        // Handle text element
        if (elem.textElem) {
            // Set attribute
            elem.textElem.attr({"opacity": opacity});
            // For null opacity, hide it
            if (opacity === 0) elem.textElem.hide();
        }
    }
},

/*
 *
 * Update the current map
 * Refresh attributes and tooltips for areas and plots
 * @param opt option for the refresh :
 *  opt.mapOptions: options to update for plots and areas
 *  opt.replaceOptions: whether mapsOptions should entirely replace current map options, or just extend it
 *  opt.newPlots new plots to add to the map
 *  opt.newLinks new links to add to the map
 *  opt.deletePlotKeys plots to delete from the map (array, or "all" to remove all plots)
 *  opt.deleteLinkKeys links to remove from the map (array, or "all" to remove all links)
 *  opt.setLegendElemsState the state of legend elements to be set : show (default) or hide
 *  opt.animDuration animation duration in ms (default = 0)
 *  opt.afterUpdate Hook that allows to add custom processing on the map
 */
onUpdateEvent: function (e, opt) {
    var self = this;

    // Abort if opt is undefined
    if (typeof opt !== "object") return;

    var i = 0;
    var animDuration = (opt.animDuration) ?
opt.animDuration : 0;

    // This function remove an element using animation (or not, depending on animDuration)
    // Used for deletePlotKeys and deleteLinkKeys
    var fnRemoveElement = function (elem) {
        // Unset all event handlers
        self.unsetHover(elem.mapElem, elem.textElem);
        if (animDuration > 0) {
            elem.mapElem.animate({"opacity": 0}, animDuration, "linear", function () {
                elem.mapElem.remove();
            });
            if (elem.textElem) {
                elem.textElem.animate({"opacity": 0}, animDuration, "linear", function () {
                    elem.textElem.remove();
                });
            }
        } else {
            elem.mapElem.remove();
            if (elem.textElem) {
                elem.textElem.remove();
            }
        }
    };

    // This function show an element using animation
    // Used for newPlots and newLinks
    var fnShowElement = function (elem) {
        // Starts with hidden elements
        elem.mapElem.attr({opacity: 0});
        if (elem.textElem) elem.textElem.attr({opacity: 0});
        // Set final element opacity
        self.setElementOpacity(
            elem,
            (elem.mapElem.originalAttrs.opacity !== undefined) ? elem.mapElem.originalAttrs.opacity : 1,
            animDuration
        );
    };

    if (typeof opt.mapOptions === "object") {
        if (opt.replaceOptions === true) self.options = self.extendDefaultOptions(opt.mapOptions);
        else $.extend(true, self.options, opt.mapOptions);

        // IF we update areas, plots or legend, then reset all legend state to "show"
        if (opt.mapOptions.areas !== undefined || opt.mapOptions.plots !== undefined || opt.mapOptions.legend !== undefined) {
            $("[data-type='elem']", self.$container).each(function (id, elem) {
                if ($(elem).attr('data-hidden') === "1") {
                    // Toggle state of element by clicking
                    $(elem).trigger("click." + pluginName, [false, animDuration]);
                }
            });
        }
    }

    // Delete plots by name if deletePlotKeys is array
    if (typeof opt.deletePlotKeys === "object") {
        for (; i < opt.deletePlotKeys.length; i++) {
            if (self.plots[opt.deletePlotKeys[i]] !== undefined) {
                fnRemoveElement(self.plots[opt.deletePlotKeys[i]]);
                delete self.plots[opt.deletePlotKeys[i]];
            }
        }
        // Delete ALL plots if deletePlotKeys is set to "all"
    } else if (opt.deletePlotKeys === "all") {
        $.each(self.plots, function (id, elem) {
            fnRemoveElement(elem);
        });
        // Empty plots object
        self.plots = {};
    }

    // Delete links by name if deleteLinkKeys is array
    if (typeof opt.deleteLinkKeys === "object") {
        for (i = 0; i < opt.deleteLinkKeys.length; i++) {
            if (self.links[opt.deleteLinkKeys[i]] !== undefined) {
                fnRemoveElement(self.links[opt.deleteLinkKeys[i]]);
                delete self.links[opt.deleteLinkKeys[i]];
            }
        }
        // Delete ALL links if deleteLinkKeys is set to "all"
    } else if (opt.deleteLinkKeys === "all") {
        $.each(self.links, function (id, elem) {
            fnRemoveElement(elem);
        });
        // Empty links object
        self.links = {};
    }

    // New plots
    if (typeof opt.newPlots === "object") {
        $.each(opt.newPlots, function (id) {
            if (self.plots[id] === undefined) {
                self.options.plots[id] = opt.newPlots[id];
                self.plots[id] = self.drawPlot(id);
                if (animDuration > 0) {
                    fnShowElement(self.plots[id]);
                }
            }
        });
    }

    // New links
    if (typeof opt.newLinks === "object") {
        var newLinks = self.drawLinksCollection(opt.newLinks);
        $.extend(self.links, newLinks);
        $.extend(self.options.links, opt.newLinks);
        if (animDuration > 0) {
            $.each(newLinks, function (id) {
                fnShowElement(newLinks[id]);
            });
        }
    }

    // Update areas attributes and tooltips
    $.each(self.areas, function (id) {
        var elemOptions = self.getElemOptions(
            self.options.map.defaultArea,
            (self.options.areas[id] ? self.options.areas[id] : {}),
            self.options.legend.area
        );
        self.updateElem(elemOptions, self.areas[id], animDuration);
    });

    // Update plots attributes and tooltips
    $.each(self.plots, function (id) {
        var elemOptions = self.getElemOptions(
            self.options.map.defaultPlot,
            (self.options.plots[id] ? self.options.plots[id] : {}),
            self.options.legend.plot
        );
        // Recompute the geometry attrs from the (possibly new) size/width/height,
        // keeping the element centered on its current position
        if (elemOptions.type == "square") {
            elemOptions.attrs.width = elemOptions.size;
            elemOptions.attrs.height = elemOptions.size;
            elemOptions.attrs.x = self.plots[id].mapElem.attrs.x - (elemOptions.size - self.plots[id].mapElem.attrs.width) / 2;
            elemOptions.attrs.y = self.plots[id].mapElem.attrs.y - (elemOptions.size - self.plots[id].mapElem.attrs.height) / 2;
        } else if (elemOptions.type == "image") {
            elemOptions.attrs.width = elemOptions.width;
            elemOptions.attrs.height = elemOptions.height;
            elemOptions.attrs.x = self.plots[id].mapElem.attrs.x - (elemOptions.width - self.plots[id].mapElem.attrs.width) / 2;
            elemOptions.attrs.y = self.plots[id].mapElem.attrs.y - (elemOptions.height - self.plots[id].mapElem.attrs.height) / 2;
        } else { // Default : circle
            elemOptions.attrs.r = elemOptions.size / 2;
        }
        self.updateElem(elemOptions, self.plots[id], animDuration);
    });

    // Update links attributes and tooltips
    $.each(self.links, function (id) {
        var elemOptions = self.getElemOptions(
            self.options.map.defaultLink,
            (self.options.links[id] ? self.options.links[id] : {}),
            {}
        );
        self.updateElem(elemOptions, self.links[id], animDuration);
    });

    // Update legends
    if (opt.mapOptions && typeof opt.mapOptions.legend === "object") {
        self.createLegends("area", self.areas, 1);
        if (self.options.map.width) {
            self.createLegends("plot", self.plots, (self.options.map.width / self.mapConf.width));
        } else {
            self.createLegends("plot", self.plots, (self.$map.width() / self.mapConf.width));
        }
    }

    // Hide/Show all elements based on showlegendElems
    // Toggle (i.e. click) only if:
    // - slice legend is shown AND we want to hide
    // - slice legend is hidden AND we want to show
    if (typeof opt.setLegendElemsState === "object") {
        // setLegendElemsState is an object listing the legend we want to hide/show
        $.each(opt.setLegendElemsState, function (legendCSSClass, action) {
            // Search for the legend
            var $legend = self.$container.find("." + legendCSSClass)[0];
            if ($legend !== undefined) {
                // Select all elem inside this legend
                $("[data-type='elem']", $legend).each(function (id, elem) {
                    if (($(elem).attr('data-hidden') === "0" && action === "hide") ||
                        ($(elem).attr('data-hidden') === "1" && action === "show")) {
                        // Toggle state of element by clicking
                        $(elem).trigger("click." + pluginName, [false, animDuration]);
                    }
                });
            }
        });
    } else {
        // setLegendElemsState is a string, or is undefined
        // Default : "show"
        var action = (opt.setLegendElemsState === "hide") ? "hide" : "show";

        $("[data-type='elem']", self.$container).each(function (id, elem) {
            if (($(elem).attr('data-hidden') === "0" && action === "hide") ||
                ($(elem).attr('data-hidden') === "1" && action === "show")) {
                // Toggle state of element by clicking
                $(elem).trigger("click."
+ pluginName, [false, animDuration]); } }); } if (opt.afterUpdate) opt.afterUpdate(self.$container, self.paper, self.areas, plots, self.options); }, /* * Draw all links between plots on the paper */ drawLinksCollection: function (linksCollection) { var self = this; var p1 = {}; var p2 = {}; var coordsP1 = {}; var coordsP2 = {}; var links = {}; $.each(linksCollection, function (id) { var elemOptions = self.getElemOptions(self.options.map.defaultLink, linksCollection[id], {}); if (typeof linksCollection[id].between[0] == 'string') { p1 = self.options.plots[linksCollection[id].between[0]]; } else { p1 = linksCollection[id].between[0]; } if (typeof linksCollection[id].between[1] == 'string') { p2 = self.options.plots[linksCollection[id].between[1]]; } else { p2 = linksCollection[id].between[1]; } if (p1.latitude !== undefined && p1.longitude !== undefined) { coordsP1 = self.mapConf.getCoords(p1.latitude, p1.longitude); } else { coordsP1.x = p1.x; coordsP1.y = p1.y; } if (p2.latitude !== undefined && p2.longitude !== undefined) { coordsP2 = self.mapConf.getCoords(p2.latitude, p2.longitude); } else { coordsP2.x = p2.x; coordsP2.y = p2.y; } links[id] = self.drawLink(id, coordsP1.x, coordsP1.y, coordsP2.x, coordsP2.y, elemOptions); }); return links; }, /* * Draw a curved link between two couples of coordinates a(xa,ya) and b(xb, yb) on the paper */ drawLink: function (id, xa, ya, xb, yb, elemOptions) { var self = this; var elem = {}; // Compute the "curveto" SVG point, d(x,y) // c(xc, yc) is the center of (xa,ya) and (xb, yb) var xc = (xa + xb) / 2; var yc = (ya + yb) / 2; // Equation for (cd) : y = acd * x + bcd (d is the cure point) var acd = -1 / ((yb - ya) / (xb - xa)); var bcd = yc - acd * xc; // dist(c,d) = dist(a,b) (=abDist) var abDist = Math.sqrt((xb - xa) * (xb - xa) + (yb - ya) * (yb - ya)); // Solution for equation dist(cd) = sqrt((xd - xc)² + (yd - yc)²) // dist(c,d)² = (xd - xc)² + (yd - yc)² // We assume that dist(c,d) = dist(a,b) // so : (xd - xc)² + (yd - 
yc)² - dist(a,b)² = 0 // With the factor : (xd - xc)² + (yd - yc)² - (factor*dist(a,b))² = 0 // (xd - xc)² + (acd*xd + bcd - yc)² - (factor*dist(a,b))² = 0 var a = 1 + acd * acd; var b = -2 * xc + 2 * acd * bcd - 2 * acd * yc; var c = xc * xc + bcd * bcd - bcd * yc - yc * bcd + yc * yc - ((elemOptions.factor * abDist) * (elemOptions.factor * abDist)); var delta = b * b - 4 * a * c; var x = 0; var y = 0; // There are two solutions, we choose one or the other depending on the sign of the factor if (elemOptions.factor > 0) { x = (-b + Math.sqrt(delta)) / (2 * a); y = acd * x + bcd; } else { x = (-b - Math.sqrt(delta)) / (2 * a); y = acd * x + bcd; } elem.mapElem = self.paper.path("m " + xa + "," + ya + " C " + x + "," + y + " " + xb + "," + yb + " " + xb + "," + yb + "").attr(elemOptions.attrs); self.initElem(elem, elemOptions, id); return elem; }, /* * Update the element "elem" on the map with the new elemOptions options */ updateElem: function (elemOptions, elem, animDuration) { var self = this; var bbox; var textPosition; var plotOffsetX; var plotOffsetY; if (elemOptions.value !== undefined) elem.value = elemOptions.value; // Update the label if (elem.textElem) { if (elemOptions.text !== undefined && elemOptions.text.content !== undefined && elemOptions.text.content != elem.textElem.attrs.text) elem.textElem.attr({text: elemOptions.text.content}); bbox = elem.mapElem.getBBox(); if (elemOptions.size || (elemOptions.width && elemOptions.height)) { if (elemOptions.type == "image" || elemOptions.type == "svg") { plotOffsetX = (elemOptions.width - bbox.width) / 2; plotOffsetY = (elemOptions.height - bbox.height) / 2; } else { plotOffsetX = (elemOptions.size - bbox.width) / 2; plotOffsetY = (elemOptions.size - bbox.height) / 2; } bbox.x -= plotOffsetX; bbox.x2 += plotOffsetX; bbox.y -= plotOffsetY; bbox.y2 += plotOffsetY; } textPosition = self.getTextPosition(bbox, elemOptions.text.position, elemOptions.text.margin); if (textPosition.x != elem.textElem.attrs.x || 
textPosition.y != elem.textElem.attrs.y) { if (animDuration > 0) { elem.textElem.attr({"text-anchor": textPosition.textAnchor}); elem.textElem.animate({x: textPosition.x, y: textPosition.y}, animDuration); } else elem.textElem.attr({ x: textPosition.x, y: textPosition.y, "text-anchor": textPosition.textAnchor }); } self.setHoverOptions(elem.textElem, elemOptions.text.attrs, elemOptions.text.attrsHover); if (animDuration > 0) elem.textElem.animate(elemOptions.text.attrs, animDuration); else elem.textElem.attr(elemOptions.text.attrs); } // Update elements attrs and attrsHover self.setHoverOptions(elem.mapElem, elemOptions.attrs, elemOptions.attrsHover); if (animDuration > 0) elem.mapElem.animate(elemOptions.attrs, animDuration); else elem.mapElem.attr(elemOptions.attrs); // Update dimensions of SVG plots if (elemOptions.type == "svg") { elem.mapElem.transform("m" + (elemOptions.width / elem.mapElem.originalWidth) + ",0,0," + (elemOptions.height / elem.mapElem.originalHeight) + "," + bbox.x + "," + bbox.y); } // Update the tooltip if (elemOptions.tooltip) { if (elem.mapElem.tooltip === undefined) { self.setTooltip(elem.mapElem); if (elem.textElem) self.setTooltip(elem.textElem); } elem.mapElem.tooltip = elemOptions.tooltip; if (elem.textElem) elem.textElem.tooltip = elemOptions.tooltip; } // Update the link if (elemOptions.href !== undefined) { if (elem.mapElem.href === undefined) { self.setHref(elem.mapElem); if (elem.textElem) self.setHref(elem.textElem); } elem.mapElem.href = elemOptions.href; elem.mapElem.target = elemOptions.target; if (elem.textElem) { elem.textElem.href = elemOptions.href; elem.textElem.target = elemOptions.target; } } }, /* * Draw the plot */ drawPlot: function (id) { var self = this; var plot = {}; var coords = {}; var elemOptions = self.getElemOptions( self.options.map.defaultPlot, (self.options.plots[id] ? 
self.options.plots[id] : {}), self.options.legend.plot ); if (elemOptions.x !== undefined && elemOptions.y !== undefined) coords = {x: elemOptions.x, y: elemOptions.y}; else coords = self.mapConf.getCoords(elemOptions.latitude, elemOptions.longitude); if (elemOptions.type == "square") { plot = { "mapElem": self.paper.rect( coords.x - (elemOptions.size / 2), coords.y - (elemOptions.size / 2), elemOptions.size, elemOptions.size ).attr(elemOptions.attrs) }; } else if (elemOptions.type == "image") { plot = { "mapElem": self.paper.image( elemOptions.url, coords.x - elemOptions.width / 2, coords.y - elemOptions.height / 2, elemOptions.width, elemOptions.height ).attr(elemOptions.attrs) }; } else if (elemOptions.type == "svg") { plot = {"mapElem": self.paper.path(elemOptions.path).attr(elemOptions.attrs)}; plot.mapElem.originalWidth = plot.mapElem.getBBox().width; plot.mapElem.originalHeight = plot.mapElem.getBBox().height; plot.mapElem.transform("m" + (elemOptions.width / plot.mapElem.originalWidth) + ",0,0," + (elemOptions.height / plot.mapElem.originalHeight) + "," + (coords.x - elemOptions.width / 2) + "," + (coords.y - elemOptions.height / 2)); } else { // Default = circle plot = {"mapElem": self.paper.circle(coords.x, coords.y, elemOptions.size / 2).attr(elemOptions.attrs)}; } self.initElem(plot, elemOptions, id); return plot; }, /* * Set target link on elem */ setHref: function (elem) { var self = this; elem.attr({cursor: "pointer"}); $(elem.node).on("click." 
+ pluginName, function () { if (!self.panning && elem.href) window.open(elem.href, elem.target); }); }, /* * Set a tooltip for the areas and plots * @param elem area or plot element * @param content the content to set in the tooltip */ setTooltip: function (elem) { var self = this; var tooltipTO = 0; var cssClass = self.$tooltip.attr('class'); var updateTooltipPosition = function (x, y) { var tooltipPosition = { "left": Math.min(self.$map.width() - self.$tooltip.outerWidth() - 5, x - self.$map.offset().left + 10), "top": Math.min(self.$map.height() - self.$tooltip.outerHeight() - 5, y - self.$map.offset().top + 20) }; if (elem.tooltip.overflow !== undefined) { if (elem.tooltip.overflow.right !== undefined && elem.tooltip.overflow.right === true) { tooltipPosition.left = x - self.$map.offset().left + 10; } if (elem.tooltip.overflow.bottom !== undefined && elem.tooltip.overflow.bottom === true) { tooltipPosition.top = y - self.$map.offset().top + 20; } } self.$tooltip.css(tooltipPosition); }; $(elem.node).on("mouseover." + pluginName, function (e) { tooltipTO = setTimeout( function () { self.$tooltip.attr("class", cssClass); if (elem.tooltip !== undefined) { if (elem.tooltip.content !== undefined) { // if tooltip.content is function, call it. Otherwise, assign it directly. var content = (typeof elem.tooltip.content === "function") ? elem.tooltip.content(elem) : elem.tooltip.content; self.$tooltip.html(content).css("display", "block"); } if (elem.tooltip.cssClass !== undefined) { self.$tooltip.addClass(elem.tooltip.cssClass); } } updateTooltipPosition(e.pageX, e.pageY); }, 120 ); }).on("mouseout." + pluginName, function () { clearTimeout(tooltipTO); self.$tooltip.css("display", "none"); }).on("mousemove." 
+ pluginName, function (e) { updateTooltipPosition(e.pageX, e.pageY); }); }, /* * Set user defined handlers for events on areas and plots * @param id the id of the element * @param elemOptions the element parameters * @param mapElem the map element to set callback on * @param textElem the optional text within the map element */ setEventHandlers: function (id, elemOptions, mapElem, textElem) { var self = this; $.each(elemOptions.eventHandlers, function (event) { (function (event) { $(mapElem.node).on(event, function (e) { if (!self.panning) elemOptions.eventHandlers[event](e, id, mapElem, textElem, elemOptions); }); if (textElem) { $(textElem.node).on(event, function (e) { if (!self.panning) elemOptions.eventHandlers[event](e, id, mapElem, textElem, elemOptions); }); } })(event); }); }, /* * Draw a legend for areas and / or plots * @param legendOptions options for the legend to draw * @param legendType the type of the legend : "area" or "plot" * @param elems collection of plots or areas on the maps * @param legendIndex index of the legend in the conf array */ drawLegend: function (legendOptions, legendType, elems, scale, legendIndex) { var self = this; var $legend = {}; var legendPaper = {}; var width = 0; var height = 0; var title = null; var elem = {}; var elemBBox = {}; var label = {}; var i = 0; var x = 0; var y = 0; var yCenter = 0; var sliceAttrs = []; var length = 0; $legend = $("." 
+ legendOptions.cssClass, self.$container).empty();
    legendPaper = new Raphael($legend.get(0));
    height = width = 0;

    // Set the title of the legend
    if (legendOptions.title && legendOptions.title !== "") {
        title = legendPaper.text(legendOptions.marginLeftTitle, 0, legendOptions.title).attr(legendOptions.titleAttrs);
        title.attr({y: 0.5 * title.getBBox().height});

        width = legendOptions.marginLeftTitle + title.getBBox().width;
        height += legendOptions.marginBottomTitle + title.getBBox().height;
    }

    // Calculate attrs (and width, height and r (radius)) for legend elements, and yCenter for horizontal legends
    for (i = 0, length = legendOptions.slices.length; i < length; ++i) {
        var yCenterCurrent = 0;

        // Check if size is defined. If not, take defaultPlot size
        if (legendOptions.slices[i].size === undefined)
            legendOptions.slices[i].size = self.options.map.defaultPlot.size;

        if (legendOptions.slices[i].legendSpecificAttrs === undefined)
            legendOptions.slices[i].legendSpecificAttrs = {};

        sliceAttrs[i] = $.extend(
            {},
            (legendType == "plot") ? self.options.map.defaultPlot.attrs : self.options.map.defaultArea.attrs,
            legendOptions.slices[i].attrs,
            legendOptions.slices[i].legendSpecificAttrs
        );

        // Fill in missing dimensions depending on the slice shape
        if (legendType == "area") {
            if (sliceAttrs[i].width === undefined)
                sliceAttrs[i].width = 30;
            if (sliceAttrs[i].height === undefined)
                sliceAttrs[i].height = 20;
        } else if (legendOptions.slices[i].type == "square") {
            if (sliceAttrs[i].width === undefined)
                sliceAttrs[i].width = legendOptions.slices[i].size;
            if (sliceAttrs[i].height === undefined)
                sliceAttrs[i].height = legendOptions.slices[i].size;
        } else if (legendOptions.slices[i].type == "image" || legendOptions.slices[i].type == "svg") {
            if (sliceAttrs[i].width === undefined)
                sliceAttrs[i].width = legendOptions.slices[i].width;
            if (sliceAttrs[i].height === undefined)
                sliceAttrs[i].height = legendOptions.slices[i].height;
        } else {
            if (sliceAttrs[i].r === undefined)
                sliceAttrs[i].r = legendOptions.slices[i].size / 2;
        }

        // Compute yCenter for this legend slice
        yCenterCurrent = legendOptions.marginBottomTitle;
        // Add title height if it exists
        if (title) {
            yCenterCurrent += title.getBBox().height;
        }
        if (legendType == "plot" && (legendOptions.slices[i].type === undefined || legendOptions.slices[i].type == "circle")) {
            yCenterCurrent += scale * sliceAttrs[i].r;
        } else {
            yCenterCurrent += scale * sliceAttrs[i].height / 2;
        }
        // Update yCenter if current larger
        yCenter = Math.max(yCenter, yCenterCurrent);
    }

    if (legendOptions.mode == "horizontal") {
        width = legendOptions.marginLeft;
    }

    // Draw legend elements (circle, square or image in vertical or horizontal mode)
    for (i = 0, length = legendOptions.slices.length; i < length; ++i) {
        if (legendOptions.slices[i].display === undefined || legendOptions.slices[i].display === true) {
            if (legendType == "area") {
                if (legendOptions.mode == "horizontal") {
                    x = width + legendOptions.marginLeft;
                    y = yCenter - (0.5 * scale * sliceAttrs[i].height);
                } else {
                    x = legendOptions.marginLeft;
                    y = height;
                }

                elem = legendPaper.rect(x, y, scale * (sliceAttrs[i].width), scale * (sliceAttrs[i].height));
            } else if (legendOptions.slices[i].type == "square") {
                if (legendOptions.mode == "horizontal") {
                    x = width + legendOptions.marginLeft;
                    y = yCenter - (0.5 * scale * sliceAttrs[i].height);
                } else {
                    x = legendOptions.marginLeft;
                    y = height;
                }

                elem = legendPaper.rect(x, y, scale * (sliceAttrs[i].width), scale * (sliceAttrs[i].height));

            } else if (legendOptions.slices[i].type == "image" || legendOptions.slices[i].type == "svg") {
                if (legendOptions.mode == "horizontal") {
                    x = width + legendOptions.marginLeft;
                    y = yCenter - (0.5 * scale * sliceAttrs[i].height);
                } else {
                    x = legendOptions.marginLeft;
                    y = height;
                }

                if (legendOptions.slices[i].type == "image") {
                    elem = legendPaper.image(
                        legendOptions.slices[i].url, x, y, scale * sliceAttrs[i].width, scale * sliceAttrs[i].height);
                } else {
                    elem = legendPaper.path(legendOptions.slices[i].path);
                    elem.transform("m" + ((scale * legendOptions.slices[i].width) / elem.getBBox().width) + ",0,0," + ((scale * legendOptions.slices[i].height) / elem.getBBox().height) + "," + x + "," + y);
                }
            } else {
                if (legendOptions.mode == "horizontal") {
                    x = width + legendOptions.marginLeft + scale * (sliceAttrs[i].r);
                    y = yCenter;
                } else {
                    x = legendOptions.marginLeft + scale * (sliceAttrs[i].r);
                    y = height + scale * (sliceAttrs[i].r);
                }
                elem = legendPaper.circle(x, y, scale * (sliceAttrs[i].r));
            }

            // Set attrs to the element drawn above
            // (dimension keys are removed first: they are not Raphael attrs)
            delete sliceAttrs[i].width;
            delete sliceAttrs[i].height;
            delete sliceAttrs[i].r;
            elem.attr(sliceAttrs[i]);
            elemBBox = elem.getBBox();

            // Draw the label associated with the element
            if (legendOptions.mode == "horizontal") {
                x = width + legendOptions.marginLeft + elemBBox.width + legendOptions.marginLeftLabel;
                y = yCenter;
            } else {
                x = legendOptions.marginLeft + elemBBox.width + legendOptions.marginLeftLabel;
                y = height + (elemBBox.height / 2);
            }

            label = legendPaper.text(x, y, legendOptions.slices[i].label).attr(legendOptions.labelAttrs);

            // Update the width and height for the paper
            if (legendOptions.mode == "horizontal") {
                var currentHeight = legendOptions.marginBottom + elemBBox.height;
                width += legendOptions.marginLeft + elemBBox.width + legendOptions.marginLeftLabel + label.getBBox().width;
                if (legendOptions.slices[i].type != "image" && legendType != "area") {
                    currentHeight += legendOptions.marginBottomTitle;
                }
                // Add title height if it exists
                if (title) {
                    currentHeight += title.getBBox().height;
                }
                height = Math.max(height, currentHeight);
            } else {
                width = Math.max(width, legendOptions.marginLeft + elemBBox.width + legendOptions.marginLeftLabel + label.getBBox().width);
                height += legendOptions.marginBottom + elemBBox.height;
            }

            $(elem.node).attr({"data-type": "elem", "data-index": i, "data-hidden": 0});
            $(label.node).attr({"data-type": "label", "data-index": i, "data-hidden": 0});

            // Hide map elements when the user clicks on a legend item
            if (legendOptions.hideElemsOnClick.enabled) {
                // Hide/show elements when user clicks on a legend element
                label.attr({cursor: "pointer"});
                elem.attr({cursor: "pointer"});

                self.setHoverOptions(elem, sliceAttrs[i], sliceAttrs[i]);
                self.setHoverOptions(label, legendOptions.labelAttrs, legendOptions.labelAttrsHover);
                self.setHover(elem, label);
                self.handleClickOnLegendElem(legendOptions, legendOptions.slices[i], label, elem, elems, legendIndex);
            }
        }
    }

    // VMLWidth option allows you to set static width for the legend
    // only for VML render because text.getBBox() returns wrong values on IE6/7
    if (Raphael.type != "SVG" && legendOptions.VMLWidth)
        width = legendOptions.VMLWidth;

    legendPaper.setSize(width, height);
    return legendPaper;
},

/*
 * Allow to hide elements of the map when the user clicks on a related legend item
 * @param legendOptions options for the legend to draw
 * @param sliceOptions options of the slice
 * @param label label of the legend item
 * @param elem element of the legend item
 * @param elems collection of plots or areas displayed on the map
 * @param legendIndex index of the
legend in the conf array */
handleClickOnLegendElem: function (legendOptions, sliceOptions, label, elem, elems, legendIndex) {
    var self = this;
    var hideMapElems = function (e, hideOtherElems, animDuration) {
        var elemValue = 0;
        var hidden = $(label.node).attr('data-hidden');
        var hiddenNewAttr = (hidden === '0') ? {"data-hidden": '1'} : {"data-hidden": '0'};

        // Check animDuration: if not set, this is a regular click, use the value specified in options
        if (animDuration === undefined) animDuration = legendOptions.hideElemsOnClick.animDuration;

        // Dim the legend label while its elements are hidden
        if (hidden === '0') {
            if (animDuration > 0) label.animate({"opacity": 0.5}, animDuration);
            else label.attr({"opacity": 0.5});
        } else {
            if (animDuration > 0) label.animate({"opacity": 1}, animDuration);
            else label.attr({"opacity": 1});
        }

        $.each(elems, function (id) {
            // Retrieve stored data of element
            // 'hidden-by' contains the list of legendIndex that is hiding this element
            var hiddenBy = elems[id].mapElem.data('hidden-by');
            // Set to empty object if undefined
            if (hiddenBy === undefined) hiddenBy = {};

            if ($.isArray(elems[id].value)) {
                elemValue = elems[id].value[legendIndex];
            } else {
                elemValue = elems[id].value;
            }

            // Element matches this slice either by exact sliceValue or by min/max range
            if ((sliceOptions.sliceValue !== undefined && elemValue == sliceOptions.sliceValue) ||
                ((sliceOptions.sliceValue === undefined) &&
                (sliceOptions.min === undefined || elemValue >= sliceOptions.min) &&
                (sliceOptions.max === undefined || elemValue <= sliceOptions.max))
            ) {
                (function (id) {
                    if (hidden === '0') { // we want to hide this element
                        hiddenBy[legendIndex] = true; // add legendIndex to the data object for later use
                        self.setElementOpacity(elems[id], legendOptions.hideElemsOnClick.opacity, animDuration);
                    } else { // We want to show this element
                        delete hiddenBy[legendIndex]; // Remove this legendIndex from object
                        // Check if another legendIndex is defined
                        // We will show this element only if no other legend is still hiding it
                        if ($.isEmptyObject(hiddenBy)) {
                            self.setElementOpacity(
                                elems[id],
                                elems[id].mapElem.originalAttrs.opacity !== undefined ? elems[id].mapElem.originalAttrs.opacity : 1,
                                animDuration
                            );
                        }
                    }
                    // Update elem data with new values
                    elems[id].mapElem.data('hidden-by', hiddenBy);
                })(id);
            }
        });

        $(elem.node).attr(hiddenNewAttr);
        $(label.node).attr(hiddenNewAttr);

        // In exclusive mode, clicking a slice hides every other visible slice
        if ((hideOtherElems === undefined || hideOtherElems === true) &&
            legendOptions.exclusive !== undefined && legendOptions.exclusive === true
        ) {
            $("[data-type='elem'][data-hidden=0]", self.$container).each(function () {
                if ($(this).attr('data-index') !== $(elem.node).attr('data-index')) {
                    $(this).trigger("click." + pluginName, false);
                }
            });
        }
    };
    $(label.node).on("click." + pluginName, hideMapElems);
    $(elem.node).on("click." + pluginName, hideMapElems);

    if (sliceOptions.clicked !== undefined && sliceOptions.clicked === true) {
        $(elem.node).trigger("click." + pluginName, false);
    }
},

/*
 * Create all legends for a specified type (area or plot)
 * @param legendType the type of the legend : "area" or "plot"
 * @param elems collection of plots or areas displayed on the map
 * @param scale scale ratio of the map
 */
createLegends: function (legendType, elems, scale) {
    var self = this;
    var legendsOptions = self.options.legend[legendType];
    var legends = [];

    // Normalize to an array: a single legend conf object is also accepted
    if (!$.isArray(self.options.legend[legendType])) {
        legendsOptions = [self.options.legend[legendType]];
    }

    for (var j = 0; j < legendsOptions.length; ++j) {
        // Check for class existence
        if (legendsOptions[j].cssClass === "" || $("."
+ legendsOptions[j].cssClass, self.$container).length === 0) { throw new Error("The legend class `" + legendsOptions[j].cssClass + "` doesn't exists."); } if (legendsOptions[j].display === true && $.isArray(legendsOptions[j].slices) && legendsOptions[j].slices.length > 0) { legends.push(self.drawLegend(legendsOptions[j], legendType, elems, scale, j)); } } return legends; }, /* * Set the attributes on hover and the attributes to restore for a map element * @param elem the map element * @param originalAttrs the original attributes to restore on mouseout event * @param attrsHover the attributes to set on mouseover event */ setHoverOptions: function (elem, originalAttrs, attrsHover) { // Disable transform option on hover for VML (IE<9) because of several bugs if (Raphael.type != "SVG") delete attrsHover.transform; elem.attrsHover = attrsHover; if (elem.attrsHover.transform) elem.originalAttrs = $.extend({transform: "s1"}, originalAttrs); else elem.originalAttrs = originalAttrs; }, /* * Set the hover behavior (mouseover & mouseout) for plots and areas * @param mapElem the map element * @param textElem the optional text element (within the map element) */ setHover: function (mapElem, textElem) { var self = this; var $mapElem = {}; var $textElem = {}; var hoverTO = 0; var overBehaviour = function () { hoverTO = setTimeout(function () { self.elemHover(mapElem, textElem); }, 120); }; var outBehaviour = function () { clearTimeout(hoverTO); self.elemOut(mapElem, textElem); }; $mapElem = $(mapElem.node); $mapElem.on("mouseover." + pluginName, overBehaviour); $mapElem.on("mouseout." + pluginName, outBehaviour); if (textElem) { $textElem = $(textElem.node); $textElem.on("mouseover." + pluginName, overBehaviour); $(textElem.node).on("mouseout." 
+ pluginName, outBehaviour); } }, /* * Remove the hover behavior for plots and areas * @param mapElem the map element * @param textElem the optional text element (within the map element) */ unsetHover: function (mapElem, textElem) { $(mapElem.node).off("." + pluginName); if (textElem) $(textElem.node).off("." + pluginName); }, /* * Set he behaviour for "mouseover" event * @param mapElem mapElem the map element * @param textElem the optional text element (within the map element) */ elemHover: function (mapElem, textElem) { var self = this; // Set mapElem if (mapElem.attrsHover.animDuration > 0) mapElem.animate(mapElem.attrsHover, mapElem.attrsHover.animDuration); else mapElem.attr(mapElem.attrsHover); // Set textElem if (textElem) { if (textElem.attrsHover.animDuration > 0) textElem.animate(textElem.attrsHover, textElem.attrsHover.animDuration); else textElem.attr(textElem.attrsHover); } // workaround for older version of Raphael if (self.paper.safari) self.paper.safari(); }, /* * Set he behaviour for "mouseout" event * @param mapElem the map element * @param textElem the optional text element (within the map element) */ elemOut: function (mapElem, textElem) { var self = this; // Set mapElem if (mapElem.attrsHover.animDuration > 0) mapElem.animate(mapElem.originalAttrs, mapElem.attrsHover.animDuration); else mapElem.attr(mapElem.originalAttrs); // Set textElem if (textElem) { if (textElem.attrsHover.animDuration > 0) textElem.animate(textElem.originalAttrs, textElem.attrsHover.animDuration); else textElem.attr(textElem.originalAttrs); } // workaround for older version of Raphael if (self.paper.safari) self.paper.safari(); }, /* * Get element options by merging default options, element options and legend options * @param defaultOptions * @param elemOptions * @param legendOptions */ getElemOptions: function (defaultOptions, elemOptions, legendOptions) { var self = this; var options = $.extend(true, {}, defaultOptions, elemOptions); if (options.value !== undefined) { 
if ($.isArray(legendOptions)) { for (var i = 0, length = legendOptions.length; i < length; ++i) { options = $.extend(true, {}, options, self.getLegendSlice(options.value[i], legendOptions[i])); } } else { options = $.extend(true, {}, options, self.getLegendSlice(options.value, legendOptions)); } } return options; }, /* * Get the coordinates of the text relative to a bbox and a position * @param bbox the boundary box of the element * @param textPosition the wanted text position (inner, right, left, top or bottom) */ getTextPosition: function (bbox, textPosition, margin) { var textX = 0; var textY = 0; var textAnchor = ""; switch (textPosition) { case "bottom" : textX = (bbox.x + bbox.x2) / 2; textY = bbox.y2 + margin; textAnchor = "middle"; break; case "top" : textX = (bbox.x + bbox.x2) / 2; textY = bbox.y - margin; textAnchor = "middle"; break; case "left" : textX = bbox.x - margin; textY = (bbox.y + bbox.y2) / 2; textAnchor = "end"; break; case "right" : textX = bbox.x2 + margin; textY = (bbox.y + bbox.y2) / 2; textAnchor = "start"; break; default : // "inner" position textX = (bbox.x + bbox.x2) / 2; textY = (bbox.y + bbox.y2) / 2; textAnchor = "middle"; } return {"x": textX, "y": textY, "textAnchor": textAnchor}; }, /* * Get the legend conf matching with the value * @param value the value to match with a slice in the legend * @param legend the legend params object * @return the legend slice matching with the value */ getLegendSlice: function (value, legend) { for (var i = 0, length = legend.slices.length; i < length; ++i) { if ((legend.slices[i].sliceValue !== undefined && value == legend.slices[i].sliceValue) || ((legend.slices[i].sliceValue === undefined) && (legend.slices[i].min === undefined || value >= legend.slices[i].min) && (legend.slices[i].max === undefined || value <= legend.slices[i].max)) ) { return legend.slices[i]; } } return {}; }, /* * Animated view box changes * As from http://code.voidblossom.com/animating-viewbox-easing-formulas/, * (from 
https://github.com/theshaun works on mapael) * @param x coordinate of the point to focus on * @param y coordinate of the point to focus on * @param w map defined width * @param h map defined height * @param duration defined length of time for animation * @param easingFunction defined Raphael supported easing_formula to use * @param callback method when animated action is complete */ animateViewBox: function (x, y, w, h, duration, easingFunction) { var self = this; var cx = self.paper._viewBox ? self.paper._viewBox[0] : 0; var dx = x - cx; var cy = self.paper._viewBox ? self.paper._viewBox[1] : 0; var dy = y - cy; var cw = self.paper._viewBox ? self.paper._viewBox[2] : self.paper.width; var dw = w - cw; var ch = self.paper._viewBox ? self.paper._viewBox[3] : self.paper.height; var dh = h - ch; var interval = 25; var steps = duration / interval; var currentStep = 0; var easingFormula; easingFunction = easingFunction || "linear"; easingFormula = Raphael.easing_formulas[easingFunction]; clearInterval(self.animationIntervalID); self.animationIntervalID = setInterval(function () { var ratio = currentStep / steps; self.paper.setViewBox(cx + dx * easingFormula(ratio), cy + dy * easingFormula(ratio), cw + dw * easingFormula(ratio), ch + dh * easingFormula(ratio), false); if (currentStep++ >= steps) { clearInterval(self.animationIntervalID); clearTimeout(self.zoomTO); self.zoomTO = setTimeout(function () { self.$map.trigger("afterZoom", {x1: x, y1: y, x2: (x + w), y2: (y + h)}); }, 150); } }, interval ); }, // Default map options defaultOptions: { map: { cssClass: "map", tooltip: { cssClass: "mapTooltip" }, defaultArea: { attrs: { fill: "#343434", stroke: "#5d5d5d", "stroke-width": 1, "stroke-linejoin": "round" }, attrsHover: { fill: "#f38a03", animDuration: 300 }, text: { position: "inner", margin: 10, attrs: { "font-size": 15, fill: "#c7c7c7" }, attrsHover: { fill: "#eaeaea", "animDuration": 300 } }, target: "_self" }, defaultPlot: { type: "circle", size: 15, attrs: { 
fill: "#0088db", stroke: "#fff", "stroke-width": 0, "stroke-linejoin": "round" }, attrsHover: { "stroke-width": 3, animDuration: 300 }, text: { position: "right", margin: 10, attrs: { "font-size": 15, fill: "#c7c7c7" }, attrsHover: { fill: "#eaeaea", animDuration: 300 } }, target: "_self" }, defaultLink: { factor: 0.5, attrs: { stroke: "#0088db", "stroke-width": 2 }, attrsHover: { animDuration: 300 }, text: { position: "inner", margin: 10, attrs: { "font-size": 15, fill: "#c7c7c7" }, attrsHover: { fill: "#eaeaea", animDuration: 300 } }, target: "_self" }, zoom: { enabled: false, maxLevel: 10, step: 0.25, zoomInCssClass: "zoomIn", zoomOutCssClass: "zoomOut", mousewheel: true, touch: true, animDuration: 200, animEasing: "linear" } }, legend: { area: [], plot: [] }, areas: {}, plots: {}, links: {} }, // Default legends option legendDefaultOptions: { area: { cssClass: "areaLegend", display: true, marginLeft: 10, marginLeftTitle: 5, marginBottomTitle: 10, marginLeftLabel: 10, marginBottom: 10, titleAttrs: { "font-size": 16, fill: "#343434", "text-anchor": "start" }, labelAttrs: { "font-size": 12, fill: "#343434", "text-anchor": "start" }, labelAttrsHover: { fill: "#787878", animDuration: 300 }, hideElemsOnClick: { enabled: true, opacity: 0.2, animDuration: 300 }, slices: [], mode: "vertical" }, plot: { cssClass: "plotLegend", display: true, marginLeft: 10, marginLeftTitle: 5, marginBottomTitle: 10, marginLeftLabel: 10, marginBottom: 10, titleAttrs: { "font-size": 16, fill: "#343434", "text-anchor": "start" }, labelAttrs: { "font-size": 12, fill: "#343434", "text-anchor": "start" }, labelAttrsHover: { fill: "#787878", animDuration: 300 }, hideElemsOnClick: { enabled: true, opacity: 0.2 }, slices: [], mode: "vertical" } } }; // Extend jQuery with Mapael $[pluginName] = Mapael; // Add jQuery DOM function $.fn[pluginName] = function (options) { // Call Mapael on each element return this.each(function () { // Avoid leaking problem on multiple instanciation by removing an old 
mapael object on a container if ($.data(this, pluginName)) { $.data(this, pluginName).destroy(); } // Create Mapael and save it as jQuery data // This allow external access to Mapael using $(".mapcontainer").data("mapael") $.data(this, pluginName, new Mapael(this, options)); }); }; }));
js/jquery.mapael.js
/*! * * Jquery Mapael - Dynamic maps jQuery plugin (based on raphael.js) * Requires jQuery, raphael.js and jquery.mousewheel * * Version: 2.0.0-dev * * Copyright (c) 2015 Vincent Brouté (http://www.vincentbroute.fr/mapael) * Licensed under the MIT license (http://www.opensource.org/licenses/mit-license.php). * * Thanks to Indigo744 * */ (function (factory) { if (typeof exports === 'object') { // CommonJS module.exports = factory(require('jquery'), require('raphael'), require('mousewheel')); } else if (typeof define === 'function' && define.amd) { // AMD. Register as an anonymous module. define(['jquery', 'raphael', 'mousewheel'], factory); } else { // Browser globals factory(jQuery, Raphael, jQuery.fn.mousewheel); } }(function ($, Raphael, mousewheel, undefined) { "use strict"; // The plugin name (used on several places) var pluginName = "mapael"; // Version number of jQuery Mapael. See http://semver.org/ for more information. var version = "2.0.0-dev"; /* * Mapael constructor * Init instance vars and call init() * @param container the DOM element on which to apply the plugin * @param options the complete options to use */ var Mapael = function (container, options) { var self = this; // the global container (DOM element object) self.container = container; // the global container (jQuery object) self.$container = $(container); // the global options self.options = self.extendDefaultOptions(options); // Save initial HTML content (used by destroy method) self.initialHTMLContent = self.$container.html(); // zoom TimeOut handler (used to set and clear) self.zoomTO = 0; // zoom center coordinate (set at touchstart) self.zoomCenterX = 0; self.zoomCenterY = 0; // Zoom pinch (set at touchstart and touchmove) self.previousPinchDist = 0; // Zoom data self.zoomData = { zoomLevel: 0, zoomX: 0, zoomY: 0, panX: 0, panY: 0 }; // resize TimeOut handler (used to set and clear) self.resizeTO = 0; // Panning: tell if panning action is in progress self.panning = false; // Panning 
TimeOut handler (used to set and clear) self.panningTO = 0; // Animate view box Interval handler (used to set and clear) self.animationIntervalID = null; // Map subcontainer jQuery object self.$map = {}; // The tooltip jQuery object self.$tooltip = {}; // The paper Raphael object self.paper = {}; // The areas object list self.areas = {}; // The plots object list self.plots = {}; // The links object list self.links = {}; // The map configuration object (taken from map file) self.mapConf = {}; // Let's start the initialization self.init(); }; /* * Mapael Prototype * Defines all methods and properties needed by Mapael * Each mapael object inherits their properties and methods from this prototype */ Mapael.prototype = { /* * Version number */ version: version, /* * Initialize the plugin * Called by the constructor */ init: function () { var self = this; // Init check for class existence if (self.options.map.cssClass === "" || $("." + self.options.map.cssClass, self.container).length === 0) { throw new Error("The map class `" + self.options.map.cssClass + "` doesn't exists"); } // Create the tooltip container self.$tooltip = $("<div>").addClass(self.options.map.tooltip.cssClass).css("display", "none"); // Get the map container, empty it then append tooltip self.$map = $("." 
+ self.options.map.cssClass, self.container).empty().append(self.$tooltip); // Get the map from $.mapael or $.fn.mapael (backward compatibility) if ($[pluginName] && $[pluginName].maps && $[pluginName].maps[self.options.map.name]) { // Mapael version >= 2.x self.mapConf = $[pluginName].maps[self.options.map.name]; } else if ($.fn[pluginName] && $.fn[pluginName].maps && $.fn[pluginName].maps[self.options.map.name]) { // Mapael version <= 1.x - DEPRECATED self.mapConf = $.fn[pluginName].maps[self.options.map.name]; if (window.console && window.console.warn) { window.console.warn("Extending $.fn.mapael is deprecated (map '" + self.options.map.name + "')"); } } else { throw new Error("Unknown map '" + self.options.map.name + "'"); } // Create Raphael paper self.paper = new Raphael(self.$map[0], self.mapConf.width, self.mapConf.height); // add plugin class name on element self.$container.addClass(pluginName); if (self.options.map.tooltip.css) self.$tooltip.css(self.options.map.tooltip.css); self.paper.setViewBox(0, 0, self.mapConf.width, self.mapConf.height, false); // Draw map areas $.each(self.mapConf.elems, function (id) { var elemOptions = self.getElemOptions( self.options.map.defaultArea, (self.options.areas[id] ? self.options.areas[id] : {}), self.options.legend.area ); self.areas[id] = {"mapElem": self.paper.path(self.mapConf.elems[id]).attr(elemOptions.attrs)}; }); // Hook that allows to add custom processing on the map if (self.options.map.beforeInit) self.options.map.beforeInit(self.$container, self.paper, self.options); // Init map areas in a second loop (prevent texts to be hidden by map elements) $.each(self.mapConf.elems, function (id) { var elemOptions = self.getElemOptions( self.options.map.defaultArea, (self.options.areas[id] ? 
self.options.areas[id] : {}), self.options.legend.area ); self.initElem(self.areas[id], elemOptions, id); }); // Draw links self.links = self.drawLinksCollection(self.options.links); // Draw plots $.each(self.options.plots, function (id) { self.plots[id] = self.drawPlot(id); }); // Attach zoom event self.$container.on("zoom." + pluginName, function (e, zoomOptions) { self.onZoomEvent(e, zoomOptions); }); if (self.options.map.zoom.enabled) { // Enable zoom self.initZoom(self.mapConf.width, self.mapConf.height, self.options.map.zoom); } // Set initial zoom if (self.options.map.zoom.init !== undefined) { if (self.options.map.zoom.init.animDuration === undefined) { self.options.map.zoom.init.animDuration = 0; } self.$container.trigger("zoom." + pluginName, self.options.map.zoom.init); } // Create the legends for areas self.createLegends("area", self.areas, 1); // Attach update event self.$container.on("update." + pluginName, function (e, opt) { self.onUpdateEvent(e, opt); }); // Attach showElementsInRange event self.$container.on("showElementsInRange." 
+ pluginName, function (e, opt) { self.onShowElementsInRange(e, opt); }); // Handle map size if (self.options.map.width) { // NOT responsive: map has a fixed width self.paper.setSize(self.options.map.width, self.mapConf.height * (self.options.map.width / self.mapConf.width)); // Create the legends for plots taking into account the scale of the map self.createLegends("plot", self.plots, (self.options.map.width / self.mapConf.width)); } else { // Responsive: handle resizing of the map self.handleMapResizing(); } // Hook that allows to add custom processing on the map if (self.options.map.afterInit) self.options.map.afterInit(self.$container, self.paper, self.areas, self.plots, self.options); $(self.paper.desc).append(" and Mapael " + self.version + " (http://www.vincentbroute.fr/mapael/)"); }, /* * Destroy mapael * This function effectively detach mapael from the container * - Set the container back to the way it was before mapael instanciation * - Remove all data associated to it (memory can then be free'ed by browser) * * This method can be call directly by user: * $(".mapcontainer").data("mapael").destroy(); * * This method is also automatically called if the user try to call mapael * on a container already containing a mapael instance */ destroy: function () { var self = this; // Empty the container (this will also detach all event listeners) self.$container.empty(); // Detach the global resize event handler if (self.onResizeEvent) $(window).off("resize." 
+ pluginName, self.onResizeEvent); // Replace initial HTML content self.$container.html(self.initialHTMLContent); // Remove mapael class self.$container.removeClass(pluginName); // Remove the data self.$container.removeData(pluginName); // Remove all internal reference self.container = undefined; self.$container = undefined; self.options = undefined; self.paper = undefined; self.$map = undefined; self.$tooltip = undefined; self.mapConf = undefined; self.areas = undefined; self.plots = undefined; self.links = undefined; }, handleMapResizing: function () { var self = this; // Create the legends for plots taking into account the scale of the map var createPlotLegend = function () { self.createLegends("plot", self.plots, (self.$map.width() / self.mapConf.width)); self.$map.off("resizeEnd." + pluginName, createPlotLegend); }; // onResizeEvent: call when the window element trigger the resize event // We create it inside this function (and not in the prototype) in order to have a closure // Otherwise, in the prototype, 'this' when triggered is *not* the mapael object but the global window self.onResizeEvent = function () { // Clear any previous setTimeout (avoid too much triggering) clearTimeout(self.resizeTO); // setTimeout to wait for the user to finish its resizing self.resizeTO = setTimeout(function () { self.$map.trigger("resizeEnd." + pluginName); }, 150); }; // Attach resize handler $(window).on("resize." + pluginName, self.onResizeEvent); self.$map.on("resizeEnd." + pluginName, function () { var containerWidth = self.$map.width(); if (self.paper.width != containerWidth) { self.paper.setSize(containerWidth, self.mapConf.height * (containerWidth / self.mapConf.width)); } }).on("resizeEnd." + pluginName, createPlotLegend).trigger("resizeEnd." 
+ pluginName); }, /* * Extend the user option with the default one * @param options the user options * @return new options object */ extendDefaultOptions: function (options) { // Extend default options with user options options = $.extend(true, {}, Mapael.prototype.defaultOptions, options); // Extend legend default options $.each(options.legend, function (type) { if ($.isArray(options.legend[type])) { for (var i = 0; i < options.legend[type].length; ++i) options.legend[type][i] = $.extend(true, {}, Mapael.prototype.legendDefaultOptions[type], options.legend[type][i]); } else { options.legend[type] = $.extend(true, {}, Mapael.prototype.legendDefaultOptions[type], options.legend[type]); } }); return options; }, /* * Init the element "elem" on the map (drawing, setting attributes, events, tooltip, ...) */ initElem: function (elem, elemOptions, id) { var self = this; var bbox = {}; var textPosition = {}; if (elemOptions.value !== undefined) elem.value = elemOptions.value; // Init attrsHover self.setHoverOptions(elem.mapElem, elemOptions.attrs, elemOptions.attrsHover); // Init the label related to the element if (elemOptions.text && elemOptions.text.content !== undefined) { // Set a text label in the area bbox = elem.mapElem.getBBox(); textPosition = self.getTextPosition(bbox, elemOptions.text.position, elemOptions.text.margin); elemOptions.text.attrs["text-anchor"] = textPosition.textAnchor; elem.textElem = self.paper.text(textPosition.x, textPosition.y, elemOptions.text.content).attr(elemOptions.text.attrs); self.setHoverOptions(elem.textElem, elemOptions.text.attrs, elemOptions.text.attrsHover); if (elemOptions.eventHandlers) self.setEventHandlers(id, elemOptions, elem.mapElem, elem.textElem); self.setHover(elem.mapElem, elem.textElem); $(elem.textElem.node).attr("data-id", id); } else { if (elemOptions.eventHandlers) self.setEventHandlers(id, elemOptions, elem.mapElem); self.setHover(elem.mapElem); } // Init the tooltip if (elemOptions.tooltip) { 
elem.mapElem.tooltip = elemOptions.tooltip; self.setTooltip(elem.mapElem); if (elemOptions.text && elemOptions.text.content !== undefined) { elem.textElem.tooltip = elemOptions.tooltip; self.setTooltip(elem.textElem); } } // Init the link if (elemOptions.href) { elem.mapElem.href = elemOptions.href; elem.mapElem.target = elemOptions.target; self.setHref(elem.mapElem); if (elemOptions.text && elemOptions.text.content !== undefined) { elem.textElem.href = elemOptions.href; elem.textElem.target = elemOptions.target; self.setHref(elem.textElem); } } $(elem.mapElem.node).attr("data-id", id); }, /* * Init zoom and panning for the map * @param mapWidth * @param mapHeight * @param zoomOptions */ initZoom: function (mapWidth, mapHeight, zoomOptions) { var self = this; var $zoomIn; var $zoomOut; var mousedown = false; var previousX = 0; var previousY = 0; // init Zoom data $.extend(self.zoomData, { zoomLevel: 0, panX: 0, panY: 0 }); // init zoom button $zoomIn = $("<div>").addClass(zoomOptions.zoomInCssClass).html("+"); $zoomOut = $("<div>").addClass(zoomOptions.zoomOutCssClass).html("&#x2212;"); self.$map.append($zoomIn).append($zoomOut); $zoomIn.on("click." + pluginName, function () { self.$container.trigger("zoom." + pluginName, {"level": self.zoomData.zoomLevel + 1}); }); $zoomOut.on("click." + pluginName, function () { self.$container.trigger("zoom." + pluginName, {"level": self.zoomData.zoomLevel - 1}); }); // Update the zoom level of the map on mousewheel if (self.options.map.zoom.mousewheel) { self.$map.on("mousewheel." + pluginName, function (e) { var offset = self.$map.offset(); var initFactor = (self.options.map.width) ? (self.mapConf.width / self.options.map.width) : (self.mapConf.width / self.$map.width()); var zoomLevel = (e.deltaY > 0) ? 
1 : -1; var zoomFactor = 1 / (1 + (self.zoomData.zoomLevel) * self.options.map.zoom.step); var x = zoomFactor * initFactor * (e.clientX + $(window).scrollLeft() - offset.left) + self.zoomData.panX; var y = zoomFactor * initFactor * (e.clientY + $(window).scrollTop() - offset.top) + self.zoomData.panY; self.$container.trigger("zoom." + pluginName, { "fixedCenter": true, "level": self.zoomData.zoomLevel + zoomLevel, "x": x, "y": y }); return false; }); } // Update the zoom level of the map on touch pinch if (self.options.map.zoom.touch) { self.$map.on("touchstart." + pluginName, function (e) { if (e.originalEvent.touches.length === 2) { self.zoomCenterX = (e.originalEvent.touches[0].clientX + e.originalEvent.touches[1].clientX) / 2; self.zoomCenterY = (e.originalEvent.touches[0].clientY + e.originalEvent.touches[1].clientY) / 2; self.previousPinchDist = Math.sqrt(Math.pow((e.originalEvent.touches[1].clientX - e.originalEvent.touches[0].clientX), 2) + Math.pow((e.originalEvent.touches[1].clientY - e.originalEvent.touches[0].clientY), 2)); } }); self.$map.on("touchmove." + pluginName, function (e) { var offset = 0; var initFactor = 0; var zoomFactor = 0; var x = 0; var y = 0; var pinchDist = 0; var zoomLevel = 0; if (e.originalEvent.touches.length === 2) { pinchDist = Math.sqrt(Math.pow((e.originalEvent.touches[1].clientX - e.originalEvent.touches[0].clientX), 2) + Math.pow((e.originalEvent.touches[1].clientY - e.originalEvent.touches[0].clientY), 2)); if (Math.abs(pinchDist - self.previousPinchDist) > 15) { offset = self.$map.offset(); initFactor = (self.options.map.width) ? 
(self.mapConf.width / self.options.map.width) : (self.mapConf.width / self.$map.width()); zoomFactor = 1 / (1 + (self.zoomData.zoomLevel) * self.options.map.zoom.step); x = zoomFactor * initFactor * (self.zoomCenterX + $(window).scrollLeft() - offset.left) + self.zoomData.panX; y = zoomFactor * initFactor * (self.zoomCenterY + $(window).scrollTop() - offset.top) + self.zoomData.panY; zoomLevel = (pinchDist - self.previousPinchDist) / Math.abs(pinchDist - self.previousPinchDist); self.$container.trigger("zoom." + pluginName, { "fixedCenter": true, "level": self.zoomData.zoomLevel + zoomLevel, "x": x, "y": y }); self.previousPinchDist = pinchDist; } return false; } }); } // Panning $("body").on("mouseup." + pluginName + (zoomOptions.touch ? " touchend" : ""), function () { mousedown = false; setTimeout(function () { self.panning = false; }, 50); }); self.$map.on("mousedown." + pluginName + (zoomOptions.touch ? " touchstart" : ""), function (e) { if (e.pageX !== undefined) { mousedown = true; previousX = e.pageX; previousY = e.pageY; } else { if (e.originalEvent.touches.length === 1) { mousedown = true; previousX = e.originalEvent.touches[0].pageX; previousY = e.originalEvent.touches[0].pageY; } } }).on("mousemove." + pluginName + (zoomOptions.touch ? 
" touchmove" : ""), function (e) { var currentLevel = self.zoomData.zoomLevel; var pageX = 0; var pageY = 0; if (e.pageX !== undefined) { pageX = e.pageX; pageY = e.pageY; } else { if (e.originalEvent.touches.length === 1) { pageX = e.originalEvent.touches[0].pageX; pageY = e.originalEvent.touches[0].pageY; } else { mousedown = false; } } if (mousedown && currentLevel !== 0) { var offsetX = (previousX - pageX) / (1 + (currentLevel * zoomOptions.step)) * (mapWidth / self.paper.width); var offsetY = (previousY - pageY) / (1 + (currentLevel * zoomOptions.step)) * (mapHeight / self.paper.height); var panX = Math.min(Math.max(0, self.paper._viewBox[0] + offsetX), (mapWidth - self.paper._viewBox[2])); var panY = Math.min(Math.max(0, self.paper._viewBox[1] + offsetY), (mapHeight - self.paper._viewBox[3])); if (Math.abs(offsetX) > 5 || Math.abs(offsetY) > 5) { $.extend(self.zoomData, { panX: panX, panY: panY, zoomX: panX + self.paper._viewBox[2] / 2, zoomY: panY + self.paper._viewBox[3] / 2 }); self.paper.setViewBox(panX, panY, self.paper._viewBox[2], self.paper._viewBox[3]); clearTimeout(self.panningTO); self.panningTO = setTimeout(function () { self.$map.trigger("afterPanning", { x1: panX, y1: panY, x2: (panX + self.paper._viewBox[2]), y2: (panY + self.paper._viewBox[3]) }); }, 150); previousX = pageX; previousY = pageY; self.panning = true; } return false; } }); }, /* * Zoom on the map at a specific level focused on specific coordinates * If no coordinates are specified, the zoom will be focused on the center of the map * options : * "level" : level of the zoom between 0 and maxLevel * "x" or "latitude" : x coordinate or latitude of the point to focus on * "y" or "longitude" : y coordinate or longitude of the point to focus on * "fixedCenter" : set to true in order to preserve the position of x,y in the canvas when zoomed * "animDuration" : zoom duration */ onZoomEvent: function (e, zoomOptions) { var self = this; var newLevel = Math.min(Math.max(zoomOptions.level, 0), 
self.options.map.zoom.maxLevel); var panX = 0; var panY = 0; var previousZoomLevel = (1 + self.zoomData.zoomLevel * self.options.map.zoom.step); var zoomLevel = (1 + newLevel * self.options.map.zoom.step); var animDuration = (zoomOptions.animDuration !== undefined) ? zoomOptions.animDuration : self.options.map.zoom.animDuration; var offsetX = 0; var offsetY = 0; var coords = {}; if (zoomOptions.latitude !== undefined && zoomOptions.longitude !== undefined) { coords = self.mapConf.getCoords(zoomOptions.latitude, zoomOptions.longitude); zoomOptions.x = coords.x; zoomOptions.y = coords.y; } if (zoomOptions.x === undefined) zoomOptions.x = self.paper._viewBox[0] + self.paper._viewBox[2] / 2; if (zoomOptions.y === undefined) zoomOptions.y = (self.paper._viewBox[1] + self.paper._viewBox[3] / 2); if (newLevel === 0) { panX = 0; panY = 0; } else if (zoomOptions.fixedCenter !== undefined && zoomOptions.fixedCenter === true) { offsetX = self.zoomData.panX + ((zoomOptions.x - self.zoomData.panX) * (zoomLevel - previousZoomLevel)) / zoomLevel; offsetY = self.zoomData.panY + ((zoomOptions.y - self.zoomData.panY) * (zoomLevel - previousZoomLevel)) / zoomLevel; panX = Math.min(Math.max(0, offsetX), (self.mapConf.width - (self.mapConf.width / zoomLevel))); panY = Math.min(Math.max(0, offsetY), (self.mapConf.height - (self.mapConf.height / zoomLevel))); } else { panX = Math.min(Math.max(0, zoomOptions.x - (self.mapConf.width / zoomLevel) / 2), (self.mapConf.width - (self.mapConf.width / zoomLevel))); panY = Math.min(Math.max(0, zoomOptions.y - (self.mapConf.height / zoomLevel) / 2), (self.mapConf.height - (self.mapConf.height / zoomLevel))); } // Update zoom level of the map if (zoomLevel == previousZoomLevel && panX == self.zoomData.panX && panY == self.zoomData.panY) return; if (animDuration > 0) { self.animateViewBox(panX, panY, self.mapConf.width / zoomLevel, self.mapConf.height / zoomLevel, animDuration, self.options.map.zoom.animEasing); } else { self.paper.setViewBox(panX, 
panY, self.mapConf.width / zoomLevel, self.mapConf.height / zoomLevel); clearTimeout(self.zoomTO); self.zoomTO = setTimeout(function () { self.$map.trigger("afterZoom", { x1: panX, y1: panY, x2: (panX + (self.mapConf.width / zoomLevel)), y2: (panY + (self.mapConf.height / zoomLevel)) }); }, 150); } $.extend(self.zoomData, { zoomLevel: newLevel, panX: panX, panY: panY, zoomX: panX + self.paper._viewBox[2] / 2, zoomY: panY + self.paper._viewBox[3] / 2 }); }, /* * Show some element in range defined by user * Triggered by user $(".mapcontainer").trigger("showElementsInRange", [opt]); * * @param opt the options * opt.hiddenOpacity opacity for hidden element (default = 0.3) * opt.animDuration animation duration in ms (default = 0) * opt.afterShowRange callback * opt.ranges the range to show: * Example: * opt.ranges = { * 'plot' : { * 0 : { // valueIndex * 'min': 1000, * 'max': 1200 * }, * 1 : { // valueIndex * 'min': 10, * 'max': 12 * } * }, * 'area' : { * {'min': 10, 'max': 20} // No valueIndex, only an object, use 0 as valueIndex (easy case) * } * } */ onShowElementsInRange: function(e, opt) { var self = this; // set animDuration to default if not defined if (opt.animDuration === undefined) { opt.animDuration = 0; } // set hiddenOpacity to default if not defined if (opt.hiddenOpacity === undefined) { opt.hiddenOpacity = 0.3; } // handle area if (opt.ranges && opt.ranges.area) { self.showElemByRange(opt.ranges.area, self.areas, opt.hiddenOpacity, opt.animDuration); } // handle plot if (opt.ranges && opt.ranges.plot) { self.showElemByRange(opt.ranges.plot, self.plots, opt.hiddenOpacity, opt.animDuration); } // handle link if (opt.ranges && opt.ranges.link) { self.showElemByRange(opt.ranges.link, self.links, opt.hiddenOpacity, opt.animDuration); } // Call user callback if (opt.afterShowRange) opt.afterShowRange(); }, /* * Show some element in range * @param ranges: the ranges * @param elems: list of element on which to check against previous range * @hiddenOpacity: the 
opacity when hidden * @animDuration: the animation duration */ showElemByRange: function(ranges, elems, hiddenOpacity, animDuration) { var self = this; // Hold the final opacity value for all elements consolidated after applying each ranges // This allow to set the opacity only once for each elements var elemsFinalOpacity = {}; // set object with one valueIndex to 0 if we have directly the min/max if (ranges.min !== undefined || ranges.max !== undefined) { ranges = {0: ranges}; } // Loop through each valueIndex $.each(ranges, function (valueIndex) { var range = ranges[valueIndex]; // Check if user defined at least a min or max value if (range.min === undefined && range.max === undefined) { return true; // skip this iteration (each loop), goto next range } // Loop through each elements $.each(elems, function (id) { var elemValue = elems[id].value; // set value with one valueIndex to 0 if not object if (typeof elemValue !== "object") { elemValue = [elemValue]; } // Check existence of this value index if (elemValue[valueIndex] === undefined) { return true; // skip this iteration (each loop), goto next element } // Check if in range if ((range.min !== undefined && elemValue[valueIndex] < range.min) || (range.max !== undefined && elemValue[valueIndex] > range.max)) { // Element not in range elemsFinalOpacity[id] = hiddenOpacity; } else { // Element in range elemsFinalOpacity[id] = 1; } }); }); // Now that we looped through all ranges, we can really assign the final opacity $.each(elemsFinalOpacity, function (id) { self.setElementOpacity(elems[id], elemsFinalOpacity[id], animDuration); }); }, /* * Set element opacity * Handle elem.mapElem and elem.textElem * @param elem the element * @param opacity the opacity to apply * @param animDuration the animation duration to use */ setElementOpacity: function(elem, opacity, animDuration) { // Ensure no animation is running elem.mapElem.stop(); if (elem.textElem) elem.textElem.stop(); // If final opacity is not null, ensure 
element is shown before proceeding if (opacity > 0) { elem.mapElem.show(); if (elem.textElem) elem.textElem.show(); } if (animDuration > 0) { // Animate attribute elem.mapElem.animate({"opacity": opacity}, animDuration, "linear", function () { // If final attribute is 0, hide if (opacity === 0) elem.mapElem.hide(); }); // Handle text element if (elem.textElem) { // Animate attribute elem.textElem.animate({"opacity": opacity}, animDuration, "linear", function () { // If final attribute is 0, hide if (opacity === 0) elem.textElem.hide(); }); } } else { // Set attribute elem.mapElem.attr({"opacity": opacity}); // For extrem opacity, hide or show if (opacity === 0) elem.mapElem.hide(); else if (opacity === 1) elem.mapElem.show(); // Handle text elemen if (elem.textElem) { // Set attribute elem.textElem.attr({"opacity": opacity}); // For extrem opacity, hide or show if (opacity === 0) elem.textElem.hide(); else if (opacity === 1) elem.textElem.show(); } } }, /* * * Update the current map * Refresh attributes and tooltips for areas and plots * @param opt option for the refresh : * opt.mapOptions: options to update for plots and areas * opt.replaceOptions: whether mapsOptions should entirely replace current map options, or just extend it * opt.opt.newPlots new plots to add to the map * opt.newLinks new links to add to the map * opt.deletePlotKeys plots to delete from the map (array, or "all" to remove all plots) * opt.deleteLinkKeys links to remove from the map (array, or "all" to remove all links) * opt.setLegendElemsState the state of legend elements to be set : show (default) or hide * opt.animDuration animation duration in ms (default = 0) * opt.afterUpdate Hook that allows to add custom processing on the map */ onUpdateEvent: function (e, opt) { var self = this; // Abort if opt is undefined if (typeof opt !== "object") return; var i = 0; var animDuration = (opt.animDuration) ? 
opt.animDuration : 0;

// Removes an element, with animation when animDuration > 0.
// Used for deletePlotKeys and deleteLinkKeys.
var fnRemoveElement = function (elem) {
    // Unset all event handlers
    self.unsetHover(elem.mapElem, elem.textElem);
    if (animDuration > 0) {
        elem.mapElem.animate({"opacity": 0}, animDuration, "linear", function () {
            elem.mapElem.remove();
        });
        if (elem.textElem) {
            elem.textElem.animate({"opacity": 0}, animDuration, "linear", function () {
                elem.textElem.remove();
            });
        }
    } else {
        elem.mapElem.remove();
        if (elem.textElem) {
            elem.textElem.remove();
        }
    }
};

// Shows an element using animation.
// Used for newPlots and newLinks.
var fnShowElement = function (elem) {
    // Starts with hidden elements
    elem.mapElem.attr({opacity: 0});
    if (elem.textElem) elem.textElem.attr({opacity: 0});
    // Set final element opacity
    self.setElementOpacity(
        elem,
        (elem.mapElem.originalAttrs.opacity !== undefined) ? elem.mapElem.originalAttrs.opacity : 1,
        animDuration
    );
};

if (typeof opt.mapOptions === "object") {
    if (opt.replaceOptions === true) self.options = self.extendDefaultOptions(opt.mapOptions);
    else $.extend(true, self.options, opt.mapOptions);
    // If we update areas, plots or legend, then reset all legend states to "show"
    if (opt.mapOptions.areas !== undefined || opt.mapOptions.plots !== undefined || opt.mapOptions.legend !== undefined) {
        $("[data-type='elem']", self.$container).each(function (id, elem) {
            if ($(elem).attr('data-hidden') === "1") {
                // Toggle state of element by clicking
                $(elem).trigger("click."
+ pluginName, [false, animDuration]);
            }
        });
    }
}

// Delete plots by key when deletePlotKeys is an array
if (typeof opt.deletePlotKeys === "object") {
    for (; i < opt.deletePlotKeys.length; i++) {
        if (self.plots[opt.deletePlotKeys[i]] !== undefined) {
            fnRemoveElement(self.plots[opt.deletePlotKeys[i]]);
            delete self.plots[opt.deletePlotKeys[i]];
        }
    }
// Delete ALL plots if deletePlotKeys is set to "all"
} else if (opt.deletePlotKeys === "all") {
    $.each(self.plots, function (id, elem) {
        fnRemoveElement(elem);
    });
    // Empty the plots object
    self.plots = {};
}

// Delete links by key when deleteLinkKeys is an array
if (typeof opt.deleteLinkKeys === "object") {
    for (i = 0; i < opt.deleteLinkKeys.length; i++) {
        if (self.links[opt.deleteLinkKeys[i]] !== undefined) {
            fnRemoveElement(self.links[opt.deleteLinkKeys[i]]);
            delete self.links[opt.deleteLinkKeys[i]];
        }
    }
// Delete ALL links if deleteLinkKeys is set to "all"
} else if (opt.deleteLinkKeys === "all") {
    $.each(self.links, function (id, elem) {
        fnRemoveElement(elem);
    });
    // Empty the links object
    self.links = {};
}

// New plots
if (typeof opt.newPlots === "object") {
    $.each(opt.newPlots, function (id) {
        if (self.plots[id] === undefined) {
            self.options.plots[id] = opt.newPlots[id];
            self.plots[id] = self.drawPlot(id);
            if (animDuration > 0) {
                fnShowElement(self.plots[id]);
            }
        }
    });
}

// New links
if (typeof opt.newLinks === "object") {
    var newLinks = self.drawLinksCollection(opt.newLinks);
    $.extend(self.links, newLinks);
    $.extend(self.options.links, opt.newLinks);
    if (animDuration > 0) {
        $.each(newLinks, function (id) {
            fnShowElement(newLinks[id]);
        });
    }
}

// Update area attributes and tooltips
$.each(self.areas, function (id) {
    var elemOptions = self.getElemOptions(
        self.options.map.defaultArea,
        (self.options.areas[id] ?
self.options.areas[id] : {}),
        self.options.legend.area
    );
    self.updateElem(elemOptions, self.areas[id], animDuration);
});

// Update plot attributes and tooltips
$.each(self.plots, function (id) {
    var elemOptions = self.getElemOptions(
        self.options.map.defaultPlot,
        (self.options.plots[id] ? self.options.plots[id] : {}),
        self.options.legend.plot
    );
    if (elemOptions.type == "square") {
        elemOptions.attrs.width = elemOptions.size;
        elemOptions.attrs.height = elemOptions.size;
        // Keep the square centered on its previous position when resizing
        elemOptions.attrs.x = self.plots[id].mapElem.attrs.x - (elemOptions.size - self.plots[id].mapElem.attrs.width) / 2;
        elemOptions.attrs.y = self.plots[id].mapElem.attrs.y - (elemOptions.size - self.plots[id].mapElem.attrs.height) / 2;
    } else if (elemOptions.type == "image") {
        elemOptions.attrs.width = elemOptions.width;
        elemOptions.attrs.height = elemOptions.height;
        // Keep the image centered on its previous position when resizing
        elemOptions.attrs.x = self.plots[id].mapElem.attrs.x - (elemOptions.width - self.plots[id].mapElem.attrs.width) / 2;
        elemOptions.attrs.y = self.plots[id].mapElem.attrs.y - (elemOptions.height - self.plots[id].mapElem.attrs.height) / 2;
    } else {
        // Default : circle
        elemOptions.attrs.r = elemOptions.size / 2;
    }
    self.updateElem(elemOptions, self.plots[id], animDuration);
});

// Update link attributes and tooltips
$.each(self.links, function (id) {
    var elemOptions = self.getElemOptions(
        self.options.map.defaultLink,
        (self.options.links[id] ? self.options.links[id] : {}),
        {}
    );
    self.updateElem(elemOptions, self.links[id], animDuration);
});

// Update legends
if (opt.mapOptions && typeof opt.mapOptions.legend === "object") {
    self.createLegends("area", self.areas, 1);
    if (self.options.map.width) {
        self.createLegends("plot", self.plots, (self.options.map.width / self.mapConf.width));
    } else {
        self.createLegends("plot", self.plots, (self.$map.width() / self.mapConf.width));
    }
}

// Hide/Show all elements based on showlegendElems
// Toggle (i.e.
// click) only if:
// - slice legend is shown AND we want to hide
// - slice legend is hidden AND we want to show
if (typeof opt.setLegendElemsState === "object") {
    // setLegendElemsState is an object listing the legends we want to hide/show
    $.each(opt.setLegendElemsState, function (legendCSSClass, action) {
        // Search for the legend
        var $legend = self.$container.find("." + legendCSSClass)[0];
        if ($legend !== undefined) {
            // Select all elems inside this legend
            $("[data-type='elem']", $legend).each(function (id, elem) {
                if (($(elem).attr('data-hidden') === "0" && action === "hide") ||
                    ($(elem).attr('data-hidden') === "1" && action === "show")) {
                    // Toggle state of element by clicking
                    $(elem).trigger("click." + pluginName, [false, animDuration]);
                }
            });
        }
    });
} else {
    // setLegendElemsState is a string, or is undefined
    // Default : "show"
    var action = (opt.setLegendElemsState === "hide") ? "hide" : "show";
    $("[data-type='elem']", self.$container).each(function (id, elem) {
        if (($(elem).attr('data-hidden') === "0" && action === "hide") ||
            ($(elem).attr('data-hidden') === "1" && action === "show")) {
            // Toggle state of element by clicking
            $(elem).trigger("click."
+ pluginName, [false, animDuration]); } }); } if (opt.afterUpdate) opt.afterUpdate(self.$container, self.paper, self.areas, plots, self.options); }, /* * Draw all links between plots on the paper */ drawLinksCollection: function (linksCollection) { var self = this; var p1 = {}; var p2 = {}; var coordsP1 = {}; var coordsP2 = {}; var links = {}; $.each(linksCollection, function (id) { var elemOptions = self.getElemOptions(self.options.map.defaultLink, linksCollection[id], {}); if (typeof linksCollection[id].between[0] == 'string') { p1 = self.options.plots[linksCollection[id].between[0]]; } else { p1 = linksCollection[id].between[0]; } if (typeof linksCollection[id].between[1] == 'string') { p2 = self.options.plots[linksCollection[id].between[1]]; } else { p2 = linksCollection[id].between[1]; } if (p1.latitude !== undefined && p1.longitude !== undefined) { coordsP1 = self.mapConf.getCoords(p1.latitude, p1.longitude); } else { coordsP1.x = p1.x; coordsP1.y = p1.y; } if (p2.latitude !== undefined && p2.longitude !== undefined) { coordsP2 = self.mapConf.getCoords(p2.latitude, p2.longitude); } else { coordsP2.x = p2.x; coordsP2.y = p2.y; } links[id] = self.drawLink(id, coordsP1.x, coordsP1.y, coordsP2.x, coordsP2.y, elemOptions); }); return links; }, /* * Draw a curved link between two couples of coordinates a(xa,ya) and b(xb, yb) on the paper */ drawLink: function (id, xa, ya, xb, yb, elemOptions) { var self = this; var elem = {}; // Compute the "curveto" SVG point, d(x,y) // c(xc, yc) is the center of (xa,ya) and (xb, yb) var xc = (xa + xb) / 2; var yc = (ya + yb) / 2; // Equation for (cd) : y = acd * x + bcd (d is the cure point) var acd = -1 / ((yb - ya) / (xb - xa)); var bcd = yc - acd * xc; // dist(c,d) = dist(a,b) (=abDist) var abDist = Math.sqrt((xb - xa) * (xb - xa) + (yb - ya) * (yb - ya)); // Solution for equation dist(cd) = sqrt((xd - xc)² + (yd - yc)²) // dist(c,d)² = (xd - xc)² + (yd - yc)² // We assume that dist(c,d) = dist(a,b) // so : (xd - xc)² + (yd - 
// yc)² - dist(a,b)² = 0
// With the factor : (xd - xc)² + (yd - yc)² - (factor*dist(a,b))² = 0
// (xd - xc)² + (acd*xd + bcd - yc)² - (factor*dist(a,b))² = 0
var a = 1 + acd * acd;
var b = -2 * xc + 2 * acd * bcd - 2 * acd * yc;
var c = xc * xc + bcd * bcd - bcd * yc - yc * bcd + yc * yc - ((elemOptions.factor * abDist) * (elemOptions.factor * abDist));
var delta = b * b - 4 * a * c;
var x = 0;
var y = 0;

// There are two solutions; we choose one or the other depending on the sign of the factor
if (elemOptions.factor > 0) {
    x = (-b + Math.sqrt(delta)) / (2 * a);
    y = acd * x + bcd;
} else {
    x = (-b - Math.sqrt(delta)) / (2 * a);
    y = acd * x + bcd;
}

elem.mapElem = self.paper.path("m " + xa + "," + ya + " C " + x + "," + y + " " + xb + "," + yb + " " + xb + "," + yb + "").attr(elemOptions.attrs);
self.initElem(elem, elemOptions, id);
return elem;
},

/*
 * Update the element "elem" on the map with the new elemOptions options
 */
updateElem: function (elemOptions, elem, animDuration) {
    var self = this;
    var bbox;
    var textPosition;
    var plotOffsetX;
    var plotOffsetY;

    if (elemOptions.value !== undefined) elem.value = elemOptions.value;

    // Update the label
    if (elem.textElem) {
        if (elemOptions.text !== undefined && elemOptions.text.content !== undefined && elemOptions.text.content != elem.textElem.attrs.text)
            elem.textElem.attr({text: elemOptions.text.content});
        bbox = elem.mapElem.getBBox();
        // Grow the bbox to the configured plot size so the label is positioned
        // relative to the final (resized) element
        if (elemOptions.size || (elemOptions.width && elemOptions.height)) {
            if (elemOptions.type == "image" || elemOptions.type == "svg") {
                plotOffsetX = (elemOptions.width - bbox.width) / 2;
                plotOffsetY = (elemOptions.height - bbox.height) / 2;
            } else {
                plotOffsetX = (elemOptions.size - bbox.width) / 2;
                plotOffsetY = (elemOptions.size - bbox.height) / 2;
            }
            bbox.x -= plotOffsetX;
            bbox.x2 += plotOffsetX;
            bbox.y -= plotOffsetY;
            bbox.y2 += plotOffsetY;
        }
        textPosition = self.getTextPosition(bbox, elemOptions.text.position, elemOptions.text.margin);
        if (textPosition.x != elem.textElem.attrs.x ||
textPosition.y != elem.textElem.attrs.y) {
            if (animDuration > 0) {
                elem.textElem.attr({"text-anchor": textPosition.textAnchor});
                elem.textElem.animate({x: textPosition.x, y: textPosition.y}, animDuration);
            } else
                elem.textElem.attr({
                    x: textPosition.x,
                    y: textPosition.y,
                    "text-anchor": textPosition.textAnchor
                });
        }
        self.setHoverOptions(elem.textElem, elemOptions.text.attrs, elemOptions.text.attrsHover);
        if (animDuration > 0) elem.textElem.animate(elemOptions.text.attrs, animDuration);
        else elem.textElem.attr(elemOptions.text.attrs);
    }

    // Update element attrs and attrsHover
    self.setHoverOptions(elem.mapElem, elemOptions.attrs, elemOptions.attrsHover);
    if (animDuration > 0) elem.mapElem.animate(elemOptions.attrs, animDuration);
    else elem.mapElem.attr(elemOptions.attrs);

    // Update dimensions of SVG plots
    if (elemOptions.type == "svg") {
        elem.mapElem.transform("m" + (elemOptions.width / elem.mapElem.originalWidth) + ",0,0," + (elemOptions.height / elem.mapElem.originalHeight) + "," + bbox.x + "," + bbox.y);
    }

    // Update the tooltip
    if (elemOptions.tooltip) {
        // Attach the tooltip handlers only once per element
        if (elem.mapElem.tooltip === undefined) {
            self.setTooltip(elem.mapElem);
            if (elem.textElem) self.setTooltip(elem.textElem);
        }
        elem.mapElem.tooltip = elemOptions.tooltip;
        if (elem.textElem) elem.textElem.tooltip = elemOptions.tooltip;
    }

    // Update the link
    if (elemOptions.href !== undefined) {
        // Attach the click handler only once per element
        if (elem.mapElem.href === undefined) {
            self.setHref(elem.mapElem);
            if (elem.textElem) self.setHref(elem.textElem);
        }
        elem.mapElem.href = elemOptions.href;
        elem.mapElem.target = elemOptions.target;
        if (elem.textElem) {
            elem.textElem.href = elemOptions.href;
            elem.textElem.target = elemOptions.target;
        }
    }
},

/*
 * Draw the plot
 */
drawPlot: function (id) {
    var self = this;
    var plot = {};
    var coords = {};

    var elemOptions = self.getElemOptions(
        self.options.map.defaultPlot,
        (self.options.plots[id] ?
self.options.plots[id] : {}),
        self.options.legend.plot
    );

    // Coordinates are given either directly (x/y) or as latitude/longitude
    if (elemOptions.x !== undefined && elemOptions.y !== undefined)
        coords = {x: elemOptions.x, y: elemOptions.y};
    else
        coords = self.mapConf.getCoords(elemOptions.latitude, elemOptions.longitude);

    if (elemOptions.type == "square") {
        plot = {
            "mapElem": self.paper.rect(
                coords.x - (elemOptions.size / 2),
                coords.y - (elemOptions.size / 2),
                elemOptions.size,
                elemOptions.size
            ).attr(elemOptions.attrs)
        };
    } else if (elemOptions.type == "image") {
        plot = {
            "mapElem": self.paper.image(
                elemOptions.url,
                coords.x - elemOptions.width / 2,
                coords.y - elemOptions.height / 2,
                elemOptions.width,
                elemOptions.height
            ).attr(elemOptions.attrs)
        };
    } else if (elemOptions.type == "svg") {
        plot = {"mapElem": self.paper.path(elemOptions.path).attr(elemOptions.attrs)};
        // Remember the untransformed size so a later resize can recompute the scale
        plot.mapElem.originalWidth = plot.mapElem.getBBox().width;
        plot.mapElem.originalHeight = plot.mapElem.getBBox().height;
        plot.mapElem.transform("m" + (elemOptions.width / plot.mapElem.originalWidth) + ",0,0," + (elemOptions.height / plot.mapElem.originalHeight) + "," + (coords.x - elemOptions.width / 2) + "," + (coords.y - elemOptions.height / 2));
    } else { // Default = circle
        plot = {"mapElem": self.paper.circle(coords.x, coords.y, elemOptions.size / 2).attr(elemOptions.attrs)};
    }

    self.initElem(plot, elemOptions, id);
    return plot;
},

/*
 * Set target link on elem
 */
setHref: function (elem) {
    var self = this;
    elem.attr({cursor: "pointer"});
    $(elem.node).on("click."
+ pluginName, function () {
        // Ignore the click when it ends a pan gesture
        if (!self.panning && elem.href) window.open(elem.href, elem.target);
    });
},

/*
 * Set a tooltip for the areas and plots
 * @param elem area or plot element
 * @param content the content to set in the tooltip
 */
setTooltip: function (elem) {
    var self = this;
    var tooltipTO = 0;
    var cssClass = self.$tooltip.attr('class');

    // Keep the tooltip inside the map boundaries unless the overflow option allows otherwise
    var updateTooltipPosition = function (x, y) {
        var tooltipPosition = {
            "left": Math.min(self.$map.width() - self.$tooltip.outerWidth() - 5, x - self.$map.offset().left + 10),
            "top": Math.min(self.$map.height() - self.$tooltip.outerHeight() - 5, y - self.$map.offset().top + 20)
        };
        if (elem.tooltip.overflow !== undefined) {
            if (elem.tooltip.overflow.right !== undefined && elem.tooltip.overflow.right === true) {
                tooltipPosition.left = x - self.$map.offset().left + 10;
            }
            if (elem.tooltip.overflow.bottom !== undefined && elem.tooltip.overflow.bottom === true) {
                tooltipPosition.top = y - self.$map.offset().top + 20;
            }
        }
        self.$tooltip.css(tooltipPosition);
    };

    $(elem.node).on("mouseover." + pluginName, function (e) {
        // Small delay avoids showing the tooltip while quickly crossing elements
        tooltipTO = setTimeout(
            function () {
                self.$tooltip.attr("class", cssClass);
                if (elem.tooltip !== undefined) {
                    if (elem.tooltip.content !== undefined) {
                        // if tooltip.content is a function, call it. Otherwise, assign it directly.
                        var content = (typeof elem.tooltip.content === "function") ? elem.tooltip.content(elem) : elem.tooltip.content;
                        self.$tooltip.html(content).css("display", "block");
                    }
                    if (elem.tooltip.cssClass !== undefined) {
                        self.$tooltip.addClass(elem.tooltip.cssClass);
                    }
                }
                updateTooltipPosition(e.pageX, e.pageY);
            },
            120
        );
    }).on("mouseout." + pluginName, function () {
        clearTimeout(tooltipTO);
        self.$tooltip.css("display", "none");
    }).on("mousemove."
+ pluginName, function (e) {
        updateTooltipPosition(e.pageX, e.pageY);
    });
},

/*
 * Set user defined handlers for events on areas and plots
 * @param id the id of the element
 * @param elemOptions the element parameters
 * @param mapElem the map element to set callback on
 * @param textElem the optional text within the map element
 */
setEventHandlers: function (id, elemOptions, mapElem, textElem) {
    var self = this;
    $.each(elemOptions.eventHandlers, function (event) {
        // IIFE captures the current event name for the handlers below
        (function (event) {
            $(mapElem.node).on(event, function (e) {
                if (!self.panning) elemOptions.eventHandlers[event](e, id, mapElem, textElem, elemOptions);
            });
            if (textElem) {
                $(textElem.node).on(event, function (e) {
                    if (!self.panning) elemOptions.eventHandlers[event](e, id, mapElem, textElem, elemOptions);
                });
            }
        })(event);
    });
},

/*
 * Draw a legend for areas and / or plots
 * @param legendOptions options for the legend to draw
 * @param legendType the type of the legend : "area" or "plot"
 * @param elems collection of plots or areas on the maps
 * @param scale scale ratio of the map
 * @param legendIndex index of the legend in the conf array
 */
drawLegend: function (legendOptions, legendType, elems, scale, legendIndex) {
    var self = this;
    var $legend = {};
    var legendPaper = {};
    var width = 0;
    var height = 0;
    var title = null;
    var elem = {};
    var elemBBox = {};
    var label = {};
    var i = 0;
    var x = 0;
    var y = 0;
    var yCenter = 0;
    var sliceAttrs = [];
    var length = 0;

    $legend = $("."
+ legendOptions.cssClass, self.$container).empty();
    legendPaper = new Raphael($legend.get(0));
    height = width = 0;

    // Set the title of the legend
    if (legendOptions.title && legendOptions.title !== "") {
        title = legendPaper.text(legendOptions.marginLeftTitle, 0, legendOptions.title).attr(legendOptions.titleAttrs);
        title.attr({y: 0.5 * title.getBBox().height});
        width = legendOptions.marginLeftTitle + title.getBBox().width;
        height += legendOptions.marginBottomTitle + title.getBBox().height;
    }

    // Calculate attrs (and width, height and r (radius)) for legend elements, and yCenter for horizontal legends
    for (i = 0, length = legendOptions.slices.length; i < length; ++i) {
        var yCenterCurrent = 0;
        // Check if size is defined. If not, take defaultPlot size
        if (legendOptions.slices[i].size === undefined)
            legendOptions.slices[i].size = self.options.map.defaultPlot.size;
        if (legendOptions.slices[i].legendSpecificAttrs === undefined)
            legendOptions.slices[i].legendSpecificAttrs = {};
        sliceAttrs[i] = $.extend(
            {},
            (legendType == "plot") ?
self.options.map.defaultPlot.attrs : self.options.map.defaultArea.attrs,
            legendOptions.slices[i].attrs,
            legendOptions.slices[i].legendSpecificAttrs
        );
        // Fill in missing dimensions depending on the slice type
        if (legendType == "area") {
            if (sliceAttrs[i].width === undefined) sliceAttrs[i].width = 30;
            if (sliceAttrs[i].height === undefined) sliceAttrs[i].height = 20;
        } else if (legendOptions.slices[i].type == "square") {
            if (sliceAttrs[i].width === undefined) sliceAttrs[i].width = legendOptions.slices[i].size;
            if (sliceAttrs[i].height === undefined) sliceAttrs[i].height = legendOptions.slices[i].size;
        } else if (legendOptions.slices[i].type == "image" || legendOptions.slices[i].type == "svg") {
            if (sliceAttrs[i].width === undefined) sliceAttrs[i].width = legendOptions.slices[i].width;
            if (sliceAttrs[i].height === undefined) sliceAttrs[i].height = legendOptions.slices[i].height;
        } else {
            if (sliceAttrs[i].r === undefined) sliceAttrs[i].r = legendOptions.slices[i].size / 2;
        }

        // Compute yCenter for this legend slice
        yCenterCurrent = legendOptions.marginBottomTitle;
        // Add title height if it exists
        if (title) {
            yCenterCurrent += title.getBBox().height;
        }
        if (legendType == "plot" && (legendOptions.slices[i].type === undefined || legendOptions.slices[i].type == "circle")) {
            yCenterCurrent += scale * sliceAttrs[i].r;
        } else {
            yCenterCurrent += scale * sliceAttrs[i].height / 2;
        }
        // Update yCenter if current is larger
        yCenter = Math.max(yCenter, yCenterCurrent);
    }

    if (legendOptions.mode == "horizontal") {
        width = legendOptions.marginLeft;
    }

    // Draw legend elements (circle, square or image in vertical or horizontal mode)
    for (i = 0, length = legendOptions.slices.length; i < length; ++i) {
        if (legendOptions.slices[i].display === undefined || legendOptions.slices[i].display === true) {
            if (legendType == "area") {
                if (legendOptions.mode == "horizontal") {
                    x = width + legendOptions.marginLeft;
                    y = yCenter - (0.5 * scale * sliceAttrs[i].height);
                } else {
                    x = legendOptions.marginLeft;
                    y = height;
                }
                elem = legendPaper.rect(x, y, scale *
(sliceAttrs[i].width), scale * (sliceAttrs[i].height));
            } else if (legendOptions.slices[i].type == "square") {
                if (legendOptions.mode == "horizontal") {
                    x = width + legendOptions.marginLeft;
                    y = yCenter - (0.5 * scale * sliceAttrs[i].height);
                } else {
                    x = legendOptions.marginLeft;
                    y = height;
                }
                elem = legendPaper.rect(x, y, scale * (sliceAttrs[i].width), scale * (sliceAttrs[i].height));
            } else if (legendOptions.slices[i].type == "image" || legendOptions.slices[i].type == "svg") {
                if (legendOptions.mode == "horizontal") {
                    x = width + legendOptions.marginLeft;
                    y = yCenter - (0.5 * scale * sliceAttrs[i].height);
                } else {
                    x = legendOptions.marginLeft;
                    y = height;
                }
                if (legendOptions.slices[i].type == "image") {
                    elem = legendPaper.image(
                        legendOptions.slices[i].url, x, y, scale * sliceAttrs[i].width, scale * sliceAttrs[i].height);
                } else {
                    elem = legendPaper.path(legendOptions.slices[i].path);
                    // Scale the SVG path to the requested slice dimensions
                    elem.transform("m" + ((scale * legendOptions.slices[i].width) / elem.getBBox().width) + ",0,0," + ((scale * legendOptions.slices[i].height) / elem.getBBox().height) + "," + x + "," + y);
                }
            } else {
                if (legendOptions.mode == "horizontal") {
                    x = width + legendOptions.marginLeft + scale * (sliceAttrs[i].r);
                    y = yCenter;
                } else {
                    x = legendOptions.marginLeft + scale * (sliceAttrs[i].r);
                    y = height + scale * (sliceAttrs[i].r);
                }
                elem = legendPaper.circle(x, y, scale * (sliceAttrs[i].r));
            }

            // Set attrs to the element drawn above
            delete sliceAttrs[i].width;
            delete sliceAttrs[i].height;
            delete sliceAttrs[i].r;
            elem.attr(sliceAttrs[i]);
            elemBBox = elem.getBBox();

            // Draw the label associated with the element
            if (legendOptions.mode == "horizontal") {
                x = width + legendOptions.marginLeft + elemBBox.width + legendOptions.marginLeftLabel;
                y = yCenter;
            } else {
                x = legendOptions.marginLeft + elemBBox.width + legendOptions.marginLeftLabel;
                y = height + (elemBBox.height / 2);
            }
            label = legendPaper.text(x, y, legendOptions.slices[i].label).attr(legendOptions.labelAttrs);

            // Update the width and
// height for the paper
            if (legendOptions.mode == "horizontal") {
                var currentHeight = legendOptions.marginBottom + elemBBox.height;
                width += legendOptions.marginLeft + elemBBox.width + legendOptions.marginLeftLabel + label.getBBox().width;
                if (legendOptions.slices[i].type != "image" && legendType != "area") {
                    currentHeight += legendOptions.marginBottomTitle;
                }
                // Add title height if it exists
                if (title) {
                    currentHeight += title.getBBox().height;
                }
                height = Math.max(height, currentHeight);
            } else {
                width = Math.max(width, legendOptions.marginLeft + elemBBox.width + legendOptions.marginLeftLabel + label.getBBox().width);
                height += legendOptions.marginBottom + elemBBox.height;
            }

            $(elem.node).attr({"data-type": "elem", "data-index": i, "data-hidden": 0});
            $(label.node).attr({"data-type": "label", "data-index": i, "data-hidden": 0});

            // Hide map elements when the user clicks on a legend item
            if (legendOptions.hideElemsOnClick.enabled) {
                // Hide/show elements when user clicks on a legend element
                label.attr({cursor: "pointer"});
                elem.attr({cursor: "pointer"});
                self.setHoverOptions(elem, sliceAttrs[i], sliceAttrs[i]);
                self.setHoverOptions(label, legendOptions.labelAttrs, legendOptions.labelAttrsHover);
                self.setHover(elem, label);
                self.handleClickOnLegendElem(legendOptions, legendOptions.slices[i], label, elem, elems, legendIndex);
            }
        }
    }

    // VMLWidth option allows you to set a static width for the legend,
    // only for the VML render because text.getBBox() returns wrong values on IE6/7
    if (Raphael.type != "SVG" && legendOptions.VMLWidth)
        width = legendOptions.VMLWidth;

    legendPaper.setSize(width, height);
    return legendPaper;
},

/*
 * Allow to hide elements of the map when the user clicks on a related legend item
 * @param legendOptions options for the legend to draw
 * @param sliceOptions options of the slice
 * @param label label of the legend item
 * @param elem element of the legend item
 * @param elems collection of plots or areas displayed on the map
 * @param legendIndex index of the
legend in the conf array */
handleClickOnLegendElem: function (legendOptions, sliceOptions, label, elem, elems, legendIndex) {
    var self = this;
    var hideMapElems = function (e, hideOtherElems, animDuration) {
        var elemValue = 0;
        var hidden = $(label.node).attr('data-hidden');
        var hiddenNewAttr = (hidden === '0') ? {"data-hidden": '1'} : {"data-hidden": '0'};

        // Check animDuration: if not set, this is a regular click, use the value specified in options
        if (animDuration === undefined) animDuration = legendOptions.hideElemsOnClick.animDuration;

        // Dim the label when hiding, restore it when showing
        if (hidden === '0') {
            if (animDuration > 0) label.animate({"opacity": 0.5}, animDuration);
            else label.attr({"opacity": 0.5});
        } else {
            if (animDuration > 0) label.animate({"opacity": 1}, animDuration);
            else label.attr({"opacity": 1});
        }

        $.each(elems, function (id) {
            // Retrieve stored data of the element
            // 'hidden-by' contains the list of legendIndex that are hiding this element
            var hiddenBy = elems[id].mapElem.data('hidden-by');
            // Set to empty object if undefined
            if (hiddenBy === undefined) hiddenBy = {};

            if ($.isArray(elems[id].value)) {
                elemValue = elems[id].value[legendIndex];
            } else {
                elemValue = elems[id].value;
            }

            // Does this element's value match the clicked slice?
            if ((sliceOptions.sliceValue !== undefined && elemValue == sliceOptions.sliceValue) ||
                ((sliceOptions.sliceValue === undefined) &&
                (sliceOptions.min === undefined || elemValue >= sliceOptions.min) &&
                (sliceOptions.max === undefined || elemValue <= sliceOptions.max))
            ) {
                (function (id) {
                    if (hidden === '0') { // we want to hide this element
                        hiddenBy[legendIndex] = true; // add legendIndex to the data object for later use
                        self.setElementOpacity(elems[id], legendOptions.hideElemsOnClick.opacity, animDuration);
                    } else { // We want to show this element
                        delete hiddenBy[legendIndex]; // Remove this legendIndex from the object
                        // Check if another legendIndex is defined
                        // We will show this element only if no legend is still hiding it
                        if ($.isEmptyObject(hiddenBy)) {
                            self.setElementOpacity(
                                elems[id],
elems[id].mapElem.originalAttrs.opacity !== undefined ? elems[id].mapElem.originalAttrs.opacity : 1,
                                animDuration
                            );
                        }
                    }
                    // Update elem data with new values
                    elems[id].mapElem.data('hidden-by', hiddenBy);
                })(id);
            }
        });

        $(elem.node).attr(hiddenNewAttr);
        $(label.node).attr(hiddenNewAttr);

        // In exclusive mode, hide every other slice's elements
        if ((hideOtherElems === undefined || hideOtherElems === true) &&
            legendOptions.exclusive !== undefined && legendOptions.exclusive === true
        ) {
            $("[data-type='elem'][data-hidden=0]", self.$container).each(function () {
                if ($(this).attr('data-index') !== $(elem.node).attr('data-index')) {
                    $(this).trigger("click." + pluginName, false);
                }
            });
        }
    };
    $(label.node).on("click." + pluginName, hideMapElems);
    $(elem.node).on("click." + pluginName, hideMapElems);

    // Slices configured as initially clicked start hidden
    if (sliceOptions.clicked !== undefined && sliceOptions.clicked === true) {
        $(elem.node).trigger("click." + pluginName, false);
    }
},

/*
 * Create all legends for a specified type (area or plot)
 * @param legendType the type of the legend : "area" or "plot"
 * @param elems collection of plots or areas displayed on the map
 * @param scale scale ratio of the map
 */
createLegends: function (legendType, elems, scale) {
    var self = this;
    var legendsOptions = self.options.legend[legendType];
    var legends = [];

    // Normalize to an array: a single legend conf object is allowed
    if (!$.isArray(self.options.legend[legendType])) {
        legendsOptions = [self.options.legend[legendType]];
    }

    for (var j = 0; j < legendsOptions.length; ++j) {
        // Check for class existence
        if (legendsOptions[j].cssClass === "" || $("."
+ legendsOptions[j].cssClass, self.$container).length === 0) {
            throw new Error("The legend class `" + legendsOptions[j].cssClass + "` doesn't exists.");
        }
        if (legendsOptions[j].display === true && $.isArray(legendsOptions[j].slices) && legendsOptions[j].slices.length > 0) {
            legends.push(self.drawLegend(legendsOptions[j], legendType, elems, scale, j));
        }
    }
    return legends;
},

/*
 * Set the attributes on hover and the attributes to restore for a map element
 * @param elem the map element
 * @param originalAttrs the original attributes to restore on mouseout event
 * @param attrsHover the attributes to set on mouseover event
 */
setHoverOptions: function (elem, originalAttrs, attrsHover) {
    // Disable the transform option on hover for VML (IE<9) because of several bugs
    if (Raphael.type != "SVG") delete attrsHover.transform;
    elem.attrsHover = attrsHover;

    // When hover applies a transform, restore scale 1 on mouseout
    if (elem.attrsHover.transform) elem.originalAttrs = $.extend({transform: "s1"}, originalAttrs);
    else elem.originalAttrs = originalAttrs;
},

/*
 * Set the hover behavior (mouseover & mouseout) for plots and areas
 * @param mapElem the map element
 * @param textElem the optional text element (within the map element)
 */
setHover: function (mapElem, textElem) {
    var self = this;
    var $mapElem = {};
    var $textElem = {};
    var hoverTO = 0;

    // Small delay avoids flickering while the pointer quickly crosses elements
    var overBehaviour = function () {
        hoverTO = setTimeout(function () {
            self.elemHover(mapElem, textElem);
        }, 120);
    };
    var outBehaviour = function () {
        clearTimeout(hoverTO);
        self.elemOut(mapElem, textElem);
    };

    $mapElem = $(mapElem.node);
    $mapElem.on("mouseover." + pluginName, overBehaviour);
    $mapElem.on("mouseout." + pluginName, outBehaviour);

    if (textElem) {
        $textElem = $(textElem.node);
        $textElem.on("mouseover." + pluginName, overBehaviour);
        $(textElem.node).on("mouseout."
+ pluginName, outBehaviour);
    }
},

/*
 * Remove the hover behavior for plots and areas
 * @param mapElem the map element
 * @param textElem the optional text element (within the map element)
 */
unsetHover: function (mapElem, textElem) {
    $(mapElem.node).off("." + pluginName);
    if (textElem) $(textElem.node).off("." + pluginName);
},

/*
 * Set the behaviour for the "mouseover" event
 * @param mapElem mapElem the map element
 * @param textElem the optional text element (within the map element)
 */
elemHover: function (mapElem, textElem) {
    var self = this;
    // Set mapElem
    if (mapElem.attrsHover.animDuration > 0) mapElem.animate(mapElem.attrsHover, mapElem.attrsHover.animDuration);
    else mapElem.attr(mapElem.attrsHover);
    // Set textElem
    if (textElem) {
        if (textElem.attrsHover.animDuration > 0) textElem.animate(textElem.attrsHover, textElem.attrsHover.animDuration);
        else textElem.attr(textElem.attrsHover);
    }
    // workaround for older versions of Raphael
    if (self.paper.safari) self.paper.safari();
},

/*
 * Set the behaviour for the "mouseout" event
 * @param mapElem the map element
 * @param textElem the optional text element (within the map element)
 */
elemOut: function (mapElem, textElem) {
    var self = this;
    // Set mapElem
    if (mapElem.attrsHover.animDuration > 0) mapElem.animate(mapElem.originalAttrs, mapElem.attrsHover.animDuration);
    else mapElem.attr(mapElem.originalAttrs);
    // Set textElem
    if (textElem) {
        if (textElem.attrsHover.animDuration > 0) textElem.animate(textElem.originalAttrs, textElem.attrsHover.animDuration);
        else textElem.attr(textElem.originalAttrs);
    }
    // workaround for older versions of Raphael
    if (self.paper.safari) self.paper.safari();
},

/*
 * Get element options by merging default options, element options and legend options
 * @param defaultOptions
 * @param elemOptions
 * @param legendOptions
 */
getElemOptions: function (defaultOptions, elemOptions, legendOptions) {
    var self = this;
    var options = $.extend(true, {}, defaultOptions, elemOptions);
    if (options.value !== undefined) {
if ($.isArray(legendOptions)) {
            // One legend per value index: merge the matching slice of each legend
            for (var i = 0, length = legendOptions.length; i < length; ++i) {
                options = $.extend(true, {}, options, self.getLegendSlice(options.value[i], legendOptions[i]));
            }
        } else {
            options = $.extend(true, {}, options, self.getLegendSlice(options.value, legendOptions));
        }
    }
    return options;
},

/*
 * Get the coordinates of the text relative to a bbox and a position
 * @param bbox the boundary box of the element
 * @param textPosition the wanted text position (inner, right, left, top or bottom)
 * @param margin the margin between the bbox and the text
 */
getTextPosition: function (bbox, textPosition, margin) {
    var textX = 0;
    var textY = 0;
    var textAnchor = "";
    switch (textPosition) {
        case "bottom" :
            textX = (bbox.x + bbox.x2) / 2;
            textY = bbox.y2 + margin;
            textAnchor = "middle";
            break;
        case "top" :
            textX = (bbox.x + bbox.x2) / 2;
            textY = bbox.y - margin;
            textAnchor = "middle";
            break;
        case "left" :
            textX = bbox.x - margin;
            textY = (bbox.y + bbox.y2) / 2;
            textAnchor = "end";
            break;
        case "right" :
            textX = bbox.x2 + margin;
            textY = (bbox.y + bbox.y2) / 2;
            textAnchor = "start";
            break;
        default : // "inner" position
            textX = (bbox.x + bbox.x2) / 2;
            textY = (bbox.y + bbox.y2) / 2;
            textAnchor = "middle";
    }
    return {"x": textX, "y": textY, "textAnchor": textAnchor};
},

/*
 * Get the legend conf matching with the value
 * @param value the value to match with a slice in the legend
 * @param legend the legend params object
 * @return the legend slice matching with the value
 */
getLegendSlice: function (value, legend) {
    for (var i = 0, length = legend.slices.length; i < length; ++i) {
        if ((legend.slices[i].sliceValue !== undefined && value == legend.slices[i].sliceValue) ||
            ((legend.slices[i].sliceValue === undefined) &&
            (legend.slices[i].min === undefined || value >= legend.slices[i].min) &&
            (legend.slices[i].max === undefined || value <= legend.slices[i].max))
        ) {
            return legend.slices[i];
        }
    }
    return {};
},

/*
 * Animated view box changes
 * As from http://code.voidblossom.com/animating-viewbox-easing-formulas/,
 * (from
https://github.com/theshaun works on mapael) * @param x coordinate of the point to focus on * @param y coordinate of the point to focus on * @param w map defined width * @param h map defined height * @param duration defined length of time for animation * @param easingFunction defined Raphael supported easing_formula to use * @param callback method when animated action is complete */ animateViewBox: function (x, y, w, h, duration, easingFunction) { var self = this; var cx = self.paper._viewBox ? self.paper._viewBox[0] : 0; var dx = x - cx; var cy = self.paper._viewBox ? self.paper._viewBox[1] : 0; var dy = y - cy; var cw = self.paper._viewBox ? self.paper._viewBox[2] : self.paper.width; var dw = w - cw; var ch = self.paper._viewBox ? self.paper._viewBox[3] : self.paper.height; var dh = h - ch; var interval = 25; var steps = duration / interval; var currentStep = 0; var easingFormula; easingFunction = easingFunction || "linear"; easingFormula = Raphael.easing_formulas[easingFunction]; clearInterval(self.animationIntervalID); self.animationIntervalID = setInterval(function () { var ratio = currentStep / steps; self.paper.setViewBox(cx + dx * easingFormula(ratio), cy + dy * easingFormula(ratio), cw + dw * easingFormula(ratio), ch + dh * easingFormula(ratio), false); if (currentStep++ >= steps) { clearInterval(self.animationIntervalID); clearTimeout(self.zoomTO); self.zoomTO = setTimeout(function () { self.$map.trigger("afterZoom", {x1: x, y1: y, x2: (x + w), y2: (y + h)}); }, 150); } }, interval ); }, // Default map options defaultOptions: { map: { cssClass: "map", tooltip: { cssClass: "mapTooltip" }, defaultArea: { attrs: { fill: "#343434", stroke: "#5d5d5d", "stroke-width": 1, "stroke-linejoin": "round" }, attrsHover: { fill: "#f38a03", animDuration: 300 }, text: { position: "inner", margin: 10, attrs: { "font-size": 15, fill: "#c7c7c7" }, attrsHover: { fill: "#eaeaea", "animDuration": 300 } }, target: "_self" }, defaultPlot: { type: "circle", size: 15, attrs: { 
fill: "#0088db", stroke: "#fff", "stroke-width": 0, "stroke-linejoin": "round" }, attrsHover: { "stroke-width": 3, animDuration: 300 }, text: { position: "right", margin: 10, attrs: { "font-size": 15, fill: "#c7c7c7" }, attrsHover: { fill: "#eaeaea", animDuration: 300 } }, target: "_self" }, defaultLink: { factor: 0.5, attrs: { stroke: "#0088db", "stroke-width": 2 }, attrsHover: { animDuration: 300 }, text: { position: "inner", margin: 10, attrs: { "font-size": 15, fill: "#c7c7c7" }, attrsHover: { fill: "#eaeaea", animDuration: 300 } }, target: "_self" }, zoom: { enabled: false, maxLevel: 10, step: 0.25, zoomInCssClass: "zoomIn", zoomOutCssClass: "zoomOut", mousewheel: true, touch: true, animDuration: 200, animEasing: "linear" } }, legend: { area: [], plot: [] }, areas: {}, plots: {}, links: {} }, // Default legends option legendDefaultOptions: { area: { cssClass: "areaLegend", display: true, marginLeft: 10, marginLeftTitle: 5, marginBottomTitle: 10, marginLeftLabel: 10, marginBottom: 10, titleAttrs: { "font-size": 16, fill: "#343434", "text-anchor": "start" }, labelAttrs: { "font-size": 12, fill: "#343434", "text-anchor": "start" }, labelAttrsHover: { fill: "#787878", animDuration: 300 }, hideElemsOnClick: { enabled: true, opacity: 0.2, animDuration: 300 }, slices: [], mode: "vertical" }, plot: { cssClass: "plotLegend", display: true, marginLeft: 10, marginLeftTitle: 5, marginBottomTitle: 10, marginLeftLabel: 10, marginBottom: 10, titleAttrs: { "font-size": 16, fill: "#343434", "text-anchor": "start" }, labelAttrs: { "font-size": 12, fill: "#343434", "text-anchor": "start" }, labelAttrsHover: { fill: "#787878", animDuration: 300 }, hideElemsOnClick: { enabled: true, opacity: 0.2 }, slices: [], mode: "vertical" } } }; // Extend jQuery with Mapael $[pluginName] = Mapael; // Add jQuery DOM function $.fn[pluginName] = function (options) { // Call Mapael on each element return this.each(function () { // Avoid leaking problem on multiple instanciation by removing an old 
mapael object on a container if ($.data(this, pluginName)) { $.data(this, pluginName).destroy(); } // Create Mapael and save it as jQuery data // This allow external access to Mapael using $(".mapcontainer").data("mapael") $.data(this, pluginName, new Mapael(this, options)); }); }; }));
Removed redundant elem.show()
js/jquery.mapael.js
Removed redundant elem.show()
<ide><path>s/jquery.mapael.js <ide> } else { <ide> // Set attribute <ide> elem.mapElem.attr({"opacity": opacity}); <del> // For extrem opacity, hide or show <add> // For null opacity, hide it <ide> if (opacity === 0) elem.mapElem.hide(); <del> else if (opacity === 1) elem.mapElem.show(); <ide> // Handle text elemen <ide> if (elem.textElem) { <ide> // Set attribute <ide> elem.textElem.attr({"opacity": opacity}); <del> // For extrem opacity, hide or show <add> // For null opacity, hide it <ide> if (opacity === 0) elem.textElem.hide(); <del> else if (opacity === 1) elem.textElem.show(); <ide> } <ide> } <ide> },
Java
apache-2.0
a371b409ea09a5ddf38895de9d190eca49df1f7b
0
cloudera-labs/envelope
/* * Copyright (c) 2015-2019, Cloudera, Inc. All Rights Reserved. * * Cloudera, Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"). You may not use this file except in * compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * This software is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for * the specific language governing permissions and limitations under the * License. */ package com.cloudera.labs.envelope.spark; import com.cloudera.labs.envelope.utils.ConfigUtils; import com.typesafe.config.Config; import com.typesafe.config.ConfigFactory; import org.apache.spark.SparkConf; import org.apache.spark.sql.AnalysisException; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import java.util.HashMap; import java.util.Map; import java.util.Properties; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; public class TestContexts { private static final String RESOURCES_PATH = "/spark"; private static final String HADOOP_CONF_PREFIX = Contexts.APPLICATION_SECTION_PREFIX + "." 
+ Contexts.SPARK_CONF_PROPERTY_PREFIX + ".spark.hadoop."; @Before public void setup() { Contexts.closeSparkSession(); } private Map<String, Object> getTestConfigMap() { Map<String, Object> params = new HashMap<>(); params.put(HADOOP_CONF_PREFIX + "javax.jdo.option.ConnectionURL", "jdbc:derby:memory:db;create=true"); params.put(HADOOP_CONF_PREFIX + "hive.exec.scratchdir", "core/target/scratch"); return params; } private Config getTestConfig() { return ConfigFactory.parseMap(getTestConfigMap()); } @Test public void testSparkPassthroughGood() { Config config = ConfigUtils.configFromPath( this.getClass().getResource(RESOURCES_PATH + "/spark-passthrough-good.conf").getPath()); Contexts.initialize(config, Contexts.ExecutionMode.UNIT_TEST); SparkConf sparkConf = Contexts.getSparkSession().sparkContext().getConf(); assertTrue(sparkConf.contains("spark.driver.allowMultipleContexts")); assertEquals("true", sparkConf.get("spark.driver.allowMultipleContexts")); assertTrue(sparkConf.contains("spark.master")); assertEquals("local[1]", sparkConf.get("spark.master")); } @Test public void testApplicationNameProvided() { Properties props = new Properties(); props.setProperty("application.name", "test"); Config config = ConfigFactory.parseProperties(props); Contexts.initialize(config, Contexts.ExecutionMode.UNIT_TEST); SparkConf sparkConf = Contexts.getSparkSession().sparkContext().getConf(); assertEquals(sparkConf.get("spark.app.name"), "test"); } @Test public void testApplicationNameNotProvided() { Config config = ConfigFactory.empty(); Contexts.initialize(config, Contexts.ExecutionMode.UNIT_TEST); SparkConf sparkConf = Contexts.getSparkSession().sparkContext().getConf(); assertEquals(sparkConf.get("spark.app.name"), ""); } @Test public void testDefaultBatchConfiguration() { Config config = getTestConfig(); Contexts.initialize(config, Contexts.ExecutionMode.BATCH); SparkConf sparkConf = Contexts.getSparkSession().sparkContext().getConf(); 
assertTrue(!sparkConf.contains("spark.dynamicAllocation.enabled")); assertTrue(!sparkConf.contains("spark.sql.shuffle.partitions")); assertEquals(sparkConf.get("spark.sql.catalogImplementation"), "hive"); } @Test public void testDefaultStreamingConfiguration() { Config config = getTestConfig(); Contexts.initialize(config, Contexts.ExecutionMode.STREAMING); SparkConf sparkConf = Contexts.getSparkSession().sparkContext().getConf(); assertTrue(sparkConf.contains("spark.dynamicAllocation.enabled")); assertTrue(sparkConf.contains("spark.sql.shuffle.partitions")); assertEquals(sparkConf.get("spark.sql.catalogImplementation"), "hive"); } @Test public void testDefaultUnitTestConfiguration() { Config config = ConfigFactory.empty(); Contexts.initialize(config, Contexts.ExecutionMode.UNIT_TEST); SparkConf sparkConf = Contexts.getSparkSession().sparkContext().getConf(); assertEquals(sparkConf.get("spark.sql.catalogImplementation"), "in-memory"); assertEquals(sparkConf.get("spark.sql.shuffle.partitions"), "1"); } @Test (expected = AnalysisException.class) public void testHiveDisabledConfiguration() { Map<String, Object> sparamMap = new HashMap<>(); sparamMap.put(Contexts.APPLICATION_SECTION_PREFIX + "." 
+ Contexts.SPARK_SESSION_ENABLE_HIVE_SUPPORT, false); sparamMap.putAll(getTestConfigMap()); Contexts.initialize(ConfigFactory.parseMap(sparamMap), Contexts.ExecutionMode.BATCH); Contexts.getSparkSession().sql("CREATE TABLE testHiveDisabled(d int)"); try { Contexts.getSparkSession().sql("SELECT count(*) from testHiveDisabled"); } finally { Contexts.getSparkSession().sql("DROP TABLE testHiveDisabled"); } } @Test (expected = AnalysisException.class) public void testDefaultHiveDisabledForUnitTestsConfiguration() { Contexts.getSparkSession().sql("CREATE TABLE testHiveDisabled(d int)"); try { Contexts.getSparkSession().sql("SELECT count(*) from testHiveDisabled"); } finally { Contexts.getSparkSession().sql("DROP TABLE testHiveDisabled"); } } @Test public void testHiveEnabledConfiguration() { Map<String, Object> sparamMap = new HashMap<>(); sparamMap.put(Contexts.SPARK_CONF_PROPERTY_PREFIX + "spark.sql.warehouse.dir", "target/spark-warehouse"); sparamMap.putAll(getTestConfigMap()); Contexts.initialize(ConfigFactory.parseMap(sparamMap), Contexts.ExecutionMode.BATCH); Contexts.getSparkSession().sql("CREATE TABLE testHiveEnabled(d int)"); Contexts.getSparkSession().sql("SELECT count(*) from testHiveEnabled"); Contexts.getSparkSession().sql("DROP TABLE testHiveEnabled"); } @Test public void testHiveEnabledByDefault() { Config config = getTestConfig(); Contexts.initialize(config, Contexts.ExecutionMode.BATCH); Contexts.getSparkSession().sql("CREATE TABLE testHiveEnabled(d int)"); Contexts.getSparkSession().sql("SELECT count(*) from testHiveEnabled"); Contexts.getSparkSession().sql("DROP TABLE testHiveEnabled"); } @Test public void testDriverMemoryClusterMode() { Properties props = new Properties(); props.setProperty(Contexts.APPLICATION_SECTION_PREFIX + "." + Contexts.SPARK_CONF_PROPERTY_PREFIX + "." + Contexts.SPARK_DEPLOY_MODE_PROPERTY, Contexts.SPARK_DEPLOY_MODE_CLUSTER); props.setProperty(Contexts.APPLICATION_SECTION_PREFIX + "." 
+ Contexts.DRIVER_MEMORY_PROPERTY, "2G"); Config config = ConfigFactory.parseProperties(props); Contexts.initialize(config, Contexts.ExecutionMode.UNIT_TEST); SparkConf sparkConf = Contexts.getSparkSession().sparkContext().getConf(); assertEquals(sparkConf.get(Contexts.SPARK_DRIVER_MEMORY_PROPERTY), "2G"); } @Rule public ExpectedException thrown = ExpectedException.none(); @Test public void testDriverMemoryClientMode() { Properties props = new Properties(); props.setProperty(Contexts.APPLICATION_SECTION_PREFIX + "." + Contexts.SPARK_CONF_PROPERTY_PREFIX + "." + Contexts.SPARK_DEPLOY_MODE_PROPERTY, Contexts.SPARK_DEPLOY_MODE_CLIENT); props.setProperty(Contexts.APPLICATION_SECTION_PREFIX + "." + Contexts.DRIVER_MEMORY_PROPERTY, "2G"); Config config = ConfigFactory.parseProperties(props); thrown.expect(RuntimeException.class); thrown.expectMessage("Driver memory can not be set"); Contexts.initialize(config, Contexts.ExecutionMode.UNIT_TEST); Contexts.getSparkSession().sparkContext().getConf(); } @Test public void testAppDriverMemoryClientMode() { Properties props = new Properties(); props.setProperty(Contexts.APPLICATION_SECTION_PREFIX + "." + Contexts.SPARK_CONF_PROPERTY_PREFIX + "." + Contexts.SPARK_DEPLOY_MODE_PROPERTY, Contexts.SPARK_DEPLOY_MODE_CLIENT); props.setProperty(Contexts.APPLICATION_SECTION_PREFIX + "." + Contexts.SPARK_CONF_PROPERTY_PREFIX + "." + Contexts.SPARK_DRIVER_MEMORY_PROPERTY, "2G"); Config config = ConfigFactory.parseProperties(props); thrown.expect(RuntimeException.class); thrown.expectMessage("Driver memory can not be set"); Contexts.initialize(config, Contexts.ExecutionMode.UNIT_TEST); Contexts.getSparkSession().sparkContext().getConf(); } }
core/src/test/java/com/cloudera/labs/envelope/spark/TestContexts.java
/* * Copyright (c) 2015-2019, Cloudera, Inc. All Rights Reserved. * * Cloudera, Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"). You may not use this file except in * compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * This software is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for * the specific language governing permissions and limitations under the * License. */ package com.cloudera.labs.envelope.spark; import com.cloudera.labs.envelope.utils.ConfigUtils; import com.typesafe.config.Config; import com.typesafe.config.ConfigFactory; import org.apache.spark.SparkConf; import org.apache.spark.sql.AnalysisException; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import java.util.HashMap; import java.util.Map; import java.util.Properties; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; public class TestContexts { private static final String RESOURCES_PATH = "/spark"; @Before public void setup() { Contexts.closeSparkSession(); } @Test public void testSparkPassthroughGood() { Config config = ConfigUtils.configFromPath( this.getClass().getResource(RESOURCES_PATH + "/spark-passthrough-good.conf").getPath()); Contexts.initialize(config, Contexts.ExecutionMode.UNIT_TEST); SparkConf sparkConf = Contexts.getSparkSession().sparkContext().getConf(); assertTrue(sparkConf.contains("spark.driver.allowMultipleContexts")); assertEquals("true", sparkConf.get("spark.driver.allowMultipleContexts")); assertTrue(sparkConf.contains("spark.master")); assertEquals("local[1]", sparkConf.get("spark.master")); } @Test public void testApplicationNameProvided() { Properties props = new Properties(); props.setProperty("application.name", "test"); Config config = ConfigFactory.parseProperties(props); 
Contexts.initialize(config, Contexts.ExecutionMode.UNIT_TEST); SparkConf sparkConf = Contexts.getSparkSession().sparkContext().getConf(); assertEquals(sparkConf.get("spark.app.name"), "test"); } @Test public void testApplicationNameNotProvided() { Config config = ConfigFactory.empty(); Contexts.initialize(config, Contexts.ExecutionMode.UNIT_TEST); SparkConf sparkConf = Contexts.getSparkSession().sparkContext().getConf(); assertEquals(sparkConf.get("spark.app.name"), ""); } @Test public void testDefaultBatchConfiguration() { Config config = ConfigFactory.empty(); Contexts.initialize(config, Contexts.ExecutionMode.BATCH); SparkConf sparkConf = Contexts.getSparkSession().sparkContext().getConf(); assertTrue(!sparkConf.contains("spark.dynamicAllocation.enabled")); assertTrue(!sparkConf.contains("spark.sql.shuffle.partitions")); assertEquals(sparkConf.get("spark.sql.catalogImplementation"), "hive"); } @Test public void testDefaultStreamingConfiguration() { Config config = ConfigFactory.empty(); Contexts.initialize(config, Contexts.ExecutionMode.STREAMING); SparkConf sparkConf = Contexts.getSparkSession().sparkContext().getConf(); assertTrue(sparkConf.contains("spark.dynamicAllocation.enabled")); assertTrue(sparkConf.contains("spark.sql.shuffle.partitions")); assertEquals(sparkConf.get("spark.sql.catalogImplementation"), "hive"); } @Test public void testDefaultUnitTestConfiguration() { Config config = ConfigFactory.empty(); Contexts.initialize(config, Contexts.ExecutionMode.UNIT_TEST); SparkConf sparkConf = Contexts.getSparkSession().sparkContext().getConf(); assertEquals(sparkConf.get("spark.sql.catalogImplementation"), "in-memory"); assertEquals(sparkConf.get("spark.sql.shuffle.partitions"), "1"); } @Test (expected = AnalysisException.class) public void testHiveDisabledConfiguration() { Map<String, Object> sparamMap = new HashMap<>(); sparamMap.put(Contexts.APPLICATION_SECTION_PREFIX + "." 
+ Contexts.SPARK_SESSION_ENABLE_HIVE_SUPPORT, false); Contexts.initialize(ConfigFactory.parseMap(sparamMap), Contexts.ExecutionMode.BATCH); Contexts.getSparkSession().sql("CREATE TABLE testHiveDisabled(d int)"); try { Contexts.getSparkSession().sql("SELECT count(*) from testHiveDisabled"); } finally { Contexts.getSparkSession().sql("DROP TABLE testHiveDisabled"); } } @Test (expected = AnalysisException.class) public void testDefaultHiveDisabledForUnitTestsConfiguration() { Contexts.getSparkSession().sql("CREATE TABLE testHiveDisabled(d int)"); try { Contexts.getSparkSession().sql("SELECT count(*) from testHiveDisabled"); } finally { Contexts.getSparkSession().sql("DROP TABLE testHiveDisabled"); } } @Test public void testHiveEnabledConfiguration() { Map<String, Object> sparamMap = new HashMap<>(); sparamMap.put(Contexts.SPARK_CONF_PROPERTY_PREFIX + "spark.sql.warehouse.dir", "target/spark-warehouse"); Contexts.initialize(ConfigFactory.parseMap(sparamMap), Contexts.ExecutionMode.BATCH); Contexts.getSparkSession().sql("CREATE TABLE testHiveEnabled(d int)"); Contexts.getSparkSession().sql("SELECT count(*) from testHiveEnabled"); Contexts.getSparkSession().sql("DROP TABLE testHiveEnabled"); } @Test public void testHiveEnabledByDefault() { Contexts.initialize(ConfigFactory.empty(), Contexts.ExecutionMode.BATCH); Contexts.getSparkSession().sql("CREATE TABLE testHiveEnabled(d int)"); Contexts.getSparkSession().sql("SELECT count(*) from testHiveEnabled"); Contexts.getSparkSession().sql("DROP TABLE testHiveEnabled"); } @Test public void testDriverMemoryClusterMode() { Properties props = new Properties(); props.setProperty(Contexts.APPLICATION_SECTION_PREFIX + "." + Contexts.SPARK_CONF_PROPERTY_PREFIX + "." + Contexts.SPARK_DEPLOY_MODE_PROPERTY, Contexts.SPARK_DEPLOY_MODE_CLUSTER); props.setProperty(Contexts.APPLICATION_SECTION_PREFIX + "." 
+ Contexts.DRIVER_MEMORY_PROPERTY, "2G"); Config config = ConfigFactory.parseProperties(props); Contexts.initialize(config, Contexts.ExecutionMode.UNIT_TEST); SparkConf sparkConf = Contexts.getSparkSession().sparkContext().getConf(); assertEquals(sparkConf.get(Contexts.SPARK_DRIVER_MEMORY_PROPERTY), "2G"); } @Rule public ExpectedException thrown = ExpectedException.none(); @Test public void testDriverMemoryClientMode() { Properties props = new Properties(); props.setProperty(Contexts.APPLICATION_SECTION_PREFIX + "." + Contexts.SPARK_CONF_PROPERTY_PREFIX + "." + Contexts.SPARK_DEPLOY_MODE_PROPERTY, Contexts.SPARK_DEPLOY_MODE_CLIENT); props.setProperty(Contexts.APPLICATION_SECTION_PREFIX + "." + Contexts.DRIVER_MEMORY_PROPERTY, "2G"); Config config = ConfigFactory.parseProperties(props); thrown.expect(RuntimeException.class); thrown.expectMessage("Driver memory can not be set"); Contexts.initialize(config, Contexts.ExecutionMode.UNIT_TEST); Contexts.getSparkSession().sparkContext().getConf(); } @Test public void testAppDriverMemoryClientMode() { Properties props = new Properties(); props.setProperty(Contexts.APPLICATION_SECTION_PREFIX + "." + Contexts.SPARK_CONF_PROPERTY_PREFIX + "." + Contexts.SPARK_DEPLOY_MODE_PROPERTY, Contexts.SPARK_DEPLOY_MODE_CLIENT); props.setProperty(Contexts.APPLICATION_SECTION_PREFIX + "." + Contexts.SPARK_CONF_PROPERTY_PREFIX + "." + Contexts.SPARK_DRIVER_MEMORY_PROPERTY, "2G"); Config config = ConfigFactory.parseProperties(props); thrown.expect(RuntimeException.class); thrown.expectMessage("Driver memory can not be set"); Contexts.initialize(config, Contexts.ExecutionMode.UNIT_TEST); Contexts.getSparkSession().sparkContext().getConf(); } }
[ENV-412] Use an in-memory HMS DB for unit tests (#280)
core/src/test/java/com/cloudera/labs/envelope/spark/TestContexts.java
[ENV-412] Use an in-memory HMS DB for unit tests (#280)
<ide><path>ore/src/test/java/com/cloudera/labs/envelope/spark/TestContexts.java <ide> import com.cloudera.labs.envelope.utils.ConfigUtils; <ide> import com.typesafe.config.Config; <ide> import com.typesafe.config.ConfigFactory; <add> <ide> import org.apache.spark.SparkConf; <ide> import org.apache.spark.sql.AnalysisException; <ide> import org.junit.Before; <ide> public class TestContexts { <ide> <ide> private static final String RESOURCES_PATH = "/spark"; <add> private static final String HADOOP_CONF_PREFIX = Contexts.APPLICATION_SECTION_PREFIX + "." + <add> Contexts.SPARK_CONF_PROPERTY_PREFIX + ".spark.hadoop."; <ide> <ide> @Before <ide> public void setup() { <ide> Contexts.closeSparkSession(); <add> } <add> <add> private Map<String, Object> getTestConfigMap() { <add> Map<String, Object> params = new HashMap<>(); <add> params.put(HADOOP_CONF_PREFIX + "javax.jdo.option.ConnectionURL", <add> "jdbc:derby:memory:db;create=true"); <add> params.put(HADOOP_CONF_PREFIX + "hive.exec.scratchdir", "core/target/scratch"); <add> return params; <add> } <add> <add> private Config getTestConfig() { <add> return ConfigFactory.parseMap(getTestConfigMap()); <ide> } <ide> <ide> @Test <ide> <ide> @Test <ide> public void testDefaultBatchConfiguration() { <del> Config config = ConfigFactory.empty(); <add> Config config = getTestConfig(); <ide> Contexts.initialize(config, Contexts.ExecutionMode.BATCH); <ide> SparkConf sparkConf = Contexts.getSparkSession().sparkContext().getConf(); <ide> assertTrue(!sparkConf.contains("spark.dynamicAllocation.enabled")); <ide> <ide> @Test <ide> public void testDefaultStreamingConfiguration() { <del> Config config = ConfigFactory.empty(); <add> Config config = getTestConfig(); <ide> Contexts.initialize(config, Contexts.ExecutionMode.STREAMING); <ide> SparkConf sparkConf = Contexts.getSparkSession().sparkContext().getConf(); <ide> assertTrue(sparkConf.contains("spark.dynamicAllocation.enabled")); <ide> Map<String, Object> sparamMap = new HashMap<>(); <ide> 
sparamMap.put(Contexts.APPLICATION_SECTION_PREFIX + "." + <ide> Contexts.SPARK_SESSION_ENABLE_HIVE_SUPPORT, false); <add> sparamMap.putAll(getTestConfigMap()); <ide> Contexts.initialize(ConfigFactory.parseMap(sparamMap), Contexts.ExecutionMode.BATCH); <ide> Contexts.getSparkSession().sql("CREATE TABLE testHiveDisabled(d int)"); <ide> try { <ide> Map<String, Object> sparamMap = new HashMap<>(); <ide> sparamMap.put(Contexts.SPARK_CONF_PROPERTY_PREFIX + "spark.sql.warehouse.dir", <ide> "target/spark-warehouse"); <add> sparamMap.putAll(getTestConfigMap()); <ide> Contexts.initialize(ConfigFactory.parseMap(sparamMap), Contexts.ExecutionMode.BATCH); <ide> Contexts.getSparkSession().sql("CREATE TABLE testHiveEnabled(d int)"); <ide> Contexts.getSparkSession().sql("SELECT count(*) from testHiveEnabled"); <ide> <ide> @Test <ide> public void testHiveEnabledByDefault() { <del> Contexts.initialize(ConfigFactory.empty(), Contexts.ExecutionMode.BATCH); <add> Config config = getTestConfig(); <add> Contexts.initialize(config, Contexts.ExecutionMode.BATCH); <ide> Contexts.getSparkSession().sql("CREATE TABLE testHiveEnabled(d int)"); <ide> Contexts.getSparkSession().sql("SELECT count(*) from testHiveEnabled"); <ide> Contexts.getSparkSession().sql("DROP TABLE testHiveEnabled");
JavaScript
mit
15e436843904cceef2410927552312b9ed242d2c
0
cloudmine/cloudmine-js,cloudmine/cloudmine-js,cloudmine/cloudmine-js,cloudmine/CloudMineSDK-JavaScript,cloudmine/CloudMineSDK-JavaScript,cloudmine/CloudMineSDK-JavaScript
/* To-do list: sample Cloudmine app Features: - Simple data pushing: Creating new items, updating them as "done," and deleting them using Cloudmine object storage - Easy and secure user management: Logging in or registering as a new user, with the session saved for seven days or until the user logs out. Global variables: - cloudmine: instance of Cloudmine js library - todo: object of functions for this app - priority_button: prototype for custom button that sets new todo item priority, called by todo.draw_item Cloudmine library functions implemented: login, logout, createUser, update, destroy */ $(document).ready(function(){ /* todo will be an object of functions that makes our to-do list run cm will be an instance of the cloudmine.WebService library object */ var todo = {}, cm = {}; /* Binding UI events to buttons, and login on hitting Enter while in the password field. Focus on the email field automatically. */ $('#login_button').click(function(){ todo.login_user(); }); $('#register_button').click(function(){ todo.register_user(); }); $('#create_button').click(function(){ todo.create_item(); }); $('#logout_button').click(function(){ todo.logout_user(); }); $('#login_password').keydown(function(e){ e.which == 13 && todo.login_user(); }); $('#login_email').focus(); /* Check for a session_token from a previous login, stored in a cloudmineTodoSession document cookie NOTE: This doesn't work in Chrome on default settings because it doesn't allow local cookies to be stored. */ var check_for_session = function(){ var cookies = document.cookie.split(';'); for (var i in cookies){ var cookie = cookies[i].split('='); if (cookie[0] == 'cloudmineTodoSession' && cookie[1] != 'none'){ $('#login').hide(); $('#restoring_session').show(); return cookie[1]; } } return null }; /* Function to initialize the cloudmine.WebService library, we'll call this at the very end of $(document).ready after the todo object is defined. 
*/ var init_cloudmine = function(){ // Set up an object with our App id and API key var init_vals = { appid: '84e5c4a381e7424b8df62e055f0b69db', apikey: '84c8c3f1223b4710b180d181cd6fb1df' } // Perform the check for the session cookie var previous_session = check_for_session(); // If found, add that to the init_vals object if (previous_session){ init_vals['session_token'] = previous_session; } // Initialize Cloudmine library using everything in init_vals cm = new cloudmine.WebService(init_vals); // If we found that cookie, let's go ahead and set up the list right away if (previous_session){ todo.get_items(); } } /* Set up the todo object with all we need to make this to-do list work dynamically with no refreshes. We'll mostly be using jQuery to manipulate DOM elements and our instance of the Cloudmine JS library - cm - to make all the data calls. */ todo = { data: [ ], // This will store the JSON that makes up the list priority_colors: ['', '#F9B3A1', '#FBF285', '#C3DD89'], // Each item has a priority -> 1 is red, 2 is yellow, 3 is green selected_priority: 1, // Default priority /* register_user Called by the Register button click on the login screen. It uses cm.createUser to register a new user account using the info entered in */ register_user: function(){ var userid = $('#login_email').val(), password = $('#login_password').val(); $('#register_button').attr('value', 'Creating account...'); $('#login_button, #or').hide(); // Run the Cloudmine createUser call and chain on success and error callbacks. 
cm.createUser(userid, password) .on('success', function(response){ todo.process_registration(response, { userid: userid, password: password }); }) .on('conflict', function(data){ todo.error('login', data.errors[0]); }) .on('badrequest', function(data){ todo.error('login', 'Please enter a valid email.'); }) .on('error', function(){ $('#register_button').attr('value', 'Register'); $('#login_button, #or').show(); }); }, /* process_registration Called by todo.register_user. Logs in newly created user. */ process_registration: function(response, input){ todo.login_user(input); }, /* login_user Called by Login button click and todo.process_registration (new user is immediately logged in after registration) Parameter: credentials: optional object containing username and password */ login_user: function(credentials){ if (credentials == undefined){ credentials = { userid: $('#login_email').val(), password: $('#login_password').val() }; } // Don't actually run if one of the values is blank if (!credentials.userid || !credentials.password){ return; } // Alter the UI to indicate that the login request is pending $('#login_button').attr('value', 'Logging in...'); $('#register_button, #or').hide(); // Run Cloudmine login request. cm.login(credentials) .on('success', function(data){ todo.process_login(data); }) .on('unauthorized', function(data){ $('#login_button').attr('value', 'Login'); $('#register_button, #or').show(); todo.error('login', data.errors[0]); }); }, /* process_login Called by todo.login_user. Creates a cookie with the session_token we got back from Cloudmine and calls for this user's data for their to-do list. Parameter: response: response data from the server, passed in by todo.login_user */ process_login: function(response){ document.cookie = 'cloudmineTodoSession=' + response.session_token + '; expires=' + response.expires + '; path=/'; todo.get_items(); }, /* logout_user Called by Logout button in the list view. Logs user out, clears session cookie. 
*/ logout_user: function(){ cm.logout().on('success', function(){ todo.process_logout(); }); }, /* process_logout Called by todo.logout_user. Clears the session cookie and resets the login screen. */ process_logout: function(){ var _splice, cookies; // Read for session cookie document.cookie = 'cloudmineTodoSession=none; expires=' + new Date(0).toUTCString() + '; path=/'; // Reset everything $('#todo').empty().hide(); $('#todo_header, #new').hide(); $('#login, #or, #register_button').show(); $('#login_button').attr('value', 'Login'); $('#login_email, #login_password').val(''); $('#login_email').focus(); $('#priority_buttons').html('Priority:'); }, /* push_item Called by todo.create_item and todo.toggle_item. Pushes item data to the server (this can be creating it or updating it). If it's creating the item, chain on a function to draw that item and put it in the UI when the call is successful. */ push_item: function(data, unique_id){ if (unique_id == undefined){ // The unique_id will be the key for this object in Cloudmine's database. unique_id = new Date().getTime(); // When creating objects with Cloudmine you get to specify their key yourself. data = { // In our case, we'll use javascript's built-in new Date().getTime() to get an ID unique for the moment text: data.title, // this item was created if a unique_id hasn't been specified (which means we're making a new item priority: data.priority, // and not updating an existing one). 
picture: null, __class__: 'TBTodoItem', deadline: { __class__: 'datetime', timestamp: data.deadline }, location: null, __id__: unique_id, done: false } callback = function(response){ todo.draw_item(data) } } else { callback = function() {} } // Make the Cloudmine library call to send the data to the cloud, along with the unique_id cm.update(unique_id, data) .on('success', function(){ todo.draw_item(data); }) .on('unauthorized', function(data){ todo.error('list', data.errors[0]); }) .on('notfound', function(data){ todo.error('list', data.errors[0]); }); }, /* get_items Called by todo.process_login. Retrieves all the user's to-do items from Cloudmine and calls todo.draw_list to build the elements that display the list. */ get_items: function(){ // Calling the Cloudmine get() function with the argument null retrieves all data available. cm.get(null).on('success', function(data){ // Save the response data todo.data = data; $('#login').hide(); $('#todo, #new').show(); todo.setup_priority_buttons(); todo.draw_list(data); }); }, /* create_item Sets up and validates variables for a new to-do item, then passes the data to todo.push_item. Gets the data from the input elements in the DOM */ create_item: function(){ var data = { title: $('#new_item').val(), priority: todo.selected_priority, deadline: todo.create_deadline($('#due_in').val()) } if (data.title == ''){ return } $('#new_item').val(''); todo.push_item(data); // Push data to Cloudmine }, /* delete_item Called by Delete button click on an item. Removes the item from the cloud with cm.destroy and then removes it from the UI. 
The callback on this one Parameters: key: The item's key in the Cloudmine db */ delete_item: function(key){ cm.destroy(key) .on('complete', function(){ $('span[item="' + key + '"]').remove(); }) }, /* draw_list Takes success data from get_items and draws the UI using todo.draw_item() */ draw_list: function(data){ $('#restoring_session').hide(); $('#todo_header').show(); if (!todo.is_empty_object(data)){ $('#empty_list').hide(); } for (var key in data){ var item = data[key]; todo.draw_item(item); } }, /* draw_item Creates the DOM elements that make up each item in the list, and binds a Click handler to it all which calls toggle_item on it. */ draw_item: function(item_data){ var item_text, // The text that will display in the item todo_div, todo_checkbox, todo_delete, todo_wrapper; // DOM elements (main div, checkbox that indicates done-ness) todo.data[item_data.__id__] = item_data; item_text = ''; // By default, start with an empty string. if (item_data.deadline.timestamp != null){ // Parse how much time is left to complete this task. parsed_deadline = todo.parse_remaining_time(item_data.deadline.timestamp); if (parsed_deadline <= 0){ item_text += '<span class="overdue">Overdue</span>'; // If time is up, put a bold "Overdue" flag on the item. } else { item_text += '<span class="due">Due in ' + parsed_deadline + ' hours.</span>'; // Else, put a subtler flag indicating the hours left to complete the task. } } // Build the elements todo_wrapper = $('<span item="' + item_data.__id__ + '"><br></span>'); todo_div = $('<div class="todo_item"><span class="value"></span>' + item_text + '</div>'); todo_div.find('.value').text(item_data.text); // Use $.text() to prevent script injection todo_checkbox = $('<input type="checkbox" />'), todo_delete = $('<div class="delete_button"></div>'); // Styling for if the item is done: "done" class crosses out text and makes it lighter. Check off the checkbox, too. 
if (item_data.done){ todo_div.addClass('done'); todo_checkbox.attr('checked', true); } /* Prepend the checkbox to the div element and give the whole thing a click function to toggle the listing's done status. (This is just for UI's sake: it's easier to click the whole thing than ticking the checkbox itself. The CSS cursor: pointer on the item will make it clear that it's clickable) */ todo_div.prepend(todo_checkbox).click(function(){ var item_data = todo.data[$(this).parent().attr('item')]; todo.toggle_item(item_data); }).css({ background: todo.priority_colors[item_data.priority] // Give the item the color corresponding to its priority level. }); // Bind click event to the delete button todo_delete.click(function(e){ e.stopPropagation(); todo.delete_item($(this).parent().parent().attr('item')) }); // Commit the element to the page. $(todo_div).append(todo_delete); $(todo_wrapper).prepend(todo_div); $('#todo').prepend(todo_wrapper); // In case this is the first item added, hide the "You haven't added any items yet" message. $('#empty_list').hide(); }, /* setup_priority_buttons Called by todo.get_items. Sets up the three traffic-light buttons used to select a priority level when creating a new item. */ setup_priority_buttons: function(){ var _i, pb, all_pbs = [ ]; for (_i = 3; _i > 0; _i --){ pb = new todo.priority_button(_i); $('#priority_buttons').append(pb.button); all_pbs.push(pb); if (_i == 3){ pb.select(); // Select lower priority by default } } todo.all_pbs = all_pbs; }, /* toggle_item Called by a Click handler defined in todo.draw_item. Toggles an item between done and not done, both in the UI and the Cloudmine db. Parameters: data: Item data, from which this function gets its done status and its id. 
*/ toggle_item: function(data){ var todo_div = $('span[item="' + data.__id__ + '"]').find('div'), todo_checkbox = $('span[item="' + data.__id__ + '"]').find('input[type="checkbox"]'); if (data.done){ data.done = false; todo_div.removeClass('done'); todo_checkbox.attr('checked', false); } else { data.done = true; todo_div.addClass('done'); todo_checkbox.attr('checked', true); } cm.update(data.__id__, { done: data.done }); }, /* create_deadline Called by todo.create_item. Converts a simple user input of hours into seconds from the moment it's being made to give its deadline a proper timestamp we can store and read later in todo.parse_remaining_time. */ create_deadline: function(hours){ var deadline; if (hours == ''){ return null } deadline = new Date(); deadline.setTime( deadline.getTime() + (hours * 3600000) ); return deadline.getTime() / 1000; }, /* parse_remaining_time Called by draw_item. This parses the hours remaining to finish a task given its deadline timestamp created by create_deadline. */ parse_remaining_time: function(seconds){ var now, deadline; now = new Date(); deadline = new Date(); // Convert back to milliseconds by multiplying by 1000 deadline.setTime(seconds * 1000); return parseInt( deadline.getTime() / 3600000 - now.getTime() / 3600000); }, /* is_empty_object Checks if an object is empty, because for some reason in Javascript empty objects are truthy while empty arrays evaluate as false ,'>/ Parameters: object: The object we're checking. */ is_empty_object: function(item) { if (item) { for (var k in item) { if (item.hasOwnProperty(k)) return false; } } return true; }, /* error Flashes a red error message. Parameters: view: 'login' or 'list': which view is the user on? Determines which DOM element is used to show the error. message: The message to display, pulled straight from the Cloudmine server response. */ error: function(view, message){ $('#error_' + view).css({display: 'inline-block'}).text('Error! 
' + message); setTimeout(function(){ $('#error_' + view).fadeOut(500); }, 3500); } } /* priority_button Constructor for the traffic-light-style buttons used to select the new item's priority. */ var priority_button = function(value){ var self = this; this.value = value; this.button = $('<div class="priority"></div>'); this.selected = false; this.color = todo.priority_colors[value] // Bind the action the button $(this.button).click(function(){ self.select(); }).css({ 'background-image': 'url("priority_' + value + '.png")' }); return this } priority_button.prototype = { // Give the priority button a couple methods for selection/deselection (works much like a radio button) select: function(){ var self = this; todo.selected_priority = this.value; $(todo.all_pbs).each(function(i, pb){ if (pb != self){ pb.deselect(); } }); this.selected = true; $(this.button).css({ 'background-position': '0px -50px' }); $('#new_item').css({ 'background-color': self.color }); }, deselect: function(){ this.selected = false; $(this.button).css({ 'background-position': '' // Set to empty string rather than 0px 0px to keep the :hover action working. }); } } todo.priority_button = priority_button // Attach the priority button object to the todo object /* After everything is defined, finally initialize Cloudmine. */ init_cloudmine(); // Uncomment the next line to make the todo object available globally (for testing/playing around) // window.todo = todo; });
examples/todo_app/todo.js
/* To-do list: sample Cloudmine app Features: - Simple data pushing: Creating new items, updating them as "done," and deleting them using Cloudmine object storage - Easy and secure user management: Logging in or registering as a new user, with the session saved for seven days or until the user logs out. Global variables: - cloudmine: instance of Cloudmine js library - todo: object of functions for this app - priority_button: prototype for custom button that sets new todo item priority, called by todo.draw_item Cloudmine library functions implemented: login, logout, createUser, update, destroy */ $(document).ready(function(){ /* todo will be an object of functions that makes our to-do list run cm will be an instance of the cloudmine.WebService library object */ var todo = {}, cm = {}; /* Binding UI events to buttons, and login on hitting Enter while in the password field. Focus on the email field automatically. */ $('#login_button').click(function(){ todo.login_user(); }); $('#register_button').click(function(){ todo.register_user(); }); $('#create_button').click(function(){ todo.create_item(); }); $('#logout_button').click(function(){ todo.logout_user(); }); $('#login_password').keydown(function(e){ e.which == 13 && todo.login_user(); }); $('#login_email').focus(); /* Check for a session_token from a previous login, stored in a cloudmineTodoSession document cookie NOTE: This doesn't work in Chrome on default settings because it doesn't allow local cookies to be stored. */ var check_for_session = function(){ var cookies = document.cookie.split(';'); for (var i in cookies){ var cookie = cookies[i].split('='); if (cookie[0] == 'cloudmineTodoSession' && cookie[1] != 'none'){ $('#login').hide(); $('#restoring_session').show(); return cookie[1]; } } return null }; /* Function to initialize the cloudmine.WebService library, we'll call this at the very end of $(document).ready after the todo object is defined. 
*/ var init_cloudmine = function(){ // Set up an object with our App id and API key var init_vals = { appid: '84e5c4a381e7424b8df62e055f0b69db', apikey: '84c8c3f1223b4710b180d181cd6fb1df' } // Perform the check for the session cookie var previous_session = check_for_session(); // If found, add that to the init_vals object if (previous_session){ init_vals['session_token'] = previous_session; } // Initialize Cloudmine library using everything in init_vals cm = new cloudmine.WebService(init_vals); // If we found that cookie, let's go ahead and set up the list right away if (previous_session){ todo.get_items(); } } /* Set up the todo object with all we need to make this to-do list work dynamically with no refreshes. We'll mostly be using jQuery to manipulate DOM elements and our instance of the Cloudmine JS library - cm - to make all the data calls. */ todo = { data: [ ], // This will store the JSON that makes up the list priority_colors: ['', '#F9B3A1', '#FBF285', '#C3DD89'], // Each item has a priority -> 1 is red, 2 is yellow, 3 is green selected_priority: 1, // Default priority /* register_user Called by the Register button click on the login screen. It uses cm.createUser to register a new user account using the info entered in */ register_user: function(){ var userid = $('#login_email').val(), password = $('#login_password').val(); $('#register_button').attr('value', 'Creating account...'); $('#login_button, #or').hide(); // Run the Cloudmine createUser call and chain on success and error callbacks. 
cm.createUser(userid, password) .on('success', function(response){ todo.process_registration(response, { userid: userid, password: password }); }) .on('conflict', function(data){ todo.error('login', data.errors[0]); }) .on('badrequest', function(data){ todo.error('login', 'Please enter a valid email.'); }) .on('error', function(){ $('#register_button').attr('value', 'Register'); $('#login_button, #or').show(); }); }, /* process_registration Called by todo.register_user. Logs in newly created user. */ process_registration: function(response, input){ todo.login_user(input); }, /* login_user Called by Login button click and todo.process_registration (new user is immediately logged in after registration) Parameter: credentials: optional object containing username and password */ login_user: function(credentials){ if (credentials == undefined){ credentials = { userid: $('#login_email').val(), password: $('#login_password').val() }; } // Don't actually run if one of the values is blank if (!credentials.userid || !credentials.password){ return; } // Alter the UI to indicate that the login request is pending $('#login_button').attr('value', 'Logging in...'); $('#register_button, #or').hide(); // Run Cloudmine login request. cm.login(credentials) .on('success', function(data){ todo.process_login(data); }) .on('unauthorized', function(data){ $('#login_button').attr('value', 'Login'); $('#register_button, #or').show(); todo.error('login', data.errors[0]); }); }, /* process_login Called by todo.login_user. Creates a cookie with the session_token we got back from Cloudmine and calls for this user's data for their to-do list. Parameter: response: response data from the server, passed in by todo.login_user */ process_login: function(response){ document.cookie = 'cloudmineTodoSession=' + response.session_token + '; expires=' + response.expires + '; path=/'; todo.get_items(); }, /* logout_user Called by Logout button in the list view. Logs user out, clears session cookie. 
*/ logout_user: function(){ cm.logout().on('success', function(){ todo.process_logout(); }); }, /* process_logout Called by todo.logout_user. Clears the session cookie and resets the login screen. */ process_logout: function(){ var _splice, cookies; // Read for session cookie document.cookie = 'cloudmineTodoSession=none; expires=' + new Date(0).toUTCString() + '; path=/'; // Reset everything $('#todo').empty().hide(); $('#todo_header, #new').hide(); $('#login, #or, #register_button').show(); $('#login_button').attr('value', 'Login'); $('#login_email, #login_password').val(''); $('#login_email').focus(); $('#priority_buttons').html('Priority:'); }, /* push_item Called by todo.create_item and todo.toggle_item. Pushes item data to the server (this can be creating it or updating it). If it's creating the item, chain on a function to draw that item and put it in the UI when the call is successful. */ push_item: function(data, unique_id){ if (unique_id == undefined){ // The unique_id will be the key for this object in Cloudmine's database. unique_id = new Date().getTime(); // When creating objects with Cloudmine you get to specify their key yourself. data = { // In our case, we'll use javascript's built-in new Date().getTime() to get an ID unique for the moment text: data.title, // this item was created if a unique_id hasn't been specified (which means we're making a new item priority: data.priority, // and not updating an existing one). 
picture: null, __class__: 'TBTodoItem', deadline: { __class__: 'datetime', timestamp: data.deadline }, location: null, __id__: unique_id, done: false } callback = function(response){ todo.draw_item(data) } } else { callback = function() {} } // Make the Cloudmine library call to send the data to the cloud, along with the unique_id cm.update(unique_id, object_data) .on('success', function(){ todo.draw_new_item(object_data); }) .on('unauthorized', function(data){ todo.error('list', data.errors[0]); }) .on('notfound', function(data){ todo.error('list', data.errors[0]); }); }, /* get_items Called by todo.process_login. Retrieves all the user's to-do items from Cloudmine and calls todo.draw_list to build the elements that display the list. */ get_items: function(){ // Calling the Cloudmine get() function with the argument null retrieves all data available. cm.get(null).on('success', function(data){ // Save the response data todo.data = data; $('#login').hide(); $('#todo, #new').show(); todo.setup_priority_buttons(); todo.draw_list(data); }); }, /* create_item Sets up and validates variables for a new to-do item, then passes the data to todo.push_item. Gets the data from the input elements in the DOM */ create_item: function(){ var data = { title: $('#new_item').val(), priority: todo.selected_priority, deadline: todo.create_deadline($('#due_in').val()) } if (data.title == ''){ return } $('#new_item').val(''); todo.push_item(data); // Push data to Cloudmine }, /* delete_item Called by Delete button click on an item. Removes the item from the cloud with cm.destroy and then removes it from the UI. 
The callback on this one Parameters: key: The item's key in the Cloudmine db */ delete_item: function(key){ cm.destroy(key) .on('complete', function(){ $('span[item="' + key + '"]').remove(); }) }, /* draw_list Takes success data from get_items and draws the UI using todo.draw_item() */ draw_list: function(data){ $('#restoring_session').hide(); $('#todo_header').show(); if (!todo.is_empty_object(data)){ $('#empty_list').hide(); } for (var key in data){ var item = data[key]; todo.draw_item(item); } }, /* draw_item Creates the DOM elements that make up each item in the list, and binds a Click handler to it all which calls toggle_item on it. */ draw_item: function(item_data){ var item_text, // The text that will display in the item todo_div, todo_checkbox, todo_delete, todo_wrapper; // DOM elements (main div, checkbox that indicates done-ness) todo.data[item_data.__id__] = item_data; item_text = ''; // By default, start with an empty string. if (item_data.deadline.timestamp != null){ // Parse how much time is left to complete this task. parsed_deadline = todo.parse_remaining_time(item_data.deadline.timestamp); if (parsed_deadline <= 0){ item_text += '<span class="overdue">Overdue</span>'; // If time is up, put a bold "Overdue" flag on the item. } else { item_text += '<span class="due">Due in ' + parsed_deadline + ' hours.</span>'; // Else, put a subtler flag indicating the hours left to complete the task. } } // Build the elements todo_wrapper = $('<span item="' + item_data.__id__ + '"><br></span>'); todo_div = $('<div class="todo_item"><span class="value"></span>' + item_text + '</div>'); todo_div.find('.value').text(item_data.text); // Use $.text() to prevent script injection todo_checkbox = $('<input type="checkbox" />'), todo_delete = $('<div class="delete_button"></div>'); // Styling for if the item is done: "done" class crosses out text and makes it lighter. Check off the checkbox, too. 
if (item_data.done){ todo_div.addClass('done'); todo_checkbox.attr('checked', true); } /* Prepend the checkbox to the div element and give the whole thing a click function to toggle the listing's done status. (This is just for UI's sake: it's easier to click the whole thing than ticking the checkbox itself. The CSS cursor: pointer on the item will make it clear that it's clickable) */ todo_div.prepend(todo_checkbox).click(function(){ var item_data = todo.data[$(this).parent().attr('item')]; todo.toggle_item(item_data); }).css({ background: todo.priority_colors[item_data.priority] // Give the item the color corresponding to its priority level. }); // Bind click event to the delete button todo_delete.click(function(e){ e.stopPropagation(); todo.delete_item($(this).parent().parent().attr('item')) }); // Commit the element to the page. $(todo_div).append(todo_delete); $(todo_wrapper).prepend(todo_div); $('#todo').prepend(todo_wrapper); // In case this is the first item added, hide the "You haven't added any items yet" message. $('#empty_list').hide(); }, /* setup_priority_buttons Called by todo.get_items. Sets up the three traffic-light buttons used to select a priority level when creating a new item. */ setup_priority_buttons: function(){ var _i, pb, all_pbs = [ ]; for (_i = 3; _i > 0; _i --){ pb = new todo.priority_button(_i); $('#priority_buttons').append(pb.button); all_pbs.push(pb); if (_i == 3){ pb.select(); // Select lower priority by default } } todo.all_pbs = all_pbs; }, /* toggle_item Called by a Click handler defined in todo.draw_item. Toggles an item between done and not done, both in the UI and the Cloudmine db. Parameters: data: Item data, from which this function gets its done status and its id. 
*/ toggle_item: function(data){ var todo_div = $('span[item="' + data.__id__ + '"]').find('div'), todo_checkbox = $('span[item="' + data.__id__ + '"]').find('input[type="checkbox"]'); if (data.done){ data.done = false; todo_div.removeClass('done'); todo_checkbox.attr('checked', false); } else { data.done = true; todo_div.addClass('done'); todo_checkbox.attr('checked', true); } todo.push_item({ done: data.done }, data.__id__, function(){ }); }, /* create_deadline Called by todo.create_item. Converts a simple user input of hours into seconds from the moment it's being made to give its deadline a proper timestamp we can store and read later in todo.parse_remaining_time. */ create_deadline: function(hours){ var deadline; if (hours == ''){ return null } deadline = new Date(); deadline.setTime( deadline.getTime() + (hours * 3600000) ); return deadline.getTime() / 1000; }, /* parse_remaining_time Called by draw_item. This parses the hours remaining to finish a task given its deadline timestamp created by create_deadline. */ parse_remaining_time: function(seconds){ var now, deadline; now = new Date(); deadline = new Date(); // Convert back to milliseconds by multiplying by 1000 deadline.setTime(seconds * 1000); return parseInt( deadline.getTime() / 3600000 - now.getTime() / 3600000); }, /* is_empty_object Checks if an object is empty, because for some reason in Javascript empty objects are truthy while empty arrays evaluate as false ,'>/ Parameters: object: The object we're checking. */ is_empty_object: function(item) { if (item) { for (var k in item) { if (item.hasOwnProperty(k)) return false; } } return true; }, /* error Flashes a red error message. Parameters: view: 'login' or 'list': which view is the user on? Determines which DOM element is used to show the error. message: The message to display, pulled straight from the Cloudmine server response. */ error: function(view, message){ $('#error_' + view).css({display: 'inline-block'}).text('Error! 
' + message); setTimeout(function(){ $('#error_' + view).fadeOut(500); }, 3500); } } /* priority_button Constructor for the traffic-light-style buttons used to select the new item's priority. */ var priority_button = function(value){ var self = this; this.value = value; this.button = $('<div class="priority"></div>'); this.selected = false; this.color = todo.priority_colors[value] // Bind the action the button $(this.button).click(function(){ self.select(); }).css({ 'background-image': 'url("priority_' + value + '.png")' }); return this } priority_button.prototype = { // Give the priority button a couple methods for selection/deselection (works much like a radio button) select: function(){ var self = this; todo.selected_priority = this.value; $(todo.all_pbs).each(function(i, pb){ if (pb != self){ pb.deselect(); } }); this.selected = true; $(this.button).css({ 'background-position': '0px -50px' }); $('#new_item').css({ 'background-color': self.color }); }, deselect: function(){ this.selected = false; $(this.button).css({ 'background-position': '' // Set to empty string rather than 0px 0px to keep the :hover action working. }); } } todo.priority_button = priority_button // Attach the priority button object to the todo object /* After everything is defined, finally initialize Cloudmine. */ init_cloudmine(); // Uncomment the next line to make the todo object available globally (for testing/playing around) // window.todo = todo; });
js updated
examples/todo_app/todo.js
js updated
<ide><path>xamples/todo_app/todo.js <ide> <ide> // Make the Cloudmine library call to send the data to the cloud, along with the unique_id <ide> <del> cm.update(unique_id, object_data) <add> cm.update(unique_id, data) <ide> .on('success', function(){ <del> todo.draw_new_item(object_data); <add> todo.draw_item(data); <ide> }) <ide> .on('unauthorized', function(data){ <ide> todo.error('list', data.errors[0]); <ide> todo.data[item_data.__id__] = item_data; <ide> <ide> item_text = ''; // By default, start with an empty string. <del> <ide> if (item_data.deadline.timestamp != null){ // Parse how much time is left to complete this task. <ide> parsed_deadline = todo.parse_remaining_time(item_data.deadline.timestamp); <ide> if (parsed_deadline <= 0){ <ide> todo_div.addClass('done'); <ide> todo_checkbox.attr('checked', true); <ide> } <del> <del> todo.push_item({ <del> done: data.done <del> }, data.__id__, function(){ }); <add> cm.update(data.__id__, { done: data.done }); <ide> }, <ide> <ide> /*
Java
mit
315e6a8cb54c5b845f938bd52243f4fa07e9ec54
0
marbros/Programing-Memories,marbros/Programing-Memories,marnig/Programing-Memories,marnig/Programing-Memories,marnig/Programing-Memories,marbros/Programing-Memories,marbros/Programing-Memories,marbros/Programing-Memories,marnig/Programing-Memories,marnig/Programing-Memories
/** * @author Mario Giraldo Restrepo * @deprecated Algorithm of Huffman * @version 0.0.3 */ //http://www.eztigma.tk/juegos/binary.php import java.util.Scanner; import java.util.TreeMap; import java.util.PriorityQueue; public class Huffman { static String keys = ""; static String k = ""; /** * Este método se encarga de retirar cada par de elementos de menor * frecuencia en la cola, y sumar su peso con el fin de crear * pequeños subArboles, que seran nuevamente agregados a la cola * hasta obtener un unico arbol. * * @param cola Cola de prioridades ordenada por frecuencia de * cada caracter en el mapa. * @param arbol Es una instancia de la clase Arbol. */ private static void createTree(PriorityQueue cola, Arbol arbol) { Nodo father = null; while(cola.size() > 1) { Nodo one = (Nodo) cola.poll(); //System.out.println(one); Nodo two = (Nodo) cola.poll(); //System.out.println(two); Integer onePlusTwo = one.getValue() + two.getValue(); father = arbol.insertarNodo(onePlusTwo,one,two,father); cola.add(father); } /*while(!cola.isEmpty()) { System.out.println(cola.poll()); } */ getKeys((Nodo)cola.poll()); //Imprimir Arbol //posOrden((Nodo)cola.poll()); } /** * Este método realiza un recorrido en orden de un arbol * * @param arbol Es una instancia de la clase Nodo. * @param key Es el binario 0 ó 1 asociado a cada arco del arbol */ public static void order(Nodo arbol, String key){ keys += key; if (arbol != null){ order(arbol.getHijoIzq(), "0"); if(!(arbol.getkey().equals(""))) k += (arbol.getkey() + " : " + keys + ","); //System.out.println(arbol.getkey() + " key " + k); order(arbol.getHijoDer(), "1"); } keys = keys.substring(0,keys.length()-1); } /** * Este método realiza un recorrido a medias, en Pos Orden de un arbol, * con el fin de asociar un valor binario a cada arco de este. * * @param arbol Es una instancia de la clase Nodo. 
*/ public static void getKeys(Nodo arbol) { if(arbol != null) { order(arbol.getHijoIzq(), "0"); order(arbol.getHijoDer(), "1"); } } /** * Este método imprime las claves asociadas a cada hoja del arbol. */ public static void printKeys() { String keys = k.replace(",", "\n"); //String [] keys = k.split(","); //Imprime Keys System.out.println("-Keys- \n" + keys); } /** * Este método realiza un reemplazo de cada caracter del texto * por su respectiva key asignada al arbol; luego imprime el nuevo * texto resultante del anterior proceso. * * @param T Texto */ public static void prinTextCompress(String T) { char[] keys = T.toCharArray(); //maar //m 00,r 01,a 1, //String key = new String(k); char comp = k.charAt(0); String keyComplete = ""; for(int i = 0; i <= keys.length-1; i++) { comp = keys[i]; if(keys[i] == comp) { keyComplete += k.substring((k.indexOf(comp)+4), k.indexOf(",",k.indexOf(comp))); } } //Imprime Texto Comprimido System.out.println("Texto Comprimido: " + keyComplete); } /** * Esta función retorna un cola de prioridades ordenada por * frecuencia de cada caracter en el mapa. * * @param map Es el mapa de caracteres asociado con su frecuencia. * @param arbol Es una instancia de la clase Arbol * @Retun cola */ private static PriorityQueue col_priority(TreeMap map, Arbol arbol) { PriorityQueue<Nodo> cola = new PriorityQueue<Nodo>(); Nodo node = null; for (Object key : map.keySet()) { int value = (int) map.get(key); cola.add(node = new Nodo((String)key,value)); //equivalente a //cola.add(arbol.insertarNodo((String)key,value,node)); } System.out.println(cola.toString()); // while(!cola.isEmpty()){ // System.out.println(cola.poll()); // } return cola; } /** * Esta función retorna un map <k,v> con cada caracter * y el número de veces con que este se repite en un texto. 
* * @param T Es el Texto * @Retun map */ private static TreeMap cont_Letters(String T) { String[] letters = T.split(""); int len = letters.length; TreeMap <String, Integer> map = new TreeMap <String, Integer>(); for (int i=0; i < len; ++i) { String key = letters[i]; if (letters[i].length() > 0) { if(map.get(key) == null) { map.put(key, 1); }else { int value = map.get(key).intValue(); value++; map.put(key,value); } } } return map; } /** * Este método realiza la tarea de contenedor de cada funcíon y * método del programa y los ejecuta en un orden establecido. * * @param T Es el Texto * @Retun arbol Es una instancia de la clase Arbol. */ private static void huffman (String T, Arbol arbol) { createTree(col_priority(cont_Letters(T),arbol),arbol); printKeys(); prinTextCompress(T); printTextDescompress(T); } /** * Metodo de inicio. */ public static void main(String[] args) { Arbol arbol = new Arbol(); Scanner Read = new Scanner(System.in); String text; System.out.print("Ingrese el texto : "); text = Read.nextLine(); System.out.print("\n"); String k = ""; huffman(text,arbol); } }
Algorithm_Of_Huffman/Huffman.java
/** * @author Mario Giraldo Restrepo * @deprecated Algorithm of Huffman * @version 0.0.3 */ //http://www.eztigma.tk/juegos/binary.php import java.util.Scanner; import java.util.TreeMap; import java.util.PriorityQueue; public class Huffman { static String keys = ""; static String k = ""; /** * Este método se encarga de retirar cada par de elementos de menor * frecuencia en la cola, y sumar su peso con el fin de crear * pequeños subArboles, que seran nuevamente agregados a la cola * hasta obtener un unico arbol. * * @param cola Cola de prioridades ordenada por frecuencia de * cada caracter en el mapa. * @param arbol Es una instancia de la clase Arbol. */ private static void createTree(PriorityQueue cola, Arbol arbol) { Nodo father = null; while(cola.size() > 1) { Nodo one = (Nodo) cola.poll(); //System.out.println(one); Nodo two = (Nodo) cola.poll(); //System.out.println(two); Integer onePlusTwo = one.getValue() + two.getValue(); father = arbol.insertarNodo(onePlusTwo,one,two,father); cola.add(father); } /*while(!cola.isEmpty()) { System.out.println(cola.poll()); } */ getKeys((Nodo)cola.poll()); //Imprimir Arbol //posOrden((Nodo)cola.poll()); } /** * Este método realiza un recorrido en orden de un arbol * * @param arbol Es una instancia de la clase Nodo. * @param key Es el binario 0 ó 1 asociado a cada arco del arbol */ public static void order(Nodo arbol, String key){ keys += key; if (arbol != null){ order(arbol.getHijoIzq(), "0"); if(!(arbol.getkey().equals(""))) k += (arbol.getkey() + " : " + keys + ","); //System.out.println(arbol.getkey() + " key " + k); order(arbol.getHijoDer(), "1"); } keys = keys.substring(0,keys.length()-1); } /** * Este método realiza un recorrido a medias, en Pos Orden de un arbol, * con el fin de asociar un valor binario a cada arco de este. * * @param arbol Es una instancia de la clase Nodo. 
*/ public static void getKeys(Nodo arbol) { if(arbol != null) { order(arbol.getHijoIzq(), "0"); order(arbol.getHijoDer(), "1"); } } /** * Este método imprime las claves asociadas a cada hoja del arbol. */ public static void printKeys() { String keys = k.replace(",", "\n"); //String [] keys = k.split(","); //Imprime Keys System.out.println("-Keys- \n" + keys); } /** * Esta función retorna un cola de prioridades ordenada por * frecuencia de cada caracter en el mapa. * * @param map Es el mapa de caracteres asociado con su frecuencia. * @param arbol Es una instancia de la clase Arbol * @Retun cola */ private static PriorityQueue col_priority(TreeMap map, Arbol arbol) { PriorityQueue<Nodo> cola = new PriorityQueue<Nodo>(); Nodo node = null; for (Object key : map.keySet()) { int value = (int) map.get(key); cola.add(node = new Nodo((String)key,value)); //equivalente a //cola.add(arbol.insertarNodo((String)key,value,node)); } System.out.println(cola.toString()); // while(!cola.isEmpty()){ // System.out.println(cola.poll()); // } return cola; } /** * Esta función retorna un map <k,v> con cada caracter * y el número de veces con que este se repite en un texto. * * @param T Es el Texto * @Retun map */ private static TreeMap cont_Letters(String T) { String[] letters = T.split(""); int len = letters.length; TreeMap <String, Integer> map = new TreeMap <String, Integer>(); for (int i=0; i < len; ++i) { String key = letters[i]; if (letters[i].length() > 0) { if(map.get(key) == null) { map.put(key, 1); }else { int value = map.get(key).intValue(); value++; map.put(key,value); } } } return map; } /** * Este método realiza la tarea de contenedor de cada funcíon y * método del programa y los ejecuta en un orden establecido. * * @param T Es el Texto * @Retun arbol Es una instancia de la clase Arbol. 
*/ private static void huffman (String T, Arbol arbol) { createTree(col_priority(cont_Letters(T),arbol),arbol); printKeys(); prinTextCompress(T); printTextDescompress(T); } /** * Metodo de inicio. */ public static void main(String[] args) { Arbol arbol = new Arbol(); Scanner Read = new Scanner(System.in); String text; System.out.print("Ingrese el texto : "); text = Read.nextLine(); System.out.print("\n"); String k = ""; huffman(text,arbol); } }
the method 'printTextCompress' is added.
Algorithm_Of_Huffman/Huffman.java
the method 'printTextCompress' is added.
<ide><path>lgorithm_Of_Huffman/Huffman.java <ide> <ide> //Imprime Keys <ide> System.out.println("-Keys- \n" + keys); <del> } <add> } <add> <add> /** <add> * Este método realiza un reemplazo de cada caracter del texto <add> * por su respectiva key asignada al arbol; luego imprime el nuevo <add> * texto resultante del anterior proceso. <add> * <add> * @param T Texto <add> */ <add> public static void prinTextCompress(String T) { <add> char[] keys = T.toCharArray(); <add> //maar <add> //m 00,r 01,a 1, <add> //String key = new String(k); <add> char comp = k.charAt(0); <add> String keyComplete = ""; <add> for(int i = 0; i <= keys.length-1; i++) { <add> comp = keys[i]; <add> if(keys[i] == comp) { <add> keyComplete += k.substring((k.indexOf(comp)+4), k.indexOf(",",k.indexOf(comp))); <add> } <add> } <add> <add> //Imprime Texto Comprimido <add> System.out.println("Texto Comprimido: " + keyComplete); <add> } <ide> <ide> /** <ide> * Esta función retorna un cola de prioridades ordenada por
Java
mit
7001ccdd237668479e341bcef81b607728b4cb45
0
imhobo/iitkconv
package com.aps.iitkconv.activities; /** * Created by imhobo on 31/3/17. */ import android.app.ActionBar; import android.app.Activity; import android.app.SearchManager; import android.content.Context; import android.content.Intent; import android.graphics.Bitmap; import android.graphics.BitmapFactory; import android.graphics.Color; import android.net.Uri; import android.os.Bundle; import android.support.v4.view.MenuItemCompat; import android.support.v7.widget.LinearLayoutManager; import android.support.v7.widget.RecyclerView; import android.text.Html; import android.util.Log; import android.util.Pair; import android.view.Gravity; import android.view.Menu; import android.view.MenuInflater; import android.view.MenuItem; import android.view.View; import android.widget.LinearLayout; import android.widget.SearchView; import android.widget.TextView; import com.aps.iitconv.R; import com.aps.iitkconv.models.DBHandler_Grad; import com.aps.iitkconv.models.DataObject; import com.aps.iitkconv.models.MyRecyclerViewAdapter; import com.aps.iitkconv.models.Table_Awards; import com.aps.iitkconv.models.Table_Contact; import com.aps.iitkconv.models.Table_Grad_Students; import com.aps.iitkconv.models.Table_Guest; import com.aps.iitkconv.models.Table_Prev_Rec; import com.aps.iitkconv.models.Table_Schedule; import java.io.InputStream; import java.util.ArrayList; import java.util.Iterator; public class CardViewActivity extends MainActivity { private RecyclerView mRecyclerView; private RecyclerView.Adapter mAdapter; private RecyclerView.LayoutManager mLayoutManager; private static String LOG_TAG = "CardViewActivity"; private DBHandler_Grad db; //An integer representing which tab was clicked to reach this activity int value = -1; //Keeps track of which program and dept was clicked private ArrayList<DataObject> programs; private ArrayList<DataObject> depts; private int program = -1, dept = -1; //Keeps track of which award was clicked private ArrayList<DataObject> awards; private int 
awardNum = -1; //Keep track whether Previous Recipients was clicked for Honourary,Chief guest or President Gold tab boolean prevHon = false; boolean prevChief = false; boolean prevPres = false; //Handling the search button boolean hasSearchedGrad = false; boolean hasSearchedAwards = false; String query = ""; //Handling the back button int ch = -1; //Search private MenuItem searchMenuItem; private SearchView searchView; //Schedule page 1 or 2 int schedule_page = 1; int chief_page = 1; int hon_page = 1; String date = ""; String guestNameC = ""; String guestNameH = ""; private Context mContext; private Bundle b; private Menu menu; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); //Maintaining context mContext = this; //Clearing the existing UI frameLayout.removeAllViews(); // Log.d("ch before init", String.valueOf(ch)); b = getIntent().getExtras(); if(b != null) value = b.getInt("key"); //Handling back button ch = MainActivity.getChoice(); MainActivity.setChoice(value); // Log.d("ch onCreate", String.valueOf(ch)); // Log.d("value onCreate", String.valueOf(value)); displayData(); } @Override protected void onResume() { super.onResume(); Log.d("onResume", String.valueOf(value)); // displayData(); ((MyRecyclerViewAdapter) mAdapter).setOnItemClickListener(new MyRecyclerViewAdapter .MyClickListener() { @Override public void onItemClick(int position, View v) { Log.i(LOG_TAG, " Clicked on Item " + position ); Log.i(LOG_TAG, " Value " + value ); if(value == 1 && schedule_page == 1) { if(position == 0) date = "15 June"; else if(position == 1) date = "16 June"; schedule_page = 2; displayData(); } else if(value == 3 && awardNum == -1) { awardNum = position; displayData(); } else if(value == 3 && awardNum ==0 && position == (mAdapter.getItemCount()-1)) { Log.i(LOG_TAG, " Clicked on PrevPres " + position ); prevPres = true; displayData(); } else if(value==4 && program == -1) { program = position; displayData(); } else if(value==4 
&& program > -1 && dept == -1) { dept = position; displayData(); } else if(value==4 && program > -1 && dept > -1) { //Click on the student name } else if(value==5 && position == (mAdapter.getItemCount()-1) && hon_page == 1) { // Log.i(LOG_TAG, " Clicked on HonPrevious " + position ); prevHon = true; hon_page = 2; displayData(); } else if(value==5 && hon_page == 1) { // Log.i(LOG_TAG, " Clicked on HonPrevious " + position ); if(position == 0) guestNameH = "Professor Ajay Kumar Sood"; else if(position == 1) guestNameH = "Professor Mriganka Sur"; else if(position == 2) guestNameH = "P.T Usha"; else if(position == 3) guestNameH = "Dr. Monkombu Sambasivan Swaminathan"; hon_page = 2; displayData(); } else if(value==50 && position == (mAdapter.getItemCount()-1) && chief_page == 1) { // Log.i(LOG_TAG, " Clicked on ChiefPrevious " + position ); prevChief = true; chief_page = 2; displayData(); } else if(value==50 && chief_page == 1) { // Log.i(LOG_TAG, " Clicked on HonPrevious " + position ); if(position == 0) guestNameC = "Mr. Natarajan Chandrasekaran "; else if(position == 1) guestNameC = "Dr. Clayton Daniel Mote, Jr."; chief_page = 2; displayData(); } //Important Links else if(value == 10) { String url = ((MyRecyclerViewAdapter) mAdapter).getDataSet().get(position).getmText2(); Log.d("url", url); Intent i = new Intent(Intent.ACTION_VIEW); i.setData(Uri.parse(url)); startActivity(i); } else if(value == 9) { String phone = db.getContacts().get(position).getNumber(); Intent intent = new Intent(Intent.ACTION_CALL); intent.setData(Uri.parse("tel:" + phone)); mContext.startActivity(intent); } } }); } //Possibly the worst way to implement the back button feature. 
//Recreates the view when back is pressed @Override public void onBackPressed() { Log.d("CDA", "onBackPressed Called"); //Handling the back button // Log.d("ch onBack", String.valueOf(ch)); // Log.d("value onBack", String.valueOf(value)); if(value == 4 && hasSearchedGrad) { hasSearchedGrad = false; displayData(); return; } if(value == 3 && hasSearchedAwards) { hasSearchedAwards = false; displayData(); return; } if((value==1 && schedule_page == 1) || value ==2 || (value ==5 && !prevHon && hon_page == 1)|| value == 9 || value == 10 || (value == 50 && !prevChief && chief_page == 1)) { MainActivity.setChoice(ch); finish(); return; } else if(value == 50 && chief_page == 2) { chief_page = 1; prevChief = false; displayData(); } else if(value == 5 && hon_page == 2) { hon_page = 1; prevHon = false; displayData(); } else if(value == 1 && schedule_page == 2) { schedule_page = 1; displayData(); } else if(value == 5 && prevHon) { prevHon = false; displayData(); } else if(value == 50 && prevChief) { prevChief = false; displayData(); } else if(value==3 && awardNum == -1) { MainActivity.setChoice(ch); finish(); return; } else if(value==3 && awardNum > -1 && !prevPres) { awardNum = -1; displayData(); } else if(value==3 && awardNum > -1 && prevPres) { prevPres = false; displayData(); } else if(value==4 && program == -1) { MainActivity.setChoice(ch); finish(); return; } else if(value==4 && program > -1 && dept == -1) { program = -1; displayData(); } else if(value==4 && program > -1 && dept > -1) { dept = -1; displayData(); } } //---------------------------------------------------------------Methods to get data------------------------------------------------------- //Get all events private ArrayList<DataObject> getSchedule(String date) { ArrayList<Table_Schedule> events = new ArrayList<Table_Schedule>(); ArrayList<DataObject> results = new ArrayList<DataObject>(); events = (ArrayList) db.getSchedule(date); int size = events.size(); Log.d("Size of events : ", String.valueOf(size)); for 
(int i = 0 ; i< size; i++) { Table_Schedule t = events.get(i); DataObject obj= new DataObject(t.getEvent(),t.getVenue(), t.getDate(), t.getTime()); results.add(obj); } return results; } //Create first page of schedule private ArrayList<DataObject> schedule_page1() { ArrayList<DataObject> results = new ArrayList<DataObject>(); DataObject obj= new DataObject("Session - 1", "Auditorium, IIT Kanpur", "15 June, 2017", "09:00 am to 13:35 pm"); results.add(obj); obj= new DataObject("Session - 2", "Auditorium, IIT Kanpur", "16 June, 2017", "09:00 am to 13:35 pm"); results.add(obj); return results; } //Create first page of chief_guests private ArrayList<DataObject> chief_page1() { ArrayList<DataObject> results = new ArrayList<DataObject>(); DataObject obj= new DataObject("","Mr. Natarajan Chandrasekaran", "","","Chairman, TATA SONS","15 June, 2017\nSession - 1\nAuditorium, IIT Kanpur"); int i = mContext.getResources().getIdentifier("img1","raw", mContext.getPackageName()); InputStream input = mContext.getResources().openRawResource(i); Bitmap myBitmap = BitmapFactory.decodeStream(input); obj.setmImg(myBitmap); results.add(obj); obj= new DataObject("","Professor C. D. 
Mote, Jr.", "","","President, National Academy of Engineering, USA","16 June, 2017\nSession - 2\nAuditorium, IIT Kanpur"); i = mContext.getResources().getIdentifier("img2","raw", mContext.getPackageName()); input = mContext.getResources().openRawResource(i); myBitmap = BitmapFactory.decodeStream(input); obj.setmImg(myBitmap); results.add(obj); obj= new DataObject("Previous Guests"); results.add(obj); return results; } //Create first page of hon_guests private ArrayList<DataObject> hon_page1() { ArrayList<DataObject> results = new ArrayList<DataObject>(); DataObject obj= new DataObject("","Professor Ajay Kumar Sood", "","","HONORARY DEGREE (HONORIS CAUSA)\nFiftieth Convocation, 2017"); int i = mContext.getResources().getIdentifier("hon1","raw", mContext.getPackageName()); InputStream input = mContext.getResources().openRawResource(i); Bitmap myBitmap = BitmapFactory.decodeStream(input); obj.setmImg(myBitmap); results.add(obj); obj= new DataObject("","Professor Mriganka Sur", "","","HONORARY DEGREE (HONORIS CAUSA)\nFiftieth Convocation, 2017"); i = mContext.getResources().getIdentifier("hon2","raw", mContext.getPackageName()); input = mContext.getResources().openRawResource(i); myBitmap = BitmapFactory.decodeStream(input); obj.setmImg(myBitmap); results.add(obj); obj= new DataObject("","Ms P.T Usha", "","","HONORARY DEGREE (HONORIS CAUSA)\nFiftieth Convocation, 2017"); i = mContext.getResources().getIdentifier("hon3","raw", mContext.getPackageName()); input = mContext.getResources().openRawResource(i); myBitmap = BitmapFactory.decodeStream(input); obj.setmImg(myBitmap); results.add(obj); obj= new DataObject("","Dr. 
M S Swaminathan ", "","","HONORARY DEGREE (HONORIS CAUSA)\nFiftieth Convocation, 2017"); i = mContext.getResources().getIdentifier("hon4","raw", mContext.getPackageName()); input = mContext.getResources().openRawResource(i); myBitmap = BitmapFactory.decodeStream(input); obj.setmImg(myBitmap); results.add(obj); obj= new DataObject("Previous Recipients"); results.add(obj); return results; } //Get all links private ArrayList<DataObject> getLinks() { ArrayList<Pair<String,String>> links = new ArrayList<Pair<String,String>>(); ArrayList<DataObject> results = new ArrayList<DataObject>(); links = (ArrayList) db.getLinks(); int size = links.size(); for (int i = 0 ; i< size; i++) { Pair<String,String> t = links.get(i); DataObject obj= new DataObject(t.first, t.second); results.add(obj); } return results; } //Get all announcements private ArrayList<DataObject> getAnnouncements() { ArrayList results = new ArrayList<DataObject>(); ArrayList<String> tempHolder = new ArrayList<String>(); tempHolder = (ArrayList) db.getAnnouncements(); for (String s : tempHolder) { results.add(new DataObject(s)); } if(tempHolder.size()==0) { TextView txt1 = new TextView(CardViewActivity.this); txt1.setText("No announcements yet."); txt1.setGravity(Gravity.CENTER_HORIZONTAL); frameLayout.addView(txt1); } return results; } //Get all contacts private ArrayList<DataObject> getContacts() { ArrayList<Table_Contact> contacts = new ArrayList<Table_Contact>(); ArrayList<DataObject> results = new ArrayList<DataObject>(); contacts = (ArrayList<Table_Contact>) db.getContacts(); int size = contacts.size(); Log.d("Size of Contacts : ", String.valueOf(size)); for (int i = 0 ; i< size; i++) { Table_Contact t = contacts.get(i); DataObject obj= new DataObject(t.getName(),t.getNumber(), t.getTransport()); //Log.d("getStudents2",String.valueOf(t.getId())+t.getEvent()+t.getName()+award+t.getTime()+t.getDept()+t.getProgram()+t.getYear()); results.add(obj); } return results; } //Get all Guests of a certain type private 
ArrayList<DataObject> getGuests(String type, String guestName) { ArrayList<Table_Guest> g = new ArrayList<Table_Guest>(); ArrayList<DataObject> results = new ArrayList<DataObject>(); g = (ArrayList<Table_Guest>) db.getGuests(type); int size = g.size(); for (int i = 0 ; i< size; i++) { Table_Guest t = g.get(i); Bitmap bmp = db.getImage(t.getPicture()); /* if(bmp==null) Log.d("image from DB", "NULL"); else Log.d("image from DB", "NOT NULL"); */ DataObject obj= new DataObject(bmp, t.getName(),t.getTitle(), t.getYear(), t.getPicture(), t.getDescription()); //Log.d("getStudents2",String.valueOf(t.getId())+t.getEvent()+t.getName()+award+t.getTime()+t.getDept()+t.getProgram()+t.getYear()); if(t.getName().equals(guestName)) results.add(obj); } // DataObject obj= new DataObject("Previous Recipients"); // results.add(obj); return results; } //Get all Awards private ArrayList<DataObject> getAwards() { ArrayList results = new ArrayList<DataObject>(); ArrayList<String> tempHolder = new ArrayList<String>(); tempHolder = (ArrayList) db.getAwards2(); for (String i : tempHolder) { int num = db.getStudentCountInAward(i); if(!i.equals("")) results.add(new DataObject(i, String.valueOf(num))); } return results; } //Get all Programs private ArrayList<DataObject> getPrograms() { ArrayList results = new ArrayList<DataObject>(); ArrayList<String> tempHolder = new ArrayList<String>(); tempHolder = (ArrayList) db.getProgram1(); int k = 0; for (String i : tempHolder) { int res = db.getStudentCountInProgram(i); if(!i.equals("")) results.add(new DataObject(i, String.valueOf(res))); // Log.d("Programs : ", i + " : " + k); k++; } // Log.d("Size of k : ", String.valueOf(k)); // Log.d("Size of program1 : ", String.valueOf(results.size())); return results; } //Get Departments for Graduating Students private ArrayList<DataObject> getDept1(String program) { ArrayList results = new ArrayList<DataObject>(); ArrayList<String> tempHolder = new ArrayList<String>(); tempHolder = (ArrayList) 
db.getDept1(program); for (String i : tempHolder) { int res = db.getStudentCountInDept(program, i); results.add(new DataObject(i, String.valueOf(res))); } return results; } //Get Student for Graduating section private ArrayList<DataObject> getStudents1(String program, String dept) { ArrayList<Table_Grad_Students> students = new ArrayList<Table_Grad_Students>(); ArrayList<DataObject> results = new ArrayList<DataObject>(); String query = "SELECT * FROM Table_Grad_Students WHERE program = " + "'" + program + "'" + " AND dept = " + "'" + dept + "'"; students = (ArrayList) db.runSelectQuery1(query); int size = students.size(); Log.d("Size of students : ", String.valueOf(size)); for (int i = 0 ; i< size; i++) { Table_Grad_Students t = students.get(i); DataObject obj= new DataObject(t.getName(),t.getRoll(), t.getAdvisers(), t.getDescription()); results.add(obj); } return results; } //Get student for Awards section private ArrayList<DataObject> getStudents2(String award) { ArrayList<Table_Awards> students = new ArrayList<Table_Awards>(); ArrayList<DataObject> results = new ArrayList<DataObject>(); students = (ArrayList) db.runSelectQuery2(award); int size = students.size(); Log.d("Size of students : ", String.valueOf(size)); for (int i = 0 ; i< size; i++) { Table_Awards t = students.get(i); Bitmap bmp = db.getImage(t.getPicture()); /* if(bmp==null) Log.d("image from DB", "NULL"); else Log.d("image from DB", "NOT NULL"); */ DataObject obj= new DataObject(bmp, t.getRoll(), t.getName(), award, t.getDescription(), t.getComment(), t.getProgram(), t.getYear()); Log.d("Checking values : ", "Roll-"+t.getRoll() + ";" + "Name-" + t.getName() + ";" + "Award-"+ award + ";"+ "Desc-"+t.getDescription() + ";"+ "Comment-" + t.getComment() + ";" + "Program-" + t.getProgram() + ";" + "Dept-" + t.getDept() + ";" + "Year-" + t.getYear()); results.add(obj); } // Previous Recipients for award // if(awardNum == 0 && !hasSearchedAwards) // { // DataObject obj= new DataObject("Previous 
Recipients"); // results.add(obj); // } return results; } //-------------------------------------------------------------------Get searched data------------------------------------------------------------------------- protected ArrayList<DataObject> getSearchedGrad(String q) { ArrayList<Table_Grad_Students> students = new ArrayList<Table_Grad_Students>(); ArrayList<DataObject> results = new ArrayList<DataObject>(); String query = "SELECT * FROM Table_Grad_Students WHERE name like " + "'" + "%" + q + "%" + "'"; students = (ArrayList) db.runSelectQuery1(query); int size = students.size(); Log.d("Size of students : ", String.valueOf(size)); for (int i = 0 ; i< size; i++) { Table_Grad_Students t = students.get(i); DataObject obj= new DataObject(t.getName(),t.getRoll(), t.getAdvisers(), t.getDescription(), t.getProgram(), t.getDept()); Log.d("Program : ", t.getProgram()); Log.d("Program : ", t.getName()); Log.d("Program : ", t.getAdvisers()); Log.d("Program : ", t.getDescription()); Log.d("Program : ", t.getProgram()); Log.d("Program : ", t.getDept()); results.add(obj); } return results; } protected ArrayList<DataObject> getSearchedAwards(String q) { ArrayList<Table_Awards> students = new ArrayList<Table_Awards>(); ArrayList<DataObject> results = new ArrayList<DataObject>(); students = (ArrayList) db.getStudentsbyName(q); int size = students.size(); Log.d("Size of students : ", String.valueOf(size)); for (int i = 0 ; i< size; i++) { Table_Awards t = students.get(i); Bitmap bmp = db.getImage(t.getPicture()); DataObject obj= new DataObject(bmp, t.getRoll(), t.getName(), t.getAward(), t.getDescription(), t.getComment(), t.getProgram(), t.getDept(), t.getYear(), t.getPicture()); Log.d("Name : ", t.getName()); Log.d("Award : ", t.getAward()); results.add(obj); } return results; } //-------------------------------------------------------------------------------------------------------------------------------------------------------------- protected void displayData() { // 
Log.d("displayData init", String.valueOf(value)); // b = getIntent().getExtras(); // if(b != null) // value = b.getInt("key"); // Log.d("displayData after init", String.valueOf(value)); //Clearing the existing UI frameLayout.removeAllViews(); //Instance used to read data db = DBHandler_Grad.getInstance(this); //Set different card views here. if((value == 3 && awardNum > -1 && !prevPres && !hasSearchedAwards) || (chief_page == 1 && value == 50) || (hon_page == 1 && value == 5)) { getLayoutInflater().inflate(R.layout.card_view_award, frameLayout); } else { getLayoutInflater().inflate(R.layout.card_view_generic, frameLayout); } //This should be done only after getLayoutInflater is called on frameLayout mRecyclerView = (RecyclerView) findViewById(R.id.my_recycler_view); mRecyclerView.setHasFixedSize(true); mLayoutManager = new LinearLayoutManager(CardViewActivity.this); mRecyclerView.setLayoutManager(mLayoutManager); // Log.d("Value","val=" + value) ; // Schedule if(value==1) { this.setTitle("Schedule"); if(schedule_page == 1) { mAdapter = new MyRecyclerViewAdapter(schedule_page1(), 1); } else if(schedule_page == 2) mAdapter = new MyRecyclerViewAdapter(getSchedule(date),1); } //Searched in awards else if(value == 3 && hasSearchedAwards) { this.setTitle(Html.fromHtml(getString(R.string.app_title))); mAdapter = new MyRecyclerViewAdapter(getSearchedAwards(query),1000); } //Searched for grad students else if(value == 4 && hasSearchedGrad) { this.setTitle(Html.fromHtml(getString(R.string.app_title))); mAdapter = new MyRecyclerViewAdapter(getSearchedGrad(query),999); } // Announcements else if(value==2) { mAdapter = new MyRecyclerViewAdapter(getAnnouncements(), 2); } //List of Awards else if(value==3 && awardNum == -1) { this.setTitle("Medals"); awards = getAwards(); mAdapter = new MyRecyclerViewAdapter(awards, 3); } //Some Award clicked else if(value==3 && awardNum > -1 && !prevPres) { String curAward = awards.get(awardNum).getmText1(); String curDesc = 
db.getDesc2(curAward); ArrayList<DataObject> students = getStudents2(curAward); TextView award = (TextView) findViewById(R.id.textViewA1); TextView desc = (TextView) findViewById(R.id.textViewA2); award.setText(curAward); //desc.setText(curDesc); //If a student with a certain award has a picture associated, then we assume that everyone in that category has a picture String imgName = db.getImageName(students.get(0).getmText2(), curAward); if(!imgName.equals("")) mAdapter = new MyRecyclerViewAdapter(students, 30); else mAdapter = new MyRecyclerViewAdapter(students, 31); } //Prev Recipient in Pres Gold Medal else if(value ==3 && awardNum == 0 && prevPres) { this.setTitle(Html.fromHtml(getString(R.string.app_title))); mAdapter = new MyRecyclerViewAdapter(getPrevPresExcel(), 300); } //Honourary Degrees and Chief Guests else if((value==5 && !prevHon) || (value==50 && !prevChief)) { if (value == 5) { this.setTitle("Honorary"); if (hon_page == 1) { TextView award = (TextView) findViewById(R.id.textViewA1); award.setText("Honorary Degrees"); mAdapter = new MyRecyclerViewAdapter(hon_page1(), 519); } else mAdapter = new MyRecyclerViewAdapter(getGuests("H", guestNameH), 5); } else if (value == 50) { this.setTitle(Html.fromHtml(getString(R.string.app_title))); if(chief_page == 1) { TextView award = (TextView) findViewById(R.id.textViewA1); award.setText("Chief Guests"); mAdapter = new MyRecyclerViewAdapter(chief_page1(), 509); } else mAdapter = new MyRecyclerViewAdapter(getGuests("C", guestNameC), 50); } } //Honourary Degrees and Chief Guests with Prev Recipients else if((value==5 && prevHon) || (value==50 && prevChief)) { this.setTitle(Html.fromHtml(getString(R.string.app_title))); if(value == 5) mAdapter = new MyRecyclerViewAdapter(getPrevHonExcel(), 51); else if(value == 50) mAdapter = new MyRecyclerViewAdapter(getPrevChiefExcel(), 501); } //Taxi Contacts else if(value==9) { this.setTitle("Contacts"); ArrayList<Table_Contact> contacts = (ArrayList<Table_Contact>) 
db.getContacts(); mAdapter = new MyRecyclerViewAdapter(getContacts(), 9); } //List of Programs for Graduating Students with the number of students in each of them. else if(value==4 && program == -1) { CardViewActivity.this.setTitle("Degrees"); programs = getPrograms(); Log.d("Size of Programs : ", String.valueOf(programs.size())); mAdapter = new MyRecyclerViewAdapter(programs,4); } //List of Students for Graduating Students when Program already clicked with the number of students in each of them. else if(value == 4 && program > -1 && dept == -1) { String curDep = programs.get(program).getmText1(); CardViewActivity.this.setTitle(curDep); depts = getDept1(curDep); mAdapter = new MyRecyclerViewAdapter(depts,40); } //List of Students for Graduating Students when Program and Dept already clicked else if(value == 4 && program > -1 && dept > -1) { String curDep = programs.get(program).getmText1(); String curBr = depts.get(dept).getmText1(); CardViewActivity.this.setTitle(curDep + " -> " + curBr); ArrayList<DataObject> students = getStudents1(curDep, curBr); //Log.d("Branch", programs.get(program).getmText1() + ":"+ depts.get(dept).getmText1()); if(!curDep.equals("PhD")) mAdapter = new MyRecyclerViewAdapter(students,400); else mAdapter = new MyRecyclerViewAdapter(students,401); } //List of Useful links else if(value==10) { this.setTitle("Other Links"); mAdapter = new MyRecyclerViewAdapter(getLinks(),10); } mRecyclerView.setAdapter(mAdapter); } //-----------------------------------------------------------------------------Previous Year Data Parsing Functions--------------------------------------------------- private ArrayList<DataObject> getPrevHonExcel() { ArrayList<DataObject> result = new ArrayList<DataObject>(); ArrayList<Table_Prev_Rec> prevList = new ArrayList<Table_Prev_Rec>(); prevList = (ArrayList<Table_Prev_Rec>) db.getPrevRec("H"); //Also reads the first row of the excel file. 
i.e Name,Roll number etc for (Iterator<Table_Prev_Rec> rit = prevList.iterator(); rit.hasNext(); ) { Table_Prev_Rec p = rit.next(); // Log.d("ExcelData", row.getCell(0, Row.CREATE_NULL_AS_BLANK).getStringCellValue()); DataObject g = new DataObject(p.getName(),p.getConvo_num(),p.getDesignation(),p.getComment()); result.add(g); } return result; } private ArrayList<DataObject> getPrevChiefExcel() { ArrayList<DataObject> result = new ArrayList<DataObject>(); ArrayList<Table_Prev_Rec> prevList = new ArrayList<Table_Prev_Rec>(); prevList = (ArrayList<Table_Prev_Rec>) db.getPrevRec("C"); //Also reads the first row of the excel file. i.e Name,Roll number etc for (Iterator<Table_Prev_Rec> rit = prevList.iterator(); rit.hasNext(); ) { Table_Prev_Rec p = rit.next(); // Log.d("ExcelData", row.getCell(0, Row.CREATE_NULL_AS_BLANK).getStringCellValue()); DataObject g = new DataObject(p.getName(),p.getConvo_num(),p.getComment(), p.getDesignation()); result.add(g); } return result; } private ArrayList<DataObject> getPrevPresExcel() { ArrayList<DataObject> result = new ArrayList<DataObject>(); ArrayList<Table_Prev_Rec> prevList = new ArrayList<Table_Prev_Rec>(); prevList = (ArrayList<Table_Prev_Rec>) db.getPrevRec("S"); //Also reads the first row of the excel file. 
i.e Name,Roll number etc for (Iterator<Table_Prev_Rec> rit = prevList.iterator(); rit.hasNext(); ) { Table_Prev_Rec p = rit.next(); // Log.d("ExcelData", row.getCell(0, Row.CREATE_NULL_AS_BLANK).getStringCellValue()); DataObject g = new DataObject(p.getName(),p.getDesignation()); result.add(g); } return result; } //-------------------------------------------------------------------------------------Creating and handling the search bar--------------------------------------------------- @Override public boolean onCreateOptionsMenu(Menu menu) { if(value != 3 && value != 4)return false; this.menu = menu; MenuInflater menuInflater = getMenuInflater(); menuInflater.inflate(R.menu.search_menu, menu); MenuItem searchItem = menu.findItem(R.id.search); searchView = (SearchView) searchItem.getActionView(); searchView.setQueryHint("Enter student name"); //Expanding the search view to take complete width searchView.setMaxWidth( Integer.MAX_VALUE ); MenuItemCompat.expandActionView(searchItem); SearchManager searchManager = (SearchManager) getSystemService(Context.SEARCH_SERVICE); searchView.setSearchableInfo(searchManager.getSearchableInfo(getComponentName())); return super.onCreateOptionsMenu(menu); } @Override protected void onNewIntent(Intent intent) { Log.d("New Intent in CardView", "Reached Here"); if (Intent.ACTION_SEARCH.equals(intent.getAction())) { String q = intent.getStringExtra(SearchManager.QUERY); Log.d("Search query", q); if(value == 3) hasSearchedAwards = true; else if(value == 4) hasSearchedGrad = true; query = q; displayData(); } else { Log.d("New Intent in CardView", "Inside Else"); b = intent.getExtras(); if(b != null) value = b.getInt("key"); Log.d("Value : ", String.valueOf(value)); displayData(); } } }
app/src/main/java/com/aps/iitkconv/activities/CardViewActivity.java
package com.aps.iitkconv.activities; /** * Created by imhobo on 31/3/17. */ import android.app.ActionBar; import android.app.Activity; import android.app.SearchManager; import android.content.Context; import android.content.Intent; import android.graphics.Bitmap; import android.graphics.BitmapFactory; import android.graphics.Color; import android.net.Uri; import android.os.Bundle; import android.support.v4.view.MenuItemCompat; import android.support.v7.widget.LinearLayoutManager; import android.support.v7.widget.RecyclerView; import android.text.Html; import android.util.Log; import android.util.Pair; import android.view.Gravity; import android.view.Menu; import android.view.MenuInflater; import android.view.MenuItem; import android.view.View; import android.widget.LinearLayout; import android.widget.SearchView; import android.widget.TextView; import com.aps.iitconv.R; import com.aps.iitkconv.models.DBHandler_Grad; import com.aps.iitkconv.models.DataObject; import com.aps.iitkconv.models.MyRecyclerViewAdapter; import com.aps.iitkconv.models.Table_Awards; import com.aps.iitkconv.models.Table_Contact; import com.aps.iitkconv.models.Table_Grad_Students; import com.aps.iitkconv.models.Table_Guest; import com.aps.iitkconv.models.Table_Prev_Rec; import com.aps.iitkconv.models.Table_Schedule; import java.io.InputStream; import java.util.ArrayList; import java.util.Iterator; public class CardViewActivity extends MainActivity { private RecyclerView mRecyclerView; private RecyclerView.Adapter mAdapter; private RecyclerView.LayoutManager mLayoutManager; private static String LOG_TAG = "CardViewActivity"; private DBHandler_Grad db; //An integer representing which tab was clicked to reach this activity int value = -1; //Keeps track of which program and dept was clicked private ArrayList<DataObject> programs; private ArrayList<DataObject> depts; private int program = -1, dept = -1; //Keeps track of which award was clicked private ArrayList<DataObject> awards; private int 
awardNum = -1; //Keep track whether Previous Recipients was clicked for Honourary,Chief guest or President Gold tab boolean prevHon = false; boolean prevChief = false; boolean prevPres = false; //Handling the search button boolean hasSearchedGrad = false; boolean hasSearchedAwards = false; String query = ""; //Handling the back button int ch = -1; //Search private MenuItem searchMenuItem; private SearchView searchView; //Schedule page 1 or 2 int schedule_page = 1; int chief_page = 1; int hon_page = 1; String date = ""; String guestNameC = ""; String guestNameH = ""; private Context mContext; private Bundle b; private Menu menu; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); //Maintaining context mContext = this; //Clearing the existing UI frameLayout.removeAllViews(); // Log.d("ch before init", String.valueOf(ch)); b = getIntent().getExtras(); if(b != null) value = b.getInt("key"); //Handling back button ch = MainActivity.getChoice(); MainActivity.setChoice(value); // Log.d("ch onCreate", String.valueOf(ch)); // Log.d("value onCreate", String.valueOf(value)); displayData(); } @Override protected void onResume() { super.onResume(); Log.d("onResume", String.valueOf(value)); // displayData(); ((MyRecyclerViewAdapter) mAdapter).setOnItemClickListener(new MyRecyclerViewAdapter .MyClickListener() { @Override public void onItemClick(int position, View v) { Log.i(LOG_TAG, " Clicked on Item " + position ); Log.i(LOG_TAG, " Value " + value ); if(value == 1 && schedule_page == 1) { if(position == 0) date = "15 June"; else if(position == 1) date = "16 June"; schedule_page = 2; displayData(); } else if(value == 3 && awardNum == -1) { awardNum = position; displayData(); } else if(value == 3 && awardNum ==0 && position == (mAdapter.getItemCount()-1)) { Log.i(LOG_TAG, " Clicked on PrevPres " + position ); prevPres = true; displayData(); } else if(value==4 && program == -1) { program = position; displayData(); } else if(value==4 
&& program > -1 && dept == -1) { dept = position; displayData(); } else if(value==4 && program > -1 && dept > -1) { //Click on the student name } else if(value==5 && position == (mAdapter.getItemCount()-1) && hon_page == 1) { // Log.i(LOG_TAG, " Clicked on HonPrevious " + position ); prevHon = true; hon_page = 2; displayData(); } else if(value==5 && hon_page == 1) { // Log.i(LOG_TAG, " Clicked on HonPrevious " + position ); if(position == 0) guestNameH = "Professor Ajay Kumar Sood"; else if(position == 1) guestNameH = "Professor Mriganka Sur"; else if(position == 2) guestNameH = "P.T Usha"; else if(position == 3) guestNameH = "Dr. Monkombu Sambasivan Swaminathan"; hon_page = 2; displayData(); } else if(value==50 && position == (mAdapter.getItemCount()-1) && chief_page == 1) { // Log.i(LOG_TAG, " Clicked on ChiefPrevious " + position ); prevChief = true; chief_page = 2; displayData(); } else if(value==50 && chief_page == 1) { // Log.i(LOG_TAG, " Clicked on HonPrevious " + position ); if(position == 0) guestNameC = "Mr. Natarajan Chandrasekaran "; else if(position == 1) guestNameC = "Dr. Clayton Daniel Mote, Jr."; chief_page = 2; displayData(); } //Important Links else if(value == 10) { String url = ((MyRecyclerViewAdapter) mAdapter).getDataSet().get(position).getmText2(); Log.d("url", url); Intent i = new Intent(Intent.ACTION_VIEW); i.setData(Uri.parse(url)); startActivity(i); } else if(value == 9) { String phone = db.getContacts().get(position).getNumber(); Intent intent = new Intent(Intent.ACTION_CALL); intent.setData(Uri.parse("tel:" + phone)); mContext.startActivity(intent); } } }); } //Possibly the worst way to implement the back button feature. 
//Recreates the view when back is pressed @Override public void onBackPressed() { Log.d("CDA", "onBackPressed Called"); //Handling the back button // Log.d("ch onBack", String.valueOf(ch)); // Log.d("value onBack", String.valueOf(value)); if(value == 4 && hasSearchedGrad) { hasSearchedGrad = false; displayData(); return; } if(value == 3 && hasSearchedAwards) { hasSearchedAwards = false; displayData(); return; } if((value==1 && schedule_page == 1) || value ==2 || (value ==5 && !prevHon && hon_page == 1)|| value == 9 || value == 10 || (value == 50 && !prevChief && chief_page == 1)) { MainActivity.setChoice(ch); finish(); return; } else if(value == 50 && chief_page == 2) { chief_page = 1; prevChief = false; displayData(); } else if(value == 5 && hon_page == 2) { hon_page = 1; prevHon = false; displayData(); } else if(value == 1 && schedule_page == 2) { schedule_page = 1; displayData(); } else if(value == 5 && prevHon) { prevHon = false; displayData(); } else if(value == 50 && prevChief) { prevChief = false; displayData(); } else if(value==3 && awardNum == -1) { MainActivity.setChoice(ch); finish(); return; } else if(value==3 && awardNum > -1 && !prevPres) { awardNum = -1; displayData(); } else if(value==3 && awardNum > -1 && prevPres) { prevPres = false; displayData(); } else if(value==4 && program == -1) { MainActivity.setChoice(ch); finish(); return; } else if(value==4 && program > -1 && dept == -1) { program = -1; displayData(); } else if(value==4 && program > -1 && dept > -1) { dept = -1; displayData(); } } //---------------------------------------------------------------Methods to get data------------------------------------------------------- //Get all events private ArrayList<DataObject> getSchedule(String date) { ArrayList<Table_Schedule> events = new ArrayList<Table_Schedule>(); ArrayList<DataObject> results = new ArrayList<DataObject>(); events = (ArrayList) db.getSchedule(date); int size = events.size(); Log.d("Size of events : ", String.valueOf(size)); for 
(int i = 0 ; i< size; i++) { Table_Schedule t = events.get(i); DataObject obj= new DataObject(t.getEvent(),t.getVenue(), t.getDate(), t.getTime()); results.add(obj); } return results; } //Create first page of schedule private ArrayList<DataObject> schedule_page1() { ArrayList<DataObject> results = new ArrayList<DataObject>(); DataObject obj= new DataObject("Session - 1", "Auditorium, IIT Kanpur", "15 June, 2017", "09:00 am to 13:35 pm"); results.add(obj); obj= new DataObject("Session - 2", "Auditorium, IIT Kanpur", "16 June, 2017", "09:00 am to 13:35 pm"); results.add(obj); return results; } //Create first page of chief_guests private ArrayList<DataObject> chief_page1() { ArrayList<DataObject> results = new ArrayList<DataObject>(); DataObject obj= new DataObject("","Mr. Natarajan Chandrasekaran", "","","Chairman, TATA SONS","15 June, 2017\nSession - 1\nAuditorium, IIT Kanpur"); int i = mContext.getResources().getIdentifier("img1","raw", mContext.getPackageName()); InputStream input = mContext.getResources().openRawResource(i); Bitmap myBitmap = BitmapFactory.decodeStream(input); obj.setmImg(myBitmap); results.add(obj); obj= new DataObject("","Professor C. D. 
Mote, Jr.", "","","President, National Academy of Engineering, USA","16 June, 2017\nSession - 2\nAuditorium, IIT Kanpur"); i = mContext.getResources().getIdentifier("img2","raw", mContext.getPackageName()); input = mContext.getResources().openRawResource(i); myBitmap = BitmapFactory.decodeStream(input); obj.setmImg(myBitmap); results.add(obj); obj= new DataObject("Previous Guests"); results.add(obj); return results; } //Create first page of hon_guests private ArrayList<DataObject> hon_page1() { ArrayList<DataObject> results = new ArrayList<DataObject>(); DataObject obj= new DataObject("","Professor Ajay Kumar Sood", "","","HONORARY DEGREE (HONORIS CAUSA)\nFiftieth Convocation, 2017"); int i = mContext.getResources().getIdentifier("hon1","raw", mContext.getPackageName()); InputStream input = mContext.getResources().openRawResource(i); Bitmap myBitmap = BitmapFactory.decodeStream(input); obj.setmImg(myBitmap); results.add(obj); obj= new DataObject("","Professor Mriganka Sur", "","","HONORARY DEGREE (HONORIS CAUSA)\nFiftieth Convocation, 2017"); i = mContext.getResources().getIdentifier("hon2","raw", mContext.getPackageName()); input = mContext.getResources().openRawResource(i); myBitmap = BitmapFactory.decodeStream(input); obj.setmImg(myBitmap); results.add(obj); obj= new DataObject("","Ms P.T Usha", "","","HONORARY DEGREE (HONORIS CAUSA)\nFiftieth Convocation, 2017"); i = mContext.getResources().getIdentifier("hon3","raw", mContext.getPackageName()); input = mContext.getResources().openRawResource(i); myBitmap = BitmapFactory.decodeStream(input); obj.setmImg(myBitmap); results.add(obj); obj= new DataObject("","Dr. 
M S Swaminathan ", "","","HONORARY DEGREE (HONORIS CAUSA)\nFiftieth Convocation, 2017"); i = mContext.getResources().getIdentifier("hon4","raw", mContext.getPackageName()); input = mContext.getResources().openRawResource(i); myBitmap = BitmapFactory.decodeStream(input); obj.setmImg(myBitmap); results.add(obj); obj= new DataObject("Previous Recipients"); results.add(obj); return results; } //Get all links private ArrayList<DataObject> getLinks() { ArrayList<Pair<String,String>> links = new ArrayList<Pair<String,String>>(); ArrayList<DataObject> results = new ArrayList<DataObject>(); links = (ArrayList) db.getLinks(); int size = links.size(); for (int i = 0 ; i< size; i++) { Pair<String,String> t = links.get(i); DataObject obj= new DataObject(t.first, t.second); results.add(obj); } return results; } //Get all announcements private ArrayList<DataObject> getAnnouncements() { ArrayList results = new ArrayList<DataObject>(); ArrayList<String> tempHolder = new ArrayList<String>(); tempHolder = (ArrayList) db.getAnnouncements(); for (String s : tempHolder) { results.add(new DataObject(s)); } if(tempHolder.size()==0) { TextView txt1 = new TextView(CardViewActivity.this); txt1.setText("No announcements yet."); txt1.setGravity(Gravity.CENTER_HORIZONTAL); frameLayout.addView(txt1); } return results; } //Get all contacts private ArrayList<DataObject> getContacts() { ArrayList<Table_Contact> contacts = new ArrayList<Table_Contact>(); ArrayList<DataObject> results = new ArrayList<DataObject>(); contacts = (ArrayList<Table_Contact>) db.getContacts(); int size = contacts.size(); Log.d("Size of Contacts : ", String.valueOf(size)); for (int i = 0 ; i< size; i++) { Table_Contact t = contacts.get(i); DataObject obj= new DataObject(t.getName(),t.getNumber(), t.getTransport()); //Log.d("getStudents2",String.valueOf(t.getId())+t.getEvent()+t.getName()+award+t.getTime()+t.getDept()+t.getProgram()+t.getYear()); results.add(obj); } return results; } //Get all Guests of a certain type private 
ArrayList<DataObject> getGuests(String type, String guestName) { ArrayList<Table_Guest> g = new ArrayList<Table_Guest>(); ArrayList<DataObject> results = new ArrayList<DataObject>(); g = (ArrayList<Table_Guest>) db.getGuests(type); int size = g.size(); for (int i = 0 ; i< size; i++) { Table_Guest t = g.get(i); Bitmap bmp = db.getImage(t.getPicture()); /* if(bmp==null) Log.d("image from DB", "NULL"); else Log.d("image from DB", "NOT NULL"); */ DataObject obj= new DataObject(bmp, t.getName(),t.getTitle(), t.getYear(), t.getPicture(), t.getDescription()); //Log.d("getStudents2",String.valueOf(t.getId())+t.getEvent()+t.getName()+award+t.getTime()+t.getDept()+t.getProgram()+t.getYear()); if(t.getName().equals(guestName)) results.add(obj); } // DataObject obj= new DataObject("Previous Recipients"); // results.add(obj); return results; } //Get all Awards private ArrayList<DataObject> getAwards() { ArrayList results = new ArrayList<DataObject>(); ArrayList<String> tempHolder = new ArrayList<String>(); tempHolder = (ArrayList) db.getAwards2(); for (String i : tempHolder) { int num = db.getStudentCountInAward(i); results.add(new DataObject(i, String.valueOf(num))); } return results; } //Get all Programs private ArrayList<DataObject> getPrograms() { ArrayList results = new ArrayList<DataObject>(); ArrayList<String> tempHolder = new ArrayList<String>(); tempHolder = (ArrayList) db.getProgram1(); for (String i : tempHolder) { int res = db.getStudentCountInProgram(i); results.add(new DataObject(i, String.valueOf(res))); } return results; } //Get Departments for Graduating Students private ArrayList<DataObject> getDept1(String program) { ArrayList results = new ArrayList<DataObject>(); ArrayList<String> tempHolder = new ArrayList<String>(); tempHolder = (ArrayList) db.getDept1(program); for (String i : tempHolder) { int res = db.getStudentCountInDept(program, i); results.add(new DataObject(i, String.valueOf(res))); } return results; } //Get Student for Graduating section private 
ArrayList<DataObject> getStudents1(String program, String dept) { ArrayList<Table_Grad_Students> students = new ArrayList<Table_Grad_Students>(); ArrayList<DataObject> results = new ArrayList<DataObject>(); String query = "SELECT * FROM Table_Grad_Students WHERE program = " + "'" + program + "'" + " AND dept = " + "'" + dept + "'"; students = (ArrayList) db.runSelectQuery1(query); int size = students.size(); Log.d("Size of students : ", String.valueOf(size)); for (int i = 0 ; i< size; i++) { Table_Grad_Students t = students.get(i); DataObject obj= new DataObject(t.getName(),t.getRoll(), t.getAdvisers(), t.getDescription()); results.add(obj); } return results; } //Get student for Awards section private ArrayList<DataObject> getStudents2(String award) { ArrayList<Table_Awards> students = new ArrayList<Table_Awards>(); ArrayList<DataObject> results = new ArrayList<DataObject>(); students = (ArrayList) db.runSelectQuery2(award); int size = students.size(); Log.d("Size of students : ", String.valueOf(size)); for (int i = 0 ; i< size; i++) { Table_Awards t = students.get(i); Bitmap bmp = db.getImage(t.getPicture()); /* if(bmp==null) Log.d("image from DB", "NULL"); else Log.d("image from DB", "NOT NULL"); */ DataObject obj= new DataObject(bmp, t.getRoll(), t.getName(), award, t.getDescription(), t.getComment(), t.getProgram(), t.getYear()); Log.d("Checking values : ", "Roll-"+t.getRoll() + ";" + "Name-" + t.getName() + ";" + "Award-"+ award + ";"+ "Desc-"+t.getDescription() + ";"+ "Comment-" + t.getComment() + ";" + "Program-" + t.getProgram() + ";" + "Dept-" + t.getDept() + ";" + "Year-" + t.getYear()); results.add(obj); } if(awardNum == 0 && !hasSearchedAwards) { DataObject obj= new DataObject("Previous Recipients"); results.add(obj); } return results; } //-------------------------------------------------------------------Get searched data------------------------------------------------------------------------- protected ArrayList<DataObject> getSearchedGrad(String q) { 
ArrayList<Table_Grad_Students> students = new ArrayList<Table_Grad_Students>(); ArrayList<DataObject> results = new ArrayList<DataObject>(); String query = "SELECT * FROM Table_Grad_Students WHERE name like " + "'" + "%" + q + "%" + "'"; students = (ArrayList) db.runSelectQuery1(query); int size = students.size(); Log.d("Size of students : ", String.valueOf(size)); for (int i = 0 ; i< size; i++) { Table_Grad_Students t = students.get(i); DataObject obj= new DataObject(t.getName(),t.getRoll(), t.getAdvisers(), t.getDescription(), t.getProgram(), t.getDept()); Log.d("Program : ", t.getProgram()); Log.d("Program : ", t.getName()); Log.d("Program : ", t.getAdvisers()); Log.d("Program : ", t.getDescription()); Log.d("Program : ", t.getProgram()); Log.d("Program : ", t.getDept()); results.add(obj); } return results; } protected ArrayList<DataObject> getSearchedAwards(String q) { ArrayList<Table_Awards> students = new ArrayList<Table_Awards>(); ArrayList<DataObject> results = new ArrayList<DataObject>(); students = (ArrayList) db.getStudentsbyName(q); int size = students.size(); Log.d("Size of students : ", String.valueOf(size)); for (int i = 0 ; i< size; i++) { Table_Awards t = students.get(i); Bitmap bmp = db.getImage(t.getPicture()); DataObject obj= new DataObject(bmp, t.getRoll(), t.getName(), t.getAward(), t.getDescription(), t.getComment(), t.getProgram(), t.getDept(), t.getYear(), t.getPicture()); Log.d("Name : ", t.getName()); Log.d("Award : ", t.getAward()); results.add(obj); } return results; } //-------------------------------------------------------------------------------------------------------------------------------------------------------------- protected void displayData() { // Log.d("displayData init", String.valueOf(value)); // b = getIntent().getExtras(); // if(b != null) // value = b.getInt("key"); // Log.d("displayData after init", String.valueOf(value)); //Clearing the existing UI frameLayout.removeAllViews(); //Instance used to read data db = 
DBHandler_Grad.getInstance(this); //Set different card views here. if((value == 3 && awardNum > -1 && !prevPres && !hasSearchedAwards) || (chief_page == 1 && value == 50) || (hon_page == 1 && value == 5)) { getLayoutInflater().inflate(R.layout.card_view_award, frameLayout); } else { getLayoutInflater().inflate(R.layout.card_view_generic, frameLayout); } //This should be done only after getLayoutInflater is called on frameLayout mRecyclerView = (RecyclerView) findViewById(R.id.my_recycler_view); mRecyclerView.setHasFixedSize(true); mLayoutManager = new LinearLayoutManager(CardViewActivity.this); mRecyclerView.setLayoutManager(mLayoutManager); // Log.d("Value","val=" + value) ; // Schedule if(value==1) { this.setTitle("Schedule"); if(schedule_page == 1) { mAdapter = new MyRecyclerViewAdapter(schedule_page1(), 1); } else if(schedule_page == 2) mAdapter = new MyRecyclerViewAdapter(getSchedule(date),1); } //Searched in awards else if(value == 3 && hasSearchedAwards) { this.setTitle(Html.fromHtml(getString(R.string.app_title))); mAdapter = new MyRecyclerViewAdapter(getSearchedAwards(query),1000); } //Searched for grad students else if(value == 4 && hasSearchedGrad) { this.setTitle(Html.fromHtml(getString(R.string.app_title))); mAdapter = new MyRecyclerViewAdapter(getSearchedGrad(query),999); } // Announcements else if(value==2) { mAdapter = new MyRecyclerViewAdapter(getAnnouncements(), 2); } //List of Awards else if(value==3 && awardNum == -1) { this.setTitle("Medals"); awards = getAwards(); mAdapter = new MyRecyclerViewAdapter(awards, 3); } //Some Award clicked else if(value==3 && awardNum > -1 && !prevPres) { String curAward = awards.get(awardNum).getmText1(); String curDesc = db.getDesc2(curAward); ArrayList<DataObject> students = getStudents2(curAward); TextView award = (TextView) findViewById(R.id.textViewA1); TextView desc = (TextView) findViewById(R.id.textViewA2); award.setText(curAward); //desc.setText(curDesc); //If a student with a certain award has a picture 
associated, then we assume that everyone in that category has a picture String imgName = db.getImageName(students.get(0).getmText2(), curAward); if(!imgName.equals("")) mAdapter = new MyRecyclerViewAdapter(students, 30); else mAdapter = new MyRecyclerViewAdapter(students, 31); } //Prev Recipient in Pres Gold Medal else if(value ==3 && awardNum == 0 && prevPres) { this.setTitle(Html.fromHtml(getString(R.string.app_title))); mAdapter = new MyRecyclerViewAdapter(getPrevPresExcel(), 300); } //Honourary Degrees and Chief Guests else if((value==5 && !prevHon) || (value==50 && !prevChief)) { if (value == 5) { this.setTitle("Honorary"); if (hon_page == 1) { TextView award = (TextView) findViewById(R.id.textViewA1); award.setText("Honorary Degrees"); mAdapter = new MyRecyclerViewAdapter(hon_page1(), 519); } else mAdapter = new MyRecyclerViewAdapter(getGuests("H", guestNameH), 5); } else if (value == 50) { this.setTitle(Html.fromHtml(getString(R.string.app_title))); if(chief_page == 1) { TextView award = (TextView) findViewById(R.id.textViewA1); award.setText("Chief Guests"); mAdapter = new MyRecyclerViewAdapter(chief_page1(), 509); } else mAdapter = new MyRecyclerViewAdapter(getGuests("C", guestNameC), 50); } } //Honourary Degrees and Chief Guests with Prev Recipients else if((value==5 && prevHon) || (value==50 && prevChief)) { this.setTitle(Html.fromHtml(getString(R.string.app_title))); if(value == 5) mAdapter = new MyRecyclerViewAdapter(getPrevHonExcel(), 51); else if(value == 50) mAdapter = new MyRecyclerViewAdapter(getPrevChiefExcel(), 501); } //Taxi Contacts else if(value==9) { this.setTitle("Contacts"); ArrayList<Table_Contact> contacts = (ArrayList<Table_Contact>) db.getContacts(); mAdapter = new MyRecyclerViewAdapter(getContacts(), 9); } //List of Programs for Graduating Students with the number of students in each of them. 
else if(value==4 && program == -1) { CardViewActivity.this.setTitle("Degrees"); programs = getPrograms(); mAdapter = new MyRecyclerViewAdapter(programs,4); } //List of Students for Graduating Students when Program already clicked with the number of students in each of them. else if(value == 4 && program > -1 && dept == -1) { String curDep = programs.get(program).getmText1(); CardViewActivity.this.setTitle(curDep); depts = getDept1(curDep); mAdapter = new MyRecyclerViewAdapter(depts,40); } //List of Students for Graduating Students when Program and Dept already clicked else if(value == 4 && program > -1 && dept > -1) { String curDep = programs.get(program).getmText1(); String curBr = depts.get(dept).getmText1(); CardViewActivity.this.setTitle(curDep + " -> " + curBr); ArrayList<DataObject> students = getStudents1(curDep, curBr); //Log.d("Branch", programs.get(program).getmText1() + ":"+ depts.get(dept).getmText1()); if(!curDep.equals("Ph.D.")) mAdapter = new MyRecyclerViewAdapter(students,400); else mAdapter = new MyRecyclerViewAdapter(students,401); } //List of Useful links else if(value==10) { this.setTitle("Other Links"); mAdapter = new MyRecyclerViewAdapter(getLinks(),10); } mRecyclerView.setAdapter(mAdapter); } //-----------------------------------------------------------------------------Previous Year Data Parsing Functions--------------------------------------------------- private ArrayList<DataObject> getPrevHonExcel() { ArrayList<DataObject> result = new ArrayList<DataObject>(); ArrayList<Table_Prev_Rec> prevList = new ArrayList<Table_Prev_Rec>(); prevList = (ArrayList<Table_Prev_Rec>) db.getPrevRec("H"); //Also reads the first row of the excel file. 
i.e Name,Roll number etc for (Iterator<Table_Prev_Rec> rit = prevList.iterator(); rit.hasNext(); ) { Table_Prev_Rec p = rit.next(); // Log.d("ExcelData", row.getCell(0, Row.CREATE_NULL_AS_BLANK).getStringCellValue()); DataObject g = new DataObject(p.getName(),p.getConvo_num(),p.getDesignation(),p.getComment()); result.add(g); } return result; } private ArrayList<DataObject> getPrevChiefExcel() { ArrayList<DataObject> result = new ArrayList<DataObject>(); ArrayList<Table_Prev_Rec> prevList = new ArrayList<Table_Prev_Rec>(); prevList = (ArrayList<Table_Prev_Rec>) db.getPrevRec("C"); //Also reads the first row of the excel file. i.e Name,Roll number etc for (Iterator<Table_Prev_Rec> rit = prevList.iterator(); rit.hasNext(); ) { Table_Prev_Rec p = rit.next(); // Log.d("ExcelData", row.getCell(0, Row.CREATE_NULL_AS_BLANK).getStringCellValue()); DataObject g = new DataObject(p.getName(),p.getConvo_num(),p.getComment(), p.getDesignation()); result.add(g); } return result; } private ArrayList<DataObject> getPrevPresExcel() { ArrayList<DataObject> result = new ArrayList<DataObject>(); ArrayList<Table_Prev_Rec> prevList = new ArrayList<Table_Prev_Rec>(); prevList = (ArrayList<Table_Prev_Rec>) db.getPrevRec("S"); //Also reads the first row of the excel file. 
i.e Name,Roll number etc for (Iterator<Table_Prev_Rec> rit = prevList.iterator(); rit.hasNext(); ) { Table_Prev_Rec p = rit.next(); // Log.d("ExcelData", row.getCell(0, Row.CREATE_NULL_AS_BLANK).getStringCellValue()); DataObject g = new DataObject(p.getName(),p.getDesignation()); result.add(g); } return result; } //-------------------------------------------------------------------------------------Creating and handling the search bar--------------------------------------------------- @Override public boolean onCreateOptionsMenu(Menu menu) { if(value != 3 && value != 4)return false; this.menu = menu; MenuInflater menuInflater = getMenuInflater(); menuInflater.inflate(R.menu.search_menu, menu); MenuItem searchItem = menu.findItem(R.id.search); searchView = (SearchView) searchItem.getActionView(); searchView.setQueryHint("Enter student name"); //Expanding the search view to take complete width searchView.setMaxWidth( Integer.MAX_VALUE ); MenuItemCompat.expandActionView(searchItem); SearchManager searchManager = (SearchManager) getSystemService(Context.SEARCH_SERVICE); searchView.setSearchableInfo(searchManager.getSearchableInfo(getComponentName())); return super.onCreateOptionsMenu(menu); } @Override protected void onNewIntent(Intent intent) { Log.d("New Intent in CardView", "Reached Here"); if (Intent.ACTION_SEARCH.equals(intent.getAction())) { String q = intent.getStringExtra(SearchManager.QUERY); Log.d("Search query", q); if(value == 3) hasSearchedAwards = true; else if(value == 4) hasSearchedGrad = true; query = q; displayData(); } else { Log.d("New Intent in CardView", "Inside Else"); b = intent.getExtras(); if(b != null) value = b.getInt("key"); Log.d("Value : ", String.valueOf(value)); displayData(); } } }
Version 0.92
app/src/main/java/com/aps/iitkconv/activities/CardViewActivity.java
Version 0.92
<ide><path>pp/src/main/java/com/aps/iitkconv/activities/CardViewActivity.java <ide> for (String i : tempHolder) <ide> { <ide> int num = db.getStudentCountInAward(i); <del> results.add(new DataObject(i, String.valueOf(num))); <add> if(!i.equals("")) <add> results.add(new DataObject(i, String.valueOf(num))); <ide> } <ide> <ide> return results; <ide> ArrayList<String> tempHolder = new ArrayList<String>(); <ide> tempHolder = (ArrayList) db.getProgram1(); <ide> <add> int k = 0; <ide> for (String i : tempHolder) <ide> { <ide> int res = db.getStudentCountInProgram(i); <del> results.add(new DataObject(i, String.valueOf(res))); <del> } <add> if(!i.equals("")) <add> results.add(new DataObject(i, String.valueOf(res))); <add>// Log.d("Programs : ", i + " : " + k); <add> k++; <add> } <add>// Log.d("Size of k : ", String.valueOf(k)); <add>// Log.d("Size of program1 : ", String.valueOf(results.size())); <ide> <ide> return results; <ide> } <ide> results.add(obj); <ide> } <ide> <del> if(awardNum == 0 && !hasSearchedAwards) <del> { <del> DataObject obj= new DataObject("Previous Recipients"); <del> results.add(obj); <del> } <add>// Previous Recipients for award <add>// if(awardNum == 0 && !hasSearchedAwards) <add>// { <add>// DataObject obj= new DataObject("Previous Recipients"); <add>// results.add(obj); <add>// } <ide> <ide> return results; <ide> } <ide> <ide> CardViewActivity.this.setTitle("Degrees"); <ide> programs = getPrograms(); <add> Log.d("Size of Programs : ", String.valueOf(programs.size())); <ide> mAdapter = new MyRecyclerViewAdapter(programs,4); <ide> <ide> } <ide> CardViewActivity.this.setTitle(curDep + " -> " + curBr); <ide> ArrayList<DataObject> students = getStudents1(curDep, curBr); <ide> //Log.d("Branch", programs.get(program).getmText1() + ":"+ depts.get(dept).getmText1()); <del> if(!curDep.equals("Ph.D.")) <add> if(!curDep.equals("PhD")) <ide> mAdapter = new MyRecyclerViewAdapter(students,400); <ide> else <ide> mAdapter = new MyRecyclerViewAdapter(students,401);
JavaScript
bsd-3-clause
37e1a7f5b6c609469f658cdcac3aae1345978aa8
0
erdc-cm/tethysapp-streamflow_prediction_tool,erdc-cm/tethysapp-streamflow_prediction_tool,erdc-cm/tethysapp-streamflow_prediction_tool
/***************************************************************************** * FILE: Streamflow Prediciton Map Tool * DATE: 08/24/2015 * AUTHOR: Alan Snow * COPYRIGHT: (c) 2015 US Army Corps of Engineers ERDC * LICENSE: BSD 2-Clause *****************************************************************************/ /***************************************************************************** * LIBRARY WRAPPER *****************************************************************************/ var ERFP_MAP = (function() { // Wrap the library in a package function "use strict"; // And enable strict mode for this library /************************************************************************ * MODULE LEVEL / GLOBAL VARIABLES *************************************************************************/ var public_interface, // Object returned by the module m_map, // the main map m_map_projection, //main map projection m_map_extent, //the extent of all objects in map m_basemap_layer, m_drainage_line_layers, m_select_interaction, m_selected_feature, m_selected_ecmwf_watershed, m_selected_ecmwf_subbasin, m_selected_wrf_hydro_watershed, m_selected_wrf_hydro_subbasin, m_selected_reach_id, m_selected_usgs_id, m_selected_nws_id, m_selected_hydroserver_url, m_downloading_ecmwf_hydrograph, m_downloading_era_interim_hydrograph, m_downloading_long_term_select, m_downloading_short_term_select, m_downloading_wrf_hydro_hydrograph, m_downloading_usgs, m_downloading_nws, m_downloading_hydroserver, m_searching_for_reach, m_long_term_chart_data_ajax_load_failed, m_long_term_select_data_ajax_handle, m_short_term_chart_data_ajax_load_failed, m_short_term_select_data_ajax_handle, m_ecmwf_start_folder, m_wrf_hydro_date_string, m_units, m_ecmwf_show, m_wrf_show, m_return_20_features_source, m_return_10_features_source, m_return_2_features_source, m_flood_maps; /************************************************************************ * PRIVATE FUNCTION DECLARATIONS 
*************************************************************************/ var resizeAppContent, bindInputs, convertTimeSeriesMetricToEnglish, getCI, convertTimeSeriesEnglishToMetric, isNotLoadingPastRequest, zoomToAll, zoomToLayer, zoomToFeature, toTitleCase, datePadString, getBaseLayer, getTileLayer, getKMLLayer, clearAllMessages, clearInfoMessages, clearOldChart, dateToUTCString, clearChartSelect2, getChartData, displayHydrograph, loadHydrographFromFeature,resetChartSelectMessage, addECMWFSeriesToCharts, addSeriesToCharts, isThereDataToLoad, checkCleanString, dateToUTCDateTimeString, getValidSeries, convertValueMetricToEnglish, unbindInputs; /************************************************************************ * PRIVATE FUNCTION IMPLEMENTATIONS *************************************************************************/ //FUNCTION: reset chart and select options resetChartSelectMessage = function() { //remove old chart reguardless clearOldChart('long-term'); $('.short-term-select').addClass('hidden'); $('.long-term-select').addClass('hidden'); //clear messages clearAllMessages(); }; //FUNCTION: resize content based resizeAppContent = function() { var map_div = $('#inner-app-content').children().first(); map_div.attr("style","height:" + parseInt($(document).height()*0.8) + "px"); if (typeof m_map != 'undefined') { m_map.updateSize(); } var document_width = $(document).width(); if (document_width > 900) { $('#app-content-wrapper').addClass('show-nav'); } var container = $('.container'); container.removeClass('no-padding'); var height_ratio = 0.97; if (document_width > 1500) { height_ratio = 0.57; } $('#wrf_toogle_col').removeClass('col-sm-2') .removeClass('col-sm-3') .removeClass('col-sm-4'); if (document_width > 1900) { $('#wrf_toogle_col').addClass('col-sm-2'); } else if (document_width > 1400){ $('#wrf_toogle_col').addClass('col-sm-3'); } else if (document_width > 900) { $('#wrf_toogle_col').addClass('col-sm-4'); } else if (document_width > 700) { 
$('#wrf_toogle_col').addClass('col-sm-2'); } else { $('#wrf_toogle_col').addClass('col-sm-3'); } //resize highchart var long_term_chart = $("#long-term-chart").highcharts(); if (typeof long_term_chart != 'undefined') { var width = $("#long-term-chart-row").width(); long_term_chart.setSize(0.97*width,height_ratio*width); } }; //FUNCTION: binds dom elements to layer bindInputs = function(layerid, layer) { var visibilityInput = $(layerid + ' input.visible'); visibilityInput.prop("checked", layer.getVisible()); visibilityInput.on('change', function() { layer.setVisible(this.checked); }); var opacityInput = $(layerid + ' input.opacity'); opacityInput.val(layer.getOpacity()); opacityInput.on('input change', function() { layer.setOpacity(parseFloat(this.value)); }); }; //FUNCTION: unbind dom elements from layer unbindInputs = function(layerid) { $(layerid + ' input.visible').off(); $(layerid + ' input.opacity').off(); } //FUNCTION: check to see if there is data to redraw on chart isThereDataToLoad = function(){ return ((m_ecmwf_show && m_selected_ecmwf_watershed != null && m_selected_ecmwf_subbasin != null) || (m_wrf_show && m_selected_wrf_hydro_watershed != null && m_selected_wrf_hydro_subbasin != null) || (!isNaN(m_selected_usgs_id) && m_selected_usgs_id != null) || (!isNaN(m_selected_nws_id) && m_selected_nws_id != null) || m_selected_hydroserver_url != null); }; //FUNCTION: convert units from metric to english convertValueMetricToEnglish = function(data_value) { var conversion_factor = 1; if(m_units=="english") { conversion_factor = 35.3146667; } return data_value * conversion_factor; }; //FUNCTION: convert units from metric to english convertTimeSeriesMetricToEnglish = function(time_series) { var new_time_series = []; var conversion_factor = 1; if(m_units=="english") { conversion_factor = 35.3146667; } time_series.map(function(data_row) { var new_data_array = [data_row[0]]; for (var i = 1; i<data_row.length; i++) { 
new_data_array.push(parseFloat((data_row[i]*conversion_factor).toFixed(5))); } new_time_series.push(new_data_array); }); return new_time_series; }; //FUNCTION: convert units from english to metric convertTimeSeriesEnglishToMetric = function(time_series, series_name) { var new_time_series = []; var date_time_value, data_value; var conversion_factor = 1; try { if (m_units == "metric") { conversion_factor = 35.3146667; } time_series.map(function(data) { if (series_name=="USGS") { data_value = data.value; date_time_value = data.dateTime; } else { date_time_value = data[0]; data_value = data[1]; } new_time_series.push([Date.parse(date_time_value), parseFloat(data_value)/conversion_factor]); }); } catch (e) { if (e instanceof TypeError) { appendErrorMessage("Error loading " + series_name + " data.", "load_series_error", "message-error"); } } return new_time_series; }; //FUNCTION: cleans sting and returns null if empty checkCleanString = function(string) { if(typeof string == 'undefined' || string == null) { return null; } else if (typeof string != 'string') { return string; } else { string = string.trim(); //set to null if it is empty string if (string.length <= 0) { return null; } return string; } }; //FUNCTION: ol case insensitive get feature property getCI = function(obj,prop){ prop = prop.toLowerCase(); for(var key in obj.getProperties()){ if(prop == key.toLowerCase()){ return checkCleanString(obj.get(key)); } } return null; }; //FUNCTION: get series with actual data getValidSeries = function(series_array){ if (series_array != null) { var valid_series; for (var i=0; i<series_array.length; i++) { valid_series = true; for (var j=0; j<series_array[i].length; j++) { if (series_array[i][j][1] < 0) { valid_series = false; break; } } if (valid_series) { return series_array[i]; } } } return null; }; //FUNCTION: check if loading past request isNotLoadingPastRequest = function() { return !m_downloading_ecmwf_hydrograph && !m_downloading_long_term_select && !m_downloading_usgs 
&& !m_downloading_nws && !m_downloading_hydroserver && !m_downloading_short_term_select && !m_downloading_wrf_hydro_hydrograph && !m_downloading_era_interim_hydrograph; }; //FUNCTION: zooms to all kml files zoomToAll = function() { m_map.getView().fitExtent(m_map_extent, m_map.getSize()); }; //FUNCTION: zooms to layer with id layer_id zoomToLayer = function(layer_id) { m_map.getLayers().forEach(function(layer, i) { if (layer instanceof ol.layer.Group) { layer.getLayers().forEach(function(sublayer, j) { if(sublayer.get('layer_id') == layer_id) { if(sublayer.get('layer_type') == "kml") { var source = sublayer.getSource(); m_map.getView().fitExtent(source.getExtent(), m_map.getSize()); return; } else if (sublayer.get('layer_type') == "geoserver") { m_map.getView().fitExtent(sublayer.get('extent'), m_map.getSize()); return; } } }); } }); }; //FUNCTION: zooms to feature in layer zoomToFeature = function(watershed_info, reach_id) { if(!m_searching_for_reach) { $("#reach-id-help-message").text(''); $("#reach-id-help-message").parent().removeClass('alert-danger'); var search_id_button = $("#submit-search-reach-id"); var search_id_button_html = search_id_button.html(); search_id_button.text('Searching ...'); var watershed_split = watershed_info.split(":"); var watershed_name = watershed_split[0]; var subbasin_name = watershed_split[1]; m_drainage_line_layers.forEach(function(drainage_line_layer, j) { if(drainage_line_layer.get('watershed_name') == watershed_name && drainage_line_layer.get('subbasin_name') == subbasin_name) { if(drainage_line_layer.get('layer_type') == "kml") { var features = drainage_line_layer.getSource().getFeatures(); for(var i=0; features.length>i; i++) { var feature_reach_id = getCI(features[i],'COMID'); if(feature_reach_id == null || isNaN(feature_reach_id)) { var feature_reach_id = getCI(features[i],'hydroid'); } if (feature_reach_id == reach_id) { var geometry = features[i].get('geometry'); m_map.getView().fitExtent(geometry.getExtent(), 
                                                          m_map.getSize());
                                search_id_button.html(search_id_button_html);
                                //select the found feature on the map
                                m_select_interaction.getFeatures().clear();
                                m_select_interaction.getFeatures().push(features[i]);
                                return;
                            }
                        }
                        //no KML feature matched
                        $("#reach-id-help-message").text('Reach ID ' + reach_id + ' not found');
                        $("#reach-id-help-message").parent().addClass('alert-danger');
                        search_id_button.html(search_id_button_html);
                        return;
                    } else if (drainage_line_layer.get('layer_type') == "geoserver") {
                        m_searching_for_reach = true;
                        var reach_id_attr_name = getCI(drainage_line_layer, 'reach_id_attr_name');
                        if (reach_id_attr_name != null) {
                            //TODO: Make query more robust
                            var url = drainage_line_layer.get('geoserver_url') +
                                      '&format_options=callback:searchFeatures' +
                                      '&CQL_FILTER='+ drainage_line_layer.get('reach_id_attr_name') +' =' + reach_id +
                                      '&srsname=' + m_map_projection;
                            jQuery.ajax({
                                url: encodeURI(url),
                                dataType: 'jsonp',
                                jsonpCallback: 'searchFeatures',
                            })
                            .done(function(response) {
                                if (response.totalFeatures > 0) {
                                    var features = drainage_line_layer.getSource().readFeatures(response);
                                    m_map.getView().fitExtent(features[0].getGeometry().getExtent(),
                                                              m_map.getSize());
                                    m_select_interaction.getFeatures().clear();
                                    m_select_interaction.getFeatures().push(features[0]);
                                } else {
                                    $("#reach-id-help-message").text('Reach ID ' + reach_id + ' not found');
                                    $("#reach-id-help-message").parent().addClass('alert-danger');
                                }
                            })
                            .always(function() {
                                //release the search lock and restore the button
                                m_searching_for_reach = false;
                                search_id_button.html(search_id_button_html);
                            })
                        } else {
                            $("#reach-id-help-message").text('No valid reach ID attribute found.');
                            $("#reach-id-help-message").parent().addClass('alert-danger');
                        }
                        return;
                    }
                }
            });
        }
    };

    //FUNCTION: converts string to title case
    // Uppercases the first character of each whitespace-delimited word and
    // lowercases the rest.
    toTitleCase = function(str) {
        return str.replace(/\w\S*/g,
            function(txt){return txt.charAt(0).toUpperCase() + txt.substr(1).toLowerCase();});
    };

    //FUNCTION: to convert date to string
    // Left-pads a number below 10 with a single zero (for date formatting).
    datePadString = function(i) {
        return (i < 10) ?
               "0" + i : "" + i;
    };

    //FUNCTION: adds appropriate base layer based on name
    // Returns an OpenLayers base layer for "BingMaps" (needs api_key) or
    // "Esri"; any other name falls back to a MapQuest satellite+hybrid group.
    getBaseLayer = function(base_layer_name, api_key) {
        if(base_layer_name == "BingMaps") {
            return new ol.layer.Tile({
                source: new ol.source.BingMaps({key: api_key, imagerySet: "AerialWithLabels"}),
            });
        }
        else if (base_layer_name == "Esri") {
            return new ol.layer.Tile({
                source: new ol.source.XYZ({
                    attributions: [new ol.Attribution({
                        html: 'Tiles &copy; <a href="http://services.arcgisonline.com/ArcGIS/' +
                              'rest/services/World_Topo_Map/MapServer">ArcGIS</a>'
                    })],
                    url: 'http://server.arcgisonline.com/ArcGIS/rest/services/' +
                         'World_Topo_Map/MapServer/tile/{z}/{y}/{x}'
                })
            });
        }
        //default to mapquest
        return new ol.layer.Group({
            style: 'AerialWithLabels',
            layers: [
                new ol.layer.Tile({
                    source: new ol.source.MapQuest({layer: 'sat'})
                }),
                new ol.layer.Tile({
                    source: new ol.source.MapQuest({layer: 'hyb'})
                })
            ]
        });
    };

    //FUNCTION: gets KML layer for geoserver
    // Builds a vector layer from a KML URL (layer_info) and tags it with
    // layer_id plus optional watershed/subbasin names for later lookup.
    getKMLLayer = function(layer_info, layer_id, watershed_name, subbasin_name) {
        var layer = new ol.layer.Vector({
            source: new ol.source.KML({
                //NOTE(review): `new` on ol.proj.get (a plain function) still
                //yields the projection object, but the `new` looks unintended
                projection: new ol.proj.get(m_map_projection),
                url: layer_info,
            }),
        });
        layer.set('layer_id', layer_id);
        layer.set('layer_type', 'kml');
        if( typeof watershed_name != 'undefined') {
            layer.set('watershed_name', watershed_name);
        }
        if( typeof subbasin_name != 'undefined') {
            layer.set('subbasin_name', subbasin_name);
        }
        return layer;
    };

    //FUNCTION: gets tile layer for geoserver
    // Builds a WMS tile layer from geoserver layer_info; returns null when
    // the advertised lat/lon bbox is degenerate (width or height ~0).
    getTileLayer = function(layer_info, geoserver_url, layer_id) {
        //validate extent
        var extent = layer_info['latlon_bbox'].map(Number)
        if (Math.abs(extent[0]-extent[1]) > 0.001 &&
            Math.abs(extent[2]-extent[3]) > 0.001) {
            var layer = new ol.layer.Tile({
                source: new ol.source.TileWMS({
                    url: geoserver_url,
                    params: {'LAYERS': layer_info['name'],
                             'TILED': true},
                    serverType: 'geoserver',
                }),
            });
            //extent stored in map projection for zoomToLayer
            layer.set('extent', ol.proj.transformExtent(extent, 'EPSG:4326', m_map_projection));
            layer.set('layer_id', layer_id);
            layer.set('layer_type', 'geoserver');
            return layer;
        }
        return null;
    };

    //FUNCTION: removes message and hides the div
    clearInfoMessages = function() {
        $('#message').addClass('hidden');
        $('#message').empty();
    };

    //FUNCTION: clears both the info and the error message areas
    clearAllMessages = function() {
        clearInfoMessages();
        $('#message-error').addClass('hidden');
        $('#message-error').empty();
    }

    //FUNCTION: removes highchart
    // Destroys and empties the Highcharts instance bound to
    // '#<model_name>-chart', if any.
    clearOldChart = function(model_name) {
        //clear old chart
        var highcharts_attr = $('#' + model_name + '-chart').attr('data-highcharts-chart');
        // For some browsers, `attr` is undefined; for others,
        // `attr` is false. Check for both.
        if (typeof highcharts_attr !== typeof undefined && highcharts_attr !== false) {
            $("#" + model_name +"-chart").highcharts().destroy();
            $('#' + model_name + '-chart').empty();
        }
    };

    //FUNCTION: removes chart select2
    // Unbinds, clears, and destroys the select2 widget on
    // '#<model_name>-select' when one is attached.
    clearChartSelect2 = function(model_name) {
        if($('#' + model_name + '-select').data('select2')) {
            $('#' + model_name + '-select').off('change.select2') //remove event handler
                                           .select2('val', '') //remove selection
                                           .select2('destroy'); //destroy
        }
    };

    //FUNCTION: converts date to UTC string in the format yyyy-mm-dd
    dateToUTCString = function(date) {
        return datePadString(date.getUTCFullYear()) + "-" +
               datePadString(1 + date.getUTCMonth()) + "-" +
               datePadString(date.getUTCDate());
    };

    //FUNCTION: converts date to UTC string in the format yyyy-mm-ddT00:00:00
    dateToUTCDateTimeString = function(date) {
        return dateToUTCString(date) + "T00:00:00";
    };

    //FUNCTION: adds a series to both the chart
    // Adds an ECMWF series (converted to display units) to the long-term
    // chart. When series_type is given (e.g. 'arearange') the series is
    // styled as a band linked to the previously added series.
    addECMWFSeriesToCharts = function(series_name, series_data, series_color, series_type){
        var long_term_chart = $("#long-term-chart").highcharts();
        var new_series = {
                            name: series_name,
                            data: convertTimeSeriesMetricToEnglish(series_data),
                            color: series_color,
                            selected: true
                         };
        //NOTE(review): `new_series != null` is always true here; the intent
        //was presumably to guard on series_data — confirm before changing
        if(typeof series_type != 'undefined' && new_series != null) {
            new_series.type = series_type;
            new_series.lineWidth = 0;
            new_series.linkedTo = ":previous";
            new_series.fillOpacity = 0.3;
        }
        long_term_chart.addSeries(new_series);
    };

    //FUNCTION: adds data to the chart
    // Adds a prebuilt Highcharts series to the long-term chart and unhides it.
    addSeriesToCharts = function(series){
        var long_term_chart = $("#long-term-chart").highcharts();
        long_term_chart.addSeries(series);
        $("#long-term-chart").removeClass("hidden");
    };

    //FUNCTION: gets all data for chart
    // (Re)creates the long-term StockChart and asynchronously loads every
    // enabled dataset for the currently selected reach: ECMWF ensemble,
    // ERA Interim + return periods, WRF-Hydro, USGS gage, AHPS, HydroServer.
    // Refuses to run while previous downloads are in flight or when no
    // dataset is toggled on. Select interaction is removed while loading
    // and restored in the ajax `always` handlers.
    getChartData = function() {
        if(!isNotLoadingPastRequest()) {
            //updateInfoAlert
            addWarningMessage("Please wait for datasets to download before making another selection.");
        } else if (!isThereDataToLoad()) {
            resetChartSelectMessage();
            //updateInfoAlert
            addWarningMessage("No data found to load. Please toggle on a dataset.");
        } else {
            resetChartSelectMessage();
            m_long_term_chart_data_ajax_load_failed = false;
            //turn off select interaction
            m_map.removeInteraction(m_select_interaction);
            addInfoMessage("Retrieving Data ...");
            //y-axis label follows the current display units
            var y_axis_title = "Flow (cms)";
            if (m_units == "english") {
                y_axis_title = "Flow (cfs)";
            }
            var default_chart_settings = {
                title: { text: "Forecast"},
                chart: {
                    zoomType: 'x',
                },
                rangeSelector: {
                    selected: 0
                },
                plotOptions: {
                    series: {
                        marker: {
                            enabled: false
                        }
                    }
                },
                xAxis: {
                    type: 'datetime',
                    title: {
                        text: 'Date (UTC)'
                    },
                    minRange: 1 * 24 * 3600000 // one day
                },
                yAxis: {
                    title: {
                        text: y_axis_title
                    },
                    min: 0
                },
            };
            //handle subtitles - ECMWF first priority
            var subtitle = null;
            if(m_selected_ecmwf_watershed != null && m_selected_ecmwf_subbasin != null) {
                subtitle = {text: toTitleCase(m_selected_ecmwf_watershed) +
                                  " (" + toTitleCase(m_selected_ecmwf_subbasin) + "): " +
                                  m_selected_reach_id}
            }
            else if (m_selected_wrf_hydro_watershed != null && m_selected_wrf_hydro_subbasin != null) {
                subtitle = {text: toTitleCase(m_selected_wrf_hydro_watershed) +
                                  " (" + toTitleCase(m_selected_wrf_hydro_subbasin) + "): " +
                                  m_selected_reach_id}
            }
            if (subtitle != null) {
                default_chart_settings.subtitle = subtitle;
            }
            $("#long-term-chart").highcharts('StockChart', default_chart_settings);
            //get ecmwf data
            if (m_ecmwf_show && m_selected_ecmwf_watershed != null && m_selected_ecmwf_subbasin != null) {
                m_downloading_ecmwf_hydrograph = true;
                var xhr_ecmwf_hydrograph = jQuery.ajax({
                    type: "GET",
                    url: "ecmwf-get-hydrograph",
                    dataType: "json",
                    data: {
                        watershed_name: m_selected_ecmwf_watershed,
                        subbasin_name: m_selected_ecmwf_subbasin,
                        reach_id: m_selected_reach_id,
                        start_folder: m_ecmwf_start_folder,
                    },
                });
                xhr_ecmwf_hydrograph.done(function (data) {
                    if ("success" in data) {
                        //add whichever ensemble products the server returned
                        if ("mean" in data) {
                            addECMWFSeriesToCharts("ECMWF", data.mean,
                                Highcharts.getOptions().colors[2]);
                        }
                        if ("outer_range" in data) {
                            addECMWFSeriesToCharts("ECMWF - Outer Range", data.outer_range,
                                Highcharts.getOptions().colors[2], 'arearange');
                        }
                        if ("std_dev_range" in data) {
                            addECMWFSeriesToCharts("ECMWF - Std. Dev.", data.std_dev_range,
                                Highcharts.getOptions().colors[2], 'arearange');
                        }
                        if ("high_res" in data) {
                            addECMWFSeriesToCharts("ECMWF - High Res.", data.high_res,
                                Highcharts.getOptions().colors[1]);
                        }
                        $('.long-term-select').removeClass('hidden');
                        var long_term_chart = $("#long-term-chart").highcharts();
                        long_term_chart.rangeSelector.clickButton(0,0,true);
                        $('#long-term-chart').removeClass('hidden');
                    } else {
                        m_long_term_chart_data_ajax_load_failed = true;
                        appendErrorMessage(data["error"], "ecmwf_error", "message-error");
                        clearChartSelect2('long-term');
                    }
                })
                .fail(function (request, status, error) {
                    m_long_term_chart_data_ajax_load_failed = true;
                    appendErrorMessage("Error: " + error, "ecmwf_error", "message-error");
                    clearChartSelect2('long-term');
                })
                .always(function () {
                    m_downloading_ecmwf_hydrograph = false;
                    m_map.addInteraction(m_select_interaction);
                    if(isNotLoadingPastRequest()){
                        clearInfoMessages();
                    }
                });
                //historical ERA Interim series + return-period bands
                m_downloading_era_interim_hydrograph = true;
                jQuery.ajax({
                    type: "GET",
                    url: "era-interim-get-hydrograph",
                    dataType: "json",
                    data: {
                        watershed_name: m_selected_ecmwf_watershed,
                        subbasin_name: m_selected_ecmwf_subbasin,
                        reach_id: m_selected_reach_id,
                    },
                })
                .done(function (data) {
                    if ("success" in data) {
                        var long_term_chart = $("#long-term-chart").highcharts();
                        //load interim data to chart (deferred until the ECMWF
                        //request settles so series ordering is stable)
                        xhr_ecmwf_hydrograph.always(function(){
                            if ("era_interim" in data) {
                                if (!("error" in data.era_interim)) {
                                    var era_interim_series = {
                                                                name: "ERA Interim",
                                                                data: convertTimeSeriesMetricToEnglish(data.era_interim.series),
                                                                dashStyle: 'longdash',
                                                                color: Highcharts.getOptions().colors[10],
                                                             };
                                    long_term_chart.addSeries(era_interim_series);
                                } else {
                                    appendErrorMessage("Error: " + data.era_interim.error,
                                                       "era_interim_error", "message-error");
                                }
                            }
                            //load return period data to chart
                            if ("return_period" in data) {
                                if (!("error" in data.return_period)) {
                                    var extremes = long_term_chart.yAxis[0].getExtremes();
                                    var maxY = Math.max(extremes.max,
                                        convertValueMetricToEnglish(parseFloat(data.return_period.max)));
                                    //NOTE(review): maxY is already converted above, so the
                                    //`to:` below converts it a second time — confirm intent
                                    long_term_chart.yAxis[0].addPlotBand({
                                        from: convertValueMetricToEnglish(parseFloat(data.return_period.twenty)),
                                        to: convertValueMetricToEnglish(maxY),
                                        color: 'rgba(128,0,128,0.4)',
                                        id: '20-yr',
                                        label: {
                                            text: '20-yr',
                                            align: 'right',
                                        }
                                    });
                                    long_term_chart.yAxis[0].addPlotBand({
                                        from: convertValueMetricToEnglish(parseFloat(data.return_period.ten)),
                                        to: convertValueMetricToEnglish(parseFloat(data.return_period.twenty)),
                                        color: 'rgba(255,0,0,0.3)',
                                        id: '10-yr',
                                        label: {
                                            text: '10-yr',
                                            align: 'right',
                                        }
                                    });
                                    long_term_chart.yAxis[0].addPlotBand({
                                        from: convertValueMetricToEnglish(parseFloat(data.return_period.two)),
                                        to: convertValueMetricToEnglish(parseFloat(data.return_period.ten)),
                                        color: 'rgba(255,255,0,0.3)',
                                        id: '2-yr',
                                        label: {
                                            text: '2-yr',
                                            align: 'right',
                                        }
                                    });
                                } else {
                                    appendErrorMessage("Error: " + data.return_period.error,
                                                       "era_interim_error", "message-error");
                                }
                            }
                            //if ERA Interim series present, show chart
                            if ("era_interim" in data) {
                                if (!("error" in data.era_interim)) {
                                    $('#long-term-chart').removeClass('hidden');
                                }
                            }
                        });
                    } else {
                        appendErrorMessage("Error: " + data.error, "era_interim_error", "message-error");
                    }
                })
                .fail(function (request, status, error) {
                    appendErrorMessage("Error: " + error, "era_interim_error", "message-error");
                })
                .always(function () {
                    m_downloading_era_interim_hydrograph = false;
                    m_map.addInteraction(m_select_interaction);
                    if(isNotLoadingPastRequest()){
                        clearInfoMessages();
                    }
                });
            }
            //if there is a wrf watershed & subbasin attribute
            if (m_wrf_show && m_selected_wrf_hydro_watershed != null && m_selected_wrf_hydro_subbasin != null) {
                m_downloading_wrf_hydro_hydrograph = true;
                jQuery.ajax({
                    type: "GET",
                    url: "wrf-hydro-get-hydrograph",
                    dataType: "json",
                    data: {
                        watershed_name: m_selected_wrf_hydro_watershed,
                        subbasin_name: m_selected_wrf_hydro_subbasin,
                        reach_id: m_selected_reach_id,
                        date_string: m_wrf_hydro_date_string,
                    },
                })
                .done(function (data) {
                    if ("success" in data) {
                        //wrf_hydro
                        if ("wrf_hydro" in data) {
                            var wrf_series = {
                                                name: "WRF-Hydro (HRRR)",
                                                data: convertTimeSeriesMetricToEnglish(data.wrf_hydro),
                                                dashStyle: 'longdash',
                                                color: Highcharts.getOptions().colors[3]
                                             };
                            var long_term_chart = $("#long-term-chart").highcharts();
                            long_term_chart.addSeries(wrf_series);
                            $('.short-term-select').removeClass('hidden');
                            $('#long-term-chart').removeClass('hidden');
                        }
                    } else {
                        m_short_term_chart_data_ajax_load_failed = true;
                        appendErrorMessage("Error: " + data["error"], "wrf_hydro_error", "message-error");
                        clearChartSelect2('short-term');
                    }
                })
                .fail(function (request, status, error) {
                    m_short_term_chart_data_ajax_load_failed = true;
                    appendErrorMessage("Error: " + error, "wrf_hydro_error", "message-error");
                    clearChartSelect2('short-term');
                })
                .always(function () {
                    m_downloading_wrf_hydro_hydrograph = false;
                    m_map.addInteraction(m_select_interaction);
                    if(isNotLoadingPastRequest()){
                        clearInfoMessages();
                    }
                });
            }
            //get current dates - default observation window is 3 days back
            //through 15 days ahead, widened below by any forecast start dates
            var date_now = new Date();
            var date_past = new Date();
            date_past.setUTCDate(date_now.getUTCDate()-3);
            var date_future = new Date();
            date_future.setUTCDate(date_now.getUTCDate()+15);
            var date_observed_end = date_now;
            var date_nws_end = date_future;
            //ECMWF Dates (sentinels: max/min representable JS dates)
            var ecmwf_date_forecast_begin = new Date(8640000000000000);
            var ecmwf_date_forecast_end = new Date(-8640000000000000);
            var get_ecmwf = m_ecmwf_start_folder != null &&
                            typeof m_ecmwf_start_folder != "undefined" &&
                            m_ecmwf_start_folder != "most_recent" &&
                            m_ecmwf_show;
            //get ECMWF forecast dates if available
            //folder name format assumed: yyyymmdd.HH... — TODO confirm
            if(get_ecmwf) {
                var ecmwf_forecast_start_year = parseInt(m_ecmwf_start_folder.substring(0,4));
                var ecmwf_forecast_start_month = parseInt(m_ecmwf_start_folder.substring(4,6));
                var ecmwf_forecast_start_day = parseInt(m_ecmwf_start_folder.substring(6,8));
                var ecmwf_forecast_start_hour = parseInt(m_ecmwf_start_folder.split(".")[1].substring(0,2));
                ecmwf_date_forecast_begin = new Date(Date.UTC(ecmwf_forecast_start_year,
                                                              ecmwf_forecast_start_month-1,
                                                              ecmwf_forecast_start_day,
                                                              ecmwf_forecast_start_hour));
                ecmwf_date_forecast_end = new Date();
                ecmwf_date_forecast_end.setUTCDate(ecmwf_date_forecast_begin.getUTCDate()+15);
                //reset dates if applicable
                date_observed_end = ecmwf_date_forecast_end;
                date_nws_end = ecmwf_date_forecast_end;
            }
            //WRF-Hydro Dates (sentinels: max/min representable JS dates)
            var wrf_hydro_date_forecast_begin = new Date(8640000000000000);
            var wrf_hydro_date_forecast_end = new Date(-8640000000000000);
            //get WRF-Hydro forecast dates if available
            //date string format assumed: yyyymmddTHH... — TODO confirm
            if(m_wrf_hydro_date_string != null &&
               typeof m_wrf_hydro_date_string != "undefined" &&
               m_wrf_hydro_date_string != "most_recent" &&
               m_wrf_show) {
                var wrf_hydro_forecast_start_year = parseInt(m_wrf_hydro_date_string.substring(0,4));
                var wrf_hydro_forecast_start_month = parseInt(m_wrf_hydro_date_string.substring(4,6));
                var wrf_hydro_forecast_start_day = parseInt(m_wrf_hydro_date_string.substring(6,8));
                var wrf_hydro_forecast_start_hour = parseInt(m_wrf_hydro_date_string.split("T")[1].substring(0,2));
                wrf_hydro_date_forecast_begin = new Date(Date.UTC(wrf_hydro_forecast_start_year,
                                                              wrf_hydro_forecast_start_month-1,
                                                              wrf_hydro_forecast_start_day,
                                                              wrf_hydro_forecast_start_hour));
                wrf_hydro_date_forecast_end = new Date(wrf_hydro_date_forecast_begin.getTime()+15*60*60000);
                //reset dates if applicable
                if(get_ecmwf) {
                    date_observed_end = new Date(Math.max.apply(null,[date_observed_end, wrf_hydro_date_forecast_end]));
                    date_nws_end = new Date(Math.max.apply(null,[date_nws_end, wrf_hydro_date_forecast_end]));
                } else {
                    date_observed_end = wrf_hydro_date_forecast_end;
                    date_nws_end = wrf_hydro_date_forecast_end;
                }
            }
            var date_observed_start = new Date(Math.min.apply(null,[date_past,
                                                                    ecmwf_date_forecast_begin,
                                                                    wrf_hydro_date_forecast_begin]));
            var date_nws_start = new Date(Math.min.apply(null,[date_now,
                                                               ecmwf_date_forecast_begin,
                                                               wrf_hydro_date_forecast_begin]));
            //Get USGS data if USGS ID attribute exists
            if(!isNaN(m_selected_usgs_id) && m_selected_usgs_id != null) {
                if(m_selected_usgs_id.length >= 8) {
                    m_downloading_usgs = true;
                    //get USGS data (instantaneous values, discharge 00060)
                    var chart_usgs_data_ajax_handle = jQuery.ajax({
                        type: "GET",
                        url: "http://waterservices.usgs.gov/nwis/iv/",
                        dataType: "json",
                        data: {
                            format: 'json',
                            sites: m_selected_usgs_id,
                            startDT: dateToUTCString(date_observed_start),
                            endDT: dateToUTCString(date_observed_end),
                            parameterCd: '00060',
                        },
                    })
                    .done(function (data) {
                        if (typeof data != 'undefined') {
                            try {
                                var usgs_series = {
                                                    name: "USGS (" + m_selected_usgs_id + ")",
                                                    data: convertTimeSeriesEnglishToMetric(data.value.timeSeries[0].values[0].value, "USGS"),
                                                    dashStyle: 'longdash',
                                                    color: Highcharts.getOptions().colors[0]
                                                  };
                                addSeriesToCharts(usgs_series);
                            } catch (e) {
                                //missing/empty payload surfaces as a TypeError
                                if (e instanceof TypeError) {
                                    appendErrorMessage("Recent USGS data not found.", "usgs_error", "message-error");
                                }
                            }
                        }
                    })
                    .fail(function (request, status, error) {
                        appendErrorMessage("USGS Error: " + error, "usgs_error", "message-error");
                    })
                    .always(function () {
                        m_downloading_usgs = false;
                        if(isNotLoadingPastRequest()){
                            clearInfoMessages();
                        }
                    });
                }
            }
            //Get AHPS data if NWS ID attribute exists
            if(m_selected_nws_id != null) {
                m_downloading_nws = true;
                //get NWS data
                //Example URL: http://ua-fews.ua.edu/WaterMlService/waterml?
                //  request=GetObservation&featureId=ACRT2&observedProperty=QINE
                //  &beginPosition=2015-01-01T00:00:00&endPosition=2015-06-22T00:00:00
                var chart_nws_data_ajax_handle = jQuery.ajax({
                    type: "GET",
                    url: "http://ua-fews.ua.edu/WaterMlService/waterml",
                    data: {
                        request: 'GetObservation',
                        observedProperty: 'QINE',
                        featureId: m_selected_nws_id,
                        beginPosition: dateToUTCDateTimeString(date_nws_start),
                        endPosition: dateToUTCDateTimeString(date_nws_end),
                    },
                })
                .done(function(data) {
                    //var series_data = getValidSeries(WATERML.get_json_from_streamflow_waterml(data, m_units));
                    var series_data = WATERML.get_json_from_streamflow_waterml(data, m_units, "T0 (Time of analysis)");
                    if(series_data == null) {
                        appendErrorMessage("No valid recent data found for AHPS (" + m_selected_nws_id + ")",
                                           "ahps_error", "message-error");
                    } else {
                        var ahps_series = {
                                            name: "AHPS (" + m_selected_nws_id + ")",
                                            data: series_data[0],
                                            dashStyle: 'longdash',
                                            color: Highcharts.getOptions().colors[4],
                                          };
                        addSeriesToCharts(ahps_series);
                        $('#long-term-chart').removeClass('hidden');
                    }
                })
                .fail(function(request, status, error) {
                    appendErrorMessage("AHPS Error: " + error, "ahps_error", "message-error");
                })
                .always(function() {
                    m_downloading_nws = false;
                    if(isNotLoadingPastRequest()){
                        clearInfoMessages();
                    }
                });
            }
            //Get HydroServer Data if Available
            if(m_selected_hydroserver_url != null) {
                m_downloading_hydroserver = true;
                //get WorldWater data
                var chart_ww_data_ajax_handle = jQuery.ajax({
                    type: "GET",
                    url: m_selected_hydroserver_url,
                    data: {
                        startDate: dateToUTCString(date_observed_start),
                        endDate: dateToUTCString(date_observed_end),
                    },
                })
                .done(function(data) {
                    var series_data = WATERML.get_json_from_streamflow_waterml(data, m_units);
                    if(series_data == null) {
                        appendErrorMessage("No data found for WorldWater", "hydro_server_error", "message-error");
                    } else {
                        var hydro_server_series = {
                                            name: "HydroServer",
                                            data: series_data[0],
                                            dashStyle: 'longdash',
                                            color: Highcharts.getOptions().colors[5],
                                          };
                        addSeriesToCharts(hydro_server_series);
                    }
                })
                .fail(function(request, status, error) {
                    appendErrorMessage("Error: " + error, "hydro_server_error", "message-error");
                })
                .always(function() {
                    m_downloading_hydroserver = false;
                    if(isNotLoadingPastRequest()){
                        clearInfoMessages();
                    }
                });
            }
        }
    };

    //FUNCTION: displays hydrograph at stream segment
    // Resets the forecast-date selections to "most_recent", loads the chart
    // data for the currently selected reach, and (re)populates the ECMWF and
    // WRF-Hydro forecast-date select2 dropdowns, whose change handlers
    // re-invoke getChartData with the chosen date.
    displayHydrograph = function() {
        //check if old ajax call still running
        if(!isNotLoadingPastRequest()) {
            //updateInfoAlert
            appendWarningMessage("Please wait for datasets to download before making another selection.",
                                 "wait_warning");
        } else if (!isThereDataToLoad()) {
            resetChartSelectMessage();
            //updateInfoAlert
            addWarningMessage("No data found to load. Please toggle on a dataset.");
        } else {
            resetChartSelectMessage();
            //Get chart data
            m_ecmwf_start_folder = "most_recent";
            m_wrf_hydro_date_string = "most_recent";
            getChartData();
            //Get available ECMWF Dates
            if (m_ecmwf_show && m_selected_ecmwf_watershed != null && m_selected_ecmwf_subbasin != null) {
                m_downloading_long_term_select = true;
                //NOTE: "avaialable" is misspelled but matches the server route; do not "fix"
                m_long_term_select_data_ajax_handle = jQuery.ajax({
                    type: "GET",
                    url: "ecmwf-get-avaialable-dates",
                    dataType: "json",
                    data: {
                        watershed_name: m_selected_ecmwf_watershed,
                        subbasin_name: m_selected_ecmwf_subbasin,
                        reach_id: m_selected_reach_id,
                    },
                })
                .done(function (data) {
                    if ("success" in data && !m_long_term_chart_data_ajax_load_failed) {
                        //remove select2 if exists
                        clearChartSelect2('long-term');
                        $('.long-term-select').removeClass('hidden');
                        //create new select2
                        $('#long-term-select').select2({
                                                        data: data.output_directories,
                                                        placeholder: "Select a Date"
                                                       });
                        //keep hidden while the hydrograph downloads are still running
                        if (m_downloading_ecmwf_hydrograph && m_downloading_era_interim_hydrograph) {
                            $('.long-term-select').addClass('hidden');
                        }
                        //add on change event handler
                        $('#long-term-select').on('change.select2', function () {
                            m_ecmwf_start_folder = $(this).select2('data').id;
                            getChartData();
                        });
                    } else if ("error" in data) {
                        appendErrorMessage("Error: " + data.error, "ecmwf_error", "message-error");
                        clearChartSelect2('long-term');
                    }
                })
                .fail(function (request, status, error) {
                    appendErrorMessage("Error: " + error, "ecmwf_error", "message-error");
                    clearChartSelect2('long-term');
                })
                .always(function () {
                    m_downloading_long_term_select = false;
                    if(isNotLoadingPastRequest()){
                        clearInfoMessages();
                    }
                });
            }
            //Get available WRF-Hydro Dates
            if (m_wrf_show && m_selected_wrf_hydro_watershed != null && m_selected_wrf_hydro_subbasin != null) {
                m_downloading_short_term_select = true;
                //NOTE: "avaialable" is misspelled but matches the server route; do not "fix"
                m_short_term_select_data_ajax_handle = jQuery.ajax({
                    type: "GET",
                    url: "wrf-hydro-get-avaialable-dates",
                    dataType: "json",
                    data: {
                        watershed_name: m_selected_wrf_hydro_watershed,
                        subbasin_name: m_selected_wrf_hydro_subbasin,
                    },
                })
                .done(function (data) {
                    if ("success" in data && !m_short_term_chart_data_ajax_load_failed) {
                        //remove select2 if exists
                        clearChartSelect2('short-term');
                        $('.short-term-select').removeClass('hidden');
                        //create new select2
                        $('#short-term-select').select2({
                                                        data: data.output_files,
                                                        placeholder: "Select a Date"
                                                       });
                        //keep hidden while the hydrograph download is still running
                        if (m_downloading_wrf_hydro_hydrograph) {
                            $('.short-term-select').addClass('hidden');
                        }
                        //add on change event handler
                        $('#short-term-select').on('change.select2', function () {
                            m_wrf_hydro_date_string = $(this).select2('data').id;
                            getChartData();
                        });
                    } else if ("error" in data) {
                        appendErrorMessage("Error: " + data.error, "wrf_hydro_error", "message-error");
                        clearChartSelect2('short-term');
                    }
                })
                .fail(function (request, status, error) {
                    appendErrorMessage("Error: " + error, "wrf_hydro_error", "message-error");
                    clearChartSelect2('short-term');
                })
                .always(function () {
                    m_downloading_short_term_select = false;
                    if(isNotLoadingPastRequest()){
                        clearInfoMessages();
                    }
                });
            }
        }
    };

    //FUNCTION: Loads Hydrograph from Selected feature
    // Extracts the reach/watershed/gage attributes from a selected map
    // feature (case-insensitively, with legacy attribute-name fallbacks),
    // stores them in the module-level selection state, and triggers
    // displayHydrograph().
    loadHydrographFromFeature = function(selected_feature) {
        //check if old ajax call still running
        if(!isNotLoadingPastRequest()) {
            //updateInfoAlert
            appendWarningMessage("Please wait for datasets to download before making another selection.",
                                 "wait_warning");
        } else {
            //get attributes
            var reach_id = getCI(selected_feature, 'COMID');
            var ecmwf_watershed_name = getCI(selected_feature, "watershed");
            var ecmwf_subbasin_name = getCI(selected_feature, "subbasin");
            var wrf_hydro_watershed_name = getCI(selected_feature, "wwatershed");
            var wrf_hydro_subbasin_name = getCI(selected_feature, "wsubbasin");
            var usgs_id = getCI(selected_feature, "usgs_id");
            var nws_id = getCI(selected_feature, "nws_id");
            //NOTE: attribute name really is "hydroserve" (shapefile 10-char limit, presumably)
            var hydroserver_url = getCI(selected_feature, "hydroserve");
            //check if the variables are under a different name
            if(reach_id == null || isNaN(reach_id)) {
                var reach_id = getCI(selected_feature, 'hydroid');
            }
            if(ecmwf_watershed_name == null) {
                var ecmwf_watershed_name = getCI(selected_feature, 'watershed_name');
            }
            if(ecmwf_subbasin_name == null) {
                var ecmwf_subbasin_name = getCI(selected_feature, 'subbasin_name');
            }
            //clean up usgs_id
            if(!isNaN(usgs_id) && usgs_id != null) {
                //add zero in case it was removed when converted to a number
                while(usgs_id.length < 8 && usgs_id.length > 0) {
                    usgs_id = '0' + usgs_id;
                }
            }
            //NOTE(review): && binds tighter than ||, so this reads as
            //(reach_id != null && ecmwf names) || (wrf names) — the WRF branch
            //does not require reach_id; confirm whether that is intended
            if(reach_id != null &&
               (ecmwf_watershed_name != null && ecmwf_subbasin_name != null) ||
               (wrf_hydro_watershed_name != null && wrf_hydro_subbasin_name != null)) {
                //commit the selection to module state and load the charts
                m_selected_feature = selected_feature;
                m_selected_reach_id = reach_id;
                m_selected_ecmwf_watershed = ecmwf_watershed_name;
                m_selected_ecmwf_subbasin = ecmwf_subbasin_name;
                m_selected_wrf_hydro_watershed = wrf_hydro_watershed_name;
                m_selected_wrf_hydro_subbasin = wrf_hydro_subbasin_name;
                m_selected_usgs_id = usgs_id;
                m_selected_nws_id = nws_id;
                m_selected_hydroserver_url = hydroserver_url;
                displayHydrograph();
            } else {
                appendErrorMessage('The attributes in the file are faulty. Please fix and upload again.',
                                   "file_attr_error", "message-error");
            }
        }
    };

    /************************************************************************
     * DEFINE PUBLIC INTERFACE
     *************************************************************************/
    /*
     * Library object that contains public facing functions of the package.
     * This is the object that is returned by the library wrapper function.
     * See below.
     * NOTE: The functions in the public interface have access to the private
     * functions of the library because of JavaScript function scope.
     */
    public_interface = {
        zoomToAll: function() {
            zoomToAll();
        },
    };

    /************************************************************************
     * INITIALIZATION / CONSTRUCTOR
     *************************************************************************/
    // Initialization: jQuery function that gets called when
    // the DOM tree finishes loading
    $(function() {
        resizeAppContent();
        $('#map_top_navigation').find('.form-group').addClass('inline-block');
        //initialize map global variables (module-level selection and
        //download-state flags read throughout the private functions above)
        m_map_projection = 'EPSG:3857';
        m_map_extent = ol.extent.createEmpty();
        m_selected_feature = null;
        m_selected_ecmwf_watershed = null;
        m_selected_ecmwf_subbasin = null;
        m_selected_wrf_hydro_watershed = null;
        m_selected_wrf_hydro_subbasin = null;
        m_selected_reach_id = null;
        m_selected_usgs_id = null;
        m_selected_nws_id = null;
        m_selected_hydroserver_url = null;
        m_downloading_ecmwf_hydrograph = false;
        m_downloading_era_interim_hydrograph = false;
        m_downloading_long_term_select = false;
        m_downloading_wrf_hydro_hydrograph = false;
        m_downloading_short_term_select = false;
        m_downloading_usgs = false;
        m_downloading_nws = false;
        m_downloading_hydroserver = false;
        m_searching_for_reach = false;
        m_long_term_chart_data_ajax_load_failed = false;
        m_short_term_chart_data_ajax_load_failed = false;
        m_long_term_select_data_ajax_handle = null;
        m_ecmwf_start_folder = "most_recent";
        m_wrf_hydro_date_string = "most_recent";
        //Init from toggle
        m_units = "metric";
if(!$('#units-toggle').bootstrapSwitch('state')) { m_units = "english"; } m_wrf_show = $('#wrf-toggle').bootstrapSwitch('state'); m_ecmwf_show = $('#ecmwf-toggle').bootstrapSwitch('state'); //create symbols for warnings var twenty_symbols = [new ol.style.RegularShape({ points: 3, radius: 5, fill: new ol.style.Fill({ color: 'rgba(128,0,128,0.8)' }), stroke: new ol.style.Stroke({ color: 'rgba(128,0,128,1)', width: 1 }), }),new ol.style.RegularShape({ points: 3, radius: 9, fill: new ol.style.Fill({ color: 'rgba(128,0,128,0.3)' }), stroke: new ol.style.Stroke({ color: 'rgba(128,0,128,1)', width: 1 }), })]; //symbols var ten_symbols = [new ol.style.RegularShape({ points: 3, radius: 5, fill: new ol.style.Fill({ color: 'rgba(255,0,0,0.7)' }), stroke: new ol.style.Stroke({ color: 'rgba(255,0,0,1)', width: 1 }), }),new ol.style.RegularShape({ points: 3, radius: 9, fill: new ol.style.Fill({ color: 'rgba(255,0,0,0.3)' }), stroke: new ol.style.Stroke({ color: 'rgba(255,0,0,1)', width: 1 }), })]; //symbols var two_symbols = [new ol.style.RegularShape({ points: 3, radius: 5, fill: new ol.style.Fill({ color: 'rgba(255,255,0,0.7)' }), stroke: new ol.style.Stroke({ color: 'rgba(255,255,0,1)', width: 1 }), }),new ol.style.RegularShape({ points: 3, radius: 9, fill: new ol.style.Fill({ color: 'rgba(255,255,0,0.3)' }), stroke: new ol.style.Stroke({ color: 'rgba(255,255,0,1)', width: 1 }), })]; //load base layer var base_layer_info = JSON.parse($("#map").attr('base-layer-info')); m_basemap_layer = getBaseLayer(base_layer_info.name,base_layer_info.api_key); //load drainage line kml layers var layers_info = JSON.parse($("#map").attr('layers-info')); var all_group_layers = []; m_drainage_line_layers = []; m_flood_maps = []; //add each watershed kml group layers_info.forEach(function(layer_info, group_index) { var layers = []; if('geoserver_url' in layer_info) { //add catchment if exists if('catchment' in layer_info) { var catchment_layer_id = 'layer' + group_index + 'g' + 1 if ("error" in 
layer_info.catchment) { appendErrorMessage("Catchment Layer: " + layer_info.title + ": " + layer_info.catchment.error, "error_" + catchment_layer_id, "message-error"); } else { var layer = getTileLayer(layer_info['catchment'], layer_info['geoserver_url'], catchment_layer_id); if (layer != null) { layer.setOpacity(0.5); layers.push(layer); } else { appendErrorMessage("Catchment Layer Invalid ... ", "error_" + catchment_layer_id, "message-error"); } } } //add gage if exists if('gage' in layer_info) { var gage_layer_id = 'layer' + group_index + 'g' + 2; if ("error" in layer_info.gage) { appendErrorMessage("Gage Layer: " + layer_info.title + ": " + layer_info.gage.error, 'error_' + gage_layer_id, "message-error"); } else { var layer = getTileLayer(layer_info['gage'], layer_info['geoserver_url'], gage_layer_id); if (layer != null) { layers.push(layer); } else { appendErrorMessage("Gage Layer Invalid ... ", "error_" + gage_layer_id, "message-error"); } } } //add flood maps if they exist if('flood_maps' in layer_info) { var flood_maps = []; if ('geoserver_info_list' in layer_info.flood_maps) { var flood_map_dataset_id = 'layer' + group_index + 'g' + 7; var valid_floodmap_count = 0; layer_info.flood_maps.geoserver_info_list.forEach(function(flood_map_info, flood_map_index){ var flood_map_sublayer_id = flood_map_dataset_id + "f" + flood_map_index; if ("error" in flood_map_info) { appendErrorMessage("Flood Map Layer: " + layer_info.title + " " + flood_map_info.forecast_directory + ": " + flood_map_info.error, 'error_' +flood_map_sublayer_id , "message-error"); } else { var layer = getTileLayer(flood_map_info, layer_info.geoserver_url, flood_map_dataset_id); if (layer != null) { layer.setOpacity(0.5); if(valid_floodmap_count>0) { layer.setVisible(false); } valid_floodmap_count += 1; layer.set('watershed_name', layer_info.watershed); layer.set('subbasin_name', layer_info.subbasin); layer.set('date_timestep', flood_map_info.forecast_directory); 
layer.set("flood_map_sublayer_id", flood_map_sublayer_id); flood_maps.push(layer); } else { console.log("Invalid Floodmap Layer: "); console.log(flood_map_info); } } }); if (flood_maps.length > 0) { m_flood_maps.push(new ol.layer.Group({ layers: flood_maps, })); } } } //add ahps station if exists if('ahps_station' in layer_info) { var ahps_station_layer_id = 'layer' + group_index + 'g' + 3; if ("error" in layer_info.ahps_station) { appendErrorMessage("AHPS Station Layer: " + layer_info.title + ": " + layer_info.ahps_station.error, 'error_' + ahps_station_layer_id, "message-error") } else { var ahps_station_vector_source = new ol.source.ServerVector({ format: new ol.format.GeoJSON(), loader: function(extent, resolution, projection) { var url = layer_info.ahps_station.geojsonp + '&format_options=callback:loadFeatures' + ahps_station_layer_id + '&PROPERTYNAME=the_geom' + '&srsname=' + m_map_projection; jQuery.ajax({ url: encodeURI(url), dataType: 'jsonp', jsonpCallback: 'loadFeatures' + ahps_station_layer_id, success: function(response) { ahps_station_vector_source.addFeatures(ahps_station_vector_source.readFeatures(response)); }, }); }, strategy: ol.loadingstrategy.bbox, projection: m_map_projection }); var ahps_station = new ol.layer.Vector({ source: ahps_station_vector_source, style: new ol.style.Style({ image: new ol.style.RegularShape({ points: 5, radius: 7, stroke: new ol.style.Stroke({ color: 'rgba(0,255,0,0.3)' }), fill: new ol.style.Fill({ color: 'rgba(0,128,0,0.5)' }) }), }) }); ahps_station.set('geoserver_url', layer_info.ahps_station.geojsonp) ahps_station.set('watershed_name', layer_info.watershed); ahps_station.set('subbasin_name', layer_info.subbasin); ahps_station.set('extent', ol.proj.transformExtent(layer_info.ahps_station.latlon_bbox.map(Number), 'EPSG:4326', m_map_projection)); ahps_station.set('layer_id', ahps_station_layer_id); ahps_station.set('layer_type', 'geoserver'); layers.push(ahps_station); } } //add drainage line if exists 
if('drainage_line' in layer_info) { var drainage_line_layer_id = 'layer' + group_index + 'g' + 0; if ("error" in layer_info.drainage_line) { appendErrorMessage("Drainage Line Layer: " + layer_info.title + ": " + layer_info.drainage_line.error, "error_" + drainage_line_layer_id, "message-error"); } else { //check if required parameters exist if(layer_info['drainage_line']['missing_attributes'].length > 2) { appendErrorMessage('The drainage line layer for ' + layer_info['watershed'] + '(' + layer_info['subbasin'] + ') ' + 'is missing '+ layer_info['drainage_line']['missing_attributes'].join(", ") + ' attributes and will not function properly.', "layer_loading_error", "message-error"); } var drainage_line; //check layer capabilites if(layer_info['drainage_line']['geoserver_method'] == "natur_flow_query") { var load_features_xhr = null; var drainage_line_vector_source = new ol.source.ServerVector({ format: new ol.format.GeoJSON(), loader: function(extent, resolution, projection) { if (typeof this.url == 'undefined') { this.url = layer_info.drainage_line.geojsonp; } if (typeof this.contained_attributes == 'undefined') { this.contained_attributes = layer_info.drainage_line.contained_attributes.join(","); } if (typeof this.query_attribute == 'undefined') { this.query_attribute = layer_info.drainage_line.geoserver_query_attribute; } var stream_flow_limit = 5000; var map_zoom = m_map.getView().getZoom(); if (map_zoom >= 12) { stream_flow_limit = 0; } else if (map_zoom >= 11) { stream_flow_limit = 20; } else if (map_zoom >= 10) { stream_flow_limit = 100; } else if (map_zoom >= 9) { stream_flow_limit = 1000; } else if (map_zoom >= 8) { stream_flow_limit = 3000; } else if (map_zoom >= 7) { stream_flow_limit = 4000; } var url = this.url + '&format_options=callback:loadFeatures' + drainage_line_layer_id + '&PROPERTYNAME=the_geom,' + this.contained_attributes + '&CQL_FILTER=' + this.query_attribute + ' > ' + stream_flow_limit + ' AND bbox(the_geom,' + extent.join(',') + ',\'' + 
m_map_projection + '\')' + '&srsname=' + m_map_projection; //cancel load featues if still active if(load_features_xhr != null) { load_features_xhr.abort(); } //TODO: ADD LOADING MESSAGE load_features_xhr = jQuery.ajax({ url: encodeURI(url), dataType: 'jsonp', jsonpCallback: 'loadFeatures' + drainage_line_layer_id, }) .done(function(response){ drainage_line_vector_source.addFeatures(drainage_line_vector_source.readFeatures(response)); }) .always(function() { load_features_xhr = null; }); //ON ERROR ADD MESSAGE //ALWAYS REMOVE LOADING MESSAGE }, strategy: function(extent, resolution) { var zoom_range = 1; var map_zoom = m_map.getView().getZoom(); if (map_zoom >= 12) { zoom_range = 2; } else if (map_zoom >= 11) { zoom_range = 3; } else if (map_zoom >= 10) { zoom_range = 4; } else if (map_zoom >= 9) { zoom_range = 5; } else if (map_zoom >= 8) { zoom_range = 6; } else if (map_zoom >= 7) { zoom_range = 7; } if(zoom_range != this.zoom_range && typeof this.zoom_range != 'undefined') { this.clear(); } this.zoom_range = zoom_range; return [extent]; }, projection: m_map_projection, }); drainage_line = new ol.layer.Vector({ source: drainage_line_vector_source, maxResolution: 10000 }); } else if(layer_info['drainage_line']['geoserver_method'] == "river_order_query") { var load_features_xhr = null; var drainage_line_vector_source = new ol.source.ServerVector({ format: new ol.format.GeoJSON(), loader: function(extent, resolution, projection) { if (typeof this.url == 'undefined') { this.url = layer_info.drainage_line.geojsonp; } if (typeof this.contained_attributes == 'undefined') { this.contained_attributes = layer_info.drainage_line.contained_attributes.join(","); } if (typeof this.query_attribute == 'undefined') { this.query_attribute = layer_info.drainage_line.geoserver_query_attribute; } var river_order_limit = 1000; var map_zoom = m_map.getView().getZoom(); if (map_zoom >= 12) { river_order_limit = 0; } else if (map_zoom >= 11) { river_order_limit = 2; } else if (map_zoom >= 
10) { river_order_limit = 8; } else if (map_zoom >= 9) { river_order_limit = 64; } else if (map_zoom >= 8) { river_order_limit = 128; } else if (map_zoom >= 7) { river_order_limit = 300; } var url = this.url + '&format_options=callback:loadFeatures' + drainage_line_layer_id + '&PROPERTYNAME=the_geom,' + this.contained_attributes + '&CQL_FILTER=' + this.query_attribute + ' > ' + river_order_limit + ' AND bbox(the_geom,' + extent.join(',') + ',\'' + m_map_projection + '\')' + '&srsname=' + m_map_projection; //cancel load featues if still active if(load_features_xhr != null) { load_features_xhr.abort(); } //TODO: ADD LOADING MESSAGE load_features_xhr = jQuery.ajax({ url: encodeURI(url), dataType: 'jsonp', jsonpCallback: 'loadFeatures' + drainage_line_layer_id, }) .done(function(response){ drainage_line_vector_source.addFeatures(drainage_line_vector_source.readFeatures(response)); }) .always(function() { load_features_xhr = null; }); //ON ERROR ADD MESSAGE //ALWAYS REMOVE LOADING MESSAGE }, strategy: function(extent, resolution) { var zoom_range = 1; var map_zoom = m_map.getView().getZoom(); if (map_zoom >= 12) { zoom_range = 2; } else if (map_zoom >= 11) { zoom_range = 3; } else if (map_zoom >= 10) { zoom_range = 4; } else if (map_zoom >= 9) { zoom_range = 5; } else if (map_zoom >= 8) { zoom_range = 6; } else if (map_zoom >= 7) { zoom_range = 7; } if(zoom_range != this.zoom_range && typeof this.zoom_range != 'undefined') { this.clear(); } this.zoom_range = zoom_range; return [extent]; }, projection: m_map_projection, }); drainage_line = new ol.layer.Vector({ source: drainage_line_vector_source, maxResolution: 10000 }); } else { //layer_info['drainage_line']['geoserver_method'] == "simple" var drainage_line_vector_source = new ol.source.ServerVector({ format: new ol.format.GeoJSON(), loader: function(extent, resolution, projection) { this.geojsonp_url = layer_info['drainage_line']['geojsonp']; this.callback = 'loadFeatures' + drainage_line_layer_id; this.attributes = 
layer_info['drainage_line']['contained_attributes'].join(","); var url = this.geojsonp_url + '&format_options=callback:loadFeatures' + drainage_line_layer_id + '&PROPERTYNAME=the_geom,' + this.attributes + '&BBOX=' + extent.join(',') + ','+ m_map_projection + '&srsname=' + m_map_projection; jQuery.ajax({ url: encodeURI(url), dataType: 'jsonp', jsonpCallback: this.callback, success: function(response) { drainage_line_vector_source.addFeatures(drainage_line_vector_source.readFeatures(response)); }, }); }, strategy: ol.loadingstrategy.bbox, projection: m_map_projection }); drainage_line = new ol.layer.Vector({ source: drainage_line_vector_source, maxResolution: 1000 }); } layer_info['drainage_line']['contained_attributes'].some(function(attribute) { if (attribute.toLowerCase() == "comid" || attribute.toLowerCase() == "hydroid") { drainage_line.set('reach_id_attr_name', attribute); return true; } }); drainage_line.set('geoserver_url', layer_info['drainage_line']['geojsonp']) drainage_line.set('watershed_name', layer_info['watershed']); drainage_line.set('subbasin_name', layer_info['subbasin']); drainage_line.set('extent', ol.proj.transformExtent(layer_info['drainage_line']['latlon_bbox'].map(Number), 'EPSG:4326', m_map_projection)); drainage_line.set('layer_id', drainage_line_layer_id); drainage_line.set('layer_type', 'geoserver'); m_drainage_line_layers.push(drainage_line); layers.push(drainage_line); } } } else { //assume KML //add catchment if exists if('catchment' in layer_info) { var catchment_layer_id = 'layer' + group_index + 'g' + 1; layers.push(getKMLLayer(layer_info['catchment'], catchment_layer_id)); } //add gage if exists if('gage' in layer_info) { var gage_layer_id = 'layer' + group_index + 'g' + 2; layers.push(getKMLLayer(layer_info['gage'], gage_layer_id)); } //add drainage line if exists if('drainage_line' in layer_info) { var drainage_line_layer_id = 'layer' + group_index + 'g' + 0; var drainage_line_layer = getKMLLayer(layer_info['drainage_line'], 
drainage_line_layer_id, layer_info['watershed'], layer_info['subbasin']) layers.push(drainage_line_layer); m_drainage_line_layers.push(drainage_line_layer); } } //create empty layers to add data to later var return_20_layer = new ol.layer.Vector({ source: new ol.source.Cluster({ source: new ol.source.Vector({ source: []}), distance: 20 }), style: function(feature, resolution) { var features = feature.get("features"); var size = -1 if (typeof features != 'undefined') { size = features.length; } var style; if (size > 3) { style = [new ol.style.Style({ image: new ol.style.RegularShape({ points: 3, radius: 12, stroke: new ol.style.Stroke({ color: '#fff' }), fill: new ol.style.Fill({ color: 'rgba(128,0,128,0.7)' }) }), text: new ol.style.Text({ text: size.toString(), fill: new ol.style.Fill({ color: '#fff' }) }) })]; } else if (size < 0) { style = []; } else { style = []; for (var i=0; i<size; i++) { style.push(new ol.style.Style({ image: twenty_symbols[features[i].get('point_size')], })); } } return style; } }); return_20_layer.set('layer_id', 'layer' + group_index + 'g' + 4); return_20_layer.set('layer_type', 'warning_points'); return_20_layer.set('return_period', 20); return_20_layer.set('ecmwf_watershed_name', layer_info['ecmwf_watershed']); return_20_layer.set('ecmwf_subbasin_name', layer_info['ecmwf_subbasin']); var return_10_layer = new ol.layer.Vector({ source: new ol.source.Cluster({ source: new ol.source.Vector({ source: []}), distance: 20 }), style: function(feature, resolution) { var features = feature.get("features"); var size = -1 if (typeof features != 'undefined') { var size = features.length; } var style; if (size > 3) { style = [new ol.style.Style({ image: new ol.style.RegularShape({ points: 3, radius: 12, stroke: new ol.style.Stroke({ color: '#fff' }), fill: new ol.style.Fill({ color: 'rgba(255,0,0,0.6)' }) }), text: new ol.style.Text({ text: size.toString(), fill: new ol.style.Fill({ color: '#fff' }) }) })]; } else if (size < 0) { style = []; } else 
{ style = []; for (var i=0; i<size; i++) { style.push(new ol.style.Style({ image: ten_symbols[features[i].get('point_size')] })); } } return style; } }); return_10_layer.set('layer_id', 'layer' + group_index + 'g' + 5); return_10_layer.set('layer_type', 'warning_points'); return_10_layer.set('return_period', 10); return_10_layer.set('ecmwf_watershed_name', layer_info['ecmwf_watershed']); return_10_layer.set('ecmwf_subbasin_name', layer_info['ecmwf_subbasin']); var return_2_layer = new ol.layer.Vector({ source: new ol.source.Cluster({ source: new ol.source.Vector({ source: []}), distance: 20 }), style: function(feature, resolution) { var features = feature.get("features"); var size = -1 if (typeof features != 'undefined') { var size = features.length; } var style; if (size > 3) { style = [new ol.style.Style({ image: new ol.style.RegularShape({ points: 3, radius: 12, stroke: new ol.style.Stroke({ color: '#fff' }), fill: new ol.style.Fill({ color: 'rgba(255,255,0,0.6)' }) }), text: new ol.style.Text({ text: size.toString(), fill: new ol.style.Fill({ color: '#fff' }) }) })]; } else if (size < 0) { style = []; } else { style = []; for (var i=0; i<size; i++) { style.push(new ol.style.Style({ image: two_symbols[features[i].get('point_size')] })); } } return style; } }); return_2_layer.set('layer_id', 'layer' + group_index + 'g' + 6); return_2_layer.set('layer_type', 'warning_points'); return_2_layer.set('return_period', 2); return_2_layer.set('ecmwf_watershed_name', layer_info['ecmwf_watershed']); return_2_layer.set('ecmwf_subbasin_name', layer_info['ecmwf_subbasin']); layers.push(return_2_layer); layers.push(return_10_layer); layers.push(return_20_layer); //make sure there are layers to add if (layers.length > 0) { var group_layer = new ol.layer.Group({ layers: layers, }); all_group_layers.push(group_layer); } }); //send message to user if Drainage Line KML file not found if (m_drainage_line_layers.length <= 0) { appendErrorMessage('No valid drainage line layers found. 
Please upload to begin.', "drainage_line_error", "message-error"); } //make drainage line layers selectable m_select_interaction = new ol.interaction.Select({ layers: m_drainage_line_layers, }); if(m_flood_maps.length > 0) { all_group_layers = all_group_layers.concat(m_flood_maps); } var all_map_layers = [m_basemap_layer].concat(all_group_layers); //var all_map_layers = all_group_layers; //create map m_map = new ol.Map({ target: 'map', controls: ol.control.defaults().extend([ new ol.control.FullScreen(), new ol.control.ZoomToExtent(), ]), interactions: ol.interaction.defaults().extend([ new ol.interaction.DragRotateAndZoom(), m_select_interaction, ]), layers : all_map_layers, view: new ol.View({ center: [-33519607, 5616436], zoom: 8 }), }); //wait for kml layers to load and then zoom to them all_group_layers.forEach(function(group_layer){ if (group_layer instanceof ol.layer.Group) { group_layer.getLayers().forEach(function(vector_layer, j) { if(vector_layer.get('layer_type') == "kml") { var vector_source = vector_layer.getSource(); var listener_key = vector_source.on('change', function() { if (vector_source.getState() == 'ready') { bindInputs('#'+vector_layer.get('layer_id'), vector_layer); ol.extent.extend(m_map_extent, vector_source.getExtent()); m_map.getView().fitExtent(m_map_extent, m_map.getSize()); } }); } else if (vector_layer.get('layer_type') == "geoserver") { bindInputs('#'+vector_layer.get('layer_id'), vector_layer); ol.extent.extend(m_map_extent, vector_layer.get('extent')); m_map.getView().fitExtent(m_map_extent, m_map.getSize()); } else if (vector_layer.get('layer_type') == "warning_points") { var layer_id = '#'+vector_layer.get('layer_id'); bindInputs(layer_id, vector_layer); //get warning points for map jQuery.ajax({ type: "GET", url: 'get-warning-points', dataType: "json", data: { watershed_name: vector_layer.get('ecmwf_watershed_name'), subbasin_name: vector_layer.get('ecmwf_subbasin_name'), return_period: vector_layer.get('return_period'), }, }) 
.done(function (data) { if ("success" in data) { $(layer_id).parent().removeClass('hidden'); //symbols var feature_count = data.warning_points.length var features = []; var feature, geometry, symbol; for (var i = 0; i < feature_count; ++i) { geometry = new ol.geom.Point(ol.proj.transform([data.warning_points[i].lon, data.warning_points[i].lat], 'EPSG:4326', m_map_projection)); feature = new ol.Feature({ geometry: geometry, point_size: data.warning_points[i].size, comid: data.warning_points[i].comid }); features.push(feature); } var vector_source = vector_layer.getSource().getSource(); vector_source.addFeatures(features); m_map.render(); } else { console.log(data.error); //appendErrorMessage("Error: " + data["error"], "warning_points_error", "message-error"); } }) .fail(function (request, status, error) { console.log(error); //appendErrorMessage("Error: " + error, "warning_points_error", "message-error"); }); } }); } }); //bind flood maps m_flood_maps.forEach(function(layer_group, j) { layer_group.getLayers().forEach(function(layer, j) { if (j==0){ bindInputs('#'+layer.get('layer_id'), layer); } }); }); //when selected, call function to make hydrograph m_select_interaction.getFeatures().on('change:length', function(e) { if (e.target.getArray().length === 0) { // this means it's changed to no features selected } else { // this means there is at least 1 feature selected var selected_feature = e.target.item(0); // 1st feature in Collection loadHydrographFromFeature(selected_feature); } }); //change displayed flood map on click $('.flood_map_select').off().change(function() { var watershed_name = $(this).parent().parent().parent().parent().attr('watershed'); var subbasin_name = $(this).parent().parent().parent().parent().attr('subbasin'); var date_timestep = $(this).val(); m_flood_maps.forEach(function(layer_group, j) { layer_group.getLayers().forEach(function(layer, j) { if (layer.get('watershed_name') == watershed_name && layer.get('subbasin_name') == subbasin_name) { 
layer.setVisible(false); unbindInputs('#'+layer.get('layer_id')); } }); }); m_flood_maps.forEach(function(layer_group, j) { layer_group.getLayers().forEach(function(layer, j) { if (layer.get('watershed_name') == watershed_name && layer.get('subbasin_name') == subbasin_name) { if (layer.get('date_timestep') == date_timestep){ layer.setVisible(true); bindInputs('#'+layer.get('layer_id'), layer); } } }); }); }); //create function to zoom to layer $('.zoom-to-layer').off().click(function() { var layer_id = $(this).parent().parent().attr('id'); zoomToLayer(layer_id); }); //function to zoom to feature by id $('#submit-search-reach-id').off().click(function() { var watershed_info = $(this).parent().parent().find('#watershed_select').select2('val'); var reach_id = $(this).parent().parent().find('#reach-id-input').val(); zoomToFeature(watershed_info, reach_id); }); //zoom to all $('.ol-zoom-extent').off().click(function() { zoomToAll(); }); //show hide elements based on shape upload toggle selection $('#units-toggle').on('switchChange.bootstrapSwitch', function(event, state) { if(state) { //units metric m_units = "metric"; } else { //units english m_units = "english"; } if (m_selected_feature != null) { loadHydrographFromFeature(m_selected_feature); } }); //show/hide forecasts based on toggle $('#ecmwf-toggle').on('switchChange.bootstrapSwitch', function(event, state) { m_ecmwf_show = state; if (m_selected_feature != null && m_selected_ecmwf_watershed != null && m_selected_ecmwf_subbasin != null) { loadHydrographFromFeature(m_selected_feature); } }); $('#wrf-toggle').on('switchChange.bootstrapSwitch', function(event, state) { m_wrf_show = state; if (m_selected_feature != null && m_selected_wrf_hydro_watershed != null && m_selected_wrf_hydro_subbasin != null) { loadHydrographFromFeature(m_selected_feature); } }); //resize app content based on window size and nav bar $('.toggle-nav').off(); $(window).resize(function() { resizeAppContent(); }); }); return public_interface; 
}()); // End of package wrapper // NOTE: that the call operator (open-closed parenthesis) is used to invoke the library wrapper // function immediately after being parsed.
tethysapp/streamflow_prediction_tool/public/js/map.js
/***************************************************************************** * FILE: Streamflow Prediciton Map Tool * DATE: 08/24/2015 * AUTHOR: Alan Snow * COPYRIGHT: (c) 2015 US Army Corps of Engineers ERDC * LICENSE: BSD 2-Clause *****************************************************************************/ /***************************************************************************** * LIBRARY WRAPPER *****************************************************************************/ var ERFP_MAP = (function() { // Wrap the library in a package function "use strict"; // And enable strict mode for this library /************************************************************************ * MODULE LEVEL / GLOBAL VARIABLES *************************************************************************/ var public_interface, // Object returned by the module m_map, // the main map m_map_projection, //main map projection m_map_extent, //the extent of all objects in map m_basemap_layer, m_drainage_line_layers, m_select_interaction, m_selected_feature, m_selected_ecmwf_watershed, m_selected_ecmwf_subbasin, m_selected_wrf_hydro_watershed, m_selected_wrf_hydro_subbasin, m_selected_reach_id, m_selected_usgs_id, m_selected_nws_id, m_selected_hydroserver_url, m_downloading_ecmwf_hydrograph, m_downloading_era_interim_hydrograph, m_downloading_long_term_select, m_downloading_short_term_select, m_downloading_wrf_hydro_hydrograph, m_downloading_usgs, m_downloading_nws, m_downloading_hydroserver, m_searching_for_reach, m_long_term_chart_data_ajax_load_failed, m_long_term_select_data_ajax_handle, m_short_term_chart_data_ajax_load_failed, m_short_term_select_data_ajax_handle, m_ecmwf_start_folder, m_wrf_hydro_date_string, m_units, m_ecmwf_show, m_wrf_show, m_return_20_features_source, m_return_10_features_source, m_return_2_features_source, m_flood_maps; /************************************************************************ * PRIVATE FUNCTION DECLARATIONS 
/* PRIVATE FUNCTION DECLARATIONS (header continued) */
var resizeAppContent, bindInputs, convertTimeSeriesMetricToEnglish, getCI,
    convertTimeSeriesEnglishToMetric, isNotLoadingPastRequest, zoomToAll,
    zoomToLayer, zoomToFeature, toTitleCase, datePadString, getBaseLayer,
    getTileLayer, getKMLLayer, clearAllMessages, clearInfoMessages,
    clearOldChart, dateToUTCString, clearChartSelect2, getChartData,
    displayHydrograph, loadHydrographFromFeature, resetChartSelectMessage,
    addECMWFSeriesToCharts, addSeriesToCharts, isThereDataToLoad,
    checkCleanString, dateToUTCDateTimeString, getValidSeries,
    convertValueMetricToEnglish, unbindInputs;
/************************************************************************
 *                PRIVATE FUNCTION IMPLEMENTATIONS
 ************************************************************************/
//FUNCTION: clears the chart area, hides the forecast select controls,
//and removes all info/error messages.
resetChartSelectMessage = function() {
    //remove old chart regardless of state
    clearOldChart('long-term');
    $('.short-term-select').addClass('hidden');
    $('.long-term-select').addClass('hidden');
    //clear messages
    clearAllMessages();
};

//FUNCTION: resizes the map div, nav layout, toggle column, and the
//Highcharts chart to fit the current window size.
resizeAppContent = function() {
    var map_div = $('#inner-app-content').children().first();
    map_div.attr("style", "height:" + parseInt($(document).height()*0.8) + "px");
    if (typeof m_map != 'undefined') {
        m_map.updateSize();
    }

    var document_width = $(document).width();
    if (document_width > 900) {
        $('#app-content-wrapper').addClass('show-nav');
    }
    $('.container').removeClass('no-padding');

    //chart height/width ratio depends on window width
    var height_ratio = (document_width > 1500) ? 0.57 : 0.97;

    //pick the bootstrap column class for the WRF-Hydro toggle column
    var toggle_col = $('#wrf_toogle_col');
    toggle_col.removeClass('col-sm-2')
              .removeClass('col-sm-3')
              .removeClass('col-sm-4');
    var col_class = 'col-sm-3'; //default when document_width <= 700
    if (document_width > 1900) {
        col_class = 'col-sm-2';
    } else if (document_width > 1400) {
        col_class = 'col-sm-3';
    } else if (document_width > 900) {
        col_class = 'col-sm-4';
    } else if (document_width > 700) {
        col_class = 'col-sm-2';
    }
    toggle_col.addClass(col_class);

    //resize highchart to match its row width
    var long_term_chart = $("#long-term-chart").highcharts();
    if (typeof long_term_chart != 'undefined') {
        var width = $("#long-term-chart-row").width();
        long_term_chart.setSize(0.97*width, height_ratio*width);
    }
};

//FUNCTION: binds the layer visibility checkbox and opacity slider inside
//the DOM element with id layerid to the given OpenLayers layer.
bindInputs = function(layerid, layer) {
    var visibility_input = $(layerid + ' input.visible');
    visibility_input.prop("checked", layer.getVisible());
    visibility_input.on('change', function() {
        layer.setVisible(this.checked);
    });
    var opacity_input = $(layerid + ' input.opacity');
    opacity_input.val(layer.getOpacity());
    opacity_input.on('input change', function() {
        layer.setOpacity(parseFloat(this.value));
    });
};

//FUNCTION: removes the handlers attached by bindInputs.
unbindInputs = function(layerid) {
    $(layerid + ' input.visible').off();
    $(layerid + ' input.opacity').off();
};

//FUNCTION: returns true if at least one dataset is toggled on and has a
//valid selection, i.e. there is something to draw on the chart.
isThereDataToLoad = function() {
    if (m_ecmwf_show && m_selected_ecmwf_watershed != null &&
        m_selected_ecmwf_subbasin != null) {
        return true;
    }
    if (m_wrf_show && m_selected_wrf_hydro_watershed != null &&
        m_selected_wrf_hydro_subbasin != null) {
        return true;
    }
    if (!isNaN(m_selected_usgs_id) && m_selected_usgs_id != null) {
        return true;
    }
    if (!isNaN(m_selected_nws_id) && m_selected_nws_id != null) {
        return true;
    }
    return m_selected_hydroserver_url != null;
};

//FUNCTION: converts a single flow value from cms to cfs when the app is
//in english units; returns the value unchanged in metric mode.
convertValueMetricToEnglish = function(data_value) {
    //35.3146667 cubic feet per cubic meter
    var conversion_factor = (m_units == "english") ? 35.3146667 : 1;
    return data_value * conversion_factor;
};

//FUNCTION: converts a time series from metric to english units. Each row
//is [timestamp, v1, v2, ...]; the timestamp is preserved and every value
//column is converted and rounded to 5 decimal places.
convertTimeSeriesMetricToEnglish = function(time_series) {
    var conversion_factor = (m_units == "english") ? 35.3146667 : 1;
    return time_series.map(function(data_row) {
        var new_data_array = [data_row[0]];
        for (var i = 1; i < data_row.length; i++) {
            new_data_array.push(parseFloat((data_row[i]*conversion_factor).toFixed(5)));
        }
        return new_data_array;
    });
};

//FUNCTION: converts an observed time series (USGS object rows or
//[date, value] rows) from english to metric units, parsing the dates.
//NOTE(review): non-TypeError exceptions are silently ignored here, as in
//the original implementation — confirm this best-effort behavior is intended.
convertTimeSeriesEnglishToMetric = function(time_series, series_name) {
    var new_time_series = [];
    //divide by 35.3146667 cfs-per-cms when displaying metric
    var conversion_factor = (m_units == "metric") ? 35.3146667 : 1;
    try {
        time_series.forEach(function(data) {
            var date_time_value, data_value;
            if (series_name == "USGS") {
                data_value = data.value;
                date_time_value = data.dateTime;
            } else {
                date_time_value = data[0];
                data_value = data[1];
            }
            new_time_series.push([Date.parse(date_time_value),
                                  parseFloat(data_value)/conversion_factor]);
        });
    } catch (e) {
        if (e instanceof TypeError) {
            appendErrorMessage("Error loading " + series_name + " data.",
                               "load_series_error", "message-error");
        }
    }
    return new_time_series;
};

//FUNCTION: trims a string and returns null if it is empty, null, or
//undefined; non-string inputs are returned unchanged.
checkCleanString = function(string) {
    if (typeof string == 'undefined' || string == null) {
        return null;
    }
    if (typeof string != 'string') {
        return string;
    }
    var trimmed = string.trim();
    return (trimmed.length <= 0) ? null : trimmed;
};

//FUNCTION: case-insensitive lookup of an OpenLayers object property;
//returns the cleaned value or null when the property does not exist.
getCI = function(obj, prop) {
    var target = prop.toLowerCase();
    var properties = obj.getProperties();
    for (var key in properties) {
        if (key.toLowerCase() == target) {
            return checkCleanString(obj.get(key));
        }
    }
    return null;
};

//FUNCTION: returns the first series in series_array with no negative
//values (negative values mark missing/invalid data); null if none.
getValidSeries = function(series_array) {
    if (series_array == null) {
        return null;
    }
    for (var i = 0; i < series_array.length; i++) {
        var valid_series = true;
        for (var j = 0; j < series_array[i].length; j++) {
            if (series_array[i][j][1] < 0) {
                valid_series = false;
                break;
            }
        }
        if (valid_series) {
            return series_array[i];
        }
    }
    return null;
};

//FUNCTION: true when no dataset download is currently in progress.
isNotLoadingPastRequest = function() {
    return !(m_downloading_ecmwf_hydrograph || m_downloading_long_term_select ||
             m_downloading_usgs || m_downloading_nws ||
             m_downloading_hydroserver || m_downloading_short_term_select ||
             m_downloading_wrf_hydro_hydrograph ||
             m_downloading_era_interim_hydrograph);
};

//FUNCTION: zooms to the combined extent of all loaded layers.
zoomToAll = function() {
    m_map.getView().fitExtent(m_map_extent, m_map.getSize());
};

//FUNCTION: zooms to the layer whose 'layer_id' property matches layer_id.
zoomToLayer = function(layer_id) {
    m_map.getLayers().forEach(function(layer, i) {
        if (!(layer instanceof ol.layer.Group)) {
            return;
        }
        layer.getLayers().forEach(function(sublayer, j) {
            if (sublayer.get('layer_id') != layer_id) {
                return;
            }
            var sublayer_type = sublayer.get('layer_type');
            if (sublayer_type == "kml") {
                m_map.getView().fitExtent(sublayer.getSource().getExtent(),
                                          m_map.getSize());
            } else if (sublayer_type == "geoserver") {
                m_map.getView().fitExtent(sublayer.get('extent'),
                                          m_map.getSize());
            }
        });
    });
};

//FUNCTION: searches the drainage line layer of the selected watershed for
//the given reach id and zooms to/selects the matching feature.
//watershed_info is "watershed_name:subbasin_name".
zoomToFeature = function(watershed_info, reach_id) {
    if (m_searching_for_reach) {
        return; //a geoserver search is still in flight
    }
    $("#reach-id-help-message").text('');
    $("#reach-id-help-message").parent().removeClass('alert-danger');
    var search_id_button = $("#submit-search-reach-id");
    var search_id_button_html = search_id_button.html();
    search_id_button.text('Searching ...');
    var watershed_split = watershed_info.split(":");
    var watershed_name = watershed_split[0];
    var subbasin_name = watershed_split[1];
    m_drainage_line_layers.forEach(function(drainage_line_layer, j) {
        if (drainage_line_layer.get('watershed_name') != watershed_name ||
            drainage_line_layer.get('subbasin_name') != subbasin_name) {
            return;
        }
        if (drainage_line_layer.get('layer_type') == "kml") {
            //KML: features are already loaded - scan them locally
            var features = drainage_line_layer.getSource().getFeatures();
            for (var i = 0; i < features.length; i++) {
                var feature_reach_id = getCI(features[i], 'COMID');
                if (feature_reach_id == null || isNaN(feature_reach_id)) {
                    feature_reach_id = getCI(features[i], 'hydroid');
                }
                if (feature_reach_id == reach_id) {
                    var geometry = features[i].get('geometry');
                    m_map.getView().fitExtent(geometry.getExtent(), m_map.getSize());
                    search_id_button.html(search_id_button_html);
                    m_select_interaction.getFeatures().clear();
                    m_select_interaction.getFeatures().push(features[i]);
                    return;
                }
            }
            $("#reach-id-help-message").text('Reach ID ' + reach_id + ' not found');
            $("#reach-id-help-message").parent().addClass('alert-danger');
            search_id_button.html(search_id_button_html);
            return;
        } else if (drainage_line_layer.get('layer_type') == "geoserver") {
            var reach_id_attr_name = getCI(drainage_line_layer, 'reach_id_attr_name');
            if (reach_id_attr_name == null) {
                //BUGFIX: previously m_searching_for_reach was left true and
                //the button label was never restored here, permanently
                //disabling the search form.
                $("#reach-id-help-message").text('No valid reach ID attribute found.');
                $("#reach-id-help-message").parent().addClass('alert-danger');
                search_id_button.html(search_id_button_html);
                return;
            }
            m_searching_for_reach = true;
            //TODO: Make query more robust
            //use the case-insensitively resolved attribute name for the filter
            var url = drainage_line_layer.get('geoserver_url') +
                      '&format_options=callback:searchFeatures' +
                      '&CQL_FILTER=' + reach_id_attr_name + ' =' + reach_id +
                      '&srsname=' + m_map_projection;
            jQuery.ajax({
                url: encodeURI(url),
                dataType: 'jsonp',
                jsonpCallback: 'searchFeatures',
            })
            .done(function(response) {
                if (response.totalFeatures > 0) {
                    var features = drainage_line_layer.getSource().readFeatures(response);
                    m_map.getView().fitExtent(features[0].getGeometry().getExtent(),
                                              m_map.getSize());
                    m_select_interaction.getFeatures().clear();
                    m_select_interaction.getFeatures().push(features[0]);
                } else {
                    $("#reach-id-help-message").text('Reach ID ' + reach_id + ' not found');
                    $("#reach-id-help-message").parent().addClass('alert-danger');
                }
            })
            .always(function() {
                m_searching_for_reach = false;
                search_id_button.html(search_id_button_html);
            });
            return;
        }
    });
};

//FUNCTION: converts a string to title case.
toTitleCase = function(str) {
    return str.replace(/\w\S*/g, function(txt) {
        return txt.charAt(0).toUpperCase() + txt.substr(1).toLowerCase();
    });
};

//FUNCTION: zero-pads an integer to two digits, returned as a string.
datePadString = function(i) {
    return (i < 10) ? "0" + i : "" + i;
};

//FUNCTION: returns the base layer for the given provider name;
//falls back to the MapQuest satellite/hybrid group.
getBaseLayer = function(base_layer_name, api_key) {
    if (base_layer_name == "BingMaps") {
        return new ol.layer.Tile({
            source: new ol.source.BingMaps({key: api_key, imagerySet: "AerialWithLabels"}),
        });
    }
    else if (base_layer_name == "Esri") {
        return new ol.layer.Tile({
            source: new ol.source.XYZ({
                attributions: [new ol.Attribution({
                    html: 'Tiles &copy; <a href="http://services.arcgisonline.com/ArcGIS/' +
                          'rest/services/World_Topo_Map/MapServer">ArcGIS</a>'
                })],
                url: 'http://server.arcgisonline.com/ArcGIS/rest/services/' +
                     'World_Topo_Map/MapServer/tile/{z}/{y}/{x}'
            })
        });
    }
    //default to mapquest
    return new ol.layer.Group({
        style: 'AerialWithLabels',
        layers: [
            new ol.layer.Tile({
                source: new ol.source.MapQuest({layer: 'sat'})
            }),
            new ol.layer.Tile({
                source: new ol.source.MapQuest({layer: 'hyb'})
            })
        ]
    });
};

//FUNCTION: builds a KML vector layer served by geoserver.
getKMLLayer = function(layer_info, layer_id, watershed_name, subbasin_name) {
    var layer = new ol.layer.Vector({
        source: new ol.source.KML({
            //NOTE(review): `new` on ol.proj.get is kept as in the original;
            //the function returns a Projection object so behavior is identical.
            projection: new ol.proj.get(m_map_projection),
            url: layer_info,
        }),
    });
    layer.set('layer_id', layer_id);
    layer.set('layer_type', 'kml');
    if (typeof watershed_name != 'undefined') {
        layer.set('watershed_name', watershed_name);
    }
    if (typeof subbasin_name != 'undefined') {
        layer.set('subbasin_name', subbasin_name);
    }
    return layer;
};

//FUNCTION: builds a tiled WMS layer from geoserver; returns null when the
//layer's lat/lon bounding box is degenerate (zero width or height).
getTileLayer = function(layer_info, geoserver_url, layer_id) {
    //validate extent
    var extent = layer_info['latlon_bbox'].map(Number);
    if (Math.abs(extent[0]-extent[1]) > 0.001 && Math.abs(extent[2]-extent[3]) > 0.001) {
        var layer = new ol.layer.Tile({
            source: new ol.source.TileWMS({
                url: geoserver_url,
                params: {'LAYERS': layer_info['name'], 'TILED': true},
                serverType: 'geoserver',
            }),
        });
        layer.set('extent', ol.proj.transformExtent(extent, 'EPSG:4326', m_map_projection));
        layer.set('layer_id', layer_id);
        layer.set('layer_type', 'geoserver');
        return layer;
    }
    return null;
};

//FUNCTION: empties and hides the info message div.
clearInfoMessages = function() {
    $('#message').addClass('hidden');
    $('#message').empty();
};

//FUNCTION: empties and hides both the info and error message divs.
clearAllMessages = function() {
    clearInfoMessages();
    $('#message-error').addClass('hidden');
    $('#message-error').empty();
};

//FUNCTION: destroys and empties an existing highchart, if any.
clearOldChart = function(model_name) {
    var highcharts_attr = $('#' + model_name + '-chart').attr('data-highcharts-chart');
    // For some browsers, `attr` is undefined; for others,
    // `attr` is false. Check for both.
    if (typeof highcharts_attr !== typeof undefined && highcharts_attr !== false) {
        $("#" + model_name + "-chart").highcharts().destroy();
        $('#' + model_name + '-chart').empty();
    }
};

//FUNCTION: tears down the select2 widget for a chart, if initialized.
clearChartSelect2 = function(model_name) {
    if ($('#' + model_name + '-select').data('select2')) {
        $('#' + model_name + '-select').off('change.select2') //remove event handler
                                       .select2('val', '')    //remove selection
                                       .select2('destroy');   //destroy
    }
};

//FUNCTION: converts a date to a UTC string in the format yyyy-mm-dd.
dateToUTCString = function(date) {
    return datePadString(date.getUTCFullYear()) + "-" +
           datePadString(1 + date.getUTCMonth()) + "-" +
           datePadString(date.getUTCDate());
};

//FUNCTION: converts a date to a UTC string in the format
//yyyy-mm-ddT00:00:00 (midnight UTC).
dateToUTCDateTimeString = function(date) {
    return dateToUTCString(date) + "T00:00:00";
};

//FUNCTION: adds an ECMWF series (converted to the display units) to the
//long-term chart; series_type marks range bands linked to the previous series.
addECMWFSeriesToCharts = function(series_name, series_data, series_color, series_type) {
    var long_term_chart = $("#long-term-chart").highcharts();
    var new_series = {
        name: series_name,
        data: convertTimeSeriesMetricToEnglish(series_data),
        color: series_color,
        selected: true
    };
    //NOTE: removed dead always-true `new_series != null` check
    if (typeof series_type != 'undefined') {
        new_series.type = series_type;
        new_series.lineWidth = 0;
        new_series.linkedTo = ":previous";
        new_series.fillOpacity = 0.3;
    }
    long_term_chart.addSeries(new_series);
};
//FUNCTION: adds data to the chart
addSeriesToCharts = function(series){ var long_term_chart = $("#long-term-chart").highcharts(); long_term_chart.addSeries(series); $("#long-term-chart").removeClass("hidden"); }; //FUNCTION: gets all data for chart getChartData = function() { if(!isNotLoadingPastRequest()) { //updateInfoAlert addWarningMessage("Please wait for datasets to download before making another selection."); } else if (!isThereDataToLoad()) { resetChartSelectMessage(); //updateInfoAlert addWarningMessage("No data found to load. Please toggle on a dataset."); } else { resetChartSelectMessage(); m_long_term_chart_data_ajax_load_failed = false; //turn off select interaction m_map.removeInteraction(m_select_interaction); addInfoMessage("Retrieving Data ..."); var y_axis_title = "Flow (cms)"; if (m_units == "english") { y_axis_title = "Flow (cfs)"; } var default_chart_settings = { title: { text: "Forecast"}, chart: { zoomType: 'x', }, rangeSelector: { selected: 0 }, plotOptions: { series: { marker: { enabled: false } } }, xAxis: { type: 'datetime', title: { text: 'Date (UTC)' }, minRange: 1 * 24 * 3600000 // one day }, yAxis: { title: { text: y_axis_title }, min: 0 }, }; //handle subtitles - ECMWF first priority var subtitle = null; if(m_selected_ecmwf_watershed != null && m_selected_ecmwf_subbasin != null) { subtitle = {text: toTitleCase(m_selected_ecmwf_watershed) + " (" + toTitleCase(m_selected_ecmwf_subbasin) + "): " + m_selected_reach_id} } else if (m_selected_wrf_hydro_watershed != null && m_selected_wrf_hydro_subbasin != null) { subtitle = {text: toTitleCase(m_selected_wrf_hydro_watershed) + " (" + toTitleCase(m_selected_wrf_hydro_subbasin) + "): " + m_selected_reach_id} } if (subtitle != null) { default_chart_settings.subtitle = subtitle; } $("#long-term-chart").highcharts('StockChart', default_chart_settings); //get ecmwf data if (m_ecmwf_show && m_selected_ecmwf_watershed != null && m_selected_ecmwf_subbasin != null) { m_downloading_ecmwf_hydrograph = true; var xhr_ecmwf_hydrograph = 
jQuery.ajax({ type: "GET", url: "ecmwf-get-hydrograph", dataType: "json", data: { watershed_name: m_selected_ecmwf_watershed, subbasin_name: m_selected_ecmwf_subbasin, reach_id: m_selected_reach_id, start_folder: m_ecmwf_start_folder, }, }); xhr_ecmwf_hydrograph.done(function (data) { if ("success" in data) { if ("mean" in data) { addECMWFSeriesToCharts("ECMWF", data.mean, Highcharts.getOptions().colors[2]); } if ("outer_range" in data) { addECMWFSeriesToCharts("ECMWF - Outer Range", data.outer_range, Highcharts.getOptions().colors[2], 'arearange'); } if ("std_dev_range" in data) { addECMWFSeriesToCharts("ECMWF - Std. Dev.", data.std_dev_range, Highcharts.getOptions().colors[2], 'arearange'); } if ("high_res" in data) { addECMWFSeriesToCharts("ECMWF - High Res.", data.high_res, Highcharts.getOptions().colors[1]); } $('.long-term-select').removeClass('hidden'); $('#long-term-chart').removeClass('hidden'); } else { m_long_term_chart_data_ajax_load_failed = true; appendErrorMessage(data["error"], "ecmwf_error", "message-error"); clearChartSelect2('long-term'); } }) .fail(function (request, status, error) { m_long_term_chart_data_ajax_load_failed = true; appendErrorMessage("Error: " + error, "ecmwf_error", "message-error"); clearChartSelect2('long-term'); }) .always(function () { m_downloading_ecmwf_hydrograph = false; m_map.addInteraction(m_select_interaction); if(isNotLoadingPastRequest()){ clearInfoMessages(); } }); m_downloading_era_interim_hydrograph = true; jQuery.ajax({ type: "GET", url: "era-interim-get-hydrograph", dataType: "json", data: { watershed_name: m_selected_ecmwf_watershed, subbasin_name: m_selected_ecmwf_subbasin, reach_id: m_selected_reach_id, }, }) .done(function (data) { if ("success" in data) { var long_term_chart = $("#long-term-chart").highcharts(); //load interim data to chart xhr_ecmwf_hydrograph.always(function(){ if ("era_interim" in data) { if (!("error" in data.era_interim)) { var era_interim_series = { name: "ERA Interim", data: 
convertTimeSeriesMetricToEnglish(data.era_interim.series), dashStyle: 'longdash', color: Highcharts.getOptions().colors[10], }; long_term_chart.addSeries(era_interim_series); } else { appendErrorMessage("Error: " + data.era_interim.error, "era_interim_error", "message-error"); } } //load return peeriod data to chart if ("return_period" in data) { if (!("error" in data.return_period)) { var extremes = long_term_chart.yAxis[0].getExtremes(); var maxY = Math.max(extremes.max, convertValueMetricToEnglish(parseFloat(data.return_period.max))); long_term_chart.yAxis[0].addPlotBand({ from: convertValueMetricToEnglish(parseFloat(data.return_period.twenty)), to: convertValueMetricToEnglish(maxY), color: 'rgba(128,0,128,0.4)', id: '20-yr', label: { text: '20-yr', align: 'right', } }); long_term_chart.yAxis[0].addPlotBand({ from: convertValueMetricToEnglish(parseFloat(data.return_period.ten)), to: convertValueMetricToEnglish(parseFloat(data.return_period.twenty)), color: 'rgba(255,0,0,0.3)', id: '10-yr', label: { text: '10-yr', align: 'right', } }); long_term_chart.yAxis[0].addPlotBand({ from: convertValueMetricToEnglish(parseFloat(data.return_period.two)), to: convertValueMetricToEnglish(parseFloat(data.return_period.ten)), color: 'rgba(255,255,0,0.3)', id: '2-yr', label: { text: '2-yr', align: 'right', } }); } else { appendErrorMessage("Error: " + data.return_period.error, "era_interim_error", "message-error"); } } //if ERA Interim series present, show chart if ("era_interim" in data) { if (!("error" in data.era_interim)) { $('#long-term-chart').removeClass('hidden'); } } }); } else { appendErrorMessage("Error: " + data.error, "era_interim_error", "message-error"); } }) .fail(function (request, status, error) { appendErrorMessage("Error: " + error, "era_interim_error", "message-error"); }) .always(function () { m_downloading_era_interim_hydrograph = false; m_map.addInteraction(m_select_interaction); if(isNotLoadingPastRequest()){ clearInfoMessages(); } }); } //if there is a 
wrf watershed & subbasin attribute if (m_wrf_show && m_selected_wrf_hydro_watershed != null && m_selected_wrf_hydro_subbasin != null) { m_downloading_wrf_hydro_hydrograph = true; jQuery.ajax({ type: "GET", url: "wrf-hydro-get-hydrograph", dataType: "json", data: { watershed_name: m_selected_wrf_hydro_watershed, subbasin_name: m_selected_wrf_hydro_subbasin, reach_id: m_selected_reach_id, date_string: m_wrf_hydro_date_string, }, }) .done(function (data) { if ("success" in data) { //wrf_hydro if ("wrf_hydro" in data) { var wrf_series = { name: "WRF-Hydro (HRRR)", data: convertTimeSeriesMetricToEnglish(data.wrf_hydro), dashStyle: 'longdash', color: Highcharts.getOptions().colors[3] }; var long_term_chart = $("#long-term-chart").highcharts(); long_term_chart.addSeries(wrf_series); $('.short-term-select').removeClass('hidden'); $('#long-term-chart').removeClass('hidden'); } } else { m_short_term_chart_data_ajax_load_failed = true; appendErrorMessage("Error: " + data["error"], "wrf_hydro_error", "message-error"); clearChartSelect2('short-term'); } }) .fail(function (request, status, error) { m_short_term_chart_data_ajax_load_failed = true; appendErrorMessage("Error: " + error, "wrf_hydro_error", "message-error"); clearChartSelect2('short-term'); }) .always(function () { m_downloading_wrf_hydro_hydrograph = false; m_map.addInteraction(m_select_interaction); if(isNotLoadingPastRequest()){ clearInfoMessages(); } }); } //get current dates var date_now = new Date(); var date_past = new Date(); date_past.setUTCDate(date_now.getUTCDate()-3); var date_future = new Date(); date_future.setUTCDate(date_now.getUTCDate()+15); var date_observed_end = date_now; var date_nws_end = date_future; //ECMWF Dates var ecmwf_date_forecast_begin = new Date(8640000000000000); var ecmwf_date_forecast_end = new Date(-8640000000000000); var get_ecmwf = m_ecmwf_start_folder != null && typeof m_ecmwf_start_folder != "undefined" && m_ecmwf_start_folder != "most_recent" && m_ecmwf_show; //get ECMWF 
forcast dates if available if(get_ecmwf) { var ecmwf_forecast_start_year = parseInt(m_ecmwf_start_folder.substring(0,4)); var ecmwf_forecast_start_month = parseInt(m_ecmwf_start_folder.substring(4,6)); var ecmwf_forecast_start_day = parseInt(m_ecmwf_start_folder.substring(6,8)); var ecmwf_forecast_start_hour = parseInt(m_ecmwf_start_folder.split(".")[1].substring(0,2)); ecmwf_date_forecast_begin = new Date(Date.UTC(ecmwf_forecast_start_year, ecmwf_forecast_start_month-1, ecmwf_forecast_start_day, ecmwf_forecast_start_hour)); ecmwf_date_forecast_end = new Date(); ecmwf_date_forecast_end.setUTCDate(ecmwf_date_forecast_begin.getUTCDate()+15); //reset dates if applicable date_observed_end = ecmwf_date_forecast_end; date_nws_end = ecmwf_date_forecast_end; } //WRF-Hydro Dates var wrf_hydro_date_forecast_begin = new Date(8640000000000000); var wrf_hydro_date_forecast_end = new Date(-8640000000000000); //get WRF-Hydro forcast dates if available if(m_wrf_hydro_date_string != null && typeof m_wrf_hydro_date_string != "undefined" && m_wrf_hydro_date_string != "most_recent" && m_wrf_show) { var wrf_hydro_forecast_start_year = parseInt(m_wrf_hydro_date_string.substring(0,4)); var wrf_hydro_forecast_start_month = parseInt(m_wrf_hydro_date_string.substring(4,6)); var wrf_hydro_forecast_start_day = parseInt(m_wrf_hydro_date_string.substring(6,8)); var wrf_hydro_forecast_start_hour = parseInt(m_wrf_hydro_date_string.split("T")[1].substring(0,2)); wrf_hydro_date_forecast_begin = new Date(Date.UTC(wrf_hydro_forecast_start_year, wrf_hydro_forecast_start_month-1, wrf_hydro_forecast_start_day, wrf_hydro_forecast_start_hour)); wrf_hydro_date_forecast_end = new Date(wrf_hydro_date_forecast_begin.getTime()+15*60*60000); //reset dates if applicable if(get_ecmwf) { date_observed_end = new Date(Math.max.apply(null,[date_observed_end, wrf_hydro_date_forecast_end])); date_nws_end = new Date(Math.max.apply(null,[date_nws_end, wrf_hydro_date_forecast_end])); } else { date_observed_end = 
wrf_hydro_date_forecast_end; date_nws_end = wrf_hydro_date_forecast_end; } } var date_observed_start = new Date(Math.min.apply(null,[date_past, ecmwf_date_forecast_begin, wrf_hydro_date_forecast_begin])); var date_nws_start = new Date(Math.min.apply(null,[date_now, ecmwf_date_forecast_begin, wrf_hydro_date_forecast_begin])); //Get USGS data if USGS ID attribute exists if(!isNaN(m_selected_usgs_id) && m_selected_usgs_id != null) { if(m_selected_usgs_id.length >= 8) { m_downloading_usgs = true; //get USGS data var chart_usgs_data_ajax_handle = jQuery.ajax({ type: "GET", url: "http://waterservices.usgs.gov/nwis/iv/", dataType: "json", data: { format: 'json', sites: m_selected_usgs_id, startDT: dateToUTCString(date_observed_start), endDT: dateToUTCString(date_observed_end), parameterCd: '00060', }, }) .done(function (data) { if (typeof data != 'undefined') { try { var usgs_series = { name: "USGS (" + m_selected_usgs_id + ")", data: convertTimeSeriesEnglishToMetric(data.value.timeSeries[0].values[0].value, "USGS"), dashStyle: 'longdash', color: Highcharts.getOptions().colors[0] }; addSeriesToCharts(usgs_series); } catch (e) { if (e instanceof TypeError) { appendErrorMessage("Recent USGS data not found.", "usgs_error", "message-error"); } } } }) .fail(function (request, status, error) { appendErrorMessage("USGS Error: " + error, "usgs_error", "message-error"); }) .always(function () { m_downloading_usgs = false; if(isNotLoadingPastRequest()){ clearInfoMessages(); } }); } } //Get AHPS data if NWD ID attribute exists if(m_selected_nws_id != null) { m_downloading_nws = true; //get NWS data //Example URL: http://ua-fews.ua.edu/WaterMlService/waterml? 
// request=GetObservation&featureId=ACRT2&observedProperty=QINE // &beginPosition=2015-01-01T00:00:00&endPosition=2015-06-22T00:00:00 var chart_nws_data_ajax_handle = jQuery.ajax({ type: "GET", url: "http://ua-fews.ua.edu/WaterMlService/waterml", data: { request: 'GetObservation', observedProperty: 'QINE', featureId: m_selected_nws_id, beginPosition: dateToUTCDateTimeString(date_nws_start), endPosition: dateToUTCDateTimeString(date_nws_end), }, }) .done(function(data) { //var series_data = getValidSeries(WATERML.get_json_from_streamflow_waterml(data, m_units)); var series_data = WATERML.get_json_from_streamflow_waterml(data, m_units, "T0 (Time of analysis)"); if(series_data == null) { appendErrorMessage("No valid recent data found for AHPS (" + m_selected_nws_id + ")", "ahps_error", "message-error"); } else { var ahps_series = { name: "AHPS (" + m_selected_nws_id + ")", data: series_data[0], dashStyle: 'longdash', color: Highcharts.getOptions().colors[4], }; addSeriesToCharts(ahps_series); $('#long-term-chart').removeClass('hidden'); } }) .fail(function(request, status, error) { appendErrorMessage("AHPS Error: " + error, "ahps_error", "message-error"); }) .always(function() { m_downloading_nws = false; if(isNotLoadingPastRequest()){ clearInfoMessages(); } }); } //Get HydroServer Data if Available if(m_selected_hydroserver_url != null) { m_downloading_hydroserver = true; //get WorldWater data var chart_ww_data_ajax_handle = jQuery.ajax({ type: "GET", url: m_selected_hydroserver_url, data: { startDate: dateToUTCString(date_observed_start), endDate: dateToUTCString(date_observed_end), }, }) .done(function(data) { var series_data = WATERML.get_json_from_streamflow_waterml(data, m_units); if(series_data == null) { appendErrorMessage("No data found for WorldWater", "hydro_server_error", "message-error"); } else { var hydro_server_series = { name: "HydroServer", data: series_data[0], dashStyle: 'longdash', color: Highcharts.getOptions().colors[5], }; 
addSeriesToCharts(hydro_server_series); } }) .fail(function(request, status, error) { appendErrorMessage("Error: " + error, "hydro_server_error", "message-error"); }) .always(function() { m_downloading_hydroserver = false; if(isNotLoadingPastRequest()){ clearInfoMessages(); } }); } } }; //FUNCTION: displays hydrograph at stream segment displayHydrograph = function() { //check if old ajax call still running if(!isNotLoadingPastRequest()) { //updateInfoAlert appendWarningMessage("Please wait for datasets to download before making another selection.", "wait_warning"); } else if (!isThereDataToLoad()) { resetChartSelectMessage(); //updateInfoAlert addWarningMessage("No data found to load. Please toggle on a dataset."); } else { resetChartSelectMessage(); //Get chart data m_ecmwf_start_folder = "most_recent"; m_wrf_hydro_date_string = "most_recent"; getChartData(); //Get available ECMWF Dates if (m_ecmwf_show && m_selected_ecmwf_watershed != null && m_selected_ecmwf_subbasin != null) { m_downloading_long_term_select = true; m_long_term_select_data_ajax_handle = jQuery.ajax({ type: "GET", url: "ecmwf-get-avaialable-dates", dataType: "json", data: { watershed_name: m_selected_ecmwf_watershed, subbasin_name: m_selected_ecmwf_subbasin, reach_id: m_selected_reach_id, }, }) .done(function (data) { if ("success" in data && !m_long_term_chart_data_ajax_load_failed) { //remove select2 if exists clearChartSelect2('long-term'); $('.long-term-select').removeClass('hidden'); //create new select2 $('#long-term-select').select2({ data: data.output_directories, placeholder: "Select a Date" }); if (m_downloading_ecmwf_hydrograph && m_downloading_era_interim_hydrograph) { $('.long-term-select').addClass('hidden'); } //add on change event handler $('#long-term-select').on('change.select2', function () { m_ecmwf_start_folder = $(this).select2('data').id; getChartData(); }); } else if ("error" in data) { appendErrorMessage("Error: " + data.error, "ecmwf_error", "message-error"); 
clearChartSelect2('long-term'); } }) .fail(function (request, status, error) { appendErrorMessage("Error: " + error, "ecmwf_error", "message-error"); clearChartSelect2('long-term'); }) .always(function () { m_downloading_long_term_select = false; if(isNotLoadingPastRequest()){ clearInfoMessages(); } }); } //Get available WRF-Hydro Dates if (m_wrf_show && m_selected_wrf_hydro_watershed != null && m_selected_wrf_hydro_subbasin != null) { m_downloading_short_term_select = true; m_short_term_select_data_ajax_handle = jQuery.ajax({ type: "GET", url: "wrf-hydro-get-avaialable-dates", dataType: "json", data: { watershed_name: m_selected_wrf_hydro_watershed, subbasin_name: m_selected_wrf_hydro_subbasin, }, }) .done(function (data) { if ("success" in data && !m_short_term_chart_data_ajax_load_failed) { //remove select2 if exists clearChartSelect2('short-term'); $('.short-term-select').removeClass('hidden'); //create new select2 $('#short-term-select').select2({ data: data.output_files, placeholder: "Select a Date" }); if (m_downloading_wrf_hydro_hydrograph) { $('.short-term-select').addClass('hidden'); } //add on change event handler $('#short-term-select').on('change.select2', function () { m_wrf_hydro_date_string = $(this).select2('data').id; getChartData(); }); } else if ("error" in data) { appendErrorMessage("Error: " + data.error, "wrf_hydro_error", "message-error"); clearChartSelect2('short-term'); } }) .fail(function (request, status, error) { appendErrorMessage("Error: " + error, "wrf_hydro_error", "message-error"); clearChartSelect2('short-term'); }) .always(function () { m_downloading_short_term_select = false; if(isNotLoadingPastRequest()){ clearInfoMessages(); } }); } } }; //FUNCTION: Loads Hydrograph from Selected feature loadHydrographFromFeature = function(selected_feature) { //check if old ajax call still running if(!isNotLoadingPastRequest()) { //updateInfoAlert appendWarningMessage("Please wait for datasets to download before making another selection.", 
"wait_warning"); } else { //get attributes var reach_id = getCI(selected_feature, 'COMID'); var ecmwf_watershed_name = getCI(selected_feature, "watershed"); var ecmwf_subbasin_name = getCI(selected_feature, "subbasin"); var wrf_hydro_watershed_name = getCI(selected_feature, "wwatershed"); var wrf_hydro_subbasin_name = getCI(selected_feature, "wsubbasin"); var usgs_id = getCI(selected_feature, "usgs_id"); var nws_id = getCI(selected_feature, "nws_id"); var hydroserver_url = getCI(selected_feature, "hydroserve"); //check if the variables are under a different name if(reach_id == null || isNaN(reach_id)) { var reach_id = getCI(selected_feature, 'hydroid'); } if(ecmwf_watershed_name == null) { var ecmwf_watershed_name = getCI(selected_feature, 'watershed_name'); } if(ecmwf_subbasin_name == null) { var ecmwf_subbasin_name = getCI(selected_feature, 'subbasin_name'); } //clean up usgs_id if(!isNaN(usgs_id) && usgs_id != null) { //add zero in case it was removed when converted to a number while(usgs_id.length < 8 && usgs_id.length > 0) { usgs_id = '0' + usgs_id; } } if(reach_id != null && (ecmwf_watershed_name != null && ecmwf_subbasin_name != null) || (wrf_hydro_watershed_name != null && wrf_hydro_subbasin_name != null)) { m_selected_feature = selected_feature; m_selected_reach_id = reach_id; m_selected_ecmwf_watershed = ecmwf_watershed_name; m_selected_ecmwf_subbasin = ecmwf_subbasin_name; m_selected_wrf_hydro_watershed = wrf_hydro_watershed_name; m_selected_wrf_hydro_subbasin = wrf_hydro_subbasin_name; m_selected_usgs_id = usgs_id; m_selected_nws_id = nws_id; m_selected_hydroserver_url = hydroserver_url; displayHydrograph(); } else { appendErrorMessage('The attributes in the file are faulty. 
Please fix and upload again.', "file_attr_error", "message-error"); } } }; /************************************************************************ * DEFINE PUBLIC INTERFACE *************************************************************************/ /* * Library object that contains public facing functions of the package. * This is the object that is returned by the library wrapper function. * See below. * NOTE: The functions in the public interface have access to the private * functions of the library because of JavaScript function scope. */ public_interface = { zoomToAll: function() { zoomToAll(); }, }; /************************************************************************ * INITIALIZATION / CONSTRUCTOR *************************************************************************/ // Initialization: jQuery function that gets called when // the DOM tree finishes loading $(function() { resizeAppContent(); $('#map_top_navigation').find('.form-group').addClass('inline-block'); //initialize map global variables m_map_projection = 'EPSG:3857'; m_map_extent = ol.extent.createEmpty(); m_selected_feature = null; m_selected_ecmwf_watershed = null; m_selected_ecmwf_subbasin = null; m_selected_wrf_hydro_watershed = null; m_selected_wrf_hydro_subbasin = null; m_selected_reach_id = null; m_selected_usgs_id = null; m_selected_nws_id = null; m_selected_hydroserver_url = null; m_downloading_ecmwf_hydrograph = false; m_downloading_era_interim_hydrograph = false; m_downloading_long_term_select = false; m_downloading_wrf_hydro_hydrograph = false; m_downloading_short_term_select = false; m_downloading_usgs = false; m_downloading_nws = false; m_downloading_hydroserver = false; m_searching_for_reach = false; m_long_term_chart_data_ajax_load_failed = false; m_short_term_chart_data_ajax_load_failed = false; m_long_term_select_data_ajax_handle = null; m_ecmwf_start_folder = "most_recent"; m_wrf_hydro_date_string = "most_recent"; //Init from toggle m_units = "metric"; 
if(!$('#units-toggle').bootstrapSwitch('state')) { m_units = "english"; } m_wrf_show = $('#wrf-toggle').bootstrapSwitch('state'); m_ecmwf_show = $('#ecmwf-toggle').bootstrapSwitch('state'); //create symbols for warnings var twenty_symbols = [new ol.style.RegularShape({ points: 3, radius: 5, fill: new ol.style.Fill({ color: 'rgba(128,0,128,0.8)' }), stroke: new ol.style.Stroke({ color: 'rgba(128,0,128,1)', width: 1 }), }),new ol.style.RegularShape({ points: 3, radius: 9, fill: new ol.style.Fill({ color: 'rgba(128,0,128,0.3)' }), stroke: new ol.style.Stroke({ color: 'rgba(128,0,128,1)', width: 1 }), })]; //symbols var ten_symbols = [new ol.style.RegularShape({ points: 3, radius: 5, fill: new ol.style.Fill({ color: 'rgba(255,0,0,0.7)' }), stroke: new ol.style.Stroke({ color: 'rgba(255,0,0,1)', width: 1 }), }),new ol.style.RegularShape({ points: 3, radius: 9, fill: new ol.style.Fill({ color: 'rgba(255,0,0,0.3)' }), stroke: new ol.style.Stroke({ color: 'rgba(255,0,0,1)', width: 1 }), })]; //symbols var two_symbols = [new ol.style.RegularShape({ points: 3, radius: 5, fill: new ol.style.Fill({ color: 'rgba(255,255,0,0.7)' }), stroke: new ol.style.Stroke({ color: 'rgba(255,255,0,1)', width: 1 }), }),new ol.style.RegularShape({ points: 3, radius: 9, fill: new ol.style.Fill({ color: 'rgba(255,255,0,0.3)' }), stroke: new ol.style.Stroke({ color: 'rgba(255,255,0,1)', width: 1 }), })]; //load base layer var base_layer_info = JSON.parse($("#map").attr('base-layer-info')); m_basemap_layer = getBaseLayer(base_layer_info.name,base_layer_info.api_key); //load drainage line kml layers var layers_info = JSON.parse($("#map").attr('layers-info')); var all_group_layers = []; m_drainage_line_layers = []; m_flood_maps = []; //add each watershed kml group layers_info.forEach(function(layer_info, group_index) { var layers = []; if('geoserver_url' in layer_info) { //add catchment if exists if('catchment' in layer_info) { var catchment_layer_id = 'layer' + group_index + 'g' + 1 if ("error" in 
layer_info.catchment) { appendErrorMessage("Catchment Layer: " + layer_info.title + ": " + layer_info.catchment.error, "error_" + catchment_layer_id, "message-error"); } else { var layer = getTileLayer(layer_info['catchment'], layer_info['geoserver_url'], catchment_layer_id); if (layer != null) { layer.setOpacity(0.5); layers.push(layer); } else { appendErrorMessage("Catchment Layer Invalid ... ", "error_" + catchment_layer_id, "message-error"); } } } //add gage if exists if('gage' in layer_info) { var gage_layer_id = 'layer' + group_index + 'g' + 2; if ("error" in layer_info.gage) { appendErrorMessage("Gage Layer: " + layer_info.title + ": " + layer_info.gage.error, 'error_' + gage_layer_id, "message-error"); } else { var layer = getTileLayer(layer_info['gage'], layer_info['geoserver_url'], gage_layer_id); if (layer != null) { layers.push(layer); } else { appendErrorMessage("Gage Layer Invalid ... ", "error_" + gage_layer_id, "message-error"); } } } //add flood maps if they exist if('flood_maps' in layer_info) { var flood_maps = []; if ('geoserver_info_list' in layer_info.flood_maps) { var flood_map_dataset_id = 'layer' + group_index + 'g' + 7; var valid_floodmap_count = 0; layer_info.flood_maps.geoserver_info_list.forEach(function(flood_map_info, flood_map_index){ var flood_map_sublayer_id = flood_map_dataset_id + "f" + flood_map_index; if ("error" in flood_map_info) { appendErrorMessage("Flood Map Layer: " + layer_info.title + " " + flood_map_info.forecast_directory + ": " + flood_map_info.error, 'error_' +flood_map_sublayer_id , "message-error"); } else { var layer = getTileLayer(flood_map_info, layer_info.geoserver_url, flood_map_dataset_id); if (layer != null) { layer.setOpacity(0.5); if(valid_floodmap_count>0) { layer.setVisible(false); } valid_floodmap_count += 1; layer.set('watershed_name', layer_info.watershed); layer.set('subbasin_name', layer_info.subbasin); layer.set('date_timestep', flood_map_info.forecast_directory); 
layer.set("flood_map_sublayer_id", flood_map_sublayer_id); flood_maps.push(layer); } else { console.log("Invalid Floodmap Layer: "); console.log(flood_map_info); } } }); if (flood_maps.length > 0) { m_flood_maps.push(new ol.layer.Group({ layers: flood_maps, })); } } } //add ahps station if exists if('ahps_station' in layer_info) { var ahps_station_layer_id = 'layer' + group_index + 'g' + 3; if ("error" in layer_info.ahps_station) { appendErrorMessage("AHPS Station Layer: " + layer_info.title + ": " + layer_info.ahps_station.error, 'error_' + ahps_station_layer_id, "message-error") } else { var ahps_station_vector_source = new ol.source.ServerVector({ format: new ol.format.GeoJSON(), loader: function(extent, resolution, projection) { var url = layer_info.ahps_station.geojsonp + '&format_options=callback:loadFeatures' + ahps_station_layer_id + '&PROPERTYNAME=the_geom' + '&srsname=' + m_map_projection; jQuery.ajax({ url: encodeURI(url), dataType: 'jsonp', jsonpCallback: 'loadFeatures' + ahps_station_layer_id, success: function(response) { ahps_station_vector_source.addFeatures(ahps_station_vector_source.readFeatures(response)); }, }); }, strategy: ol.loadingstrategy.bbox, projection: m_map_projection }); var ahps_station = new ol.layer.Vector({ source: ahps_station_vector_source, style: new ol.style.Style({ image: new ol.style.RegularShape({ points: 5, radius: 7, stroke: new ol.style.Stroke({ color: 'rgba(0,255,0,0.3)' }), fill: new ol.style.Fill({ color: 'rgba(0,128,0,0.5)' }) }), }) }); ahps_station.set('geoserver_url', layer_info.ahps_station.geojsonp) ahps_station.set('watershed_name', layer_info.watershed); ahps_station.set('subbasin_name', layer_info.subbasin); ahps_station.set('extent', ol.proj.transformExtent(layer_info.ahps_station.latlon_bbox.map(Number), 'EPSG:4326', m_map_projection)); ahps_station.set('layer_id', ahps_station_layer_id); ahps_station.set('layer_type', 'geoserver'); layers.push(ahps_station); } } //add drainage line if exists 
if('drainage_line' in layer_info) { var drainage_line_layer_id = 'layer' + group_index + 'g' + 0; if ("error" in layer_info.drainage_line) { appendErrorMessage("Drainage Line Layer: " + layer_info.title + ": " + layer_info.drainage_line.error, "error_" + drainage_line_layer_id, "message-error"); } else { //check if required parameters exist if(layer_info['drainage_line']['missing_attributes'].length > 2) { appendErrorMessage('The drainage line layer for ' + layer_info['watershed'] + '(' + layer_info['subbasin'] + ') ' + 'is missing '+ layer_info['drainage_line']['missing_attributes'].join(", ") + ' attributes and will not function properly.', "layer_loading_error", "message-error"); } var drainage_line; //check layer capabilites if(layer_info['drainage_line']['geoserver_method'] == "natur_flow_query") { var load_features_xhr = null; var drainage_line_vector_source = new ol.source.ServerVector({ format: new ol.format.GeoJSON(), loader: function(extent, resolution, projection) { if (typeof this.url == 'undefined') { this.url = layer_info.drainage_line.geojsonp; } if (typeof this.contained_attributes == 'undefined') { this.contained_attributes = layer_info.drainage_line.contained_attributes.join(","); } if (typeof this.query_attribute == 'undefined') { this.query_attribute = layer_info.drainage_line.geoserver_query_attribute; } var stream_flow_limit = 5000; var map_zoom = m_map.getView().getZoom(); if (map_zoom >= 12) { stream_flow_limit = 0; } else if (map_zoom >= 11) { stream_flow_limit = 20; } else if (map_zoom >= 10) { stream_flow_limit = 100; } else if (map_zoom >= 9) { stream_flow_limit = 1000; } else if (map_zoom >= 8) { stream_flow_limit = 3000; } else if (map_zoom >= 7) { stream_flow_limit = 4000; } var url = this.url + '&format_options=callback:loadFeatures' + drainage_line_layer_id + '&PROPERTYNAME=the_geom,' + this.contained_attributes + '&CQL_FILTER=' + this.query_attribute + ' > ' + stream_flow_limit + ' AND bbox(the_geom,' + extent.join(',') + ',\'' + 
m_map_projection + '\')' + '&srsname=' + m_map_projection; //cancel load featues if still active if(load_features_xhr != null) { load_features_xhr.abort(); } //TODO: ADD LOADING MESSAGE load_features_xhr = jQuery.ajax({ url: encodeURI(url), dataType: 'jsonp', jsonpCallback: 'loadFeatures' + drainage_line_layer_id, }) .done(function(response){ drainage_line_vector_source.addFeatures(drainage_line_vector_source.readFeatures(response)); }) .always(function() { load_features_xhr = null; }); //ON ERROR ADD MESSAGE //ALWAYS REMOVE LOADING MESSAGE }, strategy: function(extent, resolution) { var zoom_range = 1; var map_zoom = m_map.getView().getZoom(); if (map_zoom >= 12) { zoom_range = 2; } else if (map_zoom >= 11) { zoom_range = 3; } else if (map_zoom >= 10) { zoom_range = 4; } else if (map_zoom >= 9) { zoom_range = 5; } else if (map_zoom >= 8) { zoom_range = 6; } else if (map_zoom >= 7) { zoom_range = 7; } if(zoom_range != this.zoom_range && typeof this.zoom_range != 'undefined') { this.clear(); } this.zoom_range = zoom_range; return [extent]; }, projection: m_map_projection, }); drainage_line = new ol.layer.Vector({ source: drainage_line_vector_source, maxResolution: 10000 }); } else if(layer_info['drainage_line']['geoserver_method'] == "river_order_query") { var load_features_xhr = null; var drainage_line_vector_source = new ol.source.ServerVector({ format: new ol.format.GeoJSON(), loader: function(extent, resolution, projection) { if (typeof this.url == 'undefined') { this.url = layer_info.drainage_line.geojsonp; } if (typeof this.contained_attributes == 'undefined') { this.contained_attributes = layer_info.drainage_line.contained_attributes.join(","); } if (typeof this.query_attribute == 'undefined') { this.query_attribute = layer_info.drainage_line.geoserver_query_attribute; } var river_order_limit = 1000; var map_zoom = m_map.getView().getZoom(); if (map_zoom >= 12) { river_order_limit = 0; } else if (map_zoom >= 11) { river_order_limit = 2; } else if (map_zoom >= 
10) { river_order_limit = 8; } else if (map_zoom >= 9) { river_order_limit = 64; } else if (map_zoom >= 8) { river_order_limit = 128; } else if (map_zoom >= 7) { river_order_limit = 300; } var url = this.url + '&format_options=callback:loadFeatures' + drainage_line_layer_id + '&PROPERTYNAME=the_geom,' + this.contained_attributes + '&CQL_FILTER=' + this.query_attribute + ' > ' + river_order_limit + ' AND bbox(the_geom,' + extent.join(',') + ',\'' + m_map_projection + '\')' + '&srsname=' + m_map_projection; //cancel load featues if still active if(load_features_xhr != null) { load_features_xhr.abort(); } //TODO: ADD LOADING MESSAGE load_features_xhr = jQuery.ajax({ url: encodeURI(url), dataType: 'jsonp', jsonpCallback: 'loadFeatures' + drainage_line_layer_id, }) .done(function(response){ drainage_line_vector_source.addFeatures(drainage_line_vector_source.readFeatures(response)); }) .always(function() { load_features_xhr = null; }); //ON ERROR ADD MESSAGE //ALWAYS REMOVE LOADING MESSAGE }, strategy: function(extent, resolution) { var zoom_range = 1; var map_zoom = m_map.getView().getZoom(); if (map_zoom >= 12) { zoom_range = 2; } else if (map_zoom >= 11) { zoom_range = 3; } else if (map_zoom >= 10) { zoom_range = 4; } else if (map_zoom >= 9) { zoom_range = 5; } else if (map_zoom >= 8) { zoom_range = 6; } else if (map_zoom >= 7) { zoom_range = 7; } if(zoom_range != this.zoom_range && typeof this.zoom_range != 'undefined') { this.clear(); } this.zoom_range = zoom_range; return [extent]; }, projection: m_map_projection, }); drainage_line = new ol.layer.Vector({ source: drainage_line_vector_source, maxResolution: 10000 }); } else { //layer_info['drainage_line']['geoserver_method'] == "simple" var drainage_line_vector_source = new ol.source.ServerVector({ format: new ol.format.GeoJSON(), loader: function(extent, resolution, projection) { this.geojsonp_url = layer_info['drainage_line']['geojsonp']; this.callback = 'loadFeatures' + drainage_line_layer_id; this.attributes = 
layer_info['drainage_line']['contained_attributes'].join(","); var url = this.geojsonp_url + '&format_options=callback:loadFeatures' + drainage_line_layer_id + '&PROPERTYNAME=the_geom,' + this.attributes + '&BBOX=' + extent.join(',') + ','+ m_map_projection + '&srsname=' + m_map_projection; jQuery.ajax({ url: encodeURI(url), dataType: 'jsonp', jsonpCallback: this.callback, success: function(response) { drainage_line_vector_source.addFeatures(drainage_line_vector_source.readFeatures(response)); }, }); }, strategy: ol.loadingstrategy.bbox, projection: m_map_projection }); drainage_line = new ol.layer.Vector({ source: drainage_line_vector_source, maxResolution: 1000 }); } layer_info['drainage_line']['contained_attributes'].some(function(attribute) { if (attribute.toLowerCase() == "comid" || attribute.toLowerCase() == "hydroid") { drainage_line.set('reach_id_attr_name', attribute); return true; } }); drainage_line.set('geoserver_url', layer_info['drainage_line']['geojsonp']) drainage_line.set('watershed_name', layer_info['watershed']); drainage_line.set('subbasin_name', layer_info['subbasin']); drainage_line.set('extent', ol.proj.transformExtent(layer_info['drainage_line']['latlon_bbox'].map(Number), 'EPSG:4326', m_map_projection)); drainage_line.set('layer_id', drainage_line_layer_id); drainage_line.set('layer_type', 'geoserver'); m_drainage_line_layers.push(drainage_line); layers.push(drainage_line); } } } else { //assume KML //add catchment if exists if('catchment' in layer_info) { var catchment_layer_id = 'layer' + group_index + 'g' + 1; layers.push(getKMLLayer(layer_info['catchment'], catchment_layer_id)); } //add gage if exists if('gage' in layer_info) { var gage_layer_id = 'layer' + group_index + 'g' + 2; layers.push(getKMLLayer(layer_info['gage'], gage_layer_id)); } //add drainage line if exists if('drainage_line' in layer_info) { var drainage_line_layer_id = 'layer' + group_index + 'g' + 0; var drainage_line_layer = getKMLLayer(layer_info['drainage_line'], 
drainage_line_layer_id, layer_info['watershed'], layer_info['subbasin']) layers.push(drainage_line_layer); m_drainage_line_layers.push(drainage_line_layer); } } //create empty layers to add data to later var return_20_layer = new ol.layer.Vector({ source: new ol.source.Cluster({ source: new ol.source.Vector({ source: []}), distance: 20 }), style: function(feature, resolution) { var features = feature.get("features"); var size = -1 if (typeof features != 'undefined') { size = features.length; } var style; if (size > 3) { style = [new ol.style.Style({ image: new ol.style.RegularShape({ points: 3, radius: 12, stroke: new ol.style.Stroke({ color: '#fff' }), fill: new ol.style.Fill({ color: 'rgba(128,0,128,0.7)' }) }), text: new ol.style.Text({ text: size.toString(), fill: new ol.style.Fill({ color: '#fff' }) }) })]; } else if (size < 0) { style = []; } else { style = []; for (var i=0; i<size; i++) { style.push(new ol.style.Style({ image: twenty_symbols[features[i].get('point_size')], })); } } return style; } }); return_20_layer.set('layer_id', 'layer' + group_index + 'g' + 4); return_20_layer.set('layer_type', 'warning_points'); return_20_layer.set('return_period', 20); return_20_layer.set('ecmwf_watershed_name', layer_info['ecmwf_watershed']); return_20_layer.set('ecmwf_subbasin_name', layer_info['ecmwf_subbasin']); var return_10_layer = new ol.layer.Vector({ source: new ol.source.Cluster({ source: new ol.source.Vector({ source: []}), distance: 20 }), style: function(feature, resolution) { var features = feature.get("features"); var size = -1 if (typeof features != 'undefined') { var size = features.length; } var style; if (size > 3) { style = [new ol.style.Style({ image: new ol.style.RegularShape({ points: 3, radius: 12, stroke: new ol.style.Stroke({ color: '#fff' }), fill: new ol.style.Fill({ color: 'rgba(255,0,0,0.6)' }) }), text: new ol.style.Text({ text: size.toString(), fill: new ol.style.Fill({ color: '#fff' }) }) })]; } else if (size < 0) { style = []; } else 
{ style = []; for (var i=0; i<size; i++) { style.push(new ol.style.Style({ image: ten_symbols[features[i].get('point_size')] })); } } return style; } }); return_10_layer.set('layer_id', 'layer' + group_index + 'g' + 5); return_10_layer.set('layer_type', 'warning_points'); return_10_layer.set('return_period', 10); return_10_layer.set('ecmwf_watershed_name', layer_info['ecmwf_watershed']); return_10_layer.set('ecmwf_subbasin_name', layer_info['ecmwf_subbasin']); var return_2_layer = new ol.layer.Vector({ source: new ol.source.Cluster({ source: new ol.source.Vector({ source: []}), distance: 20 }), style: function(feature, resolution) { var features = feature.get("features"); var size = -1 if (typeof features != 'undefined') { var size = features.length; } var style; if (size > 3) { style = [new ol.style.Style({ image: new ol.style.RegularShape({ points: 3, radius: 12, stroke: new ol.style.Stroke({ color: '#fff' }), fill: new ol.style.Fill({ color: 'rgba(255,255,0,0.6)' }) }), text: new ol.style.Text({ text: size.toString(), fill: new ol.style.Fill({ color: '#fff' }) }) })]; } else if (size < 0) { style = []; } else { style = []; for (var i=0; i<size; i++) { style.push(new ol.style.Style({ image: two_symbols[features[i].get('point_size')] })); } } return style; } }); return_2_layer.set('layer_id', 'layer' + group_index + 'g' + 6); return_2_layer.set('layer_type', 'warning_points'); return_2_layer.set('return_period', 2); return_2_layer.set('ecmwf_watershed_name', layer_info['ecmwf_watershed']); return_2_layer.set('ecmwf_subbasin_name', layer_info['ecmwf_subbasin']); layers.push(return_2_layer); layers.push(return_10_layer); layers.push(return_20_layer); //make sure there are layers to add if (layers.length > 0) { var group_layer = new ol.layer.Group({ layers: layers, }); all_group_layers.push(group_layer); } }); //send message to user if Drainage Line KML file not found if (m_drainage_line_layers.length <= 0) { appendErrorMessage('No valid drainage line layers found. 
Please upload to begin.', "drainage_line_error", "message-error"); } //make drainage line layers selectable m_select_interaction = new ol.interaction.Select({ layers: m_drainage_line_layers, }); if(m_flood_maps.length > 0) { all_group_layers = all_group_layers.concat(m_flood_maps); } var all_map_layers = [m_basemap_layer].concat(all_group_layers); //var all_map_layers = all_group_layers; //create map m_map = new ol.Map({ target: 'map', controls: ol.control.defaults().extend([ new ol.control.FullScreen(), new ol.control.ZoomToExtent(), ]), interactions: ol.interaction.defaults().extend([ new ol.interaction.DragRotateAndZoom(), m_select_interaction, ]), layers : all_map_layers, view: new ol.View({ center: [-33519607, 5616436], zoom: 8 }), }); //wait for kml layers to load and then zoom to them all_group_layers.forEach(function(group_layer){ if (group_layer instanceof ol.layer.Group) { group_layer.getLayers().forEach(function(vector_layer, j) { if(vector_layer.get('layer_type') == "kml") { var vector_source = vector_layer.getSource(); var listener_key = vector_source.on('change', function() { if (vector_source.getState() == 'ready') { bindInputs('#'+vector_layer.get('layer_id'), vector_layer); ol.extent.extend(m_map_extent, vector_source.getExtent()); m_map.getView().fitExtent(m_map_extent, m_map.getSize()); } }); } else if (vector_layer.get('layer_type') == "geoserver") { bindInputs('#'+vector_layer.get('layer_id'), vector_layer); ol.extent.extend(m_map_extent, vector_layer.get('extent')); m_map.getView().fitExtent(m_map_extent, m_map.getSize()); } else if (vector_layer.get('layer_type') == "warning_points") { var layer_id = '#'+vector_layer.get('layer_id'); bindInputs(layer_id, vector_layer); //get warning points for map jQuery.ajax({ type: "GET", url: 'get-warning-points', dataType: "json", data: { watershed_name: vector_layer.get('ecmwf_watershed_name'), subbasin_name: vector_layer.get('ecmwf_subbasin_name'), return_period: vector_layer.get('return_period'), }, }) 
.done(function (data) { if ("success" in data) { $(layer_id).parent().removeClass('hidden'); //symbols var feature_count = data.warning_points.length var features = []; var feature, geometry, symbol; for (var i = 0; i < feature_count; ++i) { geometry = new ol.geom.Point(ol.proj.transform([data.warning_points[i].lon, data.warning_points[i].lat], 'EPSG:4326', m_map_projection)); feature = new ol.Feature({ geometry: geometry, point_size: data.warning_points[i].size, comid: data.warning_points[i].comid }); features.push(feature); } var vector_source = vector_layer.getSource().getSource(); vector_source.addFeatures(features); m_map.render(); } else { console.log(data.error); //appendErrorMessage("Error: " + data["error"], "warning_points_error", "message-error"); } }) .fail(function (request, status, error) { console.log(error); //appendErrorMessage("Error: " + error, "warning_points_error", "message-error"); }); } }); } }); //bind flood maps m_flood_maps.forEach(function(layer_group, j) { layer_group.getLayers().forEach(function(layer, j) { if (j==0){ bindInputs('#'+layer.get('layer_id'), layer); } }); }); //when selected, call function to make hydrograph m_select_interaction.getFeatures().on('change:length', function(e) { if (e.target.getArray().length === 0) { // this means it's changed to no features selected } else { // this means there is at least 1 feature selected var selected_feature = e.target.item(0); // 1st feature in Collection loadHydrographFromFeature(selected_feature); } }); //change displayed flood map on click $('.flood_map_select').off().change(function() { var watershed_name = $(this).parent().parent().parent().parent().attr('watershed'); var subbasin_name = $(this).parent().parent().parent().parent().attr('subbasin'); var date_timestep = $(this).val(); m_flood_maps.forEach(function(layer_group, j) { layer_group.getLayers().forEach(function(layer, j) { if (layer.get('watershed_name') == watershed_name && layer.get('subbasin_name') == subbasin_name) { 
layer.setVisible(false); unbindInputs('#'+layer.get('layer_id')); } }); }); m_flood_maps.forEach(function(layer_group, j) { layer_group.getLayers().forEach(function(layer, j) { if (layer.get('watershed_name') == watershed_name && layer.get('subbasin_name') == subbasin_name) { if (layer.get('date_timestep') == date_timestep){ layer.setVisible(true); bindInputs('#'+layer.get('layer_id'), layer); } } }); }); }); //create function to zoom to layer $('.zoom-to-layer').off().click(function() { var layer_id = $(this).parent().parent().attr('id'); zoomToLayer(layer_id); }); //function to zoom to feature by id $('#submit-search-reach-id').off().click(function() { var watershed_info = $(this).parent().parent().find('#watershed_select').select2('val'); var reach_id = $(this).parent().parent().find('#reach-id-input').val(); zoomToFeature(watershed_info, reach_id); }); //zoom to all $('.ol-zoom-extent').off().click(function() { zoomToAll(); }); //show hide elements based on shape upload toggle selection $('#units-toggle').on('switchChange.bootstrapSwitch', function(event, state) { if(state) { //units metric m_units = "metric"; } else { //units english m_units = "english"; } if (m_selected_feature != null) { loadHydrographFromFeature(m_selected_feature); } }); //show/hide forecasts based on toggle $('#ecmwf-toggle').on('switchChange.bootstrapSwitch', function(event, state) { m_ecmwf_show = state; if (m_selected_feature != null && m_selected_ecmwf_watershed != null && m_selected_ecmwf_subbasin != null) { loadHydrographFromFeature(m_selected_feature); } }); $('#wrf-toggle').on('switchChange.bootstrapSwitch', function(event, state) { m_wrf_show = state; if (m_selected_feature != null && m_selected_wrf_hydro_watershed != null && m_selected_wrf_hydro_subbasin != null) { loadHydrographFromFeature(m_selected_feature); } }); //resize app content based on window size and nav bar $('.toggle-nav').off(); $(window).resize(function() { resizeAppContent(); }); }); return public_interface; 
}()); // End of package wrapper // NOTE: that the call operator (open-closed parenthesis) is used to invoke the library wrapper // function immediately after being parsed.
Fix chart selection zoom
tethysapp/streamflow_prediction_tool/public/js/map.js
Fix chart selection zoom
<ide><path>ethysapp/streamflow_prediction_tool/public/js/map.js <ide> Highcharts.getOptions().colors[1]); <ide> } <ide> $('.long-term-select').removeClass('hidden'); <add> var long_term_chart = $("#long-term-chart").highcharts(); <add> long_term_chart.rangeSelector.clickButton(0,0,true); <ide> $('#long-term-chart').removeClass('hidden'); <ide> } else { <ide> m_long_term_chart_data_ajax_load_failed = true;
Java
apache-2.0
be65022827e91bcc71df7adbb44c3a7c0458231a
0
vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa
// Copyright 2017 Yahoo Holdings. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root. package com.yahoo.vespa.hosted.controller.deployment; import com.yahoo.config.application.api.DeploymentSpec; import com.yahoo.config.application.api.DeploymentSpec.Step; import com.yahoo.config.provision.ApplicationId; import com.yahoo.log.LogLevel; import com.yahoo.vespa.hosted.controller.Application; import com.yahoo.vespa.hosted.controller.ApplicationController; import com.yahoo.vespa.hosted.controller.Controller; import com.yahoo.vespa.hosted.controller.api.identifiers.DeploymentId; import com.yahoo.vespa.hosted.controller.api.integration.BuildService; import com.yahoo.vespa.hosted.controller.api.integration.BuildService.JobState; import com.yahoo.vespa.hosted.controller.api.integration.zone.ZoneId; import com.yahoo.vespa.hosted.controller.application.ApplicationList; import com.yahoo.vespa.hosted.controller.application.ApplicationVersion; import com.yahoo.vespa.hosted.controller.application.Change; import com.yahoo.vespa.hosted.controller.application.Deployment; import com.yahoo.vespa.hosted.controller.application.DeploymentJobs.JobReport; import com.yahoo.vespa.hosted.controller.api.integration.deployment.JobType; import com.yahoo.vespa.hosted.controller.application.JobStatus; import com.yahoo.vespa.hosted.controller.application.JobStatus.JobRun; import java.time.Clock; import java.time.Duration; import java.time.Instant; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.EnumSet; import java.util.List; import java.util.Map; import java.util.NoSuchElementException; import java.util.Objects; import java.util.Optional; import java.util.OptionalLong; import java.util.function.Supplier; import java.util.logging.Logger; import java.util.stream.Stream; import static com.yahoo.vespa.hosted.controller.api.integration.BuildService.BuildJob; import static 
com.yahoo.vespa.hosted.controller.api.integration.BuildService.JobState.idle; import static com.yahoo.vespa.hosted.controller.api.integration.BuildService.JobState.queued; import static com.yahoo.vespa.hosted.controller.api.integration.BuildService.JobState.running; import static com.yahoo.vespa.hosted.controller.api.integration.deployment.JobType.component; import static com.yahoo.vespa.hosted.controller.api.integration.deployment.JobType.stagingTest; import static com.yahoo.vespa.hosted.controller.api.integration.deployment.JobType.systemTest; import static java.util.Collections.emptyList; import static java.util.Collections.singletonList; import static java.util.Comparator.comparing; import static java.util.Comparator.naturalOrder; import static java.util.stream.Collectors.groupingBy; import static java.util.stream.Collectors.joining; import static java.util.stream.Collectors.partitioningBy; import static java.util.stream.Collectors.toList; /** * Responsible for scheduling deployment jobs in a build system and keeping * {@link Application#change()} in sync with what is scheduled. * * This class is multi-thread safe. 
* * @author bratseth * @author mpolden * @author jonmv */ public class DeploymentTrigger { private final static Logger log = Logger.getLogger(DeploymentTrigger.class.getName()); private final Controller controller; private final Clock clock; private final BuildService buildService; private final JobController jobs; public DeploymentTrigger(Controller controller, BuildService buildService, Clock clock) { this.controller = Objects.requireNonNull(controller, "controller cannot be null"); this.clock = Objects.requireNonNull(clock, "clock cannot be null"); this.buildService = Objects.requireNonNull(buildService, "buildService cannot be null"); this.jobs = controller.jobController(); } public DeploymentSteps steps(DeploymentSpec spec) { return new DeploymentSteps(spec, controller::system); } /** * Records information when a job completes (successfully or not). This information is used when deciding what to * trigger next. */ public void notifyOfCompletion(JobReport report) { log.log(LogLevel.INFO, String.format("Notified of %s for %s of %s (%d)", report.jobError().map(e -> e.toString() + " error") .orElse("success"), report.jobType(), report.applicationId(), report.projectId())); if ( ! 
applications().get(report.applicationId()).isPresent()) { log.log(LogLevel.WARNING, "Ignoring completion of job of project '" + report.projectId() + "': Unknown application '" + report.applicationId() + "'"); return; } applications().lockOrThrow(report.applicationId(), application -> { JobRun triggering; if (report.jobType() == component) { ApplicationVersion applicationVersion = ApplicationVersion.from(report.sourceRevision().get(), report.buildNumber()); triggering = JobRun.triggering(application.get().oldestDeployedPlatform().orElse(controller.systemVersion()), applicationVersion, Optional.empty(), Optional.empty(), "Application commit", clock.instant()); if (report.success()) { if (acceptNewApplicationVersion(application.get())) { application = application.withChange(application.get().change().with(applicationVersion)) .withOutstandingChange(Change.empty()); if (application.get().deploymentJobs().deployedInternally()) for (Run run : jobs.active()) if (run.id().application().equals(report.applicationId())) jobs.abort(run.id()); } else application = application.withOutstandingChange(Change.of(applicationVersion)); } } else { triggering = application.get().deploymentJobs().statusOf(report.jobType()) .filter(job -> job.lastTriggered().isPresent() && job.lastCompleted() .map(completion -> ! completion.at().isAfter(job.lastTriggered().get().at())) .orElse(true)) .orElseThrow(() -> new IllegalStateException("Notified of completion of " + report.jobType().jobName() + " for " + report.applicationId() + ", but that has neither been triggered nor deployed")) .lastTriggered().get(); } application = application.withJobCompletion(report.projectId(), report.jobType(), triggering.completion(report.buildNumber(), clock.instant()), report.jobError()); application = application.withChange(remainingChange(application.get())); applications().store(application); }); } /** Returns a map of jobs that are scheduled to be run, grouped by the job type */ public Map<JobType, ? 
extends List<? extends BuildJob>> jobsToRun() { return computeReadyJobs().stream().collect(groupingBy(Job::jobType)); } /** * Finds and triggers jobs that can and should run but are currently not, and returns the number of triggered jobs. * * Only one job is triggered each run for test jobs, since their environments have limited capacity. */ public long triggerReadyJobs() { return computeReadyJobs().stream() .collect(partitioningBy(job -> job.jobType().isTest())) .entrySet().stream() .flatMap(entry -> (entry.getKey() // True for capacity constrained zones -- sort by priority and make a task for each job type. ? entry.getValue().stream() .sorted(comparing(Job::isRetry) .thenComparing(Job::applicationUpgrade) .reversed() .thenComparing(Job::availableSince)) .collect(groupingBy(Job::jobType)) // False for production jobs -- keep step order and make a task for each application. : entry.getValue().stream() .collect(groupingBy(Job::applicationId))) .values().stream() .map(jobs -> (Supplier<Long>) jobs.stream() .filter(this::trigger) .limit(entry.getKey() ? 1 : Long.MAX_VALUE)::count)) .parallel().map(Supplier::get).reduce(0L, Long::sum); } /** * Attempts to trigger the given job for the given application and returns the outcome. * * If the build service can not find the given job, or claims it is illegal to trigger it, * the project id is removed from the application owning the job, to prevent further trigger attempts. 
*/ public boolean trigger(Job job) { log.log(LogLevel.INFO, String.format("Triggering %s: %s", job, job.triggering)); try { applications().lockOrThrow(job.applicationId(), application -> { if (application.get().deploymentJobs().deployedInternally()) jobs.start(job.applicationId(), job.jobType, new Versions(job.triggering.platform(), job.triggering.application(), job.triggering.sourcePlatform(), job.triggering.sourceApplication())); else buildService.trigger(job); applications().store(application.withJobTriggering(job.jobType, job.triggering)); }); return true; } catch (RuntimeException e) { log.log(LogLevel.WARNING, "Exception triggering " + job + ": " + e); if (e instanceof NoSuchElementException || e instanceof IllegalArgumentException) applications().lockOrThrow(job.applicationId(), application -> applications().store(application.withProjectId(OptionalLong.empty()))); return false; } } /** Force triggering of a job for given application. */ public List<JobType> forceTrigger(ApplicationId applicationId, JobType jobType, String user) { Application application = applications().require(applicationId); if (jobType == component) { if (application.deploymentJobs().deployedInternally()) throw new IllegalArgumentException(applicationId + " has no component job we can trigger."); buildService.trigger(BuildJob.of(applicationId, application.deploymentJobs().projectId().getAsLong(), jobType.jobName())); return singletonList(component); } Versions versions = Versions.from(application.change(), application, deploymentFor(application, jobType), controller.systemVersion()); String reason = "Job triggered manually by " + user; return (jobType.isProduction() && ! isTested(application, versions) ? 
testJobs(application, versions, reason, clock.instant()).stream() : Stream.of(deploymentJob(application, versions, application.change(), jobType, reason, clock.instant()))) .peek(this::trigger) .map(Job::jobType).collect(toList()); } /** * Triggers a change of this application * * @param applicationId the application to trigger * @throws IllegalArgumentException if this application already has an ongoing change */ public void triggerChange(ApplicationId applicationId, Change change) { applications().lockOrThrow(applicationId, application -> { if (application.get().changeAt(controller.clock().instant()).isPresent() && ! application.get().deploymentJobs().hasFailures()) throw new IllegalArgumentException("Could not start " + change + " on " + application + ": " + application.get().change() + " is already in progress"); application = application.withChange(change); if (change.application().isPresent()) application = application.withOutstandingChange(Change.empty()); applications().store(application); }); } /** Cancels a platform upgrade of the given application, and an application upgrade as well if {@code keepApplicationChange}. 
*/ public void cancelChange(ApplicationId applicationId, boolean keepApplicationChange) { applications().lockOrThrow(applicationId, application -> { applications().store(application.withChange(application.get().change().application() .filter(__ -> keepApplicationChange) .map(Change::of) .orElse(Change.empty()))); }); } // ---------- Conveniences ---------- private ApplicationController applications() { return controller.applications(); } private Optional<JobRun> successOn(Application application, JobType jobType, Versions versions) { return application.deploymentJobs().statusOf(jobType).flatMap(JobStatus::lastSuccess) .filter(versions::targetsMatch); } private Optional<Deployment> deploymentFor(Application application, JobType jobType) { return Optional.ofNullable(application.deployments().get(jobType.zone(controller.system()))); } private static <T extends Comparable<T>> Optional<T> max(Optional<T> o1, Optional<T> o2) { return ! o1.isPresent() ? o2 : ! o2.isPresent() ? o1 : o1.get().compareTo(o2.get()) >= 0 ? o1 : o2; } // ---------- Ready job computation ---------- /** Returns the set of all jobs which have changes to propagate from the upstream steps. */ private List<Job> computeReadyJobs() { return ApplicationList.from(applications().asList()) .withProjectId() .deploying() .idList().stream() .map(this::computeReadyJobs) .flatMap(Collection::stream) .collect(toList()); } /** * Finds the next step to trigger for the given application, if any, and returns these as a list. 
*/ private List<Job> computeReadyJobs(ApplicationId id) { List<Job> jobs = new ArrayList<>(); applications().get(id).ifPresent(application -> { Change change = application.changeAt(clock.instant()); Optional<Instant> completedAt = max(application.deploymentJobs().statusOf(systemTest) .<Instant>flatMap(job -> job.lastSuccess().map(JobRun::at)), application.deploymentJobs().statusOf(stagingTest) .<Instant>flatMap(job -> job.lastSuccess().map(JobRun::at))); String reason = "New change available"; List<Job> testJobs = null; // null means "uninitialised", while empty means "don't run any jobs". DeploymentSteps steps = steps(application.deploymentSpec()); if (change.isPresent()) { for (Step step : steps.production()) { List<JobType> stepJobs = steps.toJobs(step); List<JobType> remainingJobs = stepJobs.stream().filter(job -> !isComplete(change, application, job)).collect(toList()); if (!remainingJobs.isEmpty()) { // Change is incomplete; trigger remaining jobs if ready, or their test jobs if untested. for (JobType job : remainingJobs) { Versions versions = Versions.from(change, application, deploymentFor(application, job), controller.systemVersion()); if (isTested(application, versions)) { if (completedAt.isPresent() && canTrigger(job, versions, application, stepJobs)) { jobs.add(deploymentJob(application, versions, change, job, reason, completedAt.get())); } if (!alreadyTriggered(application, versions)) { testJobs = emptyList(); } } else if (testJobs == null) { testJobs = testJobs(application, versions, String.format("Testing deployment for %s (%s)", job.jobName(), versions.toString()), completedAt.orElseGet(clock::instant)); } } completedAt = Optional.empty(); } else { // All jobs are complete; find the time of completion of this step. if (stepJobs.isEmpty()) { // No jobs means this is a delay step. 
Duration delay = ((DeploymentSpec.Delay) step).duration(); completedAt = completedAt.map(at -> at.plus(delay)).filter(at -> !at.isAfter(clock.instant())); reason += " after a delay of " + delay; } else { completedAt = stepJobs.stream().map(job -> application.deploymentJobs().statusOf(job).get().lastCompleted().get().at()).max(naturalOrder()); reason = "Available change in " + stepJobs.stream().map(JobType::jobName).collect(joining(", ")); } } } } if (testJobs == null) // If nothing to test, but outstanding commits, test those. testJobs = testJobs(application, Versions.from(application.outstandingChange(), application, steps.sortedDeployments(application.productionDeployments().values()).stream().findFirst(), controller.systemVersion()), "Testing last changes outside prod", clock.instant()); jobs.addAll(testJobs); }); return Collections.unmodifiableList(jobs); } /** Returns whether given job should be triggered */ private boolean canTrigger(JobType job, Versions versions, Application application, List<JobType> parallelJobs) { if (jobStateOf(application, job) != idle) return false; // Are we already running jobs which are not in the set which can run in parallel with this? if (parallelJobs != null && ! parallelJobs.containsAll(runningProductionJobs(application))) return false; // Are there another suspended deployment such that we shouldn't simultaneously change this? if (job.isProduction() && isSuspendedInAnotherZone(application, job.zone(controller.system()))) return false; return triggerAt(clock.instant(), job, versions, application); } /** Returns whether given job should be triggered */ private boolean canTrigger(JobType job, Versions versions, Application application) { return canTrigger(job, versions, application, null); } private boolean isSuspendedInAnotherZone(Application application, ZoneId zone) { for (Deployment deployment : application.productionDeployments().values()) { if ( ! 
deployment.zone().equals(zone) && controller.applications().isSuspended(new DeploymentId(application.id(), deployment.zone()))) return true; } return false; } /** Returns whether job can trigger at given instant */ public boolean triggerAt(Instant instant, JobType job, Versions versions, Application application) { Optional<JobStatus> jobStatus = application.deploymentJobs().statusOf(job); if (!jobStatus.isPresent()) return true; if (jobStatus.get().isSuccess()) return true; // Success if (!jobStatus.get().lastCompleted().isPresent()) return true; // Never completed if (!jobStatus.get().firstFailing().isPresent()) return true; // Should not happen as firstFailing should be set for an unsuccessful job if (!versions.targetsMatch(jobStatus.get().lastCompleted().get())) return true; // Always trigger as targets have changed if (application.deploymentSpec().upgradePolicy() == DeploymentSpec.UpgradePolicy.canary) return true; // Don't throttle canaries Instant firstFailing = jobStatus.get().firstFailing().get().at(); Instant lastCompleted = jobStatus.get().lastCompleted().get().at(); // Retry all errors immediately for 1 minute if (firstFailing.isAfter(instant.minus(Duration.ofMinutes(1)))) return true; // Retry out of capacity errors in test environments every minute if (job.isTest() && jobStatus.get().isOutOfCapacity()) { return lastCompleted.isBefore(instant.minus(Duration.ofMinutes(1))); } // Retry other errors if (firstFailing.isAfter(instant.minus(Duration.ofHours(1)))) { // If we failed within the last hour ... return lastCompleted.isBefore(instant.minus(Duration.ofMinutes(10))); // ... 
retry every 10 minutes } return lastCompleted.isBefore(instant.minus(Duration.ofHours(2))); // Retry at most every 2 hours } // ---------- Job state helpers ---------- private List<JobType> runningProductionJobs(Application application) { return application.deploymentJobs().jobStatus().keySet().parallelStream() .filter(JobType::isProduction) .filter(job -> isRunning(application, job)) .collect(toList()); } /** Returns whether the given job is currently running; false if completed since last triggered, asking the build service otherwise. */ private boolean isRunning(Application application, JobType jobType) { return ! application.deploymentJobs().statusOf(jobType) .flatMap(job -> job.lastCompleted().map(run -> run.at().isAfter(job.lastTriggered().get().at()))) .orElse(false) && EnumSet.of(running, queued).contains(jobStateOf(application, jobType)); } private JobState jobStateOf(Application application, JobType jobType) { if (application.deploymentJobs().deployedInternally()) { Optional<Run> run = controller.jobController().last(application.id(), jobType); return run.isPresent() && ! run.get().hasEnded() ? JobState.running : JobState.idle; } return buildService.stateOf(BuildJob.of(application.id(), application.deploymentJobs().projectId().getAsLong(), jobType.jobName())); } // ---------- Completion logic ---------- /** * Returns whether the given change is complete for the given application for the given job. * * Any job is complete if the given change is already successful on that job. * A production job is also considered complete if its current change is strictly dominated by what * is already deployed in its zone, i.e., no parts of the change are upgrades, and the full current * change for the application downgrades the deployment, which is an acknowledgement that the deployed * version is broken somehow, such that the job may be locked in failure until a new version is released. 
*/ public boolean isComplete(Change change, Application application, JobType jobType) { Optional<Deployment> existingDeployment = deploymentFor(application, jobType); return application.deploymentJobs().statusOf(jobType).flatMap(JobStatus::lastSuccess) .map(job -> change.platform().map(job.platform()::equals).orElse(true) && change.application().map(job.application()::equals).orElse(true)) .orElse(false) || jobType.isProduction() && existingDeployment.map(deployment -> ! isUpgrade(change, deployment) && isDowngrade(application.change(), deployment)) .orElse(false); } private static boolean isUpgrade(Change change, Deployment deployment) { return change.upgrades(deployment.version()) || change.upgrades(deployment.applicationVersion()); } private static boolean isDowngrade(Change change, Deployment deployment) { return change.downgrades(deployment.version()) || change.downgrades(deployment.applicationVersion()); } private boolean isTested(Application application, Versions versions) { return testedIn(application, systemTest, versions) && testedIn(application, stagingTest, versions) || alreadyTriggered(application, versions); } public boolean testedIn(Application application, JobType testType, Versions versions) { if (testType == systemTest) return successOn(application, systemTest, versions).isPresent(); if (testType == stagingTest) return successOn(application, stagingTest, versions).filter(versions::sourcesMatchIfPresent).isPresent(); throw new IllegalArgumentException(testType + " is not a test job!"); } public boolean alreadyTriggered(Application application, Versions versions) { return application.deploymentJobs().jobStatus().values().stream() .filter(job -> job.type().isProduction()) .anyMatch(job -> job.lastTriggered() .filter(versions::targetsMatch) .filter(versions::sourcesMatchIfPresent) .isPresent()); } // ---------- Change management o_O ---------- private boolean acceptNewApplicationVersion(Application application) { if ( ! 
application.deploymentSpec().canChangeRevisionAt(clock.instant())) return false; if (application.change().application().isPresent()) return true; // Replacing a previous application change is ok. if (application.deploymentJobs().hasFailures()) return true; // Allow changes to fix upgrade problems. return ! application.changeAt(clock.instant()).platform().isPresent(); } private Change remainingChange(Application application) { DeploymentSteps steps = steps(application.deploymentSpec()); List<JobType> jobs = steps.production().isEmpty() ? steps.testJobs() : steps.productionJobs(); Change change = application.change(); if (jobs.stream().allMatch(job -> isComplete(application.change().withoutApplication(), application, job))) change = change.withoutPlatform(); if (jobs.stream().allMatch(job -> isComplete(application.change().withoutPlatform(), application, job))) change = change.withoutApplication(); return change; } // ---------- Version and job helpers ---------- /** * Returns the list of test jobs that should run now, and that need to succeed on the given versions for it to be considered tested. 
*/ private List<Job> testJobs(Application application, Versions versions, String reason, Instant availableSince) { List<Job> jobs = new ArrayList<>(); for (JobType jobType : steps(application.deploymentSpec()).testJobs()) { Optional<JobRun> completion = successOn(application, jobType, versions) .filter(run -> versions.sourcesMatchIfPresent(run) || jobType == systemTest); if (!completion.isPresent() && canTrigger(jobType, versions, application)) { jobs.add(deploymentJob(application, versions, application.change(), jobType, reason, availableSince)); } } return jobs; } private Job deploymentJob(Application application, Versions versions, Change change, JobType jobType, String reason, Instant availableSince) { boolean isRetry = application.deploymentJobs().statusOf(jobType) .map(JobStatus::isOutOfCapacity) .orElse(false); if (isRetry) reason += "; retrying on out of capacity"; JobRun triggering = JobRun.triggering(versions.targetPlatform(), versions.targetApplication(), versions.sourcePlatform(), versions.sourceApplication(), reason, clock.instant()); return new Job(application, triggering, jobType, availableSince, isRetry, change.application().isPresent()); } // ---------- Data containers ---------- private static class Job extends BuildJob { private final JobType jobType; private final JobRun triggering; private final Instant availableSince; private final boolean isRetry; private final boolean isApplicationUpgrade; private Job(Application application, JobRun triggering, JobType jobType, Instant availableSince, boolean isRetry, boolean isApplicationUpgrade) { super(application.id(), application.deploymentJobs().projectId().getAsLong(), jobType.jobName()); this.jobType = jobType; this.triggering = triggering; this.availableSince = availableSince; this.isRetry = isRetry; this.isApplicationUpgrade = isApplicationUpgrade; } JobType jobType() { return jobType; } Instant availableSince() { return availableSince; } // TODO jvenstad: This is 95% broken now. 
Change.at() can restore it. boolean isRetry() { return isRetry; } boolean applicationUpgrade() { return isApplicationUpgrade; } } }
controller-server/src/main/java/com/yahoo/vespa/hosted/controller/deployment/DeploymentTrigger.java
// Copyright 2017 Yahoo Holdings. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root. package com.yahoo.vespa.hosted.controller.deployment; import com.yahoo.config.application.api.DeploymentSpec; import com.yahoo.config.application.api.DeploymentSpec.Step; import com.yahoo.config.provision.ApplicationId; import com.yahoo.log.LogLevel; import com.yahoo.vespa.hosted.controller.Application; import com.yahoo.vespa.hosted.controller.ApplicationController; import com.yahoo.vespa.hosted.controller.Controller; import com.yahoo.vespa.hosted.controller.api.identifiers.DeploymentId; import com.yahoo.vespa.hosted.controller.api.integration.BuildService; import com.yahoo.vespa.hosted.controller.api.integration.BuildService.JobState; import com.yahoo.vespa.hosted.controller.api.integration.zone.ZoneId; import com.yahoo.vespa.hosted.controller.application.ApplicationList; import com.yahoo.vespa.hosted.controller.application.ApplicationVersion; import com.yahoo.vespa.hosted.controller.application.Change; import com.yahoo.vespa.hosted.controller.application.Deployment; import com.yahoo.vespa.hosted.controller.application.DeploymentJobs.JobReport; import com.yahoo.vespa.hosted.controller.api.integration.deployment.JobType; import com.yahoo.vespa.hosted.controller.application.JobStatus; import com.yahoo.vespa.hosted.controller.application.JobStatus.JobRun; import java.time.Clock; import java.time.Duration; import java.time.Instant; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.EnumSet; import java.util.List; import java.util.Map; import java.util.NoSuchElementException; import java.util.Objects; import java.util.Optional; import java.util.OptionalLong; import java.util.function.Supplier; import java.util.logging.Logger; import java.util.stream.Stream; import static com.yahoo.vespa.hosted.controller.api.integration.BuildService.BuildJob; import static 
com.yahoo.vespa.hosted.controller.api.integration.BuildService.JobState.idle; import static com.yahoo.vespa.hosted.controller.api.integration.BuildService.JobState.queued; import static com.yahoo.vespa.hosted.controller.api.integration.BuildService.JobState.running; import static com.yahoo.vespa.hosted.controller.api.integration.deployment.JobType.component; import static com.yahoo.vespa.hosted.controller.api.integration.deployment.JobType.stagingTest; import static com.yahoo.vespa.hosted.controller.api.integration.deployment.JobType.systemTest; import static java.util.Collections.emptyList; import static java.util.Collections.singletonList; import static java.util.Comparator.comparing; import static java.util.Comparator.naturalOrder; import static java.util.stream.Collectors.groupingBy; import static java.util.stream.Collectors.joining; import static java.util.stream.Collectors.partitioningBy; import static java.util.stream.Collectors.toList; /** * Responsible for scheduling deployment jobs in a build system and keeping * {@link Application#change()} in sync with what is scheduled. * * This class is multi-thread safe. 
* * @author bratseth * @author mpolden * @author jonmv */ public class DeploymentTrigger { private final static Logger log = Logger.getLogger(DeploymentTrigger.class.getName()); private final Controller controller; private final Clock clock; private final BuildService buildService; private final JobController jobs; public DeploymentTrigger(Controller controller, BuildService buildService, Clock clock) { this.controller = Objects.requireNonNull(controller, "controller cannot be null"); this.clock = Objects.requireNonNull(clock, "clock cannot be null"); this.buildService = Objects.requireNonNull(buildService, "buildService cannot be null"); this.jobs = controller.jobController(); } public DeploymentSteps steps(DeploymentSpec spec) { return new DeploymentSteps(spec, controller::system); } /** * Records information when a job completes (successfully or not). This information is used when deciding what to * trigger next. */ public void notifyOfCompletion(JobReport report) { log.log(LogLevel.INFO, String.format("Notified of %s for %s of %s (%d)", report.jobError().map(e -> e.toString() + " error") .orElse("success"), report.jobType(), report.applicationId(), report.projectId())); if ( ! 
applications().get(report.applicationId()).isPresent()) { log.log(LogLevel.WARNING, "Ignoring completion of job of project '" + report.projectId() + "': Unknown application '" + report.applicationId() + "'"); return; } applications().lockOrThrow(report.applicationId(), application -> { JobRun triggering; if (report.jobType() == component) { ApplicationVersion applicationVersion = ApplicationVersion.from(report.sourceRevision().get(), report.buildNumber()); triggering = JobRun.triggering(application.get().oldestDeployedPlatform().orElse(controller.systemVersion()), applicationVersion, Optional.empty(), Optional.empty(), "Application commit", clock.instant()); if (report.success()) { if (acceptNewApplicationVersion(application.get())) { application = application.withChange(application.get().change().with(applicationVersion)) .withOutstandingChange(Change.empty()); if (application.get().deploymentJobs().deployedInternally()) for (Run run : jobs.active()) if (run.id().application().equals(report.applicationId())) jobs.abort(run.id()); } else application = application.withOutstandingChange(Change.of(applicationVersion)); } } else { triggering = application.get().deploymentJobs().statusOf(report.jobType()) .filter(job -> job.lastTriggered().isPresent() && job.lastCompleted() .map(completion -> ! completion.at().isAfter(job.lastTriggered().get().at())) .orElse(true)) .orElseThrow(() -> new IllegalStateException("Notified of completion of " + report.jobType().jobName() + " for " + report.applicationId() + ", but that has neither been triggered nor deployed")) .lastTriggered().get(); } application = application.withJobCompletion(report.projectId(), report.jobType(), triggering.completion(report.buildNumber(), clock.instant()), report.jobError()); application = application.withChange(remainingChange(application.get())); applications().store(application); }); } /** Returns a map of jobs that are scheduled to be run, grouped by the job type */ public Map<JobType, ? 
extends List<? extends BuildJob>> jobsToRun() { return computeReadyJobs().stream().collect(groupingBy(Job::jobType)); } /** * Finds and triggers jobs that can and should run but are currently not, and returns the number of triggered jobs. * * Only one job is triggered each run for test jobs, since their environments have limited capacity. */ public long triggerReadyJobs() { return computeReadyJobs().stream() .collect(partitioningBy(job -> job.jobType().isTest())) .entrySet().stream() .flatMap(entry -> (entry.getKey() // True for capacity constrained zones -- sort by priority and make a task for each job type. ? entry.getValue().stream() .sorted(comparing(Job::isRetry) .thenComparing(Job::applicationUpgrade) .reversed() .thenComparing(Job::availableSince)) .collect(groupingBy(Job::jobType)) // False for production jobs -- keep step order and make a task for each application. : entry.getValue().stream() .collect(groupingBy(Job::applicationId))) .values().stream() .map(jobs -> (Supplier<Long>) jobs.stream() .filter(this::trigger) .limit(entry.getKey() ? 1 : Long.MAX_VALUE)::count)) .parallel().map(Supplier::get).reduce(0L, Long::sum); } /** * Attempts to trigger the given job for the given application and returns the outcome. * * If the build service can not find the given job, or claims it is illegal to trigger it, * the project id is removed from the application owning the job, to prevent further trigger attempts. 
*/ public boolean trigger(Job job) { log.log(LogLevel.INFO, String.format("Triggering %s: %s", job, job.triggering)); try { applications().lockOrThrow(job.applicationId(), application -> { if (application.get().deploymentJobs().deployedInternally()) jobs.start(job.applicationId(), job.jobType, new Versions(job.triggering.platform(), job.triggering.application(), job.triggering.sourcePlatform(), job.triggering.sourceApplication())); else buildService.trigger(job); applications().store(application.withJobTriggering(job.jobType, job.triggering)); }); return true; } catch (RuntimeException e) { log.log(LogLevel.WARNING, "Exception triggering " + job + ": " + e); if (e instanceof NoSuchElementException || e instanceof IllegalArgumentException) applications().lockOrThrow(job.applicationId(), application -> applications().store(application.withProjectId(OptionalLong.empty()))); return false; } } /** Force triggering of a job for given application. */ public List<JobType> forceTrigger(ApplicationId applicationId, JobType jobType, String user) { Application application = applications().require(applicationId); if (jobType == component) { if (application.deploymentJobs().deployedInternally()) throw new IllegalArgumentException(applicationId + " has no component job we can trigger."); buildService.trigger(BuildJob.of(applicationId, application.deploymentJobs().projectId().getAsLong(), jobType.jobName())); return singletonList(component); } Versions versions = Versions.from(application.change(), application, deploymentFor(application, jobType), controller.systemVersion()); String reason = "Job triggered manually by " + user; return (jobType.isProduction() && ! isTested(application, versions) ? 
testJobs(application, versions, reason, clock.instant()).stream() : Stream.of(deploymentJob(application, versions, application.change(), jobType, reason, clock.instant()))) .peek(this::trigger) .map(Job::jobType).collect(toList()); } /** * Triggers a change of this application * * @param applicationId the application to trigger * @throws IllegalArgumentException if this application already has an ongoing change */ public void triggerChange(ApplicationId applicationId, Change change) { applications().lockOrThrow(applicationId, application -> { if (application.get().changeAt(controller.clock().instant()).isPresent() && ! application.get().deploymentJobs().hasFailures()) throw new IllegalArgumentException("Could not start " + change + " on " + application + ": " + application.get().change() + " is already in progress"); application = application.withChange(change); if (change.application().isPresent()) application = application.withOutstandingChange(Change.empty()); applications().store(application); }); } /** Cancels a platform upgrade of the given application, and an application upgrade as well if {@code keepApplicationChange}. 
*/ public void cancelChange(ApplicationId applicationId, boolean keepApplicationChange) { applications().lockOrThrow(applicationId, application -> { applications().store(application.withChange(application.get().change().application() .filter(__ -> keepApplicationChange) .map(Change::of) .orElse(Change.empty()))); }); } // ---------- Conveniences ---------- private ApplicationController applications() { return controller.applications(); } private Optional<JobRun> successOn(Application application, JobType jobType, Versions versions) { return application.deploymentJobs().statusOf(jobType).flatMap(JobStatus::lastSuccess) .filter(versions::targetsMatch); } private Optional<Deployment> deploymentFor(Application application, JobType jobType) { return Optional.ofNullable(application.deployments().get(jobType.zone(controller.system()))); } private static <T extends Comparable<T>> Optional<T> max(Optional<T> o1, Optional<T> o2) { return ! o1.isPresent() ? o2 : ! o2.isPresent() ? o1 : o1.get().compareTo(o2.get()) >= 0 ? o1 : o2; } // ---------- Ready job computation ---------- /** Returns the set of all jobs which have changes to propagate from the upstream steps. */ private List<Job> computeReadyJobs() { return ApplicationList.from(applications().asList()) .withProjectId() .deploying() .idList().stream() .map(this::computeReadyJobs) .flatMap(Collection::stream) .collect(toList()); } /** * Finds the next step to trigger for the given application, if any, and returns these as a list. 
*/ private List<Job> computeReadyJobs(ApplicationId id) { List<Job> jobs = new ArrayList<>(); applications().get(id).ifPresent(application -> { Change change = application.changeAt(clock.instant()); Optional<Instant> completedAt = max(application.deploymentJobs().statusOf(systemTest) .<Instant>flatMap(job -> job.lastSuccess().map(JobRun::at)), application.deploymentJobs().statusOf(stagingTest) .<Instant>flatMap(job -> job.lastSuccess().map(JobRun::at))); String reason = "New change available"; List<Job> testJobs = null; // null means "uninitialised", while empty means "don't run any jobs". DeploymentSteps steps = steps(application.deploymentSpec()); if (change.isPresent()) { for (Step step : steps.production()) { List<JobType> stepJobs = steps.toJobs(step); List<JobType> remainingJobs = stepJobs.stream().filter(job -> !isComplete(change, application, job)).collect(toList()); if (!remainingJobs.isEmpty()) { // Change is incomplete; trigger remaining jobs if ready, or their test jobs if untested. for (JobType job : remainingJobs) { Versions versions = Versions.from(change, application, deploymentFor(application, job), controller.systemVersion()); if (isTested(application, versions)) { if (completedAt.isPresent() && canTrigger(job, versions, application, stepJobs)) { jobs.add(deploymentJob(application, versions, change, job, reason, completedAt.get())); } if (!alreadyTriggered(application, versions)) { testJobs = emptyList(); } } else if (testJobs == null) { testJobs = testJobs(application, versions, String.format("Testing deployment for %s (%s)", job.jobName(), versions.toString()), completedAt.orElseGet(clock::instant)); } } completedAt = Optional.empty(); } else { // All jobs are complete; find the time of completion of this step. if (stepJobs.isEmpty()) { // No jobs means this is delay step. 
Duration delay = ((DeploymentSpec.Delay) step).duration(); completedAt = completedAt.map(at -> at.plus(delay)).filter(at -> !at.isAfter(clock.instant())); reason += " after a delay of " + delay; } else { completedAt = stepJobs.stream().map(job -> application.deploymentJobs().statusOf(job).get().lastCompleted().get().at()).max(naturalOrder()); reason = "Available change in " + stepJobs.stream().map(JobType::jobName).collect(joining(", ")); } } } } if (testJobs == null) { testJobs = testJobs(application, Versions.from(application, controller.systemVersion()), "Testing last changes outside prod", clock.instant()); } jobs.addAll(testJobs); }); return Collections.unmodifiableList(jobs); } /** Returns whether given job should be triggered */ private boolean canTrigger(JobType job, Versions versions, Application application, List<JobType> parallelJobs) { if (jobStateOf(application, job) != idle) return false; // Are we already running jobs which are not in the set which can run in parallel with this? if (parallelJobs != null && ! parallelJobs.containsAll(runningProductionJobs(application))) return false; // Are there another suspended deployment such that we shouldn't simultaneously change this? if (job.isProduction() && isSuspendedInAnotherZone(application, job.zone(controller.system()))) return false; return triggerAt(clock.instant(), job, versions, application); } /** Returns whether given job should be triggered */ private boolean canTrigger(JobType job, Versions versions, Application application) { return canTrigger(job, versions, application, null); } private boolean isSuspendedInAnotherZone(Application application, ZoneId zone) { for (Deployment deployment : application.productionDeployments().values()) { if ( ! 
deployment.zone().equals(zone) && controller.applications().isSuspended(new DeploymentId(application.id(), deployment.zone()))) return true; } return false; } /** Returns whether job can trigger at given instant */ public boolean triggerAt(Instant instant, JobType job, Versions versions, Application application) { Optional<JobStatus> jobStatus = application.deploymentJobs().statusOf(job); if (!jobStatus.isPresent()) return true; if (jobStatus.get().isSuccess()) return true; // Success if (!jobStatus.get().lastCompleted().isPresent()) return true; // Never completed if (!jobStatus.get().firstFailing().isPresent()) return true; // Should not happen as firstFailing should be set for an unsuccessful job if (!versions.targetsMatch(jobStatus.get().lastCompleted().get())) return true; // Always trigger as targets have changed if (application.deploymentSpec().upgradePolicy() == DeploymentSpec.UpgradePolicy.canary) return true; // Don't throttle canaries Instant firstFailing = jobStatus.get().firstFailing().get().at(); Instant lastCompleted = jobStatus.get().lastCompleted().get().at(); // Retry all errors immediately for 1 minute if (firstFailing.isAfter(instant.minus(Duration.ofMinutes(1)))) return true; // Retry out of capacity errors in test environments every minute if (job.isTest() && jobStatus.get().isOutOfCapacity()) { return lastCompleted.isBefore(instant.minus(Duration.ofMinutes(1))); } // Retry other errors if (firstFailing.isAfter(instant.minus(Duration.ofHours(1)))) { // If we failed within the last hour ... return lastCompleted.isBefore(instant.minus(Duration.ofMinutes(10))); // ... 
retry every 10 minutes } return lastCompleted.isBefore(instant.minus(Duration.ofHours(2))); // Retry at most every 2 hours } // ---------- Job state helpers ---------- private List<JobType> runningProductionJobs(Application application) { return application.deploymentJobs().jobStatus().keySet().parallelStream() .filter(JobType::isProduction) .filter(job -> isRunning(application, job)) .collect(toList()); } /** Returns whether the given job is currently running; false if completed since last triggered, asking the build service otherwise. */ private boolean isRunning(Application application, JobType jobType) { return ! application.deploymentJobs().statusOf(jobType) .flatMap(job -> job.lastCompleted().map(run -> run.at().isAfter(job.lastTriggered().get().at()))) .orElse(false) && EnumSet.of(running, queued).contains(jobStateOf(application, jobType)); } private JobState jobStateOf(Application application, JobType jobType) { if (application.deploymentJobs().deployedInternally()) { Optional<Run> run = controller.jobController().last(application.id(), jobType); return run.isPresent() && ! run.get().hasEnded() ? JobState.running : JobState.idle; } return buildService.stateOf(BuildJob.of(application.id(), application.deploymentJobs().projectId().getAsLong(), jobType.jobName())); } // ---------- Completion logic ---------- /** * Returns whether the given change is complete for the given application for the given job. * * Any job is complete if the given change is already successful on that job. * A production job is also considered complete if its current change is strictly dominated by what * is already deployed in its zone, i.e., no parts of the change are upgrades, and the full current * change for the application downgrades the deployment, which is an acknowledgement that the deployed * version is broken somehow, such that the job may be locked in failure until a new version is released. 
*/ public boolean isComplete(Change change, Application application, JobType jobType) { Optional<Deployment> existingDeployment = deploymentFor(application, jobType); return application.deploymentJobs().statusOf(jobType).flatMap(JobStatus::lastSuccess) .map(job -> change.platform().map(job.platform()::equals).orElse(true) && change.application().map(job.application()::equals).orElse(true)) .orElse(false) || jobType.isProduction() && existingDeployment.map(deployment -> ! isUpgrade(change, deployment) && isDowngrade(application.change(), deployment)) .orElse(false); } private static boolean isUpgrade(Change change, Deployment deployment) { return change.upgrades(deployment.version()) || change.upgrades(deployment.applicationVersion()); } private static boolean isDowngrade(Change change, Deployment deployment) { return change.downgrades(deployment.version()) || change.downgrades(deployment.applicationVersion()); } private boolean isTested(Application application, Versions versions) { return testedIn(application, systemTest, versions) && testedIn(application, stagingTest, versions) || alreadyTriggered(application, versions); } public boolean testedIn(Application application, JobType testType, Versions versions) { if (testType == systemTest) return successOn(application, systemTest, versions).isPresent(); if (testType == stagingTest) return successOn(application, stagingTest, versions).filter(versions::sourcesMatchIfPresent).isPresent(); throw new IllegalArgumentException(testType + " is not a test job!"); } public boolean alreadyTriggered(Application application, Versions versions) { return application.deploymentJobs().jobStatus().values().stream() .filter(job -> job.type().isProduction()) .anyMatch(job -> job.lastTriggered() .filter(versions::targetsMatch) .filter(versions::sourcesMatchIfPresent) .isPresent()); } // ---------- Change management o_O ---------- private boolean acceptNewApplicationVersion(Application application) { if ( ! 
application.deploymentSpec().canChangeRevisionAt(clock.instant())) return false; if (application.change().application().isPresent()) return true; // Replacing a previous application change is ok. if (application.deploymentJobs().hasFailures()) return true; // Allow changes to fix upgrade problems. return ! application.changeAt(clock.instant()).platform().isPresent(); } private Change remainingChange(Application application) { DeploymentSteps steps = steps(application.deploymentSpec()); List<JobType> jobs = steps.production().isEmpty() ? steps.testJobs() : steps.productionJobs(); Change change = application.change(); if (jobs.stream().allMatch(job -> isComplete(application.change().withoutApplication(), application, job))) change = change.withoutPlatform(); if (jobs.stream().allMatch(job -> isComplete(application.change().withoutPlatform(), application, job))) change = change.withoutApplication(); return change; } // ---------- Version and job helpers ---------- /** * Returns the list of test jobs that should run now, and that need to succeed on the given versions for it to be considered tested. 
*/ private List<Job> testJobs(Application application, Versions versions, String reason, Instant availableSince) { List<Job> jobs = new ArrayList<>(); for (JobType jobType : steps(application.deploymentSpec()).testJobs()) { Optional<JobRun> completion = successOn(application, jobType, versions) .filter(run -> versions.sourcesMatchIfPresent(run) || jobType == systemTest); if (!completion.isPresent() && canTrigger(jobType, versions, application)) { jobs.add(deploymentJob(application, versions, application.change(), jobType, reason, availableSince)); } } return jobs; } private Job deploymentJob(Application application, Versions versions, Change change, JobType jobType, String reason, Instant availableSince) { boolean isRetry = application.deploymentJobs().statusOf(jobType) .map(JobStatus::isOutOfCapacity) .orElse(false); if (isRetry) reason += "; retrying on out of capacity"; JobRun triggering = JobRun.triggering(versions.targetPlatform(), versions.targetApplication(), versions.sourcePlatform(), versions.sourceApplication(), reason, clock.instant()); return new Job(application, triggering, jobType, availableSince, isRetry, change.application().isPresent()); } // ---------- Data containers ---------- private static class Job extends BuildJob { private final JobType jobType; private final JobRun triggering; private final Instant availableSince; private final boolean isRetry; private final boolean isApplicationUpgrade; private Job(Application application, JobRun triggering, JobType jobType, Instant availableSince, boolean isRetry, boolean isApplicationUpgrade) { super(application.id(), application.deploymentJobs().projectId().getAsLong(), jobType.jobName()); this.jobType = jobType; this.triggering = triggering; this.availableSince = availableSince; this.isRetry = isRetry; this.isApplicationUpgrade = isApplicationUpgrade; } JobType jobType() { return jobType; } Instant availableSince() { return availableSince; } // TODO jvenstad: This is 95% broken now. 
Change.at() can restore it. boolean isRetry() { return isRetry; } boolean applicationUpgrade() { return isApplicationUpgrade; } } }
Test outstanding commits, when nothing else to test
controller-server/src/main/java/com/yahoo/vespa/hosted/controller/deployment/DeploymentTrigger.java
Test outstanding commits, when nothing else to test
<ide><path>ontroller-server/src/main/java/com/yahoo/vespa/hosted/controller/deployment/DeploymentTrigger.java <ide> if (!alreadyTriggered(application, versions)) { <ide> testJobs = emptyList(); <ide> } <del> } else if (testJobs == null) { <add> } <add> else if (testJobs == null) { <ide> testJobs = testJobs(application, versions, <ide> String.format("Testing deployment for %s (%s)", <ide> job.jobName(), versions.toString()), <ide> } <ide> } <ide> completedAt = Optional.empty(); <del> } else { // All jobs are complete; find the time of completion of this step. <del> if (stepJobs.isEmpty()) { // No jobs means this is delay step. <add> } <add> else { // All jobs are complete; find the time of completion of this step. <add> if (stepJobs.isEmpty()) { // No jobs means this is a delay step. <ide> Duration delay = ((DeploymentSpec.Delay) step).duration(); <ide> completedAt = completedAt.map(at -> at.plus(delay)).filter(at -> !at.isAfter(clock.instant())); <ide> reason += " after a delay of " + delay; <del> } else { <add> } <add> else { <ide> completedAt = stepJobs.stream().map(job -> application.deploymentJobs().statusOf(job).get().lastCompleted().get().at()).max(naturalOrder()); <ide> reason = "Available change in " + stepJobs.stream().map(JobType::jobName).collect(joining(", ")); <ide> } <ide> } <ide> } <ide> } <del> if (testJobs == null) { <del> testJobs = testJobs(application, Versions.from(application, controller.systemVersion()), <add> if (testJobs == null) // If nothing to test, but outstanding commits, test those. <add> testJobs = testJobs(application, Versions.from(application.outstandingChange(), <add> application, <add> steps.sortedDeployments(application.productionDeployments().values()).stream().findFirst(), <add> controller.systemVersion()), <ide> "Testing last changes outside prod", clock.instant()); <del> } <ide> jobs.addAll(testJobs); <ide> }); <ide> return Collections.unmodifiableList(jobs);
Java
mit
25f2a5957bc5e4eee67fe52aab465cb774befb86
0
rjhdby/motocitizen
package motocitizen.Activity; import android.location.Location; import android.os.Bundle; import android.support.annotation.NonNull; import android.support.v4.app.FragmentActivity; import android.support.v4.app.FragmentManager; import android.text.Editable; import android.text.TextWatcher; import android.view.KeyEvent; import android.view.View; import android.widget.Button; import android.widget.EditText; import android.widget.TextView; import android.widget.Toast; import com.google.android.gms.maps.CameraUpdateFactory; import com.google.android.gms.maps.GoogleMap; import com.google.android.gms.maps.SupportMapFragment; import com.google.android.gms.maps.model.CameraPosition; import com.google.android.gms.maps.model.CircleOptions; import com.google.android.gms.maps.model.LatLng; import com.google.android.gms.maps.model.MarkerOptions; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import java.util.Date; import motocitizen.MyApp; import motocitizen.accident.Accident; import motocitizen.app.general.user.Role; import motocitizen.content.Content; import motocitizen.content.Medicine; import motocitizen.content.AccidentStatus; import motocitizen.content.Type; import motocitizen.draw.Resources; import motocitizen.geolocation.MyLocationManager; import motocitizen.main.R; import motocitizen.network.requests.AsyncTaskCompleteListener; import motocitizen.network.requests.CreateAccidentRequest; import motocitizen.startup.Preferences; import motocitizen.utils.Const; import motocitizen.utils.MyUtils; public class CreateAccActivity extends FragmentActivity implements View.OnClickListener { private final int RADIUS = 1000; private final int TYPE = R.id.mc_create_type_frame; private final int DESCRIPTION = R.id.mc_create_final_frame; private final int ACCIDENT = R.id.mc_create_acc_frame; private final int MEDICINE = R.id.mc_create_people_frame; private final int MAP = R.id.mc_create_map; private Accident accident; private Boolean confirmLock; 
private int currentScreen; private GoogleMap map; private Button confirmButton; private boolean complete; private Location initialLocation; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.create_point); initialLocation = MyLocationManager.getLocation(this); accident = createDefaultAccident(); confirmLock = false; map = makeMap(); currentScreen = MAP; confirmButton = (Button) findViewById(R.id.CREATE); ((EditText) findViewById(R.id.mc_create_final_text)).addTextChangedListener(new TextWatcher() { @Override public void beforeTextChanged(CharSequence s, int start, int count, int after) {} @Override public void onTextChanged(CharSequence s, int start, int before, int count) { accident.setDescription(s.toString()); setComplete(); } @Override public void afterTextChanged(Editable s) {} }); setUpScreen(MAP); refreshDescription(); setupListener(); } private Accident createDefaultAccident() { JSONObject accident = new JSONObject(); try { accident.put("id", 0); accident.put("lat", initialLocation.getLatitude()); accident.put("lon", initialLocation.getLongitude()); accident.put("owner_id", Preferences.getUserId()); accident.put("owner", Preferences.getUserName()); accident.put("status", AccidentStatus.ACTIVE.toCode()); accident.put("uxtime", String.valueOf(System.currentTimeMillis() / 1000L)); accident.put("address", ""); accident.put("descr", ""); accident.put("type", Type.OTHER.toCode()); accident.put("med", Medicine.UNKNOWN.toCode()); accident.put("m", new JSONArray("[]")); accident.put("h", new JSONArray("[]")); accident.put("v", new JSONArray("[]")); } catch (JSONException e) { e.printStackTrace(); } return new Accident(this, accident); } private GoogleMap makeMap() { FragmentManager fragmentManager = this.getSupportFragmentManager(); SupportMapFragment mapFragment = (SupportMapFragment) fragmentManager.findFragmentById(R.id.mc_create_map_container); GoogleMap map = mapFragment.getMap(); 
map.animateCamera(CameraUpdateFactory.newLatLngZoom(MyUtils.LocationToLatLng(accident.getLocation()), 16)); map.setMyLocationEnabled(true); map.getUiSettings().setMyLocationButtonEnabled(true); map.getUiSettings().setZoomControlsEnabled(true); if (!Role.isModerator()) { CircleOptions circleOptions = new CircleOptions().center(MyUtils.LocationToLatLng(initialLocation)).radius(RADIUS).fillColor(0x20FF0000); map.addCircle(circleOptions); map.setOnCameraChangeListener(new GoogleMap.OnCameraChangeListener() { @Override public void onCameraChange(CameraPosition camera) { Button mcCreateFineAddressConfirm = (Button) findViewById(R.id.ADDRESS); if (initialLocation != null) { double distance = MyUtils.LatLngToLocation(camera.target).distanceTo(initialLocation); if (distance > RADIUS) { mcCreateFineAddressConfirm.setEnabled(false); } else { mcCreateFineAddressConfirm.setEnabled(true); } } else { mcCreateFineAddressConfirm.setEnabled(false); } } }); } map.clear(); for (int id : Content.getIds()) { motocitizen.accident.Accident point = Content.get(id); if (point.isInvisible()) continue; String title = point.getType().toString(); if (point.getMedicine() != Medicine.NO) { title += ", " + point.getMedicine().toString(); } title += ", " + MyUtils.getIntervalFromNowInText(point.getTime()) + " назад"; float alpha; int age = (int) (((new Date()).getTime() - point.getTime().getTime()) / 3600000); if (age < 2) { alpha = 1.0f; } else if (age < 6) { alpha = 0.5f; } else { alpha = 0.2f; } map.addMarker(new MarkerOptions().position(new LatLng(point.getLat(), point.getLon())).title(title).icon(Resources.getMapBitmapDescriptor(point.getType())).alpha(alpha)); } return map; } public void setComplete() { this.complete = accident.isAccident() || accident.getDescription().length() > 6; setConfirm(isComplete()); } private void setUpScreen(int id) { hideAll(); findViewById(id).setVisibility(View.VISIBLE); currentScreen = id; if (id == MAP) { findViewById(R.id.BACK).setEnabled(false); } else { 
findViewById(R.id.BACK).setEnabled(true); } } private void refreshDescription() { if (accident.getMedicine() == Medicine.UNKNOWN) { ((TextView) findViewById(R.id.mc_create_what)).setText(accident.getType().toString()); } else { ((TextView) findViewById(R.id.mc_create_what)).setText(accident.getType().toString() + ". " + accident.getMedicine().toString()); } ((TextView) findViewById(R.id.mc_create_who)).setText(Content.auth.getLogin()); ((TextView) findViewById(R.id.mc_create_where)).setText(accident.getAddress()); ((TextView) findViewById(R.id.mc_create_when)).setText(Const.DATE_FORMAT.format(accident.getTime())); } private void setupListener() { Integer[] ids = {R.id.BREAK, R.id.STEAL, R.id.OTHER, R.id.ACCIDENT, R.id.MOTO_AUTO, R.id.SOLO, R.id.MOTO_MOTO, R.id.MOTO_MAN, R.id.PEOPLE_OK, R.id.PEOPLE_LIGHT, R.id.PEOPLE_HEAVY, R.id.PEOPLE_LETHAL, R.id.PEOPLE_UNKNOWN, R.id.ADDRESS, R.id.CREATE, R.id.CANCEL, R.id.BACK}; for (int id : ids) findViewById(id).setOnClickListener(this); } private void setConfirm(Boolean status) { if (!confirmLock) confirmButton.setEnabled(status); } public boolean isComplete() { return complete; } private void hideAll() { Integer[] ids = {TYPE, MAP, MEDICINE, DESCRIPTION, ACCIDENT}; for (int id : ids) findViewById(id).setVisibility(View.INVISIBLE); } @Override public void onClick(View v) { int id = v.getId(); switch (id) { case R.id.BREAK: case R.id.STEAL: case R.id.OTHER: accident.setType(getSelectedType(id)); setUpScreen(DESCRIPTION); break; case R.id.MOTO_AUTO: case R.id.SOLO: case R.id.MOTO_MOTO: case R.id.MOTO_MAN: accident.setType(getSelectedType(id)); setUpScreen(MEDICINE); break; case R.id.ACCIDENT: setUpScreen(ACCIDENT); break; case R.id.PEOPLE_OK: case R.id.PEOPLE_LIGHT: case R.id.PEOPLE_HEAVY: case R.id.PEOPLE_LETHAL: case R.id.PEOPLE_UNKNOWN: accident.setMedicine(getSelectedMedicine(id)); setUpScreen(DESCRIPTION); setComplete(); break; case R.id.ADDRESS: accident.setLatLng(map.getCameraPosition().target); 
accident.setAddress(((MyApp) this.getApplicationContext()).getAddres(accident.getLocation())); setUpScreen(TYPE); break; case R.id.CREATE: confirm(); break; case R.id.CANCEL: //TODO Добавить подтверждение finish(); break; case R.id.BACK: backButton(); break; } refreshDescription(); } private Type getSelectedType(int id) { switch (id) { case R.id.BREAK: return Type.BREAK; case R.id.STEAL: return Type.STEAL; case R.id.MOTO_AUTO: return Type.MOTO_AUTO; case R.id.SOLO: return Type.SOLO; case R.id.MOTO_MOTO: return Type.MOTO_MOTO; case R.id.MOTO_MAN: return Type.MOTO_MAN; case R.id.OTHER: default: return Type.OTHER; } } private Medicine getSelectedMedicine(int id) { switch (id) { case R.id.PEOPLE_OK: return Medicine.NO; case R.id.PEOPLE_LIGHT: return Medicine.LIGHT; case R.id.PEOPLE_HEAVY: return Medicine.HEAVY; case R.id.PEOPLE_LETHAL: return Medicine.LETHAL; case R.id.PEOPLE_UNKNOWN: default: return Medicine.UNKNOWN; } } private void confirm() { disableConfirm(); CreateAccidentRequest request = new CreateAccidentRequest(new CreateAccidentCallback(), this); request.setType(accident.getType()); request.setMed(accident.getMedicine()); request.setAddress(accident.getAddress()); request.setLocation(accident.getLocation()); request.setDescription(accident.getDescription()); request.setCreated(accident.getTime()); request.execute(); } private void backButton() { switch (currentScreen) { case MAP: finish(); break; case MEDICINE: setUpScreen(ACCIDENT); break; case ACCIDENT: setUpScreen(TYPE); break; case TYPE: setUpScreen(MAP); break; case DESCRIPTION: if (accident.isAccident()) { setUpScreen(MEDICINE); } else { setUpScreen(TYPE); } break; } setInComplete(); refreshDescription(); } private void disableConfirm() { setConfirm(false); confirmLock = true; } public void setInComplete() { this.complete = false; setConfirm(false); } @Override public boolean onKeyUp(int keycode, @NonNull KeyEvent e) { switch (keycode) { case KeyEvent.KEYCODE_BACK: backButton(); return true; } return 
super.onKeyUp(keycode, e); } private void enableConfirm() { confirmLock = false; setConfirm(true); } private void message(String text) { Toast.makeText(this, text, Toast.LENGTH_LONG).show(); } private class CreateAccidentCallback implements AsyncTaskCompleteListener { @Override public void onTaskComplete(JSONObject result) { if (result.has("error")) { try { message(result.getString("error")); } catch (JSONException e) { message("Неизвестная ошибка" + result.toString()); e.printStackTrace(); } } else { finish(); } enableConfirm(); } } }
Motocitizen/src/motocitizen/Activity/CreateAccActivity.java
package motocitizen.Activity; import android.location.Location; import android.os.Bundle; import android.support.annotation.NonNull; import android.support.v4.app.FragmentActivity; import android.support.v4.app.FragmentManager; import android.text.Editable; import android.text.TextWatcher; import android.view.KeyEvent; import android.view.View; import android.widget.Button; import android.widget.EditText; import android.widget.TextView; import android.widget.Toast; import com.google.android.gms.maps.CameraUpdateFactory; import com.google.android.gms.maps.GoogleMap; import com.google.android.gms.maps.SupportMapFragment; import com.google.android.gms.maps.model.CameraPosition; import com.google.android.gms.maps.model.CircleOptions; import com.google.android.gms.maps.model.LatLng; import com.google.android.gms.maps.model.MarkerOptions; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import java.util.Date; import motocitizen.MyApp; import motocitizen.accident.Accident; import motocitizen.app.general.user.Role; import motocitizen.content.Content; import motocitizen.content.Medicine; import motocitizen.content.AccidentStatus; import motocitizen.content.Type; import motocitizen.draw.Resources; import motocitizen.geolocation.MyLocationManager; import motocitizen.main.R; import motocitizen.network.requests.AsyncTaskCompleteListener; import motocitizen.network.requests.CreateAccidentRequest; import motocitizen.startup.Preferences; import motocitizen.utils.Const; import motocitizen.utils.MyUtils; public class CreateAccActivity extends FragmentActivity implements View.OnClickListener { private final int RADIUS = 1000; private final int TYPE = R.id.mc_create_type_frame; private final int DESCRIPTION = R.id.mc_create_final_frame; private final int ACCIDENT = R.id.mc_create_acc_frame; private final int MEDICINE = R.id.mc_create_people_frame; private final int MAP = R.id.mc_create_map; private Accident accident; private Boolean confirmLock; 
private int currentScreen; private GoogleMap map; private Button confirmButton; private boolean complete; private Location initialLocation; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.create_point); initialLocation = MyLocationManager.getLocation(this); accident = createDefaultAccident(); confirmLock = false; map = makeMap(); currentScreen = MAP; confirmButton = (Button) findViewById(R.id.CREATE); ((EditText) findViewById(R.id.mc_create_final_text)).addTextChangedListener(new TextWatcher() { @Override public void beforeTextChanged(CharSequence s, int start, int count, int after) {} @Override public void onTextChanged(CharSequence s, int start, int before, int count) { accident.setDescription(s.toString()); setComplete(); } @Override public void afterTextChanged(Editable s) {} }); setUpScreen(MAP); refreshDescription(); setupListener(); } private Accident createDefaultAccident() { JSONObject accident = new JSONObject(); try { accident.put("id", 0); accident.put("lat", initialLocation.getLatitude()); accident.put("lon", initialLocation.getLongitude()); accident.put("owner_id", Preferences.getUserId()); accident.put("owner", Preferences.getUserName()); accident.put("status", AccidentStatus.ACTIVE.toCode()); accident.put("uxtime", String.valueOf(System.currentTimeMillis() / 1000L)); accident.put("address", ""); accident.put("descr", ""); accident.put("type", Type.OTHER.toCode()); accident.put("med", Medicine.UNKNOWN.toCode()); accident.put("m", new JSONArray("[]")); accident.put("h", new JSONArray("[]")); accident.put("v", new JSONArray("[]")); } catch (JSONException e) { e.printStackTrace(); } return new Accident(this, accident); } private GoogleMap makeMap() { FragmentManager fragmentManager = this.getSupportFragmentManager(); SupportMapFragment mapFragment = (SupportMapFragment) fragmentManager.findFragmentById(R.id.mc_create_map_container); GoogleMap map = mapFragment.getMap(); 
map.animateCamera(CameraUpdateFactory.newLatLngZoom(MyUtils.LocationToLatLng(accident.getLocation()), 16)); map.getUiSettings().setMyLocationButtonEnabled(true); map.getUiSettings().setZoomControlsEnabled(true); if (!Role.isModerator()) { CircleOptions circleOptions = new CircleOptions().center(MyUtils.LocationToLatLng(initialLocation)).radius(RADIUS).fillColor(0x20FF0000); map.addCircle(circleOptions); map.setOnCameraChangeListener(new GoogleMap.OnCameraChangeListener() { @Override public void onCameraChange(CameraPosition camera) { Button mcCreateFineAddressConfirm = (Button) findViewById(R.id.ADDRESS); if (initialLocation != null) { double distance = MyUtils.LatLngToLocation(camera.target).distanceTo(initialLocation); if (distance > RADIUS) { mcCreateFineAddressConfirm.setEnabled(false); } else { mcCreateFineAddressConfirm.setEnabled(true); } } else { mcCreateFineAddressConfirm.setEnabled(false); } } }); } map.clear(); for (int id : Content.getIds()) { motocitizen.accident.Accident point = Content.get(id); if (point.isInvisible()) continue; String title = point.getType().toString(); if (point.getMedicine() != Medicine.NO) { title += ", " + point.getMedicine().toString(); } title += ", " + MyUtils.getIntervalFromNowInText(point.getTime()) + " назад"; float alpha; int age = (int) (((new Date()).getTime() - point.getTime().getTime()) / 3600000); if (age < 2) { alpha = 1.0f; } else if (age < 6) { alpha = 0.5f; } else { alpha = 0.2f; } map.addMarker(new MarkerOptions().position(new LatLng(point.getLat(), point.getLon())).title(title).icon(Resources.getMapBitmapDescriptor(point.getType())).alpha(alpha)); } return map; } public void setComplete() { this.complete = accident.isAccident() || accident.getDescription().length() > 6; setConfirm(isComplete()); } private void setUpScreen(int id) { hideAll(); findViewById(id).setVisibility(View.VISIBLE); currentScreen = id; if (id == MAP) { findViewById(R.id.BACK).setEnabled(false); } else { 
findViewById(R.id.BACK).setEnabled(true); } } private void refreshDescription() { if (accident.getMedicine() == Medicine.UNKNOWN) { ((TextView) findViewById(R.id.mc_create_what)).setText(accident.getType().toString()); } else { ((TextView) findViewById(R.id.mc_create_what)).setText(accident.getType().toString() + ". " + accident.getMedicine().toString()); } ((TextView) findViewById(R.id.mc_create_who)).setText(Content.auth.getLogin()); ((TextView) findViewById(R.id.mc_create_where)).setText(accident.getAddress()); ((TextView) findViewById(R.id.mc_create_when)).setText(Const.DATE_FORMAT.format(accident.getTime())); } private void setupListener() { Integer[] ids = {R.id.BREAK, R.id.STEAL, R.id.OTHER, R.id.ACCIDENT, R.id.MOTO_AUTO, R.id.SOLO, R.id.MOTO_MOTO, R.id.MOTO_MAN, R.id.PEOPLE_OK, R.id.PEOPLE_LIGHT, R.id.PEOPLE_HEAVY, R.id.PEOPLE_LETHAL, R.id.PEOPLE_UNKNOWN, R.id.ADDRESS, R.id.CREATE, R.id.CANCEL, R.id.BACK}; for (int id : ids) findViewById(id).setOnClickListener(this); } private void setConfirm(Boolean status) { if (!confirmLock) confirmButton.setEnabled(status); } public boolean isComplete() { return complete; } private void hideAll() { Integer[] ids = {TYPE, MAP, MEDICINE, DESCRIPTION, ACCIDENT}; for (int id : ids) findViewById(id).setVisibility(View.INVISIBLE); } @Override public void onClick(View v) { int id = v.getId(); switch (id) { case R.id.BREAK: case R.id.STEAL: case R.id.OTHER: accident.setType(getSelectedType(id)); setUpScreen(DESCRIPTION); break; case R.id.MOTO_AUTO: case R.id.SOLO: case R.id.MOTO_MOTO: case R.id.MOTO_MAN: accident.setType(getSelectedType(id)); setUpScreen(MEDICINE); break; case R.id.ACCIDENT: setUpScreen(ACCIDENT); break; case R.id.PEOPLE_OK: case R.id.PEOPLE_LIGHT: case R.id.PEOPLE_HEAVY: case R.id.PEOPLE_LETHAL: case R.id.PEOPLE_UNKNOWN: accident.setMedicine(getSelectedMedicine(id)); setUpScreen(DESCRIPTION); setComplete(); break; case R.id.ADDRESS: accident.setLatLng(map.getCameraPosition().target); 
accident.setAddress(((MyApp) this.getApplicationContext()).getAddres(accident.getLocation())); setUpScreen(TYPE); break; case R.id.CREATE: confirm(); break; case R.id.CANCEL: //TODO Добавить подтверждение finish(); break; case R.id.BACK: backButton(); break; } refreshDescription(); } private Type getSelectedType(int id) { switch (id) { case R.id.BREAK: return Type.BREAK; case R.id.STEAL: return Type.STEAL; case R.id.MOTO_AUTO: return Type.MOTO_AUTO; case R.id.SOLO: return Type.SOLO; case R.id.MOTO_MOTO: return Type.MOTO_MOTO; case R.id.MOTO_MAN: return Type.MOTO_MAN; case R.id.OTHER: default: return Type.OTHER; } } private Medicine getSelectedMedicine(int id) { switch (id) { case R.id.PEOPLE_OK: return Medicine.NO; case R.id.PEOPLE_LIGHT: return Medicine.LIGHT; case R.id.PEOPLE_HEAVY: return Medicine.HEAVY; case R.id.PEOPLE_LETHAL: return Medicine.LETHAL; case R.id.PEOPLE_UNKNOWN: default: return Medicine.UNKNOWN; } } private void confirm() { disableConfirm(); CreateAccidentRequest request = new CreateAccidentRequest(new CreateAccidentCallback(), this); request.setType(accident.getType()); request.setMed(accident.getMedicine()); request.setAddress(accident.getAddress()); request.setLocation(accident.getLocation()); request.setDescription(accident.getDescription()); request.setCreated(accident.getTime()); request.execute(); } private void backButton() { switch (currentScreen) { case MAP: finish(); break; case MEDICINE: setUpScreen(ACCIDENT); break; case ACCIDENT: setUpScreen(TYPE); break; case TYPE: setUpScreen(MAP); break; case DESCRIPTION: if (accident.isAccident()) { setUpScreen(MEDICINE); } else { setUpScreen(TYPE); } break; } setInComplete(); refreshDescription(); } private void disableConfirm() { setConfirm(false); confirmLock = true; } public void setInComplete() { this.complete = false; setConfirm(false); } @Override public boolean onKeyUp(int keycode, @NonNull KeyEvent e) { switch (keycode) { case KeyEvent.KEYCODE_BACK: backButton(); return true; } return 
super.onKeyUp(keycode, e); } private void enableConfirm() { confirmLock = false; setConfirm(true); } private void message(String text) { Toast.makeText(this, text, Toast.LENGTH_LONG).show(); } private class CreateAccidentCallback implements AsyncTaskCompleteListener { @Override public void onTaskComplete(JSONObject result) { if (result.has("error")) { try { message(result.getString("error")); } catch (JSONException e) { message("Неизвестная ошибка" + result.toString()); e.printStackTrace(); } } else { finish(); } enableConfirm(); } } }
https://github.com/rjhdby/motocitizen/issues/394
Motocitizen/src/motocitizen/Activity/CreateAccActivity.java
https://github.com/rjhdby/motocitizen/issues/394
<ide><path>otocitizen/src/motocitizen/Activity/CreateAccActivity.java <ide> GoogleMap map = mapFragment.getMap(); <ide> <ide> map.animateCamera(CameraUpdateFactory.newLatLngZoom(MyUtils.LocationToLatLng(accident.getLocation()), 16)); <add> map.setMyLocationEnabled(true); <ide> map.getUiSettings().setMyLocationButtonEnabled(true); <ide> map.getUiSettings().setZoomControlsEnabled(true); <ide> if (!Role.isModerator()) {
Java
apache-2.0
error: pathspec 'goci-interfaces/goci-curation/src/test/java/uk/ac/ebi/spot/goci/curation/builder/LocusBuilder.java' did not match any file(s) known to git
97c7cb9a275be906bce23162e912a4cabf69b73e
1
EBISPOT/goci,EBISPOT/goci,EBISPOT/goci,EBISPOT/goci,EBISPOT/goci,EBISPOT/goci
package uk.ac.ebi.spot.goci.curation.builder; import uk.ac.ebi.spot.goci.model.Gene; import uk.ac.ebi.spot.goci.model.Locus; import uk.ac.ebi.spot.goci.model.RiskAllele; import java.util.Collection; /** * Created by emma on 07/04/2016. * * @author emma * <p> * Builder for a locus object */ public class LocusBuilder { private Locus locus = new Locus(); public LocusBuilder setId(Long id) { locus.setId(id); return this; } public LocusBuilder setHaplotypeSnpCount(Integer haplotypeSnpCount) { locus.setHaplotypeSnpCount(haplotypeSnpCount); return this; } public LocusBuilder setDescription(String description) { locus.setDescription(description); return this; } public LocusBuilder setStrongestRiskAlleles(Collection<RiskAllele> strongestRiskAlleles) { locus.setStrongestRiskAlleles(strongestRiskAlleles); return this; } public LocusBuilder setAuthorReportedGenes(Collection<Gene> authorReportedGenes) { locus.setAuthorReportedGenes(authorReportedGenes); return this; } public Locus build() { return locus; } }
goci-interfaces/goci-curation/src/test/java/uk/ac/ebi/spot/goci/curation/builder/LocusBuilder.java
New builder for a Locus object used during testing
goci-interfaces/goci-curation/src/test/java/uk/ac/ebi/spot/goci/curation/builder/LocusBuilder.java
New builder for a Locus object used during testing
<ide><path>oci-interfaces/goci-curation/src/test/java/uk/ac/ebi/spot/goci/curation/builder/LocusBuilder.java <add>package uk.ac.ebi.spot.goci.curation.builder; <add> <add>import uk.ac.ebi.spot.goci.model.Gene; <add>import uk.ac.ebi.spot.goci.model.Locus; <add>import uk.ac.ebi.spot.goci.model.RiskAllele; <add> <add>import java.util.Collection; <add> <add>/** <add> * Created by emma on 07/04/2016. <add> * <add> * @author emma <add> * <p> <add> * Builder for a locus object <add> */ <add>public class LocusBuilder { <add> <add> private Locus locus = new Locus(); <add> <add> public LocusBuilder setId(Long id) { <add> locus.setId(id); <add> return this; <add> } <add> <add> public LocusBuilder setHaplotypeSnpCount(Integer haplotypeSnpCount) { <add> locus.setHaplotypeSnpCount(haplotypeSnpCount); <add> return this; <add> } <add> <add> public LocusBuilder setDescription(String description) { <add> locus.setDescription(description); <add> return this; <add> } <add> <add> public LocusBuilder setStrongestRiskAlleles(Collection<RiskAllele> strongestRiskAlleles) { <add> locus.setStrongestRiskAlleles(strongestRiskAlleles); <add> return this; <add> } <add> <add> public LocusBuilder setAuthorReportedGenes(Collection<Gene> authorReportedGenes) { <add> locus.setAuthorReportedGenes(authorReportedGenes); <add> return this; <add> } <add> <add> public Locus build() { <add> return locus; <add> } <add>}
Java
mit
2ba60e7eb28f4b8b570fd98ab832f0079da41fa1
0
om3g4zell/CityBuilderJSFML
package sim; import java.util.ArrayList; import java.util.Map; import org.jsfml.graphics.Color; import org.jsfml.graphics.RenderWindow; import org.jsfml.system.Time; import org.jsfml.system.Vector2f; import org.jsfml.system.Vector2i; import org.jsfml.window.VideoMode; import graphics.Tile.TileType; import graphics.BuildingProjector; import graphics.Tile; import graphics.TileMap; import world.Building; import world.Building.BuildingType; import world.ResourcesMap; /* * Simulation class. * Contains init, update and render. */ public class Sim { // Constants. protected static final Vector2i TILEMAP_SIZE = new Vector2i(80, 45); protected static final Vector2f TILE_SIZE = new Vector2f(16.f, 16.f); // Attributes. protected RenderWindow window; protected TileMap tilemap; protected ArrayList<ArrayList<Tile>> tiles; protected ResourcesMap resourcesMap; protected ArrayList<Building> buildings; // Constructor public Sim(int width, int height, String title) { this.window = new RenderWindow(new VideoMode(width, height), title); } // Initialization public void init() { // Inits the tiles array. this.tiles = new ArrayList<ArrayList<Tile>>(); for(int i = 0 ; i < TILEMAP_SIZE.y ; ++i) { ArrayList<Tile> row = new ArrayList<Tile>(); for(int j = 0 ; j < TILEMAP_SIZE.x ; ++j) { row.add(new Tile(TileType.TERRAIN_GRASS, new Vector2i(j, i))); } this.tiles.add(row); } // Create the resources map. this.resourcesMap = new ResourcesMap(TILEMAP_SIZE); // Create the buildings list. this.buildings = new ArrayList<Building>(); // Houses. 
this.buildings.add(new Building(BuildingType.HOUSE, new Vector2i(31, 20))); this.buildings.add(new Building(BuildingType.HOUSE, new Vector2i(33, 20))); this.buildings.add(new Building(BuildingType.HOUSE, new Vector2i(35, 20))); this.buildings.add(new Building(BuildingType.HOUSE, new Vector2i(37, 20))); this.buildings.add(new Building(BuildingType.HOUSE, new Vector2i(31, 23))); this.buildings.add(new Building(BuildingType.HOUSE, new Vector2i(33, 23))); this.buildings.add(new Building(BuildingType.HOUSE, new Vector2i(35, 23))); this.buildings.add(new Building(BuildingType.HOUSE, new Vector2i(37, 23))); // Generator. this.buildings.add(new Building(BuildingType.GENERATOR, new Vector2i(39, 21))); // Water station. this.buildings.add(new Building(BuildingType.HYDROLIC_STATION, new Vector2i(39, 23))); // Road. this.buildings.add(new Building(BuildingType.ROAD, new Vector2i(31, 22))); this.buildings.add(new Building(BuildingType.ROAD, new Vector2i(32, 22))); this.buildings.add(new Building(BuildingType.ROAD, new Vector2i(33, 22))); this.buildings.add(new Building(BuildingType.ROAD, new Vector2i(34, 22))); this.buildings.add(new Building(BuildingType.ROAD, new Vector2i(35, 22))); this.buildings.add(new Building(BuildingType.ROAD, new Vector2i(36, 22))); this.buildings.add(new Building(BuildingType.ROAD, new Vector2i(37, 22))); this.buildings.add(new Building(BuildingType.ROAD, new Vector2i(38, 22))); this.buildings.add(new Building(BuildingType.ROAD, new Vector2i(39, 22))); // Grossery store this.buildings.add(new Building(BuildingType.GROCERY_STORE, new Vector2i(40, 21))); // Inits the tilemap. 
this.tilemap = new TileMap(TILEMAP_SIZE, TILE_SIZE); this.tilemap.addTypeColor(TileType.TERRAIN_GRASS, new Color(0, 70, 0)); this.tilemap.addTypeColor(TileType.BUILDING_HOUSE, new Color(70, 0, 0)); this.tilemap.addTypeColor(TileType.BUILDING_ROAD, new Color(190, 190, 190)); this.tilemap.addTypeColor(TileType.BUILDING_GENERATOR, new Color(227, 168, 87)); this.tilemap.addTypeColor(TileType.BUILDING_HYDROLIC_STATION, new Color(51, 153, 255)); this.tilemap.addTypeColor(TileType.BUILDING_SUPERMARKET, new Color(194, 195, 98)); this.tilemap.setTiles(this.tiles); } // Updates all the simulation. public void update(Time dt) { System.out.println("Update:"); // Reset the resources. this.resourcesMap.reset(); // Generate resources. for(Building b : this.buildings) { b.generateResources(this.resourcesMap); } // Consume resources. for(Building b : this.buildings) { BuildingType requiredBuilding = b.consumeResources(this.resourcesMap); System.out.println("\t" + b.getType().toString() + " wants to build " + requiredBuilding.toString()); } // Project buildings on the tilemap. BuildingProjector.project(this.buildings, this.tilemap); // Update the tilemap. this.tilemap.update(); } // Renders all the simulation. public void render() { this.window.clear(Color.WHITE); ///////////// this.window.draw(tilemap); ///////////// this.window.display(); } // Returns the window. public RenderWindow getWindow() { return this.window; } }
CityBuilderJSFML/src/sim/Sim.java
package sim; import java.util.ArrayList; import java.util.Map; import org.jsfml.graphics.Color; import org.jsfml.graphics.RenderWindow; import org.jsfml.system.Time; import org.jsfml.system.Vector2f; import org.jsfml.system.Vector2i; import org.jsfml.window.VideoMode; import graphics.Tile.TileType; import graphics.BuildingProjector; import graphics.Tile; import graphics.TileMap; import world.Building; import world.Building.BuildingType; import world.ResourcesMap; /* * Simulation class. * Contains init, update and render. */ public class Sim { // Constants. protected static final Vector2i TILEMAP_SIZE = new Vector2i(80, 45); protected static final Vector2f TILE_SIZE = new Vector2f(16.f, 16.f); // Attributes. protected RenderWindow window; protected TileMap tilemap; protected ArrayList<ArrayList<Tile>> tiles; protected ResourcesMap resourcesMap; protected ArrayList<Building> buildings; // Constructor public Sim(int width, int height, String title) { this.window = new RenderWindow(new VideoMode(width, height), title); } // Initialization public void init() { // Inits the tiles array. this.tiles = new ArrayList<ArrayList<Tile>>(); for(int i = 0 ; i < TILEMAP_SIZE.y ; ++i) { ArrayList<Tile> row = new ArrayList<Tile>(); for(int j = 0 ; j < TILEMAP_SIZE.x ; ++j) { row.add(new Tile(TileType.TERRAIN_GRASS, new Vector2i(j, i))); } this.tiles.add(row); } // Create the resources map. this.resourcesMap = new ResourcesMap(TILEMAP_SIZE); // Create the buildings list. this.buildings = new ArrayList<Building>(); // Houses. 
this.buildings.add(new Building(BuildingType.HOUSE, new Vector2i(31, 20))); this.buildings.add(new Building(BuildingType.HOUSE, new Vector2i(33, 20))); this.buildings.add(new Building(BuildingType.HOUSE, new Vector2i(35, 20))); this.buildings.add(new Building(BuildingType.HOUSE, new Vector2i(37, 20))); this.buildings.add(new Building(BuildingType.HOUSE, new Vector2i(31, 23))); this.buildings.add(new Building(BuildingType.HOUSE, new Vector2i(33, 23))); this.buildings.add(new Building(BuildingType.HOUSE, new Vector2i(35, 23))); this.buildings.add(new Building(BuildingType.HOUSE, new Vector2i(37, 23))); // Generator. this.buildings.add(new Building(BuildingType.GENERATOR, new Vector2i(39, 21))); // Water station. this.buildings.add(new Building(BuildingType.HYDROLIC_STATION, new Vector2i(39, 23))); // Road. this.buildings.add(new Building(BuildingType.ROAD, new Vector2i(31, 22))); this.buildings.add(new Building(BuildingType.ROAD, new Vector2i(32, 22))); this.buildings.add(new Building(BuildingType.ROAD, new Vector2i(33, 22))); this.buildings.add(new Building(BuildingType.ROAD, new Vector2i(34, 22))); this.buildings.add(new Building(BuildingType.ROAD, new Vector2i(35, 22))); this.buildings.add(new Building(BuildingType.ROAD, new Vector2i(36, 22))); this.buildings.add(new Building(BuildingType.ROAD, new Vector2i(37, 22))); this.buildings.add(new Building(BuildingType.ROAD, new Vector2i(38, 22))); this.buildings.add(new Building(BuildingType.ROAD, new Vector2i(39, 22))); // Inits the tilemap. 
this.tilemap = new TileMap(TILEMAP_SIZE, TILE_SIZE); this.tilemap.addTypeColor(TileType.TERRAIN_GRASS, new Color(0, 70, 0)); this.tilemap.addTypeColor(TileType.BUILDING_HOUSE, new Color(70, 0, 0)); this.tilemap.addTypeColor(TileType.BUILDING_ROAD, new Color(190, 190, 190)); this.tilemap.addTypeColor(TileType.BUILDING_GENERATOR, new Color(227, 168, 87)); this.tilemap.addTypeColor(TileType.BUILDING_HYDROLIC_STATION, new Color(51, 153, 255)); this.tilemap.addTypeColor(TileType.BUILDING_SUPERMARKET, new Color(194, 195, 98)); this.tilemap.setTiles(this.tiles); } // Updates all the simulation. public void update(Time dt) { System.out.println("Update:"); // Reset the resources. this.resourcesMap.reset(); // Generate resources. for(Building b : this.buildings) { b.generateResources(this.resourcesMap); } // Consume resources. for(Building b : this.buildings) { BuildingType requiredBuilding = b.consumeResources(this.resourcesMap); System.out.println("\t" + b.getType().toString() + " wants to build " + requiredBuilding.toString()); } // Project buildings on the tilemap. BuildingProjector.project(this.buildings, this.tilemap); // Update the tilemap. this.tilemap.update(); } // Renders all the simulation. public void render() { this.window.clear(Color.WHITE); ///////////// this.window.draw(tilemap); ///////////// this.window.display(); } // Returns the window. public RenderWindow getWindow() { return this.window; } }
Ajout du grossery store
CityBuilderJSFML/src/sim/Sim.java
Ajout du grossery store
<ide><path>ityBuilderJSFML/src/sim/Sim.java <ide> this.buildings.add(new Building(BuildingType.ROAD, new Vector2i(38, 22))); <ide> this.buildings.add(new Building(BuildingType.ROAD, new Vector2i(39, 22))); <ide> <add> // Grossery store <add> this.buildings.add(new Building(BuildingType.GROCERY_STORE, new Vector2i(40, 21))); <add> <ide> // Inits the tilemap. <ide> this.tilemap = new TileMap(TILEMAP_SIZE, TILE_SIZE); <ide> this.tilemap.addTypeColor(TileType.TERRAIN_GRASS, new Color(0, 70, 0));
Java
agpl-3.0
a8e41a7f0a9e591744dcb1d2d8d36d9485e69c05
0
buhe/judge,DMOJ/judge,DMOJ/judge,buhe/judge,buhe/judge,buhe/judge,buhe/judge,DMOJ/judge,buhe/judge
import java.io.*; import java.lang.management.ManagementFactory; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.lang.reflect.Modifier; import java.net.MalformedURLException; import java.net.URL; import java.net.URLClassLoader; import java.security.AccessControlException; import java.security.Permission; import java.util.PropertyPermission; import java.util.Scanner; public class JavaSafeExecutor { private static ThreadDeath TLE = new ThreadDeath(); private static int INVOCATION_ERROR_CODE = -1000; private static int ACCESS_ERROR_CODE = -1001; private static int NO_ENTRY_POINT_ERROR_CODE = -1002; private static int PROGRAM_ERROR_CODE = -1; private static ShockerThread shockerThread; private static ProcessExecutionThread submissionThread; private static boolean _safeBlock = false; static { new Scanner(new ByteArrayInputStream(new byte[128])).close(); // Load locale } public static void main(String[] argv) throws MalformedURLException, ClassNotFoundException, UnsupportedEncodingException { String path = argv[0]; String classname = argv[1]; int TL = Integer.parseInt(argv[2]); System.setOut(new UnsafePrintStream(new FileOutputStream(java.io.FileDescriptor.out))); URLClassLoader classLoader = URLClassLoader.newInstance(new URL[]{new File(path).toURI().toURL()}); Class program = classLoader.loadClass(classname); submissionThread = new ProcessExecutionThread(program); // Count runtime loading as part of time used // Note that time here might be negative if RT loading time was greater than TL // Oh well. 
TL -= ManagementFactory.getRuntimeMXBean().getUptime(); shockerThread = new ShockerThread(TL, submissionThread); System.setSecurityManager(new _SecurityManager()); shockerThread.start(); submissionThread.start(); try { submissionThread.join(); } catch (InterruptedException ignored) { } _safeBlock = true; shockerThread.stop(); System.out.flush(); long totalProgramTime = ManagementFactory.getRuntimeMXBean().getUptime(); boolean tle = submissionThread.tle; long mem = -1; try { BufferedReader in = new BufferedReader(new InputStreamReader(new FileInputStream(new File("/proc/self/status")))); for (String line; (line = in.readLine()) != null; ) { if (line.startsWith("VmHWM:")) { String[] data = line.split("\\s+"); mem = Integer.parseInt(data[1]); } } } catch (Exception ignored) { } boolean mle = submissionThread.mle; int error = submissionThread.error; System.err.println(); System.err.printf("%d %d %d %d %d\n", totalProgramTime, tle ? 1 : 0, mem, mle ? 1 : 0, error); } public static class _SecurityManager extends SecurityManager { @Override public void checkPermission(Permission perm) { if (perm instanceof RuntimePermission) { if (perm.getName().equals("writeFileDescriptor") || perm.getName().equals("readFileDescriptor")) return; } if (perm instanceof PropertyPermission) { if (perm.getActions().contains("write")) throw new AccessControlException(perm.getClass() + " - " + perm.getName() + ": " + perm.getActions(), perm); return; } if (!_safeBlock) { throw new AccessControlException(perm.getClass() + " - " + perm.getName() + ": " + perm.getActions(), perm); } } } public static class ShockerThread extends Thread { private final long timelimit; private final Thread target; public ShockerThread(long timelimit, Thread target) { this.timelimit = timelimit; this.target = target; } @Override public void run() { try { Thread.sleep(timelimit); _safeBlock = true; target.stop(TLE); } catch (InterruptedException ignored) { } } } public static class ProcessExecutionThread extends Thread 
{ private final Class process; private boolean tle = false; private boolean mle = false; private int error = 0; public ProcessExecutionThread(Class process) { this.process = process; } @Override public void run() { Method handle; try { handle = process.getMethod("main", String[].class); if (!Modifier.isStatic(handle.getModifiers())) System.exit(-10); try { handle.invoke(null, new Object[]{new String[0]}); } catch (InvocationTargetException e) { if (e.getCause() == TLE) { tle = true; return; } else if (e.getCause() instanceof OutOfMemoryError) { mle = true; return; } else { e.getCause().printStackTrace(); error = INVOCATION_ERROR_CODE; } } catch (IllegalAccessException e) { e.printStackTrace(); error = ACCESS_ERROR_CODE; } catch (Throwable throwable) { error = PROGRAM_ERROR_CODE; } } catch (NoSuchMethodException e) { e.printStackTrace(); error = NO_ENTRY_POINT_ERROR_CODE; } _safeBlock = true; shockerThread.stop(); } } public static class UnsafePrintStream extends PrintStream { private BufferedWriter acc; public UnsafePrintStream(OutputStream out) throws UnsupportedEncodingException { super(new ByteArrayOutputStream()); acc = new BufferedWriter(new OutputStreamWriter(out, "ASCII"), 4096); } @Override public void flush() { super.flush(); try { acc.flush(); } catch (IOException e) { e.printStackTrace(); } } public void write(int b) { try { acc.write(b); } catch (IOException e) { e.printStackTrace(); } } public void write(byte buf[], int off, int len) { super.write(buf, off, len); // TODO } private void write(char buf[]) { try { acc.write(buf); } catch (IOException e) { e.printStackTrace(); } } private void write(String s) { try { acc.write(s); } catch (IOException e) { e.printStackTrace(); } } private void newLine() { try { acc.write('\n'); } catch (IOException e) { e.printStackTrace(); } } public void print(boolean b) { write(b ? 
"true" : "false"); } public void print(char c) { write(String.valueOf(c)); } public void print(int i) { write(String.valueOf(i)); } public void print(long l) { write(String.valueOf(l)); } public void print(float f) { write(String.valueOf(f)); } public void print(double d) { write(String.valueOf(d)); } public void print(char s[]) { write(s); } public void print(String s) { write(s == null ? "null" : s); } public void print(Object obj) { write(String.valueOf(obj)); } public void println() { newLine(); } public void println(boolean x) { print(x); newLine(); } public void println(char x) { print(x); newLine(); } public void println(int x) { print(x); newLine(); } public void println(long x) { print(x); newLine(); } public void println(float x) { print(x); newLine(); } public void println(double x) { print(x); newLine(); } public void println(char x[]) { print(x); newLine(); } public void println(String x) { print(x); newLine(); } public void println(Object x) { print(String.valueOf(x)); newLine(); } } }
java_executor/JavaSafeExecutor.java
import java.io.*; import java.lang.management.ManagementFactory; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.lang.reflect.Modifier; import java.net.MalformedURLException; import java.net.URL; import java.net.URLClassLoader; import java.security.AccessControlException; import java.security.Permission; import java.util.PropertyPermission; import java.util.Scanner; public class JavaSafeExecutor { private static ThreadDeath TLE = new ThreadDeath(); private static int INVOCATION_ERROR_CODE = -1000; private static int ACCESS_ERROR_CODE = -1001; private static int NO_ENTRY_POINT_ERROR_CODE = -1002; private static int PROGRAM_ERROR_CODE = -1; private static ShockerThread shockerThread; private static ProcessExecutionThread submissionThread; private static boolean _safeBlock = false; static { new Scanner(new ByteArrayInputStream(new byte[128])).close(); // Load locale } public static void main(String[] argv) throws MalformedURLException, ClassNotFoundException, UnsupportedEncodingException { String path = argv[0]; String classname = argv[1]; int TL = Integer.parseInt(argv[2]); System.setOut(new UnsafePrintStream(new FileOutputStream(java.io.FileDescriptor.out))); URLClassLoader classLoader = URLClassLoader.newInstance(new URL[]{new File(path).toURI().toURL()}); Class program = classLoader.loadClass(classname); submissionThread = new ProcessExecutionThread(program); // Count runtime loading as part of time used // Note that time here might be negative if RT loading time was greater than TL // Oh well. 
TL -= ManagementFactory.getRuntimeMXBean().getUptime(); shockerThread = new ShockerThread(TL, submissionThread); System.setSecurityManager(new _SecurityManager()); shockerThread.start(); submissionThread.start(); try { submissionThread.join(); } catch (InterruptedException ignored) { } _safeBlock = true; shockerThread.stop(); System.out.flush(); long totalProgramTime = ManagementFactory.getRuntimeMXBean().getUptime(); boolean tle = submissionThread.tle; long mem = -1; try { BufferedReader in = new BufferedReader(new InputStreamReader(new FileInputStream(new File("/proc/self/status")))); for (String line; (line = in.readLine()) != null; ) { if (line.startsWith("VmHWM:")) { String[] data = line.split("\\s+"); mem = Integer.parseInt(data[1]); } } } catch (Exception ignored) { } boolean mle = submissionThread.mle; int error = submissionThread.error; System.err.println(); System.err.printf("%d %d %d %d %d\n", totalProgramTime, tle ? 1 : 0, mem, mle ? 1 : 0, error); } public static class _SecurityManager extends SecurityManager { @Override public void checkPermission(Permission perm) { if (perm instanceof RuntimePermission) { if (perm.getName().equals("writeFileDescriptor")) return; } if (perm instanceof PropertyPermission) { if (perm.getActions().contains("write")) throw new AccessControlException(perm.getClass() + " - " + perm.getName() + ": " + perm.getActions(), perm); return; } if (!_safeBlock) { throw new AccessControlException(perm.getClass() + " - " + perm.getName() + ": " + perm.getActions(), perm); } } } public static class ShockerThread extends Thread { private final long timelimit; private final Thread target; public ShockerThread(long timelimit, Thread target) { this.timelimit = timelimit; this.target = target; } @Override public void run() { try { Thread.sleep(timelimit); _safeBlock = true; target.stop(TLE); } catch (InterruptedException ignored) { } } } public static class ProcessExecutionThread extends Thread { private final Class process; private boolean 
tle = false; private boolean mle = false; private int error = 0; public ProcessExecutionThread(Class process) { this.process = process; } @Override public void run() { Method handle; try { handle = process.getMethod("main", String[].class); if (!Modifier.isStatic(handle.getModifiers())) System.exit(-10); try { handle.invoke(null, new Object[]{new String[0]}); } catch (InvocationTargetException e) { if (e.getCause() == TLE) { tle = true; return; } else if (e.getCause() instanceof OutOfMemoryError) { mle = true; return; } else { e.getCause().printStackTrace(); error = INVOCATION_ERROR_CODE; } } catch (IllegalAccessException e) { e.printStackTrace(); error = ACCESS_ERROR_CODE; } catch (Throwable throwable) { error = PROGRAM_ERROR_CODE; } } catch (NoSuchMethodException e) { e.printStackTrace(); error = NO_ENTRY_POINT_ERROR_CODE; } _safeBlock = true; shockerThread.stop(); } } public static class UnsafePrintStream extends PrintStream { private BufferedWriter acc; public UnsafePrintStream(OutputStream out) throws UnsupportedEncodingException { super(new ByteArrayOutputStream()); acc = new BufferedWriter(new OutputStreamWriter(out, "ASCII"), 4096); } @Override public void flush() { super.flush(); try { acc.flush(); } catch (IOException e) { e.printStackTrace(); } } public void write(int b) { try { acc.write(b); } catch (IOException e) { e.printStackTrace(); } } public void write(byte buf[], int off, int len) { super.write(buf, off, len); // TODO } private void write(char buf[]) { try { acc.write(buf); } catch (IOException e) { e.printStackTrace(); } } private void write(String s) { try { acc.write(s); } catch (IOException e) { e.printStackTrace(); } } private void newLine() { try { acc.write('\n'); } catch (IOException e) { e.printStackTrace(); } } public void print(boolean b) { write(b ? 
"true" : "false"); } public void print(char c) { write(String.valueOf(c)); } public void print(int i) { write(String.valueOf(i)); } public void print(long l) { write(String.valueOf(l)); } public void print(float f) { write(String.valueOf(f)); } public void print(double d) { write(String.valueOf(d)); } public void print(char s[]) { write(s); } public void print(String s) { write(s == null ? "null" : s); } public void print(Object obj) { write(String.valueOf(obj)); } public void println() { newLine(); } public void println(boolean x) { print(x); newLine(); } public void println(char x) { print(x); newLine(); } public void println(int x) { print(x); newLine(); } public void println(long x) { print(x); newLine(); } public void println(float x) { print(x); newLine(); } public void println(double x) { print(x); newLine(); } public void println(char x[]) { print(x); newLine(); } public void println(String x) { print(x); newLine(); } public void println(Object x) { print(String.valueOf(x)); newLine(); } } }
Allow reading of FD
java_executor/JavaSafeExecutor.java
Allow reading of FD
<ide><path>ava_executor/JavaSafeExecutor.java <ide> @Override <ide> public void checkPermission(Permission perm) { <ide> if (perm instanceof RuntimePermission) { <del> if (perm.getName().equals("writeFileDescriptor")) <add> if (perm.getName().equals("writeFileDescriptor") || perm.getName().equals("readFileDescriptor")) <ide> return; <ide> } <ide> if (perm instanceof PropertyPermission) {
Java
epl-1.0
b6dd3caf096120cfdd1104dc38987a48d7da39c9
0
sibvisions/javafx.DndTabPane
/******************************************************************************* * Copyright (c) 2014 BestSolution.at and others. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * Tom Schindl<[email protected]> - initial API and implementation *******************************************************************************/ package org.eclipse.fx.ui.controls.tabpane; import java.util.function.Consumer; import java.util.function.Function; import javafx.geometry.BoundingBox; import javafx.geometry.Bounds; import javafx.scene.Node; import javafx.scene.control.Tab; import javafx.scene.control.TabPane; import javafx.scene.input.DragEvent; import javafx.scene.layout.Pane; import javafx.scene.layout.StackPane; import org.eclipse.fx.ui.controls.markers.PositionMarker; import org.eclipse.fx.ui.controls.markers.TabOutlineMarker; import org.eclipse.fx.ui.controls.tabpane.skin.DnDTabPaneSkin; /** * Factory to create a tab pane who support DnD */ public final class DndTabPaneFactory { private static MarkerFeedback CURRENT_FEEDBACK; // private static Map<TabSerializationStrategy<?>, Boolean> SERIALIZERS = new WeakHashMap<>(); private DndTabPaneFactory() { } // public static final class TabSerializationStrategy<O> { // private final Function<Tab, String> serializationFunction; // private final Function<String, O> deserializationFunction; // final String prefix = UUID.randomUUID().toString(); // // public TabSerializationStrategy(Function<Tab, String> serializationFunction, Function<String, O> deserializationFunction) { // this.serializationFunction = serializationFunction; // this.deserializationFunction = deserializationFunction; // } // // public final String toString(Tab tab) { // return this.prefix + "#" + this.serializationFunction.apply(tab); //$NON-NLS-1$ // } // // 
public final O toData(String data) { // return deserializationFunction.apply(data.substring(prefix.length() + 1)); // } // } // // public static <O> TabSerializationStrategy<O> register(Function<Tab, String> serializationFunction, Function<String, O> deserializationFunction) { // TabSerializationStrategy<O> t = new TabSerializationStrategy<O>(serializationFunction, deserializationFunction); // SERIALIZERS.put(t, Boolean.TRUE); // return t; // } /** * Create a tab pane and set the drag strategy * * @param setup * the setup instance for the pane * @return the tab pane */ public static TabPane createDndTabPane(Consumer<DragSetup> setup) { return new DndTabPane() { @Override protected javafx.scene.control.Skin<?> createDefaultSkin() { DnDTabPaneSkin skin = new DnDTabPaneSkin(this); setup.accept(skin); return skin; } }; } /** * Create a tab pane with a default setup for drag feedback * * @param feedbackType * the feedback type * @param setup * consumer to set up the tab pane * @return a pane containing the TabPane */ public static Pane createDefaultDnDPane(FeedbackType feedbackType, Consumer<TabPane> setup) { StackPane pane = new StackPane(); TabPane tabPane = new DndTabPane() { @Override protected javafx.scene.control.Skin<?> createDefaultSkin() { DnDTabPaneSkin skin = new DnDTabPaneSkin(this); setup(feedbackType, pane, skin); return skin; } }; if (setup != null) { setup.accept(tabPane); } pane.getChildren().add(tabPane); return pane; } /** * Extract the tab content * * @param e * the event * @return the content */ public static boolean hasDnDContent(DragEvent e) { return e.getDragboard().hasContent(DnDTabPaneSkin.TAB_MOVE); } // /** // * Extract the tab content // * // * @param e // * the event // * @param clazz // * the type // * @return the content // */ // public static <O> O getDnDContent(DragEvent e, Class<O> clazz) { // String data = (String) e.getDragboard().getContent(DnDTabPaneSkin.TAB_MOVE); // Object rv = null; // for (TabSerializationStrategy<?> s : 
SERIALIZERS.keySet()) { // if (data.startsWith(s.prefix + "#")) { //$NON-NLS-1$ // rv = s.toData(data); // } // } // // if (rv == null) { // return (O) null; // } else { // if (clazz.isAssignableFrom(rv.getClass())) { // return (O) rv; // } // } // // return (O) null; // } /** * Extract the content * * @param e * the event * @return the return value */ public static String getDnDContent(DragEvent e) { return (String) e.getDragboard().getContent(DnDTabPaneSkin.TAB_MOVE); } /** * Setup insert marker * * @param layoutNode * the layout node used to position * @param setup * the setup */ public static void setup(FeedbackType type, Pane layoutNode, DragSetup setup) { setup.setStartFunction((t) -> Boolean.valueOf(!t.isDisabled() && ((DndTabPane) t.getTabPane()).isDraggingEnabled())); setup.setFeedbackConsumer((d) -> handleFeedback(type, layoutNode, d)); setup.setDropConsumer(DndTabPaneFactory::handleDropped); setup.setDragFinishedConsumer(DndTabPaneFactory::handleFinished); } private static void handleDropped(DroppedData data) { TabPane targetPane = data.targetTab.getTabPane(); data.draggedTab.getTabPane().getTabs().remove(data.draggedTab); int idx = targetPane.getTabs().indexOf(data.targetTab); if (data.dropType == DropType.AFTER) { if (idx + 1 <= targetPane.getTabs().size()) { targetPane.getTabs().add(idx + 1, data.draggedTab); } else { targetPane.getTabs().add(data.draggedTab); } } else { targetPane.getTabs().add(idx, data.draggedTab); } data.draggedTab.getTabPane().getSelectionModel().select(data.draggedTab); } private static void handleFeedback(FeedbackType type, Pane layoutNode, FeedbackData data) { if (data.dropType == DropType.NONE) { cleanup(); return; } MarkerFeedback f = CURRENT_FEEDBACK; if (f == null || !f.data.equals(data)) { cleanup(); if (type == FeedbackType.MARKER) { CURRENT_FEEDBACK = handleMarker(layoutNode, data); } else { CURRENT_FEEDBACK = handleOutline(layoutNode, data); } } } private static void handleFinished(Tab tab) { cleanup(); } static void 
cleanup() { if (CURRENT_FEEDBACK != null) { CURRENT_FEEDBACK.hide(); CURRENT_FEEDBACK = null; } } private static MarkerFeedback handleMarker(Pane layoutNode, FeedbackData data) { PositionMarker marker = null; for (Node n : layoutNode.getChildren()) { if (n instanceof PositionMarker) { marker = (PositionMarker) n; } } if (marker == null) { marker = new PositionMarker(); marker.setManaged(false); layoutNode.getChildren().add(marker); } else { marker.setVisible(true); } double w = marker.getBoundsInLocal().getWidth(); double h = marker.getBoundsInLocal().getHeight(); double ratio = data.bounds.getHeight() / h; ratio += 0.1; marker.setScaleX(ratio); marker.setScaleY(ratio); double wDiff = w / 2; double hDiff = (h - h * ratio) / 2; if (data.dropType == DropType.AFTER) { marker.relocate(data.bounds.getMinX() + data.bounds.getWidth() - wDiff, data.bounds.getMinY() - hDiff); } else { marker.relocate(data.bounds.getMinX() - wDiff, data.bounds.getMinY() - hDiff); } final PositionMarker fmarker = marker; return new MarkerFeedback(data) { @Override public void hide() { fmarker.setVisible(false); } }; } private static MarkerFeedback handleOutline(Pane layoutNode, FeedbackData data) { TabOutlineMarker marker = null; for (Node n : layoutNode.getChildren()) { if (n instanceof TabOutlineMarker) { marker = (TabOutlineMarker) n; } } if (marker == null) { marker = new TabOutlineMarker(layoutNode.getBoundsInLocal(), new BoundingBox(data.bounds.getMinX(), data.bounds.getMinY(), data.bounds.getWidth(), data.bounds.getHeight()), data.dropType == DropType.BEFORE); marker.setManaged(false); marker.setMouseTransparent(true); layoutNode.getChildren().add(marker); } else { marker.updateBounds(layoutNode.getBoundsInLocal(), new BoundingBox(data.bounds.getMinX(), data.bounds.getMinY(), data.bounds.getWidth(), data.bounds.getHeight()), data.dropType == DropType.BEFORE); marker.setVisible(true); } final TabOutlineMarker fmarker = marker; return new MarkerFeedback(data) { @Override public void 
hide() { fmarker.setVisible(false); } }; } private abstract static class MarkerFeedback { public final FeedbackData data; public MarkerFeedback(FeedbackData data) { this.data = data; } public abstract void hide(); } /** * The drop type */ public enum DropType { /** * No dropping */ NONE, /** * Dropped before a reference tab */ BEFORE, /** * Dropped after a reference tab */ AFTER } /** * The feedback type to use */ public enum FeedbackType { /** * Show a marker */ MARKER, /** * Show an outline */ OUTLINE } /** * Data to create a feedback */ public static class FeedbackData { /** * The tab dragged */ public final Tab draggedTab; /** * The reference tab */ public final Tab targetTab; /** * The bounds of the reference tab */ public final Bounds bounds; /** * The drop type */ public final DropType dropType; /** * Create a feedback data * * @param draggedTab * the dragged tab * @param targetTab * the reference tab * @param bounds * the bounds of the reference tab * @param dropType * the drop type */ public FeedbackData(Tab draggedTab, Tab targetTab, Bounds bounds, DropType dropType) { this.draggedTab = draggedTab; this.targetTab = targetTab; this.bounds = bounds; this.dropType = dropType; } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ((this.bounds == null) ? 0 : this.bounds.hashCode()); result = prime * result + this.draggedTab.hashCode(); result = prime * result + this.dropType.hashCode(); result = prime * result + ((this.targetTab == null) ? 
0 : this.targetTab.hashCode()); return result; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; FeedbackData other = (FeedbackData) obj; if (this.bounds == null) { if (other.bounds != null) return false; } else if (!this.bounds.equals(other.bounds)) return false; if (!this.draggedTab.equals(other.draggedTab)) return false; if (this.dropType != other.dropType) return false; if (this.targetTab == null) { if (other.targetTab != null) return false; } else if (!this.targetTab.equals(other.targetTab)) return false; return true; } } /** * The drop data */ public static class DroppedData { /** * The dragged tab */ public final Tab draggedTab; /** * The reference tab */ public final Tab targetTab; /** * The drop type */ public final DropType dropType; /** * Create drop data * * @param draggedTab * the dragged tab * @param targetTab * the target tab * @param dropType * the drop type */ public DroppedData(Tab draggedTab, Tab targetTab, DropType dropType) { this.draggedTab = draggedTab; this.targetTab = targetTab; this.dropType = dropType; } } /** * Setup of the drag and drop */ public interface DragSetup { /** * Function to handle the starting of the the drag * * @param startFunction * the function */ public void setStartFunction(Function<Tab, Boolean> startFunction); /** * Consumer called to handle the finishing of the drag process * * @param dragFinishedConsumer * the consumer */ public void setDragFinishedConsumer(Consumer<Tab> dragFinishedConsumer); /** * Consumer called to present drag feedback * * @param feedbackConsumer * the consumer to call */ public void setFeedbackConsumer(Consumer<FeedbackData> feedbackConsumer); /** * Consumer called when the drop has to be handled * * @param dropConsumer * the consumer */ public void setDropConsumer(Consumer<DroppedData> dropConsumer); /** * Function to translate the tab content into clipboard content * * @param 
clipboardDataFunction * the function */ public void setClipboardDataFunction(Function<Tab, String> clipboardDataFunction); } }
src/org/eclipse/fx/ui/controls/tabpane/DndTabPaneFactory.java
/******************************************************************************* * Copyright (c) 2014 BestSolution.at and others. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * Tom Schindl<[email protected]> - initial API and implementation *******************************************************************************/ package org.eclipse.fx.ui.controls.tabpane; import java.util.function.Consumer; import java.util.function.Function; import javafx.geometry.BoundingBox; import javafx.geometry.Bounds; import javafx.scene.Node; import javafx.scene.control.Tab; import javafx.scene.control.TabPane; import javafx.scene.input.DragEvent; import javafx.scene.layout.Pane; import javafx.scene.layout.StackPane; import org.eclipse.fx.ui.controls.markers.PositionMarker; import org.eclipse.fx.ui.controls.markers.TabOutlineMarker; import org.eclipse.fx.ui.controls.tabpane.skin.DnDTabPaneSkin; /** * Factory to create a tab pane who support DnD */ public final class DndTabPaneFactory { private static MarkerFeedback CURRENT_FEEDBACK; // private static Map<TabSerializationStrategy<?>, Boolean> SERIALIZERS = new WeakHashMap<>(); private DndTabPaneFactory() { } // public static final class TabSerializationStrategy<O> { // private final Function<Tab, String> serializationFunction; // private final Function<String, O> deserializationFunction; // final String prefix = UUID.randomUUID().toString(); // // public TabSerializationStrategy(Function<Tab, String> serializationFunction, Function<String, O> deserializationFunction) { // this.serializationFunction = serializationFunction; // this.deserializationFunction = deserializationFunction; // } // // public final String toString(Tab tab) { // return this.prefix + "#" + this.serializationFunction.apply(tab); //$NON-NLS-1$ // } // // 
public final O toData(String data) { // return deserializationFunction.apply(data.substring(prefix.length() + 1)); // } // } // // public static <O> TabSerializationStrategy<O> register(Function<Tab, String> serializationFunction, Function<String, O> deserializationFunction) { // TabSerializationStrategy<O> t = new TabSerializationStrategy<O>(serializationFunction, deserializationFunction); // SERIALIZERS.put(t, Boolean.TRUE); // return t; // } /** * Create a tab pane and set the drag strategy * * @param setup * the setup instance for the pane * @return the tab pane */ public static TabPane createDndTabPane(Consumer<DragSetup> setup) { return new DndTabPane() { @Override protected javafx.scene.control.Skin<?> createDefaultSkin() { DnDTabPaneSkin skin = new DnDTabPaneSkin(this); setup.accept(skin); return skin; } }; } /** * Create a tab pane with a default setup for drag feedback * * @param feedbackType * the feedback type * @param setup * consumer to set up the tab pane * @return a pane containing the TabPane */ public static Pane createDefaultDnDPane(FeedbackType feedbackType, Consumer<TabPane> setup) { StackPane pane = new StackPane(); TabPane tabPane = new DndTabPane() { @Override protected javafx.scene.control.Skin<?> createDefaultSkin() { DnDTabPaneSkin skin = new DnDTabPaneSkin(this); setup(feedbackType, pane, skin); return skin; } }; if (setup != null) { setup.accept(tabPane); } pane.getChildren().add(tabPane); return pane; } /** * Extract the tab content * * @param e * the event * @return the content */ public static boolean hasDnDContent(DragEvent e) { return e.getDragboard().hasContent(DnDTabPaneSkin.TAB_MOVE); } // /** // * Extract the tab content // * // * @param e // * the event // * @param clazz // * the type // * @return the content // */ // public static <O> O getDnDContent(DragEvent e, Class<O> clazz) { // String data = (String) e.getDragboard().getContent(DnDTabPaneSkin.TAB_MOVE); // Object rv = null; // for (TabSerializationStrategy<?> s : 
SERIALIZERS.keySet()) { // if (data.startsWith(s.prefix + "#")) { //$NON-NLS-1$ // rv = s.toData(data); // } // } // // if (rv == null) { // return (O) null; // } else { // if (clazz.isAssignableFrom(rv.getClass())) { // return (O) rv; // } // } // // return (O) null; // } /** * Extract the content * * @param e * the event * @return the return value */ public static String getDnDContent(DragEvent e) { return (String) e.getDragboard().getContent(DnDTabPaneSkin.TAB_MOVE); } /** * Setup insert marker * * @param layoutNode * the layout node used to position * @param setup * the setup */ static void setup(FeedbackType type, Pane layoutNode, DragSetup setup) { setup.setStartFunction((t) -> Boolean.valueOf(!t.isDisabled() && ((DndTabPane) t.getTabPane()).isDraggingEnabled())); setup.setFeedbackConsumer((d) -> handleFeedback(type, layoutNode, d)); setup.setDropConsumer(DndTabPaneFactory::handleDropped); setup.setDragFinishedConsumer(DndTabPaneFactory::handleFinished); } private static void handleDropped(DroppedData data) { TabPane targetPane = data.targetTab.getTabPane(); data.draggedTab.getTabPane().getTabs().remove(data.draggedTab); int idx = targetPane.getTabs().indexOf(data.targetTab); if (data.dropType == DropType.AFTER) { if (idx + 1 <= targetPane.getTabs().size()) { targetPane.getTabs().add(idx + 1, data.draggedTab); } else { targetPane.getTabs().add(data.draggedTab); } } else { targetPane.getTabs().add(idx, data.draggedTab); } data.draggedTab.getTabPane().getSelectionModel().select(data.draggedTab); } private static void handleFeedback(FeedbackType type, Pane layoutNode, FeedbackData data) { if (data.dropType == DropType.NONE) { cleanup(); return; } MarkerFeedback f = CURRENT_FEEDBACK; if (f == null || !f.data.equals(data)) { cleanup(); if (type == FeedbackType.MARKER) { CURRENT_FEEDBACK = handleMarker(layoutNode, data); } else { CURRENT_FEEDBACK = handleOutline(layoutNode, data); } } } private static void handleFinished(Tab tab) { cleanup(); } static void 
cleanup() { if (CURRENT_FEEDBACK != null) { CURRENT_FEEDBACK.hide(); CURRENT_FEEDBACK = null; } } private static MarkerFeedback handleMarker(Pane layoutNode, FeedbackData data) { PositionMarker marker = null; for (Node n : layoutNode.getChildren()) { if (n instanceof PositionMarker) { marker = (PositionMarker) n; } } if (marker == null) { marker = new PositionMarker(); marker.setManaged(false); layoutNode.getChildren().add(marker); } else { marker.setVisible(true); } double w = marker.getBoundsInLocal().getWidth(); double h = marker.getBoundsInLocal().getHeight(); double ratio = data.bounds.getHeight() / h; ratio += 0.1; marker.setScaleX(ratio); marker.setScaleY(ratio); double wDiff = w / 2; double hDiff = (h - h * ratio) / 2; if (data.dropType == DropType.AFTER) { marker.relocate(data.bounds.getMinX() + data.bounds.getWidth() - wDiff, data.bounds.getMinY() - hDiff); } else { marker.relocate(data.bounds.getMinX() - wDiff, data.bounds.getMinY() - hDiff); } final PositionMarker fmarker = marker; return new MarkerFeedback(data) { @Override public void hide() { fmarker.setVisible(false); } }; } private static MarkerFeedback handleOutline(Pane layoutNode, FeedbackData data) { TabOutlineMarker marker = null; for (Node n : layoutNode.getChildren()) { if (n instanceof TabOutlineMarker) { marker = (TabOutlineMarker) n; } } if (marker == null) { marker = new TabOutlineMarker(layoutNode.getBoundsInLocal(), new BoundingBox(data.bounds.getMinX(), data.bounds.getMinY(), data.bounds.getWidth(), data.bounds.getHeight()), data.dropType == DropType.BEFORE); marker.setManaged(false); marker.setMouseTransparent(true); layoutNode.getChildren().add(marker); } else { marker.updateBounds(layoutNode.getBoundsInLocal(), new BoundingBox(data.bounds.getMinX(), data.bounds.getMinY(), data.bounds.getWidth(), data.bounds.getHeight()), data.dropType == DropType.BEFORE); marker.setVisible(true); } final TabOutlineMarker fmarker = marker; return new MarkerFeedback(data) { @Override public void 
hide() { fmarker.setVisible(false); } }; } private abstract static class MarkerFeedback { public final FeedbackData data; public MarkerFeedback(FeedbackData data) { this.data = data; } public abstract void hide(); } /** * The drop type */ public enum DropType { /** * No dropping */ NONE, /** * Dropped before a reference tab */ BEFORE, /** * Dropped after a reference tab */ AFTER } /** * The feedback type to use */ public enum FeedbackType { /** * Show a marker */ MARKER, /** * Show an outline */ OUTLINE } /** * Data to create a feedback */ public static class FeedbackData { /** * The tab dragged */ public final Tab draggedTab; /** * The reference tab */ public final Tab targetTab; /** * The bounds of the reference tab */ public final Bounds bounds; /** * The drop type */ public final DropType dropType; /** * Create a feedback data * * @param draggedTab * the dragged tab * @param targetTab * the reference tab * @param bounds * the bounds of the reference tab * @param dropType * the drop type */ public FeedbackData(Tab draggedTab, Tab targetTab, Bounds bounds, DropType dropType) { this.draggedTab = draggedTab; this.targetTab = targetTab; this.bounds = bounds; this.dropType = dropType; } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ((this.bounds == null) ? 0 : this.bounds.hashCode()); result = prime * result + this.draggedTab.hashCode(); result = prime * result + this.dropType.hashCode(); result = prime * result + ((this.targetTab == null) ? 
0 : this.targetTab.hashCode()); return result; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; FeedbackData other = (FeedbackData) obj; if (this.bounds == null) { if (other.bounds != null) return false; } else if (!this.bounds.equals(other.bounds)) return false; if (!this.draggedTab.equals(other.draggedTab)) return false; if (this.dropType != other.dropType) return false; if (this.targetTab == null) { if (other.targetTab != null) return false; } else if (!this.targetTab.equals(other.targetTab)) return false; return true; } } /** * The drop data */ public static class DroppedData { /** * The dragged tab */ public final Tab draggedTab; /** * The reference tab */ public final Tab targetTab; /** * The drop type */ public final DropType dropType; /** * Create drop data * * @param draggedTab * the dragged tab * @param targetTab * the target tab * @param dropType * the drop type */ public DroppedData(Tab draggedTab, Tab targetTab, DropType dropType) { this.draggedTab = draggedTab; this.targetTab = targetTab; this.dropType = dropType; } } /** * Setup of the drag and drop */ public interface DragSetup { /** * Function to handle the starting of the the drag * * @param startFunction * the function */ public void setStartFunction(Function<Tab, Boolean> startFunction); /** * Consumer called to handle the finishing of the drag process * * @param dragFinishedConsumer * the consumer */ public void setDragFinishedConsumer(Consumer<Tab> dragFinishedConsumer); /** * Consumer called to present drag feedback * * @param feedbackConsumer * the consumer to call */ public void setFeedbackConsumer(Consumer<FeedbackData> feedbackConsumer); /** * Consumer called when the drop has to be handled * * @param dropConsumer * the consumer */ public void setDropConsumer(Consumer<DroppedData> dropConsumer); /** * Function to translate the tab content into clipboard content * * @param 
clipboardDataFunction * the function */ public void setClipboardDataFunction(Function<Tab, String> clipboardDataFunction); } }
Made the setup method public, to allow more fine-grained control.
src/org/eclipse/fx/ui/controls/tabpane/DndTabPaneFactory.java
Made the setup method public, to allow more fine-grained control.
<ide><path>rc/org/eclipse/fx/ui/controls/tabpane/DndTabPaneFactory.java <ide> * @param setup <ide> * the setup <ide> */ <del> static void setup(FeedbackType type, Pane layoutNode, DragSetup setup) { <add> public static void setup(FeedbackType type, Pane layoutNode, DragSetup setup) { <ide> setup.setStartFunction((t) -> Boolean.valueOf(!t.isDisabled() && ((DndTabPane) t.getTabPane()).isDraggingEnabled())); <ide> setup.setFeedbackConsumer((d) -> handleFeedback(type, layoutNode, d)); <ide> setup.setDropConsumer(DndTabPaneFactory::handleDropped);
Java
apache-2.0
6704ef2794cf7617da658ab0a96f6aca9eb3134c
0
GabrielBrascher/cloudstack,GabrielBrascher/cloudstack,GabrielBrascher/cloudstack,GabrielBrascher/cloudstack,GabrielBrascher/cloudstack,GabrielBrascher/cloudstack,GabrielBrascher/cloudstack
// Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The ASF licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package com.cloud.storage; import java.net.MalformedURLException; import java.net.URL; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.UUID; import java.util.concurrent.ExecutionException; import javax.inject.Inject; import org.apache.cloudstack.api.command.user.volume.AttachVolumeCmd; import org.apache.cloudstack.api.command.user.volume.CreateVolumeCmd; import org.apache.cloudstack.api.command.user.volume.DetachVolumeCmd; import org.apache.cloudstack.api.command.user.volume.ExtractVolumeCmd; import org.apache.cloudstack.api.command.user.volume.GetUploadParamsForVolumeCmd; import org.apache.cloudstack.api.command.user.volume.MigrateVolumeCmd; import org.apache.cloudstack.api.command.user.volume.ResizeVolumeCmd; import org.apache.cloudstack.api.command.user.volume.UploadVolumeCmd; import org.apache.cloudstack.api.response.GetUploadParamsResponse; import org.apache.cloudstack.context.CallContext; import org.apache.cloudstack.engine.orchestration.service.VolumeOrchestrationService; import 
org.apache.cloudstack.engine.subsystem.api.storage.ChapInfo; import org.apache.cloudstack.engine.subsystem.api.storage.DataObject; import org.apache.cloudstack.engine.subsystem.api.storage.DataStore; import org.apache.cloudstack.engine.subsystem.api.storage.DataStoreManager; import org.apache.cloudstack.engine.subsystem.api.storage.EndPoint; import org.apache.cloudstack.engine.subsystem.api.storage.HostScope; import org.apache.cloudstack.engine.subsystem.api.storage.PrimaryDataStoreInfo; import org.apache.cloudstack.engine.subsystem.api.storage.Scope; import org.apache.cloudstack.engine.subsystem.api.storage.StoragePoolAllocator; import org.apache.cloudstack.engine.subsystem.api.storage.VolumeDataFactory; import org.apache.cloudstack.engine.subsystem.api.storage.VolumeInfo; import org.apache.cloudstack.engine.subsystem.api.storage.VolumeService; import org.apache.cloudstack.engine.subsystem.api.storage.VolumeService.VolumeApiResult; import org.apache.cloudstack.framework.async.AsyncCallFuture; import org.apache.cloudstack.framework.config.ConfigKey; import org.apache.cloudstack.framework.config.Configurable; import org.apache.cloudstack.framework.config.dao.ConfigurationDao; import org.apache.cloudstack.framework.jobs.AsyncJob; import org.apache.cloudstack.framework.jobs.AsyncJobExecutionContext; import org.apache.cloudstack.framework.jobs.AsyncJobManager; import org.apache.cloudstack.framework.jobs.Outcome; import org.apache.cloudstack.framework.jobs.dao.VmWorkJobDao; import org.apache.cloudstack.framework.jobs.impl.AsyncJobVO; import org.apache.cloudstack.framework.jobs.impl.OutcomeImpl; import org.apache.cloudstack.framework.jobs.impl.VmWorkJobVO; import org.apache.cloudstack.jobs.JobInfo; import org.apache.cloudstack.storage.command.AttachAnswer; import org.apache.cloudstack.storage.command.AttachCommand; import org.apache.cloudstack.storage.command.DettachCommand; import org.apache.cloudstack.storage.command.TemplateOrVolumePostUploadCommand; import 
org.apache.cloudstack.storage.datastore.db.PrimaryDataStoreDao; import org.apache.cloudstack.storage.datastore.db.StoragePoolVO; import org.apache.cloudstack.storage.datastore.db.VolumeDataStoreDao; import org.apache.cloudstack.storage.datastore.db.VolumeDataStoreVO; import org.apache.cloudstack.storage.image.datastore.ImageStoreEntity; import org.apache.cloudstack.utils.identity.ManagementServerNode; import org.apache.cloudstack.utils.imagestore.ImageStoreUtil; import org.apache.cloudstack.utils.volume.VirtualMachineDiskInfo; import org.apache.commons.collections.CollectionUtils; import org.apache.commons.collections.MapUtils; import org.apache.log4j.Logger; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import com.cloud.agent.AgentManager; import com.cloud.agent.api.Answer; import com.cloud.agent.api.ModifyTargetsCommand; import com.cloud.agent.api.to.DataTO; import com.cloud.agent.api.to.DiskTO; import com.cloud.api.ApiDBUtils; import com.cloud.configuration.Config; import com.cloud.configuration.ConfigurationManager; import com.cloud.configuration.Resource.ResourceType; import com.cloud.dc.ClusterDetailsDao; import com.cloud.dc.DataCenter; import com.cloud.dc.DataCenterVO; import com.cloud.dc.dao.DataCenterDao; import com.cloud.domain.Domain; import com.cloud.event.ActionEvent; import com.cloud.event.EventTypes; import com.cloud.event.UsageEventUtils; import com.cloud.exception.ConcurrentOperationException; import com.cloud.exception.InvalidParameterValueException; import com.cloud.exception.PermissionDeniedException; import com.cloud.exception.ResourceAllocationException; import com.cloud.exception.StorageUnavailableException; import com.cloud.gpu.GPU; import com.cloud.host.HostVO; import com.cloud.host.Status; import com.cloud.host.dao.HostDao; import com.cloud.hypervisor.Hypervisor.HypervisorType; import com.cloud.hypervisor.HypervisorCapabilitiesVO; import com.cloud.hypervisor.dao.HypervisorCapabilitiesDao; import com.cloud.org.Grouping; 
import com.cloud.resource.ResourceState; import com.cloud.serializer.GsonHelper; import com.cloud.server.ResourceTag; import com.cloud.server.TaggedResourceService; import com.cloud.service.dao.ServiceOfferingDetailsDao; import com.cloud.storage.Storage.ImageFormat; import com.cloud.storage.dao.DiskOfferingDao; import com.cloud.storage.dao.SnapshotDao; import com.cloud.storage.dao.StoragePoolTagsDao; import com.cloud.storage.dao.VMTemplateDao; import com.cloud.storage.dao.VolumeDao; import com.cloud.storage.snapshot.SnapshotApiService; import com.cloud.storage.snapshot.SnapshotManager; import com.cloud.template.TemplateManager; import com.cloud.user.Account; import com.cloud.user.AccountManager; import com.cloud.user.ResourceLimitService; import com.cloud.user.User; import com.cloud.user.VmDiskStatisticsVO; import com.cloud.user.dao.AccountDao; import com.cloud.user.dao.VmDiskStatisticsDao; import com.cloud.utils.DateUtil; import com.cloud.utils.EncryptionUtil; import com.cloud.utils.EnumUtils; import com.cloud.utils.NumbersUtil; import com.cloud.utils.Pair; import com.cloud.utils.Predicate; import com.cloud.utils.ReflectionUse; import com.cloud.utils.StringUtils; import com.cloud.utils.UriUtils; import com.cloud.utils.component.ManagerBase; import com.cloud.utils.db.DB; import com.cloud.utils.db.EntityManager; import com.cloud.utils.db.Transaction; import com.cloud.utils.db.TransactionCallback; import com.cloud.utils.db.TransactionCallbackWithException; import com.cloud.utils.db.TransactionStatus; import com.cloud.utils.db.UUIDManager; import com.cloud.utils.exception.CloudRuntimeException; import com.cloud.utils.fsm.NoTransitionException; import com.cloud.utils.fsm.StateMachine2; import com.cloud.vm.UserVmManager; import com.cloud.vm.UserVmService; import com.cloud.vm.UserVmVO; import com.cloud.vm.VMInstanceVO; import com.cloud.vm.VirtualMachine; import com.cloud.vm.VirtualMachine.State; import com.cloud.vm.VmDetailConstants; import com.cloud.vm.VmWork; import 
com.cloud.vm.VmWorkAttachVolume; import com.cloud.vm.VmWorkConstants; import com.cloud.vm.VmWorkDetachVolume; import com.cloud.vm.VmWorkExtractVolume; import com.cloud.vm.VmWorkJobHandler; import com.cloud.vm.VmWorkJobHandlerProxy; import com.cloud.vm.VmWorkMigrateVolume; import com.cloud.vm.VmWorkResizeVolume; import com.cloud.vm.VmWorkSerializer; import com.cloud.vm.VmWorkTakeVolumeSnapshot; import com.cloud.vm.dao.UserVmDao; import com.cloud.vm.dao.VMInstanceDao; import com.cloud.vm.snapshot.VMSnapshotVO; import com.cloud.vm.snapshot.dao.VMSnapshotDao; import com.google.common.base.Strings; import com.google.gson.Gson; import com.google.gson.GsonBuilder; import com.google.gson.JsonParseException; public class VolumeApiServiceImpl extends ManagerBase implements VolumeApiService, VmWorkJobHandler, Configurable { private final static Logger s_logger = Logger.getLogger(VolumeApiServiceImpl.class); public static final String VM_WORK_JOB_HANDLER = VolumeApiServiceImpl.class.getSimpleName(); @Inject private UserVmManager _userVmMgr; @Inject private VolumeOrchestrationService _volumeMgr; @Inject private EntityManager _entityMgr; @Inject private AgentManager _agentMgr; @Inject private TemplateManager _tmpltMgr; @Inject private SnapshotManager _snapshotMgr; @Inject private AccountManager _accountMgr; @Inject private ConfigurationManager _configMgr; @Inject private VolumeDao _volsDao; @Inject private HostDao _hostDao; @Inject private SnapshotDao _snapshotDao; @Inject private ServiceOfferingDetailsDao _serviceOfferingDetailsDao; @Inject private UserVmDao _userVmDao; @Inject private UserVmService _userVmService; @Inject private VolumeDataStoreDao _volumeStoreDao; @Inject private VMInstanceDao _vmInstanceDao; @Inject private PrimaryDataStoreDao _storagePoolDao; @Inject private DiskOfferingDao _diskOfferingDao; @Inject private AccountDao _accountDao; @Inject private DataCenterDao _dcDao; @Inject private VMTemplateDao _templateDao; @Inject private ResourceLimitService 
_resourceLimitMgr; @Inject private VmDiskStatisticsDao _vmDiskStatsDao; @Inject private VMSnapshotDao _vmSnapshotDao; @Inject private ConfigurationDao _configDao; @Inject private DataStoreManager dataStoreMgr; @Inject private VolumeService volService; @Inject private VolumeDataFactory volFactory; @Inject private SnapshotApiService snapshotMgr; @Inject private UUIDManager _uuidMgr; @Inject private HypervisorCapabilitiesDao _hypervisorCapabilitiesDao; @Inject private AsyncJobManager _jobMgr; @Inject private VmWorkJobDao _workJobDao; @Inject private ClusterDetailsDao _clusterDetailsDao; @Inject private StorageManager storageMgr; @Inject private StoragePoolTagsDao storagePoolTagsDao; @Inject private StorageUtil storageUtil; @Inject public TaggedResourceService taggedResourceService; protected Gson _gson; private List<StoragePoolAllocator> _storagePoolAllocators; private List<HypervisorType> supportingDefaultHV; VmWorkJobHandlerProxy _jobHandlerProxy = new VmWorkJobHandlerProxy(this); static final ConfigKey<Long> VmJobCheckInterval = new ConfigKey<Long>("Advanced", Long.class, "vm.job.check.interval", "3000", "Interval in milliseconds to check if the job is complete", false); static final ConfigKey<Boolean> VolumeUrlCheck = new ConfigKey<Boolean>("Advanced", Boolean.class, "volume.url.check", "true", "Check the url for a volume before downloading it from the management server. 
Set to false when you managment has no internet access.", true); public static final ConfigKey<Boolean> AllowUserExpungeRecoverVolume = new ConfigKey<Boolean>("Advanced", Boolean.class, "allow.user.expunge.recover.volume", "true", "Determines whether users can expunge or recover their volume", true, ConfigKey.Scope.Account); private long _maxVolumeSizeInGb; private final StateMachine2<Volume.State, Volume.Event, Volume> _volStateMachine; protected VolumeApiServiceImpl() { _volStateMachine = Volume.State.getStateMachine(); _gson = GsonHelper.getGsonLogger(); } /* * Upload the volume to secondary storage. */ @Override @DB @ActionEvent(eventType = EventTypes.EVENT_VOLUME_UPLOAD, eventDescription = "uploading volume", async = true) public VolumeVO uploadVolume(UploadVolumeCmd cmd) throws ResourceAllocationException { Account caller = CallContext.current().getCallingAccount(); long ownerId = cmd.getEntityOwnerId(); Account owner = _entityMgr.findById(Account.class, ownerId); Long zoneId = cmd.getZoneId(); String volumeName = cmd.getVolumeName(); String url = cmd.getUrl(); String format = cmd.getFormat(); Long diskOfferingId = cmd.getDiskOfferingId(); String imageStoreUuid = cmd.getImageStoreUuid(); DataStore store = _tmpltMgr.getImageStore(imageStoreUuid, zoneId); validateVolume(caller, ownerId, zoneId, volumeName, url, format, diskOfferingId); VolumeVO volume = persistVolume(owner, zoneId, volumeName, url, cmd.getFormat(), diskOfferingId, Volume.State.Allocated); VolumeInfo vol = volFactory.getVolume(volume.getId()); RegisterVolumePayload payload = new RegisterVolumePayload(cmd.getUrl(), cmd.getChecksum(), cmd.getFormat()); vol.addPayload(payload); volService.registerVolume(vol, store); return volume; } @Override @ActionEvent(eventType = EventTypes.EVENT_VOLUME_UPLOAD, eventDescription = "uploading volume for post upload", async = true) public GetUploadParamsResponse uploadVolume(final GetUploadParamsForVolumeCmd cmd) throws ResourceAllocationException, 
MalformedURLException { Account caller = CallContext.current().getCallingAccount(); long ownerId = cmd.getEntityOwnerId(); final Account owner = _entityMgr.findById(Account.class, ownerId); final Long zoneId = cmd.getZoneId(); final String volumeName = cmd.getName(); String format = cmd.getFormat(); final Long diskOfferingId = cmd.getDiskOfferingId(); String imageStoreUuid = cmd.getImageStoreUuid(); final DataStore store = _tmpltMgr.getImageStore(imageStoreUuid, zoneId); validateVolume(caller, ownerId, zoneId, volumeName, null, format, diskOfferingId); return Transaction.execute(new TransactionCallbackWithException<GetUploadParamsResponse, MalformedURLException>() { @Override public GetUploadParamsResponse doInTransaction(TransactionStatus status) throws MalformedURLException { VolumeVO volume = persistVolume(owner, zoneId, volumeName, null, cmd.getFormat(), diskOfferingId, Volume.State.NotUploaded); VolumeInfo vol = volFactory.getVolume(volume.getId()); RegisterVolumePayload payload = new RegisterVolumePayload(null, cmd.getChecksum(), cmd.getFormat()); vol.addPayload(payload); Pair<EndPoint, DataObject> pair = volService.registerVolumeForPostUpload(vol, store); EndPoint ep = pair.first(); DataObject dataObject = pair.second(); GetUploadParamsResponse response = new GetUploadParamsResponse(); String ssvmUrlDomain = _configDao.getValue(Config.SecStorageSecureCopyCert.key()); String url = ImageStoreUtil.generatePostUploadUrl(ssvmUrlDomain, ep.getPublicAddr(), vol.getUuid()); response.setPostURL(new URL(url)); // set the post url, this is used in the monitoring thread to determine the SSVM VolumeDataStoreVO volumeStore = _volumeStoreDao.findByVolume(vol.getId()); assert (volumeStore != null) : "sincle volume is registered, volumestore cannot be null at this stage"; volumeStore.setExtractUrl(url); _volumeStoreDao.persist(volumeStore); response.setId(UUID.fromString(vol.getUuid())); int timeout = ImageStoreUploadMonitorImpl.getUploadOperationTimeout(); DateTime 
currentDateTime = new DateTime(DateTimeZone.UTC); String expires = currentDateTime.plusMinutes(timeout).toString(); response.setTimeout(expires); String key = _configDao.getValue(Config.SSVMPSK.key()); /* * encoded metadata using the post upload config key */ TemplateOrVolumePostUploadCommand command = new TemplateOrVolumePostUploadCommand(vol.getId(), vol.getUuid(), volumeStore.getInstallPath(), cmd.getChecksum(), vol.getType().toString(), vol.getName(), vol.getFormat().toString(), dataObject.getDataStore().getUri(), dataObject.getDataStore().getRole().toString()); command.setLocalPath(volumeStore.getLocalDownloadPath()); //using the existing max upload size configuration command.setProcessTimeout(NumbersUtil.parseLong(_configDao.getValue("vmware.package.ova.timeout"), 3600)); command.setMaxUploadSize(_configDao.getValue(Config.MaxUploadVolumeSize.key())); command.setDefaultMaxAccountSecondaryStorage(_configDao.getValue(Config.DefaultMaxAccountSecondaryStorage.key())); command.setAccountId(vol.getAccountId()); Gson gson = new GsonBuilder().create(); String metadata = EncryptionUtil.encodeData(gson.toJson(command), key); response.setMetadata(metadata); /* * signature calculated on the url, expiry, metadata. 
*/ response.setSignature(EncryptionUtil.generateSignature(metadata + url + expires, key)); return response; } }); } private boolean validateVolume(Account caller, long ownerId, Long zoneId, String volumeName, String url, String format, Long diskOfferingId) throws ResourceAllocationException { // permission check Account volumeOwner = _accountMgr.getActiveAccountById(ownerId); _accountMgr.checkAccess(caller, null, true, volumeOwner); // Check that the resource limit for volumes won't be exceeded _resourceLimitMgr.checkResourceLimit(volumeOwner, ResourceType.volume); // Verify that zone exists DataCenterVO zone = _dcDao.findById(zoneId); if (zone == null) { throw new InvalidParameterValueException("Unable to find zone by id " + zoneId); } // Check if zone is disabled if (Grouping.AllocationState.Disabled == zone.getAllocationState() && !_accountMgr.isRootAdmin(caller.getId())) { throw new PermissionDeniedException("Cannot perform this operation, Zone is currently disabled: " + zoneId); } //validating the url only when url is not null. 
url can be null incase of form based post upload if (url != null) { if (url.toLowerCase().contains("file://")) { throw new InvalidParameterValueException("File:// type urls are currently unsupported"); } UriUtils.validateUrl(format, url); if (VolumeUrlCheck.value()) { // global setting that can be set when their MS does not have internet access s_logger.debug("Checking url: " + url); UriUtils.checkUrlExistence(url); } // Check that the resource limit for secondary storage won't be exceeded _resourceLimitMgr.checkResourceLimit(_accountMgr.getAccount(ownerId), ResourceType.secondary_storage, UriUtils.getRemoteSize(url)); } else { _resourceLimitMgr.checkResourceLimit(_accountMgr.getAccount(ownerId), ResourceType.secondary_storage); } try { ImageFormat.valueOf(format.toUpperCase()); } catch (IllegalArgumentException e) { s_logger.debug("ImageFormat IllegalArgumentException: " + e.getMessage()); throw new IllegalArgumentException("Image format: " + format + " is incorrect. Supported formats are " + EnumUtils.listValues(ImageFormat.values())); } // Check that the the disk offering specified is valid if (diskOfferingId != null) { DiskOfferingVO diskOffering = _diskOfferingDao.findById(diskOfferingId); if ((diskOffering == null) || diskOffering.getRemoved() != null || !DiskOfferingVO.Type.Disk.equals(diskOffering.getType())) { throw new InvalidParameterValueException("Please specify a valid disk offering."); } if (!diskOffering.isCustomized()) { throw new InvalidParameterValueException("Please specify a custom sized disk offering."); } _configMgr.checkDiskOfferingAccess(volumeOwner, diskOffering, zone); } return false; } public String getRandomVolumeName() { return UUID.randomUUID().toString(); } @DB protected VolumeVO persistVolume(final Account owner, final Long zoneId, final String volumeName, final String url, final String format, final Long diskOfferingId, final Volume.State state) { return Transaction.execute(new TransactionCallback<VolumeVO>() { @Override public 
VolumeVO doInTransaction(TransactionStatus status) { VolumeVO volume = new VolumeVO(volumeName, zoneId, -1, -1, -1, new Long(-1), null, null, Storage.ProvisioningType.THIN, 0, Volume.Type.DATADISK); volume.setPoolId(null); volume.setDataCenterId(zoneId); volume.setPodId(null); volume.setState(state); // initialize the state // to prevent a null pointer deref I put the system account id here when no owner is given. // TODO Decide if this is valid or whether throwing a CloudRuntimeException is more appropriate volume.setAccountId((owner == null) ? Account.ACCOUNT_ID_SYSTEM : owner.getAccountId()); volume.setDomainId((owner == null) ? Domain.ROOT_DOMAIN : owner.getDomainId()); if (diskOfferingId == null) { DiskOfferingVO diskOfferingVO = _diskOfferingDao.findByUniqueName("Cloud.com-Custom"); if (diskOfferingVO != null) { long defaultDiskOfferingId = diskOfferingVO.getId(); volume.setDiskOfferingId(defaultDiskOfferingId); } } else { volume.setDiskOfferingId(diskOfferingId); DiskOfferingVO diskOfferingVO = _diskOfferingDao.findById(diskOfferingId); Boolean isCustomizedIops = diskOfferingVO != null && diskOfferingVO.isCustomizedIops() != null ? diskOfferingVO.isCustomizedIops() : false; if (isCustomizedIops == null || !isCustomizedIops) { volume.setMinIops(diskOfferingVO.getMinIops()); volume.setMaxIops(diskOfferingVO.getMaxIops()); } } // volume.setSize(size); volume.setInstanceId(null); volume.setUpdated(new Date()); volume.setDomainId((owner == null) ? 
Domain.ROOT_DOMAIN : owner.getDomainId()); volume.setFormat(ImageFormat.valueOf(format)); volume = _volsDao.persist(volume); CallContext.current().setEventDetails("Volume Id: " + volume.getUuid()); // Increment resource count during allocation; if actual creation fails, // decrement it _resourceLimitMgr.incrementResourceCount(volume.getAccountId(), ResourceType.volume); //url can be null incase of postupload if (url != null) { _resourceLimitMgr.incrementResourceCount(volume.getAccountId(), ResourceType.secondary_storage, UriUtils.getRemoteSize(url)); } return volume; } }); } /** * Retrieves the volume name from CreateVolumeCmd object. * * If the retrieved volume name is null, empty or blank, then A random name * will be generated using getRandomVolumeName method. * * @param cmd * @return Either the retrieved name or a random name. */ public String getVolumeNameFromCommand(CreateVolumeCmd cmd) { String userSpecifiedName = cmd.getVolumeName(); if (org.apache.commons.lang.StringUtils.isBlank(userSpecifiedName)) { userSpecifiedName = getRandomVolumeName(); } return userSpecifiedName; } /* * Just allocate a volume in the database, don't send the createvolume cmd * to hypervisor. The volume will be finally created only when it's attached * to a VM. 
     */
    @Override
    @DB
    @ActionEvent(eventType = EventTypes.EVENT_VOLUME_CREATE, eventDescription = "creating volume", create = true)
    public VolumeVO allocVolume(CreateVolumeCmd cmd) throws ResourceAllocationException {
        // Resolve the caller and the account that will own the new volume.
        Account caller = CallContext.current().getCallingAccount();

        long ownerId = cmd.getEntityOwnerId();
        Account owner = _accountMgr.getActiveAccountById(ownerId);
        Boolean displayVolume = cmd.getDisplayVolume();

        // permission check
        _accountMgr.checkAccess(caller, null, true, _accountMgr.getActiveAccountById(ownerId));

        // Only root admins may override the display flag; everyone else defaults to visible.
        if (displayVolume == null) {
            displayVolume = true;
        } else {
            if (!_accountMgr.isRootAdmin(caller.getId())) {
                throw new PermissionDeniedException("Cannot update parameter displayvolume, only admin permitted ");
            }
        }

        // Check that the resource limit for volumes won't be exceeded
        _resourceLimitMgr.checkResourceLimit(owner, ResourceType.volume, displayVolume);

        Long zoneId = cmd.getZoneId();
        Long diskOfferingId = null;
        DiskOfferingVO diskOffering = null;
        Long size = null;
        Long minIops = null;
        Long maxIops = null;
        // Volume VO used for extracting the source template id
        VolumeVO parentVolume = null;

        // validate input parameters before creating the volume
        if (cmd.getSnapshotId() == null && cmd.getDiskOfferingId() == null) {
            throw new InvalidParameterValueException("At least one of disk Offering ID or snapshot ID must be passed whilst creating volume");
        }

        // disallow passing disk offering ID with DATA disk volume snapshots
        if (cmd.getSnapshotId() != null && cmd.getDiskOfferingId() != null) {
            SnapshotVO snapshot = _snapshotDao.findById(cmd.getSnapshotId());
            if (snapshot != null) {
                parentVolume = _volsDao.findByIdIncludingRemoved(snapshot.getVolumeId());
                if (parentVolume != null && parentVolume.getVolumeType() != Volume.Type.ROOT)
                    throw new InvalidParameterValueException("Disk Offering ID cannot be passed whilst creating volume from snapshot other than ROOT disk snapshots");
            }
            parentVolume = null;
        }

        if (cmd.getDiskOfferingId() != null) { // create a new volume
            diskOfferingId = cmd.getDiskOfferingId();
            size = cmd.getSize();
            Long sizeInGB = size;
            if (size != null) {
                if (size > 0) {
                    size = size * 1024 * 1024 * 1024; // user specify size in GB
                } else {
                    throw new InvalidParameterValueException("Disk size must be larger than 0");
                }
            }

            // Check that the disk offering is specified
            diskOffering = _diskOfferingDao.findById(diskOfferingId);
            if ((diskOffering == null) || diskOffering.getRemoved() != null || !DiskOfferingVO.Type.Disk.equals(diskOffering.getType())) {
                throw new InvalidParameterValueException("Please specify a valid disk offering.");
            }

            if (diskOffering.isCustomized()) {
                if (size == null) {
                    throw new InvalidParameterValueException("This disk offering requires a custom size specified");
                }
                // Custom size must fall within the globally configured bounds (in GiB).
                Long customDiskOfferingMaxSize = VolumeOrchestrationService.CustomDiskOfferingMaxSize.value();
                Long customDiskOfferingMinSize = VolumeOrchestrationService.CustomDiskOfferingMinSize.value();

                if ((sizeInGB < customDiskOfferingMinSize) || (sizeInGB > customDiskOfferingMaxSize)) {
                    throw new InvalidParameterValueException("Volume size: " + sizeInGB + "GB is out of allowed range. Max: " + customDiskOfferingMaxSize + " Min:" + customDiskOfferingMinSize);
                }
            }

            if (!diskOffering.isCustomized() && size != null) {
                throw new InvalidParameterValueException("This disk offering does not allow custom size");
            }

            _configMgr.checkDiskOfferingAccess(owner, diskOffering, _dcDao.findById(zoneId));

            if (diskOffering.getDiskSize() > 0) {
                size = diskOffering.getDiskSize();
            }

            // IOPS handling: customized offerings take the caller's min/max (validated),
            // fixed offerings impose their own min/max.
            Boolean isCustomizedIops = diskOffering.isCustomizedIops();

            if (isCustomizedIops != null) {
                if (isCustomizedIops) {
                    minIops = cmd.getMinIops();
                    maxIops = cmd.getMaxIops();

                    if (minIops == null && maxIops == null) {
                        minIops = 0L;
                        maxIops = 0L;
                    } else {
                        if (minIops == null || minIops <= 0) {
                            throw new InvalidParameterValueException("The min IOPS must be greater than 0.");
                        }

                        if (maxIops == null) {
                            maxIops = 0L;
                        }

                        if (minIops > maxIops) {
                            throw new InvalidParameterValueException("The min IOPS must be less than or equal to the max IOPS.");
                        }
                    }
                } else {
                    minIops = diskOffering.getMinIops();
                    maxIops = diskOffering.getMaxIops();
                }
            }

            if (!validateVolumeSizeRange(size)) {// convert size from mb to gb
                // for validation
                throw new InvalidParameterValueException("Invalid size for custom volume creation: " + size + " ,max volume size is:" + _maxVolumeSizeInGb);
            }
        }

        if (cmd.getSnapshotId() != null) { // create volume from snapshot
            Long snapshotId = cmd.getSnapshotId();
            SnapshotVO snapshotCheck = _snapshotDao.findById(snapshotId);
            if (snapshotCheck == null) {
                throw new InvalidParameterValueException("unable to find a snapshot with id " + snapshotId);
            }

            if (snapshotCheck.getState() != Snapshot.State.BackedUp) {
                throw new InvalidParameterValueException("Snapshot id=" + snapshotId + " is not in " + Snapshot.State.BackedUp + " state yet and can't be used for volume creation");
            }
            parentVolume = _volsDao.findByIdIncludingRemoved(snapshotCheck.getVolumeId());

            if (zoneId == null) {
                // if zoneId is not provided, we default to create volume in the same zone as the snapshot zone.
                zoneId = snapshotCheck.getDataCenterId();
            }

            if (diskOffering == null) { // Pure snapshot is being used to create volume.
                diskOfferingId = snapshotCheck.getDiskOfferingId();
                diskOffering = _diskOfferingDao.findById(diskOfferingId);
                minIops = snapshotCheck.getMinIops();
                maxIops = snapshotCheck.getMaxIops();
                size = snapshotCheck.getSize(); // ; disk offering is used for tags purposes
            } else {
                if (size < snapshotCheck.getSize()) {
                    throw new InvalidParameterValueException(String.format("Invalid size for volume creation: %dGB, snapshot size is: %dGB", size / (1024 * 1024 * 1024),
                            snapshotCheck.getSize() / (1024 * 1024 * 1024)));
                }
            }

            _configMgr.checkDiskOfferingAccess(null, diskOffering, _dcDao.findById(zoneId));

            // check snapshot permissions
            _accountMgr.checkAccess(caller, null, true, snapshotCheck);

            // one step operation - create volume in VM's cluster and attach it
            // to the VM
            Long vmId = cmd.getVirtualMachineId();
            if (vmId != null) {
                // Check that the virtual machine ID is valid and it's a user vm
                UserVmVO vm = _userVmDao.findById(vmId);
                if (vm == null || vm.getType() != VirtualMachine.Type.User) {
                    throw new InvalidParameterValueException("Please specify a valid User VM.");
                }

                // Check that the VM is in the correct state
                if (vm.getState() != State.Running && vm.getState() != State.Stopped) {
                    throw new InvalidParameterValueException("Please specify a VM that is either running or stopped.");
                }

                // permission check
                _accountMgr.checkAccess(caller, null, false, vm);
            }
        }

        Storage.ProvisioningType provisioningType = diskOffering.getProvisioningType();

        // Check that the resource limit for primary storage won't be exceeded
        _resourceLimitMgr.checkResourceLimit(owner, ResourceType.primary_storage, displayVolume, new Long(size));

        // Verify that zone exists
        DataCenterVO zone = _dcDao.findById(zoneId);
        if (zone == null) {
            throw new InvalidParameterValueException("Unable to find zone by id " + zoneId);
        }

        // Check if zone is disabled
        if (Grouping.AllocationState.Disabled == zone.getAllocationState() && !_accountMgr.isRootAdmin(caller.getId())) {
            throw new PermissionDeniedException("Cannot perform this operation, Zone is currently disabled: " + zoneId);
        }

        // If local storage is disabled then creation of volume with local disk
        // offering not allowed
        if (!zone.isLocalStorageEnabled() && diskOffering.isUseLocalStorage()) {
            throw new InvalidParameterValueException("Zone is not configured to use local storage but volume's disk offering " + diskOffering.getName() + " uses it");
        }

        String userSpecifiedName = getVolumeNameFromCommand(cmd);

        return commitVolume(cmd, caller, owner, displayVolume, zoneId, diskOfferingId, provisioningType, size, minIops, maxIops, parentVolume, userSpecifiedName,
                _uuidMgr.generateUuid(Volume.class, cmd.getCustomId()));
    }

    /**
     * Persists the validated volume record inside a single DB transaction and
     * publishes the allocation against the owner's resource limits.
     * All parameters have already been validated by {@link #allocVolume(CreateVolumeCmd)}.
     */
    private VolumeVO commitVolume(final CreateVolumeCmd cmd, final Account caller, final Account owner, final Boolean displayVolume, final Long zoneId, final Long diskOfferingId,
            final Storage.ProvisioningType provisioningType, final Long size, final Long minIops, final Long maxIops, final VolumeVO parentVolume, final String userSpecifiedName, final String uuid) {
        return Transaction.execute(new TransactionCallback<VolumeVO>() {
            @Override
            public VolumeVO doInTransaction(TransactionStatus status) {
                // Pool/pod/instance are unknown at allocation time, hence the -1 placeholders.
                VolumeVO volume = new VolumeVO(userSpecifiedName, -1, -1, -1, -1, new Long(-1), null, null, provisioningType, 0, Volume.Type.DATADISK);
                volume.setPoolId(null);
                volume.setUuid(uuid);
                volume.setDataCenterId(zoneId);
                volume.setPodId(null);
                volume.setAccountId(owner.getId());
                volume.setDomainId(owner.getDomainId());
                volume.setDiskOfferingId(diskOfferingId);
                volume.setSize(size);
                volume.setMinIops(minIops);
                volume.setMaxIops(maxIops);
                volume.setInstanceId(null);
                volume.setUpdated(new Date());
                volume.setDisplayVolume(displayVolume);
                if (parentVolume != null) {
                    // Inherit template lineage and format from the snapshot's source volume.
                    volume.setTemplateId(parentVolume.getTemplateId());
                    volume.setFormat(parentVolume.getFormat());
                } else {
                    volume.setTemplateId(null);
                }

                volume =
_volsDao.persist(volume);

                if (cmd.getSnapshotId() == null && displayVolume) {
                    // for volume created from snapshot, create usage event after volume creation
                    UsageEventUtils.publishUsageEvent(EventTypes.EVENT_VOLUME_CREATE, volume.getAccountId(), volume.getDataCenterId(), volume.getId(), volume.getName(), diskOfferingId,
                            null, size, Volume.class.getName(), volume.getUuid(), displayVolume);
                }

                CallContext.current().setEventDetails("Volume Id: " + volume.getUuid());

                // Increment resource count during allocation; if actual creation fails,
                // decrement it
                _resourceLimitMgr.incrementResourceCount(volume.getAccountId(), ResourceType.volume, displayVolume);
                _resourceLimitMgr.incrementResourceCount(volume.getAccountId(), ResourceType.primary_storage, displayVolume, new Long(volume.getSize()));
                return volume;
            }
        });
    }

    /**
     * Validates that the requested byte size is within the supported range:
     * at least 1 GiB and at most {@code _maxVolumeSizeInGb} GiB. A size of 0 is
     * allowed to pass through (e.g. when the size will be supplied elsewhere).
     *
     * @param size requested size in bytes
     * @return true when the size is acceptable
     * @throws InvalidParameterValueException when the size is out of range
     */
    public boolean validateVolumeSizeRange(long size) {
        if (size < 0 || (size > 0 && size < (1024 * 1024 * 1024))) {
            throw new InvalidParameterValueException("Please specify a size of at least 1 GB.");
        } else if (size > (_maxVolumeSizeInGb * 1024 * 1024 * 1024)) {
            throw new InvalidParameterValueException("Requested volume size is " + size + ", but the maximum size allowed is " + _maxVolumeSizeInGb + " GB.");
        }

        return true;
    }

    /**
     * Async phase of volume creation: materializes the previously allocated volume,
     * optionally from a snapshot, and attaches it to a VM when one was specified.
     * On failure the allocated resource counts are rolled back.
     */
    @Override
    @DB
    @ActionEvent(eventType = EventTypes.EVENT_VOLUME_CREATE, eventDescription = "creating volume", async = true)
    public VolumeVO createVolume(CreateVolumeCmd cmd) {
        VolumeVO volume = _volsDao.findById(cmd.getEntityId());
        boolean created = true;

        try {
            if (cmd.getSnapshotId() != null) {
                volume = createVolumeFromSnapshot(volume, cmd.getSnapshotId(), cmd.getVirtualMachineId());
                if (volume.getState() != Volume.State.Ready) {
                    created = false;
                }

                // if VM Id is provided, attach the volume to the VM
                if (cmd.getVirtualMachineId() != null) {
                    try {
                        attachVolumeToVM(cmd.getVirtualMachineId(), volume.getId(), volume.getDeviceId());
                    } catch (Exception ex) {
                        StringBuilder message = new StringBuilder("Volume: ");
                        message.append(volume.getUuid());
                        message.append(" created successfully, but failed to attach the newly created volume to VM: ");
                        message.append(cmd.getVirtualMachineId());
                        message.append(" due to error: ");
                        message.append(ex.getMessage());
                        if (s_logger.isDebugEnabled()) {
                            s_logger.debug(message, ex);
                        }
                        throw new CloudRuntimeException(message.toString());
                    }
                }
            }
            return volume;
        } catch (Exception e) {
            created = false;
            // NOTE(review): volFactory.getVolume() is not null-checked before
            // stateTransit(); if the entity cannot be resolved this will NPE and
            // mask the original exception — confirm the factory never returns null here.
            VolumeInfo vol = volFactory.getVolume(cmd.getEntityId());
            vol.stateTransit(Volume.Event.DestroyRequested);
            throw new CloudRuntimeException("Failed to create volume: " + volume.getId(), e);
        } finally {
            if (!created) {
                s_logger.trace("Decrementing volume resource count for account id=" + volume.getAccountId() + " as volume failed to create on the backend");
                _resourceLimitMgr.decrementResourceCount(volume.getAccountId(), ResourceType.volume, cmd.getDisplayVolume());
                _resourceLimitMgr.decrementResourceCount(volume.getAccountId(), ResourceType.primary_storage, cmd.getDisplayVolume(), new Long(volume.getSize()));
            }
        }
    }

    /**
     * Creates a volume from a backed-up snapshot via the volume orchestrator and
     * publishes the corresponding usage event.
     *
     * @param volume     the pre-allocated volume record to materialize
     * @param snapshotId id of the source snapshot
     * @param vmId       optional VM id used as a placement hint (may be null)
     * @return the refreshed volume record after creation
     */
    protected VolumeVO createVolumeFromSnapshot(VolumeVO volume, long snapshotId, Long vmId) throws StorageUnavailableException {
        VolumeInfo createdVolume = null;
        SnapshotVO snapshot = _snapshotDao.findById(snapshotId);
        // NOTE(review): return value discarded and no null check — this call will
        // NPE if the snapshot no longer exists; presumably it acts as an existence
        // probe. Confirm intent.
        snapshot.getVolumeId();

        UserVmVO vm = null;
        if (vmId != null) {
            vm = _userVmDao.findById(vmId);
        }

        // sync old snapshots to region store if necessary

        createdVolume = _volumeMgr.createVolumeFromSnapshot(volume, snapshot, vm);
        VolumeVO volumeVo = _volsDao.findById(createdVolume.getId());
        UsageEventUtils.publishUsageEvent(EventTypes.EVENT_VOLUME_CREATE, createdVolume.getAccountId(), createdVolume.getDataCenterId(), createdVolume.getId(),
                createdVolume.getName(), createdVolume.getDiskOfferingId(), null, createdVolume.getSize(), Volume.class.getName(), createdVolume.getUuid(), volumeVo.isDisplayVolume());

        return volumeVo;
    }

    /**
     * Validates and executes a volume resize, either with the current disk offering
     * (custom size/IOPS) or by switching to a new disk offering, dispatching the
     * actual work through the VM work-job queue when the volume is attached.
     */
    @Override
    @DB
    @ActionEvent(eventType = EventTypes.EVENT_VOLUME_RESIZE, eventDescription = "resizing volume", async = true)
    public VolumeVO
resizeVolume(ResizeVolumeCmd cmd) throws ResourceAllocationException {
        Long newSize;
        Long newMinIops;
        Long newMaxIops;
        Integer newHypervisorSnapshotReserve;
        boolean shrinkOk = cmd.isShrinkOk();

        VolumeVO volume = _volsDao.findById(cmd.getEntityId());
        if (volume == null) {
            throw new InvalidParameterValueException("No such volume");
        }

        // checking if there are any ongoing snapshots on the volume which is to be resized
        List<SnapshotVO> ongoingSnapshots = _snapshotDao.listByStatus(cmd.getId(), Snapshot.State.Creating, Snapshot.State.CreatedOnPrimary, Snapshot.State.BackingUp);
        if (ongoingSnapshots.size() > 0) {
            throw new CloudRuntimeException("There is/are unbacked up snapshot(s) on this volume, resize volume is not permitted, please try again later.");
        }

        /* Does the caller have authority to act on this volume? */
        _accountMgr.checkAccess(CallContext.current().getCallingAccount(), null, true, volume);

        DiskOfferingVO diskOffering = _diskOfferingDao.findById(volume.getDiskOfferingId());
        DiskOfferingVO newDiskOffering = null;

        if (cmd.getNewDiskOfferingId() != null && volume.getDiskOfferingId() != cmd.getNewDiskOfferingId()) {
            newDiskOffering = _diskOfferingDao.findById(cmd.getNewDiskOfferingId());
        }

        /* Only works for KVM/XenServer/VMware (or "Any") for now, and volumes with 'None' since they're just allocated in DB */

        HypervisorType hypervisorType = _volsDao.getHypervisorType(volume.getId());

        if (hypervisorType != HypervisorType.KVM && hypervisorType != HypervisorType.XenServer && hypervisorType != HypervisorType.VMware && hypervisorType != HypervisorType.Any
                && hypervisorType != HypervisorType.None) {
            throw new InvalidParameterValueException("Hypervisor " + hypervisorType + " does not support  rootdisksize override");
        }

        if (volume.getState() != Volume.State.Ready && volume.getState() != Volume.State.Allocated) {
            throw new InvalidParameterValueException("Volume should be in ready or allocated state before attempting a resize. Volume " + volume.getUuid() + " is in state " + volume.getState() + ".");
        }

        // if we are to use the existing disk offering
        if (newDiskOffering == null) {
            newSize = cmd.getSize();
            newHypervisorSnapshotReserve = volume.getHypervisorSnapshotReserve();

            // if the caller is looking to change the size of the volume
            if (newSize != null) {
                if (!diskOffering.isCustomized() && !volume.getVolumeType().equals(Volume.Type.ROOT)) {
                    throw new InvalidParameterValueException("To change a volume's size without providing a new disk offering, its current disk offering must be "
                            + "customizable or it must be a root volume (if providing a disk offering, make sure it is different from the current disk offering).");
                }

                // convert from GiB to bytes (caller supplies GiB; left-shift by 30 multiplies by 2^30)
                newSize = newSize << 30;
            } else {
                // no parameter provided; just use the original size of the volume
                newSize = volume.getSize();
            }

            newMinIops = cmd.getMinIops();

            if (newMinIops != null) {
                if (!volume.getVolumeType().equals(Volume.Type.ROOT) && (diskOffering.isCustomizedIops() == null || !diskOffering.isCustomizedIops())) {
                    throw new InvalidParameterValueException("The current disk offering does not support customization of the 'Min IOPS' parameter.");
                }
            } else {
                // no parameter provided; just use the original min IOPS of the volume
                newMinIops = volume.getMinIops();
            }

            newMaxIops = cmd.getMaxIops();

            if (newMaxIops != null) {
                if (!volume.getVolumeType().equals(Volume.Type.ROOT) && (diskOffering.isCustomizedIops() == null || !diskOffering.isCustomizedIops())) {
                    throw new InvalidParameterValueException("The current disk offering does not support customization of the 'Max IOPS' parameter.");
                }
            } else {
                // no parameter provided; just use the original max IOPS of the volume
                newMaxIops = volume.getMaxIops();
            }

            validateIops(newMinIops, newMaxIops);
        } else {
            // Switching to a new disk offering: validate compatibility with the old one.
            if (newDiskOffering.getRemoved() != null) {
                throw new InvalidParameterValueException("Requested disk offering has been removed.");
            }

            if (!DiskOfferingVO.Type.Disk.equals(newDiskOffering.getType())) {
                throw new InvalidParameterValueException("Requested disk offering type is invalid.");
            }

            if (diskOffering.getTags() != null) {
                if (!StringUtils.areTagsEqual(diskOffering.getTags(), newDiskOffering.getTags())) {
                    throw new InvalidParameterValueException("The tags on the new and old disk offerings must match.");
                }
            } else if (newDiskOffering.getTags() != null) {
                throw new InvalidParameterValueException("There are no tags on the current disk offering. The new disk offering needs to have no tags, as well.");
            }

            _configMgr.checkDiskOfferingAccess(_accountMgr.getActiveAccountById(volume.getAccountId()), newDiskOffering, _dcDao.findById(volume.getDataCenterId()));

            if (newDiskOffering.isCustomized()) {
                newSize = cmd.getSize();

                if (newSize == null) {
                    throw new InvalidParameterValueException("The new disk offering requires that a size be specified.");
                }

                // convert from GiB to bytes
                newSize = newSize << 30;
            } else {
                if (cmd.getSize() != null) {
                    throw new InvalidParameterValueException("You cannnot pass in a custom disk size to a non-custom disk offering.");
                }

                newSize = newDiskOffering.getDiskSize();
            }

            if (!volume.getSize().equals(newSize) && !volume.getVolumeType().equals(Volume.Type.DATADISK)) {
                throw new InvalidParameterValueException("Only data volumes can be resized via a new disk offering.");
            }

            if (newDiskOffering.isCustomizedIops() != null && newDiskOffering.isCustomizedIops()) {
                newMinIops = cmd.getMinIops() != null ? cmd.getMinIops() : volume.getMinIops();
                newMaxIops = cmd.getMaxIops() != null ? cmd.getMaxIops() : volume.getMaxIops();

                validateIops(newMinIops, newMaxIops);
            } else {
                newMinIops = newDiskOffering.getMinIops();
                newMaxIops = newDiskOffering.getMaxIops();
            }

            // if the hypervisor snapshot reserve value is null, it must remain null (currently only KVM uses null and null is all KVM uses for a value here)
            newHypervisorSnapshotReserve = volume.getHypervisorSnapshotReserve() != null ? newDiskOffering.getHypervisorSnapshotReserve() : null;
        }

        long currentSize = volume.getSize();

        // if the caller is looking to change the size of the volume
        if (currentSize != newSize) {
            if (volume.getInstanceId() != null) {
                // Check that VM to which this volume is attached does not have VM snapshots
                if (_vmSnapshotDao.findByVm(volume.getInstanceId()).size() > 0) {
                    throw new InvalidParameterValueException("A volume that is attached to a VM with any VM snapshots cannot be resized.");
                }
            }

            if (!validateVolumeSizeRange(newSize)) {
                throw new InvalidParameterValueException("Requested size out of range");
            }

            Long storagePoolId = volume.getPoolId();

            if (storagePoolId != null) {
                StoragePoolVO storagePoolVO = _storagePoolDao.findById(storagePoolId);

                if (storagePoolVO.isManaged()) {
                    Long instanceId = volume.getInstanceId();

                    if (instanceId != null) {
                        VMInstanceVO vmInstanceVO = _vmInstanceDao.findById(instanceId);

                        if (vmInstanceVO.getHypervisorType() == HypervisorType.KVM && vmInstanceVO.getState() != State.Stopped) {
                            throw new CloudRuntimeException("This kind of KVM disk cannot be resized while it is connected to a VM that's not in the Stopped state.");
                        }
                    }
                }
            }

            /*
             * Let's make certain they (think they) know what they're doing if they
             * want to shrink by forcing them to provide the shrinkok parameter.
             * This will be checked again at the hypervisor level where we can see
             * the actual disk size.
             */
            if (currentSize > newSize && !shrinkOk) {
                throw new InvalidParameterValueException("Going from existing size of " + currentSize + " to size of " + newSize + " would shrink the volume."
                        + "Need to sign off by supplying the shrinkok parameter with value of true.");
            }

            if (newSize > currentSize) {
                /* Check resource limit for this account on primary storage resource */
                _resourceLimitMgr.checkResourceLimit(_accountMgr.getAccount(volume.getAccountId()), ResourceType.primary_storage, volume.isDisplayVolume(),
                        new Long(newSize - currentSize).longValue());
            }
        }

        // Note: The storage plug-in in question should perform validation on the IOPS to check if a sufficient number of IOPS is available to perform
        // the requested change

        /* If this volume has never been beyond allocated state, short circuit everything and simply update the database. */
        // We need to publish this event to usage_volume table
        if (volume.getState() == Volume.State.Allocated) {
            s_logger.debug("Volume is in the allocated state, but has never been created. Simply updating database with new size and IOPS.");

            volume.setSize(newSize);
            volume.setMinIops(newMinIops);
            volume.setMaxIops(newMaxIops);
            volume.setHypervisorSnapshotReserve(newHypervisorSnapshotReserve);

            if (newDiskOffering != null) {
                volume.setDiskOfferingId(cmd.getNewDiskOfferingId());
            }

            _volsDao.update(volume.getId(), volume);
            UsageEventUtils.publishUsageEvent(EventTypes.EVENT_VOLUME_RESIZE, volume.getAccountId(), volume.getDataCenterId(), volume.getId(), volume.getName(),
                    volume.getDiskOfferingId(), volume.getTemplateId(), volume.getSize(), Volume.class.getName(), volume.getUuid());
            return volume;
        }

        UserVmVO userVm = _userVmDao.findById(volume.getInstanceId());

        if (userVm != null) {
            if (volume.getVolumeType().equals(Volume.Type.ROOT) && userVm.getPowerState() != VirtualMachine.PowerState.PowerOff && hypervisorType == HypervisorType.VMware) {
                s_logger.error(" For ROOT volume resize VM should be in Power Off state.");
                throw new InvalidParameterValueException("VM current state is : " + userVm.getPowerState() + ". But VM should be in " + VirtualMachine.PowerState.PowerOff + " state.");
            }
            // serialize VM operation
            AsyncJobExecutionContext jobContext = AsyncJobExecutionContext.getCurrentExecutionContext();
            if (jobContext.isJobDispatchedBy(VmWorkConstants.VM_WORK_JOB_DISPATCHER)) {
                // avoid re-entrance

                VmWorkJobVO placeHolder = null;
                placeHolder = createPlaceHolderWork(userVm.getId());
                try {
                    return orchestrateResizeVolume(volume.getId(), currentSize, newSize, newMinIops, newMaxIops, newHypervisorSnapshotReserve,
                            newDiskOffering != null ? cmd.getNewDiskOfferingId() : null, shrinkOk);
                } finally {
                    _workJobDao.expunge(placeHolder.getId());
                }
            } else {
                // Dispatch through the job queue and unwrap whatever the job produced.
                Outcome<Volume> outcome = resizeVolumeThroughJobQueue(userVm.getId(), volume.getId(), currentSize, newSize, newMinIops, newMaxIops, newHypervisorSnapshotReserve,
                        newDiskOffering != null ? cmd.getNewDiskOfferingId() : null, shrinkOk);

                try {
                    outcome.get();
                } catch (InterruptedException e) {
                    throw new RuntimeException("Operation was interrupted", e);
                } catch (java.util.concurrent.ExecutionException e) {
                    throw new RuntimeException("Execution exception", e);
                }

                Object jobResult = _jobMgr.unmarshallResultObject(outcome.getJob());

                if (jobResult != null) {
                    if (jobResult instanceof ConcurrentOperationException) {
                        throw (ConcurrentOperationException)jobResult;
                    } else if (jobResult instanceof ResourceAllocationException) {
                        throw (ResourceAllocationException)jobResult;
                    } else if (jobResult instanceof RuntimeException) {
                        throw (RuntimeException)jobResult;
                    } else if (jobResult instanceof Throwable) {
                        throw new RuntimeException("Unexpected exception", (Throwable)jobResult);
                    } else if (jobResult instanceof Long) {
                        return _volsDao.findById((Long)jobResult);
                    }
                }
                return volume;
            }
        }
        return orchestrateResizeVolume(volume.getId(), currentSize, newSize, newMinIops, newMaxIops, newHypervisorSnapshotReserve, newDiskOffering != null ?
cmd.getNewDiskOfferingId() : null, shrinkOk);
    }

    /**
     * Ensures min/max IOPS are either both provided or both absent, and that
     * min does not exceed max.
     *
     * @throws InvalidParameterValueException when the pair is inconsistent
     */
    private void validateIops(Long minIops, Long maxIops) {
        if ((minIops == null && maxIops != null) || (minIops != null && maxIops == null)) {
            throw new InvalidParameterValueException("Either 'miniops' and 'maxiops' must both be provided or neither must be provided.");
        }

        if (minIops != null && maxIops != null) {
            if (minIops > maxIops) {
                throw new InvalidParameterValueException("The 'miniops' parameter must be less than or equal to the 'maxiops' parameter.");
            }
        }
    }

    /**
     * Performs the actual resize: pre-flight hypervisor/host checks, dispatch to
     * the storage plug-in, optional hypervisor-side resize for managed storage,
     * DB update, resource accounting and usage event publication.
     */
    private VolumeVO orchestrateResizeVolume(long volumeId, long currentSize, long newSize, Long newMinIops, Long newMaxIops, Integer newHypervisorSnapshotReserve, Long newDiskOfferingId, boolean shrinkOk) {
        final VolumeVO volume = _volsDao.findById(volumeId);
        UserVmVO userVm = _userVmDao.findById(volume.getInstanceId());
        StoragePoolVO storagePool = _storagePoolDao.findById(volume.getPoolId());
        boolean isManaged = storagePool.isManaged();

        if (!storageMgr.storagePoolHasEnoughSpaceForResize(storagePool, currentSize, newSize)) {
            throw new CloudRuntimeException("Storage pool " + storagePool.getName() + " does not have enough space to resize volume " + volume.getName());
        }

        /*
         * get a list of hosts to send the commands to, try the system the
         * associated vm is running on first, then the last known place it ran.
         * If not attached to a userVm, we pass 'none' and resizevolume.sh is ok
         * with that since it only needs the vm name to live resize
         */
        long[] hosts = null;
        String instanceName = "none";

        if (userVm != null) {
            instanceName = userVm.getInstanceName();
            if (userVm.getHostId() != null) {
                hosts = new long[] {userVm.getHostId()};
            } else if (userVm.getLastHostId() != null) {
                hosts = new long[] {userVm.getLastHostId()};
            }

            final String errorMsg = "The VM must be stopped or the disk detached in order to resize with the XenServer Hypervisor.";

            if (storagePool.isManaged() && storagePool.getHypervisor() == HypervisorType.Any && hosts != null && hosts.length > 0) {
                HostVO host = _hostDao.findById(hosts[0]);

                if (currentSize != newSize && host.getHypervisorType() == HypervisorType.XenServer && !userVm.getState().equals(State.Stopped)) {
                    throw new InvalidParameterValueException(errorMsg);
                }
            }

            /* Xen only works offline, SR does not support VDI.resizeOnline */
            if (currentSize != newSize && _volsDao.getHypervisorType(volume.getId()) == HypervisorType.XenServer && !userVm.getState().equals(State.Stopped)) {
                throw new InvalidParameterValueException(errorMsg);
            }

            /* Do not resize volume of running vm on KVM host if host is not Up or not Enabled */
            if (currentSize != newSize && userVm.getState() == State.Running && userVm.getHypervisorType() == HypervisorType.KVM) {
                if (userVm.getHostId() == null) {
                    throw new InvalidParameterValueException("Cannot find the hostId of running vm " + userVm.getUuid());
                }
                HostVO host = _hostDao.findById(userVm.getHostId());
                if (host == null) {
                    throw new InvalidParameterValueException("The KVM host where vm is running does not exist");
                } else if (host.getStatus() != Status.Up) {
                    throw new InvalidParameterValueException("The KVM host where vm is running is not Up");
                } else if (host.getResourceState() != ResourceState.Enabled) {
                    throw new InvalidParameterValueException("The KVM host where vm is running is not Enabled");
                }
            }
        }

        ResizeVolumePayload payload = new ResizeVolumePayload(newSize, newMinIops, newMaxIops, newHypervisorSnapshotReserve, shrinkOk, instanceName, hosts, isManaged);

        try {
            VolumeInfo vol = volFactory.getVolume(volume.getId());
            vol.addPayload(payload);

            // this call to resize has a different impact depending on whether the
            // underlying primary storage is managed or not
            // if managed, this is the chance for the plug-in to change the size and/or IOPS values
            // if not managed, this is the chance for the plug-in to talk to the hypervisor layer
            // to change the size of the disk
            AsyncCallFuture<VolumeApiResult> future = volService.resize(vol);
            VolumeApiResult result = future.get();

            if (result.isFailed()) {
                s_logger.warn("Failed to resize the volume " + volume);
                String details = "";
                if (result.getResult() != null && !result.getResult().isEmpty()) {
                    details = result.getResult();
                }
                throw new CloudRuntimeException(details);
            }

            // managed storage is designed in such a way that the storage plug-in does not
            // talk to the hypervisor layer; as such, if the storage is managed and the
            // current and new sizes are different, then CloudStack (i.e. not a storage plug-in)
            // needs to tell the hypervisor to resize the disk
            if (storagePool.isManaged() && currentSize != newSize) {
                if (hosts != null && hosts.length > 0) {
                    HostVO hostVO = _hostDao.findById(hosts[0]);

                    if (hostVO.getHypervisorType() != HypervisorType.KVM) {
                        volService.resizeVolumeOnHypervisor(volumeId, newSize, hosts[0], instanceName);
                    }
                }
            }

            if (newDiskOfferingId != null) {
                volume.setDiskOfferingId(newDiskOfferingId);
            }

            if (currentSize != newSize) {
                volume.setSize(newSize);
            }

            _volsDao.update(volume.getId(), volume);

            /* Update resource count for the account on primary storage resource */
            if (!shrinkOk) {
                _resourceLimitMgr.incrementResourceCount(volume.getAccountId(), ResourceType.primary_storage, volume.isDisplayVolume(), newSize - currentSize);
            } else {
                _resourceLimitMgr.decrementResourceCount(volume.getAccountId(), ResourceType.primary_storage, volume.isDisplayVolume(), currentSize - newSize);
            }

            UsageEventUtils.publishUsageEvent(EventTypes.EVENT_VOLUME_RESIZE, volume.getAccountId(), volume.getDataCenterId(), volume.getId(), volume.getName(),
                    volume.getDiskOfferingId(), volume.getTemplateId(), volume.getSize(), Volume.class.getName(), volume.getUuid());

            return volume;
        } catch (Exception e) {
            throw new CloudRuntimeException("Exception caught during resize volume operation of volume UUID: " + volume.getUuid(), e);
        }
    }

    @DB
    @Override
    @ActionEvent(eventType = EventTypes.EVENT_VOLUME_DELETE, eventDescription = "deleting volume")
    /**
     * Executes the removal of the volume. If the volume is only allocated we do not try to remove it from primary and secondary storage.
     * Otherwise, after the removal in the database, we will try to remove the volume from both primary and secondary storage.
     */
    public boolean deleteVolume(long volumeId, Account caller) throws ConcurrentOperationException {
        // Delegates to destroyVolume with expunge=true and forceExpunge=true, so the
        // volume is removed from the database and from primary/secondary storage.
        Volume volume = destroyVolume(volumeId, caller, true, true);
        return (volume != null);
    }

    /**
     * Removes the volume from primary storage, then from secondary storage, and finally
     * drops any image-cache copies. Returns false (after logging a warning) when one of
     * the asynchronous expunge operations is interrupted or fails.
     */
    private boolean deleteVolumeFromStorage(VolumeVO volume, Account caller) throws ConcurrentOperationException {
        try {
            expungeVolumesInPrimaryStorageIfNeeded(volume);
            expungeVolumesInSecondaryStorageIfNeeded(volume);
            cleanVolumesCache(volume);
            return true;
        } catch (InterruptedException | ExecutionException e) {
            // Best-effort: the caller treats a failed expunge as a soft failure.
            s_logger.warn("Failed to expunge volume: " + volume.getUuid(), e);
            return false;
        }
    }

    /**
     * Retrieves and validates the volume for the {@link #deleteVolume(long, Account)} method. The following validations are executed.
     * <ul>
     *  <li> if no volume is found in the database, we throw an {@link InvalidParameterValueException};
     *  <li> if there are snapshot operations on the volume we cannot delete it. Therefore, an {@link InvalidParameterValueException} is thrown;
     *  <li> if the volume is still attached to a VM we throw an {@link InvalidParameterValueException};
     *  <li> if volume state is in {@link Volume.State#UploadOp}, we check the {@link VolumeDataStoreVO}. Then, if the {@link VolumeDataStoreVO} for the given volume has download status of {@link VMTemplateStorageResourceAssoc.Status#DOWNLOAD_IN_PROGRESS}, an exception is thrown;
     *  <li> if the volume state is in {@link Volume.State#NotUploaded} or if the state is {@link Volume.State#UploadInProgress}, an {@link InvalidParameterValueException} is thrown;
     *  <li> we also check if the user has access to the given volume using {@link AccountManager#checkAccess(Account, org.apache.cloudstack.acl.SecurityChecker.AccessType, boolean, String)}.
     * </ul>
     *
     * After all validations we return the volume object.
     */
    protected VolumeVO retrieveAndValidateVolume(long volumeId, Account caller) {
        VolumeVO volume = _volsDao.findById(volumeId);
        if (volume == null) {
            throw new InvalidParameterValueException("Unable to find volume with ID: " + volumeId);
        }

        if (!_snapshotMgr.canOperateOnVolume(volume)) {
            throw new InvalidParameterValueException("There are snapshot operations in progress on the volume, unable to delete it");
        }

        // An attached volume only passes this check when it is already Expunged.
        if (volume.getInstanceId() != null && volume.getState() != Volume.State.Expunged) {
            throw new InvalidParameterValueException("Please specify a volume that is not attached to any VM.");
        }

        if (volume.getState() == Volume.State.UploadOp) {
            VolumeDataStoreVO volumeStore = _volumeStoreDao.findByVolume(volume.getId());
            if (volumeStore.getDownloadState() == VMTemplateStorageResourceAssoc.Status.DOWNLOAD_IN_PROGRESS) {
                throw new InvalidParameterValueException("Please specify a volume that is not uploading");
            }
        }

        if (volume.getState() == Volume.State.NotUploaded || volume.getState() == Volume.State.UploadInProgress) {
            throw new InvalidParameterValueException("The volume is either getting uploaded or it may be initiated shortly, please wait for it to be completed");
        }

        _accountMgr.checkAccess(caller, null, true, volume);
        return volume;
    }

    /**
     * Destroy the volume if possible and then decrement the following resource types.
     * <ul>
     *  <li> {@link ResourceType#volume};
     *  <li> {@link ResourceType#primary_storage}
     * </ul>
     *
     * A volume can be destroyed if it is not in any of the following states.
     * <ul>
     *  <li> {@value Volume.State#Destroy};
     *  <li> {@value Volume.State#Expunging};
     *  <li> {@value Volume.State#Expunged}.
     * </ul>
     *
     * The volume is destroyed via {@link VolumeService#destroyVolume(long)} method.
     */
    protected void destroyVolumeIfPossible(VolumeVO volume) {
        // Allocated and Uploaded volumes are also skipped: they have no physical copy
        // on primary storage to destroy.
        if (volume.getState() != Volume.State.Destroy && volume.getState() != Volume.State.Expunging && volume.getState() != Volume.State.Expunged
                && volume.getState() != Volume.State.Allocated && volume.getState() != Volume.State.Uploaded) {
            volService.destroyVolume(volume.getId());
        }
    }

    /**
     * We will check if the given volume is in the primary storage. If it is, we will execute an asynchronous call to delete it there.
     * If the volume is not in the primary storage, we do nothing here.
     */
    protected void expungeVolumesInPrimaryStorageIfNeeded(VolumeVO volume) throws InterruptedException, ExecutionException {
        VolumeInfo volOnPrimary = volFactory.getVolume(volume.getId(), DataStoreRole.Primary);
        if (volOnPrimary != null) {
            s_logger.info("Expunging volume " + volume.getId() + " from primary data store");
            AsyncCallFuture<VolumeApiResult> future = volService.expungeVolumeAsync(volOnPrimary);
            // Block until the asynchronous expunge completes (may raise ExecutionException).
            future.get();
        }
    }

    /**
     * We will check if the given volume is in the secondary storage. If the volume is not in the secondary storage, we do nothing here.
     * If it is, we will execute an asynchronous call to delete it there. Then, we decrement the {@link ResourceType#secondary_storage} for the account that owns the volume.
     */
    protected void expungeVolumesInSecondaryStorageIfNeeded(VolumeVO volume) throws InterruptedException, ExecutionException {
        VolumeInfo volOnSecondary = volFactory.getVolume(volume.getId(), DataStoreRole.Image);
        if (volOnSecondary != null) {
            s_logger.info("Expunging volume " + volume.getId() + " from secondary data store");
            AsyncCallFuture<VolumeApiResult> future2 = volService.expungeVolumeAsync(volOnSecondary);
            future2.get();
            _resourceLimitMgr.decrementResourceCount(volOnSecondary.getAccountId(), ResourceType.secondary_storage, volOnSecondary.getSize());
        }
    }

    /**
     * Clean volumes cache entries (if they exist).
*/ protected void cleanVolumesCache(VolumeVO volume) { List<VolumeInfo> cacheVols = volFactory.listVolumeOnCache(volume.getId()); if (CollectionUtils.isEmpty(cacheVols)) { return; } for (VolumeInfo volOnCache : cacheVols) { s_logger.info("Delete volume from image cache store: " + volOnCache.getDataStore().getName()); volOnCache.delete(); } } protected boolean stateTransitTo(Volume vol, Volume.Event event) throws NoTransitionException { return _volStateMachine.transitTo(vol, event, null, _volsDao); } @Override @ActionEvent(eventType = EventTypes.EVENT_VOLUME_DESTROY, eventDescription = "destroying a volume") public Volume destroyVolume(long volumeId, Account caller, boolean expunge, boolean forceExpunge) { VolumeVO volume = retrieveAndValidateVolume(volumeId, caller); if (expunge) { // When trying to expunge, permission is denied when the caller is not an admin and the AllowUserExpungeRecoverVolume is false for the caller. final Long userId = caller.getAccountId(); if (!forceExpunge && !_accountMgr.isAdmin(userId) && !AllowUserExpungeRecoverVolume.valueIn(userId)) { throw new PermissionDeniedException("Expunging a volume can only be done by an Admin. 
Or when the allow.user.expunge.recover.volume key is set."); } } else if (volume.getState() == Volume.State.Allocated || volume.getState() == Volume.State.Uploaded) { throw new InvalidParameterValueException("The volume in Allocated/Uploaded state can only be expunged not destroyed/recovered"); } destroyVolumeIfPossible(volume); if (expunge) { // Mark volume as removed if volume has not been created on primary or secondary if (volume.getState() == Volume.State.Allocated) { _volsDao.remove(volume.getId()); try { stateTransitTo(volume, Volume.Event.DestroyRequested); } catch (NoTransitionException e) { s_logger.debug("Failed to destroy volume" + volume.getId(), e); return null; } _resourceLimitMgr.decrementResourceCount(volume.getAccountId(), ResourceType.volume, volume.isDisplay()); _resourceLimitMgr.decrementResourceCount(volume.getAccountId(), ResourceType.primary_storage, volume.isDisplay(), new Long(volume.getSize())); return volume; } if (!deleteVolumeFromStorage(volume, caller)) { s_logger.warn("Failed to expunge volume: " + volumeId); return null; } } return volume; } @Override @ActionEvent(eventType = EventTypes.EVENT_VOLUME_RECOVER, eventDescription = "recovering a volume in Destroy state") public Volume recoverVolume(long volumeId) { Account caller = CallContext.current().getCallingAccount(); final Long userId = caller.getAccountId(); // Verify input parameters final VolumeVO volume = _volsDao.findById(volumeId); if (volume == null) { throw new InvalidParameterValueException("Unable to find a volume with id " + volume); } // When trying to expunge, permission is denied when the caller is not an admin and the AllowUserExpungeRecoverVolume is false for the caller. if (!_accountMgr.isAdmin(userId) && !AllowUserExpungeRecoverVolume.valueIn(userId)) { throw new PermissionDeniedException("Recovering a volume can only be done by an Admin. 
Or when the allow.user.expunge.recover.volume key is set."); } _accountMgr.checkAccess(caller, null, true, volume); if (volume.getState() != Volume.State.Destroy) { throw new InvalidParameterValueException("Please specify a volume in Destroy state."); } try { _resourceLimitMgr.checkResourceLimit(_accountMgr.getAccount(volume.getAccountId()), ResourceType.primary_storage, volume.isDisplayVolume(), volume.getSize()); } catch (ResourceAllocationException e) { s_logger.error("primary storage resource limit check failed", e); throw new InvalidParameterValueException(e.getMessage()); } try { stateTransitTo(volume, Volume.Event.RecoverRequested); } catch (NoTransitionException e) { s_logger.debug("Failed to recover volume" + volume.getId(), e); throw new CloudRuntimeException("Failed to recover volume" + volume.getId(), e); } _resourceLimitMgr.incrementResourceCount(volume.getAccountId(), ResourceType.volume, volume.isDisplay()); _resourceLimitMgr.incrementResourceCount(volume.getAccountId(), ResourceType.primary_storage, volume.isDisplay(), new Long(volume.getSize())); return volume; } @Override @ActionEvent(eventType = EventTypes.EVENT_VOLUME_ATTACH, eventDescription = "attaching volume", async = true) public Volume attachVolumeToVM(AttachVolumeCmd command) { return attachVolumeToVM(command.getVirtualMachineId(), command.getId(), command.getDeviceId()); } private Volume orchestrateAttachVolumeToVM(Long vmId, Long volumeId, Long deviceId) { VolumeInfo volumeToAttach = volFactory.getVolume(volumeId); if (volumeToAttach.isAttachedVM()) { throw new CloudRuntimeException("This volume is already attached to a VM."); } UserVmVO vm = _userVmDao.findById(vmId); VolumeVO exstingVolumeOfVm = null; List<VolumeVO> rootVolumesOfVm = _volsDao.findByInstanceAndType(vmId, Volume.Type.ROOT); if (rootVolumesOfVm.size() > 1) { throw new CloudRuntimeException("The VM " + vm.getHostName() + " has more than one ROOT volume and is in an invalid state."); } else { if 
(!rootVolumesOfVm.isEmpty()) { exstingVolumeOfVm = rootVolumesOfVm.get(0); } else { // locate data volume of the vm List<VolumeVO> diskVolumesOfVm = _volsDao.findByInstanceAndType(vmId, Volume.Type.DATADISK); for (VolumeVO diskVolume : diskVolumesOfVm) { if (diskVolume.getState() != Volume.State.Allocated) { exstingVolumeOfVm = diskVolume; break; } } } } HypervisorType rootDiskHyperType = vm.getHypervisorType(); HypervisorType volumeToAttachHyperType = _volsDao.getHypervisorType(volumeToAttach.getId()); VolumeInfo newVolumeOnPrimaryStorage = volumeToAttach; //don't create volume on primary storage if its being attached to the vm which Root's volume hasn't been created yet StoragePoolVO destPrimaryStorage = null; if (exstingVolumeOfVm != null && !exstingVolumeOfVm.getState().equals(Volume.State.Allocated)) { destPrimaryStorage = _storagePoolDao.findById(exstingVolumeOfVm.getPoolId()); } boolean volumeOnSecondary = volumeToAttach.getState() == Volume.State.Uploaded; if (destPrimaryStorage != null && (volumeToAttach.getState() == Volume.State.Allocated || volumeOnSecondary)) { try { newVolumeOnPrimaryStorage = _volumeMgr.createVolumeOnPrimaryStorage(vm, volumeToAttach, rootDiskHyperType, destPrimaryStorage); } catch (NoTransitionException e) { s_logger.debug("Failed to create volume on primary storage", e); throw new CloudRuntimeException("Failed to create volume on primary storage", e); } } // reload the volume from db newVolumeOnPrimaryStorage = volFactory.getVolume(newVolumeOnPrimaryStorage.getId()); boolean moveVolumeNeeded = needMoveVolume(exstingVolumeOfVm, newVolumeOnPrimaryStorage); if (moveVolumeNeeded) { PrimaryDataStoreInfo primaryStore = (PrimaryDataStoreInfo)newVolumeOnPrimaryStorage.getDataStore(); if (primaryStore.isLocal()) { throw new CloudRuntimeException( "Failed to attach local data volume " + volumeToAttach.getName() + " to VM " + vm.getDisplayName() + " as migration of local data volume is not allowed"); } StoragePoolVO vmRootVolumePool = 
_storagePoolDao.findById(exstingVolumeOfVm.getPoolId()); try { newVolumeOnPrimaryStorage = _volumeMgr.moveVolume(newVolumeOnPrimaryStorage, vmRootVolumePool.getDataCenterId(), vmRootVolumePool.getPodId(), vmRootVolumePool.getClusterId(), volumeToAttachHyperType); } catch (ConcurrentOperationException e) { s_logger.debug("move volume failed", e); throw new CloudRuntimeException("move volume failed", e); } catch (StorageUnavailableException e) { s_logger.debug("move volume failed", e); throw new CloudRuntimeException("move volume failed", e); } } VolumeVO newVol = _volsDao.findById(newVolumeOnPrimaryStorage.getId()); // Getting the fresh vm object in case of volume migration to check the current state of VM if (moveVolumeNeeded || volumeOnSecondary) { vm = _userVmDao.findById(vmId); if (vm == null) { throw new InvalidParameterValueException("VM not found."); } } newVol = sendAttachVolumeCommand(vm, newVol, deviceId); return newVol; } public Volume attachVolumeToVM(Long vmId, Long volumeId, Long deviceId) { Account caller = CallContext.current().getCallingAccount(); // Check that the volume ID is valid VolumeInfo volumeToAttach = volFactory.getVolume(volumeId); // Check that the volume is a data volume if (volumeToAttach == null || !(volumeToAttach.getVolumeType() == Volume.Type.DATADISK || volumeToAttach.getVolumeType() == Volume.Type.ROOT)) { throw new InvalidParameterValueException("Please specify a volume with the valid type: " + Volume.Type.ROOT.toString() + " or " + Volume.Type.DATADISK.toString()); } // Check that the volume is not currently attached to any VM if (volumeToAttach.getInstanceId() != null) { throw new InvalidParameterValueException("Please specify a volume that is not attached to any VM."); } // Check that the volume is not destroyed if (volumeToAttach.getState() == Volume.State.Destroy) { throw new InvalidParameterValueException("Please specify a volume that is not destroyed."); } // Check that the virtual machine ID is valid and it's a user vm 
UserVmVO vm = _userVmDao.findById(vmId); if (vm == null || vm.getType() != VirtualMachine.Type.User) { throw new InvalidParameterValueException("Please specify a valid User VM."); } // Check that the VM is in the correct state if (vm.getState() != State.Running && vm.getState() != State.Stopped) { throw new InvalidParameterValueException("Please specify a VM that is either running or stopped."); } // Check that the VM and the volume are in the same zone if (vm.getDataCenterId() != volumeToAttach.getDataCenterId()) { throw new InvalidParameterValueException("Please specify a VM that is in the same zone as the volume."); } // Check that the device ID is valid if (deviceId != null) { // validate ROOT volume type if (deviceId.longValue() == 0) { validateRootVolumeDetachAttach(_volsDao.findById(volumeToAttach.getId()), vm); // vm shouldn't have any volume with deviceId 0 if (!_volsDao.findByInstanceAndDeviceId(vm.getId(), 0).isEmpty()) { throw new InvalidParameterValueException("Vm already has root volume attached to it"); } // volume can't be in Uploaded state if (volumeToAttach.getState() == Volume.State.Uploaded) { throw new InvalidParameterValueException("No support for Root volume attach in state " + Volume.State.Uploaded); } } } // Check that the number of data volumes attached to VM is less than // that supported by hypervisor if (deviceId == null || deviceId.longValue() != 0) { List<VolumeVO> existingDataVolumes = _volsDao.findByInstanceAndType(vmId, Volume.Type.DATADISK); int maxAttachableDataVolumesSupported = getMaxDataVolumesSupported(vm); if (existingDataVolumes.size() >= maxAttachableDataVolumesSupported) { throw new InvalidParameterValueException( "The specified VM already has the maximum number of data disks (" + maxAttachableDataVolumesSupported + ") attached. 
Please specify another VM."); } } // If local storage is disabled then attaching a volume with local disk // offering not allowed DataCenterVO dataCenter = _dcDao.findById(volumeToAttach.getDataCenterId()); if (!dataCenter.isLocalStorageEnabled()) { DiskOfferingVO diskOffering = _diskOfferingDao.findById(volumeToAttach.getDiskOfferingId()); if (diskOffering.isUseLocalStorage()) { throw new InvalidParameterValueException("Zone is not configured to use local storage but volume's disk offering " + diskOffering.getName() + " uses it"); } } // if target VM has associated VM snapshots List<VMSnapshotVO> vmSnapshots = _vmSnapshotDao.findByVm(vmId); if (vmSnapshots.size() > 0) { throw new InvalidParameterValueException("Unable to attach volume, please specify a VM that does not have VM snapshots"); } // if target VM has backups if (vm.getBackupOfferingId() != null || vm.getBackupVolumeList().size() > 0) { throw new InvalidParameterValueException("Unable to attach volume, please specify a VM that does not have any backups"); } // permission check _accountMgr.checkAccess(caller, null, true, volumeToAttach, vm); if (!(Volume.State.Allocated.equals(volumeToAttach.getState()) || Volume.State.Ready.equals(volumeToAttach.getState()) || Volume.State.Uploaded.equals(volumeToAttach.getState()))) { throw new InvalidParameterValueException("Volume state must be in Allocated, Ready or in Uploaded state"); } Account owner = _accountDao.findById(volumeToAttach.getAccountId()); if (!(volumeToAttach.getState() == Volume.State.Allocated || volumeToAttach.getState() == Volume.State.Ready)) { try { _resourceLimitMgr.checkResourceLimit(owner, ResourceType.primary_storage, volumeToAttach.getSize()); } catch (ResourceAllocationException e) { s_logger.error("primary storage resource limit check failed", e); throw new InvalidParameterValueException(e.getMessage()); } } HypervisorType rootDiskHyperType = vm.getHypervisorType(); HypervisorType volumeToAttachHyperType = 
_volsDao.getHypervisorType(volumeToAttach.getId()); StoragePoolVO volumeToAttachStoragePool = _storagePoolDao.findById(volumeToAttach.getPoolId()); // managed storage can be used for different types of hypervisors // only perform this check if the volume's storage pool is not null and not managed if (volumeToAttachStoragePool != null && !volumeToAttachStoragePool.isManaged()) { if (volumeToAttachHyperType != HypervisorType.None && rootDiskHyperType != volumeToAttachHyperType) { throw new InvalidParameterValueException("Can't attach a volume created by: " + volumeToAttachHyperType + " to a " + rootDiskHyperType + " vm"); } } AsyncJobExecutionContext asyncExecutionContext = AsyncJobExecutionContext.getCurrentExecutionContext(); if (asyncExecutionContext != null) { AsyncJob job = asyncExecutionContext.getJob(); if (s_logger.isInfoEnabled()) { s_logger.info("Trying to attaching volume " + volumeId + " to vm instance:" + vm.getId() + ", update async job-" + job.getId() + " progress status"); } _jobMgr.updateAsyncJobAttachment(job.getId(), "Volume", volumeId); } AsyncJobExecutionContext jobContext = AsyncJobExecutionContext.getCurrentExecutionContext(); if (jobContext.isJobDispatchedBy(VmWorkConstants.VM_WORK_JOB_DISPATCHER)) { // avoid re-entrance VmWorkJobVO placeHolder = null; placeHolder = createPlaceHolderWork(vmId); try { return orchestrateAttachVolumeToVM(vmId, volumeId, deviceId); } finally { _workJobDao.expunge(placeHolder.getId()); } } else { Outcome<Volume> outcome = attachVolumeToVmThroughJobQueue(vmId, volumeId, deviceId); Volume vol = null; try { outcome.get(); } catch (InterruptedException e) { throw new RuntimeException("Operation is interrupted", e); } catch (java.util.concurrent.ExecutionException e) { throw new RuntimeException("Execution excetion", e); } Object jobResult = _jobMgr.unmarshallResultObject(outcome.getJob()); if (jobResult != null) { if (jobResult instanceof ConcurrentOperationException) { throw (ConcurrentOperationException)jobResult; } 
else if (jobResult instanceof InvalidParameterValueException) { throw (InvalidParameterValueException)jobResult; } else if (jobResult instanceof RuntimeException) { throw (RuntimeException)jobResult; } else if (jobResult instanceof Throwable) { throw new RuntimeException("Unexpected exception", (Throwable)jobResult); } else if (jobResult instanceof Long) { vol = _volsDao.findById((Long)jobResult); } } return vol; } } @Override @ActionEvent(eventType = EventTypes.EVENT_VOLUME_UPDATE, eventDescription = "updating volume", async = true) public Volume updateVolume(long volumeId, String path, String state, Long storageId, Boolean displayVolume, String customId, long entityOwnerId, String chainInfo) { VolumeVO volume = _volsDao.findById(volumeId); if (volume == null) { throw new InvalidParameterValueException("The volume id doesn't exist"); } if (path != null) { volume.setPath(path); } if (chainInfo != null) { volume.setChainInfo(chainInfo); } if (state != null) { try { Volume.State volumeState = Volume.State.valueOf(state); volume.setState(volumeState); } catch (IllegalArgumentException ex) { throw new InvalidParameterValueException("Invalid volume state specified"); } } if (storageId != null) { StoragePool pool = _storagePoolDao.findById(storageId); if (pool.getDataCenterId() != volume.getDataCenterId()) { throw new InvalidParameterValueException("Invalid storageId specified; refers to the pool outside of the volume's zone"); } volume.setPoolId(pool.getId()); } if (customId != null) { volume.setUuid(customId); } updateDisplay(volume, displayVolume); _volsDao.update(volumeId, volume); return volume; } @Override public void updateDisplay(Volume volume, Boolean displayVolume) { // 1. Resource limit changes updateResourceCount(volume, displayVolume); // 2. generate usage event if not in destroyed state saveUsageEvent(volume, displayVolume); // 3. Set the flag if (displayVolume != null && displayVolume != volume.isDisplayVolume()) { // FIXME - Confused - typecast for now. 
((VolumeVO)volume).setDisplayVolume(displayVolume); _volsDao.update(volume.getId(), (VolumeVO)volume); } } private void updateResourceCount(Volume volume, Boolean displayVolume) { // Update only when the flag has changed. if (displayVolume != null && displayVolume != volume.isDisplayVolume()) { _resourceLimitMgr.changeResourceCount(volume.getAccountId(), ResourceType.volume, displayVolume); _resourceLimitMgr.changeResourceCount(volume.getAccountId(), ResourceType.primary_storage, displayVolume, new Long(volume.getSize())); } } private void saveUsageEvent(Volume volume, Boolean displayVolume) { // Update only when the flag has changed && only when volume in a non-destroyed state. if ((displayVolume != null && displayVolume != volume.isDisplayVolume()) && !isVolumeDestroyed(volume)) { if (displayVolume) { // flag turned 1 equivalent to freshly created volume UsageEventUtils.publishUsageEvent(EventTypes.EVENT_VOLUME_CREATE, volume.getAccountId(), volume.getDataCenterId(), volume.getId(), volume.getName(), volume.getDiskOfferingId(), volume.getTemplateId(), volume.getSize(), Volume.class.getName(), volume.getUuid()); } else { // flag turned 0 equivalent to deleting a volume UsageEventUtils.publishUsageEvent(EventTypes.EVENT_VOLUME_DELETE, volume.getAccountId(), volume.getDataCenterId(), volume.getId(), volume.getName(), Volume.class.getName(), volume.getUuid()); } } } private boolean isVolumeDestroyed(Volume volume) { if (volume.getState() == Volume.State.Destroy || volume.getState() == Volume.State.Expunging && volume.getState() == Volume.State.Expunged) { return true; } return false; } @Override @ActionEvent(eventType = EventTypes.EVENT_VOLUME_DETACH, eventDescription = "detaching volume", async = true) public Volume detachVolumeFromVM(DetachVolumeCmd cmmd) { Account caller = CallContext.current().getCallingAccount(); if ((cmmd.getId() == null && cmmd.getDeviceId() == null && cmmd.getVirtualMachineId() == null) || (cmmd.getId() != null && (cmmd.getDeviceId() != null 
|| cmmd.getVirtualMachineId() != null)) || (cmmd.getId() == null && (cmmd.getDeviceId() == null || cmmd.getVirtualMachineId() == null))) { throw new InvalidParameterValueException("Please provide either a volume id, or a tuple(device id, instance id)"); } Long volumeId = cmmd.getId(); VolumeVO volume = null; if (volumeId != null) { volume = _volsDao.findById(volumeId); } else { volume = _volsDao.findByInstanceAndDeviceId(cmmd.getVirtualMachineId(), cmmd.getDeviceId()).get(0); } // Check that the volume ID is valid if (volume == null) { throw new InvalidParameterValueException("Unable to find volume with ID: " + volumeId); } Long vmId = null; if (cmmd.getVirtualMachineId() == null) { vmId = volume.getInstanceId(); } else { vmId = cmmd.getVirtualMachineId(); } // Permissions check _accountMgr.checkAccess(caller, null, true, volume); // Check that the volume is currently attached to a VM if (vmId == null) { throw new InvalidParameterValueException("The specified volume is not attached to a VM."); } // Check that the VM is in the correct state UserVmVO vm = _userVmDao.findById(vmId); if (vm.getState() != State.Running && vm.getState() != State.Stopped && vm.getState() != State.Destroyed) { throw new InvalidParameterValueException("Please specify a VM that is either running or stopped."); } // Check that the volume is a data/root volume if (!(volume.getVolumeType() == Volume.Type.ROOT || volume.getVolumeType() == Volume.Type.DATADISK)) { throw new InvalidParameterValueException("Please specify volume of type " + Volume.Type.DATADISK.toString() + " or " + Volume.Type.ROOT.toString()); } // Root volume detach is allowed for following hypervisors: Xen/KVM/VmWare if (volume.getVolumeType() == Volume.Type.ROOT) { validateRootVolumeDetachAttach(volume, vm); } // Don't allow detach if target VM has associated VM snapshots List<VMSnapshotVO> vmSnapshots = _vmSnapshotDao.findByVm(vmId); if (vmSnapshots.size() > 0) { throw new InvalidParameterValueException("Unable to detach 
volume, please specify a VM that does not have VM snapshots"); } if (vm.getBackupOfferingId() != null || vm.getBackupVolumeList().size() > 0) { throw new InvalidParameterValueException("Unable to detach volume, cannot detach volume from a VM that has backups. First remove the VM from the backup offering."); } AsyncJobExecutionContext asyncExecutionContext = AsyncJobExecutionContext.getCurrentExecutionContext(); if (asyncExecutionContext != null) { AsyncJob job = asyncExecutionContext.getJob(); if (s_logger.isInfoEnabled()) { s_logger.info("Trying to attaching volume " + volumeId + "to vm instance:" + vm.getId() + ", update async job-" + job.getId() + " progress status"); } _jobMgr.updateAsyncJobAttachment(job.getId(), "Volume", volumeId); } AsyncJobExecutionContext jobContext = AsyncJobExecutionContext.getCurrentExecutionContext(); if (jobContext.isJobDispatchedBy(VmWorkConstants.VM_WORK_JOB_DISPATCHER)) { // avoid re-entrance VmWorkJobVO placeHolder = null; placeHolder = createPlaceHolderWork(vmId); try { return orchestrateDetachVolumeFromVM(vmId, volumeId); } finally { _workJobDao.expunge(placeHolder.getId()); } } else { Outcome<Volume> outcome = detachVolumeFromVmThroughJobQueue(vmId, volumeId); Volume vol = null; try { outcome.get(); } catch (InterruptedException e) { throw new RuntimeException("Operation is interrupted", e); } catch (java.util.concurrent.ExecutionException e) { throw new RuntimeException("Execution excetion", e); } Object jobResult = _jobMgr.unmarshallResultObject(outcome.getJob()); if (jobResult != null) { if (jobResult instanceof ConcurrentOperationException) { throw (ConcurrentOperationException)jobResult; } else if (jobResult instanceof RuntimeException) { throw (RuntimeException)jobResult; } else if (jobResult instanceof Throwable) { throw new RuntimeException("Unexpected exception", (Throwable)jobResult); } else if (jobResult instanceof Long) { vol = _volsDao.findById((Long)jobResult); } } return vol; } } private void 
validateRootVolumeDetachAttach(VolumeVO volume, UserVmVO vm) { if (!(vm.getHypervisorType() == HypervisorType.XenServer || vm.getHypervisorType() == HypervisorType.VMware || vm.getHypervisorType() == HypervisorType.KVM || vm.getHypervisorType() == HypervisorType.Simulator)) { throw new InvalidParameterValueException("Root volume detach is not supported for hypervisor type " + vm.getHypervisorType()); } if (!(vm.getState() == State.Stopped) || (vm.getState() == State.Destroyed)) { throw new InvalidParameterValueException("Root volume detach can happen only when vm is in states: " + State.Stopped.toString() + " or " + State.Destroyed.toString()); } if (volume.getPoolId() != null) { StoragePoolVO pool = _storagePoolDao.findById(volume.getPoolId()); if (pool.isManaged()) { throw new InvalidParameterValueException("Root volume detach is not supported for Managed DataStores"); } } } @ActionEvent(eventType = EventTypes.EVENT_VOLUME_DETACH, eventDescription = "detaching volume") public Volume detachVolumeViaDestroyVM(long vmId, long volumeId) { return orchestrateDetachVolumeFromVM(vmId, volumeId); } private Volume orchestrateDetachVolumeFromVM(long vmId, long volumeId) { Volume volume = _volsDao.findById(volumeId); VMInstanceVO vm = _vmInstanceDao.findById(vmId); String errorMsg = "Failed to detach volume " + volume.getName() + " from VM " + vm.getHostName(); boolean sendCommand = vm.getState() == State.Running; Long hostId = vm.getHostId(); if (hostId == null) { hostId = vm.getLastHostId(); HostVO host = _hostDao.findById(hostId); if (host != null && host.getHypervisorType() == HypervisorType.VMware) { sendCommand = true; } } HostVO host = null; StoragePoolVO volumePool = _storagePoolDao.findByIdIncludingRemoved(volume.getPoolId()); if (hostId != null) { host = _hostDao.findById(hostId); if (host != null && host.getHypervisorType() == HypervisorType.XenServer && volumePool != null && volumePool.isManaged()) { sendCommand = true; } } if (volumePool == null) { sendCommand = 
false; } Answer answer = null; if (sendCommand) { // collect vm disk statistics before detach a volume UserVmVO userVm = _userVmDao.findById(vmId); if (userVm != null && userVm.getType() == VirtualMachine.Type.User) { _userVmService.collectVmDiskStatistics(userVm); } DataTO volTO = volFactory.getVolume(volume.getId()).getTO(); DiskTO disk = new DiskTO(volTO, volume.getDeviceId(), volume.getPath(), volume.getVolumeType()); DettachCommand cmd = new DettachCommand(disk, vm.getInstanceName()); cmd.setManaged(volumePool.isManaged()); cmd.setStorageHost(volumePool.getHostAddress()); cmd.setStoragePort(volumePool.getPort()); cmd.set_iScsiName(volume.get_iScsiName()); try { answer = _agentMgr.send(hostId, cmd); } catch (Exception e) { throw new CloudRuntimeException(errorMsg + " due to: " + e.getMessage()); } } if (!sendCommand || (answer != null && answer.getResult())) { // Mark the volume as detached _volsDao.detachVolume(volume.getId()); // volume.getPoolId() should be null if the VM we are detaching the disk from has never been started before if (volume.getPoolId() != null) { DataStore dataStore = dataStoreMgr.getDataStore(volume.getPoolId(), DataStoreRole.Primary); volService.revokeAccess(volFactory.getVolume(volume.getId()), host, dataStore); } if (volumePool != null && hostId != null) { handleTargetsForVMware(hostId, volumePool.getHostAddress(), volumePool.getPort(), volume.get_iScsiName()); } return _volsDao.findById(volumeId); } else { if (answer != null) { String details = answer.getDetails(); if (details != null && !details.isEmpty()) { errorMsg += "; " + details; } } throw new CloudRuntimeException(errorMsg); } } public void updateMissingRootDiskController(final VMInstanceVO vm, final String rootVolChainInfo) { if (vm == null || !VirtualMachine.Type.User.equals(vm.getType()) || Strings.isNullOrEmpty(rootVolChainInfo)) { return; } String rootDiskController = null; try { final VirtualMachineDiskInfo infoInChain = _gson.fromJson(rootVolChainInfo, 
// (tail of the enclosing method, whose start is above this chunk: it parses the
// VMware disk-chain info JSON and, when a root disk controller is identified,
// persists it as a VM detail for later use)
VirtualMachineDiskInfo.class);
            if (infoInChain != null) {
                rootDiskController = infoInChain.getControllerFromDeviceBusName();
            }
            final UserVmVO userVmVo = _userVmDao.findById(vm.getId());
            if ((rootDiskController != null) && (!rootDiskController.isEmpty())) {
                _userVmDao.loadDetails(userVmVo);
                // Persist the discovered controller so the VM detail survives future operations.
                _userVmMgr.persistDeviceBusInfo(userVmVo, rootDiskController);
            }
        } catch (JsonParseException e) {
            // Best effort: malformed chain info is logged at debug level and otherwise ignored.
            s_logger.debug("Error parsing chain info json: " + e.getMessage());
        }
    }

/**
 * For VMware hosts only: removes the dynamic iSCSI target identified by
 * (storageAddress, storagePort, iScsiName) from all hosts in the host's cluster.
 * No-op for any other hypervisor type.
 *
 * @param hostId         host used to look up the hypervisor type and to send the command
 * @param storageAddress storage host address of the target
 * @param storagePort    storage port of the target
 * @param iScsiName      IQN of the target to remove
 */
private void handleTargetsForVMware(long hostId, String storageAddress, int storagePort, String iScsiName) {
    HostVO host = _hostDao.findById(hostId);
    if (host.getHypervisorType() == HypervisorType.VMware) {
        ModifyTargetsCommand cmd = new ModifyTargetsCommand();
        List<Map<String, String>> targets = new ArrayList<>();
        Map<String, String> target = new HashMap<>();
        target.put(ModifyTargetsCommand.STORAGE_HOST, storageAddress);
        target.put(ModifyTargetsCommand.STORAGE_PORT, String.valueOf(storagePort));
        target.put(ModifyTargetsCommand.IQN, iScsiName);
        targets.add(target);
        cmd.setTargets(targets);
        // Remove (setAdd(false)) the dynamic target from every host in the cluster.
        cmd.setApplyToAllHostsInCluster(true);
        cmd.setAdd(false);
        cmd.setTargetTypeToRemove(ModifyTargetsCommand.TargetTypeToRemove.DYNAMIC);
        sendModifyTargetsCommand(cmd, hostId);
    }
}

/**
 * Sends a ModifyTargetsCommand to the given host. Failures (no answer, or a
 * negative result) are logged as warnings only — the caller is not notified.
 */
private void sendModifyTargetsCommand(ModifyTargetsCommand cmd, long hostId) {
    Answer answer = _agentMgr.easySend(hostId, cmd);
    if (answer == null) {
        String msg = "Unable to get an answer to the modify targets command";
        s_logger.warn(msg);
    } else if (!answer.getResult()) {
        String msg = "Unable to modify target on the following host: " + hostId;
        s_logger.warn(msg);
    }
}

/**
 * Migrates a volume to another primary storage pool, optionally live (storage
 * motion) when the owning VM is running and the hypervisor supports it.
 * Validates volume state, destination pool (existence, maintenance, capacity,
 * storage tags), cluster/datacenter constraints, and an optional replacement
 * disk offering; then either runs the migration directly or serializes it
 * through the VM work-job queue when the volume is attached to a VM.
 *
 * @param cmd API command carrying volume id, destination pool id, livemigrate
 *            flag and an optional new disk offering uuid
 * @return the migrated volume, or null when the async job yields no volume id
 */
@DB
@Override
@ActionEvent(eventType = EventTypes.EVENT_VOLUME_MIGRATE, eventDescription = "migrating volume", async = true)
public Volume migrateVolume(MigrateVolumeCmd cmd) {
    Long volumeId = cmd.getVolumeId();
    Long storagePoolId = cmd.getStoragePoolId();
    VolumeVO vol = _volsDao.findById(volumeId);
    if (vol == null) {
        throw new InvalidParameterValueException("Failed to find the volume id: " + volumeId);
    }
    if (vol.getState() != Volume.State.Ready) {
        throw new InvalidParameterValueException("Volume must be in ready state");
    }
    if (vol.getPoolId() == storagePoolId) {
        throw new InvalidParameterValueException("Volume " + vol + " is already on the destination storage pool");
    }
    boolean liveMigrateVolume = false;
    Long instanceId = vol.getInstanceId();
    Long srcClusterId = null;
    VMInstanceVO vm = null;
    if (instanceId != null) {
        vm = _vmInstanceDao.findById(instanceId);
    }
    // Check that the VM to which this volume is attached does not have VM snapshots.
    // OfflineVmwareMigration: consider if this is needed and desirable
    if (vm != null && _vmSnapshotDao.findByVm(vm.getId()).size() > 0) {
        throw new InvalidParameterValueException("Volume cannot be migrated, please remove all VM snapshots for VM to which this volume is attached");
    }
    // OfflineVmwareMigration: extract this block as method and check if it is subject to regression
    if (vm != null && vm.getState() == State.Running) {
        // Check if the VM is GPU enabled: live migration of GPU VMs is unsupported.
        if (_serviceOfferingDetailsDao.findDetail(vm.getServiceOfferingId(), GPU.Keys.pciDevice.toString()) != null) {
            throw new InvalidParameterValueException("Live Migration of GPU enabled VM is not supported");
        }
        // Check if the underlying hypervisor supports storage motion.
        Long hostId = vm.getHostId();
        if (hostId != null) {
            HostVO host = _hostDao.findById(hostId);
            HypervisorCapabilitiesVO capabilities = null;
            if (host != null) {
                capabilities = _hypervisorCapabilitiesDao.findByHypervisorTypeAndVersion(host.getHypervisorType(), host.getHypervisorVersion());
                srcClusterId = host.getClusterId();
            }
            if (capabilities != null) {
                liveMigrateVolume = capabilities.isStorageMotionSupported();
            }
        }
        // If vm is running, and hypervisor doesn't support live migration, then return error
        if (!liveMigrateVolume) {
            throw new InvalidParameterValueException("Volume needs to be detached from VM");
        }
    }
    // A live migration must be explicitly requested by the caller.
    if (liveMigrateVolume && !cmd.isLiveMigrate()) {
        throw new InvalidParameterValueException("The volume " + vol + "is attached to a vm and for migrating it " + "the parameter livemigrate should be specified");
    }
    StoragePool destPool = (StoragePool)dataStoreMgr.getDataStore(storagePoolId, DataStoreRole.Primary);
    if (destPool == null) {
        throw new InvalidParameterValueException("Failed to find the destination storage pool: " + storagePoolId);
    } else if (destPool.isInMaintenance()) {
        throw new InvalidParameterValueException("Cannot migrate volume " + vol + "to the destination storage pool " + destPool.getName() + " as the storage pool is in maintenance mode.");
    }
    if (!storageMgr.storagePoolHasEnoughSpace(Collections.singletonList(vol), destPool)) {
        throw new CloudRuntimeException("Storage pool " + destPool.getName() + " does not have enough space to migrate volume " + vol.getName());
    }
    // OfflineVmwareMigration: check storage tags on disk(offering)s in comparison to destination storage pool
    // OfflineVmwareMigration: if no match return a proper error now
    DiskOfferingVO diskOffering = _diskOfferingDao.findById(vol.getDiskOfferingId());
    if (diskOffering == null) {
        throw new CloudRuntimeException("volume '" + vol.getUuid() + "', has no diskoffering. Migration target cannot be checked.");
    }
    if (!doesTargetStorageSupportDiskOffering(destPool, diskOffering)) {
        throw new CloudRuntimeException(String.format("Migration target pool [%s, tags:%s] has no matching tags for volume [%s, uuid:%s, tags:%s]", destPool.getName(), getStoragePoolTags(destPool), vol.getName(), vol.getUuid(), diskOffering.getTags()));
    }
    // Live migration is restricted to pools within the VM's current cluster.
    if (liveMigrateVolume && destPool.getClusterId() != null && srcClusterId != null) {
        if (!srcClusterId.equals(destPool.getClusterId())) {
            throw new InvalidParameterValueException("Cannot migrate a volume of a virtual machine to a storage pool in a different cluster");
        }
    }
    // In case of VMware, if ROOT volume is being cold-migrated, then ensure destination storage pool is in the same Datacenter as the VM.
    if (vm != null && vm.getHypervisorType().equals(HypervisorType.VMware)) {
        if (!liveMigrateVolume && vol.volumeType.equals(Volume.Type.ROOT)) {
            Long hostId = vm.getHostId() != null ? vm.getHostId() : vm.getLastHostId();
            HostVO host = _hostDao.findById(hostId);
            if (host != null) {
                srcClusterId = host.getClusterId();
            }
            if (srcClusterId != null && destPool.getClusterId() != null && !srcClusterId.equals(destPool.getClusterId())) {
                String srcDcName = _clusterDetailsDao.getVmwareDcName(srcClusterId);
                String destDcName = _clusterDetailsDao.getVmwareDcName(destPool.getClusterId());
                if (srcDcName != null && destDcName != null && !srcDcName.equals(destDcName)) {
                    throw new InvalidParameterValueException("Cannot migrate ROOT volume of a stopped VM to a storage pool in a different VMware datacenter");
                }
            }
            updateMissingRootDiskController(vm, vol.getChainInfo());
        }
    }
    DiskOfferingVO newDiskOffering = retrieveAndValidateNewDiskOffering(cmd);
    validateConditionsToReplaceDiskOfferingOfVolume(vol, newDiskOffering, destPool);
    if (vm != null) {
        // serialize VM operation
        AsyncJobExecutionContext jobContext = AsyncJobExecutionContext.getCurrentExecutionContext();
        if (jobContext.isJobDispatchedBy(VmWorkConstants.VM_WORK_JOB_DISPATCHER)) {
            // avoid re-entrance: already inside a VM work job, run directly with a placeholder record
            VmWorkJobVO placeHolder = null;
            placeHolder = createPlaceHolderWork(vm.getId());
            try {
                return orchestrateMigrateVolume(vol, destPool, liveMigrateVolume, newDiskOffering);
            } finally {
                _workJobDao.expunge(placeHolder.getId());
            }
        } else {
            // Queue the migration behind other operations on this VM and wait for the outcome.
            Outcome<Volume> outcome = migrateVolumeThroughJobQueue(vm, vol, destPool, liveMigrateVolume, newDiskOffering);
            try {
                outcome.get();
            } catch (InterruptedException e) {
                throw new RuntimeException("Operation is interrupted", e);
            } catch (java.util.concurrent.ExecutionException e) {
                throw new RuntimeException("Execution excetion", e);
            }
            // Re-throw any exception the job serialized into its result.
            Object jobResult = _jobMgr.unmarshallResultObject(outcome.getJob());
            if (jobResult != null) {
                if (jobResult instanceof ConcurrentOperationException) {
                    throw (ConcurrentOperationException)jobResult;
                } else if (jobResult instanceof RuntimeException) {
                    throw (RuntimeException)jobResult;
                } else if (jobResult instanceof Throwable) {
                    throw new RuntimeException("Unexpected exception", (Throwable)jobResult);
                }
            }
            // retrieve the migrated new volume from job result
            if (jobResult != null && jobResult instanceof Long) {
                return _entityMgr.findById(VolumeVO.class, ((Long)jobResult));
            }
            return null;
        }
    }
    // Detached volume: migrate directly, no job serialization needed.
    return orchestrateMigrateVolume(vol, destPool, liveMigrateVolume, newDiskOffering);
}

/**
 * Retrieves the new disk offering UUID that might be sent to replace the current one in the volume being migrated.
 * If no disk offering UUID is provided we return null. Otherwise, we perform the following checks.
 * <ul>
 * <li>Is the disk offering UUID entered valid? If not, an {@link InvalidParameterValueException} is thrown;
 * <li>If the disk offering was already removed, an {@link InvalidParameterValueException} is thrown;
 * <li>We then check if the user executing the operation has access to the given disk offering.
 * </ul>
 *
 * If all checks pass, we move forward returning the disk offering object.
 */
private DiskOfferingVO retrieveAndValidateNewDiskOffering(MigrateVolumeCmd cmd) {
    String newDiskOfferingUuid = cmd.getNewDiskOfferingUuid();
    // No replacement offering requested: nothing to validate.
    if (org.apache.commons.lang.StringUtils.isBlank(newDiskOfferingUuid)) {
        return null;
    }
    DiskOfferingVO newDiskOffering = _diskOfferingDao.findByUuid(newDiskOfferingUuid);
    if (newDiskOffering == null) {
        throw new InvalidParameterValueException(String.format("The disk offering informed is not valid [id=%s].", newDiskOfferingUuid));
    }
    if (newDiskOffering.getRemoved() != null) {
        throw new InvalidParameterValueException(String.format("We cannot assign a removed disk offering [id=%s] to a volume. ", newDiskOffering.getUuid()));
    }
    // Verify the caller may use this offering, scoped to the volume's zone when resolvable.
    Account caller = CallContext.current().getCallingAccount();
    DataCenter zone = null;
    Volume volume = _volsDao.findById(cmd.getId());
    if (volume != null) {
        zone = _dcDao.findById(volume.getDataCenterId());
    }
    _accountMgr.checkAccess(caller, newDiskOffering, zone);
    return newDiskOffering;
}

/**
 * Performs the validations required for replacing the disk offering while migrating the volume of storage. If no new disk offering is provided, we do not execute any validation.
 * If a disk offering is informed, we then proceed with the following checks.
 * <ul>
 * <li>We check if the given volume is of ROOT type. We cannot change the disk offering of a ROOT volume. Therefore, we throw an {@link InvalidParameterValueException};
 * <li>If the disk is being migrated to shared storage and the new disk offering is for local storage (or vice versa), we throw an {@link InvalidParameterValueException}. Bear in mind that we are validating only the new disk offering. If none is provided we can override the current disk offering. This means, placing a volume with shared disk offering in local storage and vice versa;
 * <li>We then proceed checking the target storage pool supports the new disk offering {@link #doesTargetStorageSupportNewDiskOffering(StoragePool, DiskOfferingVO)}.
* </ul> * * If all of the above validations pass, we check if the size of the new disk offering is different from the volume. If it is, we log a warning message. */ protected void validateConditionsToReplaceDiskOfferingOfVolume(VolumeVO volume, DiskOfferingVO newDiskOffering, StoragePool destPool) { if (newDiskOffering == null) { return; } if ((destPool.isShared() && newDiskOffering.isUseLocalStorage()) || destPool.isLocal() && newDiskOffering.isShared()) { throw new InvalidParameterValueException("You cannot move the volume to a shared storage and assing a disk offering for local storage and vice versa."); } if (!doesTargetStorageSupportDiskOffering(destPool, newDiskOffering)) { throw new InvalidParameterValueException(String.format("Target Storage [id=%s] tags [%s] does not match new disk offering [id=%s] tags [%s].", destPool.getUuid(), getStoragePoolTags(destPool), newDiskOffering.getUuid(), newDiskOffering.getTags())); } if (volume.getSize() != newDiskOffering.getDiskSize()) { DiskOfferingVO oldDiskOffering = this._diskOfferingDao.findById(volume.getDiskOfferingId()); s_logger.warn(String.format( "You are migrating a volume [id=%s] and changing the disk offering[from id=%s to id=%s] to reflect this migration. However, the sizes of the volume and the new disk offering are different.", volume.getUuid(), oldDiskOffering.getUuid(), newDiskOffering.getUuid())); } s_logger.info(String.format("Changing disk offering to [uuid=%s] while migrating volume [uuid=%s, name=%s].", newDiskOffering.getUuid(), volume.getUuid(), volume.getName())); } /** * Checks if the target storage supports the new disk offering. * This validation is consistent with the mechanism used to select a storage pool to deploy a volume when a virtual machine is deployed or when a new data disk is allocated. * * The scenarios when this method returns true or false is presented in the following table. 
* * <table border="1"> * <tr> * <th>#</th><th>Disk offering tags</th><th>Storage tags</th><th>Does the storage support the disk offering?</th> * </tr> * <body> * <tr> * <td>1</td><td>A,B</td><td>A</td><td>NO</td> * </tr> * <tr> * <td>2</td><td>A,B,C</td><td>A,B,C,D,X</td><td>YES</td> * </tr> * <tr> * <td>3</td><td>A,B,C</td><td>X,Y,Z</td><td>NO</td> * </tr> * <tr> * <td>4</td><td>null</td><td>A,S,D</td><td>YES</td> * </tr> * <tr> * <td>5</td><td>A</td><td>null</td><td>NO</td> * </tr> * <tr> * <td>6</td><td>null</td><td>null</td><td>YES</td> * </tr> * </body> * </table> */ protected boolean doesTargetStorageSupportDiskOffering(StoragePool destPool, DiskOfferingVO diskOffering) { String targetStoreTags = diskOffering.getTags(); return doesTargetStorageSupportDiskOffering(destPool, targetStoreTags); } @Override public boolean doesTargetStorageSupportDiskOffering(StoragePool destPool, String diskOfferingTags) { if (org.apache.commons.lang.StringUtils.isBlank(diskOfferingTags)) { return true; } String storagePoolTags = getStoragePoolTags(destPool); if (org.apache.commons.lang.StringUtils.isBlank(storagePoolTags)) { return false; } String[] storageTagsAsStringArray = org.apache.commons.lang.StringUtils.split(storagePoolTags, ","); String[] newDiskOfferingTagsAsStringArray = org.apache.commons.lang.StringUtils.split(diskOfferingTags, ","); return CollectionUtils.isSubCollection(Arrays.asList(newDiskOfferingTagsAsStringArray), Arrays.asList(storageTagsAsStringArray)); } /** * Retrieves the storage pool tags as a {@link String}. If the storage pool does not have tags we return a null value. 
 */
protected String getStoragePoolTags(StoragePool destPool) {
    List<String> destPoolTags = storagePoolTagsDao.getStoragePoolTags(destPool.getId());
    // No tags on the pool: callers treat null as "untagged".
    if (CollectionUtils.isEmpty(destPoolTags)) {
        return null;
    }
    return StringUtils.join(destPoolTags, ",");
}

/**
 * Executes the actual volume migration (live or cold) and, when a replacement
 * disk offering was requested, re-points the migrated volume at it.
 * Any failure is wrapped in a {@link CloudRuntimeException}.
 */
private Volume orchestrateMigrateVolume(VolumeVO volume, StoragePool destPool, boolean liveMigrateVolume, DiskOfferingVO newDiskOffering) {
    Volume newVol = null;
    try {
        if (liveMigrateVolume) {
            newVol = liveMigrateVolume(volume, destPool);
        } else {
            newVol = _volumeMgr.migrateVolume(volume, destPool);
        }
        if (newDiskOffering != null) {
            _volsDao.updateDiskOffering(newVol.getId(), newDiskOffering.getId());
        }
    // NOTE(review): both catch blocks are identical; the StorageUnavailableException
    // clause is redundant with the Exception clause below.
    } catch (StorageUnavailableException e) {
        s_logger.debug("Failed to migrate volume", e);
        throw new CloudRuntimeException(e.getMessage());
    } catch (Exception e) {
        s_logger.debug("Failed to migrate volume", e);
        throw new CloudRuntimeException(e.getMessage());
    }
    return newVol;
}

/**
 * Live-migrates a volume to the destination primary store via the volume
 * service, blocking on the async result.
 *
 * @throws StorageUnavailableException when the migration result reports failure
 */
@DB
protected Volume liveMigrateVolume(Volume volume, StoragePool destPool) throws StorageUnavailableException {
    VolumeInfo vol = volFactory.getVolume(volume.getId());
    DataStore dataStoreTarget = dataStoreMgr.getDataStore(destPool.getId(), DataStoreRole.Primary);
    AsyncCallFuture<VolumeApiResult> future = volService.migrateVolume(vol, dataStoreTarget);
    try {
        VolumeApiResult result = future.get();
        if (result.isFailed()) {
            s_logger.debug("migrate volume failed:" + result.getResult());
            throw new StorageUnavailableException("Migrate volume failed: " + result.getResult(), destPool.getId());
        }
        return result.getVolume();
    } catch (InterruptedException e) {
        s_logger.debug("migrate volume failed", e);
        throw new CloudRuntimeException(e.getMessage());
    } catch (ExecutionException e) {
        s_logger.debug("migrate volume failed", e);
        throw new CloudRuntimeException(e.getMessage());
    }
}

/**
 * Takes a snapshot of a volume and, when tags are supplied, applies them to
 * the resulting snapshot. Delegates the snapshot itself to
 * takeSnapshotInternal.
 */
@Override
@ActionEvent(eventType = EventTypes.EVENT_SNAPSHOT_CREATE, eventDescription = "taking snapshot", async = true)
public Snapshot takeSnapshot(Long volumeId, Long policyId,
Long snapshotId, Account account,
        boolean quiescevm, Snapshot.LocationType locationType, boolean asyncBackup, Map<String, String> tags)
        throws ResourceAllocationException {
    final Snapshot snapshot = takeSnapshotInternal(volumeId, policyId, snapshotId, account, quiescevm, locationType, asyncBackup);
    // Tags are applied only after a snapshot was actually produced.
    if (snapshot != null && MapUtils.isNotEmpty(tags)) {
        taggedResourceService.createTags(Collections.singletonList(snapshot.getUuid()), ResourceTag.ResourceObjectType.Snapshot, tags, null);
    }
    return snapshot;
}

/**
 * Validates the volume and takes the snapshot, serializing through the VM
 * work-job queue when the volume is attached to a VM; detached volumes are
 * snapshotted directly via the volume service.
 */
private Snapshot takeSnapshotInternal(Long volumeId, Long policyId, Long snapshotId, Account account, boolean quiescevm, Snapshot.LocationType locationType, boolean asyncBackup)
        throws ResourceAllocationException {
    VolumeInfo volume = volFactory.getVolume(volumeId);
    if (volume == null) {
        throw new InvalidParameterValueException("Creating snapshot failed due to volume:" + volumeId + " doesn't exist");
    }
    if (volume.getState() != Volume.State.Ready) {
        throw new InvalidParameterValueException("VolumeId: " + volumeId + " is not in " + Volume.State.Ready + " state but " + volume.getState() + ". Cannot take snapshot.");
    }
    // Managed storage defaults the snapshot location to PRIMARY.
    StoragePoolVO storagePoolVO = _storagePoolDao.findById(volume.getPoolId());
    if (storagePoolVO.isManaged() && locationType == null) {
        locationType = Snapshot.LocationType.PRIMARY;
    }
    VMInstanceVO vm = null;
    if (volume.getInstanceId() != null) {
        vm = _vmInstanceDao.findById(volume.getInstanceId());
    }
    if (vm != null) {
        // serialize VM operation
        AsyncJobExecutionContext jobContext = AsyncJobExecutionContext.getCurrentExecutionContext();
        if (jobContext.isJobDispatchedBy(VmWorkConstants.VM_WORK_JOB_DISPATCHER)) {
            // avoid re-entrance: already inside a VM work job, run directly with a placeholder record
            VmWorkJobVO placeHolder = null;
            placeHolder = createPlaceHolderWork(vm.getId());
            try {
                return orchestrateTakeVolumeSnapshot(volumeId, policyId, snapshotId, account, quiescevm, locationType, asyncBackup);
            } finally {
                _workJobDao.expunge(placeHolder.getId());
            }
        } else {
            Outcome<Snapshot> outcome = takeVolumeSnapshotThroughJobQueue(vm.getId(), volumeId, policyId, snapshotId, account.getId(), quiescevm, locationType, asyncBackup);
            try {
                outcome.get();
            } catch (InterruptedException e) {
                throw new RuntimeException("Operation is interrupted", e);
            } catch (java.util.concurrent.ExecutionException e) {
                throw new RuntimeException("Execution excetion", e);
            }
            // Re-throw any exception the job serialized into its result.
            Object jobResult = _jobMgr.unmarshallResultObject(outcome.getJob());
            if (jobResult != null) {
                if (jobResult instanceof ConcurrentOperationException) {
                    throw (ConcurrentOperationException)jobResult;
                } else if (jobResult instanceof ResourceAllocationException) {
                    throw (ResourceAllocationException)jobResult;
                } else if (jobResult instanceof Throwable) {
                    throw new RuntimeException("Unexpected exception", (Throwable)jobResult);
                }
            }
            return _snapshotDao.findById(snapshotId);
        }
    } else {
        // Detached volume: snapshot directly via the volume service.
        // NOTE(review): unlike orchestrateTakeVolumeSnapshot, this path does not set
        // locationType on the payload — confirm whether that is intentional.
        CreateSnapshotPayload payload = new CreateSnapshotPayload();
        payload.setSnapshotId(snapshotId);
        payload.setSnapshotPolicyId(policyId);
        payload.setAccount(account);
        payload.setQuiescevm(quiescevm);
        payload.setAsyncBackup(asyncBackup);
        volume.addPayload(payload);
        return volService.takeSnapshot(volume);
    }
}

/**
 * Work-job entry point for taking a volume snapshot: re-validates the volume
 * (state may have changed while queued) and invokes the volume service with a
 * fully-populated snapshot payload.
 */
private Snapshot orchestrateTakeVolumeSnapshot(Long volumeId, Long policyId, Long snapshotId, Account account, boolean quiescevm, Snapshot.LocationType locationType, boolean asyncBackup)
        throws ResourceAllocationException {
    VolumeInfo volume = volFactory.getVolume(volumeId);
    if (volume == null) {
        throw new InvalidParameterValueException("Creating snapshot failed due to volume:" + volumeId + " doesn't exist");
    }
    if (volume.getState() != Volume.State.Ready) {
        throw new InvalidParameterValueException("VolumeId: " + volumeId + " is not in " + Volume.State.Ready + " state but " + volume.getState() + ". Cannot take snapshot.");
    }
    CreateSnapshotPayload payload = new CreateSnapshotPayload();
    payload.setSnapshotId(snapshotId);
    payload.setSnapshotPolicyId(policyId);
    payload.setAccount(account);
    payload.setQuiescevm(quiescevm);
    payload.setLocationType(locationType);
    payload.setAsyncBackup(asyncBackup);
    volume.addPayload(payload);
    return volService.takeSnapshot(volume);
}

/**
 * Allocates (reserves) a snapshot record for a volume after validating the
 * caller's zone access, volume state and format, template type, and the
 * locationType/managed-storage pairing. The actual snapshot is taken later.
 */
@Override
@ActionEvent(eventType = EventTypes.EVENT_SNAPSHOT_CREATE, eventDescription = "allocating snapshot", create = true)
public Snapshot allocSnapshot(Long volumeId, Long policyId, String snapshotName, Snapshot.LocationType locationType) throws ResourceAllocationException {
    Account caller = CallContext.current().getCallingAccount();
    VolumeInfo volume = volFactory.getVolume(volumeId);
    if (volume == null) {
        throw new InvalidParameterValueException("Creating snapshot failed due to volume:" + volumeId + " doesn't exist");
    }
    DataCenter zone = _dcDao.findById(volume.getDataCenterId());
    if (zone == null) {
        throw new InvalidParameterValueException("Can't find zone by id " + volume.getDataCenterId());
    }
    // Only root admins may operate in a disabled zone.
    if (Grouping.AllocationState.Disabled == zone.getAllocationState() && !_accountMgr.isRootAdmin(caller.getId())) {
        throw new PermissionDeniedException("Cannot perform this operation, Zone is currently disabled: " + zone.getName());
    }
    if (volume.getState() != Volume.State.Ready) {
        throw new InvalidParameterValueException("VolumeId: " + volumeId + " is not in " + Volume.State.Ready + " state but " + volume.getState() + ". Cannot take snapshot.");
    }
    if (ImageFormat.DIR.equals(volume.getFormat())) {
        throw new InvalidParameterValueException("Snapshot not supported for volume:" + volumeId);
    }
    // System VM volumes may not be snapshotted.
    if (volume.getTemplateId() != null) {
        VMTemplateVO template = _templateDao.findById(volume.getTemplateId());
        if (template != null && template.getTemplateType() == Storage.TemplateType.SYSTEM) {
            throw new InvalidParameterValueException("VolumeId: " + volumeId + " is for System VM , Creating snapshot against System VM volumes is not supported");
        }
    }
    // locationType is only meaningful for managed storage; managed storage defaults to PRIMARY.
    StoragePoolVO storagePoolVO = _storagePoolDao.findById(volume.getPoolId());
    if (!storagePoolVO.isManaged() && locationType != null) {
        throw new InvalidParameterValueException("VolumeId: " + volumeId + " LocationType is supported only for managed storage");
    }
    if (storagePoolVO.isManaged() && locationType == null) {
        locationType = Snapshot.LocationType.PRIMARY;
    }
    StoragePool storagePool = (StoragePool)volume.getDataStore();
    if (storagePool == null) {
        throw new InvalidParameterValueException("VolumeId: " + volumeId + " please attach this volume to a VM before create snapshot for it");
    }
    return snapshotMgr.allocSnapshot(volumeId, policyId, snapshotName, locationType);
}

/**
 * Allocates a snapshot for a volume that must be attached to the given VM,
 * using the manual snapshot policy. Performs caller access checks on both the
 * VM and the volume before validating the volume itself.
 */
@Override
public Snapshot allocSnapshotForVm(Long vmId, Long volumeId, String snapshotName) throws ResourceAllocationException {
    Account caller = CallContext.current().getCallingAccount();
    VMInstanceVO vm = _vmInstanceDao.findById(vmId);
    if (vm == null) {
        throw new InvalidParameterValueException("Creating snapshot failed due to vm:" + vmId + " doesn't exist");
    }
    _accountMgr.checkAccess(caller, null, true, vm);
    VolumeInfo volume = volFactory.getVolume(volumeId);
    if (volume == null) {
        throw new InvalidParameterValueException("Creating snapshot failed due to volume:" + volumeId + " doesn't exist");
    }
    _accountMgr.checkAccess(caller, null, true, volume);
    // The volume must actually be attached to the VM named in the request.
    VirtualMachine attachVM = volume.getAttachedVM();
    if (attachVM == null || attachVM.getId() != vm.getId()) {
        throw new InvalidParameterValueException("Creating snapshot failed due to volume:" + volumeId + " doesn't attach to vm :" + vm);
    }
    DataCenter zone = _dcDao.findById(volume.getDataCenterId());
    if (zone == null) {
        throw new InvalidParameterValueException("Can't find zone by id " + volume.getDataCenterId());
    }
    // Only root admins may operate in a disabled zone.
    if (Grouping.AllocationState.Disabled == zone.getAllocationState() && !_accountMgr.isRootAdmin(caller.getId())) {
        throw new PermissionDeniedException("Cannot perform this operation, Zone is currently disabled: " + zone.getName());
    }
    if (volume.getState() != Volume.State.Ready) {
        throw new InvalidParameterValueException("VolumeId: " + volumeId + " is not in " + Volume.State.Ready + " state but " + volume.getState() + ". Cannot take snapshot.");
    }
    // System VM volumes may not be snapshotted.
    if (volume.getTemplateId() != null) {
        VMTemplateVO template = _templateDao.findById(volume.getTemplateId());
        if (template != null && template.getTemplateType() == Storage.TemplateType.SYSTEM) {
            throw new InvalidParameterValueException("VolumeId: " + volumeId + " is for System VM , Creating snapshot against System VM volumes is not supported");
        }
    }
    StoragePool storagePool = (StoragePool)volume.getDataStore();
    if (storagePool == null) {
        throw new InvalidParameterValueException("VolumeId: " + volumeId + " please attach this volume to a VM before create snapshot for it");
    }
    return snapshotMgr.allocSnapshot(volumeId, Snapshot.MANUAL_POLICY_ID, snapshotName, null);
}

/**
 * Extracts (downloads/uploads) a volume: validates permissions and volume
 * state, reuses an existing extract URL when one is already recorded, and
 * otherwise copies the volume to secondary storage — serialized through the
 * VM work-job queue when the volume is attached to a VM.
 *
 * @return the extraction URL for the volume
 */
@Override
@ActionEvent(eventType = EventTypes.EVENT_VOLUME_EXTRACT, eventDescription = "extracting volume", async = true)
public String extractVolume(ExtractVolumeCmd cmd) {
    Long volumeId = cmd.getId();
    Long zoneId = cmd.getZoneId();
    String mode = cmd.getMode();
    Account account = CallContext.current().getCallingAccount();
    if (!_accountMgr.isRootAdmin(account.getId()) && ApiDBUtils.isExtractionDisabled()) {
        throw new PermissionDeniedException("Extraction has been disabled by admin");
    }
    VolumeVO volume = _volsDao.findById(volumeId);
    if (volume == null) {
        InvalidParameterValueException ex = new InvalidParameterValueException("Unable to find volume with specified volumeId");
        ex.addProxyObject(volumeId.toString(), "volumeId");
        throw ex;
    }
    // perform permission check
    _accountMgr.checkAccess(account, null, true, volume);
    if (_dcDao.findById(zoneId) == null) {
        throw new InvalidParameterValueException("Please specify a valid zone.");
    }
    if (volume.getPoolId() == null) {
        throw new InvalidParameterValueException("The volume doesn't belong to a storage pool so can't extract it");
    }
    // Extract activity only for detached volumes or for volumes whose
    // instance is stopped
    if (volume.getInstanceId() != null && ApiDBUtils.findVMInstanceById(volume.getInstanceId()).getState() != State.Stopped) {
        s_logger.debug("Invalid state of the volume with ID: " + volumeId + ". It should be either detached or the VM should be in stopped state.");
        PermissionDeniedException ex = new PermissionDeniedException("Invalid state of the volume with specified ID. It should be either detached or the VM should be in stopped state.");
        ex.addProxyObject(volume.getUuid(), "volumeId");
        throw ex;
    }
    if (volume.getVolumeType() != Volume.Type.DATADISK) {
        // Datadisk dont have any template dependence.
        VMTemplateVO template = ApiDBUtils.findTemplateById(volume.getTemplateId());
        if (template != null) {
            // For ISO based volumes template = null and
            // we allow extraction of all ISO based
            // volumes
            boolean isExtractable = template.isExtractable() && template.getTemplateType() != Storage.TemplateType.SYSTEM;
            if (!isExtractable && account != null && !_accountMgr.isRootAdmin(account.getId())) {
                // Global admins are always allowed to extract
                PermissionDeniedException ex = new PermissionDeniedException("The volume with specified volumeId is not allowed to be extracted");
                ex.addProxyObject(volume.getUuid(), "volumeId");
                throw ex;
            }
        }
    }
    if (mode == null || (!mode.equals(Upload.Mode.FTP_UPLOAD.toString()) && !mode.equals(Upload.Mode.HTTP_DOWNLOAD.toString()))) {
        throw new InvalidParameterValueException("Please specify a valid extract Mode ");
    }
    // Check if the url already exists
    VolumeDataStoreVO volumeStoreRef = _volumeStoreDao.findByVolume(volumeId);
    if (volumeStoreRef != null && volumeStoreRef.getExtractUrl() != null) {
        return volumeStoreRef.getExtractUrl();
    }
    VMInstanceVO vm = null;
    if (volume.getInstanceId() != null) {
        vm = _vmInstanceDao.findById(volume.getInstanceId());
    }
    if (vm != null) {
        // serialize VM operation
        AsyncJobExecutionContext jobContext = AsyncJobExecutionContext.getCurrentExecutionContext();
        if (jobContext.isJobDispatchedBy(VmWorkConstants.VM_WORK_JOB_DISPATCHER)) {
            // avoid re-entrance: already inside a VM work job, run directly with a placeholder record
            VmWorkJobVO placeHolder = null;
            placeHolder = createPlaceHolderWork(vm.getId());
            try {
                return orchestrateExtractVolume(volume.getId(), zoneId);
            } finally {
                _workJobDao.expunge(placeHolder.getId());
            }
        } else {
            Outcome<String> outcome = extractVolumeThroughJobQueue(vm.getId(), volume.getId(), zoneId);
            try {
                outcome.get();
            } catch (InterruptedException e) {
                throw new RuntimeException("Operation is interrupted", e);
            } catch (java.util.concurrent.ExecutionException e) {
                throw new RuntimeException("Execution excetion", e);
            }
            // Re-throw any exception the job serialized into its result.
            Object jobResult = _jobMgr.unmarshallResultObject(outcome.getJob());
            if (jobResult != null) {
                if (jobResult instanceof ConcurrentOperationException) {
                    throw (ConcurrentOperationException)jobResult;
                } else if (jobResult instanceof RuntimeException) {
                    throw (RuntimeException)jobResult;
                } else if (jobResult instanceof Throwable) {
                    throw new RuntimeException("Unexpected exception", (Throwable)jobResult);
                }
            }
            // retrieve the entity url from job result
            if (jobResult != null && jobResult instanceof String) {
                return (String)jobResult;
            }
            return null;
        }
    }
    // Detached volume: extract directly, no job serialization needed.
    return orchestrateExtractVolume(volume.getId(), zoneId);
}

/**
 * Work-job entry point for extraction: copies the volume to a secondary store
 * with free capacity, records the generated extract URL on the volume's store
 * reference and returns it.
 */
private String orchestrateExtractVolume(long volumeId, long zoneId) {
    // get latest volume state to make sure that it is not updated by other parallel operations
    VolumeVO volume = _volsDao.findById(volumeId);
    if (volume == null || volume.getState() != Volume.State.Ready) {
        throw new InvalidParameterValueException("Volume to be extracted has been removed or not in right state!");
    }
    // perform extraction
    ImageStoreEntity secStore = (ImageStoreEntity)dataStoreMgr.getImageStoreWithFreeCapacity(zoneId);
    if (secStore == null) {
        throw new InvalidParameterValueException(String.format("Secondary storage to satisfy storage needs cannot be found for zone: %d", zoneId));
    }
    String value = _configDao.getValue(Config.CopyVolumeWait.toString());
    // NOTE(review): the parsed wait value is discarded — dead statement; confirm
    // whether it was meant to be passed to the copy operation.
    NumbersUtil.parseInt(value, Integer.parseInt(Config.CopyVolumeWait.getDefaultValue()));
    // Copy volume from primary to secondary storage
    VolumeInfo srcVol = volFactory.getVolume(volumeId);
    AsyncCallFuture<VolumeApiResult> cvAnswer = volService.copyVolume(srcVol, secStore);
    // Check if you got a valid answer.
    VolumeApiResult cvResult = null;
    try {
        cvResult = cvAnswer.get();
    } catch (InterruptedException e1) {
        s_logger.debug("failed copy volume", e1);
        throw new CloudRuntimeException("Failed to copy volume", e1);
    } catch (ExecutionException e1) {
        s_logger.debug("failed copy volume", e1);
        throw new CloudRuntimeException("Failed to copy volume", e1);
    }
    if (cvResult == null || cvResult.isFailed()) {
        String errorString = "Failed to copy the volume from the source primary storage pool to secondary storage.";
        throw new CloudRuntimeException(errorString);
    }
    VolumeInfo vol = cvResult.getVolume();
    // Build the extract URL and record it (with DOWNLOADED state) on the store ref.
    String extractUrl = secStore.createEntityExtractUrl(vol.getPath(), vol.getFormat(), vol);
    VolumeDataStoreVO volumeStoreRef = _volumeStoreDao.findByVolume(volumeId);
    volumeStoreRef.setExtractUrl(extractUrl);
    volumeStoreRef.setExtractUrlCreated(DateUtil.now());
    volumeStoreRef.setDownloadState(VMTemplateStorageResourceAssoc.Status.DOWNLOADED);
    volumeStoreRef.setDownloadPercent(100);
    volumeStoreRef.setZoneId(zoneId);
    _volumeStoreDao.update(volumeStoreRef.getId(), volumeStoreRef);
    return extractUrl;
}

/**
 * Returns whether the "display" flag is enabled for the given volume id.
 * Unknown ids default to true.
 */
@Override
public boolean isDisplayResourceEnabled(Long id) {
    Volume volume = _volsDao.findById(id);
    if (volume == null) {
        return true; // bad id given, default to true
    }
    return volume.isDisplayVolume();
}

/**
 * Decides whether a volume must be physically moved between primary stores
 * based on the scope (ZONE/CLUSTER/HOST) of the existing and new volumes'
 * stores. Returns false when no move is needed; throws
 * {@link InvalidParameterValueException} for unsupported scope combinations.
 */
private boolean needMoveVolume(VolumeVO existingVolume, VolumeInfo newVolume) {
    if (existingVolume == null || existingVolume.getPoolId() == null || newVolume.getPoolId() == null) {
        return false;
    }
    DataStore storeForExistingVol = dataStoreMgr.getPrimaryDataStore(existingVolume.getPoolId());
    DataStore storeForNewVol = dataStoreMgr.getPrimaryDataStore(newVolume.getPoolId());
    Scope storeForExistingStoreScope = storeForExistingVol.getScope();
    if (storeForExistingStoreScope == null) {
        throw new CloudRuntimeException("Can't get scope of data store: " + storeForExistingVol.getId());
    }
    Scope storeForNewStoreScope = storeForNewVol.getScope();
    if (storeForNewStoreScope == null) {
        throw new CloudRuntimeException("Can't get scope of data store: " + storeForNewVol.getId());
    }
    // A zone-wide target store is reachable from anywhere in the zone: no move needed.
    if (storeForNewStoreScope.getScopeType() == ScopeType.ZONE) {
        return false;
    }
    if (storeForExistingStoreScope.getScopeType() != storeForNewStoreScope.getScopeType()) {
        if (storeForNewStoreScope.getScopeType() == ScopeType.CLUSTER) {
            // Determine the cluster the VM effectively lives in.
            Long vmClusterId = null;
            if (storeForExistingStoreScope.getScopeType() == ScopeType.HOST) {
                HostScope hs = (HostScope)storeForExistingStoreScope;
                vmClusterId = hs.getClusterId();
            } else if (storeForExistingStoreScope.getScopeType() == ScopeType.ZONE) {
                Long hostId = _vmInstanceDao.findById(existingVolume.getInstanceId()).getHostId();
                if (hostId != null) {
                    HostVO host = _hostDao.findById(hostId);
                    vmClusterId = host.getClusterId();
                }
            }
            if (storeForNewStoreScope.getScopeId().equals(vmClusterId)) {
                return false;
            } else {
                return true;
            }
        } else if (storeForNewStoreScope.getScopeType() == ScopeType.HOST
                && (storeForExistingStoreScope.getScopeType() == ScopeType.CLUSTER || storeForExistingStoreScope.getScopeType() == ScopeType.ZONE)) {
            Long hostId = _vmInstanceDao.findById(existingVolume.getInstanceId()).getHostId();
            if (storeForNewStoreScope.getScopeId().equals(hostId)) {
                return false;
            }
        }
        throw new InvalidParameterValueException("Can't move volume between scope: " + storeForNewStoreScope.getScopeType() + " and " + storeForExistingStoreScope.getScopeType());
    }
    // Same scope type: move only when the two scopes are not the same scope instance.
    return !storeForExistingStoreScope.isSameScope(storeForNewStoreScope);
}

/**
 * Atomically (synchronized) verifies a volume is eligible for attachment and,
 * for Ready volumes, transitions it into the attaching state. Allocated
 * volumes need no state transition; any other state is an error.
 */
private synchronized void checkAndSetAttaching(Long volumeId) {
    VolumeInfo volumeToAttach = volFactory.getVolume(volumeId);
    if (volumeToAttach.isAttachedVM()) {
        throw new CloudRuntimeException("volume: " + volumeToAttach.getName() + " is already attached to a VM: " + volumeToAttach.getAttachedVmName());
    }
    if (Volume.State.Allocated.equals(volumeToAttach.getState())) {
        return;
    }
    if (Volume.State.Ready.equals(volumeToAttach.getState())) {
        volumeToAttach.stateTransit(Volume.Event.AttachRequested);
        return;
    }
    final String error = "Volume: " + volumeToAttach.getName() + " is in " + volumeToAttach.getState() + ". It should be in Ready or Allocated state";
    s_logger.error(error);
    throw new CloudRuntimeException(error);
}

/**
 * For managed storage only: verifies the managed pool can scale to one more
 * attachment on the host's cluster; silently returns when any argument is
 * missing or the pool is not managed.
 */
private void verifyManagedStorage(Long storagePoolId, Long hostId) {
    if (storagePoolId == null || hostId == null) {
        return;
    }
    StoragePoolVO storagePoolVO = _storagePoolDao.findById(storagePoolId);
    if (storagePoolVO == null || !storagePoolVO.isManaged()) {
        return;
    }
    HostVO hostVO = _hostDao.findById(hostId);
    if (hostVO == null) {
        return;
    }
    if (!storageUtil.managedStoragePoolCanScale(storagePoolVO, hostVO.getClusterId(), hostVO.getId())) {
        throw new CloudRuntimeException("Insufficient number of available " + getNameOfClusteredFileSystem(hostVO));
    }
}

/**
 * Returns the hypervisor-specific name for clustered file systems, used in
 * user-facing error messages ("SRs" for XenServer, "datastores" for VMware).
 */
private String getNameOfClusteredFileSystem(HostVO hostVO) {
    HypervisorType hypervisorType = hostVO.getHypervisorType();
    if (HypervisorType.XenServer.equals(hypervisorType)) {
        return "SRs";
    }
    if (HypervisorType.VMware.equals(hypervisorType)) {
        return "datastores";
    }
    return "clustered file systems";
}

/**
 * Attaches a volume to a VM, sending an AttachCommand to the host when the VM
 * is running (or when the hypervisor/storage combination requires it even for
 * stopped VMs), and updating the database to reflect the attachment.
 * (Method continues beyond this chunk.)
 */
private VolumeVO sendAttachVolumeCommand(UserVmVO vm, VolumeVO volumeToAttach, Long deviceId) {
    String errorMsg = "Failed to attach volume " + volumeToAttach.getName() + " to VM " + vm.getHostName();
    boolean sendCommand = vm.getState() == State.Running;
    AttachAnswer answer = null;
    Long hostId = vm.getHostId();
    if (hostId == null) {
        // VM not currently on a host: VMware still requires sending the command to the last host.
        hostId = vm.getLastHostId();
        HostVO host = _hostDao.findById(hostId);
        if (host != null && host.getHypervisorType() == HypervisorType.VMware) {
            sendCommand = true;
        }
    }
    HostVO host = null;
    StoragePoolVO volumeToAttachStoragePool = _storagePoolDao.findById(volumeToAttach.getPoolId());
    if (hostId != null) {
        host = _hostDao.findById(hostId);
        // XenServer with managed storage also needs the command even for stopped VMs.
        if (host != null && host.getHypervisorType() == HypervisorType.XenServer && volumeToAttachStoragePool != null && volumeToAttachStoragePool.isManaged()) {
            sendCommand = true;
        }
    }
    if (volumeToAttachStoragePool != null) {
        verifyManagedStorage(volumeToAttachStoragePool.getId(), hostId);
    }
    // volumeToAttachStoragePool should be null if the VM we are attaching the disk to has never been started before
    DataStore dataStore = volumeToAttachStoragePool != null ? dataStoreMgr.getDataStore(volumeToAttachStoragePool.getId(), DataStoreRole.Primary) : null;
    checkAndSetAttaching(volumeToAttach.getId());
    boolean attached = false;
    try {
        // if we don't have a host, the VM we are attaching the disk to has never been started before
        if (host != null) {
            try {
                volService.grantAccess(volFactory.getVolume(volumeToAttach.getId()), host, dataStore);
            } catch (Exception e) {
                volService.revokeAccess(volFactory.getVolume(volumeToAttach.getId()), host, dataStore);
                throw new CloudRuntimeException(e.getMessage());
            }
        }
        if (sendCommand) {
            // KVM managed storage: a pathless volume uses its iSCSI name as path.
            if (host != null && host.getHypervisorType() == HypervisorType.KVM && volumeToAttachStoragePool.isManaged() && volumeToAttach.getPath() == null) {
                volumeToAttach.setPath(volumeToAttach.get_iScsiName());
                _volsDao.update(volumeToAttach.getId(), volumeToAttach);
            }
            DataTO volTO = volFactory.getVolume(volumeToAttach.getId()).getTO();
            deviceId = getDeviceId(vm, deviceId);
            DiskTO disk = storageMgr.getDiskWithThrottling(volTO, volumeToAttach.getVolumeType(), deviceId, volumeToAttach.getPath(), vm.getServiceOfferingId(), volumeToAttach.getDiskOfferingId());
            AttachCommand cmd = new AttachCommand(disk, vm.getInstanceName());
            ChapInfo chapInfo = volService.getChapInfo(volFactory.getVolume(volumeToAttach.getId()), dataStore);
            // Populate storage/target details for the hypervisor resource.
            Map<String, String> details = new HashMap<String, String>();
            disk.setDetails(details);
            details.put(DiskTO.MANAGED, String.valueOf(volumeToAttachStoragePool.isManaged()));
            details.put(DiskTO.STORAGE_HOST, volumeToAttachStoragePool.getHostAddress());
            details.put(DiskTO.STORAGE_PORT, String.valueOf(volumeToAttachStoragePool.getPort()));
            details.put(DiskTO.VOLUME_SIZE, String.valueOf(volumeToAttach.getSize()));
            details.put(DiskTO.IQN, volumeToAttach.get_iScsiName());
            details.put(DiskTO.MOUNT_POINT, volumeToAttach.get_iScsiName());
            details.put(DiskTO.PROTOCOL_TYPE, (volumeToAttach.getPoolType() != null) ? volumeToAttach.getPoolType().toString() : null);
            if (chapInfo != null) {
                details.put(DiskTO.CHAP_INITIATOR_USERNAME, chapInfo.getInitiatorUsername());
                details.put(DiskTO.CHAP_INITIATOR_SECRET, chapInfo.getInitiatorSecret());
                details.put(DiskTO.CHAP_TARGET_USERNAME, chapInfo.getTargetUsername());
                details.put(DiskTO.CHAP_TARGET_SECRET, chapInfo.getTargetSecret());
            }
            _userVmDao.loadDetails(vm);
            // Pass the VM's configured disk controllers to the hypervisor.
            Map<String, String> controllerInfo = new HashMap<String, String>();
            controllerInfo.put(VmDetailConstants.ROOT_DISK_CONTROLLER, vm.getDetail(VmDetailConstants.ROOT_DISK_CONTROLLER));
            controllerInfo.put(VmDetailConstants.DATA_DISK_CONTROLLER, vm.getDetail(VmDetailConstants.DATA_DISK_CONTROLLER));
            cmd.setControllerInfo(controllerInfo);
            s_logger.debug("Attach volume id:" + volumeToAttach.getId() + " on VM id:" + vm.getId() + " has controller info:" + controllerInfo);
            try {
                answer = (AttachAnswer)_agentMgr.send(hostId, cmd);
            } catch (Exception e) {
                if (host != null) {
                    volService.revokeAccess(volFactory.getVolume(volumeToAttach.getId()), host, dataStore);
                }
                throw new CloudRuntimeException(errorMsg + " due to: " + e.getMessage());
            }
        }
        if (!sendCommand || (answer != null && answer.getResult())) {
            // Mark the volume as attached
            if (sendCommand) {
                DiskTO disk = answer.getDisk();
                _volsDao.attachVolume(volumeToAttach.getId(), vm.getId(), disk.getDiskSeq());
                volumeToAttach = _volsDao.findById(volumeToAttach.getId());
                if (volumeToAttachStoragePool.isManaged() && volumeToAttach.getPath() == null) {
                    volumeToAttach.setPath(answer.getDisk().getPath());
                    _volsDao.update(volumeToAttach.getId(), volumeToAttach);
                }
            } else {
                deviceId = getDeviceId(vm, deviceId);
                _volsDao.attachVolume(volumeToAttach.getId(), vm.getId(), deviceId);
                volumeToAttach = _volsDao.findById(volumeToAttach.getId());
                if (vm.getHypervisorType() == HypervisorType.KVM && volumeToAttachStoragePool != null && volumeToAttachStoragePool.isManaged() &&
volumeToAttach.getPath() == null && volumeToAttach.get_iScsiName() != null) { volumeToAttach.setPath(volumeToAttach.get_iScsiName()); _volsDao.update(volumeToAttach.getId(), volumeToAttach); } } // insert record for disk I/O statistics VmDiskStatisticsVO diskstats = _vmDiskStatsDao.findBy(vm.getAccountId(), vm.getDataCenterId(), vm.getId(), volumeToAttach.getId()); if (diskstats == null) { diskstats = new VmDiskStatisticsVO(vm.getAccountId(), vm.getDataCenterId(), vm.getId(), volumeToAttach.getId()); _vmDiskStatsDao.persist(diskstats); } attached = true; } else { if (answer != null) { String details = answer.getDetails(); if (details != null && !details.isEmpty()) { errorMsg += "; " + details; } } if (host != null) { volService.revokeAccess(volFactory.getVolume(volumeToAttach.getId()), host, dataStore); } throw new CloudRuntimeException(errorMsg); } } finally { Volume.Event ev = Volume.Event.OperationFailed; VolumeInfo volInfo = volFactory.getVolume(volumeToAttach.getId()); if (attached) { ev = Volume.Event.OperationSucceeded; s_logger.debug("Volume: " + volInfo.getName() + " successfully attached to VM: " + volInfo.getAttachedVmName()); } else { s_logger.debug("Volume: " + volInfo.getName() + " failed to attach to VM: " + volInfo.getAttachedVmName()); } volInfo.stateTransit(ev); } return _volsDao.findById(volumeToAttach.getId()); } private int getMaxDataVolumesSupported(UserVmVO vm) { Long hostId = vm.getHostId(); if (hostId == null) { hostId = vm.getLastHostId(); } HostVO host = _hostDao.findById(hostId); Integer maxDataVolumesSupported = null; if (host != null) { _hostDao.loadDetails(host); String hypervisorVersion = host.getDetail("product_version"); if (org.apache.commons.lang.StringUtils.isBlank(hypervisorVersion)) { hypervisorVersion = host.getHypervisorVersion(); } maxDataVolumesSupported = _hypervisorCapabilitiesDao.getMaxDataVolumesLimit(host.getHypervisorType(), hypervisorVersion); } else { HypervisorType hypervisorType = vm.getHypervisorType(); if 
(hypervisorType != null && CollectionUtils.isNotEmpty(supportingDefaultHV) && supportingDefaultHV.contains(hypervisorType)) { maxDataVolumesSupported = _hypervisorCapabilitiesDao.getMaxDataVolumesLimit(hypervisorType, "default"); } } if (maxDataVolumesSupported == null || maxDataVolumesSupported.intValue() <= 0) { maxDataVolumesSupported = 6; // 6 data disks by default if nothing // is specified in // 'hypervisor_capabilities' table } return maxDataVolumesSupported.intValue(); } private Long getDeviceId(UserVmVO vm, Long deviceId) { // allocate deviceId int maxDevices = getMaxDataVolumesSupported(vm) + 2; // add 2 to consider devices root volume and cdrom int maxDeviceId = maxDevices - 1; List<VolumeVO> vols = _volsDao.findByInstance(vm.getId()); if (deviceId != null) { if (deviceId.longValue() < 0 || deviceId.longValue() > maxDeviceId || deviceId.longValue() == 3) { throw new RuntimeException("deviceId should be 0,1,2,4-" + maxDeviceId); } for (VolumeVO vol : vols) { if (vol.getDeviceId().equals(deviceId)) { throw new RuntimeException("deviceId " + deviceId + " is used by vm " + vm.getId()); } } } else { // allocate deviceId here List<String> devIds = new ArrayList<String>(); for (int i = 1; i <= maxDeviceId; i++) { devIds.add(String.valueOf(i)); } devIds.remove("3"); for (VolumeVO vol : vols) { devIds.remove(vol.getDeviceId().toString().trim()); } if (devIds.isEmpty()) { throw new RuntimeException("All device Ids are used by vm " + vm.getId()); } deviceId = Long.parseLong(devIds.iterator().next()); } return deviceId; } @Override public boolean configure(String name, Map<String, Object> params) { String maxVolumeSizeInGbString = _configDao.getValue(Config.MaxVolumeSize.toString()); _maxVolumeSizeInGb = NumbersUtil.parseLong(maxVolumeSizeInGbString, 2000); supportingDefaultHV = _hypervisorCapabilitiesDao.getHypervisorsWithDefaultEntries(); return true; } public List<StoragePoolAllocator> getStoragePoolAllocators() { return _storagePoolAllocators; } @Inject public 
void setStoragePoolAllocators(List<StoragePoolAllocator> storagePoolAllocators) { _storagePoolAllocators = storagePoolAllocators; } public class VmJobVolumeUrlOutcome extends OutcomeImpl<String> { public VmJobVolumeUrlOutcome(final AsyncJob job) { super(String.class, job, VmJobCheckInterval.value(), new Predicate() { @Override public boolean checkCondition() { AsyncJobVO jobVo = _entityMgr.findById(AsyncJobVO.class, job.getId()); assert (jobVo != null); if (jobVo == null || jobVo.getStatus() != JobInfo.Status.IN_PROGRESS) { return true; } return false; } }, AsyncJob.Topics.JOB_STATE); } } public class VmJobVolumeOutcome extends OutcomeImpl<Volume> { private long _volumeId; public VmJobVolumeOutcome(final AsyncJob job, final long volumeId) { super(Volume.class, job, VmJobCheckInterval.value(), new Predicate() { @Override public boolean checkCondition() { AsyncJobVO jobVo = _entityMgr.findById(AsyncJobVO.class, job.getId()); assert (jobVo != null); if (jobVo == null || jobVo.getStatus() != JobInfo.Status.IN_PROGRESS) { return true; } return false; } }, AsyncJob.Topics.JOB_STATE); _volumeId = volumeId; } @Override protected Volume retrieve() { return _volsDao.findById(_volumeId); } } public class VmJobSnapshotOutcome extends OutcomeImpl<Snapshot> { private long _snapshotId; public VmJobSnapshotOutcome(final AsyncJob job, final long snapshotId) { super(Snapshot.class, job, VmJobCheckInterval.value(), new Predicate() { @Override public boolean checkCondition() { AsyncJobVO jobVo = _entityMgr.findById(AsyncJobVO.class, job.getId()); assert (jobVo != null); if (jobVo == null || jobVo.getStatus() != JobInfo.Status.IN_PROGRESS) { return true; } return false; } }, AsyncJob.Topics.JOB_STATE); _snapshotId = snapshotId; } @Override protected Snapshot retrieve() { return _snapshotDao.findById(_snapshotId); } } public Outcome<Volume> attachVolumeToVmThroughJobQueue(final Long vmId, final Long volumeId, final Long deviceId) { final CallContext context = CallContext.current(); 
final User callingUser = context.getCallingUser(); final Account callingAccount = context.getCallingAccount(); final VMInstanceVO vm = _vmInstanceDao.findById(vmId); VmWorkJobVO workJob = new VmWorkJobVO(context.getContextId()); workJob.setDispatcher(VmWorkConstants.VM_WORK_JOB_DISPATCHER); workJob.setCmd(VmWorkAttachVolume.class.getName()); workJob.setAccountId(callingAccount.getId()); workJob.setUserId(callingUser.getId()); workJob.setStep(VmWorkJobVO.Step.Starting); workJob.setVmType(VirtualMachine.Type.Instance); workJob.setVmInstanceId(vm.getId()); workJob.setRelated(AsyncJobExecutionContext.getOriginJobId()); // save work context info (there are some duplications) VmWorkAttachVolume workInfo = new VmWorkAttachVolume(callingUser.getId(), callingAccount.getId(), vm.getId(), VolumeApiServiceImpl.VM_WORK_JOB_HANDLER, volumeId, deviceId); workJob.setCmdInfo(VmWorkSerializer.serialize(workInfo)); _jobMgr.submitAsyncJob(workJob, VmWorkConstants.VM_WORK_QUEUE, vm.getId()); AsyncJobVO jobVo = _jobMgr.getAsyncJob(workJob.getId()); s_logger.debug("New job " + workJob.getId() + ", result field: " + jobVo.getResult()); AsyncJobExecutionContext.getCurrentExecutionContext().joinJob(workJob.getId()); return new VmJobVolumeOutcome(workJob, volumeId); } public Outcome<Volume> detachVolumeFromVmThroughJobQueue(final Long vmId, final Long volumeId) { final CallContext context = CallContext.current(); final User callingUser = context.getCallingUser(); final Account callingAccount = context.getCallingAccount(); final VMInstanceVO vm = _vmInstanceDao.findById(vmId); VmWorkJobVO workJob = new VmWorkJobVO(context.getContextId()); workJob.setDispatcher(VmWorkConstants.VM_WORK_JOB_DISPATCHER); workJob.setCmd(VmWorkDetachVolume.class.getName()); workJob.setAccountId(callingAccount.getId()); workJob.setUserId(callingUser.getId()); workJob.setStep(VmWorkJobVO.Step.Starting); workJob.setVmType(VirtualMachine.Type.Instance); workJob.setVmInstanceId(vm.getId()); 
workJob.setRelated(AsyncJobExecutionContext.getOriginJobId()); // save work context info (there are some duplications) VmWorkDetachVolume workInfo = new VmWorkDetachVolume(callingUser.getId(), callingAccount.getId(), vm.getId(), VolumeApiServiceImpl.VM_WORK_JOB_HANDLER, volumeId); workJob.setCmdInfo(VmWorkSerializer.serialize(workInfo)); _jobMgr.submitAsyncJob(workJob, VmWorkConstants.VM_WORK_QUEUE, vm.getId()); AsyncJobExecutionContext.getCurrentExecutionContext().joinJob(workJob.getId()); return new VmJobVolumeOutcome(workJob, volumeId); } public Outcome<Volume> resizeVolumeThroughJobQueue(final Long vmId, final long volumeId, final long currentSize, final long newSize, final Long newMinIops, final Long newMaxIops, final Integer newHypervisorSnapshotReserve, final Long newServiceOfferingId, final boolean shrinkOk) { final CallContext context = CallContext.current(); final User callingUser = context.getCallingUser(); final Account callingAccount = context.getCallingAccount(); final VMInstanceVO vm = _vmInstanceDao.findById(vmId); VmWorkJobVO workJob = new VmWorkJobVO(context.getContextId()); workJob.setDispatcher(VmWorkConstants.VM_WORK_JOB_DISPATCHER); workJob.setCmd(VmWorkResizeVolume.class.getName()); workJob.setAccountId(callingAccount.getId()); workJob.setUserId(callingUser.getId()); workJob.setStep(VmWorkJobVO.Step.Starting); workJob.setVmType(VirtualMachine.Type.Instance); workJob.setVmInstanceId(vm.getId()); workJob.setRelated(AsyncJobExecutionContext.getOriginJobId()); // save work context info (there are some duplications) VmWorkResizeVolume workInfo = new VmWorkResizeVolume(callingUser.getId(), callingAccount.getId(), vm.getId(), VolumeApiServiceImpl.VM_WORK_JOB_HANDLER, volumeId, currentSize, newSize, newMinIops, newMaxIops, newHypervisorSnapshotReserve, newServiceOfferingId, shrinkOk); workJob.setCmdInfo(VmWorkSerializer.serialize(workInfo)); _jobMgr.submitAsyncJob(workJob, VmWorkConstants.VM_WORK_QUEUE, vm.getId()); 
AsyncJobExecutionContext.getCurrentExecutionContext().joinJob(workJob.getId()); return new VmJobVolumeOutcome(workJob, volumeId); } public Outcome<String> extractVolumeThroughJobQueue(final Long vmId, final long volumeId, final long zoneId) { final CallContext context = CallContext.current(); final User callingUser = context.getCallingUser(); final Account callingAccount = context.getCallingAccount(); final VMInstanceVO vm = _vmInstanceDao.findById(vmId); VmWorkJobVO workJob = new VmWorkJobVO(context.getContextId()); workJob.setDispatcher(VmWorkConstants.VM_WORK_JOB_DISPATCHER); workJob.setCmd(VmWorkExtractVolume.class.getName()); workJob.setAccountId(callingAccount.getId()); workJob.setUserId(callingUser.getId()); workJob.setStep(VmWorkJobVO.Step.Starting); workJob.setVmType(VirtualMachine.Type.Instance); workJob.setVmInstanceId(vm.getId()); workJob.setRelated(AsyncJobExecutionContext.getOriginJobId()); // save work context info (there are some duplications) VmWorkExtractVolume workInfo = new VmWorkExtractVolume(callingUser.getId(), callingAccount.getId(), vm.getId(), VolumeApiServiceImpl.VM_WORK_JOB_HANDLER, volumeId, zoneId); workJob.setCmdInfo(VmWorkSerializer.serialize(workInfo)); _jobMgr.submitAsyncJob(workJob, VmWorkConstants.VM_WORK_QUEUE, vm.getId()); AsyncJobExecutionContext.getCurrentExecutionContext().joinJob(workJob.getId()); return new VmJobVolumeUrlOutcome(workJob); } private Outcome<Volume> migrateVolumeThroughJobQueue(VMInstanceVO vm, VolumeVO vol, StoragePool destPool, boolean liveMigrateVolume, DiskOfferingVO newDiskOffering) { CallContext context = CallContext.current(); User callingUser = context.getCallingUser(); Account callingAccount = context.getCallingAccount(); VmWorkJobVO workJob = new VmWorkJobVO(context.getContextId()); workJob.setDispatcher(VmWorkConstants.VM_WORK_JOB_DISPATCHER); workJob.setCmd(VmWorkMigrateVolume.class.getName()); workJob.setAccountId(callingAccount.getId()); workJob.setUserId(callingUser.getId()); 
workJob.setStep(VmWorkJobVO.Step.Starting); workJob.setVmType(VirtualMachine.Type.Instance); workJob.setVmInstanceId(vm.getId()); workJob.setRelated(AsyncJobExecutionContext.getOriginJobId()); Long newDiskOfferingId = newDiskOffering != null ? newDiskOffering.getId() : null; // save work context info (there are some duplications) VmWorkMigrateVolume workInfo = new VmWorkMigrateVolume(callingUser.getId(), callingAccount.getId(), vm.getId(), VolumeApiServiceImpl.VM_WORK_JOB_HANDLER, vol.getId(), destPool.getId(), liveMigrateVolume, newDiskOfferingId); workJob.setCmdInfo(VmWorkSerializer.serialize(workInfo)); _jobMgr.submitAsyncJob(workJob, VmWorkConstants.VM_WORK_QUEUE, vm.getId()); AsyncJobExecutionContext.getCurrentExecutionContext().joinJob(workJob.getId()); return new VmJobVolumeOutcome(workJob, vol.getId()); } public Outcome<Snapshot> takeVolumeSnapshotThroughJobQueue(final Long vmId, final Long volumeId, final Long policyId, final Long snapshotId, final Long accountId, final boolean quiesceVm, final Snapshot.LocationType locationType, final boolean asyncBackup) { final CallContext context = CallContext.current(); final User callingUser = context.getCallingUser(); final Account callingAccount = context.getCallingAccount(); final VMInstanceVO vm = _vmInstanceDao.findById(vmId); VmWorkJobVO workJob = new VmWorkJobVO(context.getContextId()); workJob.setDispatcher(VmWorkConstants.VM_WORK_JOB_DISPATCHER); workJob.setCmd(VmWorkTakeVolumeSnapshot.class.getName()); workJob.setAccountId(callingAccount.getId()); workJob.setUserId(callingUser.getId()); workJob.setStep(VmWorkJobVO.Step.Starting); workJob.setVmType(VirtualMachine.Type.Instance); workJob.setVmInstanceId(vm.getId()); workJob.setRelated(AsyncJobExecutionContext.getOriginJobId()); // save work context info (there are some duplications) VmWorkTakeVolumeSnapshot workInfo = new VmWorkTakeVolumeSnapshot(callingUser.getId(), accountId != null ? 
accountId : callingAccount.getId(), vm.getId(), VolumeApiServiceImpl.VM_WORK_JOB_HANDLER, volumeId, policyId, snapshotId, quiesceVm, locationType, asyncBackup); workJob.setCmdInfo(VmWorkSerializer.serialize(workInfo)); _jobMgr.submitAsyncJob(workJob, VmWorkConstants.VM_WORK_QUEUE, vm.getId()); AsyncJobExecutionContext.getCurrentExecutionContext().joinJob(workJob.getId()); return new VmJobSnapshotOutcome(workJob, snapshotId); } @ReflectionUse private Pair<JobInfo.Status, String> orchestrateExtractVolume(VmWorkExtractVolume work) throws Exception { String volUrl = orchestrateExtractVolume(work.getVolumeId(), work.getZoneId()); return new Pair<JobInfo.Status, String>(JobInfo.Status.SUCCEEDED, _jobMgr.marshallResultObject(volUrl)); } @ReflectionUse private Pair<JobInfo.Status, String> orchestrateAttachVolumeToVM(VmWorkAttachVolume work) throws Exception { Volume vol = orchestrateAttachVolumeToVM(work.getVmId(), work.getVolumeId(), work.getDeviceId()); return new Pair<JobInfo.Status, String>(JobInfo.Status.SUCCEEDED, _jobMgr.marshallResultObject(new Long(vol.getId()))); } @ReflectionUse private Pair<JobInfo.Status, String> orchestrateDetachVolumeFromVM(VmWorkDetachVolume work) throws Exception { Volume vol = orchestrateDetachVolumeFromVM(work.getVmId(), work.getVolumeId()); return new Pair<JobInfo.Status, String>(JobInfo.Status.SUCCEEDED, _jobMgr.marshallResultObject(new Long(vol.getId()))); } @ReflectionUse private Pair<JobInfo.Status, String> orchestrateResizeVolume(VmWorkResizeVolume work) throws Exception { Volume vol = orchestrateResizeVolume(work.getVolumeId(), work.getCurrentSize(), work.getNewSize(), work.getNewMinIops(), work.getNewMaxIops(), work.getNewHypervisorSnapshotReserve(), work.getNewServiceOfferingId(), work.isShrinkOk()); return new Pair<JobInfo.Status, String>(JobInfo.Status.SUCCEEDED, _jobMgr.marshallResultObject(new Long(vol.getId()))); } @ReflectionUse private Pair<JobInfo.Status, String> orchestrateMigrateVolume(VmWorkMigrateVolume work) throws 
Exception { VolumeVO volume = _volsDao.findById(work.getVolumeId()); StoragePoolVO targetStoragePool = _storagePoolDao.findById(work.getDestPoolId()); DiskOfferingVO newDiskOffering = _diskOfferingDao.findById(work.getNewDiskOfferingId()); Volume newVol = orchestrateMigrateVolume(volume, targetStoragePool, work.isLiveMigrate(), newDiskOffering); return new Pair<JobInfo.Status, String>(JobInfo.Status.SUCCEEDED, _jobMgr.marshallResultObject(newVol.getId())); } @ReflectionUse private Pair<JobInfo.Status, String> orchestrateTakeVolumeSnapshot(VmWorkTakeVolumeSnapshot work) throws Exception { Account account = _accountDao.findById(work.getAccountId()); orchestrateTakeVolumeSnapshot(work.getVolumeId(), work.getPolicyId(), work.getSnapshotId(), account, work.isQuiesceVm(), work.getLocationType(), work.isAsyncBackup()); return new Pair<JobInfo.Status, String>(JobInfo.Status.SUCCEEDED, _jobMgr.marshallResultObject(work.getSnapshotId())); } @Override public Pair<JobInfo.Status, String> handleVmWorkJob(VmWork work) throws Exception { return _jobHandlerProxy.handleVmWorkJob(work); } private VmWorkJobVO createPlaceHolderWork(long instanceId) { VmWorkJobVO workJob = new VmWorkJobVO(""); workJob.setDispatcher(VmWorkConstants.VM_WORK_JOB_PLACEHOLDER); workJob.setCmd(""); workJob.setCmdInfo(""); workJob.setAccountId(0); workJob.setUserId(0); workJob.setStep(VmWorkJobVO.Step.Starting); workJob.setVmType(VirtualMachine.Type.Instance); workJob.setVmInstanceId(instanceId); workJob.setInitMsid(ManagementServerNode.getManagementServerId()); _workJobDao.persist(workJob); return workJob; } @Override public String getConfigComponentName() { return VolumeApiService.class.getSimpleName(); } @Override public ConfigKey<?>[] getConfigKeys() { return new ConfigKey<?>[] {ConcurrentMigrationsThresholdPerDatastore, AllowUserExpungeRecoverVolume}; } }
server/src/main/java/com/cloud/storage/VolumeApiServiceImpl.java
// Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The ASF licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package com.cloud.storage; import java.net.MalformedURLException; import java.net.URL; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.UUID; import java.util.concurrent.ExecutionException; import javax.inject.Inject; import org.apache.cloudstack.api.command.user.volume.AttachVolumeCmd; import org.apache.cloudstack.api.command.user.volume.CreateVolumeCmd; import org.apache.cloudstack.api.command.user.volume.DetachVolumeCmd; import org.apache.cloudstack.api.command.user.volume.ExtractVolumeCmd; import org.apache.cloudstack.api.command.user.volume.GetUploadParamsForVolumeCmd; import org.apache.cloudstack.api.command.user.volume.MigrateVolumeCmd; import org.apache.cloudstack.api.command.user.volume.ResizeVolumeCmd; import org.apache.cloudstack.api.command.user.volume.UploadVolumeCmd; import org.apache.cloudstack.api.response.GetUploadParamsResponse; import org.apache.cloudstack.context.CallContext; import org.apache.cloudstack.engine.orchestration.service.VolumeOrchestrationService; import 
org.apache.cloudstack.engine.subsystem.api.storage.ChapInfo; import org.apache.cloudstack.engine.subsystem.api.storage.DataObject; import org.apache.cloudstack.engine.subsystem.api.storage.DataStore; import org.apache.cloudstack.engine.subsystem.api.storage.DataStoreManager; import org.apache.cloudstack.engine.subsystem.api.storage.EndPoint; import org.apache.cloudstack.engine.subsystem.api.storage.HostScope; import org.apache.cloudstack.engine.subsystem.api.storage.PrimaryDataStoreInfo; import org.apache.cloudstack.engine.subsystem.api.storage.Scope; import org.apache.cloudstack.engine.subsystem.api.storage.StoragePoolAllocator; import org.apache.cloudstack.engine.subsystem.api.storage.VolumeDataFactory; import org.apache.cloudstack.engine.subsystem.api.storage.VolumeInfo; import org.apache.cloudstack.engine.subsystem.api.storage.VolumeService; import org.apache.cloudstack.engine.subsystem.api.storage.VolumeService.VolumeApiResult; import org.apache.cloudstack.framework.async.AsyncCallFuture; import org.apache.cloudstack.framework.config.ConfigKey; import org.apache.cloudstack.framework.config.Configurable; import org.apache.cloudstack.framework.config.dao.ConfigurationDao; import org.apache.cloudstack.framework.jobs.AsyncJob; import org.apache.cloudstack.framework.jobs.AsyncJobExecutionContext; import org.apache.cloudstack.framework.jobs.AsyncJobManager; import org.apache.cloudstack.framework.jobs.Outcome; import org.apache.cloudstack.framework.jobs.dao.VmWorkJobDao; import org.apache.cloudstack.framework.jobs.impl.AsyncJobVO; import org.apache.cloudstack.framework.jobs.impl.OutcomeImpl; import org.apache.cloudstack.framework.jobs.impl.VmWorkJobVO; import org.apache.cloudstack.jobs.JobInfo; import org.apache.cloudstack.storage.command.AttachAnswer; import org.apache.cloudstack.storage.command.AttachCommand; import org.apache.cloudstack.storage.command.DettachCommand; import org.apache.cloudstack.storage.command.TemplateOrVolumePostUploadCommand; import 
org.apache.cloudstack.storage.datastore.db.PrimaryDataStoreDao; import org.apache.cloudstack.storage.datastore.db.StoragePoolVO; import org.apache.cloudstack.storage.datastore.db.VolumeDataStoreDao; import org.apache.cloudstack.storage.datastore.db.VolumeDataStoreVO; import org.apache.cloudstack.storage.image.datastore.ImageStoreEntity; import org.apache.cloudstack.utils.identity.ManagementServerNode; import org.apache.cloudstack.utils.imagestore.ImageStoreUtil; import org.apache.cloudstack.utils.volume.VirtualMachineDiskInfo; import org.apache.commons.collections.CollectionUtils; import org.apache.commons.collections.MapUtils; import org.apache.log4j.Logger; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import com.cloud.agent.AgentManager; import com.cloud.agent.api.Answer; import com.cloud.agent.api.ModifyTargetsCommand; import com.cloud.agent.api.to.DataTO; import com.cloud.agent.api.to.DiskTO; import com.cloud.api.ApiDBUtils; import com.cloud.configuration.Config; import com.cloud.configuration.ConfigurationManager; import com.cloud.configuration.Resource.ResourceType; import com.cloud.dc.ClusterDetailsDao; import com.cloud.dc.DataCenter; import com.cloud.dc.DataCenterVO; import com.cloud.dc.dao.DataCenterDao; import com.cloud.domain.Domain; import com.cloud.event.ActionEvent; import com.cloud.event.EventTypes; import com.cloud.event.UsageEventUtils; import com.cloud.exception.ConcurrentOperationException; import com.cloud.exception.InvalidParameterValueException; import com.cloud.exception.PermissionDeniedException; import com.cloud.exception.ResourceAllocationException; import com.cloud.exception.StorageUnavailableException; import com.cloud.gpu.GPU; import com.cloud.host.HostVO; import com.cloud.host.Status; import com.cloud.host.dao.HostDao; import com.cloud.hypervisor.Hypervisor.HypervisorType; import com.cloud.hypervisor.HypervisorCapabilitiesVO; import com.cloud.hypervisor.dao.HypervisorCapabilitiesDao; import com.cloud.org.Grouping; 
import com.cloud.resource.ResourceState; import com.cloud.serializer.GsonHelper; import com.cloud.server.ResourceTag; import com.cloud.server.TaggedResourceService; import com.cloud.service.dao.ServiceOfferingDetailsDao; import com.cloud.storage.Storage.ImageFormat; import com.cloud.storage.dao.DiskOfferingDao; import com.cloud.storage.dao.SnapshotDao; import com.cloud.storage.dao.StoragePoolTagsDao; import com.cloud.storage.dao.VMTemplateDao; import com.cloud.storage.dao.VolumeDao; import com.cloud.storage.snapshot.SnapshotApiService; import com.cloud.storage.snapshot.SnapshotManager; import com.cloud.template.TemplateManager; import com.cloud.user.Account; import com.cloud.user.AccountManager; import com.cloud.user.ResourceLimitService; import com.cloud.user.User; import com.cloud.user.VmDiskStatisticsVO; import com.cloud.user.dao.AccountDao; import com.cloud.user.dao.VmDiskStatisticsDao; import com.cloud.utils.DateUtil; import com.cloud.utils.EncryptionUtil; import com.cloud.utils.EnumUtils; import com.cloud.utils.NumbersUtil; import com.cloud.utils.Pair; import com.cloud.utils.Predicate; import com.cloud.utils.ReflectionUse; import com.cloud.utils.StringUtils; import com.cloud.utils.UriUtils; import com.cloud.utils.component.ManagerBase; import com.cloud.utils.db.DB; import com.cloud.utils.db.EntityManager; import com.cloud.utils.db.Transaction; import com.cloud.utils.db.TransactionCallback; import com.cloud.utils.db.TransactionCallbackWithException; import com.cloud.utils.db.TransactionStatus; import com.cloud.utils.db.UUIDManager; import com.cloud.utils.exception.CloudRuntimeException; import com.cloud.utils.fsm.NoTransitionException; import com.cloud.utils.fsm.StateMachine2; import com.cloud.vm.UserVmManager; import com.cloud.vm.UserVmService; import com.cloud.vm.UserVmVO; import com.cloud.vm.VMInstanceVO; import com.cloud.vm.VirtualMachine; import com.cloud.vm.VirtualMachine.State; import com.cloud.vm.VmDetailConstants; import com.cloud.vm.VmWork; import 
com.cloud.vm.VmWorkAttachVolume; import com.cloud.vm.VmWorkConstants; import com.cloud.vm.VmWorkDetachVolume; import com.cloud.vm.VmWorkExtractVolume; import com.cloud.vm.VmWorkJobHandler; import com.cloud.vm.VmWorkJobHandlerProxy; import com.cloud.vm.VmWorkMigrateVolume; import com.cloud.vm.VmWorkResizeVolume; import com.cloud.vm.VmWorkSerializer; import com.cloud.vm.VmWorkTakeVolumeSnapshot; import com.cloud.vm.dao.UserVmDao; import com.cloud.vm.dao.VMInstanceDao; import com.cloud.vm.snapshot.VMSnapshotVO; import com.cloud.vm.snapshot.dao.VMSnapshotDao; import com.google.common.base.Strings; import com.google.gson.Gson; import com.google.gson.GsonBuilder; import com.google.gson.JsonParseException; public class VolumeApiServiceImpl extends ManagerBase implements VolumeApiService, VmWorkJobHandler, Configurable { private final static Logger s_logger = Logger.getLogger(VolumeApiServiceImpl.class); public static final String VM_WORK_JOB_HANDLER = VolumeApiServiceImpl.class.getSimpleName(); @Inject private UserVmManager _userVmMgr; @Inject private VolumeOrchestrationService _volumeMgr; @Inject private EntityManager _entityMgr; @Inject private AgentManager _agentMgr; @Inject private TemplateManager _tmpltMgr; @Inject private SnapshotManager _snapshotMgr; @Inject private AccountManager _accountMgr; @Inject private ConfigurationManager _configMgr; @Inject private VolumeDao _volsDao; @Inject private HostDao _hostDao; @Inject private SnapshotDao _snapshotDao; @Inject private ServiceOfferingDetailsDao _serviceOfferingDetailsDao; @Inject private UserVmDao _userVmDao; @Inject private UserVmService _userVmService; @Inject private VolumeDataStoreDao _volumeStoreDao; @Inject private VMInstanceDao _vmInstanceDao; @Inject private PrimaryDataStoreDao _storagePoolDao; @Inject private DiskOfferingDao _diskOfferingDao; @Inject private AccountDao _accountDao; @Inject private DataCenterDao _dcDao; @Inject private VMTemplateDao _templateDao; @Inject private ResourceLimitService 
_resourceLimitMgr; @Inject private VmDiskStatisticsDao _vmDiskStatsDao; @Inject private VMSnapshotDao _vmSnapshotDao; @Inject private ConfigurationDao _configDao; @Inject private DataStoreManager dataStoreMgr; @Inject private VolumeService volService; @Inject private VolumeDataFactory volFactory; @Inject private SnapshotApiService snapshotMgr; @Inject private UUIDManager _uuidMgr; @Inject private HypervisorCapabilitiesDao _hypervisorCapabilitiesDao; @Inject private AsyncJobManager _jobMgr; @Inject private VmWorkJobDao _workJobDao; @Inject private ClusterDetailsDao _clusterDetailsDao; @Inject private StorageManager storageMgr; @Inject private StoragePoolTagsDao storagePoolTagsDao; @Inject private StorageUtil storageUtil; @Inject public TaggedResourceService taggedResourceService; protected Gson _gson; private List<StoragePoolAllocator> _storagePoolAllocators; private List<HypervisorType> supportingDefaultHV; VmWorkJobHandlerProxy _jobHandlerProxy = new VmWorkJobHandlerProxy(this); static final ConfigKey<Long> VmJobCheckInterval = new ConfigKey<Long>("Advanced", Long.class, "vm.job.check.interval", "3000", "Interval in milliseconds to check if the job is complete", false); static final ConfigKey<Boolean> VolumeUrlCheck = new ConfigKey<Boolean>("Advanced", Boolean.class, "volume.url.check", "true", "Check the url for a volume before downloading it from the management server. 
Set to false when you managment has no internet access.", true); public static final ConfigKey<Boolean> AllowUserExpungeRecoverVolume = new ConfigKey<Boolean>("Advanced", Boolean.class, "allow.user.expunge.recover.volume", "true", "Determines whether users can expunge or recover their volume", true, ConfigKey.Scope.Account); private long _maxVolumeSizeInGb; private final StateMachine2<Volume.State, Volume.Event, Volume> _volStateMachine; protected VolumeApiServiceImpl() { _volStateMachine = Volume.State.getStateMachine(); _gson = GsonHelper.getGsonLogger(); } /* * Upload the volume to secondary storage. */ @Override @DB @ActionEvent(eventType = EventTypes.EVENT_VOLUME_UPLOAD, eventDescription = "uploading volume", async = true) public VolumeVO uploadVolume(UploadVolumeCmd cmd) throws ResourceAllocationException { Account caller = CallContext.current().getCallingAccount(); long ownerId = cmd.getEntityOwnerId(); Account owner = _entityMgr.findById(Account.class, ownerId); Long zoneId = cmd.getZoneId(); String volumeName = cmd.getVolumeName(); String url = cmd.getUrl(); String format = cmd.getFormat(); Long diskOfferingId = cmd.getDiskOfferingId(); String imageStoreUuid = cmd.getImageStoreUuid(); DataStore store = _tmpltMgr.getImageStore(imageStoreUuid, zoneId); validateVolume(caller, ownerId, zoneId, volumeName, url, format, diskOfferingId); VolumeVO volume = persistVolume(owner, zoneId, volumeName, url, cmd.getFormat(), diskOfferingId, Volume.State.Allocated); VolumeInfo vol = volFactory.getVolume(volume.getId()); RegisterVolumePayload payload = new RegisterVolumePayload(cmd.getUrl(), cmd.getChecksum(), cmd.getFormat()); vol.addPayload(payload); volService.registerVolume(vol, store); return volume; } @Override @ActionEvent(eventType = EventTypes.EVENT_VOLUME_UPLOAD, eventDescription = "uploading volume for post upload", async = true) public GetUploadParamsResponse uploadVolume(final GetUploadParamsForVolumeCmd cmd) throws ResourceAllocationException, 
MalformedURLException { Account caller = CallContext.current().getCallingAccount(); long ownerId = cmd.getEntityOwnerId(); final Account owner = _entityMgr.findById(Account.class, ownerId); final Long zoneId = cmd.getZoneId(); final String volumeName = cmd.getName(); String format = cmd.getFormat(); final Long diskOfferingId = cmd.getDiskOfferingId(); String imageStoreUuid = cmd.getImageStoreUuid(); final DataStore store = _tmpltMgr.getImageStore(imageStoreUuid, zoneId); validateVolume(caller, ownerId, zoneId, volumeName, null, format, diskOfferingId); return Transaction.execute(new TransactionCallbackWithException<GetUploadParamsResponse, MalformedURLException>() { @Override public GetUploadParamsResponse doInTransaction(TransactionStatus status) throws MalformedURLException { VolumeVO volume = persistVolume(owner, zoneId, volumeName, null, cmd.getFormat(), diskOfferingId, Volume.State.NotUploaded); VolumeInfo vol = volFactory.getVolume(volume.getId()); RegisterVolumePayload payload = new RegisterVolumePayload(null, cmd.getChecksum(), cmd.getFormat()); vol.addPayload(payload); Pair<EndPoint, DataObject> pair = volService.registerVolumeForPostUpload(vol, store); EndPoint ep = pair.first(); DataObject dataObject = pair.second(); GetUploadParamsResponse response = new GetUploadParamsResponse(); String ssvmUrlDomain = _configDao.getValue(Config.SecStorageSecureCopyCert.key()); String url = ImageStoreUtil.generatePostUploadUrl(ssvmUrlDomain, ep.getPublicAddr(), vol.getUuid()); response.setPostURL(new URL(url)); // set the post url, this is used in the monitoring thread to determine the SSVM VolumeDataStoreVO volumeStore = _volumeStoreDao.findByVolume(vol.getId()); assert (volumeStore != null) : "sincle volume is registered, volumestore cannot be null at this stage"; volumeStore.setExtractUrl(url); _volumeStoreDao.persist(volumeStore); response.setId(UUID.fromString(vol.getUuid())); int timeout = ImageStoreUploadMonitorImpl.getUploadOperationTimeout(); DateTime 
currentDateTime = new DateTime(DateTimeZone.UTC); String expires = currentDateTime.plusMinutes(timeout).toString(); response.setTimeout(expires); String key = _configDao.getValue(Config.SSVMPSK.key()); /* * encoded metadata using the post upload config key */ TemplateOrVolumePostUploadCommand command = new TemplateOrVolumePostUploadCommand(vol.getId(), vol.getUuid(), volumeStore.getInstallPath(), cmd.getChecksum(), vol.getType().toString(), vol.getName(), vol.getFormat().toString(), dataObject.getDataStore().getUri(), dataObject.getDataStore().getRole().toString()); command.setLocalPath(volumeStore.getLocalDownloadPath()); //using the existing max upload size configuration command.setProcessTimeout(NumbersUtil.parseLong(_configDao.getValue("vmware.package.ova.timeout"), 3600)); command.setMaxUploadSize(_configDao.getValue(Config.MaxUploadVolumeSize.key())); command.setDefaultMaxAccountSecondaryStorage(_configDao.getValue(Config.DefaultMaxAccountSecondaryStorage.key())); command.setAccountId(vol.getAccountId()); Gson gson = new GsonBuilder().create(); String metadata = EncryptionUtil.encodeData(gson.toJson(command), key); response.setMetadata(metadata); /* * signature calculated on the url, expiry, metadata. 
*/ response.setSignature(EncryptionUtil.generateSignature(metadata + url + expires, key)); return response; } }); } private boolean validateVolume(Account caller, long ownerId, Long zoneId, String volumeName, String url, String format, Long diskOfferingId) throws ResourceAllocationException { // permission check Account volumeOwner = _accountMgr.getActiveAccountById(ownerId); _accountMgr.checkAccess(caller, null, true, volumeOwner); // Check that the resource limit for volumes won't be exceeded _resourceLimitMgr.checkResourceLimit(volumeOwner, ResourceType.volume); // Verify that zone exists DataCenterVO zone = _dcDao.findById(zoneId); if (zone == null) { throw new InvalidParameterValueException("Unable to find zone by id " + zoneId); } // Check if zone is disabled if (Grouping.AllocationState.Disabled == zone.getAllocationState() && !_accountMgr.isRootAdmin(caller.getId())) { throw new PermissionDeniedException("Cannot perform this operation, Zone is currently disabled: " + zoneId); } //validating the url only when url is not null. 
url can be null incase of form based post upload if (url != null) { if (url.toLowerCase().contains("file://")) { throw new InvalidParameterValueException("File:// type urls are currently unsupported"); } UriUtils.validateUrl(format, url); if (VolumeUrlCheck.value()) { // global setting that can be set when their MS does not have internet access s_logger.debug("Checking url: " + url); UriUtils.checkUrlExistence(url); } // Check that the resource limit for secondary storage won't be exceeded _resourceLimitMgr.checkResourceLimit(_accountMgr.getAccount(ownerId), ResourceType.secondary_storage, UriUtils.getRemoteSize(url)); } else { _resourceLimitMgr.checkResourceLimit(_accountMgr.getAccount(ownerId), ResourceType.secondary_storage); } try { ImageFormat.valueOf(format.toUpperCase()); } catch (IllegalArgumentException e) { s_logger.debug("ImageFormat IllegalArgumentException: " + e.getMessage()); throw new IllegalArgumentException("Image format: " + format + " is incorrect. Supported formats are " + EnumUtils.listValues(ImageFormat.values())); } // Check that the the disk offering specified is valid if (diskOfferingId != null) { DiskOfferingVO diskOffering = _diskOfferingDao.findById(diskOfferingId); if ((diskOffering == null) || diskOffering.getRemoved() != null || !DiskOfferingVO.Type.Disk.equals(diskOffering.getType())) { throw new InvalidParameterValueException("Please specify a valid disk offering."); } if (!diskOffering.isCustomized()) { throw new InvalidParameterValueException("Please specify a custom sized disk offering."); } _configMgr.checkDiskOfferingAccess(volumeOwner, diskOffering, zone); } return false; } public String getRandomVolumeName() { return UUID.randomUUID().toString(); } @DB protected VolumeVO persistVolume(final Account owner, final Long zoneId, final String volumeName, final String url, final String format, final Long diskOfferingId, final Volume.State state) { return Transaction.execute(new TransactionCallback<VolumeVO>() { @Override public 
VolumeVO doInTransaction(TransactionStatus status) { VolumeVO volume = new VolumeVO(volumeName, zoneId, -1, -1, -1, new Long(-1), null, null, Storage.ProvisioningType.THIN, 0, Volume.Type.DATADISK); volume.setPoolId(null); volume.setDataCenterId(zoneId); volume.setPodId(null); volume.setState(state); // initialize the state // to prevent a null pointer deref I put the system account id here when no owner is given. // TODO Decide if this is valid or whether throwing a CloudRuntimeException is more appropriate volume.setAccountId((owner == null) ? Account.ACCOUNT_ID_SYSTEM : owner.getAccountId()); volume.setDomainId((owner == null) ? Domain.ROOT_DOMAIN : owner.getDomainId()); if (diskOfferingId == null) { DiskOfferingVO diskOfferingVO = _diskOfferingDao.findByUniqueName("Cloud.com-Custom"); if (diskOfferingVO != null) { long defaultDiskOfferingId = diskOfferingVO.getId(); volume.setDiskOfferingId(defaultDiskOfferingId); } } else { volume.setDiskOfferingId(diskOfferingId); DiskOfferingVO diskOfferingVO = _diskOfferingDao.findById(diskOfferingId); Boolean isCustomizedIops = diskOfferingVO != null && diskOfferingVO.isCustomizedIops() != null ? diskOfferingVO.isCustomizedIops() : false; if (isCustomizedIops == null || !isCustomizedIops) { volume.setMinIops(diskOfferingVO.getMinIops()); volume.setMaxIops(diskOfferingVO.getMaxIops()); } } // volume.setSize(size); volume.setInstanceId(null); volume.setUpdated(new Date()); volume.setDomainId((owner == null) ? 
Domain.ROOT_DOMAIN : owner.getDomainId()); volume.setFormat(ImageFormat.valueOf(format)); volume = _volsDao.persist(volume); CallContext.current().setEventDetails("Volume Id: " + volume.getUuid()); // Increment resource count during allocation; if actual creation fails, // decrement it _resourceLimitMgr.incrementResourceCount(volume.getAccountId(), ResourceType.volume); //url can be null incase of postupload if (url != null) { _resourceLimitMgr.incrementResourceCount(volume.getAccountId(), ResourceType.secondary_storage, UriUtils.getRemoteSize(url)); } return volume; } }); } /** * Retrieves the volume name from CreateVolumeCmd object. * * If the retrieved volume name is null, empty or blank, then A random name * will be generated using getRandomVolumeName method. * * @param cmd * @return Either the retrieved name or a random name. */ public String getVolumeNameFromCommand(CreateVolumeCmd cmd) { String userSpecifiedName = cmd.getVolumeName(); if (org.apache.commons.lang.StringUtils.isBlank(userSpecifiedName)) { userSpecifiedName = getRandomVolumeName(); } return userSpecifiedName; } /* * Just allocate a volume in the database, don't send the createvolume cmd * to hypervisor. The volume will be finally created only when it's attached * to a VM. 
*/ @Override @DB @ActionEvent(eventType = EventTypes.EVENT_VOLUME_CREATE, eventDescription = "creating volume", create = true) public VolumeVO allocVolume(CreateVolumeCmd cmd) throws ResourceAllocationException { Account caller = CallContext.current().getCallingAccount(); long ownerId = cmd.getEntityOwnerId(); Account owner = _accountMgr.getActiveAccountById(ownerId); Boolean displayVolume = cmd.getDisplayVolume(); // permission check _accountMgr.checkAccess(caller, null, true, _accountMgr.getActiveAccountById(ownerId)); if (displayVolume == null) { displayVolume = true; } else { if (!_accountMgr.isRootAdmin(caller.getId())) { throw new PermissionDeniedException("Cannot update parameter displayvolume, only admin permitted "); } } // Check that the resource limit for volumes won't be exceeded _resourceLimitMgr.checkResourceLimit(owner, ResourceType.volume, displayVolume); Long zoneId = cmd.getZoneId(); Long diskOfferingId = null; DiskOfferingVO diskOffering = null; Long size = null; Long minIops = null; Long maxIops = null; // Volume VO used for extracting the source template id VolumeVO parentVolume = null; // validate input parameters before creating the volume if (cmd.getSnapshotId() == null && cmd.getDiskOfferingId() == null) { throw new InvalidParameterValueException("At least one of disk Offering ID or snapshot ID must be passed whilst creating volume"); } // disallow passing disk offering ID with DATA disk volume snapshots if (cmd.getSnapshotId() != null && cmd.getDiskOfferingId() != null) { SnapshotVO snapshot = _snapshotDao.findById(cmd.getSnapshotId()); if (snapshot != null) { parentVolume = _volsDao.findByIdIncludingRemoved(snapshot.getVolumeId()); if (parentVolume != null && parentVolume.getVolumeType() != Volume.Type.ROOT) throw new InvalidParameterValueException("Disk Offering ID cannot be passed whilst creating volume from snapshot other than ROOT disk snapshots"); } parentVolume = null; } if (cmd.getDiskOfferingId() != null) { // create a new volume 
diskOfferingId = cmd.getDiskOfferingId(); size = cmd.getSize(); Long sizeInGB = size; if (size != null) { if (size > 0) { size = size * 1024 * 1024 * 1024; // user specify size in GB } else { throw new InvalidParameterValueException("Disk size must be larger than 0"); } } // Check that the the disk offering is specified diskOffering = _diskOfferingDao.findById(diskOfferingId); if ((diskOffering == null) || diskOffering.getRemoved() != null || !DiskOfferingVO.Type.Disk.equals(diskOffering.getType())) { throw new InvalidParameterValueException("Please specify a valid disk offering."); } if (diskOffering.isCustomized()) { if (size == null) { throw new InvalidParameterValueException("This disk offering requires a custom size specified"); } Long customDiskOfferingMaxSize = VolumeOrchestrationService.CustomDiskOfferingMaxSize.value(); Long customDiskOfferingMinSize = VolumeOrchestrationService.CustomDiskOfferingMinSize.value(); if ((sizeInGB < customDiskOfferingMinSize) || (sizeInGB > customDiskOfferingMaxSize)) { throw new InvalidParameterValueException("Volume size: " + sizeInGB + "GB is out of allowed range. 
Max: " + customDiskOfferingMaxSize + " Min:" + customDiskOfferingMinSize); } } if (!diskOffering.isCustomized() && size != null) { throw new InvalidParameterValueException("This disk offering does not allow custom size"); } _configMgr.checkDiskOfferingAccess(owner, diskOffering, _dcDao.findById(zoneId)); if (diskOffering.getDiskSize() > 0) { size = diskOffering.getDiskSize(); } Boolean isCustomizedIops = diskOffering.isCustomizedIops(); if (isCustomizedIops != null) { if (isCustomizedIops) { minIops = cmd.getMinIops(); maxIops = cmd.getMaxIops(); if (minIops == null && maxIops == null) { minIops = 0L; maxIops = 0L; } else { if (minIops == null || minIops <= 0) { throw new InvalidParameterValueException("The min IOPS must be greater than 0."); } if (maxIops == null) { maxIops = 0L; } if (minIops > maxIops) { throw new InvalidParameterValueException("The min IOPS must be less than or equal to the max IOPS."); } } } else { minIops = diskOffering.getMinIops(); maxIops = diskOffering.getMaxIops(); } } if (!validateVolumeSizeRange(size)) {// convert size from mb to gb // for validation throw new InvalidParameterValueException("Invalid size for custom volume creation: " + size + " ,max volume size is:" + _maxVolumeSizeInGb); } } if (cmd.getSnapshotId() != null) { // create volume from snapshot Long snapshotId = cmd.getSnapshotId(); SnapshotVO snapshotCheck = _snapshotDao.findById(snapshotId); if (snapshotCheck == null) { throw new InvalidParameterValueException("unable to find a snapshot with id " + snapshotId); } if (snapshotCheck.getState() != Snapshot.State.BackedUp) { throw new InvalidParameterValueException("Snapshot id=" + snapshotId + " is not in " + Snapshot.State.BackedUp + " state yet and can't be used for volume creation"); } parentVolume = _volsDao.findByIdIncludingRemoved(snapshotCheck.getVolumeId()); if (zoneId == null) { // if zoneId is not provided, we default to create volume in the same zone as the snapshot zone. 
zoneId = snapshotCheck.getDataCenterId(); } if (diskOffering == null) { // Pure snapshot is being used to create volume. diskOfferingId = snapshotCheck.getDiskOfferingId(); diskOffering = _diskOfferingDao.findById(diskOfferingId); minIops = snapshotCheck.getMinIops(); maxIops = snapshotCheck.getMaxIops(); size = snapshotCheck.getSize(); // ; disk offering is used for tags purposes } else { if (size < snapshotCheck.getSize()) { throw new InvalidParameterValueException(String.format("Invalid size for volume creation: %dGB, snapshot size is: %dGB", size / (1024 * 1024 * 1024), snapshotCheck.getSize() / (1024 * 1024 * 1024))); } } _configMgr.checkDiskOfferingAccess(null, diskOffering, _dcDao.findById(zoneId)); // check snapshot permissions _accountMgr.checkAccess(caller, null, true, snapshotCheck); // one step operation - create volume in VM's cluster and attach it // to the VM Long vmId = cmd.getVirtualMachineId(); if (vmId != null) { // Check that the virtual machine ID is valid and it's a user vm UserVmVO vm = _userVmDao.findById(vmId); if (vm == null || vm.getType() != VirtualMachine.Type.User) { throw new InvalidParameterValueException("Please specify a valid User VM."); } // Check that the VM is in the correct state if (vm.getState() != State.Running && vm.getState() != State.Stopped) { throw new InvalidParameterValueException("Please specify a VM that is either running or stopped."); } // permission check _accountMgr.checkAccess(caller, null, false, vm); } } Storage.ProvisioningType provisioningType = diskOffering.getProvisioningType(); // Check that the resource limit for primary storage won't be exceeded _resourceLimitMgr.checkResourceLimit(owner, ResourceType.primary_storage, displayVolume, new Long(size)); // Verify that zone exists DataCenterVO zone = _dcDao.findById(zoneId); if (zone == null) { throw new InvalidParameterValueException("Unable to find zone by id " + zoneId); } // Check if zone is disabled if (Grouping.AllocationState.Disabled == 
zone.getAllocationState() && !_accountMgr.isRootAdmin(caller.getId())) { throw new PermissionDeniedException("Cannot perform this operation, Zone is currently disabled: " + zoneId); } // If local storage is disabled then creation of volume with local disk // offering not allowed if (!zone.isLocalStorageEnabled() && diskOffering.isUseLocalStorage()) { throw new InvalidParameterValueException("Zone is not configured to use local storage but volume's disk offering " + diskOffering.getName() + " uses it"); } String userSpecifiedName = getVolumeNameFromCommand(cmd); return commitVolume(cmd, caller, owner, displayVolume, zoneId, diskOfferingId, provisioningType, size, minIops, maxIops, parentVolume, userSpecifiedName, _uuidMgr.generateUuid(Volume.class, cmd.getCustomId())); } private VolumeVO commitVolume(final CreateVolumeCmd cmd, final Account caller, final Account owner, final Boolean displayVolume, final Long zoneId, final Long diskOfferingId, final Storage.ProvisioningType provisioningType, final Long size, final Long minIops, final Long maxIops, final VolumeVO parentVolume, final String userSpecifiedName, final String uuid) { return Transaction.execute(new TransactionCallback<VolumeVO>() { @Override public VolumeVO doInTransaction(TransactionStatus status) { VolumeVO volume = new VolumeVO(userSpecifiedName, -1, -1, -1, -1, new Long(-1), null, null, provisioningType, 0, Volume.Type.DATADISK); volume.setPoolId(null); volume.setUuid(uuid); volume.setDataCenterId(zoneId); volume.setPodId(null); volume.setAccountId(owner.getId()); volume.setDomainId(owner.getDomainId()); volume.setDiskOfferingId(diskOfferingId); volume.setSize(size); volume.setMinIops(minIops); volume.setMaxIops(maxIops); volume.setInstanceId(null); volume.setUpdated(new Date()); volume.setDisplayVolume(displayVolume); if (parentVolume != null) { volume.setTemplateId(parentVolume.getTemplateId()); volume.setFormat(parentVolume.getFormat()); } else { volume.setTemplateId(null); } volume = 
_volsDao.persist(volume);

                if (cmd.getSnapshotId() == null && displayVolume) {
                    // for volume created from snapshot, create usage event after volume creation
                    // (snapshot-based volumes publish their usage event in createVolumeFromSnapshot instead)
                    UsageEventUtils.publishUsageEvent(EventTypes.EVENT_VOLUME_CREATE, volume.getAccountId(), volume.getDataCenterId(), volume.getId(), volume.getName(), diskOfferingId, null, size,
                            Volume.class.getName(), volume.getUuid(), displayVolume);
                }

                CallContext.current().setEventDetails("Volume Id: " + volume.getUuid());

                // Increment resource count during allocation; if actual creation fails,
                // decrement it
                _resourceLimitMgr.incrementResourceCount(volume.getAccountId(), ResourceType.volume, displayVolume);
                _resourceLimitMgr.incrementResourceCount(volume.getAccountId(), ResourceType.primary_storage, displayVolume, new Long(volume.getSize()));
                return volume;
            }
        });
    }

    /**
     * Checks that the requested size is within the allowed range: at least
     * 1 GiB and at most _maxVolumeSizeInGb. A size of exactly 0 passes the
     * lower-bound check (the first condition only rejects 0 < size < 1 GiB).
     *
     * @param size requested size in bytes
     * @return true when the size is acceptable
     * @throws InvalidParameterValueException when the size is out of range
     */
    public boolean validateVolumeSizeRange(long size) {
        if (size < 0 || (size > 0 && size < (1024 * 1024 * 1024))) {
            throw new InvalidParameterValueException("Please specify a size of at least 1 GB.");
        } else if (size > (_maxVolumeSizeInGb * 1024 * 1024 * 1024)) {
            throw new InvalidParameterValueException("Requested volume size is " + size + ", but the maximum size allowed is " + _maxVolumeSizeInGb + " GB.");
        }
        return true;
    }

    /**
     * Second (async) phase of volume creation for the row allocated by
     * allocVolume. For snapshot-based requests the volume is materialized from
     * the snapshot and, when a VM id was supplied, attached to that VM. On any
     * failure the volume is transitioned to DestroyRequested and the resource
     * counts incremented at allocation time are rolled back in the finally block.
     */
    @Override
    @DB
    @ActionEvent(eventType = EventTypes.EVENT_VOLUME_CREATE, eventDescription = "creating volume", async = true)
    public VolumeVO createVolume(CreateVolumeCmd cmd) {
        VolumeVO volume = _volsDao.findById(cmd.getEntityId());
        boolean created = true;
        try {
            if (cmd.getSnapshotId() != null) {
                volume = createVolumeFromSnapshot(volume, cmd.getSnapshotId(), cmd.getVirtualMachineId());
                if (volume.getState() != Volume.State.Ready) {
                    created = false;
                }
                // if VM Id is provided, attach the volume to the VM
                if (cmd.getVirtualMachineId() != null) {
                    try {
                        attachVolumeToVM(cmd.getVirtualMachineId(), volume.getId(), volume.getDeviceId());
                    } catch (Exception ex) {
                        // Volume creation succeeded but attach failed: surface the attach
                        // error without treating the volume itself as failed.
                        StringBuilder message = new StringBuilder("Volume: ");
                        message.append(volume.getUuid());
                        message.append(" created successfully, but failed to attach the newly created volume to VM: ");
                        message.append(cmd.getVirtualMachineId());
                        message.append(" due to error: ");
                        message.append(ex.getMessage());
                        if (s_logger.isDebugEnabled()) {
                            s_logger.debug(message, ex);
                        }
                        throw new CloudRuntimeException(message.toString());
                    }
                }
            }
            return volume;
        } catch (Exception e) {
            created = false;
            VolumeInfo vol = volFactory.getVolume(cmd.getEntityId());
            vol.stateTransit(Volume.Event.DestroyRequested);
            throw new CloudRuntimeException("Failed to create volume: " + volume.getId(), e);
        } finally {
            if (!created) {
                s_logger.trace("Decrementing volume resource count for account id=" + volume.getAccountId() + " as volume failed to create on the backend");
                _resourceLimitMgr.decrementResourceCount(volume.getAccountId(), ResourceType.volume, cmd.getDisplayVolume());
                _resourceLimitMgr.decrementResourceCount(volume.getAccountId(), ResourceType.primary_storage, cmd.getDisplayVolume(), new Long(volume.getSize()));
            }
        }
    }

    /**
     * Materializes a volume from a backed-up snapshot via the volume
     * orchestrator and publishes the VOLUME.CREATE usage event for the result.
     *
     * @param volume     the pre-allocated volume row to fill from the snapshot
     * @param snapshotId id of the source snapshot
     * @param vmId       optional VM id used by the orchestrator for placement; may be null
     * @return the refreshed VolumeVO for the created volume
     * @throws StorageUnavailableException when the backing storage cannot be reached
     */
    protected VolumeVO createVolumeFromSnapshot(VolumeVO volume, long snapshotId, Long vmId) throws StorageUnavailableException {
        VolumeInfo createdVolume = null;
        SnapshotVO snapshot = _snapshotDao.findById(snapshotId);
        // NOTE(review): return value is discarded; this only NPEs early when the
        // snapshot row is missing - confirm intent before removing.
        snapshot.getVolumeId();

        UserVmVO vm = null;
        if (vmId != null) {
            vm = _userVmDao.findById(vmId);
        }

        // sync old snapshots to region store if necessary
        createdVolume = _volumeMgr.createVolumeFromSnapshot(volume, snapshot, vm);
        VolumeVO volumeVo = _volsDao.findById(createdVolume.getId());
        UsageEventUtils.publishUsageEvent(EventTypes.EVENT_VOLUME_CREATE, createdVolume.getAccountId(), createdVolume.getDataCenterId(), createdVolume.getId(), createdVolume.getName(),
                createdVolume.getDiskOfferingId(), null, createdVolume.getSize(), Volume.class.getName(), createdVolume.getUuid(), volumeVo.isDisplayVolume());
        return volumeVo;
    }

    @Override
    @DB
    @ActionEvent(eventType = EventTypes.EVENT_VOLUME_RESIZE, eventDescription = "resizing volume", async = true)
    public VolumeVO
resizeVolume(ResizeVolumeCmd cmd) throws ResourceAllocationException { Long newSize; Long newMinIops; Long newMaxIops; Integer newHypervisorSnapshotReserve; boolean shrinkOk = cmd.isShrinkOk(); VolumeVO volume = _volsDao.findById(cmd.getEntityId()); if (volume == null) { throw new InvalidParameterValueException("No such volume"); } // checking if there are any ongoing snapshots on the volume which is to be resized List<SnapshotVO> ongoingSnapshots = _snapshotDao.listByStatus(cmd.getId(), Snapshot.State.Creating, Snapshot.State.CreatedOnPrimary, Snapshot.State.BackingUp); if (ongoingSnapshots.size() > 0) { throw new CloudRuntimeException("There is/are unbacked up snapshot(s) on this volume, resize volume is not permitted, please try again later."); } /* Does the caller have authority to act on this volume? */ _accountMgr.checkAccess(CallContext.current().getCallingAccount(), null, true, volume); DiskOfferingVO diskOffering = _diskOfferingDao.findById(volume.getDiskOfferingId()); DiskOfferingVO newDiskOffering = null; if (cmd.getNewDiskOfferingId() != null && volume.getDiskOfferingId() != cmd.getNewDiskOfferingId()) { newDiskOffering = _diskOfferingDao.findById(cmd.getNewDiskOfferingId()); } /* Only works for KVM/XenServer/VMware (or "Any") for now, and volumes with 'None' since they're just allocated in DB */ HypervisorType hypervisorType = _volsDao.getHypervisorType(volume.getId()); if (hypervisorType != HypervisorType.KVM && hypervisorType != HypervisorType.XenServer && hypervisorType != HypervisorType.VMware && hypervisorType != HypervisorType.Any && hypervisorType != HypervisorType.None) { throw new InvalidParameterValueException("Hypervisor " + hypervisorType + " does not support rootdisksize override"); } if (volume.getState() != Volume.State.Ready && volume.getState() != Volume.State.Allocated) { throw new InvalidParameterValueException("Volume should be in ready or allocated state before attempting a resize. 
Volume " + volume.getUuid() + " is in state " + volume.getState() + "."); } // if we are to use the existing disk offering if (newDiskOffering == null) { newSize = cmd.getSize(); newHypervisorSnapshotReserve = volume.getHypervisorSnapshotReserve(); // if the caller is looking to change the size of the volume if (newSize != null) { if (!diskOffering.isCustomized() && !volume.getVolumeType().equals(Volume.Type.ROOT)) { throw new InvalidParameterValueException("To change a volume's size without providing a new disk offering, its current disk offering must be " + "customizable or it must be a root volume (if providing a disk offering, make sure it is different from the current disk offering)."); } // convert from bytes to GiB newSize = newSize << 30; } else { // no parameter provided; just use the original size of the volume newSize = volume.getSize(); } newMinIops = cmd.getMinIops(); if (newMinIops != null) { if (!volume.getVolumeType().equals(Volume.Type.ROOT) && (diskOffering.isCustomizedIops() == null || !diskOffering.isCustomizedIops())) { throw new InvalidParameterValueException("The current disk offering does not support customization of the 'Min IOPS' parameter."); } } else { // no parameter provided; just use the original min IOPS of the volume newMinIops = volume.getMinIops(); } newMaxIops = cmd.getMaxIops(); if (newMaxIops != null) { if (!volume.getVolumeType().equals(Volume.Type.ROOT) && (diskOffering.isCustomizedIops() == null || !diskOffering.isCustomizedIops())) { throw new InvalidParameterValueException("The current disk offering does not support customization of the 'Max IOPS' parameter."); } } else { // no parameter provided; just use the original max IOPS of the volume newMaxIops = volume.getMaxIops(); } validateIops(newMinIops, newMaxIops); } else { if (newDiskOffering.getRemoved() != null) { throw new InvalidParameterValueException("Requested disk offering has been removed."); } if (!DiskOfferingVO.Type.Disk.equals(newDiskOffering.getType())) { 
throw new InvalidParameterValueException("Requested disk offering type is invalid."); } if (diskOffering.getTags() != null) { if (!StringUtils.areTagsEqual(diskOffering.getTags(), newDiskOffering.getTags())) { throw new InvalidParameterValueException("The tags on the new and old disk offerings must match."); } } else if (newDiskOffering.getTags() != null) { throw new InvalidParameterValueException("There are no tags on the current disk offering. The new disk offering needs to have no tags, as well."); } _configMgr.checkDiskOfferingAccess(_accountMgr.getActiveAccountById(volume.getAccountId()), newDiskOffering, _dcDao.findById(volume.getDataCenterId())); if (newDiskOffering.isCustomized()) { newSize = cmd.getSize(); if (newSize == null) { throw new InvalidParameterValueException("The new disk offering requires that a size be specified."); } // convert from GiB to bytes newSize = newSize << 30; } else { if (cmd.getSize() != null) { throw new InvalidParameterValueException("You cannnot pass in a custom disk size to a non-custom disk offering."); } newSize = newDiskOffering.getDiskSize(); } if (!volume.getSize().equals(newSize) && !volume.getVolumeType().equals(Volume.Type.DATADISK)) { throw new InvalidParameterValueException("Only data volumes can be resized via a new disk offering."); } if (newDiskOffering.isCustomizedIops() != null && newDiskOffering.isCustomizedIops()) { newMinIops = cmd.getMinIops() != null ? cmd.getMinIops() : volume.getMinIops(); newMaxIops = cmd.getMaxIops() != null ? cmd.getMaxIops() : volume.getMaxIops(); validateIops(newMinIops, newMaxIops); } else { newMinIops = newDiskOffering.getMinIops(); newMaxIops = newDiskOffering.getMaxIops(); } // if the hypervisor snapshot reserve value is null, it must remain null (currently only KVM uses null and null is all KVM uses for a value here) newHypervisorSnapshotReserve = volume.getHypervisorSnapshotReserve() != null ? 
newDiskOffering.getHypervisorSnapshotReserve() : null; } long currentSize = volume.getSize(); // if the caller is looking to change the size of the volume if (currentSize != newSize) { if (volume.getInstanceId() != null) { // Check that VM to which this volume is attached does not have VM snapshots if (_vmSnapshotDao.findByVm(volume.getInstanceId()).size() > 0) { throw new InvalidParameterValueException("A volume that is attached to a VM with any VM snapshots cannot be resized."); } } if (!validateVolumeSizeRange(newSize)) { throw new InvalidParameterValueException("Requested size out of range"); } Long storagePoolId = volume.getPoolId(); if (storagePoolId != null) { StoragePoolVO storagePoolVO = _storagePoolDao.findById(storagePoolId); if (storagePoolVO.isManaged()) { Long instanceId = volume.getInstanceId(); if (instanceId != null) { VMInstanceVO vmInstanceVO = _vmInstanceDao.findById(instanceId); if (vmInstanceVO.getHypervisorType() == HypervisorType.KVM && vmInstanceVO.getState() != State.Stopped) { throw new CloudRuntimeException("This kind of KVM disk cannot be resized while it is connected to a VM that's not in the Stopped state."); } } } } /* * Let's make certain they (think they) know what they're doing if they * want to shrink by forcing them to provide the shrinkok parameter. * This will be checked again at the hypervisor level where we can see * the actual disk size. */ if (currentSize > newSize && !shrinkOk) { throw new InvalidParameterValueException("Going from existing size of " + currentSize + " to size of " + newSize + " would shrink the volume." 
+ "Need to sign off by supplying the shrinkok parameter with value of true."); } if (newSize > currentSize) { /* Check resource limit for this account on primary storage resource */ _resourceLimitMgr.checkResourceLimit(_accountMgr.getAccount(volume.getAccountId()), ResourceType.primary_storage, volume.isDisplayVolume(), new Long(newSize - currentSize).longValue()); } } // Note: The storage plug-in in question should perform validation on the IOPS to check if a sufficient number of IOPS is available to perform // the requested change /* If this volume has never been beyond allocated state, short circuit everything and simply update the database. */ // We need to publish this event to usage_volume table if (volume.getState() == Volume.State.Allocated) { s_logger.debug("Volume is in the allocated state, but has never been created. Simply updating database with new size and IOPS."); volume.setSize(newSize); volume.setMinIops(newMinIops); volume.setMaxIops(newMaxIops); volume.setHypervisorSnapshotReserve(newHypervisorSnapshotReserve); if (newDiskOffering != null) { volume.setDiskOfferingId(cmd.getNewDiskOfferingId()); } _volsDao.update(volume.getId(), volume); UsageEventUtils.publishUsageEvent(EventTypes.EVENT_VOLUME_RESIZE, volume.getAccountId(), volume.getDataCenterId(), volume.getId(), volume.getName(), volume.getDiskOfferingId(), volume.getTemplateId(), volume.getSize(), Volume.class.getName(), volume.getUuid()); return volume; } UserVmVO userVm = _userVmDao.findById(volume.getInstanceId()); if (userVm != null) { if (volume.getVolumeType().equals(Volume.Type.ROOT) && userVm.getPowerState() != VirtualMachine.PowerState.PowerOff && hypervisorType == HypervisorType.VMware) { s_logger.error(" For ROOT volume resize VM should be in Power Off state."); throw new InvalidParameterValueException("VM current state is : " + userVm.getPowerState() + ". 
But VM should be in " + VirtualMachine.PowerState.PowerOff + " state."); } // serialize VM operation AsyncJobExecutionContext jobContext = AsyncJobExecutionContext.getCurrentExecutionContext(); if (jobContext.isJobDispatchedBy(VmWorkConstants.VM_WORK_JOB_DISPATCHER)) { // avoid re-entrance VmWorkJobVO placeHolder = null; placeHolder = createPlaceHolderWork(userVm.getId()); try { return orchestrateResizeVolume(volume.getId(), currentSize, newSize, newMinIops, newMaxIops, newHypervisorSnapshotReserve, newDiskOffering != null ? cmd.getNewDiskOfferingId() : null, shrinkOk); } finally { _workJobDao.expunge(placeHolder.getId()); } } else { Outcome<Volume> outcome = resizeVolumeThroughJobQueue(userVm.getId(), volume.getId(), currentSize, newSize, newMinIops, newMaxIops, newHypervisorSnapshotReserve, newDiskOffering != null ? cmd.getNewDiskOfferingId() : null, shrinkOk); try { outcome.get(); } catch (InterruptedException e) { throw new RuntimeException("Operation was interrupted", e); } catch (java.util.concurrent.ExecutionException e) { throw new RuntimeException("Execution exception", e); } Object jobResult = _jobMgr.unmarshallResultObject(outcome.getJob()); if (jobResult != null) { if (jobResult instanceof ConcurrentOperationException) { throw (ConcurrentOperationException)jobResult; } else if (jobResult instanceof ResourceAllocationException) { throw (ResourceAllocationException)jobResult; } else if (jobResult instanceof RuntimeException) { throw (RuntimeException)jobResult; } else if (jobResult instanceof Throwable) { throw new RuntimeException("Unexpected exception", (Throwable)jobResult); } else if (jobResult instanceof Long) { return _volsDao.findById((Long)jobResult); } } return volume; } } return orchestrateResizeVolume(volume.getId(), currentSize, newSize, newMinIops, newMaxIops, newHypervisorSnapshotReserve, newDiskOffering != null ? 
cmd.getNewDiskOfferingId() : null, shrinkOk);
    }

    /**
     * Validates a min/max IOPS pair: the two values must either both be provided or both be
     * absent, and when both are present min must not exceed max.
     *
     * @param minIops requested minimum IOPS, may be null
     * @param maxIops requested maximum IOPS, may be null
     * @throws InvalidParameterValueException when only one of the two is given, or when min > max
     */
    private void validateIops(Long minIops, Long maxIops) {
        // Both-or-neither: a lone 'miniops' or 'maxiops' is rejected.
        if ((minIops == null && maxIops != null) || (minIops != null && maxIops == null)) {
            throw new InvalidParameterValueException("Either 'miniops' and 'maxiops' must both be provided or neither must be provided.");
        }
        if (minIops != null && maxIops != null) {
            if (minIops > maxIops) {
                throw new InvalidParameterValueException("The 'miniops' parameter must be less than or equal to the 'maxiops' parameter.");
            }
        }
    }

    /**
     * Performs the actual resize work for a volume: checks pool capacity, enforces
     * hypervisor-specific restrictions (XenServer offline-only resize, KVM host Up/Enabled),
     * delegates the resize to the storage service, optionally tells the hypervisor to resize
     * the disk for managed storage, then persists the new size/offering and adjusts the
     * primary-storage resource count and publishes a resize usage event.
     *
     * @return the updated volume row
     * @throws CloudRuntimeException when the pool lacks space or the resize fails
     */
    private VolumeVO orchestrateResizeVolume(long volumeId, long currentSize, long newSize, Long newMinIops, Long newMaxIops, Integer newHypervisorSnapshotReserve,
            Long newDiskOfferingId, boolean shrinkOk) {
        final VolumeVO volume = _volsDao.findById(volumeId);
        UserVmVO userVm = _userVmDao.findById(volume.getInstanceId());
        StoragePoolVO storagePool = _storagePoolDao.findById(volume.getPoolId());
        boolean isManaged = storagePool.isManaged();
        if (!storageMgr.storagePoolHasEnoughSpaceForResize(storagePool, currentSize, newSize)) {
            throw new CloudRuntimeException("Storage pool " + storagePool.getName() + " does not have enough space to resize volume " + volume.getName());
        }
        /*
         * get a list of hosts to send the commands to, try the system the
         * associated vm is running on first, then the last known place it ran.
         * If not attached to a userVm, we pass 'none' and resizevolume.sh is ok
         * with that since it only needs the vm name to live resize
         */
        long[] hosts = null;
        String instanceName = "none";
        if (userVm != null) {
            instanceName = userVm.getInstanceName();
            if (userVm.getHostId() != null) {
                hosts = new long[] {userVm.getHostId()};
            } else if (userVm.getLastHostId() != null) {
                hosts = new long[] {userVm.getLastHostId()};
            }
            final String errorMsg = "The VM must be stopped or the disk detached in order to resize with the XenServer Hypervisor.";
            // Managed pool registered as HypervisorType.Any: resolve the real hypervisor from the host.
            if (storagePool.isManaged() && storagePool.getHypervisor() == HypervisorType.Any && hosts != null && hosts.length > 0) {
                HostVO host = _hostDao.findById(hosts[0]);
                if (currentSize != newSize && host.getHypervisorType() == HypervisorType.XenServer && !userVm.getState().equals(State.Stopped)) {
                    throw new InvalidParameterValueException(errorMsg);
                }
            }
            /* Xen only works offline, SR does not support VDI.resizeOnline */
            if (currentSize != newSize && _volsDao.getHypervisorType(volume.getId()) == HypervisorType.XenServer && !userVm.getState().equals(State.Stopped)) {
                throw new InvalidParameterValueException(errorMsg);
            }
            /* Do not resize volume of running vm on KVM host if host is not Up or not Enabled */
            if (currentSize != newSize && userVm.getState() == State.Running && userVm.getHypervisorType() == HypervisorType.KVM) {
                if (userVm.getHostId() == null) {
                    throw new InvalidParameterValueException("Cannot find the hostId of running vm " + userVm.getUuid());
                }
                HostVO host = _hostDao.findById(userVm.getHostId());
                if (host == null) {
                    throw new InvalidParameterValueException("The KVM host where vm is running does not exist");
                } else if (host.getStatus() != Status.Up) {
                    throw new InvalidParameterValueException("The KVM host where vm is running is not Up");
                } else if (host.getResourceState() != ResourceState.Enabled) {
                    throw new InvalidParameterValueException("The KVM host where vm is running is not Enabled");
                }
            }
        }
        ResizeVolumePayload payload = new ResizeVolumePayload(newSize, newMinIops, newMaxIops, newHypervisorSnapshotReserve, shrinkOk, instanceName, hosts, isManaged);
        try {
            VolumeInfo vol = volFactory.getVolume(volume.getId());
            vol.addPayload(payload);
            // this call to resize has a different impact depending on whether the
            // underlying primary storage is managed or not
            // if managed, this is the chance for the plug-in to change the size and/or IOPS values
            // if not managed, this is the chance for the plug-in to talk to the hypervisor layer
            // to change the size of the disk
            AsyncCallFuture<VolumeApiResult> future = volService.resize(vol);
            VolumeApiResult result = future.get();
            if (result.isFailed()) {
                s_logger.warn("Failed to resize the volume " + volume);
                String details = "";
                if (result.getResult() != null && !result.getResult().isEmpty()) {
                    details = result.getResult();
                }
                throw new CloudRuntimeException(details);
            }
            // managed storage is designed in such a way that the storage plug-in does not
            // talk to the hypervisor layer; as such, if the storage is managed and the
            // current and new sizes are different, then CloudStack (i.e. not a storage plug-in)
            // needs to tell the hypervisor to resize the disk
            if (storagePool.isManaged() && currentSize != newSize) {
                if (hosts != null && hosts.length > 0) {
                    HostVO hostVO = _hostDao.findById(hosts[0]);
                    // KVM is excluded here; presumably its resize is handled elsewhere — TODO confirm.
                    if (hostVO.getHypervisorType() != HypervisorType.KVM) {
                        volService.resizeVolumeOnHypervisor(volumeId, newSize, hosts[0], instanceName);
                    }
                }
            }
            if (newDiskOfferingId != null) {
                volume.setDiskOfferingId(newDiskOfferingId);
            }
            if (currentSize != newSize) {
                volume.setSize(newSize);
            }
            _volsDao.update(volume.getId(), volume);
            /* Update resource count for the account on primary storage resource */
            if (!shrinkOk) {
                _resourceLimitMgr.incrementResourceCount(volume.getAccountId(), ResourceType.primary_storage, volume.isDisplayVolume(), newSize - currentSize);
            } else {
                _resourceLimitMgr.decrementResourceCount(volume.getAccountId(), ResourceType.primary_storage, volume.isDisplayVolume(), currentSize - newSize);
            }
            UsageEventUtils.publishUsageEvent(EventTypes.EVENT_VOLUME_RESIZE, volume.getAccountId(), volume.getDataCenterId(), volume.getId(), volume.getName(),
                    volume.getDiskOfferingId(), volume.getTemplateId(), volume.getSize(), Volume.class.getName(), volume.getUuid());
            return volume;
        } catch (Exception e) {
            throw new CloudRuntimeException("Exception caught during resize volume operation of volume UUID: " + volume.getUuid(), e);
        }
    }

    @DB
    @Override
    @ActionEvent(eventType = EventTypes.EVENT_VOLUME_DELETE, eventDescription = "deleting volume")
    /**
     * Executes the removal of the volume. If the volume is only allocated we do not try to remove it from primary and secondary storage.
     * Otherwise, after the removal in the database, we will try to remove the volume from both primary and secondary storage.
*/
    public boolean deleteVolume(long volumeId, Account caller) throws ConcurrentOperationException {
        // Delegates to destroyVolume with expunge=true and forceExpunge=true; a null result means the delete failed.
        Volume volume = destroyVolume(volumeId, caller, true, true);
        return (volume != null);
    }

    /**
     * Removes the volume's data: expunges it from primary storage, then from secondary storage,
     * then deletes any image-cache copies.
     *
     * @return true when all steps completed; false when an expunge future failed or was interrupted
     */
    private boolean deleteVolumeFromStorage(VolumeVO volume, Account caller) throws ConcurrentOperationException {
        try {
            expungeVolumesInPrimaryStorageIfNeeded(volume);
            expungeVolumesInSecondaryStorageIfNeeded(volume);
            cleanVolumesCache(volume);
            return true;
        } catch (InterruptedException | ExecutionException e) {
            // NOTE(review): InterruptedException is swallowed without restoring the interrupt
            // status (Thread.currentThread().interrupt()) — confirm this is intentional.
            s_logger.warn("Failed to expunge volume: " + volume.getUuid(), e);
            return false;
        }
    }

    /**
     * Retrieves and validates the volume for the {@link #deleteVolume(long, Account)} method. The following validation are executed.
     * <ul>
     *  <li> if no volume is found in the database, we throw an {@link InvalidParameterValueException};
     *  <li> if there are snapshots operation on the volume we cannot delete it. Therefore, an {@link InvalidParameterValueException} is thrown;
     *  <li> if the volume is still attached to a VM we throw an {@link InvalidParameterValueException};
     *  <li> if volume state is in {@link Volume.State#UploadOp}, we check the {@link VolumeDataStoreVO}. Then, if the {@link VolumeDataStoreVO} for the given volume has download status of {@link VMTemplateStorageResourceAssoc.Status#DOWNLOAD_IN_PROGRESS}, an exception is throw;
     *  <li> if the volume state is in {@link Volume.State#NotUploaded} or if the state is {@link Volume.State#UploadInProgress}, an {@link InvalidParameterValueException} is thrown;
     *  <li> we also check if the user has access to the given volume using {@link AccountManager#checkAccess(Account, org.apache.cloudstack.acl.SecurityChecker.AccessType, boolean, String)}.
     * </ul>
     *
     * After all validations we return the volume object.
*/
    protected VolumeVO retrieveAndValidateVolume(long volumeId, Account caller) {
        VolumeVO volume = _volsDao.findById(volumeId);
        if (volume == null) {
            throw new InvalidParameterValueException("Unable to find volume with ID: " + volumeId);
        }
        // Ongoing snapshot operations block deletion.
        if (!_snapshotMgr.canOperateOnVolume(volume)) {
            throw new InvalidParameterValueException("There are snapshot operations in progress on the volume, unable to delete it");
        }
        // Still attached to a VM (and not already expunged) — caller must detach first.
        if (volume.getInstanceId() != null && volume.getState() != Volume.State.Expunged) {
            throw new InvalidParameterValueException("Please specify a volume that is not attached to any VM.");
        }
        // An in-flight upload must finish before the volume can be deleted.
        if (volume.getState() == Volume.State.UploadOp) {
            VolumeDataStoreVO volumeStore = _volumeStoreDao.findByVolume(volume.getId());
            if (volumeStore.getDownloadState() == VMTemplateStorageResourceAssoc.Status.DOWNLOAD_IN_PROGRESS) {
                throw new InvalidParameterValueException("Please specify a volume that is not uploading");
            }
        }
        if (volume.getState() == Volume.State.NotUploaded || volume.getState() == Volume.State.UploadInProgress) {
            throw new InvalidParameterValueException("The volume is either getting uploaded or it may be initiated shortly, please wait for it to be completed");
        }
        _accountMgr.checkAccess(caller, null, true, volume);
        return volume;
    }

    /**
     * Destroy the volume if possible and then decrement the following resource types.
     * <ul>
     *  <li> {@link ResourceType#volume};
     *  <li> {@link ResourceType#primary_storage}
     * </ul>
     *
     * A volume can be destroyed if it is not in any of the following states.
     * <ul>
     *  <li> {@value Volume.State#Destroy};
     *  <li> {@value Volume.State#Expunging};
     *  <li> {@value Volume.State#Expunged}.
     * </ul>
     *
     * The volume is destroyed via {@link VolumeService#destroyVolume(long)} method.
*/
    protected void destroyVolumeIfPossible(VolumeVO volume) {
        // NOTE(review): besides Destroy/Expunging/Expunged listed in the javadoc above,
        // Allocated and Uploaded volumes are also excluded here — the javadoc understates the condition.
        if (volume.getState() != Volume.State.Destroy && volume.getState() != Volume.State.Expunging && volume.getState() != Volume.State.Expunged
                && volume.getState() != Volume.State.Allocated && volume.getState() != Volume.State.Uploaded) {
            volService.destroyVolume(volume.getId());
        }
    }

    /**
     * We will check if the given volume is in the primary storage. If it is, we will execute an asynchronous call to delete it there.
     * If the volume is not in the primary storage, we do nothing here.
     */
    protected void expungeVolumesInPrimaryStorageIfNeeded(VolumeVO volume) throws InterruptedException, ExecutionException {
        VolumeInfo volOnPrimary = volFactory.getVolume(volume.getId(), DataStoreRole.Primary);
        if (volOnPrimary != null) {
            s_logger.info("Expunging volume " + volume.getId() + " from primary data store");
            AsyncCallFuture<VolumeApiResult> future = volService.expungeVolumeAsync(volOnPrimary);
            // Block until the asynchronous expunge completes.
            future.get();
        }
    }

    /**
     * We will check if the given volume is in the secondary storage. If the volume is not in the primary storage, we do nothing here.
     * If it is, we will execute an asynchronous call to delete it there. Then, we decrement the {@link ResourceType#secondary_storage} for the account that owns the volume.
     */
    protected void expungeVolumesInSecondaryStorageIfNeeded(VolumeVO volume) throws InterruptedException, ExecutionException {
        VolumeInfo volOnSecondary = volFactory.getVolume(volume.getId(), DataStoreRole.Image);
        if (volOnSecondary != null) {
            s_logger.info("Expunging volume " + volume.getId() + " from secondary data store");
            AsyncCallFuture<VolumeApiResult> future2 = volService.expungeVolumeAsync(volOnSecondary);
            future2.get();
            // Secondary-storage usage is only counted while the copy exists, so release it here.
            _resourceLimitMgr.decrementResourceCount(volOnSecondary.getAccountId(), ResourceType.secondary_storage, volOnSecondary.getSize());
        }
    }

    /**
     * Clean volumes cache entries (if they exist).
*/
    protected void cleanVolumesCache(VolumeVO volume) {
        List<VolumeInfo> cacheVols = volFactory.listVolumeOnCache(volume.getId());
        if (CollectionUtils.isEmpty(cacheVols)) {
            return;
        }
        for (VolumeInfo volOnCache : cacheVols) {
            s_logger.info("Delete volume from image cache store: " + volOnCache.getDataStore().getName());
            volOnCache.delete();
        }
    }

    /**
     * Applies the given event to the volume through the volume state machine.
     *
     * @return the result of the state-machine transition
     * @throws NoTransitionException when the event is not valid for the volume's current state
     */
    protected boolean stateTransitTo(Volume vol, Volume.Event event) throws NoTransitionException {
        return _volStateMachine.transitTo(vol, event, null, _volsDao);
    }

    /**
     * Destroys (and optionally expunges) the volume after validating it via
     * {@link #retrieveAndValidateVolume(long, Account)}. Expunging is restricted to admins
     * unless forced or the AllowUserExpungeRecoverVolume setting permits it for the caller.
     */
    @Override
    @ActionEvent(eventType = EventTypes.EVENT_VOLUME_DESTROY, eventDescription = "destroying a volume")
    public Volume destroyVolume(long volumeId, Account caller, boolean expunge, boolean forceExpunge) {
        VolumeVO volume = retrieveAndValidateVolume(volumeId, caller);
        if (expunge) {
            // When trying to expunge, permission is denied when the caller is not an admin and the AllowUserExpungeRecoverVolume is false for the caller.
            final Long userId = caller.getAccountId();
            if (!forceExpunge && !_accountMgr.isAdmin(userId) && !AllowUserExpungeRecoverVolume.valueIn(userId)) {
                throw new PermissionDeniedException("Expunging a volume can only be done by an Admin.
Or when the allow.user.expunge.recover.volume key is set."); } } else if (volume.getState() == Volume.State.Allocated || volume.getState() == Volume.State.Uploaded) { throw new InvalidParameterValueException("The volume in Allocated/Uploaded state can only be expunged not destroyed/recovered"); } destroyVolumeIfPossible(volume); if (expunge) { // Mark volume as removed if volume has not been created on primary or secondary if (volume.getState() == Volume.State.Allocated) { _volsDao.remove(volume.getId()); try { stateTransitTo(volume, Volume.Event.DestroyRequested); } catch (NoTransitionException e) { s_logger.debug("Failed to destroy volume" + volume.getId(), e); return null; } _resourceLimitMgr.decrementResourceCount(volume.getAccountId(), ResourceType.volume, volume.isDisplay()); _resourceLimitMgr.decrementResourceCount(volume.getAccountId(), ResourceType.primary_storage, volume.isDisplay(), new Long(volume.getSize())); return volume; } if (!deleteVolumeFromStorage(volume, caller)) { s_logger.warn("Failed to expunge volume: " + volumeId); return null; } } return volume; } @Override @ActionEvent(eventType = EventTypes.EVENT_VOLUME_RECOVER, eventDescription = "recovering a volume in Destroy state") public Volume recoverVolume(long volumeId) { Account caller = CallContext.current().getCallingAccount(); final Long userId = caller.getAccountId(); // Verify input parameters final VolumeVO volume = _volsDao.findById(volumeId); if (volume == null) { throw new InvalidParameterValueException("Unable to find a volume with id " + volume); } // When trying to expunge, permission is denied when the caller is not an admin and the AllowUserExpungeRecoverVolume is false for the caller. if (!_accountMgr.isAdmin(userId) && !AllowUserExpungeRecoverVolume.valueIn(userId)) { throw new PermissionDeniedException("Recovering a volume can only be done by an Admin. 
Or when the allow.user.expunge.recover.volume key is set.");
        }
        _accountMgr.checkAccess(caller, null, true, volume);
        // Only Destroy-state volumes can be recovered.
        if (volume.getState() != Volume.State.Destroy) {
            throw new InvalidParameterValueException("Please specify a volume in Destroy state.");
        }
        // Recovering re-consumes primary storage, so the account limit must allow it.
        try {
            _resourceLimitMgr.checkResourceLimit(_accountMgr.getAccount(volume.getAccountId()), ResourceType.primary_storage, volume.isDisplayVolume(), volume.getSize());
        } catch (ResourceAllocationException e) {
            s_logger.error("primary storage resource limit check failed", e);
            throw new InvalidParameterValueException(e.getMessage());
        }
        try {
            stateTransitTo(volume, Volume.Event.RecoverRequested);
        } catch (NoTransitionException e) {
            s_logger.debug("Failed to recover volume" + volume.getId(), e);
            throw new CloudRuntimeException("Failed to recover volume" + volume.getId(), e);
        }
        // Re-add the volume and its primary-storage footprint to the account's resource counts.
        _resourceLimitMgr.incrementResourceCount(volume.getAccountId(), ResourceType.volume, volume.isDisplay());
        _resourceLimitMgr.incrementResourceCount(volume.getAccountId(), ResourceType.primary_storage, volume.isDisplay(), new Long(volume.getSize()));
        return volume;
    }

    /** API entry point: unpacks the command and delegates to {@link #attachVolumeToVM(Long, Long, Long)}. */
    @Override
    @ActionEvent(eventType = EventTypes.EVENT_VOLUME_ATTACH, eventDescription = "attaching volume", async = true)
    public Volume attachVolumeToVM(AttachVolumeCmd command) {
        return attachVolumeToVM(command.getVirtualMachineId(), command.getId(), command.getDeviceId());
    }

    /**
     * Performs the actual attach: if needed creates the volume on primary storage (or moves it
     * to the target VM's pool), then sends the attach command to the hypervisor.
     */
    private Volume orchestrateAttachVolumeToVM(Long vmId, Long volumeId, Long deviceId) {
        VolumeInfo volumeToAttach = volFactory.getVolume(volumeId);
        if (volumeToAttach.isAttachedVM()) {
            throw new CloudRuntimeException("This volume is already attached to a VM.");
        }
        UserVmVO vm = _userVmDao.findById(vmId);
        VolumeVO exstingVolumeOfVm = null;
        List<VolumeVO> rootVolumesOfVm = _volsDao.findByInstanceAndType(vmId, Volume.Type.ROOT);
        if (rootVolumesOfVm.size() > 1) {
            throw new CloudRuntimeException("The VM " + vm.getHostName() + " has more than one ROOT volume and is in an invalid state.");
        } else {
            if
(!rootVolumesOfVm.isEmpty()) {
                exstingVolumeOfVm = rootVolumesOfVm.get(0);
            } else {
                // locate data volume of the vm
                List<VolumeVO> diskVolumesOfVm = _volsDao.findByInstanceAndType(vmId, Volume.Type.DATADISK);
                for (VolumeVO diskVolume : diskVolumesOfVm) {
                    // Any already-provisioned (non-Allocated) data disk serves as the reference volume.
                    if (diskVolume.getState() != Volume.State.Allocated) {
                        exstingVolumeOfVm = diskVolume;
                        break;
                    }
                }
            }
        }
        HypervisorType rootDiskHyperType = vm.getHypervisorType();
        HypervisorType volumeToAttachHyperType = _volsDao.getHypervisorType(volumeToAttach.getId());
        VolumeInfo newVolumeOnPrimaryStorage = volumeToAttach;
        //don't create volume on primary storage if its being attached to the vm which Root's volume hasn't been created yet
        StoragePoolVO destPrimaryStorage = null;
        if (exstingVolumeOfVm != null && !exstingVolumeOfVm.getState().equals(Volume.State.Allocated)) {
            destPrimaryStorage = _storagePoolDao.findById(exstingVolumeOfVm.getPoolId());
        }
        boolean volumeOnSecondary = volumeToAttach.getState() == Volume.State.Uploaded;
        // Materialize the volume on primary storage when it is still Allocated or only on secondary.
        if (destPrimaryStorage != null && (volumeToAttach.getState() == Volume.State.Allocated || volumeOnSecondary)) {
            try {
                newVolumeOnPrimaryStorage = _volumeMgr.createVolumeOnPrimaryStorage(vm, volumeToAttach, rootDiskHyperType, destPrimaryStorage);
            } catch (NoTransitionException e) {
                s_logger.debug("Failed to create volume on primary storage", e);
                throw new CloudRuntimeException("Failed to create volume on primary storage", e);
            }
        }
        // reload the volume from db
        newVolumeOnPrimaryStorage = volFactory.getVolume(newVolumeOnPrimaryStorage.getId());
        boolean moveVolumeNeeded = needMoveVolume(exstingVolumeOfVm, newVolumeOnPrimaryStorage);
        if (moveVolumeNeeded) {
            PrimaryDataStoreInfo primaryStore = (PrimaryDataStoreInfo)newVolumeOnPrimaryStorage.getDataStore();
            // Local-storage volumes cannot be migrated between pools.
            if (primaryStore.isLocal()) {
                throw new CloudRuntimeException(
                        "Failed to attach local data volume " + volumeToAttach.getName() + " to VM " + vm.getDisplayName() + " as migration of local data volume is not allowed");
            }
            StoragePoolVO vmRootVolumePool = _storagePoolDao.findById(exstingVolumeOfVm.getPoolId());
            try {
                newVolumeOnPrimaryStorage = _volumeMgr.moveVolume(newVolumeOnPrimaryStorage, vmRootVolumePool.getDataCenterId(), vmRootVolumePool.getPodId(),
                        vmRootVolumePool.getClusterId(), volumeToAttachHyperType);
            } catch (ConcurrentOperationException e) {
                s_logger.debug("move volume failed", e);
                throw new CloudRuntimeException("move volume failed", e);
            } catch (StorageUnavailableException e) {
                s_logger.debug("move volume failed", e);
                throw new CloudRuntimeException("move volume failed", e);
            }
        }
        VolumeVO newVol = _volsDao.findById(newVolumeOnPrimaryStorage.getId());
        // Getting the fresh vm object in case of volume migration to check the current state of VM
        if (moveVolumeNeeded || volumeOnSecondary) {
            vm = _userVmDao.findById(vmId);
            if (vm == null) {
                throw new InvalidParameterValueException("VM not found.");
            }
        }
        newVol = sendAttachVolumeCommand(vm, newVol, deviceId);
        return newVol;
    }

    /**
     * Validates all attach preconditions (volume type/state, VM type/state, zone match,
     * device id, data-disk limit, local-storage policy, VM snapshots/backups, permissions,
     * resource limits, hypervisor compatibility) and then runs the attach either inline
     * (when dispatched by the VM work-job dispatcher) or through the job queue.
     */
    public Volume attachVolumeToVM(Long vmId, Long volumeId, Long deviceId) {
        Account caller = CallContext.current().getCallingAccount();
        // Check that the volume ID is valid
        VolumeInfo volumeToAttach = volFactory.getVolume(volumeId);
        // Check that the volume is a data volume
        if (volumeToAttach == null || !(volumeToAttach.getVolumeType() == Volume.Type.DATADISK || volumeToAttach.getVolumeType() == Volume.Type.ROOT)) {
            throw new InvalidParameterValueException("Please specify a volume with the valid type: " + Volume.Type.ROOT.toString() + " or " + Volume.Type.DATADISK.toString());
        }
        // Check that the volume is not currently attached to any VM
        if (volumeToAttach.getInstanceId() != null) {
            throw new InvalidParameterValueException("Please specify a volume that is not attached to any VM.");
        }
        // Check that the volume is not destroyed
        if (volumeToAttach.getState() == Volume.State.Destroy) {
            throw new InvalidParameterValueException("Please specify a volume that is not destroyed.");
        }
        // Check that the virtual machine ID is valid and it's a user vm
UserVmVO vm = _userVmDao.findById(vmId);
        if (vm == null || vm.getType() != VirtualMachine.Type.User) {
            throw new InvalidParameterValueException("Please specify a valid User VM.");
        }
        // Check that the VM is in the correct state
        if (vm.getState() != State.Running && vm.getState() != State.Stopped) {
            throw new InvalidParameterValueException("Please specify a VM that is either running or stopped.");
        }
        // Check that the VM and the volume are in the same zone
        if (vm.getDataCenterId() != volumeToAttach.getDataCenterId()) {
            throw new InvalidParameterValueException("Please specify a VM that is in the same zone as the volume.");
        }
        // Check that the device ID is valid
        if (deviceId != null) {
            // validate ROOT volume type
            if (deviceId.longValue() == 0) {
                validateRootVolumeDetachAttach(_volsDao.findById(volumeToAttach.getId()), vm);
                // vm shouldn't have any volume with deviceId 0
                if (!_volsDao.findByInstanceAndDeviceId(vm.getId(), 0).isEmpty()) {
                    throw new InvalidParameterValueException("Vm already has root volume attached to it");
                }
                // volume can't be in Uploaded state
                if (volumeToAttach.getState() == Volume.State.Uploaded) {
                    throw new InvalidParameterValueException("No support for Root volume attach in state " + Volume.State.Uploaded);
                }
            }
        }
        // Check that the number of data volumes attached to VM is less than
        // that supported by hypervisor
        if (deviceId == null || deviceId.longValue() != 0) {
            List<VolumeVO> existingDataVolumes = _volsDao.findByInstanceAndType(vmId, Volume.Type.DATADISK);
            int maxAttachableDataVolumesSupported = getMaxDataVolumesSupported(vm);
            if (existingDataVolumes.size() >= maxAttachableDataVolumesSupported) {
                throw new InvalidParameterValueException(
                        "The specified VM already has the maximum number of data disks (" + maxAttachableDataVolumesSupported + ") attached.
Please specify another VM.");
            }
        }
        // If local storage is disabled then attaching a volume with local disk
        // offering not allowed
        DataCenterVO dataCenter = _dcDao.findById(volumeToAttach.getDataCenterId());
        if (!dataCenter.isLocalStorageEnabled()) {
            DiskOfferingVO diskOffering = _diskOfferingDao.findById(volumeToAttach.getDiskOfferingId());
            if (diskOffering.isUseLocalStorage()) {
                throw new InvalidParameterValueException("Zone is not configured to use local storage but volume's disk offering " + diskOffering.getName() + " uses it");
            }
        }
        // if target VM has associated VM snapshots
        List<VMSnapshotVO> vmSnapshots = _vmSnapshotDao.findByVm(vmId);
        if (vmSnapshots.size() > 0) {
            throw new InvalidParameterValueException("Unable to attach volume, please specify a VM that does not have VM snapshots");
        }
        // if target VM has backups
        if (vm.getBackupOfferingId() != null || vm.getBackupVolumeList().size() > 0) {
            throw new InvalidParameterValueException("Unable to attach volume, please specify a VM that does not have any backups");
        }
        // permission check
        _accountMgr.checkAccess(caller, null, true, volumeToAttach, vm);
        if (!(Volume.State.Allocated.equals(volumeToAttach.getState()) || Volume.State.Ready.equals(volumeToAttach.getState()) || Volume.State.Uploaded.equals(volumeToAttach.getState()))) {
            throw new InvalidParameterValueException("Volume state must be in Allocated, Ready or in Uploaded state");
        }
        Account owner = _accountDao.findById(volumeToAttach.getAccountId());
        // Volumes not yet counted on primary storage must pass the owner's resource limit check.
        if (!(volumeToAttach.getState() == Volume.State.Allocated || volumeToAttach.getState() == Volume.State.Ready)) {
            try {
                _resourceLimitMgr.checkResourceLimit(owner, ResourceType.primary_storage, volumeToAttach.getSize());
            } catch (ResourceAllocationException e) {
                s_logger.error("primary storage resource limit check failed", e);
                throw new InvalidParameterValueException(e.getMessage());
            }
        }
        HypervisorType rootDiskHyperType = vm.getHypervisorType();
        HypervisorType volumeToAttachHyperType = _volsDao.getHypervisorType(volumeToAttach.getId());
        StoragePoolVO volumeToAttachStoragePool = _storagePoolDao.findById(volumeToAttach.getPoolId());
        // managed storage can be used for different types of hypervisors
        // only perform this check if the volume's storage pool is not null and not managed
        if (volumeToAttachStoragePool != null && !volumeToAttachStoragePool.isManaged()) {
            if (volumeToAttachHyperType != HypervisorType.None && rootDiskHyperType != volumeToAttachHyperType) {
                throw new InvalidParameterValueException("Can't attach a volume created by: " + volumeToAttachHyperType + " to a " + rootDiskHyperType + " vm");
            }
        }
        // Record the volume on the async job so progress can be tracked against it.
        AsyncJobExecutionContext asyncExecutionContext = AsyncJobExecutionContext.getCurrentExecutionContext();
        if (asyncExecutionContext != null) {
            AsyncJob job = asyncExecutionContext.getJob();
            if (s_logger.isInfoEnabled()) {
                s_logger.info("Trying to attaching volume " + volumeId + " to vm instance:" + vm.getId() + ", update async job-" + job.getId() + " progress status");
            }
            _jobMgr.updateAsyncJobAttachment(job.getId(), "Volume", volumeId);
        }
        AsyncJobExecutionContext jobContext = AsyncJobExecutionContext.getCurrentExecutionContext();
        if (jobContext.isJobDispatchedBy(VmWorkConstants.VM_WORK_JOB_DISPATCHER)) {
            // avoid re-entrance
            VmWorkJobVO placeHolder = null;
            placeHolder = createPlaceHolderWork(vmId);
            try {
                return orchestrateAttachVolumeToVM(vmId, volumeId, deviceId);
            } finally {
                _workJobDao.expunge(placeHolder.getId());
            }
        } else {
            // Serialize the operation through the VM work-job queue and unwrap the result.
            Outcome<Volume> outcome = attachVolumeToVmThroughJobQueue(vmId, volumeId, deviceId);
            Volume vol = null;
            try {
                outcome.get();
            } catch (InterruptedException e) {
                throw new RuntimeException("Operation is interrupted", e);
            } catch (java.util.concurrent.ExecutionException e) {
                throw new RuntimeException("Execution excetion", e);
            }
            Object jobResult = _jobMgr.unmarshallResultObject(outcome.getJob());
            if (jobResult != null) {
                if (jobResult instanceof ConcurrentOperationException) {
                    throw (ConcurrentOperationException)jobResult;
                } else if (jobResult instanceof InvalidParameterValueException) {
                    throw (InvalidParameterValueException)jobResult;
                } else if (jobResult instanceof RuntimeException) {
                    throw (RuntimeException)jobResult;
                } else if (jobResult instanceof Throwable) {
                    throw new RuntimeException("Unexpected exception", (Throwable)jobResult);
                } else if (jobResult instanceof Long) {
                    vol = _volsDao.findById((Long)jobResult);
                }
            }
            return vol;
        }
    }

    /**
     * Admin-level update of volume metadata: path, chain info, state, pool, and custom UUID
     * are each applied only when non-null; the display flag is handled via
     * {@link #updateDisplay(Volume, Boolean)}.
     */
    @Override
    @ActionEvent(eventType = EventTypes.EVENT_VOLUME_UPDATE, eventDescription = "updating volume", async = true)
    public Volume updateVolume(long volumeId, String path, String state, Long storageId, Boolean displayVolume, String customId, long entityOwnerId, String chainInfo) {
        VolumeVO volume = _volsDao.findById(volumeId);
        if (volume == null) {
            throw new InvalidParameterValueException("The volume id doesn't exist");
        }
        if (path != null) {
            volume.setPath(path);
        }
        if (chainInfo != null) {
            volume.setChainInfo(chainInfo);
        }
        if (state != null) {
            try {
                Volume.State volumeState = Volume.State.valueOf(state);
                volume.setState(volumeState);
            } catch (IllegalArgumentException ex) {
                throw new InvalidParameterValueException("Invalid volume state specified");
            }
        }
        if (storageId != null) {
            StoragePool pool = _storagePoolDao.findById(storageId);
            // The target pool must live in the volume's own zone.
            if (pool.getDataCenterId() != volume.getDataCenterId()) {
                throw new InvalidParameterValueException("Invalid storageId specified; refers to the pool outside of the volume's zone");
            }
            volume.setPoolId(pool.getId());
        }
        if (customId != null) {
            volume.setUuid(customId);
        }
        updateDisplay(volume, displayVolume);
        _volsDao.update(volumeId, volume);
        return volume;
    }

    @Override
    public void updateDisplay(Volume volume, Boolean displayVolume) {
        // 1. Resource limit changes
        updateResourceCount(volume, displayVolume);
        // 2. generate usage event if not in destroyed state
        saveUsageEvent(volume, displayVolume);
        // 3. Set the flag
        if (displayVolume != null && displayVolume != volume.isDisplayVolume()) {
            // FIXME - Confused - typecast for now.
((VolumeVO)volume).setDisplayVolume(displayVolume); _volsDao.update(volume.getId(), (VolumeVO)volume); } } private void updateResourceCount(Volume volume, Boolean displayVolume) { // Update only when the flag has changed. if (displayVolume != null && displayVolume != volume.isDisplayVolume()) { _resourceLimitMgr.changeResourceCount(volume.getAccountId(), ResourceType.volume, displayVolume); _resourceLimitMgr.changeResourceCount(volume.getAccountId(), ResourceType.primary_storage, displayVolume, new Long(volume.getSize())); } } private void saveUsageEvent(Volume volume, Boolean displayVolume) { // Update only when the flag has changed && only when volume in a non-destroyed state. if ((displayVolume != null && displayVolume != volume.isDisplayVolume()) && !isVolumeDestroyed(volume)) { if (displayVolume) { // flag turned 1 equivalent to freshly created volume UsageEventUtils.publishUsageEvent(EventTypes.EVENT_VOLUME_CREATE, volume.getAccountId(), volume.getDataCenterId(), volume.getId(), volume.getName(), volume.getDiskOfferingId(), volume.getTemplateId(), volume.getSize(), Volume.class.getName(), volume.getUuid()); } else { // flag turned 0 equivalent to deleting a volume UsageEventUtils.publishUsageEvent(EventTypes.EVENT_VOLUME_DELETE, volume.getAccountId(), volume.getDataCenterId(), volume.getId(), volume.getName(), Volume.class.getName(), volume.getUuid()); } } } private boolean isVolumeDestroyed(Volume volume) { if (volume.getState() == Volume.State.Destroy || volume.getState() == Volume.State.Expunging && volume.getState() == Volume.State.Expunged) { return true; } return false; } @Override @ActionEvent(eventType = EventTypes.EVENT_VOLUME_DETACH, eventDescription = "detaching volume", async = true) public Volume detachVolumeFromVM(DetachVolumeCmd cmmd) { Account caller = CallContext.current().getCallingAccount(); if ((cmmd.getId() == null && cmmd.getDeviceId() == null && cmmd.getVirtualMachineId() == null) || (cmmd.getId() != null && (cmmd.getDeviceId() != null 
|| cmmd.getVirtualMachineId() != null)) || (cmmd.getId() == null && (cmmd.getDeviceId() == null || cmmd.getVirtualMachineId() == null))) { throw new InvalidParameterValueException("Please provide either a volume id, or a tuple(device id, instance id)"); } Long volumeId = cmmd.getId(); VolumeVO volume = null; if (volumeId != null) { volume = _volsDao.findById(volumeId); } else { volume = _volsDao.findByInstanceAndDeviceId(cmmd.getVirtualMachineId(), cmmd.getDeviceId()).get(0); } // Check that the volume ID is valid if (volume == null) { throw new InvalidParameterValueException("Unable to find volume with ID: " + volumeId); } Long vmId = null; if (cmmd.getVirtualMachineId() == null) { vmId = volume.getInstanceId(); } else { vmId = cmmd.getVirtualMachineId(); } // Permissions check _accountMgr.checkAccess(caller, null, true, volume); // Check that the volume is currently attached to a VM if (vmId == null) { throw new InvalidParameterValueException("The specified volume is not attached to a VM."); } // Check that the VM is in the correct state UserVmVO vm = _userVmDao.findById(vmId); if (vm.getState() != State.Running && vm.getState() != State.Stopped && vm.getState() != State.Destroyed) { throw new InvalidParameterValueException("Please specify a VM that is either running or stopped."); } // Check that the volume is a data/root volume if (!(volume.getVolumeType() == Volume.Type.ROOT || volume.getVolumeType() == Volume.Type.DATADISK)) { throw new InvalidParameterValueException("Please specify volume of type " + Volume.Type.DATADISK.toString() + " or " + Volume.Type.ROOT.toString()); } // Root volume detach is allowed for following hypervisors: Xen/KVM/VmWare if (volume.getVolumeType() == Volume.Type.ROOT) { validateRootVolumeDetachAttach(volume, vm); } // Don't allow detach if target VM has associated VM snapshots List<VMSnapshotVO> vmSnapshots = _vmSnapshotDao.findByVm(vmId); if (vmSnapshots.size() > 0) { throw new InvalidParameterValueException("Unable to detach 
volume, please specify a VM that does not have VM snapshots"); } if (vm.getBackupOfferingId() != null || vm.getBackupVolumeList().size() > 0) { throw new InvalidParameterValueException("Unable to detach volume, cannot detach volume from a VM that has backups. First remove the VM from the backup offering."); } AsyncJobExecutionContext asyncExecutionContext = AsyncJobExecutionContext.getCurrentExecutionContext(); if (asyncExecutionContext != null) { AsyncJob job = asyncExecutionContext.getJob(); if (s_logger.isInfoEnabled()) { s_logger.info("Trying to attaching volume " + volumeId + "to vm instance:" + vm.getId() + ", update async job-" + job.getId() + " progress status"); } _jobMgr.updateAsyncJobAttachment(job.getId(), "Volume", volumeId); } AsyncJobExecutionContext jobContext = AsyncJobExecutionContext.getCurrentExecutionContext(); if (jobContext.isJobDispatchedBy(VmWorkConstants.VM_WORK_JOB_DISPATCHER)) { // avoid re-entrance VmWorkJobVO placeHolder = null; placeHolder = createPlaceHolderWork(vmId); try { return orchestrateDetachVolumeFromVM(vmId, volumeId); } finally { _workJobDao.expunge(placeHolder.getId()); } } else { Outcome<Volume> outcome = detachVolumeFromVmThroughJobQueue(vmId, volumeId); Volume vol = null; try { outcome.get(); } catch (InterruptedException e) { throw new RuntimeException("Operation is interrupted", e); } catch (java.util.concurrent.ExecutionException e) { throw new RuntimeException("Execution excetion", e); } Object jobResult = _jobMgr.unmarshallResultObject(outcome.getJob()); if (jobResult != null) { if (jobResult instanceof ConcurrentOperationException) { throw (ConcurrentOperationException)jobResult; } else if (jobResult instanceof RuntimeException) { throw (RuntimeException)jobResult; } else if (jobResult instanceof Throwable) { throw new RuntimeException("Unexpected exception", (Throwable)jobResult); } else if (jobResult instanceof Long) { vol = _volsDao.findById((Long)jobResult); } } return vol; } } private void 
validateRootVolumeDetachAttach(VolumeVO volume, UserVmVO vm) { if (!(vm.getHypervisorType() == HypervisorType.XenServer || vm.getHypervisorType() == HypervisorType.VMware || vm.getHypervisorType() == HypervisorType.KVM || vm.getHypervisorType() == HypervisorType.Simulator)) { throw new InvalidParameterValueException("Root volume detach is not supported for hypervisor type " + vm.getHypervisorType()); } if (!(vm.getState() == State.Stopped) || (vm.getState() == State.Destroyed)) { throw new InvalidParameterValueException("Root volume detach can happen only when vm is in states: " + State.Stopped.toString() + " or " + State.Destroyed.toString()); } if (volume.getPoolId() != null) { StoragePoolVO pool = _storagePoolDao.findById(volume.getPoolId()); if (pool.isManaged()) { throw new InvalidParameterValueException("Root volume detach is not supported for Managed DataStores"); } } } @ActionEvent(eventType = EventTypes.EVENT_VOLUME_DETACH, eventDescription = "detaching volume") public Volume detachVolumeViaDestroyVM(long vmId, long volumeId) { return orchestrateDetachVolumeFromVM(vmId, volumeId); } private Volume orchestrateDetachVolumeFromVM(long vmId, long volumeId) { Volume volume = _volsDao.findById(volumeId); VMInstanceVO vm = _vmInstanceDao.findById(vmId); String errorMsg = "Failed to detach volume " + volume.getName() + " from VM " + vm.getHostName(); boolean sendCommand = vm.getState() == State.Running; Long hostId = vm.getHostId(); if (hostId == null) { hostId = vm.getLastHostId(); HostVO host = _hostDao.findById(hostId); if (host != null && host.getHypervisorType() == HypervisorType.VMware) { sendCommand = true; } } HostVO host = null; StoragePoolVO volumePool = _storagePoolDao.findByIdIncludingRemoved(volume.getPoolId()); if (hostId != null) { host = _hostDao.findById(hostId); if (host != null && host.getHypervisorType() == HypervisorType.XenServer && volumePool != null && volumePool.isManaged()) { sendCommand = true; } } if (volumePool == null) { sendCommand = 
false; } Answer answer = null; if (sendCommand) { // collect vm disk statistics before detach a volume UserVmVO userVm = _userVmDao.findById(vmId); if (userVm != null && userVm.getType() == VirtualMachine.Type.User) { _userVmService.collectVmDiskStatistics(userVm); } DataTO volTO = volFactory.getVolume(volume.getId()).getTO(); DiskTO disk = new DiskTO(volTO, volume.getDeviceId(), volume.getPath(), volume.getVolumeType()); DettachCommand cmd = new DettachCommand(disk, vm.getInstanceName()); cmd.setManaged(volumePool.isManaged()); cmd.setStorageHost(volumePool.getHostAddress()); cmd.setStoragePort(volumePool.getPort()); cmd.set_iScsiName(volume.get_iScsiName()); try { answer = _agentMgr.send(hostId, cmd); } catch (Exception e) { throw new CloudRuntimeException(errorMsg + " due to: " + e.getMessage()); } } if (!sendCommand || (answer != null && answer.getResult())) { // Mark the volume as detached _volsDao.detachVolume(volume.getId()); // volume.getPoolId() should be null if the VM we are detaching the disk from has never been started before if (volume.getPoolId() != null) { DataStore dataStore = dataStoreMgr.getDataStore(volume.getPoolId(), DataStoreRole.Primary); volService.revokeAccess(volFactory.getVolume(volume.getId()), host, dataStore); } if (volumePool != null && hostId != null) { handleTargetsForVMware(hostId, volumePool.getHostAddress(), volumePool.getPort(), volume.get_iScsiName()); } return _volsDao.findById(volumeId); } else { if (answer != null) { String details = answer.getDetails(); if (details != null && !details.isEmpty()) { errorMsg += "; " + details; } } throw new CloudRuntimeException(errorMsg); } } public void updateMissingRootDiskController(final VMInstanceVO vm, final String rootVolChainInfo) { if (vm == null || !VirtualMachine.Type.User.equals(vm.getType()) || Strings.isNullOrEmpty(rootVolChainInfo)) { return; } String rootDiskController = null; try { final VirtualMachineDiskInfo infoInChain = _gson.fromJson(rootVolChainInfo, 
VirtualMachineDiskInfo.class); if (infoInChain != null) { rootDiskController = infoInChain.getControllerFromDeviceBusName(); } final UserVmVO userVmVo = _userVmDao.findById(vm.getId()); if ((rootDiskController != null) && (!rootDiskController.isEmpty())) { _userVmDao.loadDetails(userVmVo); _userVmMgr.persistDeviceBusInfo(userVmVo, rootDiskController); } } catch (JsonParseException e) { s_logger.debug("Error parsing chain info json: " + e.getMessage()); } } private void handleTargetsForVMware(long hostId, String storageAddress, int storagePort, String iScsiName) { HostVO host = _hostDao.findById(hostId); if (host.getHypervisorType() == HypervisorType.VMware) { ModifyTargetsCommand cmd = new ModifyTargetsCommand(); List<Map<String, String>> targets = new ArrayList<>(); Map<String, String> target = new HashMap<>(); target.put(ModifyTargetsCommand.STORAGE_HOST, storageAddress); target.put(ModifyTargetsCommand.STORAGE_PORT, String.valueOf(storagePort)); target.put(ModifyTargetsCommand.IQN, iScsiName); targets.add(target); cmd.setTargets(targets); cmd.setApplyToAllHostsInCluster(true); cmd.setAdd(false); cmd.setTargetTypeToRemove(ModifyTargetsCommand.TargetTypeToRemove.DYNAMIC); sendModifyTargetsCommand(cmd, hostId); } } private void sendModifyTargetsCommand(ModifyTargetsCommand cmd, long hostId) { Answer answer = _agentMgr.easySend(hostId, cmd); if (answer == null) { String msg = "Unable to get an answer to the modify targets command"; s_logger.warn(msg); } else if (!answer.getResult()) { String msg = "Unable to modify target on the following host: " + hostId; s_logger.warn(msg); } } @DB @Override @ActionEvent(eventType = EventTypes.EVENT_VOLUME_MIGRATE, eventDescription = "migrating volume", async = true) public Volume migrateVolume(MigrateVolumeCmd cmd) { Long volumeId = cmd.getVolumeId(); Long storagePoolId = cmd.getStoragePoolId(); VolumeVO vol = _volsDao.findById(volumeId); if (vol == null) { throw new InvalidParameterValueException("Failed to find the volume id: " 
+ volumeId); } if (vol.getState() != Volume.State.Ready) { throw new InvalidParameterValueException("Volume must be in ready state"); } if (vol.getPoolId() == storagePoolId) { throw new InvalidParameterValueException("Volume " + vol + " is already on the destination storage pool"); } boolean liveMigrateVolume = false; Long instanceId = vol.getInstanceId(); Long srcClusterId = null; VMInstanceVO vm = null; if (instanceId != null) { vm = _vmInstanceDao.findById(instanceId); } // Check that Vm to which this volume is attached does not have VM Snapshots // OfflineVmwareMigration: considder if this is needed and desirable if (vm != null && _vmSnapshotDao.findByVm(vm.getId()).size() > 0) { throw new InvalidParameterValueException("Volume cannot be migrated, please remove all VM snapshots for VM to which this volume is attached"); } // OfflineVmwareMigration: extract this block as method and check if it is subject to regression if (vm != null && vm.getState() == State.Running) { // Check if the VM is GPU enabled. if (_serviceOfferingDetailsDao.findDetail(vm.getServiceOfferingId(), GPU.Keys.pciDevice.toString()) != null) { throw new InvalidParameterValueException("Live Migration of GPU enabled VM is not supported"); } // Check if the underlying hypervisor supports storage motion. 
Long hostId = vm.getHostId(); if (hostId != null) { HostVO host = _hostDao.findById(hostId); HypervisorCapabilitiesVO capabilities = null; if (host != null) { capabilities = _hypervisorCapabilitiesDao.findByHypervisorTypeAndVersion(host.getHypervisorType(), host.getHypervisorVersion()); srcClusterId = host.getClusterId(); } if (capabilities != null) { liveMigrateVolume = capabilities.isStorageMotionSupported(); } } // If vm is running, and hypervisor doesn't support live migration, then return error if (!liveMigrateVolume) { throw new InvalidParameterValueException("Volume needs to be detached from VM"); } } if (liveMigrateVolume && !cmd.isLiveMigrate()) { throw new InvalidParameterValueException("The volume " + vol + "is attached to a vm and for migrating it " + "the parameter livemigrate should be specified"); } StoragePool destPool = (StoragePool)dataStoreMgr.getDataStore(storagePoolId, DataStoreRole.Primary); if (destPool == null) { throw new InvalidParameterValueException("Failed to find the destination storage pool: " + storagePoolId); } else if (destPool.isInMaintenance()) { throw new InvalidParameterValueException("Cannot migrate volume " + vol + "to the destination storage pool " + destPool.getName() + " as the storage pool is in maintenance mode."); } if (!storageMgr.storagePoolHasEnoughSpace(Collections.singletonList(vol), destPool)) { throw new CloudRuntimeException("Storage pool " + destPool.getName() + " does not have enough space to migrate volume " + vol.getName()); } // OfflineVmwareMigration: check storage tags on disk(offering)s in comparison to destination storage pool // OfflineVmwareMigration: if no match return a proper error now DiskOfferingVO diskOffering = _diskOfferingDao.findById(vol.getDiskOfferingId()); if (diskOffering.equals(null)) { throw new CloudRuntimeException("volume '" + vol.getUuid() + "', has no diskoffering. 
Migration target cannot be checked."); } if (!doesTargetStorageSupportDiskOffering(destPool, diskOffering)) { throw new CloudRuntimeException(String.format("Migration target pool [%s, tags:%s] has no matching tags for volume [%s, uuid:%s, tags:%s]", destPool.getName(), getStoragePoolTags(destPool), vol.getName(), vol.getUuid(), diskOffering.getTags())); } if (liveMigrateVolume && destPool.getClusterId() != null && srcClusterId != null) { if (!srcClusterId.equals(destPool.getClusterId())) { throw new InvalidParameterValueException("Cannot migrate a volume of a virtual machine to a storage pool in a different cluster"); } } // In case of VMware, if ROOT volume is being cold-migrated, then ensure destination storage pool is in the same Datacenter as the VM. if (vm != null && vm.getHypervisorType().equals(HypervisorType.VMware)) { if (!liveMigrateVolume && vol.volumeType.equals(Volume.Type.ROOT)) { Long hostId = vm.getHostId() != null ? vm.getHostId() : vm.getLastHostId(); HostVO host = _hostDao.findById(hostId); if (host != null) { srcClusterId = host.getClusterId(); } if (srcClusterId != null && destPool.getClusterId() != null && !srcClusterId.equals(destPool.getClusterId())) { String srcDcName = _clusterDetailsDao.getVmwareDcName(srcClusterId); String destDcName = _clusterDetailsDao.getVmwareDcName(destPool.getClusterId()); if (srcDcName != null && destDcName != null && !srcDcName.equals(destDcName)) { throw new InvalidParameterValueException("Cannot migrate ROOT volume of a stopped VM to a storage pool in a different VMware datacenter"); } } updateMissingRootDiskController(vm, vol.getChainInfo()); } } DiskOfferingVO newDiskOffering = retrieveAndValidateNewDiskOffering(cmd); validateConditionsToReplaceDiskOfferingOfVolume(vol, newDiskOffering, destPool); if (vm != null) { // serialize VM operation AsyncJobExecutionContext jobContext = AsyncJobExecutionContext.getCurrentExecutionContext(); if (jobContext.isJobDispatchedBy(VmWorkConstants.VM_WORK_JOB_DISPATCHER)) { // 
avoid re-entrance VmWorkJobVO placeHolder = null; placeHolder = createPlaceHolderWork(vm.getId()); try { return orchestrateMigrateVolume(vol, destPool, liveMigrateVolume, newDiskOffering); } finally { _workJobDao.expunge(placeHolder.getId()); } } else { Outcome<Volume> outcome = migrateVolumeThroughJobQueue(vm, vol, destPool, liveMigrateVolume, newDiskOffering); try { outcome.get(); } catch (InterruptedException e) { throw new RuntimeException("Operation is interrupted", e); } catch (java.util.concurrent.ExecutionException e) { throw new RuntimeException("Execution excetion", e); } Object jobResult = _jobMgr.unmarshallResultObject(outcome.getJob()); if (jobResult != null) { if (jobResult instanceof ConcurrentOperationException) { throw (ConcurrentOperationException)jobResult; } else if (jobResult instanceof RuntimeException) { throw (RuntimeException)jobResult; } else if (jobResult instanceof Throwable) { throw new RuntimeException("Unexpected exception", (Throwable)jobResult); } } // retrieve the migrated new volume from job result if (jobResult != null && jobResult instanceof Long) { return _entityMgr.findById(VolumeVO.class, ((Long)jobResult)); } return null; } } return orchestrateMigrateVolume(vol, destPool, liveMigrateVolume, newDiskOffering); } /** * Retrieves the new disk offering UUID that might be sent to replace the current one in the volume being migrated. * If no disk offering UUID is provided we return null. Otherwise, we perform the following checks. * <ul> * <li>Is the disk offering UUID entered valid? If not, an {@link InvalidParameterValueException} is thrown; * <li>If the disk offering was already removed, we thrown an {@link InvalidParameterValueException} is thrown; * <li>We then check if the user executing the operation has access to the given disk offering. * </ul> * * If all checks pass, we move forward returning the disk offering object. 
*/ private DiskOfferingVO retrieveAndValidateNewDiskOffering(MigrateVolumeCmd cmd) { String newDiskOfferingUuid = cmd.getNewDiskOfferingUuid(); if (org.apache.commons.lang.StringUtils.isBlank(newDiskOfferingUuid)) { return null; } DiskOfferingVO newDiskOffering = _diskOfferingDao.findByUuid(newDiskOfferingUuid); if (newDiskOffering == null) { throw new InvalidParameterValueException(String.format("The disk offering informed is not valid [id=%s].", newDiskOfferingUuid)); } if (newDiskOffering.getRemoved() != null) { throw new InvalidParameterValueException(String.format("We cannot assign a removed disk offering [id=%s] to a volume. ", newDiskOffering.getUuid())); } Account caller = CallContext.current().getCallingAccount(); DataCenter zone = null; Volume volume = _volsDao.findById(cmd.getId()); if (volume != null) { zone = _dcDao.findById(volume.getDataCenterId()); } _accountMgr.checkAccess(caller, newDiskOffering, zone); return newDiskOffering; } /** * Performs the validations required for replacing the disk offering while migrating the volume of storage. If no new disk offering is provided, we do not execute any validation. * If a disk offering is informed, we then proceed with the following checks. * <ul> * <li>We check if the given volume is of ROOT type. We cannot change the disk offering of a ROOT volume. Therefore, we thrown an {@link InvalidParameterValueException}; * <li>We the disk is being migrated to shared storage and the new disk offering is for local storage (or vice versa), we throw an {@link InvalidParameterValueException}. Bear in mind that we are validating only the new disk offering. If none is provided we can override the current disk offering. This means, placing a volume with shared disk offering in local storage and vice versa; * <li>We then proceed checking the target storage pool supports the new disk offering {@link #doesTargetStorageSupportNewDiskOffering(StoragePool, DiskOfferingVO)}. 
* </ul> * * If all of the above validations pass, we check if the size of the new disk offering is different from the volume. If it is, we log a warning message. */ protected void validateConditionsToReplaceDiskOfferingOfVolume(VolumeVO volume, DiskOfferingVO newDiskOffering, StoragePool destPool) { if (newDiskOffering == null) { return; } if ((destPool.isShared() && newDiskOffering.isUseLocalStorage()) || destPool.isLocal() && newDiskOffering.isShared()) { throw new InvalidParameterValueException("You cannot move the volume to a shared storage and assing a disk offering for local storage and vice versa."); } if (!doesTargetStorageSupportDiskOffering(destPool, newDiskOffering)) { throw new InvalidParameterValueException(String.format("Target Storage [id=%s] tags [%s] does not match new disk offering [id=%s] tags [%s].", destPool.getUuid(), getStoragePoolTags(destPool), newDiskOffering.getUuid(), newDiskOffering.getTags())); } if (volume.getSize() != newDiskOffering.getDiskSize()) { DiskOfferingVO oldDiskOffering = this._diskOfferingDao.findById(volume.getDiskOfferingId()); s_logger.warn(String.format( "You are migrating a volume [id=%s] and changing the disk offering[from id=%s to id=%s] to reflect this migration. However, the sizes of the volume and the new disk offering are different.", volume.getUuid(), oldDiskOffering.getUuid(), newDiskOffering.getUuid())); } s_logger.info(String.format("Changing disk offering to [uuid=%s] while migrating volume [uuid=%s, name=%s].", newDiskOffering.getUuid(), volume.getUuid(), volume.getName())); } /** * Checks if the target storage supports the new disk offering. * This validation is consistent with the mechanism used to select a storage pool to deploy a volume when a virtual machine is deployed or when a new data disk is allocated. * * The scenarios when this method returns true or false is presented in the following table. 
* * <table border="1"> * <tr> * <th>#</th><th>Disk offering tags</th><th>Storage tags</th><th>Does the storage support the disk offering?</th> * </tr> * <body> * <tr> * <td>1</td><td>A,B</td><td>A</td><td>NO</td> * </tr> * <tr> * <td>2</td><td>A,B,C</td><td>A,B,C,D,X</td><td>YES</td> * </tr> * <tr> * <td>3</td><td>A,B,C</td><td>X,Y,Z</td><td>NO</td> * </tr> * <tr> * <td>4</td><td>null</td><td>A,S,D</td><td>YES</td> * </tr> * <tr> * <td>5</td><td>A</td><td>null</td><td>NO</td> * </tr> * <tr> * <td>6</td><td>null</td><td>null</td><td>YES</td> * </tr> * </body> * </table> */ protected boolean doesTargetStorageSupportDiskOffering(StoragePool destPool, DiskOfferingVO diskOffering) { String targetStoreTags = diskOffering.getTags(); return doesTargetStorageSupportDiskOffering(destPool, targetStoreTags); } @Override public boolean doesTargetStorageSupportDiskOffering(StoragePool destPool, String diskOfferingTags) { if (org.apache.commons.lang.StringUtils.isBlank(diskOfferingTags)) { return true; } String storagePoolTags = getStoragePoolTags(destPool); if (org.apache.commons.lang.StringUtils.isBlank(storagePoolTags)) { return false; } String[] storageTagsAsStringArray = org.apache.commons.lang.StringUtils.split(storagePoolTags, ","); String[] newDiskOfferingTagsAsStringArray = org.apache.commons.lang.StringUtils.split(diskOfferingTags, ","); return CollectionUtils.isSubCollection(Arrays.asList(newDiskOfferingTagsAsStringArray), Arrays.asList(storageTagsAsStringArray)); } /** * Retrieves the storage pool tags as a {@link String}. If the storage pool does not have tags we return a null value. 
*/ protected String getStoragePoolTags(StoragePool destPool) { List<String> destPoolTags = storagePoolTagsDao.getStoragePoolTags(destPool.getId()); if (CollectionUtils.isEmpty(destPoolTags)) { return null; } return StringUtils.join(destPoolTags, ","); } private Volume orchestrateMigrateVolume(VolumeVO volume, StoragePool destPool, boolean liveMigrateVolume, DiskOfferingVO newDiskOffering) { Volume newVol = null; try { if (liveMigrateVolume) { newVol = liveMigrateVolume(volume, destPool); } else { newVol = _volumeMgr.migrateVolume(volume, destPool); } if (newDiskOffering != null) { _volsDao.updateDiskOffering(newVol.getId(), newDiskOffering.getId()); } } catch (StorageUnavailableException e) { s_logger.debug("Failed to migrate volume", e); throw new CloudRuntimeException(e.getMessage()); } catch (Exception e) { s_logger.debug("Failed to migrate volume", e); throw new CloudRuntimeException(e.getMessage()); } return newVol; } @DB protected Volume liveMigrateVolume(Volume volume, StoragePool destPool) throws StorageUnavailableException { VolumeInfo vol = volFactory.getVolume(volume.getId()); DataStore dataStoreTarget = dataStoreMgr.getDataStore(destPool.getId(), DataStoreRole.Primary); AsyncCallFuture<VolumeApiResult> future = volService.migrateVolume(vol, dataStoreTarget); try { VolumeApiResult result = future.get(); if (result.isFailed()) { s_logger.debug("migrate volume failed:" + result.getResult()); throw new StorageUnavailableException("Migrate volume failed: " + result.getResult(), destPool.getId()); } return result.getVolume(); } catch (InterruptedException e) { s_logger.debug("migrate volume failed", e); throw new CloudRuntimeException(e.getMessage()); } catch (ExecutionException e) { s_logger.debug("migrate volume failed", e); throw new CloudRuntimeException(e.getMessage()); } } @Override @ActionEvent(eventType = EventTypes.EVENT_SNAPSHOT_CREATE, eventDescription = "taking snapshot", async = true) public Snapshot takeSnapshot(Long volumeId, Long policyId, 
Long snapshotId, Account account, boolean quiescevm, Snapshot.LocationType locationType, boolean asyncBackup, Map<String, String> tags) throws ResourceAllocationException { final Snapshot snapshot = takeSnapshotInternal(volumeId, policyId, snapshotId, account, quiescevm, locationType, asyncBackup); if (snapshot != null && MapUtils.isNotEmpty(tags)) { taggedResourceService.createTags(Collections.singletonList(snapshot.getUuid()), ResourceTag.ResourceObjectType.Snapshot, tags, null); } return snapshot; } private Snapshot takeSnapshotInternal(Long volumeId, Long policyId, Long snapshotId, Account account, boolean quiescevm, Snapshot.LocationType locationType, boolean asyncBackup) throws ResourceAllocationException { VolumeInfo volume = volFactory.getVolume(volumeId); if (volume == null) { throw new InvalidParameterValueException("Creating snapshot failed due to volume:" + volumeId + " doesn't exist"); } if (volume.getState() != Volume.State.Ready) { throw new InvalidParameterValueException("VolumeId: " + volumeId + " is not in " + Volume.State.Ready + " state but " + volume.getState() + ". 
Cannot take snapshot."); } StoragePoolVO storagePoolVO = _storagePoolDao.findById(volume.getPoolId()); if (storagePoolVO.isManaged() && locationType == null) { locationType = Snapshot.LocationType.PRIMARY; } VMInstanceVO vm = null; if (volume.getInstanceId() != null) { vm = _vmInstanceDao.findById(volume.getInstanceId()); } if (vm != null) { // serialize VM operation AsyncJobExecutionContext jobContext = AsyncJobExecutionContext.getCurrentExecutionContext(); if (jobContext.isJobDispatchedBy(VmWorkConstants.VM_WORK_JOB_DISPATCHER)) { // avoid re-entrance VmWorkJobVO placeHolder = null; placeHolder = createPlaceHolderWork(vm.getId()); try { return orchestrateTakeVolumeSnapshot(volumeId, policyId, snapshotId, account, quiescevm, locationType, asyncBackup); } finally { _workJobDao.expunge(placeHolder.getId()); } } else { Outcome<Snapshot> outcome = takeVolumeSnapshotThroughJobQueue(vm.getId(), volumeId, policyId, snapshotId, account.getId(), quiescevm, locationType, asyncBackup); try { outcome.get(); } catch (InterruptedException e) { throw new RuntimeException("Operation is interrupted", e); } catch (java.util.concurrent.ExecutionException e) { throw new RuntimeException("Execution excetion", e); } Object jobResult = _jobMgr.unmarshallResultObject(outcome.getJob()); if (jobResult != null) { if (jobResult instanceof ConcurrentOperationException) { throw (ConcurrentOperationException)jobResult; } else if (jobResult instanceof ResourceAllocationException) { throw (ResourceAllocationException)jobResult; } else if (jobResult instanceof Throwable) { throw new RuntimeException("Unexpected exception", (Throwable)jobResult); } } return _snapshotDao.findById(snapshotId); } } else { CreateSnapshotPayload payload = new CreateSnapshotPayload(); payload.setSnapshotId(snapshotId); payload.setSnapshotPolicyId(policyId); payload.setAccount(account); payload.setQuiescevm(quiescevm); payload.setAsyncBackup(asyncBackup); volume.addPayload(payload); return volService.takeSnapshot(volume); 
} }

    /**
     * Performs the actual snapshot creation for a volume after the request has been
     * admitted (either inline or via the VM work-job queue). Re-validates the volume
     * (it may have changed state while the request was queued) and hands a
     * CreateSnapshotPayload to the volume service, which drives the storage-side
     * snapshot.
     *
     * @param volumeId     id of the volume to snapshot
     * @param policyId     snapshot policy the snapshot is taken under
     * @param snapshotId   id of the pre-allocated snapshot record
     * @param account      account the snapshot is charged to
     * @param quiescevm    whether the VM should be quiesced before snapshotting
     * @param locationType where the snapshot is kept (e.g. PRIMARY for managed storage)
     * @param asyncBackup  whether backup to secondary storage runs asynchronously
     * @return the created snapshot
     * @throws ResourceAllocationException if snapshot resource limits are exceeded
     */
    private Snapshot orchestrateTakeVolumeSnapshot(Long volumeId, Long policyId, Long snapshotId, Account account, boolean quiescevm, Snapshot.LocationType locationType, boolean asyncBackup)
            throws ResourceAllocationException {
        VolumeInfo volume = volFactory.getVolume(volumeId);

        if (volume == null) {
            throw new InvalidParameterValueException("Creating snapshot failed due to volume:" + volumeId + " doesn't exist");
        }

        // The volume must still be Ready: it may have been detached or destroyed
        // between job submission and execution.
        if (volume.getState() != Volume.State.Ready) {
            throw new InvalidParameterValueException("VolumeId: " + volumeId + " is not in " + Volume.State.Ready + " state but " + volume.getState() + ". Cannot take snapshot.");
        }

        // Package the request parameters as a payload for the storage layer.
        CreateSnapshotPayload payload = new CreateSnapshotPayload();

        payload.setSnapshotId(snapshotId);
        payload.setSnapshotPolicyId(policyId);
        payload.setAccount(account);
        payload.setQuiescevm(quiescevm);
        payload.setLocationType(locationType);
        payload.setAsyncBackup(asyncBackup);

        volume.addPayload(payload);

        return volService.takeSnapshot(volume);
    }

    /**
     * Validates a snapshot request and allocates the snapshot database record; the
     * actual snapshot is taken later. Rejects DIR-format volumes, system-VM volumes,
     * snapshots in disabled zones (for non-root-admins) and an explicit locationType
     * on non-managed storage. For managed storage, locationType defaults to PRIMARY.
     *
     * @throws ResourceAllocationException if snapshot resource limits are exceeded
     */
    @Override
    @ActionEvent(eventType = EventTypes.EVENT_SNAPSHOT_CREATE, eventDescription = "allocating snapshot", create = true)
    public Snapshot allocSnapshot(Long volumeId, Long policyId, String snapshotName, Snapshot.LocationType locationType) throws ResourceAllocationException {
        Account caller = CallContext.current().getCallingAccount();

        VolumeInfo volume = volFactory.getVolume(volumeId);
        if (volume == null) {
            throw new InvalidParameterValueException("Creating snapshot failed due to volume:" + volumeId + " doesn't exist");
        }
        DataCenter zone = _dcDao.findById(volume.getDataCenterId());
        if (zone == null) {
            throw new InvalidParameterValueException("Can't find zone by id " + volume.getDataCenterId());
        }
        // Only root admins may operate while the zone is disabled.
        if (Grouping.AllocationState.Disabled == zone.getAllocationState() && !_accountMgr.isRootAdmin(caller.getId())) {
            throw new PermissionDeniedException("Cannot perform this operation, Zone is currently disabled: " + zone.getName());
        }

        if (volume.getState() != Volume.State.Ready) {
            throw new InvalidParameterValueException("VolumeId: " + volumeId + " is not in " + Volume.State.Ready + " state but " + volume.getState() + ". Cannot take snapshot.");
        }

        if (ImageFormat.DIR.equals(volume.getFormat())) {
            throw new InvalidParameterValueException("Snapshot not supported for volume:" + volumeId);
        }

        // Volumes backed by a SYSTEM template (system VM disks) cannot be snapshotted.
        if (volume.getTemplateId() != null) {
            VMTemplateVO template = _templateDao.findById(volume.getTemplateId());
            if (template != null && template.getTemplateType() == Storage.TemplateType.SYSTEM) {
                throw new InvalidParameterValueException("VolumeId: " + volumeId + " is for System VM , Creating snapshot against System VM volumes is not supported");
            }
        }

        StoragePoolVO storagePoolVO = _storagePoolDao.findById(volume.getPoolId());

        // LocationType is only meaningful for managed storage; default it to PRIMARY there.
        if (!storagePoolVO.isManaged() && locationType != null) {
            throw new InvalidParameterValueException("VolumeId: " + volumeId + " LocationType is supported only for managed storage");
        }

        if (storagePoolVO.isManaged() && locationType == null) {
            locationType = Snapshot.LocationType.PRIMARY;
        }

        StoragePool storagePool = (StoragePool)volume.getDataStore();
        if (storagePool == null) {
            throw new InvalidParameterValueException("VolumeId: " + volumeId + " please attach this volume to a VM before create snapshot for it");
        }

        return snapshotMgr.allocSnapshot(volumeId, policyId, snapshotName, locationType);
    }

    /**
     * Validates and allocates a snapshot for a volume attached to the given VM.
     * Checks caller access to both the VM and the volume before any state checks.
     *
     * @throws ResourceAllocationException if snapshot resource limits are exceeded
     */
    @Override
    public Snapshot allocSnapshotForVm(Long vmId, Long volumeId, String snapshotName) throws ResourceAllocationException {
        Account caller = CallContext.current().getCallingAccount();
        VMInstanceVO vm = _vmInstanceDao.findById(vmId);
        if (vm == null) {
            throw new InvalidParameterValueException("Creating snapshot failed due to vm:" + vmId + " doesn't exist");
        }
        _accountMgr.checkAccess(caller, null, true, vm);

        VolumeInfo volume = volFactory.getVolume(volumeId);
        if (volume == null) {
            throw new InvalidParameterValueException("Creating snapshot failed due to volume:" + volumeId + " doesn't exist");
        }
        _accountMgr.checkAccess(caller, null, true, volume);
VirtualMachine attachVM = volume.getAttachedVM();
        // The volume must actually be attached to the VM named in the request.
        if (attachVM == null || attachVM.getId() != vm.getId()) {
            throw new InvalidParameterValueException("Creating snapshot failed due to volume:" + volumeId + " doesn't attach to vm :" + vm);
        }

        DataCenter zone = _dcDao.findById(volume.getDataCenterId());
        if (zone == null) {
            throw new InvalidParameterValueException("Can't find zone by id " + volume.getDataCenterId());
        }
        // Only root admins may operate while the zone is disabled.
        if (Grouping.AllocationState.Disabled == zone.getAllocationState() && !_accountMgr.isRootAdmin(caller.getId())) {
            throw new PermissionDeniedException("Cannot perform this operation, Zone is currently disabled: " + zone.getName());
        }

        if (volume.getState() != Volume.State.Ready) {
            throw new InvalidParameterValueException("VolumeId: " + volumeId + " is not in " + Volume.State.Ready + " state but " + volume.getState() + ". Cannot take snapshot.");
        }

        // Volumes backed by a SYSTEM template (system VM disks) cannot be snapshotted.
        if (volume.getTemplateId() != null) {
            VMTemplateVO template = _templateDao.findById(volume.getTemplateId());
            if (template != null && template.getTemplateType() == Storage.TemplateType.SYSTEM) {
                throw new InvalidParameterValueException("VolumeId: " + volumeId + " is for System VM , Creating snapshot against System VM volumes is not supported");
            }
        }

        StoragePool storagePool = (StoragePool)volume.getDataStore();
        if (storagePool == null) {
            throw new InvalidParameterValueException("VolumeId: " + volumeId + " please attach this volume to a VM before create snapshot for it");
        }

        // VM-scoped snapshots always use the manual policy and no explicit location type.
        return snapshotMgr.allocSnapshot(volumeId, Snapshot.MANUAL_POLICY_ID, snapshotName, null);
    }

    /**
     * Extracts (downloads) a volume: validates permissions and volume/VM state,
     * then produces a download URL (HTTP_DOWNLOAD) or performs an FTP upload,
     * copying the volume to secondary storage if needed.
     */
    @Override
    @ActionEvent(eventType = EventTypes.EVENT_VOLUME_EXTRACT, eventDescription = "extracting volume", async = true)
    public String extractVolume(ExtractVolumeCmd cmd) {
        Long volumeId = cmd.getId();
        Long zoneId = cmd.getZoneId();
        String mode = cmd.getMode();
        Account account = CallContext.current().getCallingAccount();

        // Extraction may be globally disabled for everyone except root admins.
        if (!_accountMgr.isRootAdmin(account.getId()) && ApiDBUtils.isExtractionDisabled()) {
            throw new PermissionDeniedException("Extraction has
been disabled by admin"); } VolumeVO volume = _volsDao.findById(volumeId); if (volume == null) { InvalidParameterValueException ex = new InvalidParameterValueException("Unable to find volume with specified volumeId"); ex.addProxyObject(volumeId.toString(), "volumeId"); throw ex; } // perform permission check _accountMgr.checkAccess(account, null, true, volume); if (_dcDao.findById(zoneId) == null) { throw new InvalidParameterValueException("Please specify a valid zone."); } if (volume.getPoolId() == null) { throw new InvalidParameterValueException("The volume doesn't belong to a storage pool so can't extract it"); } // Extract activity only for detached volumes or for volumes whose // instance is stopped if (volume.getInstanceId() != null && ApiDBUtils.findVMInstanceById(volume.getInstanceId()).getState() != State.Stopped) { s_logger.debug("Invalid state of the volume with ID: " + volumeId + ". It should be either detached or the VM should be in stopped state."); PermissionDeniedException ex = new PermissionDeniedException("Invalid state of the volume with specified ID. It should be either detached or the VM should be in stopped state."); ex.addProxyObject(volume.getUuid(), "volumeId"); throw ex; } if (volume.getVolumeType() != Volume.Type.DATADISK) { // Datadisk dont have any template dependence. 
VMTemplateVO template = ApiDBUtils.findTemplateById(volume.getTemplateId()); if (template != null) { // For ISO based volumes template = null and // we allow extraction of all ISO based // volumes boolean isExtractable = template.isExtractable() && template.getTemplateType() != Storage.TemplateType.SYSTEM; if (!isExtractable && account != null && !_accountMgr.isRootAdmin(account.getId())) { // Global admins are always allowed to extract PermissionDeniedException ex = new PermissionDeniedException("The volume with specified volumeId is not allowed to be extracted"); ex.addProxyObject(volume.getUuid(), "volumeId"); throw ex; } } } if (mode == null || (!mode.equals(Upload.Mode.FTP_UPLOAD.toString()) && !mode.equals(Upload.Mode.HTTP_DOWNLOAD.toString()))) { throw new InvalidParameterValueException("Please specify a valid extract Mode "); } // Check if the url already exists VolumeDataStoreVO volumeStoreRef = _volumeStoreDao.findByVolume(volumeId); if (volumeStoreRef != null && volumeStoreRef.getExtractUrl() != null) { return volumeStoreRef.getExtractUrl(); } VMInstanceVO vm = null; if (volume.getInstanceId() != null) { vm = _vmInstanceDao.findById(volume.getInstanceId()); } if (vm != null) { // serialize VM operation AsyncJobExecutionContext jobContext = AsyncJobExecutionContext.getCurrentExecutionContext(); if (jobContext.isJobDispatchedBy(VmWorkConstants.VM_WORK_JOB_DISPATCHER)) { // avoid re-entrance VmWorkJobVO placeHolder = null; placeHolder = createPlaceHolderWork(vm.getId()); try { return orchestrateExtractVolume(volume.getId(), zoneId); } finally { _workJobDao.expunge(placeHolder.getId()); } } else { Outcome<String> outcome = extractVolumeThroughJobQueue(vm.getId(), volume.getId(), zoneId); try { outcome.get(); } catch (InterruptedException e) { throw new RuntimeException("Operation is interrupted", e); } catch (java.util.concurrent.ExecutionException e) { throw new RuntimeException("Execution excetion", e); } Object jobResult = 
_jobMgr.unmarshallResultObject(outcome.getJob()); if (jobResult != null) { if (jobResult instanceof ConcurrentOperationException) { throw (ConcurrentOperationException)jobResult; } else if (jobResult instanceof RuntimeException) { throw (RuntimeException)jobResult; } else if (jobResult instanceof Throwable) { throw new RuntimeException("Unexpected exception", (Throwable)jobResult); } } // retrieve the entity url from job result if (jobResult != null && jobResult instanceof String) { return (String)jobResult; } return null; } } return orchestrateExtractVolume(volume.getId(), zoneId); } private String orchestrateExtractVolume(long volumeId, long zoneId) { // get latest volume state to make sure that it is not updated by other parallel operations VolumeVO volume = _volsDao.findById(volumeId); if (volume == null || volume.getState() != Volume.State.Ready) { throw new InvalidParameterValueException("Volume to be extracted has been removed or not in right state!"); } // perform extraction ImageStoreEntity secStore = (ImageStoreEntity)dataStoreMgr.getImageStoreWithFreeCapacity(zoneId); if (secStore == null) { throw new InvalidParameterValueException(String.format("Secondary storage to satisfy storage needs cannot be found for zone: %d", zoneId)); } String value = _configDao.getValue(Config.CopyVolumeWait.toString()); NumbersUtil.parseInt(value, Integer.parseInt(Config.CopyVolumeWait.getDefaultValue())); // Copy volume from primary to secondary storage VolumeInfo srcVol = volFactory.getVolume(volumeId); AsyncCallFuture<VolumeApiResult> cvAnswer = volService.copyVolume(srcVol, secStore); // Check if you got a valid answer. 
VolumeApiResult cvResult = null; try { cvResult = cvAnswer.get(); } catch (InterruptedException e1) { s_logger.debug("failed copy volume", e1); throw new CloudRuntimeException("Failed to copy volume", e1); } catch (ExecutionException e1) { s_logger.debug("failed copy volume", e1); throw new CloudRuntimeException("Failed to copy volume", e1); } if (cvResult == null || cvResult.isFailed()) { String errorString = "Failed to copy the volume from the source primary storage pool to secondary storage."; throw new CloudRuntimeException(errorString); } VolumeInfo vol = cvResult.getVolume(); String extractUrl = secStore.createEntityExtractUrl(vol.getPath(), vol.getFormat(), vol); VolumeDataStoreVO volumeStoreRef = _volumeStoreDao.findByVolume(volumeId); volumeStoreRef.setExtractUrl(extractUrl); volumeStoreRef.setExtractUrlCreated(DateUtil.now()); volumeStoreRef.setDownloadState(VMTemplateStorageResourceAssoc.Status.DOWNLOADED); volumeStoreRef.setDownloadPercent(100); volumeStoreRef.setZoneId(zoneId); _volumeStoreDao.update(volumeStoreRef.getId(), volumeStoreRef); return extractUrl; } @Override public boolean isDisplayResourceEnabled(Long id) { Volume volume = _volsDao.findById(id); if (volume == null) { return true; // bad id given, default to true } return volume.isDisplayVolume(); } private boolean needMoveVolume(VolumeVO existingVolume, VolumeInfo newVolume) { if (existingVolume == null || existingVolume.getPoolId() == null || newVolume.getPoolId() == null) { return false; } DataStore storeForExistingVol = dataStoreMgr.getPrimaryDataStore(existingVolume.getPoolId()); DataStore storeForNewVol = dataStoreMgr.getPrimaryDataStore(newVolume.getPoolId()); Scope storeForExistingStoreScope = storeForExistingVol.getScope(); if (storeForExistingStoreScope == null) { throw new CloudRuntimeException("Can't get scope of data store: " + storeForExistingVol.getId()); } Scope storeForNewStoreScope = storeForNewVol.getScope(); if (storeForNewStoreScope == null) { throw new 
CloudRuntimeException("Can't get scope of data store: " + storeForNewVol.getId()); } if (storeForNewStoreScope.getScopeType() == ScopeType.ZONE) { return false; } if (storeForExistingStoreScope.getScopeType() != storeForNewStoreScope.getScopeType()) { if (storeForNewStoreScope.getScopeType() == ScopeType.CLUSTER) { Long vmClusterId = null; if (storeForExistingStoreScope.getScopeType() == ScopeType.HOST) { HostScope hs = (HostScope)storeForExistingStoreScope; vmClusterId = hs.getClusterId(); } else if (storeForExistingStoreScope.getScopeType() == ScopeType.ZONE) { Long hostId = _vmInstanceDao.findById(existingVolume.getInstanceId()).getHostId(); if (hostId != null) { HostVO host = _hostDao.findById(hostId); vmClusterId = host.getClusterId(); } } if (storeForNewStoreScope.getScopeId().equals(vmClusterId)) { return false; } else { return true; } } else if (storeForNewStoreScope.getScopeType() == ScopeType.HOST && (storeForExistingStoreScope.getScopeType() == ScopeType.CLUSTER || storeForExistingStoreScope.getScopeType() == ScopeType.ZONE)) { Long hostId = _vmInstanceDao.findById(existingVolume.getInstanceId()).getHostId(); if (storeForNewStoreScope.getScopeId().equals(hostId)) { return false; } } throw new InvalidParameterValueException("Can't move volume between scope: " + storeForNewStoreScope.getScopeType() + " and " + storeForExistingStoreScope.getScopeType()); } return !storeForExistingStoreScope.isSameScope(storeForNewStoreScope); } private synchronized void checkAndSetAttaching(Long volumeId) { VolumeInfo volumeToAttach = volFactory.getVolume(volumeId); if (volumeToAttach.isAttachedVM()) { throw new CloudRuntimeException("volume: " + volumeToAttach.getName() + " is already attached to a VM: " + volumeToAttach.getAttachedVmName()); } if (Volume.State.Allocated.equals(volumeToAttach.getState())) { return; } if (Volume.State.Ready.equals(volumeToAttach.getState())) { volumeToAttach.stateTransit(Volume.Event.AttachRequested); return; } final String error = "Volume: 
" + volumeToAttach.getName() + " is in " + volumeToAttach.getState() + ". It should be in Ready or Allocated state"; s_logger.error(error); throw new CloudRuntimeException(error); } private void verifyManagedStorage(Long storagePoolId, Long hostId) { if (storagePoolId == null || hostId == null) { return; } StoragePoolVO storagePoolVO = _storagePoolDao.findById(storagePoolId); if (storagePoolVO == null || !storagePoolVO.isManaged()) { return; } HostVO hostVO = _hostDao.findById(hostId); if (hostVO == null) { return; } if (!storageUtil.managedStoragePoolCanScale(storagePoolVO, hostVO.getClusterId(), hostVO.getId())) { throw new CloudRuntimeException("Insufficient number of available " + getNameOfClusteredFileSystem(hostVO)); } } private String getNameOfClusteredFileSystem(HostVO hostVO) { HypervisorType hypervisorType = hostVO.getHypervisorType(); if (HypervisorType.XenServer.equals(hypervisorType)) { return "SRs"; } if (HypervisorType.VMware.equals(hypervisorType)) { return "datastores"; } return "clustered file systems"; } private VolumeVO sendAttachVolumeCommand(UserVmVO vm, VolumeVO volumeToAttach, Long deviceId) { String errorMsg = "Failed to attach volume " + volumeToAttach.getName() + " to VM " + vm.getHostName(); boolean sendCommand = vm.getState() == State.Running; AttachAnswer answer = null; Long hostId = vm.getHostId(); if (hostId == null) { hostId = vm.getLastHostId(); HostVO host = _hostDao.findById(hostId); if (host != null && host.getHypervisorType() == HypervisorType.VMware) { sendCommand = true; } } HostVO host = null; StoragePoolVO volumeToAttachStoragePool = _storagePoolDao.findById(volumeToAttach.getPoolId()); if (hostId != null) { host = _hostDao.findById(hostId); if (host != null && host.getHypervisorType() == HypervisorType.XenServer && volumeToAttachStoragePool != null && volumeToAttachStoragePool.isManaged()) { sendCommand = true; } } if (volumeToAttachStoragePool != null) { verifyManagedStorage(volumeToAttachStoragePool.getId(), hostId); } 
// volumeToAttachStoragePool should be null if the VM we are attaching the disk to has never been started before DataStore dataStore = volumeToAttachStoragePool != null ? dataStoreMgr.getDataStore(volumeToAttachStoragePool.getId(), DataStoreRole.Primary) : null; checkAndSetAttaching(volumeToAttach.getId()); boolean attached = false; try { // if we don't have a host, the VM we are attaching the disk to has never been started before if (host != null) { try { volService.grantAccess(volFactory.getVolume(volumeToAttach.getId()), host, dataStore); } catch (Exception e) { volService.revokeAccess(volFactory.getVolume(volumeToAttach.getId()), host, dataStore); throw new CloudRuntimeException(e.getMessage()); } } if (sendCommand) { if (host != null && host.getHypervisorType() == HypervisorType.KVM && volumeToAttachStoragePool.isManaged() && volumeToAttach.getPath() == null) { volumeToAttach.setPath(volumeToAttach.get_iScsiName()); _volsDao.update(volumeToAttach.getId(), volumeToAttach); } DataTO volTO = volFactory.getVolume(volumeToAttach.getId()).getTO(); deviceId = getDeviceId(vm, deviceId); DiskTO disk = storageMgr.getDiskWithThrottling(volTO, volumeToAttach.getVolumeType(), deviceId, volumeToAttach.getPath(), vm.getServiceOfferingId(), volumeToAttach.getDiskOfferingId()); AttachCommand cmd = new AttachCommand(disk, vm.getInstanceName()); ChapInfo chapInfo = volService.getChapInfo(volFactory.getVolume(volumeToAttach.getId()), dataStore); Map<String, String> details = new HashMap<String, String>(); disk.setDetails(details); details.put(DiskTO.MANAGED, String.valueOf(volumeToAttachStoragePool.isManaged())); details.put(DiskTO.STORAGE_HOST, volumeToAttachStoragePool.getHostAddress()); details.put(DiskTO.STORAGE_PORT, String.valueOf(volumeToAttachStoragePool.getPort())); details.put(DiskTO.VOLUME_SIZE, String.valueOf(volumeToAttach.getSize())); details.put(DiskTO.IQN, volumeToAttach.get_iScsiName()); details.put(DiskTO.MOUNT_POINT, volumeToAttach.get_iScsiName()); 
details.put(DiskTO.PROTOCOL_TYPE, (volumeToAttach.getPoolType() != null) ? volumeToAttach.getPoolType().toString() : null); if (chapInfo != null) { details.put(DiskTO.CHAP_INITIATOR_USERNAME, chapInfo.getInitiatorUsername()); details.put(DiskTO.CHAP_INITIATOR_SECRET, chapInfo.getInitiatorSecret()); details.put(DiskTO.CHAP_TARGET_USERNAME, chapInfo.getTargetUsername()); details.put(DiskTO.CHAP_TARGET_SECRET, chapInfo.getTargetSecret()); } _userVmDao.loadDetails(vm); Map<String, String> controllerInfo = new HashMap<String, String>(); controllerInfo.put(VmDetailConstants.ROOT_DISK_CONTROLLER, vm.getDetail(VmDetailConstants.ROOT_DISK_CONTROLLER)); controllerInfo.put(VmDetailConstants.DATA_DISK_CONTROLLER, vm.getDetail(VmDetailConstants.DATA_DISK_CONTROLLER)); cmd.setControllerInfo(controllerInfo); s_logger.debug("Attach volume id:" + volumeToAttach.getId() + " on VM id:" + vm.getId() + " has controller info:" + controllerInfo); try { answer = (AttachAnswer)_agentMgr.send(hostId, cmd); } catch (Exception e) { if (host != null) { volService.revokeAccess(volFactory.getVolume(volumeToAttach.getId()), host, dataStore); } throw new CloudRuntimeException(errorMsg + " due to: " + e.getMessage()); } } if (!sendCommand || (answer != null && answer.getResult())) { // Mark the volume as attached if (sendCommand) { DiskTO disk = answer.getDisk(); _volsDao.attachVolume(volumeToAttach.getId(), vm.getId(), disk.getDiskSeq()); volumeToAttach = _volsDao.findById(volumeToAttach.getId()); if (volumeToAttachStoragePool.isManaged() && volumeToAttach.getPath() == null) { volumeToAttach.setPath(answer.getDisk().getPath()); _volsDao.update(volumeToAttach.getId(), volumeToAttach); } } else { deviceId = getDeviceId(vm, deviceId); _volsDao.attachVolume(volumeToAttach.getId(), vm.getId(), deviceId); volumeToAttach = _volsDao.findById(volumeToAttach.getId()); if (vm.getHypervisorType() == HypervisorType.KVM && volumeToAttachStoragePool != null && volumeToAttachStoragePool.isManaged() && 
volumeToAttach.getPath() == null && volumeToAttach.get_iScsiName() != null) { volumeToAttach.setPath(volumeToAttach.get_iScsiName()); _volsDao.update(volumeToAttach.getId(), volumeToAttach); } } // insert record for disk I/O statistics VmDiskStatisticsVO diskstats = _vmDiskStatsDao.findBy(vm.getAccountId(), vm.getDataCenterId(), vm.getId(), volumeToAttach.getId()); if (diskstats == null) { diskstats = new VmDiskStatisticsVO(vm.getAccountId(), vm.getDataCenterId(), vm.getId(), volumeToAttach.getId()); _vmDiskStatsDao.persist(diskstats); } attached = true; } else { if (answer != null) { String details = answer.getDetails(); if (details != null && !details.isEmpty()) { errorMsg += "; " + details; } } if (host != null) { volService.revokeAccess(volFactory.getVolume(volumeToAttach.getId()), host, dataStore); } throw new CloudRuntimeException(errorMsg); } } finally { Volume.Event ev = Volume.Event.OperationFailed; VolumeInfo volInfo = volFactory.getVolume(volumeToAttach.getId()); if (attached) { ev = Volume.Event.OperationSucceeded; s_logger.debug("Volume: " + volInfo.getName() + " successfully attached to VM: " + volInfo.getAttachedVmName()); } else { s_logger.debug("Volume: " + volInfo.getName() + " failed to attach to VM: " + volInfo.getAttachedVmName()); } volInfo.stateTransit(ev); } return _volsDao.findById(volumeToAttach.getId()); } private int getMaxDataVolumesSupported(UserVmVO vm) { Long hostId = vm.getHostId(); if (hostId == null) { hostId = vm.getLastHostId(); } HostVO host = _hostDao.findById(hostId); Integer maxDataVolumesSupported = null; if (host != null) { _hostDao.loadDetails(host); String hypervisorVersion = host.getDetail("product_version"); if (org.apache.commons.lang.StringUtils.isBlank(hypervisorVersion)) { hypervisorVersion = host.getHypervisorVersion(); } maxDataVolumesSupported = _hypervisorCapabilitiesDao.getMaxDataVolumesLimit(host.getHypervisorType(), hypervisorVersion); } else { HypervisorType hypervisorType = vm.getHypervisorType(); if 
(hypervisorType != null && CollectionUtils.isNotEmpty(supportingDefaultHV) && supportingDefaultHV.contains(hypervisorType)) { maxDataVolumesSupported = _hypervisorCapabilitiesDao.getMaxDataVolumesLimit(hypervisorType, "default"); } } if (maxDataVolumesSupported == null || maxDataVolumesSupported.intValue() <= 0) { maxDataVolumesSupported = 6; // 6 data disks by default if nothing // is specified in // 'hypervisor_capabilities' table } return maxDataVolumesSupported.intValue(); } private Long getDeviceId(UserVmVO vm, Long deviceId) { // allocate deviceId int maxDevices = getMaxDataVolumesSupported(vm) + 2; // add 2 to consider devices root volume and cdrom int maxDeviceId = maxDevices - 1; List<VolumeVO> vols = _volsDao.findByInstance(vm.getId()); if (deviceId != null) { if (deviceId.longValue() < 0 || deviceId.longValue() > maxDeviceId || deviceId.longValue() == 3) { throw new RuntimeException("deviceId should be 0,1,2,4-" + maxDeviceId); } for (VolumeVO vol : vols) { if (vol.getDeviceId().equals(deviceId)) { throw new RuntimeException("deviceId " + deviceId + " is used by vm " + vm.getId()); } } } else { // allocate deviceId here List<String> devIds = new ArrayList<String>(); for (int i = 1; i <= maxDeviceId; i++) { devIds.add(String.valueOf(i)); } devIds.remove("3"); for (VolumeVO vol : vols) { devIds.remove(vol.getDeviceId().toString().trim()); } if (devIds.isEmpty()) { throw new RuntimeException("All device Ids are used by vm " + vm.getId()); } deviceId = Long.parseLong(devIds.iterator().next()); } return deviceId; } @Override public boolean configure(String name, Map<String, Object> params) { String maxVolumeSizeInGbString = _configDao.getValue(Config.MaxVolumeSize.toString()); _maxVolumeSizeInGb = NumbersUtil.parseLong(maxVolumeSizeInGbString, 2000); supportingDefaultHV = _hypervisorCapabilitiesDao.getHypervisorsWithDefaultEntries(); return true; } public List<StoragePoolAllocator> getStoragePoolAllocators() { return _storagePoolAllocators; } @Inject public 
void setStoragePoolAllocators(List<StoragePoolAllocator> storagePoolAllocators) { _storagePoolAllocators = storagePoolAllocators; } public class VmJobVolumeUrlOutcome extends OutcomeImpl<String> { public VmJobVolumeUrlOutcome(final AsyncJob job) { super(String.class, job, VmJobCheckInterval.value(), new Predicate() { @Override public boolean checkCondition() { AsyncJobVO jobVo = _entityMgr.findById(AsyncJobVO.class, job.getId()); assert (jobVo != null); if (jobVo == null || jobVo.getStatus() != JobInfo.Status.IN_PROGRESS) { return true; } return false; } }, AsyncJob.Topics.JOB_STATE); } } public class VmJobVolumeOutcome extends OutcomeImpl<Volume> { private long _volumeId; public VmJobVolumeOutcome(final AsyncJob job, final long volumeId) { super(Volume.class, job, VmJobCheckInterval.value(), new Predicate() { @Override public boolean checkCondition() { AsyncJobVO jobVo = _entityMgr.findById(AsyncJobVO.class, job.getId()); assert (jobVo != null); if (jobVo == null || jobVo.getStatus() != JobInfo.Status.IN_PROGRESS) { return true; } return false; } }, AsyncJob.Topics.JOB_STATE); _volumeId = volumeId; } @Override protected Volume retrieve() { return _volsDao.findById(_volumeId); } } public class VmJobSnapshotOutcome extends OutcomeImpl<Snapshot> { private long _snapshotId; public VmJobSnapshotOutcome(final AsyncJob job, final long snapshotId) { super(Snapshot.class, job, VmJobCheckInterval.value(), new Predicate() { @Override public boolean checkCondition() { AsyncJobVO jobVo = _entityMgr.findById(AsyncJobVO.class, job.getId()); assert (jobVo != null); if (jobVo == null || jobVo.getStatus() != JobInfo.Status.IN_PROGRESS) { return true; } return false; } }, AsyncJob.Topics.JOB_STATE); _snapshotId = snapshotId; } @Override protected Snapshot retrieve() { return _snapshotDao.findById(_snapshotId); } } public Outcome<Volume> attachVolumeToVmThroughJobQueue(final Long vmId, final Long volumeId, final Long deviceId) { final CallContext context = CallContext.current(); 
final User callingUser = context.getCallingUser(); final Account callingAccount = context.getCallingAccount(); final VMInstanceVO vm = _vmInstanceDao.findById(vmId); VmWorkJobVO workJob = new VmWorkJobVO(context.getContextId()); workJob.setDispatcher(VmWorkConstants.VM_WORK_JOB_DISPATCHER); workJob.setCmd(VmWorkAttachVolume.class.getName()); workJob.setAccountId(callingAccount.getId()); workJob.setUserId(callingUser.getId()); workJob.setStep(VmWorkJobVO.Step.Starting); workJob.setVmType(VirtualMachine.Type.Instance); workJob.setVmInstanceId(vm.getId()); workJob.setRelated(AsyncJobExecutionContext.getOriginJobId()); // save work context info (there are some duplications) VmWorkAttachVolume workInfo = new VmWorkAttachVolume(callingUser.getId(), callingAccount.getId(), vm.getId(), VolumeApiServiceImpl.VM_WORK_JOB_HANDLER, volumeId, deviceId); workJob.setCmdInfo(VmWorkSerializer.serialize(workInfo)); _jobMgr.submitAsyncJob(workJob, VmWorkConstants.VM_WORK_QUEUE, vm.getId()); AsyncJobVO jobVo = _jobMgr.getAsyncJob(workJob.getId()); s_logger.debug("New job " + workJob.getId() + ", result field: " + jobVo.getResult()); AsyncJobExecutionContext.getCurrentExecutionContext().joinJob(workJob.getId()); return new VmJobVolumeOutcome(workJob, volumeId); } public Outcome<Volume> detachVolumeFromVmThroughJobQueue(final Long vmId, final Long volumeId) { final CallContext context = CallContext.current(); final User callingUser = context.getCallingUser(); final Account callingAccount = context.getCallingAccount(); final VMInstanceVO vm = _vmInstanceDao.findById(vmId); VmWorkJobVO workJob = new VmWorkJobVO(context.getContextId()); workJob.setDispatcher(VmWorkConstants.VM_WORK_JOB_DISPATCHER); workJob.setCmd(VmWorkDetachVolume.class.getName()); workJob.setAccountId(callingAccount.getId()); workJob.setUserId(callingUser.getId()); workJob.setStep(VmWorkJobVO.Step.Starting); workJob.setVmType(VirtualMachine.Type.Instance); workJob.setVmInstanceId(vm.getId()); 
workJob.setRelated(AsyncJobExecutionContext.getOriginJobId()); // save work context info (there are some duplications) VmWorkDetachVolume workInfo = new VmWorkDetachVolume(callingUser.getId(), callingAccount.getId(), vm.getId(), VolumeApiServiceImpl.VM_WORK_JOB_HANDLER, volumeId); workJob.setCmdInfo(VmWorkSerializer.serialize(workInfo)); _jobMgr.submitAsyncJob(workJob, VmWorkConstants.VM_WORK_QUEUE, vm.getId()); AsyncJobExecutionContext.getCurrentExecutionContext().joinJob(workJob.getId()); return new VmJobVolumeOutcome(workJob, volumeId); } public Outcome<Volume> resizeVolumeThroughJobQueue(final Long vmId, final long volumeId, final long currentSize, final long newSize, final Long newMinIops, final Long newMaxIops, final Integer newHypervisorSnapshotReserve, final Long newServiceOfferingId, final boolean shrinkOk) { final CallContext context = CallContext.current(); final User callingUser = context.getCallingUser(); final Account callingAccount = context.getCallingAccount(); final VMInstanceVO vm = _vmInstanceDao.findById(vmId); VmWorkJobVO workJob = new VmWorkJobVO(context.getContextId()); workJob.setDispatcher(VmWorkConstants.VM_WORK_JOB_DISPATCHER); workJob.setCmd(VmWorkResizeVolume.class.getName()); workJob.setAccountId(callingAccount.getId()); workJob.setUserId(callingUser.getId()); workJob.setStep(VmWorkJobVO.Step.Starting); workJob.setVmType(VirtualMachine.Type.Instance); workJob.setVmInstanceId(vm.getId()); workJob.setRelated(AsyncJobExecutionContext.getOriginJobId()); // save work context info (there are some duplications) VmWorkResizeVolume workInfo = new VmWorkResizeVolume(callingUser.getId(), callingAccount.getId(), vm.getId(), VolumeApiServiceImpl.VM_WORK_JOB_HANDLER, volumeId, currentSize, newSize, newMinIops, newMaxIops, newHypervisorSnapshotReserve, newServiceOfferingId, shrinkOk); workJob.setCmdInfo(VmWorkSerializer.serialize(workInfo)); _jobMgr.submitAsyncJob(workJob, VmWorkConstants.VM_WORK_QUEUE, vm.getId()); 
AsyncJobExecutionContext.getCurrentExecutionContext().joinJob(workJob.getId()); return new VmJobVolumeOutcome(workJob, volumeId); } public Outcome<String> extractVolumeThroughJobQueue(final Long vmId, final long volumeId, final long zoneId) { final CallContext context = CallContext.current(); final User callingUser = context.getCallingUser(); final Account callingAccount = context.getCallingAccount(); final VMInstanceVO vm = _vmInstanceDao.findById(vmId); VmWorkJobVO workJob = new VmWorkJobVO(context.getContextId()); workJob.setDispatcher(VmWorkConstants.VM_WORK_JOB_DISPATCHER); workJob.setCmd(VmWorkExtractVolume.class.getName()); workJob.setAccountId(callingAccount.getId()); workJob.setUserId(callingUser.getId()); workJob.setStep(VmWorkJobVO.Step.Starting); workJob.setVmType(VirtualMachine.Type.Instance); workJob.setVmInstanceId(vm.getId()); workJob.setRelated(AsyncJobExecutionContext.getOriginJobId()); // save work context info (there are some duplications) VmWorkExtractVolume workInfo = new VmWorkExtractVolume(callingUser.getId(), callingAccount.getId(), vm.getId(), VolumeApiServiceImpl.VM_WORK_JOB_HANDLER, volumeId, zoneId); workJob.setCmdInfo(VmWorkSerializer.serialize(workInfo)); _jobMgr.submitAsyncJob(workJob, VmWorkConstants.VM_WORK_QUEUE, vm.getId()); AsyncJobExecutionContext.getCurrentExecutionContext().joinJob(workJob.getId()); return new VmJobVolumeUrlOutcome(workJob); } private Outcome<Volume> migrateVolumeThroughJobQueue(VMInstanceVO vm, VolumeVO vol, StoragePool destPool, boolean liveMigrateVolume, DiskOfferingVO newDiskOffering) { CallContext context = CallContext.current(); User callingUser = context.getCallingUser(); Account callingAccount = context.getCallingAccount(); VmWorkJobVO workJob = new VmWorkJobVO(context.getContextId()); workJob.setDispatcher(VmWorkConstants.VM_WORK_JOB_DISPATCHER); workJob.setCmd(VmWorkMigrateVolume.class.getName()); workJob.setAccountId(callingAccount.getId()); workJob.setUserId(callingUser.getId()); 
workJob.setStep(VmWorkJobVO.Step.Starting); workJob.setVmType(VirtualMachine.Type.Instance); workJob.setVmInstanceId(vm.getId()); workJob.setRelated(AsyncJobExecutionContext.getOriginJobId()); Long newDiskOfferingId = newDiskOffering != null ? newDiskOffering.getId() : null; // save work context info (there are some duplications) VmWorkMigrateVolume workInfo = new VmWorkMigrateVolume(callingUser.getId(), callingAccount.getId(), vm.getId(), VolumeApiServiceImpl.VM_WORK_JOB_HANDLER, vol.getId(), destPool.getId(), liveMigrateVolume, newDiskOfferingId); workJob.setCmdInfo(VmWorkSerializer.serialize(workInfo)); _jobMgr.submitAsyncJob(workJob, VmWorkConstants.VM_WORK_QUEUE, vm.getId()); AsyncJobExecutionContext.getCurrentExecutionContext().joinJob(workJob.getId()); return new VmJobVolumeOutcome(workJob, vol.getId()); } public Outcome<Snapshot> takeVolumeSnapshotThroughJobQueue(final Long vmId, final Long volumeId, final Long policyId, final Long snapshotId, final Long accountId, final boolean quiesceVm, final Snapshot.LocationType locationType, final boolean asyncBackup) { final CallContext context = CallContext.current(); final User callingUser = context.getCallingUser(); final Account callingAccount = context.getCallingAccount(); final VMInstanceVO vm = _vmInstanceDao.findById(vmId); VmWorkJobVO workJob = new VmWorkJobVO(context.getContextId()); workJob.setDispatcher(VmWorkConstants.VM_WORK_JOB_DISPATCHER); workJob.setCmd(VmWorkTakeVolumeSnapshot.class.getName()); workJob.setAccountId(callingAccount.getId()); workJob.setUserId(callingUser.getId()); workJob.setStep(VmWorkJobVO.Step.Starting); workJob.setVmType(VirtualMachine.Type.Instance); workJob.setVmInstanceId(vm.getId()); workJob.setRelated(AsyncJobExecutionContext.getOriginJobId()); // save work context info (there are some duplications) VmWorkTakeVolumeSnapshot workInfo = new VmWorkTakeVolumeSnapshot(callingUser.getId(), accountId != null ? 
accountId : callingAccount.getId(), vm.getId(), VolumeApiServiceImpl.VM_WORK_JOB_HANDLER, volumeId, policyId, snapshotId, quiesceVm, locationType, asyncBackup); workJob.setCmdInfo(VmWorkSerializer.serialize(workInfo)); _jobMgr.submitAsyncJob(workJob, VmWorkConstants.VM_WORK_QUEUE, vm.getId()); AsyncJobExecutionContext.getCurrentExecutionContext().joinJob(workJob.getId()); return new VmJobSnapshotOutcome(workJob, snapshotId); } @ReflectionUse private Pair<JobInfo.Status, String> orchestrateExtractVolume(VmWorkExtractVolume work) throws Exception { String volUrl = orchestrateExtractVolume(work.getVolumeId(), work.getZoneId()); return new Pair<JobInfo.Status, String>(JobInfo.Status.SUCCEEDED, _jobMgr.marshallResultObject(volUrl)); } @ReflectionUse private Pair<JobInfo.Status, String> orchestrateAttachVolumeToVM(VmWorkAttachVolume work) throws Exception { Volume vol = orchestrateAttachVolumeToVM(work.getVmId(), work.getVolumeId(), work.getDeviceId()); return new Pair<JobInfo.Status, String>(JobInfo.Status.SUCCEEDED, _jobMgr.marshallResultObject(new Long(vol.getId()))); } @ReflectionUse private Pair<JobInfo.Status, String> orchestrateDetachVolumeFromVM(VmWorkDetachVolume work) throws Exception { Volume vol = orchestrateDetachVolumeFromVM(work.getVmId(), work.getVolumeId()); return new Pair<JobInfo.Status, String>(JobInfo.Status.SUCCEEDED, _jobMgr.marshallResultObject(new Long(vol.getId()))); } @ReflectionUse private Pair<JobInfo.Status, String> orchestrateResizeVolume(VmWorkResizeVolume work) throws Exception { Volume vol = orchestrateResizeVolume(work.getVolumeId(), work.getCurrentSize(), work.getNewSize(), work.getNewMinIops(), work.getNewMaxIops(), work.getNewHypervisorSnapshotReserve(), work.getNewServiceOfferingId(), work.isShrinkOk()); return new Pair<JobInfo.Status, String>(JobInfo.Status.SUCCEEDED, _jobMgr.marshallResultObject(new Long(vol.getId()))); } @ReflectionUse private Pair<JobInfo.Status, String> orchestrateMigrateVolume(VmWorkMigrateVolume work) throws 
Exception { VolumeVO volume = _volsDao.findById(work.getVolumeId()); StoragePoolVO targetStoragePool = _storagePoolDao.findById(work.getDestPoolId()); DiskOfferingVO newDiskOffering = _diskOfferingDao.findById(work.getNewDiskOfferingId()); Volume newVol = orchestrateMigrateVolume(volume, targetStoragePool, work.isLiveMigrate(), newDiskOffering); return new Pair<JobInfo.Status, String>(JobInfo.Status.SUCCEEDED, _jobMgr.marshallResultObject(newVol.getId())); } @ReflectionUse private Pair<JobInfo.Status, String> orchestrateTakeVolumeSnapshot(VmWorkTakeVolumeSnapshot work) throws Exception { Account account = _accountDao.findById(work.getAccountId()); orchestrateTakeVolumeSnapshot(work.getVolumeId(), work.getPolicyId(), work.getSnapshotId(), account, work.isQuiesceVm(), work.getLocationType(), work.isAsyncBackup()); return new Pair<JobInfo.Status, String>(JobInfo.Status.SUCCEEDED, _jobMgr.marshallResultObject(work.getSnapshotId())); } @Override public Pair<JobInfo.Status, String> handleVmWorkJob(VmWork work) throws Exception { return _jobHandlerProxy.handleVmWorkJob(work); } private VmWorkJobVO createPlaceHolderWork(long instanceId) { VmWorkJobVO workJob = new VmWorkJobVO(""); workJob.setDispatcher(VmWorkConstants.VM_WORK_JOB_PLACEHOLDER); workJob.setCmd(""); workJob.setCmdInfo(""); workJob.setAccountId(0); workJob.setUserId(0); workJob.setStep(VmWorkJobVO.Step.Starting); workJob.setVmType(VirtualMachine.Type.Instance); workJob.setVmInstanceId(instanceId); workJob.setInitMsid(ManagementServerNode.getManagementServerId()); _workJobDao.persist(workJob); return workJob; } @Override public String getConfigComponentName() { return VolumeApiService.class.getSimpleName(); } @Override public ConfigKey<?>[] getConfigKeys() { return new ConfigKey<?>[] {ConcurrentMigrationsThresholdPerDatastore, AllowUserExpungeRecoverVolume}; } }
server: fix comparison using nullable objects (#4256) This PR fix some wrongs comparison using nullable objects. Preventing null pointer exception
server/src/main/java/com/cloud/storage/VolumeApiServiceImpl.java
server: fix comparison using nullable objects (#4256)
<ide><path>erver/src/main/java/com/cloud/storage/VolumeApiServiceImpl.java <ide> // OfflineVmwareMigration: check storage tags on disk(offering)s in comparison to destination storage pool <ide> // OfflineVmwareMigration: if no match return a proper error now <ide> DiskOfferingVO diskOffering = _diskOfferingDao.findById(vol.getDiskOfferingId()); <del> if (diskOffering.equals(null)) { <add> if (diskOffering == null) { <ide> throw new CloudRuntimeException("volume '" + vol.getUuid() + "', has no diskoffering. Migration target cannot be checked."); <ide> } <ide> if (!doesTargetStorageSupportDiskOffering(destPool, diskOffering)) {
Java
apache-2.0
error: pathspec 'plugins/job-repository-nifi/src/main/java/com/thinkbiganalytics/jobrepo/jpa/NifiEventStatisticsRepository.java' did not match any file(s) known to git
5f8d92c49f321807f99a1ebe9d27eb141ccfcd96
1
peter-gergely-horvath/kylo,claudiu-stanciu/kylo,claudiu-stanciu/kylo,Teradata/kylo,rashidaligee/kylo,Teradata/kylo,Teradata/kylo,Teradata/kylo,rashidaligee/kylo,claudiu-stanciu/kylo,rashidaligee/kylo,peter-gergely-horvath/kylo,peter-gergely-horvath/kylo,claudiu-stanciu/kylo,Teradata/kylo,rashidaligee/kylo,peter-gergely-horvath/kylo,claudiu-stanciu/kylo
package com.thinkbiganalytics.jobrepo.jpa; import org.joda.time.DateTime; import org.springframework.data.jpa.repository.JpaRepository; import org.springframework.data.jpa.repository.Query; import org.springframework.data.querydsl.QueryDslPredicateExecutor; import org.springframework.data.repository.query.Param; import java.util.List; /** * Created by sr186054 on 9/1/16. */ public interface NifiEventStatisticsRepository extends JpaRepository<NifiEventSummaryStats, String>, QueryDslPredicateExecutor<NifiEventSummaryStats> { @Query(value = "select stats from NifiEventSummaryStats as stats where stats.minEventTime between :startTime and :endTime") List<NifiEventSummaryStats> findWithinTimeWindow(@Param("startTime") DateTime start, @Param("endTime") DateTime end); }
plugins/job-repository-nifi/src/main/java/com/thinkbiganalytics/jobrepo/jpa/NifiEventStatisticsRepository.java
renamed
plugins/job-repository-nifi/src/main/java/com/thinkbiganalytics/jobrepo/jpa/NifiEventStatisticsRepository.java
renamed
<ide><path>lugins/job-repository-nifi/src/main/java/com/thinkbiganalytics/jobrepo/jpa/NifiEventStatisticsRepository.java <add>package com.thinkbiganalytics.jobrepo.jpa; <add> <add>import org.joda.time.DateTime; <add>import org.springframework.data.jpa.repository.JpaRepository; <add>import org.springframework.data.jpa.repository.Query; <add>import org.springframework.data.querydsl.QueryDslPredicateExecutor; <add>import org.springframework.data.repository.query.Param; <add> <add>import java.util.List; <add> <add>/** <add> * Created by sr186054 on 9/1/16. <add> */ <add>public interface NifiEventStatisticsRepository extends JpaRepository<NifiEventSummaryStats, String>, QueryDslPredicateExecutor<NifiEventSummaryStats> { <add> <add> @Query(value = "select stats from NifiEventSummaryStats as stats where stats.minEventTime between :startTime and :endTime") <add> List<NifiEventSummaryStats> findWithinTimeWindow(@Param("startTime") DateTime start, @Param("endTime") DateTime end); <add> <add> <add>}
Java
mit
error: pathspec 'java/src/org/broadinstitute/sting/gatk/walkers/indels/RealignerTargetCreator.java' did not match any file(s) known to git
78890c0beecf0e911246ede573d6a3c0c9e691b1
1
iontorrent/Torrent-Variant-Caller-stable,iontorrent/Torrent-Variant-Caller-stable,iontorrent/Torrent-Variant-Caller-stable,iontorrent/Torrent-Variant-Caller-stable,iontorrent/Torrent-Variant-Caller-stable,iontorrent/Torrent-Variant-Caller-stable,iontorrent/Torrent-Variant-Caller-stable,iontorrent/Torrent-Variant-Caller-stable
package org.broadinstitute.sting.gatk.walkers.indels; import net.sf.samtools.*; import org.broadinstitute.sting.gatk.refdata.*; import org.broadinstitute.sting.gatk.walkers.LocusWalker; import org.broadinstitute.sting.gatk.contexts.AlignmentContext; import org.broadinstitute.sting.gatk.contexts.ReferenceContext; import org.broadinstitute.sting.gatk.filters.Platform454Filter; import org.broadinstitute.sting.gatk.filters.ZeroMappingQualityReadFilter; import org.broadinstitute.sting.utils.*; import org.broadinstitute.sting.utils.pileup.*; import org.broadinstitute.sting.gatk.walkers.ReadFilters; import org.broadinstitute.sting.utils.cmdLine.Argument; import java.util.*; /** * Emits intervals for the Local Indel Realigner to target for cleaning. Ignores 454 reads. */ @ReadFilters({Platform454Filter.class, ZeroMappingQualityReadFilter.class}) public class RealignerTargetCreator extends LocusWalker<RealignerTargetCreator.Event, RealignerTargetCreator.Event> { // mismatch/entropy arguments @Argument(fullName="windowSize", shortName="window", doc="window size for calculating entropy or SNP clusters", required=false) protected int windowSize = 10; @Argument(fullName="mismatchFraction", shortName="mismatch", doc="fraction of base qualities needing to mismatch for a position to have high entropy; to disable set to <= 0 or > 1", required=false) protected double mismatchThreshold = 0.15; // observed indels arguments @Argument(fullName="minIndelsPerInterval", shortName="minIndels", doc="min indels per interval", required=false) int minIntervalIndelCount = 1; // interval merging arguments @Argument(fullName="maxIntervalSize", shortName="maxInterval", doc="max interval size", required=false) int maxIntervalSize = 500; private final int minReadsAtInterval = 4; @Override public boolean generateExtendedEvents() { return true; } @Override public boolean includeReadsWithDeletionAtLoci() { return true; } public void initialize() { if ( windowSize < 2 ) throw new StingException("Window 
Size must be an integer greater than 1"); } public Event map(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) { boolean hasIndel = false; boolean hasInsertion = false; boolean hasPointEvent = false; long furthestStopPos = -1; // look for insertions in the extended context (we'll get deletions from the normal context) if ( context.hasExtendedEventPileup() ) { ReadBackedExtendedEventPileup pileup = context.getExtendedEventPileup(); if ( pileup.getNumberOfInsertions() > 0 ) { hasIndel = hasInsertion = true; // check the ends of the reads to see how far they extend for (ExtendedEventPileupElement p : pileup ) furthestStopPos = Math.max(furthestStopPos, p.getRead().getAlignmentEnd()); } } // look at the rods for indels or SNPs if ( tracker != null ) { Iterator<ReferenceOrderedDatum> rods = tracker.getAllRods().iterator(); while ( rods.hasNext() ) { ReferenceOrderedDatum rod = rods.next(); if ( rod instanceof VariationRod ) { if ( ((VariationRod)rod).isIndel() ) { hasIndel = true; if ( ((VariationRod)rod).isInsertion() ) hasInsertion = true; } if ( ((VariationRod)rod).isSNP() ) hasPointEvent = true; } } } // look at the normal context to get deletions and positions with high entropy ReadBackedPileup pileup = context.getBasePileup(); if ( pileup != null ) { int mismatchQualities = 0, totalQualities = 0; char upperRef = Character.toUpperCase(ref.getBase()); for (PileupElement p : pileup ) { // check the ends of the reads to see how far they extend SAMRecord read = p.getRead(); furthestStopPos = Math.max(furthestStopPos, read.getAlignmentEnd()); // is it a deletion? 
(sanity check in case extended event missed it) if ( p.isDeletion() ) { hasIndel = true; } // look for mismatches else { if ( Character.toUpperCase(p.getBase()) != upperRef ) mismatchQualities += p.getQual(); totalQualities += p.getQual(); } } // make sure we're supposed to look for high entropy if ( mismatchThreshold > 0.0 && mismatchThreshold <= 1.0 && pileup.size() >= minReadsAtInterval && (double)mismatchQualities / (double)totalQualities >= mismatchThreshold ) hasPointEvent = true; } if ( !hasIndel && !hasPointEvent ) return null; GenomeLoc eventLoc = context.getLocation(); if ( hasInsertion ) eventLoc = GenomeLocParser.createGenomeLoc(eventLoc.getContigIndex(), eventLoc.getStart(), eventLoc.getStart()+1); EVENT_TYPE eventType = (hasIndel ? (hasPointEvent ? EVENT_TYPE.BOTH : EVENT_TYPE.INDEL_EVENT) : EVENT_TYPE.POINT_EVENT); return new Event(eventLoc, furthestStopPos, eventType); } public void onTraversalDone(Event sum) { if ( sum != null && sum.isReportableEvent() ) out.println(sum.toString()); } public Event reduceInit() { return null; } public Event reduce(Event value, Event sum) { // ignore no new events if ( value == null ) return sum; // if it's the first good value, use it if ( sum == null ) return value; // if we hit a new contig or they have no overlapping reads, then they are separate events - so clear sum if ( sum.loc.getContigIndex() != value.loc.getContigIndex() || sum.furthestStopPos < value.loc.getStart() ) { if ( sum.isReportableEvent() ) out.println(sum.toString()); return value; } // otherwise, merge the two events sum.merge(value); return sum; } private enum EVENT_TYPE { POINT_EVENT, INDEL_EVENT, BOTH } class Event { public long furthestStopPos; public GenomeLoc loc; public long eventStartPos; private long eventStopPos; private EVENT_TYPE type; private ArrayList<Long> pointEvents = new ArrayList<Long>(); public Event(GenomeLoc loc, long furthestStopPos, EVENT_TYPE type) { this.loc = loc; this.furthestStopPos = furthestStopPos; this.type = 
type; if ( type == EVENT_TYPE.INDEL_EVENT || type == EVENT_TYPE.BOTH ) { eventStartPos = loc.getStart(); eventStopPos = loc.getStop(); } else { eventStartPos = -1; eventStopPos = -1; } if ( type == EVENT_TYPE.POINT_EVENT || type == EVENT_TYPE.BOTH ) { pointEvents.add(loc.getStart()); } } public void merge(Event e) { // merges only get called for events with certain types if ( e.type == EVENT_TYPE.INDEL_EVENT || e.type == EVENT_TYPE.BOTH ) { if ( eventStartPos == -1 ) eventStartPos = e.eventStartPos; eventStopPos = e.eventStopPos; furthestStopPos = e.furthestStopPos; } if ( e.type == EVENT_TYPE.POINT_EVENT || e.type == EVENT_TYPE.BOTH ) { long newPosition = e.pointEvents.get(0); if ( pointEvents.size() > 0 ) { long lastPosition = pointEvents.get(pointEvents.size()-1); if ( newPosition - lastPosition < windowSize ) { eventStopPos = Math.max(eventStopPos, newPosition); furthestStopPos = e.furthestStopPos; if ( eventStartPos == -1 ) eventStartPos = lastPosition; else eventStartPos = Math.min(eventStartPos, lastPosition); } } pointEvents.add(newPosition); } } public boolean isReportableEvent() { return eventStartPos >= 0 && eventStopPos >= 0 && eventStopPos - eventStartPos < maxIntervalSize; } public String toString() { return String.format("%s:%d-%d", loc.getContig(), eventStartPos, eventStopPos); } } }
java/src/org/broadinstitute/sting/gatk/walkers/indels/RealignerTargetCreator.java
First version of walker that combines the functionality of IndelIntervalWalker, MismatchIntervalWalker, SNPClusterWalker, and IntervalMergerWalker - plus it allows the user to input rods containing known indels (e.g. dbSNP or 1KG calls) for automatic cleaning. Basically, all pre-processing steps for cleaning are now done in a single pass. More testing needed. git-svn-id: 4561c0a8f080806b19201efb9525134c00b76d40@2672 348d0f76-0448-11de-a6fe-93d51630548a
java/src/org/broadinstitute/sting/gatk/walkers/indels/RealignerTargetCreator.java
First version of walker that combines the functionality of IndelIntervalWalker, MismatchIntervalWalker, SNPClusterWalker, and IntervalMergerWalker - plus it allows the user to input rods containing known indels (e.g. dbSNP or 1KG calls) for automatic cleaning. Basically, all pre-processing steps for cleaning are now done in a single pass. More testing needed.
<ide><path>ava/src/org/broadinstitute/sting/gatk/walkers/indels/RealignerTargetCreator.java <add>package org.broadinstitute.sting.gatk.walkers.indels; <add> <add>import net.sf.samtools.*; <add>import org.broadinstitute.sting.gatk.refdata.*; <add>import org.broadinstitute.sting.gatk.walkers.LocusWalker; <add>import org.broadinstitute.sting.gatk.contexts.AlignmentContext; <add>import org.broadinstitute.sting.gatk.contexts.ReferenceContext; <add>import org.broadinstitute.sting.gatk.filters.Platform454Filter; <add>import org.broadinstitute.sting.gatk.filters.ZeroMappingQualityReadFilter; <add>import org.broadinstitute.sting.utils.*; <add>import org.broadinstitute.sting.utils.pileup.*; <add>import org.broadinstitute.sting.gatk.walkers.ReadFilters; <add>import org.broadinstitute.sting.utils.cmdLine.Argument; <add> <add>import java.util.*; <add> <add>/** <add> * Emits intervals for the Local Indel Realigner to target for cleaning. Ignores 454 reads. <add> */ <add>@ReadFilters({Platform454Filter.class, ZeroMappingQualityReadFilter.class}) <add>public class RealignerTargetCreator extends LocusWalker<RealignerTargetCreator.Event, RealignerTargetCreator.Event> { <add> <add> // mismatch/entropy arguments <add> @Argument(fullName="windowSize", shortName="window", doc="window size for calculating entropy or SNP clusters", required=false) <add> protected int windowSize = 10; <add> <add> @Argument(fullName="mismatchFraction", shortName="mismatch", doc="fraction of base qualities needing to mismatch for a position to have high entropy; to disable set to <= 0 or > 1", required=false) <add> protected double mismatchThreshold = 0.15; <add> <add> <add> // observed indels arguments <add> @Argument(fullName="minIndelsPerInterval", shortName="minIndels", doc="min indels per interval", required=false) <add> int minIntervalIndelCount = 1; <add> <add> <add> // interval merging arguments <add> @Argument(fullName="maxIntervalSize", shortName="maxInterval", doc="max interval size", 
required=false) <add> int maxIntervalSize = 500; <add> <add> <add> private final int minReadsAtInterval = 4; <add> <add> @Override <add> public boolean generateExtendedEvents() { return true; } <add> <add> @Override <add> public boolean includeReadsWithDeletionAtLoci() { return true; } <add> <add> <add> public void initialize() { <add> if ( windowSize < 2 ) <add> throw new StingException("Window Size must be an integer greater than 1"); <add> } <add> <add> public Event map(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) { <add> <add> boolean hasIndel = false; <add> boolean hasInsertion = false; <add> boolean hasPointEvent = false; <add> <add> long furthestStopPos = -1; <add> <add> // look for insertions in the extended context (we'll get deletions from the normal context) <add> if ( context.hasExtendedEventPileup() ) { <add> ReadBackedExtendedEventPileup pileup = context.getExtendedEventPileup(); <add> if ( pileup.getNumberOfInsertions() > 0 ) { <add> hasIndel = hasInsertion = true; <add> // check the ends of the reads to see how far they extend <add> for (ExtendedEventPileupElement p : pileup ) <add> furthestStopPos = Math.max(furthestStopPos, p.getRead().getAlignmentEnd()); <add> } <add> } <add> <add> // look at the rods for indels or SNPs <add> if ( tracker != null ) { <add> Iterator<ReferenceOrderedDatum> rods = tracker.getAllRods().iterator(); <add> while ( rods.hasNext() ) { <add> ReferenceOrderedDatum rod = rods.next(); <add> if ( rod instanceof VariationRod ) { <add> if ( ((VariationRod)rod).isIndel() ) { <add> hasIndel = true; <add> if ( ((VariationRod)rod).isInsertion() ) <add> hasInsertion = true; <add> } <add> if ( ((VariationRod)rod).isSNP() ) <add> hasPointEvent = true; <add> } <add> } <add> } <add> <add> // look at the normal context to get deletions and positions with high entropy <add> ReadBackedPileup pileup = context.getBasePileup(); <add> if ( pileup != null ) { <add> <add> int mismatchQualities = 0, totalQualities = 
0; <add> char upperRef = Character.toUpperCase(ref.getBase()); <add> for (PileupElement p : pileup ) { <add> // check the ends of the reads to see how far they extend <add> SAMRecord read = p.getRead(); <add> furthestStopPos = Math.max(furthestStopPos, read.getAlignmentEnd()); <add> <add> // is it a deletion? (sanity check in case extended event missed it) <add> if ( p.isDeletion() ) { <add> hasIndel = true; <add> } <add> <add> // look for mismatches <add> else { <add> if ( Character.toUpperCase(p.getBase()) != upperRef ) <add> mismatchQualities += p.getQual(); <add> totalQualities += p.getQual(); <add> } <add> } <add> <add> // make sure we're supposed to look for high entropy <add> if ( mismatchThreshold > 0.0 && <add> mismatchThreshold <= 1.0 && <add> pileup.size() >= minReadsAtInterval && <add> (double)mismatchQualities / (double)totalQualities >= mismatchThreshold ) <add> hasPointEvent = true; <add> } <add> <add> if ( !hasIndel && !hasPointEvent ) <add> return null; <add> <add> GenomeLoc eventLoc = context.getLocation(); <add> if ( hasInsertion ) <add> eventLoc = GenomeLocParser.createGenomeLoc(eventLoc.getContigIndex(), eventLoc.getStart(), eventLoc.getStart()+1); <add> <add> EVENT_TYPE eventType = (hasIndel ? (hasPointEvent ? 
EVENT_TYPE.BOTH : EVENT_TYPE.INDEL_EVENT) : EVENT_TYPE.POINT_EVENT); <add> <add> return new Event(eventLoc, furthestStopPos, eventType); <add> } <add> <add> public void onTraversalDone(Event sum) { <add> if ( sum != null && sum.isReportableEvent() ) <add> out.println(sum.toString()); <add> } <add> <add> public Event reduceInit() { <add> return null; <add> } <add> <add> public Event reduce(Event value, Event sum) { <add> // ignore no new events <add> if ( value == null ) <add> return sum; <add> <add> // if it's the first good value, use it <add> if ( sum == null ) <add> return value; <add> <add> // if we hit a new contig or they have no overlapping reads, then they are separate events - so clear sum <add> if ( sum.loc.getContigIndex() != value.loc.getContigIndex() || sum.furthestStopPos < value.loc.getStart() ) { <add> if ( sum.isReportableEvent() ) <add> out.println(sum.toString()); <add> return value; <add> } <add> <add> // otherwise, merge the two events <add> sum.merge(value); <add> return sum; <add> } <add> <add> private enum EVENT_TYPE { POINT_EVENT, INDEL_EVENT, BOTH } <add> <add> class Event { <add> public long furthestStopPos; <add> <add> public GenomeLoc loc; <add> public long eventStartPos; <add> private long eventStopPos; <add> private EVENT_TYPE type; <add> private ArrayList<Long> pointEvents = new ArrayList<Long>(); <add> <add> public Event(GenomeLoc loc, long furthestStopPos, EVENT_TYPE type) { <add> this.loc = loc; <add> this.furthestStopPos = furthestStopPos; <add> this.type = type; <add> <add> if ( type == EVENT_TYPE.INDEL_EVENT || type == EVENT_TYPE.BOTH ) { <add> eventStartPos = loc.getStart(); <add> eventStopPos = loc.getStop(); <add> } else { <add> eventStartPos = -1; <add> eventStopPos = -1; <add> } <add> <add> if ( type == EVENT_TYPE.POINT_EVENT || type == EVENT_TYPE.BOTH ) { <add> pointEvents.add(loc.getStart()); <add> } <add> } <add> <add> public void merge(Event e) { <add> <add> // merges only get called for events with certain types <add> 
if ( e.type == EVENT_TYPE.INDEL_EVENT || e.type == EVENT_TYPE.BOTH ) { <add> if ( eventStartPos == -1 ) <add> eventStartPos = e.eventStartPos; <add> eventStopPos = e.eventStopPos; <add> furthestStopPos = e.furthestStopPos; <add> } <add> <add> if ( e.type == EVENT_TYPE.POINT_EVENT || e.type == EVENT_TYPE.BOTH ) { <add> long newPosition = e.pointEvents.get(0); <add> if ( pointEvents.size() > 0 ) { <add> long lastPosition = pointEvents.get(pointEvents.size()-1); <add> if ( newPosition - lastPosition < windowSize ) { <add> eventStopPos = Math.max(eventStopPos, newPosition); <add> furthestStopPos = e.furthestStopPos; <add> <add> if ( eventStartPos == -1 ) <add> eventStartPos = lastPosition; <add> else <add> eventStartPos = Math.min(eventStartPos, lastPosition); <add> } <add> } <add> pointEvents.add(newPosition); <add> } <add> } <add> <add> public boolean isReportableEvent() { <add> return eventStartPos >= 0 && eventStopPos >= 0 && eventStopPos - eventStartPos < maxIntervalSize; <add> } <add> <add> public String toString() { <add> return String.format("%s:%d-%d", loc.getContig(), eventStartPos, eventStopPos); <add> } <add> } <add>}
Java
mit
e51aa3239baf37b0aa4ea77d68982dad553e1f19
0
nking/curvature-scale-space-corners-and-transformations,nking/curvature-scale-space-corners-and-transformations
package algorithms.imageProcessing; import algorithms.util.PairInt; import algorithms.util.ResourceFinder; import java.io.IOException; import java.util.HashSet; import java.util.LinkedHashSet; import java.util.Set; import java.util.logging.Logger; /** * * @author nichole */ public class PostLineThinnerCorrections { private Logger log = Logger.getLogger(this.getClass().getName()); public void correctForArtifacts(GreyscaleImage input) { ImageProcessor imageProcessor = new ImageProcessor(); int w = input.getWidth(); int h = input.getHeight(); Set<PairInt> points = imageProcessor.readNonZeroPixels(input); //TODO: reduce the number of patterns here if possible // and make sure that true corners aren't drastically reduced to less // usable smaller corners try { String dirPath = ResourceFinder.findDirectory("bin"); ImageIOHelper.writeOutputImage(dirPath + "/nonZero.png", input); } catch (IOException e){} correctForHoleArtifacts0(points, w, h); correctForHoleArtifacts0_1(points, w, h); correctForHoleArtifacts0_2(points, w, h); correctForHoleArtifacts0_3(points, w, h); correctForHoleArtifacts0_4(points, w, h); correctForHoleArtifacts1(points, w, h); correctForHoleArtifacts1_2(points, w, h); correctForHoleArtifacts1_2_1(points, w, h); correctForHoleArtifacts1_3(points, w, h); correctForHoleArtifacts1_4(points, w, h); correctForZigZag0(points, w, h); correctForZigZag0Alt(points, w, h); correctForZigZag1(points, w, h); correctForZigZag2(points, w, h); correctForZigZag1Alt(points, w, h); correctForZigZag3(points, w, h); correctForZigZag5(points, w, h); correctForZigZag6(points, w, h); correctForWs(points, w, h); // TODO: revisit, not sure this is always an artifact: correctForLine0(points, w, h); // better edge extraction at the expense of unsharpening true corners: correctForLs(points, w, h); correctForLs2(points, w, h); correctForZigZag1(points, w, h); correctForSpurs(points, w, h); correctForZigZag7(points, w, h); imageProcessor.writeAsBinaryToImage(input, points); } private 
void correctForZigZag0(Set<PairInt> points, int imageWidth, int imageHeight) { /* looking for pattern 0 0 2 0 # # 1 #* #< 0 0 # 0 0 -1 -1 0 1 2 and removing the topmost left #'s */ LinkedHashSet<PairInt> ones = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> zeroes = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> changeToZeroes = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> changeToOnes = new LinkedHashSet<PairInt>(); // y's are inverted here because sketch above is top left is (0,0) zeroes.add(new PairInt(0, -1)); zeroes.add(new PairInt(0, -2)); zeroes.add(new PairInt(1, 1)); zeroes.add(new PairInt(1, -2)); zeroes.add(new PairInt(2, 0)); zeroes.add(new PairInt(2, 1)); ones.add(new PairInt(-1, 1)); ones.add(new PairInt(1, -1)); ones.add(new PairInt(1, 0)); ones.add(new PairInt(2, -1)); changeToZeroes.add(new PairInt(1, 0)); int startValue = 1; replacePattern(points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startValue); rotate90ThreeTimes(points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startValue); } private void correctForZigZag0Alt(Set<PairInt> points, int imageWidth, int imageHeight) { /* keep 0 0 # 0 2 0 0 # 0 0 1 0 #* #< 0 0 0 0 # 0 0 -1 0 # 0 -2 -1 0 1 2 3 */ LinkedHashSet<PairInt> ones = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> zeroes = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> changeToZeroes = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> changeToOnes = new LinkedHashSet<PairInt>(); // y's are inverted here because sketch above is top left is (0,0) zeroes.add(new PairInt(-1, 2)); zeroes.add(new PairInt(-1, 1)); zeroes.add(new PairInt(-1, 0)); zeroes.add(new PairInt(-1, -1)); zeroes.add(new PairInt(0, -1)); zeroes.add(new PairInt(0, -2)); zeroes.add(new PairInt(1, 1)); zeroes.add(new PairInt(1, 2)); zeroes.add(new PairInt(1, -2)); zeroes.add(new PairInt(2, 1)); zeroes.add(new PairInt(2, 0)); zeroes.add(new PairInt(2, -1)); zeroes.add(new PairInt(3, 0)); 
zeroes.add(new PairInt(3, -1)); zeroes.add(new PairInt(3, -2)); ones.add(new PairInt(0, 2)); ones.add(new PairInt(0, 1)); ones.add(new PairInt(1, 0)); ones.add(new PairInt(1, -1)); ones.add(new PairInt(2, -2)); changeToZeroes.add(new PairInt(1, 0)); int startValue = 1; replacePattern(points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startValue); rotate90ThreeTimes(points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startValue); } private void correctForZigZag5(Set<PairInt> points, int imageWidth, int imageHeight) { /* keep 0 # 0 2 0 # 0 0 1 0 # #*<0 0 0 0 # 0 -1 0 # -2 -2 -1 0 1 */ LinkedHashSet<PairInt> ones = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> zeroes = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> changeToZeroes = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> changeToOnes = new LinkedHashSet<PairInt>(); // y's are inverted here because sketch above is top left is (0,0) zeroes.add(new PairInt(-2, 1)); zeroes.add(new PairInt(-2, 0)); zeroes.add(new PairInt(-2, -1)); zeroes.add(new PairInt(-2, -2)); zeroes.add(new PairInt(-1, 1)); zeroes.add(new PairInt(0, 2)); zeroes.add(new PairInt(0, -1)); zeroes.add(new PairInt(0, -2)); zeroes.add(new PairInt(1, 1)); zeroes.add(new PairInt(1, 0)); zeroes.add(new PairInt(1, -1)); ones.add(new PairInt(-1, 0)); ones.add(new PairInt(-1, -1)); ones.add(new PairInt(-1, -2)); ones.add(new PairInt(0, 1)); ones.add(new PairInt(1, 2)); changeToZeroes.add(new PairInt(0, 0)); int startValue = 1; replacePattern(points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startValue); rotate90ThreeTimes(points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startValue); } private void correctForZigZag6(Set<PairInt> points, int imageWidth, int imageHeight) { /* 0 0 0 0 2 0 #< # # 1 # #* # 0 0 0 0 0 -1 -1 0 1 2 */ LinkedHashSet<PairInt> ones = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> zeroes = new 
LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> changeToZeroes = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> changeToOnes = new LinkedHashSet<PairInt>(); // y's are inverted here because sketch above is top left is (0,0) zeroes.add(new PairInt(0, 1)); zeroes.add(new PairInt(0, -1)); zeroes.add(new PairInt(0, -2)); zeroes.add(new PairInt(1, 1)); zeroes.add(new PairInt(1, -2)); zeroes.add(new PairInt(2, 1)); zeroes.add(new PairInt(2, 0)); zeroes.add(new PairInt(2, -2)); zeroes.add(new PairInt(3, -2)); ones.add(new PairInt(-1, 0)); ones.add(new PairInt(1, 0)); ones.add(new PairInt(1, -1)); ones.add(new PairInt(2, -1)); ones.add(new PairInt(3, -1)); changeToZeroes.add(new PairInt(1, -1)); int startValue = 1; replacePattern(points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startValue); rotate90ThreeTimes(points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startValue); } /** * using this one unfortunately reduces the sharpness of real corners, * but it allows clear point ordering when extracting edges from the * line thinned image. 
* * @param input */ private void correctForZigZag7(Set<PairInt> points, int imageWidth, int imageHeight) { /* # 0 # 0 1 # #*<0 0 # 0 0 -1 # -2 -1 0 1 2 */ LinkedHashSet<PairInt> ones = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> zeroes = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> changeToZeroes = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> changeToOnes = new LinkedHashSet<PairInt>(); // y's are inverted here because sketch above is top left is (0,0) zeroes.add(new PairInt(0, 1)); zeroes.add(new PairInt(1, 1)); zeroes.add(new PairInt(1, 0)); zeroes.add(new PairInt(1, -1)); zeroes.add(new PairInt(-1, -1)); ones.add(new PairInt(0, -1)); ones.add(new PairInt(-1, 0)); ones.add(new PairInt(-1, 1)); ones.add(new PairInt(-1, 2)); ones.add(new PairInt(0, -2)); changeToZeroes.add(new PairInt(0, 0)); int startValue = 1; replacePattern(points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startValue); rotate90ThreeTimes(points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startValue); } /** * using this one unfortunately reduces the sharpness of real corners, * but it allows clear point ordering when extracting edges from the * line thinned image. 
* * @param input */ private void correctForLs(Set<PairInt> points, int imageWidth, int imageHeight) { /* 0 # 0 1 0 #* 0 0 0 #< # -1 0 0 0 -2 -1 0 1 2 */ LinkedHashSet<PairInt> ones = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> zeroes = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> changeToZeroes = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> changeToOnes = new LinkedHashSet<PairInt>(); // y's are inverted here because sketch above is top left is (0,0) zeroes.add(new PairInt(-1, 2)); zeroes.add(new PairInt(-1, 1)); zeroes.add(new PairInt(-1, 0)); zeroes.add(new PairInt(-1, -1)); zeroes.add(new PairInt(0, 2)); zeroes.add(new PairInt(1, 2)); zeroes.add(new PairInt(1, 0)); zeroes.add(new PairInt(1, -1)); ones.add(new PairInt(0, 1)); ones.add(new PairInt(0, -1)); ones.add(new PairInt(1, 1)); changeToZeroes.add(new PairInt(0, 1)); int startValue = 1; replacePattern(points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startValue); rotate90ThreeTimes(points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startValue); } private void correctForLs2(Set<PairInt> points, int imageWidth, int imageHeight) { /* 0 # 0 0 1 0 #*<# # 0 0 0 0 -1 -1 0 1 2 */ LinkedHashSet<PairInt> ones = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> zeroes = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> changeToZeroes = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> changeToOnes = new LinkedHashSet<PairInt>(); // y's are inverted here because sketch above is top left is (0,0) zeroes.add(new PairInt(-1, -1)); zeroes.add(new PairInt(-1, 0)); zeroes.add(new PairInt(-1, 1)); zeroes.add(new PairInt(0, 1)); zeroes.add(new PairInt(1, 1)); zeroes.add(new PairInt(1, -1)); zeroes.add(new PairInt(2, -1)); ones.add(new PairInt(0, -1)); ones.add(new PairInt(1, 0)); ones.add(new PairInt(2, 0)); changeToZeroes.add(new PairInt(0, 0)); int startValue = 1; replacePattern(points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, 
changeToOnes, startValue); rotate90ThreeTimes(points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startValue); } private void correctForWs(Set<PairInt> points, int imageWidth, int imageHeight) { /* 0 0 # 1 0 #*<# 0 # # 0 -1 -2 -1 0 1 2 */ LinkedHashSet<PairInt> ones = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> zeroes = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> changeToZeroes = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> changeToOnes = new LinkedHashSet<PairInt>(); // y's are inverted here because sketch above is top left is (0,0) zeroes.add(new PairInt(-1, 0)); zeroes.add(new PairInt(-1, -1)); zeroes.add(new PairInt(0, -1)); zeroes.add(new PairInt(1, 1)); ones.add(new PairInt(-1, 1)); ones.add(new PairInt(0, 1)); ones.add(new PairInt(1, 0)); ones.add(new PairInt(1, -1)); changeToZeroes.add(new PairInt(0, 0)); int startValue = 1; replacePattern(points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startValue); rotate90ThreeTimes(points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startValue); } private void correctForZigZag1(Set<PairInt> points, int imageWidth, int imageHeight) { /* looking for pattern # 3 0 # 2 0 0 # 0 1 0 #* #< 0 0 # 0 -1 -1 0 1 2 and removing the topmost left #'s */ LinkedHashSet<PairInt> ones = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> zeroes = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> changeToZeroes = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> changeToOnes = new LinkedHashSet<PairInt>(); // y's are inverted here because sketch above is top left is (0,0) zeroes.add(new PairInt(-1, 0)); zeroes.add(new PairInt(-1, -1)); zeroes.add(new PairInt(0, -1)); zeroes.add(new PairInt(0, -2)); zeroes.add(new PairInt(2, 0)); zeroes.add(new PairInt(2, -1)); zeroes.add(new PairInt(1, 1)); ones.add(new PairInt(-1, 1)); ones.add(new PairInt(1, 0)); ones.add(new PairInt(1, -1)); ones.add(new PairInt(2, -2)); ones.add(new PairInt(2, 
-3)); changeToZeroes.add(new PairInt(1, 0)); int startValue = 1; replacePattern(points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startValue); rotate90ThreeTimes(points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startValue); } private void correctForZigZag2(Set<PairInt> points, int imageWidth, int imageHeight) { /* looking for pattern 0 0 0 # 1 0 #* # 0 0 0 # 0 -1 # 0 -2 -2 -1 0 1 2 and removing the topmost left #'s */ LinkedHashSet<PairInt> ones = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> zeroes = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> changeToZeroes = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> changeToOnes = new LinkedHashSet<PairInt>(); // y's are inverted here because sketch above is top left is (0,0) zeroes.add(new PairInt(-1, 1)); zeroes.add(new PairInt(-1, 0)); zeroes.add(new PairInt(-1, -1)); zeroes.add(new PairInt(0, 2)); zeroes.add(new PairInt(0, -1)); zeroes.add(new PairInt(1, 1)); zeroes.add(new PairInt(1, -1)); zeroes.add(new PairInt(2, 0)); ones.add(new PairInt(-1, 2)); ones.add(new PairInt(0, 1)); ones.add(new PairInt(1, 0)); ones.add(new PairInt(2, -1)); changeToZeroes.add(new PairInt(0, 0)); int startValue = 1; replacePattern(points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startValue); rotate90ThreeTimes(points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startValue); } private void correctForZigZag1Alt(Set<PairInt> points, int imageWidth, int imageHeight) { /* looking for pattern 0 0 # # 1 0 #* #< 0 0 0 # 0 0 -1 # 0 -2 -2 -1 0 1 2 */ LinkedHashSet<PairInt> ones = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> zeroes = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> changeToZeroes = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> changeToOnes = new LinkedHashSet<PairInt>(); // y's are inverted here because sketch above is top left is (0,0) zeroes.add(new PairInt(1, 1)); zeroes.add(new 
PairInt(0, 2)); zeroes.add(new PairInt(-1, 1)); zeroes.add(new PairInt(-1, 0)); zeroes.add(new PairInt(-1, -1)); zeroes.add(new PairInt(0, -1)); zeroes.add(new PairInt(2, 1)); zeroes.add(new PairInt(2, 0)); ones.add(new PairInt(0, -1)); // NOT YET REV ones.add(new PairInt(1, 0)); ones.add(new PairInt(1, 1)); ones.add(new PairInt(-1, -2)); ones.add(new PairInt(2, 1)); changeToZeroes.add(new PairInt(1, 0)); int startValue = 1; replacePattern(points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startValue); rotate90ThreeTimes(points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startValue); } private void correctForZigZag3(Set<PairInt> points, int imageWidth, int imageHeight) { /* looking for pattern 0 # 3 0 # 2 0 #< # 0 1 0 #* 0 0 # 0 0 -1 -2 -1 0 1 2 and removing the topmost left #'s */ LinkedHashSet<PairInt> ones = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> zeroes = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> changeToZeroes = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> changeToOnes = new LinkedHashSet<PairInt>(); // y's are inverted here because sketch above is top left is (0,0) zeroes.add(new PairInt(-1, 0)); zeroes.add(new PairInt(-1, -1)); zeroes.add(new PairInt(0, 1)); zeroes.add(new PairInt(0, -2)); zeroes.add(new PairInt(1, 1)); zeroes.add(new PairInt(1, 0)); zeroes.add(new PairInt(1, -3)); zeroes.add(new PairInt(2, -1)); ones.add(new PairInt(-1, 1)); ones.add(new PairInt(0, -1)); ones.add(new PairInt(1, -1)); ones.add(new PairInt(1, -2)); ones.add(new PairInt(2, -3)); changeToZeroes.add(new PairInt(0, -1)); int startValue = 1; replacePattern(points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startValue); rotate90ThreeTimes(points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startValue); } /** * using this one unfortunately reduces the sharpness of real corners, * but it allows clear point ordering when extracting edges from the * 
line thinned image. * * @param input */ private void correctForRemaining(Set<PairInt> points, int imageWidth, int imageHeight) { /* # 1 0 #* 0 0 0 #< # -1 0 0 0 -2 -1 0 1 2 */ LinkedHashSet<PairInt> ones = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> zeroes = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> changeToZeroes = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> changeToOnes = new LinkedHashSet<PairInt>(); // y's are inverted here because sketch above is top left is (0,0) zeroes.add(new PairInt(-1, 2)); zeroes.add(new PairInt(-1, 1)); zeroes.add(new PairInt(-1, 0)); zeroes.add(new PairInt(0, 2)); zeroes.add(new PairInt(1, 2)); zeroes.add(new PairInt(1, 0)); ones.add(new PairInt(-1, -1)); ones.add(new PairInt(0, 1)); ones.add(new PairInt(1, 1)); changeToZeroes.add(new PairInt(0, 1)); int startValue = 1; replacePattern(points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startValue); rotate90ThreeTimes(points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startValue); } private void reverseXs( final Set<PairInt> zeroes, final Set<PairInt> ones, Set<PairInt> changeToZeroes, final Set<PairInt> changeToOnes) { // ----- change the sign of x to handle other direction ----- for (PairInt p : zeroes) { p.setX(-1 * p.getX()); } for (PairInt p : ones) { p.setX(-1 * p.getX()); } for (PairInt p : changeToZeroes) { p.setX(-1 * p.getX()); } for (PairInt p : changeToOnes) { p.setX(-1 * p.getX()); } } private void reverseXs(final Set<PairInt> zeroes, final Set<PairInt> ones) { // ----- change the sign of x to handle other direction ----- for (PairInt p : zeroes) { p.setX(-1 * p.getX()); } for (PairInt p : ones) { p.setX(-1 * p.getX()); } } private void reverseYs(final Set<PairInt> zeroes, final Set<PairInt> ones, Set<PairInt> changeToZeroes, final Set<PairInt> changeToOnes) { // ----- change the sign of y ----- for (PairInt p : zeroes) { p.setY(-1 * p.getY()); } for (PairInt p : ones) { p.setY(-1 * p.getY()); } 
for (PairInt p : changeToZeroes) { p.setY(-1 * p.getY()); } for (PairInt p : changeToOnes) { p.setY(-1 * p.getY()); } } private void reverseYs(final Set<PairInt> zeroes, final Set<PairInt> ones) { // ----- change the sign of y ----- for (PairInt p : zeroes) { p.setY(-1 * p.getY()); } for (PairInt p : ones) { p.setY(-1 * p.getY()); } } private void rotate90ThreeTimes( Set<PairInt> points, int imageWidth, int imageHeight, final LinkedHashSet<PairInt> zeroes, final LinkedHashSet<PairInt> ones, LinkedHashSet<PairInt> changeToZeroes, final LinkedHashSet<PairInt> changeToOnes, final int startCenterValue) { // ----- change the sign of x to handle other direction ----- reverseXs(zeroes, ones, changeToZeroes, changeToOnes); replacePattern( points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startCenterValue); // ----- change the sign of y to handle other direction ----- reverseYs(zeroes, ones, changeToZeroes, changeToOnes); replacePattern( points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startCenterValue); // ----- change the sign of x to handle another direction ----- reverseXs(zeroes, ones, changeToZeroes, changeToOnes); replacePattern( points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startCenterValue); } private void replacePattern( Set<PairInt> points, int imageWidth, int imageHeight, final LinkedHashSet<PairInt> zeroes, final LinkedHashSet<PairInt> ones, final LinkedHashSet<PairInt> changeToZeroes, final LinkedHashSet<PairInt> changeToOnes, final int startCenterValue) { int w = imageWidth; int h = imageHeight; Set<PairInt> tmpPointsRemoved = new HashSet<PairInt>(); Set<PairInt> tmpPointsAdded = new HashSet<PairInt>(); for (PairInt p : points) { boolean isNotPresent = tmpPointsRemoved.contains(p); if (startCenterValue == 0) { // skip if point is in set if (!isNotPresent) { continue; } } else if (isNotPresent) { // skip if point is not in set continue; } int col = p.getX(); int row = 
p.getY(); boolean foundPattern = true; for (PairInt p2 : zeroes) { int x = col + p2.getX(); int y = row + p2.getY(); if ((x < 0) || (y < 0) || (x > (w - 1)) || (y > (h - 1))) { //TODO: revisit this foundPattern = false; break; } PairInt p3 = new PairInt(x, y); if (!tmpPointsRemoved.contains(p3) && (points.contains(p3) || tmpPointsAdded.contains(p3))) { foundPattern = false; break; } } if (!foundPattern) { continue; } for (PairInt p2 : ones) { int x = col + p2.getX(); int y = row + p2.getY(); if ((x < 0) || (y < 0) || (x > (w - 1)) || (y > (h - 1))) { foundPattern = false; break; } PairInt p3 = new PairInt(x, y); if (tmpPointsRemoved.contains(p3) || (!points.contains(p3) && !tmpPointsAdded.contains(p3))) { foundPattern = false; break; } } if (!foundPattern) { continue; } for (PairInt p2 : changeToZeroes) { int x = col + p2.getX(); int y = row + p2.getY(); if ((x < 0) || (y < 0) || (x > (w - 1)) || (y > (h - 1))) { continue; } tmpPointsRemoved.add(new PairInt(x, y)); } for (PairInt p2 : changeToOnes) { int x = col + p2.getX(); int y = row + p2.getY(); if ((x < 0) || (y < 0) || (x > (w - 1)) || (y > (h - 1))) { continue; } tmpPointsAdded.add(new PairInt(x, y)); } } for (PairInt p2 : tmpPointsRemoved) { points.remove(p2); } for (PairInt p2 : tmpPointsAdded) { points.add(p2); } } private void debugPrint(GreyscaleImage input, int xStart, int xStop, int yStart, int yStop) { StringBuilder sb = new StringBuilder(); for (int row = yStart; row <= yStop; row++) { sb.append(String.format("%3d: ", row)); for (int col = xStart; col <= xStop; col++) { sb.append(String.format(" %3d ", input.getValue(col, row))); } sb.append(String.format("\n")); } System.out.println(sb.toString()); } /** * removes a hole artifact in inclined lines. note that this should * probably be adjusted for gaussian convolution combined radius * if used outside of the gradientXY image produced by the * CannyEdgeFilter. 
* @param input */ private void correctForHoleArtifacts3(Set<PairInt> points, int imageWidth, int imageHeight) { /* looking for pattern 0 0 0 0 0 0 3 4 0 0 0 0 1 1 1 2 3 0 0 0 1 0 1 0 1 2 0 0 1 0* 1 1 0 0 1 0 1 0 1** 1 0 0 -1 0 0 1 1 1 0 0 0 -2 0 1 0 0 0 0 0 -3 -3 -2 -1 0 1 2 3 and removing the topmost left #'s */ LinkedHashSet<PairInt> ones = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> zeroes = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> changeToZeroes = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> changeToOnes = new LinkedHashSet<PairInt>(); // y's are inverted here because sketch above is top left is (0,0) zeroes.add(new PairInt(-1, -1 - 1)); zeroes.add(new PairInt(-1, 1 - 1)); zeroes.add(new PairInt(1, -1 - 1)); zeroes.add(new PairInt(0, -2 - 1)); zeroes.add(new PairInt(1, 2 - 1)); zeroes.add(new PairInt(2, 2 - 1)); zeroes.add(new PairInt(2, 1 - 1)); zeroes.add(new PairInt(0, -3 - 1)); zeroes.add(new PairInt(1, -3 - 1)); zeroes.add(new PairInt(-1, -2 - 1)); zeroes.add(new PairInt(-1, -3 - 1)); zeroes.add(new PairInt(2, -3 - 1)); zeroes.add(new PairInt(-2, -3 - 1)); zeroes.add(new PairInt(-2, -2 - 1)); zeroes.add(new PairInt(-2, -1 - 1)); zeroes.add(new PairInt(-2, 0 - 1)); zeroes.add(new PairInt(-1, 3 - 1)); zeroes.add(new PairInt(0, 3 - 1)); zeroes.add(new PairInt(1, 3 - 1)); zeroes.add(new PairInt(2, 3 - 1)); zeroes.add(new PairInt(3, 3 - 1)); zeroes.add(new PairInt(3, 2 - 1)); zeroes.add(new PairInt(3, 1 - 1)); zeroes.add(new PairInt(3, 0 - 1)); zeroes.add(new PairInt(3, -1 - 1)); zeroes.add(new PairInt(-3, -3 - 1)); zeroes.add(new PairInt(-3, 2 - 1)); zeroes.add(new PairInt(-3, -1 - 1)); zeroes.add(new PairInt(-3, 0 - 1)); zeroes.add(new PairInt(-3, -1 - 1)); zeroes.add(new PairInt(-3, -2 - 1)); zeroes.add(new PairInt(-3, -3 - 1)); ones.add(new PairInt(0, 1 - 1)); ones.add(new PairInt(0, 2 - 1)); ones.add(new PairInt(0, -1 - 1)); ones.add(new PairInt(-1, 0 - 1)); ones.add(new PairInt(-1, 2 - 1)); ones.add(new PairInt(1, -2 - 1)); 
ones.add(new PairInt(1, 0 - 1)); ones.add(new PairInt(1, 1 - 1)); ones.add(new PairInt(2, -2 - 1)); ones.add(new PairInt(2, -1 - 1)); ones.add(new PairInt(2, 0 - 1)); ones.add(new PairInt(3, -2 - 1)); ones.add(new PairInt(-2, 1 - 1)); ones.add(new PairInt(-2, 2 - 1)); ones.add(new PairInt(-2, 3 - 1)); changeToZeroes.add(new PairInt(-2, 2 - 1)); changeToZeroes.add(new PairInt(-2, 1 - 1)); changeToZeroes.add(new PairInt(-1, 0 - 1)); changeToZeroes.add(new PairInt(0, -1 - 1)); changeToZeroes.add(new PairInt(1, -2 - 1)); changeToZeroes.add(new PairInt(2, -2 - 1)); changeToZeroes.add(new PairInt(2, 0 - 1)); changeToZeroes.add(new PairInt(1, 1 - 1)); changeToZeroes.add(new PairInt(0, 2 - 1)); int startValue = 1; replacePattern(points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startValue); rotate90ThreeTimes(points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startValue); } /** * removes a hole artifact in inclined lines. note that this should * probably be adjusted for gaussian convolution combined radius * if used outside of the gradientXY image produced by the * CannyEdgeFilter. 
* @param input */ private void correctForHoleArtifacts2(Set<PairInt> points, int imageWidth, int imageHeight) { /* looking for pattern 0 0 0 0 0 0 3 4 0 0 0 1 1 1 2 3 0 0 1 0 1 0 1 2 0 1 0 1 1 0 0 1 0 1 1* 1 0 0 -1 0 0 1 0 0 0 0 -2 -1 -2 -1 0 1 2 3 */ LinkedHashSet<PairInt> ones = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> zeroes = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> changeToZeroes = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> changeToOnes = new LinkedHashSet<PairInt>(); // y's are inverted here because sketch above is top left is (0,0) zeroes.add(new PairInt(0, 2 - 1)); zeroes.add(new PairInt(0, -2 - 1)); zeroes.add(new PairInt(-1, -1 - 1)); zeroes.add(new PairInt(-1, -2 - 1)); zeroes.add(new PairInt(-1, -3 - 1)); zeroes.add(new PairInt(0, -3 - 1)); zeroes.add(new PairInt(1, -3 - 1)); zeroes.add(new PairInt(1, -1 - 1)); zeroes.add(new PairInt(1, 2 - 1)); zeroes.add(new PairInt(2, -3 - 1)); zeroes.add(new PairInt(2, 1 - 1)); zeroes.add(new PairInt(2, 2 - 1)); zeroes.add(new PairInt(-2, -3 - 1)); zeroes.add(new PairInt(-2, -2 - 1)); zeroes.add(new PairInt(-2, -1 - 1)); zeroes.add(new PairInt(-2, 0 - 1)); zeroes.add(new PairInt(-2, 1 - 1)); zeroes.add(new PairInt(-2, 2 - 1)); zeroes.add(new PairInt(3, -3 - 1)); zeroes.add(new PairInt(3, -1 - 1)); zeroes.add(new PairInt(3, 0 - 1)); zeroes.add(new PairInt(3, 1 - 1)); zeroes.add(new PairInt(3, 2 - 1)); ones.add(new PairInt(0, -1 - 1)); ones.add(new PairInt(0, 1 - 1)); ones.add(new PairInt(-1, 0 - 1)); ones.add(new PairInt(-1, 1 - 1)); ones.add(new PairInt(-1, 2 - 1)); ones.add(new PairInt(1, -2 - 1)); ones.add(new PairInt(1, 0 - 1)); ones.add(new PairInt(1, 1 - 1)); ones.add(new PairInt(2, -2 - 1)); ones.add(new PairInt(2, -1 - 1)); ones.add(new PairInt(2, 0 - 1)); ones.add(new PairInt(3, -2 - 1)); changeToZeroes.add(new PairInt(-1, 0 - 1)); changeToZeroes.add(new PairInt(-1, 1 - 1)); changeToZeroes.add(new PairInt(0, -1 - 1)); changeToZeroes.add(new PairInt(1, -2 - 1)); 
changeToZeroes.add(new PairInt(1, 1 - 1)); changeToZeroes.add(new PairInt(2, -2 - 1)); changeToZeroes.add(new PairInt(2, 0 - 1)); int startValue = 1; replacePattern(points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startValue); rotate90ThreeTimes(points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startValue); } private void correctForLine0(Set<PairInt> points, int imageWidth, int imageHeight) { /* looking for pattern 0 # 0 2 0 # 0 0 1 0 0 # 0 0 0 #* 0 0 -1 0 # 0 -1 0 1 2 */ LinkedHashSet<PairInt> ones = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> zeroes = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> changeToZeroes = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> changeToOnes = new LinkedHashSet<PairInt>(); // y's are inverted here because sketch above is top left is (0,0) zeroes.add(new PairInt(-1, 2 - 1)); zeroes.add(new PairInt(-1, 1 - 1)); zeroes.add(new PairInt(-1, 0 - 1)); zeroes.add(new PairInt(-1, -1 - 1)); zeroes.add(new PairInt(-1, -2 - 1)); zeroes.add(new PairInt(1, 2 - 1)); zeroes.add(new PairInt(1, 1 - 1)); zeroes.add(new PairInt(1, -1 - 1)); zeroes.add(new PairInt(1, -2 - 1)); zeroes.add(new PairInt(2, 1 - 1)); zeroes.add(new PairInt(2, 0 - 1)); zeroes.add(new PairInt(2, -1 - 1)); ones.add(new PairInt(0, 2 - 1)); ones.add(new PairInt(0, 1 - 1)); ones.add(new PairInt(0, -1 - 1)); ones.add(new PairInt(0, -2 - 1)); ones.add(new PairInt(1, 0 - 1)); changeToZeroes.add(new PairInt(1, 0 - 1)); changeToOnes.add(new PairInt(0, 0 - 1)); int startValue = 1; replacePattern(points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startValue); rotate90ThreeTimes(points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startValue); } /** * possibly decreases sharpness of a true diagonal edge at the expense of * making a better line width for the edge extractor. 
* @param input
     */
    private void correctForSpurs(Set<PairInt> points, int imageWidth,
        int imageHeight) {

        /*
        Removes a single-pixel spur.  Relative to the center pixel (which
        must be set), the required neighborhood is given by the ones and
        zeroes sets below; on a match the center is erased.
        y offsets are inverted w.r.t. the original sketch, whose origin
        was the top-left corner.
        */
        LinkedHashSet<PairInt> ones = new LinkedHashSet<PairInt>();
        LinkedHashSet<PairInt> zeroes = new LinkedHashSet<PairInt>();
        LinkedHashSet<PairInt> changeToZeroes = new LinkedHashSet<PairInt>();
        LinkedHashSet<PairInt> changeToOnes = new LinkedHashSet<PairInt>();

        zeroes.add(new PairInt(-1, 1));  zeroes.add(new PairInt(-1, 0));
        zeroes.add(new PairInt(0, 1));   zeroes.add(new PairInt(1, 1));
        zeroes.add(new PairInt(1, -1));

        ones.add(new PairInt(-1, -1));   ones.add(new PairInt(0, -1));

        changeToZeroes.add(new PairInt(0, 0));

        int startValue = 1;

        replacePattern(points, imageWidth, imageHeight, zeroes, ones,
            changeToZeroes, changeToOnes, startValue);

        rotate90ThreeTimes(points, imageWidth, imageHeight, zeroes, ones,
            changeToZeroes, changeToOnes, startValue);
    }

    /**
     * Fills the one-pixel hole under a 3-pixel cap:
     * <pre>
     *           1          2
     *        1  0  1       1
     *           1*         0
     *    -2 -1  0  1  2
     * </pre>
     * The hole at (0,-1) relative to the center is set, and each of the
     * three capping pixels is then erased if the ErosionFilter reports it
     * can be removed without disconnecting a line.
     *
     * @param points set pixels of the line-thinned image; modified in place
     * @param imageWidth image width used for bounds checks
     * @param imageHeight image height used for bounds checks
     */
    protected void correctForHoleArtifacts1(Set<PairInt> points, int imageWidth,
        int imageHeight) {

        ErosionFilter erosionFilter = new ErosionFilter();

        Set<PairInt> ones = new HashSet<PairInt>();
        Set<PairInt> zeroes = new HashSet<PairInt>();

        // y's are inverted because the sketch origin is the top-left corner
        ones.add(new PairInt(-1, -1));
        ones.add(new PairInt(0, -2));
        ones.add(new PairInt(1, -1));

        zeroes.add(new PairInt(0, -1));

        int w = imageWidth;
        int h = imageHeight;

        Set<PairInt> tmpPointsRemoved = new HashSet<PairInt>();
        Set<PairInt> tmpPointsAdded = new HashSet<PairInt>();

        for (PairInt p : points) {

            int col = p.getX();
            int row = p.getY();

            // require every 'ones' offset to currently be a set pixel,
            // taking the pending adds/removes into account
            boolean foundPattern = true;
            for (PairInt p2 : ones) {
                int x = col + p2.getX();
                int y = row + p2.getY();
                if ((x < 0) || (y < 0) || (x > (w - 1)) || (y > (h - 1))) {
                    foundPattern = false;
                    break;
                }
                PairInt p3 = new PairInt(x, y);
                if (tmpPointsRemoved.contains(p3) ||
                    (!points.contains(p3) && !tmpPointsAdded.contains(p3))) {
                    foundPattern = false;
                    break;
                }
            }
            if (!foundPattern) {
                continue;
            }

            // require every 'zeroes' offset to currently be unset
            for (PairInt p2 : zeroes) {
                int x = col + p2.getX();
                int y = row + p2.getY();
                if ((x < 0) || (y < 0) || (x > (w - 1)) || (y > (h - 1))) {
                    foundPattern = false;
                    break;
                }
                PairInt p3 = new PairInt(x, y);
                if (!tmpPointsRemoved.contains(p3) &&
                    (points.contains(p3) || tmpPointsAdded.contains(p3))) {
                    foundPattern = false;
                    break;
                }
            }
            if (!foundPattern) {
                continue;
            }

            // change the central 0 to a 1
            tmpPointsAdded.add(new PairInt(col, row - 1));

            // erase each surrounding 1 that can be nulled without
            // disconnecting a line; ErosionFilter records any removal in
            // tmpPointsRemoved as a side effect of process()
            for (PairInt p2 : ones) {
                int x = col + p2.getX();
                int y = row + p2.getY();
                erosionFilter.process(new PairInt(x, y), points,
                    tmpPointsAdded, tmpPointsRemoved, w, h);
            }
        }

        for (PairInt p2 : tmpPointsRemoved) {
            points.remove(p2);
        }
        for (PairInt p2 : tmpPointsAdded) {
            points.add(p2);
        }
    }

    /**
     * Fills a diagonal 2-pixel hole variant: when the ring of set pixels
     * and the interior gaps below match, the interior is filled
     * (changeToOnes) and the ring thinned (changeToZeroes), in all four
     * 90-degree orientations.
     */
    private void correctForHoleArtifacts1_2(Set<PairInt> points, int imageWidth,
        int imageHeight) {

        LinkedHashSet<PairInt> ones = new LinkedHashSet<PairInt>();
        LinkedHashSet<PairInt> zeroes = new LinkedHashSet<PairInt>();
        LinkedHashSet<PairInt> changeToZeroes = new LinkedHashSet<PairInt>();
        LinkedHashSet<PairInt> changeToOnes = new LinkedHashSet<PairInt>();

        // y's are inverted because the sketch origin is the top-left corner;
        // the trailing "- 1" keeps the offsets in the original sketch frame
        zeroes.add(new PairInt(-3, -1 - 1)); zeroes.add(new PairInt(-2, -1 - 1));
        zeroes.add(new PairInt(-2, -2 - 1)); zeroes.add(new PairInt(-1, 0 - 1));
        zeroes.add(new PairInt(-1, -2 - 1)); zeroes.add(new PairInt(0, 2 - 1));
        zeroes.add(new PairInt(1, 2 - 1));   zeroes.add(new PairInt(1, 1 - 1));
        zeroes.add(new PairInt(2, 1 - 1));

        ones.add(new PairInt(-2, 1 - 1));  ones.add(new PairInt(-2, 0 - 1));
        ones.add(new PairInt(-1, 1 - 1));  ones.add(new PairInt(-1, -1 - 1));
        ones.add(new PairInt(0, 1 - 1));   ones.add(new PairInt(0, -1 - 1));
        ones.add(new PairInt(1, 0 - 1));   ones.add(new PairInt(1, -1 - 1));

        changeToZeroes.add(new PairInt(-2, 0 - 1));
        changeToZeroes.add(new PairInt(-1, -1 - 1));
        changeToZeroes.add(new PairInt(-1, 1 - 1));
        changeToZeroes.add(new PairInt(0, 1 - 1));
        changeToZeroes.add(new PairInt(0, -1 - 1));
        changeToZeroes.add(new PairInt(1, 0 - 1));

        changeToOnes.add(new PairInt(-1, 0 - 1));
        changeToOnes.add(new PairInt(0, 0 - 1));

        int startValue = 1;

        replacePattern(points, imageWidth, imageHeight, zeroes, ones,
            changeToZeroes, changeToOnes, startValue);

        rotate90ThreeTimes(points, imageWidth, imageHeight, zeroes, ones,
            changeToZeroes, changeToOnes, startValue);
    }

    /**
     * Fills a 2-pixel hole inside a ring of set pixels, then erodes any of
     * the ring pixels that are no longer needed; applied in all four
     * 90-degree orientations.
     */
    private void correctForHoleArtifacts1_2_1(Set<PairInt> points,
        int imageWidth, int imageHeight) {

        /*
             #  #  #      2
             #  0  0  #   1
             #  #  #* #   0
            -3 -2 -1  0  1  2
        */
        LinkedHashSet<PairInt> ones = new LinkedHashSet<PairInt>();
        LinkedHashSet<PairInt> zeroes = new LinkedHashSet<PairInt>();

        // y's are inverted because the sketch origin is the top-left corner
        ones.add(new PairInt(-2, 0));  ones.add(new PairInt(-2, -1));
        ones.add(new PairInt(-1, 0));  ones.add(new PairInt(-1, -2));
        ones.add(new PairInt(0, -2));  ones.add(new PairInt(1, 0));
        ones.add(new PairInt(1, -1));  ones.add(new PairInt(1, -2));

        zeroes.add(new PairInt(-1, -1));
        zeroes.add(new PairInt(0, -1));

        int nRotations = 3;

        replaceAndRotateOnesIfNullable(points, imageWidth, imageHeight,
            zeroes, ones, nRotations);
    }

    /**
     * Fills a diagonal hole variant (mirror of {@code correctForHoleArtifacts1_2}):
     * fills the interior gap and thins the surrounding ring, in all four
     * 90-degree orientations.
     */
    private void correctForHoleArtifacts1_3(Set<PairInt> points, int imageWidth,
        int imageHeight) {

        LinkedHashSet<PairInt> ones = new LinkedHashSet<PairInt>();
        LinkedHashSet<PairInt> zeroes = new LinkedHashSet<PairInt>();
        LinkedHashSet<PairInt> changeToZeroes = new LinkedHashSet<PairInt>();
        LinkedHashSet<PairInt> changeToOnes = new LinkedHashSet<PairInt>();

        // y's are inverted because the sketch origin is the top-left corner
        zeroes.add(new PairInt(-3, 0 - 1));  zeroes.add(new PairInt(-3, -1 - 1));
        zeroes.add(new PairInt(-2, -1 - 1)); zeroes.add(new PairInt(-2, -2 - 1));
        zeroes.add(new PairInt(-1, 0 - 1));  zeroes.add(new PairInt(-1, -2 - 1));
        zeroes.add(new PairInt(0, 2 - 1));   zeroes.add(new PairInt(1, 2 - 1));
        zeroes.add(new PairInt(1, 1 - 1));   zeroes.add(new PairInt(2, 1 - 1));
        zeroes.add(new PairInt(2, 0 - 1));

        ones.add(new PairInt(-2, 0 - 1));  ones.add(new PairInt(-1, 1 - 1));
        ones.add(new PairInt(-1, -1 - 1)); ones.add(new PairInt(0, 1 - 1));
        ones.add(new PairInt(0, -1 - 1));  ones.add(new PairInt(1, 0 - 1));

        changeToZeroes.add(new PairInt(-2, 0 - 1));
        changeToZeroes.add(new PairInt(-1, -1 - 1));
        changeToZeroes.add(new PairInt(-1, 1 - 1));
        changeToZeroes.add(new PairInt(0, -1 - 1));
        changeToZeroes.add(new PairInt(0, 1 - 1));
        changeToZeroes.add(new PairInt(1, 0 - 1));

        changeToOnes.add(new PairInt(-2, 1 - 1));
        changeToOnes.add(new PairInt(-1, 0 - 1));
        changeToOnes.add(new PairInt(0, 0 - 1));
        changeToOnes.add(new PairInt(1, -1 - 1));

        int startValue = 1;

        replacePattern(points, imageWidth, imageHeight, zeroes, ones,
            changeToZeroes, changeToOnes, startValue);

        rotate90ThreeTimes(points, imageWidth, imageHeight, zeroes, ones,
            changeToZeroes, changeToOnes, startValue);
    }

    /**
     * Fills a steeply inclined hole variant: straightens the double-wide
     * diagonal by filling the gap pixels and erasing the redundant ring
     * pixels, in all four 90-degree orientations.
     */
    private void correctForHoleArtifacts1_4(Set<PairInt> points, int imageWidth,
        int imageHeight) {

        LinkedHashSet<PairInt> ones = new LinkedHashSet<PairInt>();
        LinkedHashSet<PairInt> zeroes = new LinkedHashSet<PairInt>();
        LinkedHashSet<PairInt> changeToZeroes = new LinkedHashSet<PairInt>();
        LinkedHashSet<PairInt> changeToOnes = new LinkedHashSet<PairInt>();

        // y's are inverted because the sketch origin is the top-left corner
        zeroes.add(new PairInt(-2, 1 - 1));  zeroes.add(new PairInt(-2, 0 - 1));
        zeroes.add(new PairInt(-1, 2 - 1));  zeroes.add(new PairInt(-1, 1 - 1));
        zeroes.add(new PairInt(0, 2 - 1));   zeroes.add(new PairInt(0, -1 - 1));
        zeroes.add(new PairInt(0, -3 - 1));  zeroes.add(new PairInt(1, -2 - 1));
        zeroes.add(new PairInt(1, -3 - 1));  zeroes.add(new PairInt(2, -1 - 1));
        zeroes.add(new PairInt(2, -2 - 1));

        ones.add(new PairInt(-1, 0 - 1));  ones.add(new PairInt(-1, -1 - 1));
        ones.add(new PairInt(-1, -2 - 1)); ones.add(new PairInt(0, 1 - 1));
        ones.add(new PairInt(0, -2 - 1));  ones.add(new PairInt(1, 0 - 1));
        ones.add(new PairInt(1, -1 - 1));

        changeToZeroes.add(new PairInt(-1, 0 - 1));
        changeToZeroes.add(new PairInt(-1, -1 - 1));
        changeToZeroes.add(new PairInt(0, 1 - 1));
        changeToZeroes.add(new PairInt(0, -2 - 1));
        changeToZeroes.add(new PairInt(1, 0 - 1));
        changeToZeroes.add(new PairInt(1, -1 - 1));

        changeToOnes.add(new PairInt(0, -1 - 1));
        changeToOnes.add(new PairInt(0, 0 - 1));
        changeToOnes.add(new PairInt(1, 1 - 1));

        int startValue = 1;

        replacePattern(points, imageWidth, imageHeight, zeroes, ones,
            changeToZeroes, changeToOnes, startValue);

        rotate90ThreeTimes(points, imageWidth, imageHeight, zeroes, ones,
            changeToZeroes, changeToOnes, startValue);
    }

    /**
     * Fills the single hole in a closed 3x3 ring:
     * <pre>
     *     #  #  #      2
     *     #  0  #      1
     *     #* #          0
     *    -1  0  1
     * </pre>
     * The center 0 is set to 1 and each ring pixel is erased if nullable;
     * applied in one orientation only (the pattern is symmetric enough
     * that no rotations are needed).
     */
    private void correctForHoleArtifacts0(Set<PairInt> points, int imageWidth,
        int imageHeight) {

        Set<PairInt> ones = new HashSet<PairInt>();
        Set<PairInt> zeroes = new HashSet<PairInt>();

        // y's are inverted because the sketch origin is the top-left corner
        ones.add(new PairInt(-1, 0));  ones.add(new PairInt(-1, -1));
        ones.add(new PairInt(-1, -2)); ones.add(new PairInt(0, -2));
        ones.add(new PairInt(1, 0));   ones.add(new PairInt(1, -1));
        ones.add(new PairInt(1, -2));

        zeroes.add(new PairInt(0, -1));

        int nRotations = 0;

        replaceAndRotateOnesIfNullable(points, imageWidth, imageHeight,
            zeroes, ones, nRotations);
    }

    /**
     * Fills the hole in a 3x3 ring that is open at one corner; applied in
     * all four 90-degree orientations.
     */
    private void correctForHoleArtifacts0_4(Set<PairInt> points, int imageWidth,
        int imageHeight) {

        Set<PairInt> ones = new HashSet<PairInt>();
        Set<PairInt> zeroes = new HashSet<PairInt>();

        // y's are inverted because the sketch origin is the top-left corner
        ones.add(new PairInt(-1, 0));  ones.add(new PairInt(-1, -1));
        ones.add(new PairInt(-1, -2)); ones.add(new PairInt(0, -2));
        ones.add(new PairInt(1, 0));   ones.add(new PairInt(1, -1));

        zeroes.add(new PairInt(0, -1));

        int nRotations = 3;

        replaceAndRotateOnesIfNullable(points, imageWidth, imageHeight,
            zeroes, ones, nRotations);
    }

    /**
     * Fills a diagonal staircase of holes (three 0's on the anti-diagonal
     * inside a thick band of set pixels); applied in all four 90-degree
     * orientations.
     */
    private void correctForHoleArtifacts0_1(Set<PairInt> points, int imageWidth,
        int imageHeight) {

        Set<PairInt> ones = new HashSet<PairInt>();
        Set<PairInt> zeroes = new HashSet<PairInt>();

        // y's are inverted because the sketch origin is the top-left corner
        ones.add(new PairInt(-3, 2));  ones.add(new PairInt(-3, 1));
        ones.add(new PairInt(-3, 0));  ones.add(new PairInt(-2, 2));
        ones.add(new PairInt(-2, 0));  ones.add(new PairInt(-2, -1));
        ones.add(new PairInt(-1, 2));  ones.add(new PairInt(-1, 1));
        ones.add(new PairInt(-1, -1)); ones.add(new PairInt(-1, -2));
        ones.add(new PairInt(0, 1));   ones.add(new PairInt(0, -2));
        ones.add(new PairInt(1, 0));   ones.add(new PairInt(1, -1));
        ones.add(new PairInt(1, -2));

        zeroes.add(new PairInt(-2, 1));
        zeroes.add(new PairInt(-1, 0));
        zeroes.add(new PairInt(0, -1));

        int nRotations = 3;

        replaceAndRotateOnesIfNullable(points, imageWidth, imageHeight,
            zeroes, ones, nRotations);
    }

    /**
     * Fills a shorter diagonal staircase of holes (two 0's on the
     * anti-diagonal inside a thick band); applied in all four 90-degree
     * orientations.
     */
    private void correctForHoleArtifacts0_2(Set<PairInt> points, int imageWidth,
        int imageHeight) {

        Set<PairInt> ones = new HashSet<PairInt>();
        Set<PairInt> zeroes = new HashSet<PairInt>();

        // y's are inverted because the sketch origin is the top-left corner
        ones.add(new PairInt(-2, 1));  ones.add(new PairInt(-2, 0));
        ones.add(new PairInt(-2, -1)); ones.add(new PairInt(-1, 1));
        ones.add(new PairInt(-1, -1)); ones.add(new PairInt(-1, -2));
        ones.add(new PairInt(0, 1));   ones.add(new PairInt(0, -2));
        ones.add(new PairInt(1, 0));   ones.add(new PairInt(1, -1));
        ones.add(new PairInt(1, -2));

        zeroes.add(new PairInt(-1, 0));
        zeroes.add(new PairInt(0, -1));

        int nRotations = 3;

        replaceAndRotateOnesIfNullable(points, imageWidth, imageHeight,
            zeroes, ones, nRotations);
    }

    /**
     * Shared worker for the hole-artifact patterns: for every set pixel
     * whose neighborhood matches (all 'ones' offsets set, all 'zeroes'
     * offsets unset), the zero positions are filled, then each of the
     * 'ones' pixels is erased if the ErosionFilter reports it can be
     * removed without disconnecting a line.  The match/replace pass is
     * repeated for {@code nRotations} additional 90-degree orientations
     * (x-flip, then y-flip, then x-flip again).
     * <p>
     * NOTE: the {@code zeroes} and {@code ones} sets are mutated in place
     * by the orientation flips, so callers should not reuse them after
     * this call when {@code nRotations > 0}.
     *
     * @param points set pixels of the image; modified in place
     * @param imageWidth image width used for bounds checks
     * @param imageHeight image height used for bounds checks
     * @param zeroes offsets that must be unset for a match
     * @param ones offsets that must be set for a match
     * @param nRotations number of additional 90-degree orientations (0..3)
     */
    private void replaceAndRotateOnesIfNullable(Set<PairInt> points,
        int imageWidth, int imageHeight, Set<PairInt> zeroes,
        Set<PairInt> ones, int nRotations) {

        ErosionFilter erosionFilter = new ErosionFilter();

        int w = imageWidth;
        int h = imageHeight;

        for (int nRot = 0; nRot <= nRotations; nRot++) {

            // cumulative flips walk through the four orientations
            switch (nRot) {
                case 1:
                    reverseXs(zeroes, ones);
                    break;
                case 2:
                    reverseYs(zeroes, ones);
                    break;
                case 3:
                    reverseXs(zeroes, ones);
                    break;
                default:
                    break;
            }

            Set<PairInt> tmpPointsRemoved = new HashSet<PairInt>();
            Set<PairInt> tmpPointsAdded = new HashSet<PairInt>();

            for (PairInt p : points) {

                int col = p.getX();
                int row = p.getY();

                // skip the center if it was already erased in this pass
                if (tmpPointsRemoved.contains(p)) {
                    continue;
                }

                // require every 'ones' offset to currently be set
                boolean foundPattern = true;
                for (PairInt p2 : ones) {
                    int x = col + p2.getX();
                    int y = row + p2.getY();
                    if ((x < 0) || (y < 0) || (x > (w - 1)) || (y > (h - 1))) {
                        foundPattern = false;
                        break;
                    }
                    PairInt p3 = new PairInt(x, y);
                    if (tmpPointsRemoved.contains(p3) ||
                        (!points.contains(p3) && !tmpPointsAdded.contains(p3))) {
                        foundPattern = false;
                        break;
                    }
                }
                if (!foundPattern) {
                    continue;
                }

                // require every 'zeroes' offset to currently be unset
                for (PairInt p2 : zeroes) {
                    int x = col + p2.getX();
                    int y = row + p2.getY();
                    if ((x < 0) || (y < 0) || (x > (w - 1)) || (y > (h - 1))) {
                        foundPattern = false;
                        break;
                    }
                    PairInt p3 = new PairInt(x, y);
                    if (!tmpPointsRemoved.contains(p3) &&
                        (points.contains(p3) || tmpPointsAdded.contains(p3))) {
                        foundPattern = false;
                        break;
                    }
                }
                if (!foundPattern) {
                    continue;
                }

                // change the zeroes to ones; bounds were checked above
                for (PairInt p2 : zeroes) {
                    int x = col + p2.getX();
                    int y = row + p2.getY();
                    tmpPointsAdded.add(new PairInt(x, y));
                }

                // erase each surrounding 1 that can be nulled without
                // disconnecting a line; ErosionFilter records removals in
                // tmpPointsRemoved as a side effect of process()
                for (PairInt p2 : ones) {
                    int x = col + p2.getX();
                    int y = row + p2.getY();
                    erosionFilter.process(new PairInt(x, y), points,
                        tmpPointsAdded, tmpPointsRemoved, w, h);
                }
            }

            // apply this orientation's changes before the next flip
            for (PairInt p2 : tmpPointsRemoved) {
                points.remove(p2);
            }
            for (PairInt p2 : tmpPointsAdded) {
                points.add(p2);
            }
        }
    }

    /**
     * Fills the hole in a diagonal open ring; applied in all four
     * 90-degree orientations.
     */
    private void correctForHoleArtifacts0_3(Set<PairInt> points, int imageWidth,
        int imageHeight) {

        Set<PairInt> ones = new HashSet<PairInt>();
        Set<PairInt> zeroes = new HashSet<PairInt>();

        // y's are inverted because the sketch origin is the top-left corner
        ones.add(new PairInt(-2, 0));  ones.add(new PairInt(-1, 1));
        ones.add(new PairInt(-1, -1)); ones.add(new PairInt(0, -2));
        ones.add(new PairInt(1, -1));  ones.add(new PairInt(1, -2));

        zeroes.add(new PairInt(-1, 0));
        zeroes.add(new PairInt(0, -1));

        int nRotations = 3;

        replaceAndRotateOnesIfNullable(points, imageWidth, imageHeight,
            zeroes, ones, nRotations);
    }
}
src/algorithms/imageProcessing/PostLineThinnerCorrections.java
package algorithms.imageProcessing; import algorithms.util.PairInt; import algorithms.util.ResourceFinder; import java.io.IOException; import java.util.HashSet; import java.util.LinkedHashSet; import java.util.Set; import java.util.logging.Logger; /** * * @author nichole */ public class PostLineThinnerCorrections { private Logger log = Logger.getLogger(this.getClass().getName()); public void correctForArtifacts(GreyscaleImage input) { ImageProcessor imageProcessor = new ImageProcessor(); int w = input.getWidth(); int h = input.getHeight(); Set<PairInt> points = imageProcessor.readNonZeroPixels(input); //TODO: reduce the number of patterns here if possible // and make sure that true corners aren't drastically reduced to less // usable smaller corners try { String dirPath = ResourceFinder.findDirectory("bin"); ImageIOHelper.writeOutputImage(dirPath + "/nonZero.png", input); } catch (IOException e){} correctForHoleArtifacts0(points, w, h); correctForHoleArtifacts0_1(points, w, h); correctForHoleArtifacts0_2(points, w, h); correctForHoleArtifacts1(points, w, h); correctForHoleArtifacts1_2(points, w, h); correctForHoleArtifacts1_3(points, w, h); correctForHoleArtifacts1_4(points, w, h); correctForZigZag0(points, w, h); correctForZigZag0Alt(points, w, h); correctForZigZag1(points, w, h); correctForZigZag2(points, w, h); correctForZigZag1Alt(points, w, h); correctForZigZag3(points, w, h); correctForZigZag5(points, w, h); correctForZigZag6(points, w, h); correctForWs(points, w, h); // TODO: revisit, not sure this is always an artifact: correctForLine0(points, w, h); // better edge extraction at the expense of unsharpening true corners: correctForLs(points, w, h); correctForLs2(points, w, h); correctForZigZag1(points, w, h); correctForSpurs(points, w, h); correctForZigZag7(points, w, h); imageProcessor.writeAsBinaryToImage(input, points); } private void correctForZigZag0(Set<PairInt> points, int imageWidth, int imageHeight) { /* looking for pattern 0 0 2 0 # # 1 #* #< 0 0 # 
0 0 -1 -1 0 1 2 and removing the topmost left #'s */ LinkedHashSet<PairInt> ones = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> zeroes = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> changeToZeroes = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> changeToOnes = new LinkedHashSet<PairInt>(); // y's are inverted here because sketch above is top left is (0,0) zeroes.add(new PairInt(0, -1)); zeroes.add(new PairInt(0, -2)); zeroes.add(new PairInt(1, 1)); zeroes.add(new PairInt(1, -2)); zeroes.add(new PairInt(2, 0)); zeroes.add(new PairInt(2, 1)); ones.add(new PairInt(-1, 1)); ones.add(new PairInt(1, -1)); ones.add(new PairInt(1, 0)); ones.add(new PairInt(2, -1)); changeToZeroes.add(new PairInt(1, 0)); int startValue = 1; replacePattern(points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startValue); rotate90ThreeTimes(points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startValue); } private void correctForZigZag0Alt(Set<PairInt> points, int imageWidth, int imageHeight) { /* keep 0 0 # 0 2 0 0 # 0 0 1 0 #* #< 0 0 0 0 # 0 0 -1 0 # 0 -2 -1 0 1 2 3 */ LinkedHashSet<PairInt> ones = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> zeroes = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> changeToZeroes = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> changeToOnes = new LinkedHashSet<PairInt>(); // y's are inverted here because sketch above is top left is (0,0) zeroes.add(new PairInt(-1, 2)); zeroes.add(new PairInt(-1, 1)); zeroes.add(new PairInt(-1, 0)); zeroes.add(new PairInt(-1, -1)); zeroes.add(new PairInt(0, -1)); zeroes.add(new PairInt(0, -2)); zeroes.add(new PairInt(1, 1)); zeroes.add(new PairInt(1, 2)); zeroes.add(new PairInt(1, -2)); zeroes.add(new PairInt(2, 1)); zeroes.add(new PairInt(2, 0)); zeroes.add(new PairInt(2, -1)); zeroes.add(new PairInt(3, 0)); zeroes.add(new PairInt(3, -1)); zeroes.add(new PairInt(3, -2)); ones.add(new PairInt(0, 2)); ones.add(new PairInt(0, 1)); 
ones.add(new PairInt(1, 0)); ones.add(new PairInt(1, -1)); ones.add(new PairInt(2, -2)); changeToZeroes.add(new PairInt(1, 0)); int startValue = 1; replacePattern(points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startValue); rotate90ThreeTimes(points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startValue); } private void correctForZigZag5(Set<PairInt> points, int imageWidth, int imageHeight) { /* keep 0 # 0 2 0 # 0 0 1 0 # #*<0 0 0 0 # 0 -1 0 # -2 -2 -1 0 1 */ LinkedHashSet<PairInt> ones = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> zeroes = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> changeToZeroes = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> changeToOnes = new LinkedHashSet<PairInt>(); // y's are inverted here because sketch above is top left is (0,0) zeroes.add(new PairInt(-2, 1)); zeroes.add(new PairInt(-2, 0)); zeroes.add(new PairInt(-2, -1)); zeroes.add(new PairInt(-2, -2)); zeroes.add(new PairInt(-1, 1)); zeroes.add(new PairInt(0, 2)); zeroes.add(new PairInt(0, -1)); zeroes.add(new PairInt(0, -2)); zeroes.add(new PairInt(1, 1)); zeroes.add(new PairInt(1, 0)); zeroes.add(new PairInt(1, -1)); ones.add(new PairInt(-1, 0)); ones.add(new PairInt(-1, -1)); ones.add(new PairInt(-1, -2)); ones.add(new PairInt(0, 1)); ones.add(new PairInt(1, 2)); changeToZeroes.add(new PairInt(0, 0)); int startValue = 1; replacePattern(points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startValue); rotate90ThreeTimes(points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startValue); } private void correctForZigZag6(Set<PairInt> points, int imageWidth, int imageHeight) { /* 0 0 0 0 2 0 #< # # 1 # #* # 0 0 0 0 0 -1 -1 0 1 2 */ LinkedHashSet<PairInt> ones = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> zeroes = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> changeToZeroes = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> changeToOnes = new 
LinkedHashSet<PairInt>(); // y's are inverted here because sketch above is top left is (0,0) zeroes.add(new PairInt(0, 1)); zeroes.add(new PairInt(0, -1)); zeroes.add(new PairInt(0, -2)); zeroes.add(new PairInt(1, 1)); zeroes.add(new PairInt(1, -2)); zeroes.add(new PairInt(2, 1)); zeroes.add(new PairInt(2, 0)); zeroes.add(new PairInt(2, -2)); zeroes.add(new PairInt(3, -2)); ones.add(new PairInt(-1, 0)); ones.add(new PairInt(1, 0)); ones.add(new PairInt(1, -1)); ones.add(new PairInt(2, -1)); ones.add(new PairInt(3, -1)); changeToZeroes.add(new PairInt(1, -1)); int startValue = 1; replacePattern(points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startValue); rotate90ThreeTimes(points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startValue); } /** * using this one unfortunately reduces the sharpness of real corners, * but it allows clear point ordering when extracting edges from the * line thinned image. * * @param input */ private void correctForZigZag7(Set<PairInt> points, int imageWidth, int imageHeight) { /* # 0 # 0 1 # #*<0 0 # 0 0 -1 # -2 -1 0 1 2 */ LinkedHashSet<PairInt> ones = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> zeroes = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> changeToZeroes = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> changeToOnes = new LinkedHashSet<PairInt>(); // y's are inverted here because sketch above is top left is (0,0) zeroes.add(new PairInt(0, 1)); zeroes.add(new PairInt(1, 1)); zeroes.add(new PairInt(1, 0)); zeroes.add(new PairInt(1, -1)); zeroes.add(new PairInt(-1, -1)); ones.add(new PairInt(0, -1)); ones.add(new PairInt(-1, 0)); ones.add(new PairInt(-1, 1)); ones.add(new PairInt(-1, 2)); ones.add(new PairInt(0, -2)); changeToZeroes.add(new PairInt(0, 0)); int startValue = 1; replacePattern(points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startValue); rotate90ThreeTimes(points, imageWidth, imageHeight, zeroes, ones, 
changeToZeroes, changeToOnes, startValue); } /** * using this one unfortunately reduces the sharpness of real corners, * but it allows clear point ordering when extracting edges from the * line thinned image. * * @param input */ private void correctForLs(Set<PairInt> points, int imageWidth, int imageHeight) { /* 0 # 0 1 0 #* 0 0 0 #< # -1 0 0 0 -2 -1 0 1 2 */ LinkedHashSet<PairInt> ones = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> zeroes = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> changeToZeroes = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> changeToOnes = new LinkedHashSet<PairInt>(); // y's are inverted here because sketch above is top left is (0,0) zeroes.add(new PairInt(-1, 2)); zeroes.add(new PairInt(-1, 1)); zeroes.add(new PairInt(-1, 0)); zeroes.add(new PairInt(-1, -1)); zeroes.add(new PairInt(0, 2)); zeroes.add(new PairInt(1, 2)); zeroes.add(new PairInt(1, 0)); zeroes.add(new PairInt(1, -1)); ones.add(new PairInt(0, 1)); ones.add(new PairInt(0, -1)); ones.add(new PairInt(1, 1)); changeToZeroes.add(new PairInt(0, 1)); int startValue = 1; replacePattern(points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startValue); rotate90ThreeTimes(points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startValue); } private void correctForLs2(Set<PairInt> points, int imageWidth, int imageHeight) { /* 0 # 0 0 1 0 #*<# # 0 0 0 0 -1 -1 0 1 2 */ LinkedHashSet<PairInt> ones = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> zeroes = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> changeToZeroes = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> changeToOnes = new LinkedHashSet<PairInt>(); // y's are inverted here because sketch above is top left is (0,0) zeroes.add(new PairInt(-1, -1)); zeroes.add(new PairInt(-1, 0)); zeroes.add(new PairInt(-1, 1)); zeroes.add(new PairInt(0, 1)); zeroes.add(new PairInt(1, 1)); zeroes.add(new PairInt(1, -1)); zeroes.add(new PairInt(2, -1)); ones.add(new 
PairInt(0, -1)); ones.add(new PairInt(1, 0)); ones.add(new PairInt(2, 0)); changeToZeroes.add(new PairInt(0, 0)); int startValue = 1; replacePattern(points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startValue); rotate90ThreeTimes(points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startValue); } private void correctForWs(Set<PairInt> points, int imageWidth, int imageHeight) { /* 0 0 # 1 0 #*<# 0 # # 0 -1 -2 -1 0 1 2 */ LinkedHashSet<PairInt> ones = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> zeroes = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> changeToZeroes = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> changeToOnes = new LinkedHashSet<PairInt>(); // y's are inverted here because sketch above is top left is (0,0) zeroes.add(new PairInt(-1, 0)); zeroes.add(new PairInt(-1, -1)); zeroes.add(new PairInt(0, -1)); zeroes.add(new PairInt(1, 1)); ones.add(new PairInt(-1, 1)); ones.add(new PairInt(0, 1)); ones.add(new PairInt(1, 0)); ones.add(new PairInt(1, -1)); changeToZeroes.add(new PairInt(0, 0)); int startValue = 1; replacePattern(points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startValue); rotate90ThreeTimes(points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startValue); } private void correctForZigZag1(Set<PairInt> points, int imageWidth, int imageHeight) { /* looking for pattern # 3 0 # 2 0 0 # 0 1 0 #* #< 0 0 # 0 -1 -1 0 1 2 and removing the topmost left #'s */ LinkedHashSet<PairInt> ones = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> zeroes = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> changeToZeroes = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> changeToOnes = new LinkedHashSet<PairInt>(); // y's are inverted here because sketch above is top left is (0,0) zeroes.add(new PairInt(-1, 0)); zeroes.add(new PairInt(-1, -1)); zeroes.add(new PairInt(0, -1)); zeroes.add(new PairInt(0, -2)); zeroes.add(new 
PairInt(2, 0)); zeroes.add(new PairInt(2, -1)); zeroes.add(new PairInt(1, 1)); ones.add(new PairInt(-1, 1)); ones.add(new PairInt(1, 0)); ones.add(new PairInt(1, -1)); ones.add(new PairInt(2, -2)); ones.add(new PairInt(2, -3)); changeToZeroes.add(new PairInt(1, 0)); int startValue = 1; replacePattern(points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startValue); rotate90ThreeTimes(points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startValue); } private void correctForZigZag2(Set<PairInt> points, int imageWidth, int imageHeight) { /* looking for pattern 0 0 0 # 1 0 #* # 0 0 0 # 0 -1 # 0 -2 -2 -1 0 1 2 and removing the topmost left #'s */ LinkedHashSet<PairInt> ones = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> zeroes = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> changeToZeroes = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> changeToOnes = new LinkedHashSet<PairInt>(); // y's are inverted here because sketch above is top left is (0,0) zeroes.add(new PairInt(-1, 1)); zeroes.add(new PairInt(-1, 0)); zeroes.add(new PairInt(-1, -1)); zeroes.add(new PairInt(0, 2)); zeroes.add(new PairInt(0, -1)); zeroes.add(new PairInt(1, 1)); zeroes.add(new PairInt(1, -1)); zeroes.add(new PairInt(2, 0)); ones.add(new PairInt(-1, 2)); ones.add(new PairInt(0, 1)); ones.add(new PairInt(1, 0)); ones.add(new PairInt(2, -1)); changeToZeroes.add(new PairInt(0, 0)); int startValue = 1; replacePattern(points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startValue); rotate90ThreeTimes(points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startValue); } private void correctForZigZag1Alt(Set<PairInt> points, int imageWidth, int imageHeight) { /* looking for pattern 0 0 # # 1 0 #* #< 0 0 0 # 0 0 -1 # 0 -2 -2 -1 0 1 2 */ LinkedHashSet<PairInt> ones = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> zeroes = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> 
changeToZeroes = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> changeToOnes = new LinkedHashSet<PairInt>(); // y's are inverted here because sketch above is top left is (0,0) zeroes.add(new PairInt(1, 1)); zeroes.add(new PairInt(0, 2)); zeroes.add(new PairInt(-1, 1)); zeroes.add(new PairInt(-1, 0)); zeroes.add(new PairInt(-1, -1)); zeroes.add(new PairInt(0, -1)); zeroes.add(new PairInt(2, 1)); zeroes.add(new PairInt(2, 0)); ones.add(new PairInt(0, -1)); // NOT YET REV ones.add(new PairInt(1, 0)); ones.add(new PairInt(1, 1)); ones.add(new PairInt(-1, -2)); ones.add(new PairInt(2, 1)); changeToZeroes.add(new PairInt(1, 0)); int startValue = 1; replacePattern(points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startValue); rotate90ThreeTimes(points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startValue); } private void correctForZigZag3(Set<PairInt> points, int imageWidth, int imageHeight) { /* looking for pattern 0 # 3 0 # 2 0 #< # 0 1 0 #* 0 0 # 0 0 -1 -2 -1 0 1 2 and removing the topmost left #'s */ LinkedHashSet<PairInt> ones = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> zeroes = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> changeToZeroes = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> changeToOnes = new LinkedHashSet<PairInt>(); // y's are inverted here because sketch above is top left is (0,0) zeroes.add(new PairInt(-1, 0)); zeroes.add(new PairInt(-1, -1)); zeroes.add(new PairInt(0, 1)); zeroes.add(new PairInt(0, -2)); zeroes.add(new PairInt(1, 1)); zeroes.add(new PairInt(1, 0)); zeroes.add(new PairInt(1, -3)); zeroes.add(new PairInt(2, -1)); ones.add(new PairInt(-1, 1)); ones.add(new PairInt(0, -1)); ones.add(new PairInt(1, -1)); ones.add(new PairInt(1, -2)); ones.add(new PairInt(2, -3)); changeToZeroes.add(new PairInt(0, -1)); int startValue = 1; replacePattern(points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startValue); rotate90ThreeTimes(points, 
imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startValue); } /** * using this one unfortunately reduces the sharpness of real corners, * but it allows clear point ordering when extracting edges from the * line thinned image. * * @param input */ private void correctForRemaining(Set<PairInt> points, int imageWidth, int imageHeight) { /* # 1 0 #* 0 0 0 #< # -1 0 0 0 -2 -1 0 1 2 */ LinkedHashSet<PairInt> ones = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> zeroes = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> changeToZeroes = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> changeToOnes = new LinkedHashSet<PairInt>(); // y's are inverted here because sketch above is top left is (0,0) zeroes.add(new PairInt(-1, 2)); zeroes.add(new PairInt(-1, 1)); zeroes.add(new PairInt(-1, 0)); zeroes.add(new PairInt(0, 2)); zeroes.add(new PairInt(1, 2)); zeroes.add(new PairInt(1, 0)); ones.add(new PairInt(-1, -1)); ones.add(new PairInt(0, 1)); ones.add(new PairInt(1, 1)); changeToZeroes.add(new PairInt(0, 1)); int startValue = 1; replacePattern(points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startValue); rotate90ThreeTimes(points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startValue); } private void reverseXs( final Set<PairInt> zeroes, final Set<PairInt> ones, Set<PairInt> changeToZeroes, final Set<PairInt> changeToOnes) { // ----- change the sign of x to handle other direction ----- for (PairInt p : zeroes) { p.setX(-1 * p.getX()); } for (PairInt p : ones) { p.setX(-1 * p.getX()); } for (PairInt p : changeToZeroes) { p.setX(-1 * p.getX()); } for (PairInt p : changeToOnes) { p.setX(-1 * p.getX()); } } private void reverseXs(final Set<PairInt> zeroes, final Set<PairInt> ones) { // ----- change the sign of x to handle other direction ----- for (PairInt p : zeroes) { p.setX(-1 * p.getX()); } for (PairInt p : ones) { p.setX(-1 * p.getX()); } } private void reverseYs(final Set<PairInt> 
zeroes, final Set<PairInt> ones, Set<PairInt> changeToZeroes, final Set<PairInt> changeToOnes) { // ----- change the sign of y ----- for (PairInt p : zeroes) { p.setY(-1 * p.getY()); } for (PairInt p : ones) { p.setY(-1 * p.getY()); } for (PairInt p : changeToZeroes) { p.setY(-1 * p.getY()); } for (PairInt p : changeToOnes) { p.setY(-1 * p.getY()); } } private void reverseYs(final Set<PairInt> zeroes, final Set<PairInt> ones) { // ----- change the sign of y ----- for (PairInt p : zeroes) { p.setY(-1 * p.getY()); } for (PairInt p : ones) { p.setY(-1 * p.getY()); } } private void rotate90ThreeTimes( Set<PairInt> points, int imageWidth, int imageHeight, final LinkedHashSet<PairInt> zeroes, final LinkedHashSet<PairInt> ones, LinkedHashSet<PairInt> changeToZeroes, final LinkedHashSet<PairInt> changeToOnes, final int startCenterValue) { // ----- change the sign of x to handle other direction ----- reverseXs(zeroes, ones, changeToZeroes, changeToOnes); replacePattern( points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startCenterValue); // ----- change the sign of y to handle other direction ----- reverseYs(zeroes, ones, changeToZeroes, changeToOnes); replacePattern( points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startCenterValue); // ----- change the sign of x to handle another direction ----- reverseXs(zeroes, ones, changeToZeroes, changeToOnes); replacePattern( points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startCenterValue); } private void replacePattern( Set<PairInt> points, int imageWidth, int imageHeight, final LinkedHashSet<PairInt> zeroes, final LinkedHashSet<PairInt> ones, final LinkedHashSet<PairInt> changeToZeroes, final LinkedHashSet<PairInt> changeToOnes, final int startCenterValue) { int w = imageWidth; int h = imageHeight; Set<PairInt> tmpPointsRemoved = new HashSet<PairInt>(); Set<PairInt> tmpPointsAdded = new HashSet<PairInt>(); for (PairInt p : points) { boolean 
isNotPresent = tmpPointsRemoved.contains(p); if (startCenterValue == 0) { // skip if point is in set if (!isNotPresent) { continue; } } else if (isNotPresent) { // skip if point is not in set continue; } int col = p.getX(); int row = p.getY(); boolean foundPattern = true; for (PairInt p2 : zeroes) { int x = col + p2.getX(); int y = row + p2.getY(); if ((x < 0) || (y < 0) || (x > (w - 1)) || (y > (h - 1))) { //TODO: revisit this foundPattern = false; break; } PairInt p3 = new PairInt(x, y); if (!tmpPointsRemoved.contains(p3) && (points.contains(p3) || tmpPointsAdded.contains(p3))) { foundPattern = false; break; } } if (!foundPattern) { continue; } for (PairInt p2 : ones) { int x = col + p2.getX(); int y = row + p2.getY(); if ((x < 0) || (y < 0) || (x > (w - 1)) || (y > (h - 1))) { foundPattern = false; break; } PairInt p3 = new PairInt(x, y); if (tmpPointsRemoved.contains(p3) || (!points.contains(p3) && !tmpPointsAdded.contains(p3))) { foundPattern = false; break; } } if (!foundPattern) { continue; } for (PairInt p2 : changeToZeroes) { int x = col + p2.getX(); int y = row + p2.getY(); if ((x < 0) || (y < 0) || (x > (w - 1)) || (y > (h - 1))) { continue; } tmpPointsRemoved.add(new PairInt(x, y)); } for (PairInt p2 : changeToOnes) { int x = col + p2.getX(); int y = row + p2.getY(); if ((x < 0) || (y < 0) || (x > (w - 1)) || (y > (h - 1))) { continue; } tmpPointsAdded.add(new PairInt(x, y)); } } for (PairInt p2 : tmpPointsRemoved) { points.remove(p2); } for (PairInt p2 : tmpPointsAdded) { points.add(p2); } } private void debugPrint(GreyscaleImage input, int xStart, int xStop, int yStart, int yStop) { StringBuilder sb = new StringBuilder(); for (int row = yStart; row <= yStop; row++) { sb.append(String.format("%3d: ", row)); for (int col = xStart; col <= xStop; col++) { sb.append(String.format(" %3d ", input.getValue(col, row))); } sb.append(String.format("\n")); } System.out.println(sb.toString()); } /** * removes a hole artifact in inclined lines. 
note that this should * probably be adjusted for gaussian convolution combined radius * if used outside of the gradientXY image produced by the * CannyEdgeFilter. * @param input */ private void correctForHoleArtifacts3(Set<PairInt> points, int imageWidth, int imageHeight) { /* looking for pattern 0 0 0 0 0 0 3 4 0 0 0 0 1 1 1 2 3 0 0 0 1 0 1 0 1 2 0 0 1 0* 1 1 0 0 1 0 1 0 1** 1 0 0 -1 0 0 1 1 1 0 0 0 -2 0 1 0 0 0 0 0 -3 -3 -2 -1 0 1 2 3 and removing the topmost left #'s */ LinkedHashSet<PairInt> ones = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> zeroes = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> changeToZeroes = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> changeToOnes = new LinkedHashSet<PairInt>(); // y's are inverted here because sketch above is top left is (0,0) zeroes.add(new PairInt(-1, -1 - 1)); zeroes.add(new PairInt(-1, 1 - 1)); zeroes.add(new PairInt(1, -1 - 1)); zeroes.add(new PairInt(0, -2 - 1)); zeroes.add(new PairInt(1, 2 - 1)); zeroes.add(new PairInt(2, 2 - 1)); zeroes.add(new PairInt(2, 1 - 1)); zeroes.add(new PairInt(0, -3 - 1)); zeroes.add(new PairInt(1, -3 - 1)); zeroes.add(new PairInt(-1, -2 - 1)); zeroes.add(new PairInt(-1, -3 - 1)); zeroes.add(new PairInt(2, -3 - 1)); zeroes.add(new PairInt(-2, -3 - 1)); zeroes.add(new PairInt(-2, -2 - 1)); zeroes.add(new PairInt(-2, -1 - 1)); zeroes.add(new PairInt(-2, 0 - 1)); zeroes.add(new PairInt(-1, 3 - 1)); zeroes.add(new PairInt(0, 3 - 1)); zeroes.add(new PairInt(1, 3 - 1)); zeroes.add(new PairInt(2, 3 - 1)); zeroes.add(new PairInt(3, 3 - 1)); zeroes.add(new PairInt(3, 2 - 1)); zeroes.add(new PairInt(3, 1 - 1)); zeroes.add(new PairInt(3, 0 - 1)); zeroes.add(new PairInt(3, -1 - 1)); zeroes.add(new PairInt(-3, -3 - 1)); zeroes.add(new PairInt(-3, 2 - 1)); zeroes.add(new PairInt(-3, -1 - 1)); zeroes.add(new PairInt(-3, 0 - 1)); zeroes.add(new PairInt(-3, -1 - 1)); zeroes.add(new PairInt(-3, -2 - 1)); zeroes.add(new PairInt(-3, -3 - 1)); ones.add(new PairInt(0, 1 - 1)); 
ones.add(new PairInt(0, 2 - 1)); ones.add(new PairInt(0, -1 - 1)); ones.add(new PairInt(-1, 0 - 1)); ones.add(new PairInt(-1, 2 - 1)); ones.add(new PairInt(1, -2 - 1)); ones.add(new PairInt(1, 0 - 1)); ones.add(new PairInt(1, 1 - 1)); ones.add(new PairInt(2, -2 - 1)); ones.add(new PairInt(2, -1 - 1)); ones.add(new PairInt(2, 0 - 1)); ones.add(new PairInt(3, -2 - 1)); ones.add(new PairInt(-2, 1 - 1)); ones.add(new PairInt(-2, 2 - 1)); ones.add(new PairInt(-2, 3 - 1)); changeToZeroes.add(new PairInt(-2, 2 - 1)); changeToZeroes.add(new PairInt(-2, 1 - 1)); changeToZeroes.add(new PairInt(-1, 0 - 1)); changeToZeroes.add(new PairInt(0, -1 - 1)); changeToZeroes.add(new PairInt(1, -2 - 1)); changeToZeroes.add(new PairInt(2, -2 - 1)); changeToZeroes.add(new PairInt(2, 0 - 1)); changeToZeroes.add(new PairInt(1, 1 - 1)); changeToZeroes.add(new PairInt(0, 2 - 1)); int startValue = 1; replacePattern(points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startValue); rotate90ThreeTimes(points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startValue); } /** * removes a hole artifact in inclined lines. note that this should * probably be adjusted for gaussian convolution combined radius * if used outside of the gradientXY image produced by the * CannyEdgeFilter. 
* @param input */ private void correctForHoleArtifacts2(Set<PairInt> points, int imageWidth, int imageHeight) { /* looking for pattern 0 0 0 0 0 0 3 4 0 0 0 1 1 1 2 3 0 0 1 0 1 0 1 2 0 1 0 1 1 0 0 1 0 1 1* 1 0 0 -1 0 0 1 0 0 0 0 -2 -1 -2 -1 0 1 2 3 */ LinkedHashSet<PairInt> ones = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> zeroes = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> changeToZeroes = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> changeToOnes = new LinkedHashSet<PairInt>(); // y's are inverted here because sketch above is top left is (0,0) zeroes.add(new PairInt(0, 2 - 1)); zeroes.add(new PairInt(0, -2 - 1)); zeroes.add(new PairInt(-1, -1 - 1)); zeroes.add(new PairInt(-1, -2 - 1)); zeroes.add(new PairInt(-1, -3 - 1)); zeroes.add(new PairInt(0, -3 - 1)); zeroes.add(new PairInt(1, -3 - 1)); zeroes.add(new PairInt(1, -1 - 1)); zeroes.add(new PairInt(1, 2 - 1)); zeroes.add(new PairInt(2, -3 - 1)); zeroes.add(new PairInt(2, 1 - 1)); zeroes.add(new PairInt(2, 2 - 1)); zeroes.add(new PairInt(-2, -3 - 1)); zeroes.add(new PairInt(-2, -2 - 1)); zeroes.add(new PairInt(-2, -1 - 1)); zeroes.add(new PairInt(-2, 0 - 1)); zeroes.add(new PairInt(-2, 1 - 1)); zeroes.add(new PairInt(-2, 2 - 1)); zeroes.add(new PairInt(3, -3 - 1)); zeroes.add(new PairInt(3, -1 - 1)); zeroes.add(new PairInt(3, 0 - 1)); zeroes.add(new PairInt(3, 1 - 1)); zeroes.add(new PairInt(3, 2 - 1)); ones.add(new PairInt(0, -1 - 1)); ones.add(new PairInt(0, 1 - 1)); ones.add(new PairInt(-1, 0 - 1)); ones.add(new PairInt(-1, 1 - 1)); ones.add(new PairInt(-1, 2 - 1)); ones.add(new PairInt(1, -2 - 1)); ones.add(new PairInt(1, 0 - 1)); ones.add(new PairInt(1, 1 - 1)); ones.add(new PairInt(2, -2 - 1)); ones.add(new PairInt(2, -1 - 1)); ones.add(new PairInt(2, 0 - 1)); ones.add(new PairInt(3, -2 - 1)); changeToZeroes.add(new PairInt(-1, 0 - 1)); changeToZeroes.add(new PairInt(-1, 1 - 1)); changeToZeroes.add(new PairInt(0, -1 - 1)); changeToZeroes.add(new PairInt(1, -2 - 1)); 
changeToZeroes.add(new PairInt(1, 1 - 1)); changeToZeroes.add(new PairInt(2, -2 - 1)); changeToZeroes.add(new PairInt(2, 0 - 1)); int startValue = 1; replacePattern(points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startValue); rotate90ThreeTimes(points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startValue); } private void correctForLine0(Set<PairInt> points, int imageWidth, int imageHeight) { /* looking for pattern 0 # 0 2 0 # 0 0 1 0 0 # 0 0 0 #* 0 0 -1 0 # 0 -1 0 1 2 */ LinkedHashSet<PairInt> ones = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> zeroes = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> changeToZeroes = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> changeToOnes = new LinkedHashSet<PairInt>(); // y's are inverted here because sketch above is top left is (0,0) zeroes.add(new PairInt(-1, 2 - 1)); zeroes.add(new PairInt(-1, 1 - 1)); zeroes.add(new PairInt(-1, 0 - 1)); zeroes.add(new PairInt(-1, -1 - 1)); zeroes.add(new PairInt(-1, -2 - 1)); zeroes.add(new PairInt(1, 2 - 1)); zeroes.add(new PairInt(1, 1 - 1)); zeroes.add(new PairInt(1, -1 - 1)); zeroes.add(new PairInt(1, -2 - 1)); zeroes.add(new PairInt(2, 1 - 1)); zeroes.add(new PairInt(2, 0 - 1)); zeroes.add(new PairInt(2, -1 - 1)); ones.add(new PairInt(0, 2 - 1)); ones.add(new PairInt(0, 1 - 1)); ones.add(new PairInt(0, -1 - 1)); ones.add(new PairInt(0, -2 - 1)); ones.add(new PairInt(1, 0 - 1)); changeToZeroes.add(new PairInt(1, 0 - 1)); changeToOnes.add(new PairInt(0, 0 - 1)); int startValue = 1; replacePattern(points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startValue); rotate90ThreeTimes(points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startValue); } /** * possibly decreases sharpness of a true diagonal edge at the expense of * making a better line width for the edge extractor. 
* @param input */ private void correctForSpurs(Set<PairInt> points, int imageWidth, int imageHeight) { /* looking for pattern # # 0 1 0 #*<0 0 0 0 0 -1 -1 0 1 2 */ LinkedHashSet<PairInt> ones = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> zeroes = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> changeToZeroes = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> changeToOnes = new LinkedHashSet<PairInt>(); // y's are inverted here because sketch above is top left is (0,0) zeroes.add(new PairInt(-1, 1)); zeroes.add(new PairInt(-1, 0)); zeroes.add(new PairInt(0, 1)); zeroes.add(new PairInt(1, 1)); zeroes.add(new PairInt(1, -1)); ones.add(new PairInt(-1, -1)); ones.add(new PairInt(0, -1)); changeToZeroes.add(new PairInt(0, 0)); int startValue = 1; replacePattern(points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startValue); rotate90ThreeTimes(points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startValue); } protected void correctForHoleArtifacts1(Set<PairInt> points, int imageWidth, int imageHeight) { /* 1 2 1 0 1 1 1* 0 -1 -2 -1 0 1 2 */ ErosionFilter erosionFilter = new ErosionFilter(); Set<PairInt> ones = new HashSet<PairInt>(); Set<PairInt> zeroes = new HashSet<PairInt>(); // y's are inverted here because sketch above is top left is (0,0) ones.add(new PairInt(-1, -1)); ones.add(new PairInt(0, -2)); ones.add(new PairInt(1, -1)); zeroes.add(new PairInt(0, -1)); int w = imageWidth; int h = imageHeight; Set<PairInt> tmpPointsRemoved = new HashSet<PairInt>(); Set<PairInt> tmpPointsAdded = new HashSet<PairInt>(); for (PairInt p : points) { // test for the pattern of ones and zeroes in the neighbors, // then make a temporary set of center to zero and test if each of // the four sorrounding can be deleted int col = p.getX(); int row = p.getY(); boolean foundPattern = true; for (PairInt p2 : ones) { int x = col + p2.getX(); int y = row + p2.getY(); if ((x < 0) || (y < 0) || (x > (w - 1)) || (y > (h - 1))) { 
foundPattern = false; break; } PairInt p3 = new PairInt(x, y); if (tmpPointsRemoved.contains(p3) || (!points.contains(p3) && !tmpPointsAdded.contains(p3))) { foundPattern = false; break; } } if (!foundPattern) { continue; } for (PairInt p2 : zeroes) { int x = col + p2.getX(); int y = row + p2.getY(); if ((x < 0) || (y < 0) || (x > (w - 1)) || (y > (h - 1))) { foundPattern = false; break; } PairInt p3 = new PairInt(x, y); if (!tmpPointsRemoved.contains(p3) && (points.contains(p3) || tmpPointsAdded.contains(p3))) { foundPattern = false; break; } } if (!foundPattern) { continue; } // change the central 0 to a 1 tmpPointsAdded.add(new PairInt(col, row - 1)); // test if can set the surrounding 1's to 0's without disconnecting // lines for (PairInt p2 : ones) { int x = col + p2.getX(); int y = row + p2.getY(); PairInt p3 = new PairInt(x, y); // adds to tmpPointsRemoved boolean nullable = erosionFilter.process(p3, points, tmpPointsAdded, tmpPointsRemoved, w, h); } } for (PairInt p2 : tmpPointsRemoved) { points.remove(p2); } for (PairInt p2 : tmpPointsAdded) { points.add(p2); } } private void correctForHoleArtifacts1_2(Set<PairInt> points, int imageWidth, int imageHeight) { /* 0 0 2 0 0 # # # 1 # 0 0 # 0 # # #* 0 0 -1 0 0 -2 -3 -2 -1 0 1 2 */ LinkedHashSet<PairInt> ones = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> zeroes = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> changeToZeroes = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> changeToOnes = new LinkedHashSet<PairInt>(); // y's are inverted here because sketch above is top left is (0,0) zeroes.add(new PairInt(-3, -1 - 1)); zeroes.add(new PairInt(-2, -1 - 1)); zeroes.add(new PairInt(-2, -2 - 1)); zeroes.add(new PairInt(-1, 0 - 1)); zeroes.add(new PairInt(-1, -2 - 1)); zeroes.add(new PairInt(0, 2 - 1)); zeroes.add(new PairInt(1, 2 - 1)); zeroes.add(new PairInt(1, 1 - 1)); zeroes.add(new PairInt(2, 1 - 1)); ones.add(new PairInt(-2, 1 - 1)); ones.add(new PairInt(-2, 0 - 1)); ones.add(new PairInt(-1, 
1 - 1)); ones.add(new PairInt(-1, -1 - 1)); ones.add(new PairInt(0, 1 - 1)); ones.add(new PairInt(0, -1 - 1)); ones.add(new PairInt(1, 0 - 1)); ones.add(new PairInt(1, -1 - 1)); changeToZeroes.add(new PairInt(-2, 0 - 1)); changeToZeroes.add(new PairInt(-1, -1 - 1)); changeToZeroes.add(new PairInt(-1, 1 - 1)); changeToZeroes.add(new PairInt(0, 1 - 1)); changeToZeroes.add(new PairInt(0, -1 - 1)); changeToZeroes.add(new PairInt(1, 0 - 1)); changeToOnes.add(new PairInt(-1, 0 - 1)); changeToOnes.add(new PairInt(0, 0 - 1)); int startValue = 1; replacePattern(points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startValue); rotate90ThreeTimes( points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startValue); } private void correctForHoleArtifacts1_3(Set<PairInt> points, int imageWidth, int imageHeight) { /* 0 0 2 0 0 # # 1 0 # 0 0< # 0 0 # #* 0 0 -1 0 0 -2 -3 -2 -1 0 1 2 */ LinkedHashSet<PairInt> ones = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> zeroes = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> changeToZeroes = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> changeToOnes = new LinkedHashSet<PairInt>(); // y's are inverted here because sketch above is top left is (0,0) zeroes.add(new PairInt(-3, 0 - 1)); zeroes.add(new PairInt(-3, -1 - 1)); zeroes.add(new PairInt(-2, -1 - 1)); zeroes.add(new PairInt(-2, -2 - 1)); zeroes.add(new PairInt(-1, 0 - 1)); zeroes.add(new PairInt(-1, -2 - 1)); zeroes.add(new PairInt(0, 2 - 1)); zeroes.add(new PairInt(1, 2 - 1)); zeroes.add(new PairInt(1, 1 - 1)); zeroes.add(new PairInt(2, 1 - 1)); zeroes.add(new PairInt(2, 0 - 1)); ones.add(new PairInt(-2, 0 - 1)); ones.add(new PairInt(-1, 1 - 1)); ones.add(new PairInt(-1, -1 - 1)); ones.add(new PairInt(0, 1 - 1)); ones.add(new PairInt(0, -1 - 1)); ones.add(new PairInt(1, 0 - 1)); changeToZeroes.add(new PairInt(-2, 0 - 1)); changeToZeroes.add(new PairInt(-1, -1 - 1)); changeToZeroes.add(new PairInt(-1, 1 - 1)); 
changeToZeroes.add(new PairInt(0, -1 - 1)); changeToZeroes.add(new PairInt(0, 1 - 1)); changeToZeroes.add(new PairInt(1, 0 - 1)); changeToOnes.add(new PairInt(-2, 1 - 1)); changeToOnes.add(new PairInt(-1, 0 - 1)); changeToOnes.add(new PairInt(0, 0 - 1)); changeToOnes.add(new PairInt(1, -1 - 1)); int startValue = 1; replacePattern(points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startValue); rotate90ThreeTimes(points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startValue); } private void correctForHoleArtifacts1_4(Set<PairInt> points, int imageWidth, int imageHeight) { /* 0 0 3 # # 0 0 2 # 0 # 0 1 0 # 0< # 0 0 0 #* -1 0 0 -2 -3 -2 -1 0 1 2 */ LinkedHashSet<PairInt> ones = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> zeroes = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> changeToZeroes = new LinkedHashSet<PairInt>(); LinkedHashSet<PairInt> changeToOnes = new LinkedHashSet<PairInt>(); // y's are inverted here because sketch above is top left is (0,0) zeroes.add(new PairInt(-2, 1 - 1)); zeroes.add(new PairInt(-2, 0 - 1)); zeroes.add(new PairInt(-1, 2 - 1)); zeroes.add(new PairInt(-1, 1 - 1)); zeroes.add(new PairInt(0, 2 - 1)); zeroes.add(new PairInt(0, -1 - 1)); zeroes.add(new PairInt(0, -3 - 1)); zeroes.add(new PairInt(1, -2 - 1)); zeroes.add(new PairInt(1, -3 - 1)); zeroes.add(new PairInt(2, -1 - 1)); zeroes.add(new PairInt(2, -2 - 1)); ones.add(new PairInt(-1, 0 - 1)); ones.add(new PairInt(-1, -1 - 1)); ones.add(new PairInt(-1, -2 - 1)); ones.add(new PairInt(0, 1 - 1)); ones.add(new PairInt(0, -2 - 1)); ones.add(new PairInt(1, 0 - 1)); ones.add(new PairInt(1, -1 - 1)); changeToZeroes.add(new PairInt(-1, 0 - 1)); changeToZeroes.add(new PairInt(-1, -1 - 1)); changeToZeroes.add(new PairInt(0, 1 - 1)); changeToZeroes.add(new PairInt(0, -2 - 1)); changeToZeroes.add(new PairInt(1, 0 - 1)); changeToZeroes.add(new PairInt(1, -1 - 1)); changeToOnes.add(new PairInt(0, -1 - 1)); changeToOnes.add(new 
PairInt(0, 0 - 1)); changeToOnes.add(new PairInt(1, 1 - 1)); int startValue = 1; replacePattern(points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startValue); rotate90ThreeTimes(points, imageWidth, imageHeight, zeroes, ones, changeToZeroes, changeToOnes, startValue); } private void correctForHoleArtifacts0(Set<PairInt> points, int imageWidth, int imageHeight) { /* # # # 2 # 0 # 1 # #* # 0 -3 -2 -1 0 1 2 if the pattern is found, -- set the center 0's to '1' -- for each pixel in the open squares, test which values can be nulled and set them */ Set<PairInt> ones = new HashSet<PairInt>(); Set<PairInt> zeroes = new HashSet<PairInt>(); // y's are inverted here because sketch above is top left is (0,0) ones.add(new PairInt(-1, 0)); ones.add(new PairInt(-1, -1)); ones.add(new PairInt(-1, -2)); ones.add(new PairInt(0, -2)); ones.add(new PairInt(1, 0)); ones.add(new PairInt(1, -1)); ones.add(new PairInt(1, -2)); zeroes.add(new PairInt(0, -1)); int nRotations = 0; replaceAndRotateOnesIfNullable(points, imageWidth, imageHeight, zeroes, ones, nRotations); } private void correctForHoleArtifacts0_1(Set<PairInt> points, int imageWidth, int imageHeight) { /* # # # 2 # # 0 # 1 # # 0 #* # 0 # 0 # # -1 # # # -2 -3 -2 -1 0 1 2 if the pattern is found, -- set the center 0's to '1' -- for each pixel in the open squares, test which values can be nulled and set them */ Set<PairInt> ones = new HashSet<PairInt>(); Set<PairInt> zeroes = new HashSet<PairInt>(); // y's are inverted here because sketch above is top left is (0,0) ones.add(new PairInt(-3, 2)); ones.add(new PairInt(-3, 1)); ones.add(new PairInt(-3, 0)); ones.add(new PairInt(-2, 2)); ones.add(new PairInt(-2, 0)); ones.add(new PairInt(-2, -1)); ones.add(new PairInt(-1, 2)); ones.add(new PairInt(-1, 1)); ones.add(new PairInt(-1, -1)); ones.add(new PairInt(-1, -2)); ones.add(new PairInt(0, 1)); ones.add(new PairInt(0, -2)); ones.add(new PairInt(1, 0)); ones.add(new PairInt(1, -1)); ones.add(new PairInt(1, 
-2)); zeroes.add(new PairInt(-2, 1)); zeroes.add(new PairInt(-1, 0)); zeroes.add(new PairInt(0, -1)); int nRotations = 3; replaceAndRotateOnesIfNullable(points, imageWidth, imageHeight, zeroes, ones, nRotations); } private void correctForHoleArtifacts0_2(Set<PairInt> points, int imageWidth, int imageHeight) { /* # # # 2 # # 0 # 1 # 0 #* # 0 # # # -1 -2 -3 -2 -1 0 1 2 if the pattern is found, -- set the center 0's to '1' -- for each pixel in the open squares, test which values can be nulled and set them */ Set<PairInt> ones = new HashSet<PairInt>(); Set<PairInt> zeroes = new HashSet<PairInt>(); // y's are inverted here because sketch above is top left is (0,0) ones.add(new PairInt(-2, 1)); ones.add(new PairInt(-2, 0)); ones.add(new PairInt(-2, -1)); ones.add(new PairInt(-1, 1)); ones.add(new PairInt(-1, -1)); ones.add(new PairInt(-1, -2)); ones.add(new PairInt(0, 1)); ones.add(new PairInt(0, -2)); ones.add(new PairInt(1, 0)); ones.add(new PairInt(1, -1)); ones.add(new PairInt(1, -2)); zeroes.add(new PairInt(-1, 0)); zeroes.add(new PairInt(0, -1)); int nRotations = 3; replaceAndRotateOnesIfNullable(points, imageWidth, imageHeight, zeroes, ones, nRotations); } private void replaceAndRotateOnesIfNullable(Set<PairInt> points, int imageWidth, int imageHeight, Set<PairInt> zeroes, Set<PairInt> ones, int nRotations) { ErosionFilter erosionFilter = new ErosionFilter(); int w = imageWidth; int h = imageHeight; for (int nRot = 0; nRot <= nRotations; nRot++) { switch(nRot) { case 1: reverseXs(zeroes, ones); break; case 2: reverseYs(zeroes, ones); break; case 3: reverseXs(zeroes, ones); break; default: break; } Set<PairInt> tmpPointsRemoved = new HashSet<PairInt>(); Set<PairInt> tmpPointsAdded = new HashSet<PairInt>(); for (PairInt p : points) { int col = p.getX(); int row = p.getY(); // make sure the current point hasn't been added to tmpPointsRemoved boolean isNotPresent = tmpPointsRemoved.contains(p); if (isNotPresent) { continue; } boolean foundPattern = true; for (PairInt 
p2 : ones) { int x = col + p2.getX(); int y = row + p2.getY(); if ((x < 0) || (y < 0) || (x > (w - 1)) || (y > (h - 1))) { foundPattern = false; break; } PairInt p3 = new PairInt(x, y); if (tmpPointsRemoved.contains(p3) || (!points.contains(p3) && !tmpPointsAdded.contains(p3))) { foundPattern = false; break; } } if (!foundPattern) { continue; } for (PairInt p2 : zeroes) { int x = col + p2.getX(); int y = row + p2.getY(); if ((x < 0) || (y < 0) || (x > (w - 1)) || (y > (h - 1))) { foundPattern = false; break; } PairInt p3 = new PairInt(x, y); if (!tmpPointsRemoved.contains(p3) && (points.contains(p3) || tmpPointsAdded.contains(p3))) { foundPattern = false; break; } } if (!foundPattern) { continue; } //change the zeroes to ones. bounds have been checked for (PairInt p2 : zeroes) { int x = col + p2.getX(); int y = row + p2.getY(); PairInt p3 = new PairInt(x, y); tmpPointsAdded.add(p3); } // test if can set the surrounding 1's to 0's without disconnecting // lines for (PairInt p2 : ones) { int x = col + p2.getX(); int y = row + p2.getY(); PairInt p3 = new PairInt(x, y); // adds to tmpPointsRemoved boolean nullable = erosionFilter.process(p3, points, tmpPointsAdded, tmpPointsRemoved, w, h); } } for (PairInt p2 : tmpPointsRemoved) { points.remove(p2); } for (PairInt p2 : tmpPointsAdded) { points.add(p2); } } } }
more improvements to the post-line thinner methods for sparse point sets.
src/algorithms/imageProcessing/PostLineThinnerCorrections.java
more improvements to the post-line thinner methods for sparse point sets.
<ide><path>rc/algorithms/imageProcessing/PostLineThinnerCorrections.java <ide> correctForHoleArtifacts0(points, w, h); <ide> correctForHoleArtifacts0_1(points, w, h); <ide> correctForHoleArtifacts0_2(points, w, h); <del> <add> correctForHoleArtifacts0_3(points, w, h); <add> correctForHoleArtifacts0_4(points, w, h); <add> <ide> correctForHoleArtifacts1(points, w, h); <ide> <ide> correctForHoleArtifacts1_2(points, w, h); <add> correctForHoleArtifacts1_2_1(points, w, h); <ide> <ide> correctForHoleArtifacts1_3(points, w, h); <ide> <ide> <ide> } <ide> <add> private void correctForHoleArtifacts1_2_1(Set<PairInt> points, int imageWidth, <add> int imageHeight) { <add> <add> /* <add> <add> # # # 2 <add> # 0 0 # 1 <add> # # #* # 0 <add> <add> -3 -2 -1 0 1 2 <add> */ <add> <add> LinkedHashSet<PairInt> ones = new LinkedHashSet<PairInt>(); <add> LinkedHashSet<PairInt> zeroes = new LinkedHashSet<PairInt>(); <add> <add> // y's are inverted here because sketch above is top left is (0,0) <add> ones.add(new PairInt(-2, 0)); <add> ones.add(new PairInt(-2, -1)); <add> ones.add(new PairInt(-1, 0)); <add> ones.add(new PairInt(-1, -2)); <add> ones.add(new PairInt(0, -2)); <add> ones.add(new PairInt(1, 0)); <add> ones.add(new PairInt(1, -1)); <add> ones.add(new PairInt(1, -2)); <add> <add> zeroes.add(new PairInt(-1, -1)); <add> zeroes.add(new PairInt(0, -1)); <add> <add> int nRotations = 3; <add> <add> replaceAndRotateOnesIfNullable(points, imageWidth, imageHeight, <add> zeroes, ones, nRotations); <add> <add> } <add> <ide> private void correctForHoleArtifacts1_3(Set<PairInt> points, int imageWidth, <ide> int imageHeight) { <ide> <ide> replaceAndRotateOnesIfNullable(points, imageWidth, imageHeight, <ide> zeroes, ones, nRotations); <ide> } <add> <add> private void correctForHoleArtifacts0_4(Set<PairInt> points, int imageWidth, <add> int imageHeight) { <add> <add> /* <add> # # 2 <add> # 0 # 1 <add> # #* # 0 <add> <add> -3 -2 -1 0 1 2 <add> <add> if the pattern is found, <add> -- set the 
center 0's to '1' <add> -- for each pixel in the open squares, test which <add> values can be nulled and set them <add> */ <add> <add> Set<PairInt> ones = new HashSet<PairInt>(); <add> Set<PairInt> zeroes = new HashSet<PairInt>(); <add> <add> // y's are inverted here because sketch above is top left is (0,0) <add> ones.add(new PairInt(-1, 0)); <add> ones.add(new PairInt(-1, -1)); <add> ones.add(new PairInt(-1, -2)); <add> ones.add(new PairInt(0, -2)); <add> ones.add(new PairInt(1, 0)); <add> ones.add(new PairInt(1, -1)); <add> <add> zeroes.add(new PairInt(0, -1)); <add> <add> int nRotations = 3; <add> <add> replaceAndRotateOnesIfNullable(points, imageWidth, imageHeight, <add> zeroes, ones, nRotations); <add> } <ide> <ide> private void correctForHoleArtifacts0_1(Set<PairInt> points, int imageWidth, <ide> int imageHeight) { <ide> } <ide> } <ide> <add> private void correctForHoleArtifacts0_3(Set<PairInt> points, int imageWidth, <add> int imageHeight) { <add> <add> /* <add> # # 2 <add> # 0 # 1 <add> # 0 #* 0 <add> # -1 <add> -2 <add> -3 -2 -1 0 1 2 <add> <add> if the pattern is found, <add> -- set the center 0's to '1' <add> -- for each pixel in the open squares, test which <add> values can be nulled and set them <add> */ <add> <add> Set<PairInt> ones = new HashSet<PairInt>(); <add> Set<PairInt> zeroes = new HashSet<PairInt>(); <add> <add> // y's are inverted here because sketch above is top left is (0,0) <add> ones.add(new PairInt(-2, 0)); <add> ones.add(new PairInt(-1, 1)); ones.add(new PairInt(-1, -1)); <add> ones.add(new PairInt(0, -2)); <add> ones.add(new PairInt(1, -1)); ones.add(new PairInt(1, -2)); <add> <add> zeroes.add(new PairInt(-1, 0)); <add> zeroes.add(new PairInt(0, -1)); <add> <add> int nRotations = 3; <add> <add> replaceAndRotateOnesIfNullable(points, imageWidth, imageHeight, <add> zeroes, ones, nRotations); <add> } <add> <ide> }
Java
apache-2.0
53856baaccd608560605c3a5b08914839ef9bb8b
0
gxa/atlas,gxa/atlas,gxa/atlas,gxa/atlas,gxa/atlas
package uk.ac.ebi.atlas.experiments; public class NumberOfExperiments { public static final int ALL = 29; public static final int DISTINCT_ORGANISMS = 18; public static final int NUMBER_OF_BASELINE_SPECIES = 16; public static final int NUMBER_OF_PLANTS_EXPERIMENTS = 9; public static final int NUMBER_OF_ANIMALS_AND_FUNGI_EXPERIMENTS = 18; public static final int NUMBER_OF_HOMO_SAPIENS_BASELINE_EXPERIMENTS = 7; }
web/src/test/java/uk/ac/ebi/atlas/experiments/NumberOfExperiments.java
package uk.ac.ebi.atlas.experiments; public class NumberOfExperiments { public static final int ALL = 28; public static final int DISTINCT_ORGANISMS = 18; public static final int NUMBER_OF_BASELINE_SPECIES = 16; public static final int NUMBER_OF_PLANTS_EXPERIMENTS = 9; public static final int NUMBER_OF_ANIMALS_AND_FUNGI_EXPERIMENTS = 18; public static final int NUMBER_OF_HOMO_SAPIENS_BASELINE_EXPERIMENTS = 7; }
Fix tests
web/src/test/java/uk/ac/ebi/atlas/experiments/NumberOfExperiments.java
Fix tests
<ide><path>eb/src/test/java/uk/ac/ebi/atlas/experiments/NumberOfExperiments.java <ide> <ide> public class NumberOfExperiments { <ide> <del> public static final int ALL = 28; <add> public static final int ALL = 29; <ide> public static final int DISTINCT_ORGANISMS = 18; <ide> public static final int NUMBER_OF_BASELINE_SPECIES = 16; <ide> public static final int NUMBER_OF_PLANTS_EXPERIMENTS = 9;
Java
mpl-2.0
d64c4e1c5c4de3f01ab8df69727afc9cf7a0fb4b
0
Wurst-Imperium/Wurst-MC-1.11
/* * Copyright 2014 - 2017 | Wurst-Imperium | All rights reserved. * * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ package net.wurstclient.features.mods.items; import net.minecraft.init.Blocks; import net.minecraft.item.ItemStack; import net.minecraft.nbt.NBTTagCompound; import net.minecraft.nbt.NBTTagList; import net.wurstclient.compatibility.WMinecraft; import net.wurstclient.features.Mod; import net.wurstclient.features.SearchTags; import net.wurstclient.utils.ChatUtils; import net.wurstclient.utils.InventoryUtils; @SearchTags({"crash chest"}) @Mod.Bypasses public final class CrashChestMod extends Mod { public CrashChestMod() { super("CrashChest", "Generates a chest that can kick people from the server if they have too many copies of it."); } @Override public void onEnable() { if(!WMinecraft.getPlayer().capabilities.isCreativeMode) { ChatUtils.error("Creative mode only."); setEnabled(false); return; } if(!InventoryUtils.isSlotEmpty(36)) { ChatUtils.error("Your shoes slot must be empty."); setEnabled(false); return; } // generate item ItemStack stack = new ItemStack(Blocks.CHEST); NBTTagCompound nbtCompound = new NBTTagCompound(); NBTTagList nbtList = new NBTTagList(); for(int i = 0; i < 40000; i++) nbtList.appendTag(new NBTTagList()); nbtCompound.setTag("www.wurstclient.net", nbtList); stack.setTagInfo("www.wurstclient.net", nbtCompound); // give item InventoryUtils.placeStackInArmor(0, stack); ChatUtils.message("A CrashChest was placed in your shoes slot."); setEnabled(false); } }
shared-src/net/wurstclient/features/mods/items/CrashChestMod.java
/* * Copyright 2014 - 2017 | Wurst-Imperium | All rights reserved. * * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ package net.wurstclient.features.mods.items; import net.minecraft.init.Blocks; import net.minecraft.item.ItemStack; import net.minecraft.nbt.NBTTagCompound; import net.minecraft.nbt.NBTTagList; import net.wurstclient.compatibility.WMinecraft; import net.wurstclient.features.HelpPage; import net.wurstclient.features.Mod; import net.wurstclient.features.SearchTags; import net.wurstclient.utils.ChatUtils; import net.wurstclient.utils.InventoryUtils; @SearchTags({"crash chest"}) @HelpPage("Mods/CrashChest") @Mod.Bypasses public final class CrashChestMod extends Mod { public CrashChestMod() { super("CrashChest", "Generates a CrashChest. Give a lot of these to another\n" + "player to make them crash. They will not be able to join the server\n" + "ever again!"); } @Override public void onEnable() { if(!InventoryUtils.isSlotEmpty(36)) { if(WMinecraft.getPlayer().inventory.getStackInSlot(36) .getDisplayName().equals("6lCOPY ME")) ChatUtils.error("You already have a CrashChest."); else ChatUtils.error("Please take off your shoes."); setEnabled(false); return; }else if(!WMinecraft.getPlayer().capabilities.isCreativeMode) { ChatUtils.error("Creative mode only."); setEnabled(false); return; } ItemStack stack = new ItemStack(Blocks.CHEST); NBTTagCompound nbtTagCompound = new NBTTagCompound(); NBTTagList nbtList = new NBTTagList(); for(int i = 0; i < 40000; i++) nbtList.appendTag(new NBTTagList()); nbtTagCompound.setTag("www.wurstclient.net", nbtList); stack.setTagInfo("www.wurstclient.net", nbtTagCompound); InventoryUtils.placeStackInArmor(0, stack); stack.setStackDisplayName("6lCOPY ME"); ChatUtils.message("A CrashChest was placed in your shoes slot."); setEnabled(false); } }
Update CrashChestMod
shared-src/net/wurstclient/features/mods/items/CrashChestMod.java
Update CrashChestMod
<ide><path>hared-src/net/wurstclient/features/mods/items/CrashChestMod.java <ide> import net.minecraft.nbt.NBTTagCompound; <ide> import net.minecraft.nbt.NBTTagList; <ide> import net.wurstclient.compatibility.WMinecraft; <del>import net.wurstclient.features.HelpPage; <ide> import net.wurstclient.features.Mod; <ide> import net.wurstclient.features.SearchTags; <ide> import net.wurstclient.utils.ChatUtils; <ide> import net.wurstclient.utils.InventoryUtils; <ide> <ide> @SearchTags({"crash chest"}) <del>@HelpPage("Mods/CrashChest") <ide> @Mod.Bypasses <ide> public final class CrashChestMod extends Mod <ide> { <ide> public CrashChestMod() <ide> { <ide> super("CrashChest", <del> "Generates a CrashChest. Give a lot of these to another\n" <del> + "player to make them crash. They will not be able to join the server\n" <del> + "ever again!"); <add> "Generates a chest that can kick people from the server if they have too many copies of it."); <ide> } <ide> <ide> @Override <ide> public void onEnable() <ide> { <del> if(!InventoryUtils.isSlotEmpty(36)) <del> { <del> if(WMinecraft.getPlayer().inventory.getStackInSlot(36) <del> .getDisplayName().equals("6lCOPY ME")) <del> ChatUtils.error("You already have a CrashChest."); <del> else <del> ChatUtils.error("Please take off your shoes."); <del> setEnabled(false); <del> return; <del> }else if(!WMinecraft.getPlayer().capabilities.isCreativeMode) <add> if(!WMinecraft.getPlayer().capabilities.isCreativeMode) <ide> { <ide> ChatUtils.error("Creative mode only."); <ide> setEnabled(false); <ide> return; <ide> } <add> <add> if(!InventoryUtils.isSlotEmpty(36)) <add> { <add> ChatUtils.error("Your shoes slot must be empty."); <add> setEnabled(false); <add> return; <add> } <add> <add> // generate item <ide> ItemStack stack = new ItemStack(Blocks.CHEST); <del> NBTTagCompound nbtTagCompound = new NBTTagCompound(); <add> NBTTagCompound nbtCompound = new NBTTagCompound(); <ide> NBTTagList nbtList = new NBTTagList(); <ide> for(int i = 0; i < 40000; 
i++) <ide> nbtList.appendTag(new NBTTagList()); <del> nbtTagCompound.setTag("www.wurstclient.net", nbtList); <del> stack.setTagInfo("www.wurstclient.net", nbtTagCompound); <add> nbtCompound.setTag("www.wurstclient.net", nbtList); <add> stack.setTagInfo("www.wurstclient.net", nbtCompound); <add> <add> // give item <ide> InventoryUtils.placeStackInArmor(0, stack); <del> stack.setStackDisplayName("6lCOPY ME"); <ide> ChatUtils.message("A CrashChest was placed in your shoes slot."); <ide> setEnabled(false); <ide> }
Java
apache-2.0
226999ef88092e619a9d9b22b79f9511db6d9224
0
phrocker/accumulo-1,ctubbsii/accumulo,adamjshook/accumulo,dhutchis/accumulo,mikewalch/accumulo,lstav/accumulo,adamjshook/accumulo,mikewalch/accumulo,ivakegg/accumulo,adamjshook/accumulo,ivakegg/accumulo,lstav/accumulo,phrocker/accumulo-1,keith-turner/accumulo,lstav/accumulo,apache/accumulo,phrocker/accumulo-1,keith-turner/accumulo,keith-turner/accumulo,milleruntime/accumulo,mikewalch/accumulo,mjwall/accumulo,ctubbsii/accumulo,keith-turner/accumulo,milleruntime/accumulo,adamjshook/accumulo,mikewalch/accumulo,mikewalch/accumulo,phrocker/accumulo-1,lstav/accumulo,milleruntime/accumulo,dhutchis/accumulo,apache/accumulo,mjwall/accumulo,phrocker/accumulo-1,phrocker/accumulo-1,adamjshook/accumulo,mjwall/accumulo,mjwall/accumulo,mjwall/accumulo,ctubbsii/accumulo,dhutchis/accumulo,dhutchis/accumulo,mjwall/accumulo,ctubbsii/accumulo,lstav/accumulo,mikewalch/accumulo,keith-turner/accumulo,keith-turner/accumulo,apache/accumulo,dhutchis/accumulo,milleruntime/accumulo,apache/accumulo,mjwall/accumulo,ivakegg/accumulo,milleruntime/accumulo,adamjshook/accumulo,lstav/accumulo,dhutchis/accumulo,ivakegg/accumulo,dhutchis/accumulo,milleruntime/accumulo,apache/accumulo,dhutchis/accumulo,adamjshook/accumulo,ivakegg/accumulo,mikewalch/accumulo,keith-turner/accumulo,ivakegg/accumulo,milleruntime/accumulo,mikewalch/accumulo,ctubbsii/accumulo,lstav/accumulo,ctubbsii/accumulo,ivakegg/accumulo,dhutchis/accumulo,adamjshook/accumulo,adamjshook/accumulo,phrocker/accumulo-1,ctubbsii/accumulo,apache/accumulo,apache/accumulo
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.accumulo.proxy; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import java.nio.ByteBuffer; import java.util.Collections; import java.util.Properties; import org.apache.accumulo.core.client.security.tokens.PasswordToken; import org.apache.thrift.TException; import org.apache.thrift.protocol.TCompactProtocol; import org.apache.thrift.server.TServer; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.net.HostAndPort; public class TestProxyInstanceOperations { private static final Logger log = LoggerFactory.getLogger(TestProxyInstanceOperations.class); protected static TServer proxy; protected static TestProxyClient tpc; protected static ByteBuffer userpass; protected static final int port = 10197; @BeforeClass public static void setup() throws Exception { Properties prop = new Properties(); prop.setProperty("useMockInstance", "true"); prop.put("tokenClass", PasswordToken.class.getName()); proxy = Proxy.createProxyServer(HostAndPort.fromParts("localhost", port), new 
TCompactProtocol.Factory(), prop).server; log.info("Waiting for proxy to start"); while (!proxy.isServing()) { Thread.sleep(500); } log.info("Proxy started"); tpc = new TestProxyClient("localhost", port); userpass = tpc.proxy.login("root", Collections.singletonMap("password", "")); } @AfterClass public static void tearDown() throws InterruptedException { proxy.stop(); } @Test public void properties() throws TException { tpc.proxy().setProperty(userpass, "test.systemprop", "whistletips"); assertEquals(tpc.proxy().getSystemConfiguration(userpass).get("test.systemprop"), "whistletips"); tpc.proxy().removeProperty(userpass, "test.systemprop"); assertNull(tpc.proxy().getSystemConfiguration(userpass).get("test.systemprop")); } @Test public void testClassLoad() throws TException { assertTrue(tpc.proxy().testClassLoad(userpass, "org.apache.accumulo.core.iterators.user.RegExFilter", "org.apache.accumulo.core.iterators.Filter")); } }
test/src/test/java/org/apache/accumulo/proxy/TestProxyInstanceOperations.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.accumulo.proxy; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import java.nio.ByteBuffer; import java.util.Properties; import java.util.TreeMap; import org.apache.accumulo.core.client.security.tokens.PasswordToken; import org.apache.thrift.TException; import org.apache.thrift.protocol.TCompactProtocol; import org.apache.thrift.server.TServer; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import com.google.common.net.HostAndPort; public class TestProxyInstanceOperations { protected static TServer proxy; protected static Thread thread; protected static TestProxyClient tpc; protected static ByteBuffer userpass; protected static final int port = 10197; @SuppressWarnings("serial") @BeforeClass public static void setup() throws Exception { Properties prop = new Properties(); prop.setProperty("useMockInstance", "true"); prop.put("tokenClass", PasswordToken.class.getName()); proxy = Proxy.createProxyServer(HostAndPort.fromParts("localhost", port), new TCompactProtocol.Factory(), prop).server; thread = new Thread() { @Override public void run() { proxy.serve(); } }; 
thread.start(); tpc = new TestProxyClient("localhost", port); userpass = tpc.proxy.login("root", new TreeMap<String,String>() { { put("password", ""); } }); } @AfterClass public static void tearDown() throws InterruptedException { proxy.stop(); thread.join(); } @Test public void properties() throws TException { tpc.proxy().setProperty(userpass, "test.systemprop", "whistletips"); assertEquals(tpc.proxy().getSystemConfiguration(userpass).get("test.systemprop"), "whistletips"); tpc.proxy().removeProperty(userpass, "test.systemprop"); assertNull(tpc.proxy().getSystemConfiguration(userpass).get("test.systemprop")); } @Test public void testClassLoad() throws TException { assertTrue(tpc.proxy().testClassLoad(userpass, "org.apache.accumulo.core.iterators.user.RegExFilter", "org.apache.accumulo.core.iterators.Filter")); } }
ACCUMULO-3534 Don't start the already started TServer Also wait for the tserver to start before running the test.
test/src/test/java/org/apache/accumulo/proxy/TestProxyInstanceOperations.java
ACCUMULO-3534 Don't start the already started TServer
<ide><path>est/src/test/java/org/apache/accumulo/proxy/TestProxyInstanceOperations.java <ide> import static org.junit.Assert.assertTrue; <ide> <ide> import java.nio.ByteBuffer; <add>import java.util.Collections; <ide> import java.util.Properties; <del>import java.util.TreeMap; <ide> <ide> import org.apache.accumulo.core.client.security.tokens.PasswordToken; <ide> import org.apache.thrift.TException; <ide> import org.junit.AfterClass; <ide> import org.junit.BeforeClass; <ide> import org.junit.Test; <add>import org.slf4j.Logger; <add>import org.slf4j.LoggerFactory; <ide> <ide> import com.google.common.net.HostAndPort; <ide> <ide> public class TestProxyInstanceOperations { <add> private static final Logger log = LoggerFactory.getLogger(TestProxyInstanceOperations.class); <add> <ide> protected static TServer proxy; <del> protected static Thread thread; <ide> protected static TestProxyClient tpc; <ide> protected static ByteBuffer userpass; <ide> protected static final int port = 10197; <ide> <del> @SuppressWarnings("serial") <ide> @BeforeClass <ide> public static void setup() throws Exception { <ide> Properties prop = new Properties(); <ide> prop.put("tokenClass", PasswordToken.class.getName()); <ide> <ide> proxy = Proxy.createProxyServer(HostAndPort.fromParts("localhost", port), new TCompactProtocol.Factory(), prop).server; <del> thread = new Thread() { <del> @Override <del> public void run() { <del> proxy.serve(); <del> } <del> }; <del> thread.start(); <add> log.info("Waiting for proxy to start"); <add> while (!proxy.isServing()) { <add> Thread.sleep(500); <add> } <add> log.info("Proxy started"); <ide> tpc = new TestProxyClient("localhost", port); <del> userpass = tpc.proxy.login("root", new TreeMap<String,String>() { <del> { <del> put("password", ""); <del> } <del> }); <add> userpass = tpc.proxy.login("root", Collections.singletonMap("password", "")); <ide> } <ide> <ide> @AfterClass <ide> public static void tearDown() throws InterruptedException { <ide> proxy.stop(); 
<del> thread.join(); <ide> } <ide> <ide> @Test
Java
apache-2.0
f2f539d1b6f59e44bb32c6905a88df216eb22a0d
0
apache/jena,apache/jena,atsolakid/jena,atsolakid/jena,atsolakid/jena,tr3vr/jena,adrapereira/jena,apache/jena,kidaa/jena,apache/jena,tr3vr/jena,kamir/jena,atsolakid/jena,kidaa/jena,kidaa/jena,apache/jena,CesarPantoja/jena,samaitra/jena,tr3vr/jena,kidaa/jena,adrapereira/jena,CesarPantoja/jena,adrapereira/jena,CesarPantoja/jena,kamir/jena,jianglili007/jena,samaitra/jena,jianglili007/jena,adrapereira/jena,CesarPantoja/jena,tr3vr/jena,kidaa/jena,apache/jena,kamir/jena,kamir/jena,atsolakid/jena,tr3vr/jena,jianglili007/jena,CesarPantoja/jena,adrapereira/jena,apache/jena,samaitra/jena,samaitra/jena,jianglili007/jena,tr3vr/jena,atsolakid/jena,jianglili007/jena,tr3vr/jena,CesarPantoja/jena,kamir/jena,adrapereira/jena,kamir/jena,kidaa/jena,samaitra/jena,kidaa/jena,adrapereira/jena,jianglili007/jena,kamir/jena,apache/jena,samaitra/jena,samaitra/jena,jianglili007/jena,atsolakid/jena,CesarPantoja/jena
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jena.atlas.lib; import java.io.File ; import java.nio.file.Paths ; import org.apache.jena.atlas.junit.BaseTest ; import org.junit.Test ; public class TestFilenameProcessing extends BaseTest { @Test public void encode_1() { encodeComponent("abc", "abc") ; } @Test public void encode_2() { encodeComponent("", "") ; } @Test public void encode_3() { encodeComponent(":/", "%3A%2F") ; } // ---- Main tests. // Portablility static boolean isWindows = File.separatorChar != '/' ; private static String cwd = Paths.get(".").toAbsolutePath().normalize().toString() ; // Sort out cwd. // Must start "/", must not end "/" // Must be /-style, not \ static { if ( isWindows ) { // Canonical cwd = cwd.replace(File.separatorChar, '/') ; // Drive letters. if ( ! 
cwd.startsWith("/" ) ) cwd = "/" + cwd ; } } @Test public void fileIRI_1() { String uri = testFileIRI("D.ttl") ; assertTrue(uri.endsWith("D.ttl")) ; } @Test public void fileIRI_2() { String uri = testFileIRI("file:/D.ttl") ; assertTrue(uri.endsWith("D.ttl")) ; } @Test public void fileIRI_3() { String uri = testFileIRI("file://D.ttl") ; assertTrue(uri.endsWith("D.ttl")) ; } @Test public void fileIRI_4() { String iri = testFileIRI("file:///D.ttl") ; // Even on windows, this is used as-is so no drive letter. assertEquals("file:///D.ttl", iri) ; } private static String testFileIRI(String fn) { String uri1 = IRILib.filenameToIRI(fn) ; assertTrue(uri1.startsWith("file:///")) ; String uri2 = IRILib.filenameToIRI(uri1) ; assertEquals(uri1, uri2) ; return uri1 ; } @Test public void fileURL_1() { assertNotEquals(cwd, "") ; assertNotNull(cwd) ; filenameToIRI("abc", "file://" + cwd + "/abc") ; } @Test public void fileURL_2() { if ( ! isWindows ) // Windows inserts a drive letter filenameToIRI("/abc", "file:///abc") ; } @Test public void fileURL_3() { if ( isWindows ) filenameToIRI("C:/Program File/App File", "file:///C:/Program%20File/App%20File") ; else filenameToIRI("/Program File/App File", "file:///Program%20File/App%20File") ; } @Test public void fileURL_4() { if ( isWindows ) filenameToIRI("C:/Program File/App Dir/", "file:///C:/Program%20File/App%20Dir/") ; else filenameToIRI("/Program File/App Dir/", "file:///Program%20File/App%20Dir/") ; } @Test public void fileURL_5() { if ( isWindows ) filenameToIRI("C:\\Windows\\Path", "file:///C:/Windows/Path") ; else filenameToIRI("C:\\Windows\\Path", "file://" + cwd + "/C:%5CWindows%5CPath") ; } @Test public void fileURL_6() { filenameToIRI("~user", "file://" + cwd + "/~user") ; } @Test public void fileURL_7() { filenameToIRI(".", "file://" + cwd) ; } @Test public void fileURL_10() { filenameToIRI("file:abc", "file://" + cwd + "/abc") ; } @Test public void fileURL_11() { if ( ! 
isWindows ) // Windows inserts a drive letter filenameToIRI("file:/abc", "file:///abc" ) ; } @Test public void fileURL_12() { filenameToIRI("file:", "file://" + cwd) ; } @Test public void fileURL_13() { filenameToIRI("file:.", "file://" + cwd + "") ; } @Test public void fileURL_14() { String x = cwd.replaceAll("/[^/]*$", "") ; filenameToIRI("file:..", "file://"+x ) ; } private static void encodeComponent(String string, String result) { String r = IRILib.encodeUriComponent(string) ; assertEquals(result, r) ; } private static void filenameToIRI(String string, String result) { String r = IRILib.filenameToIRI(string) ; assertEquals(result, r) ; } }
jena-base/src/test/java/org/apache/jena/atlas/lib/TestFilenameProcessing.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jena.atlas.lib; import java.io.File ; import java.nio.file.Paths ; import org.apache.jena.atlas.junit.BaseTest ; import org.junit.Test ; public class TestFilenameProcessing extends BaseTest { @Test public void encode_1() { encodeComponent("abc", "abc") ; } @Test public void encode_2() { encodeComponent("", "") ; } @Test public void encode_3() { encodeComponent(":/", "%3A%2F") ; } // ---- Main tests. // Portablility static boolean isWindows = File.separatorChar != '/' ; private static String cwd = Paths.get(".").toAbsolutePath().normalize().toString() ; // Sort out cwd. // Must start "/", must not end "/" // Must be /-style, not \ static { if ( isWindows ) { // Canonical cwd = cwd.replace(File.separatorChar, '/') ; // Drive letters. if ( ! 
cwd.startsWith("/" ) ) cwd = "/" + cwd ; } } @Test public void fileIRI_1() { String uri = testFileIRI("D.ttl") ; assertTrue(uri.endsWith("D.ttl")) ; } @Test public void fileIRI_2() { String uri = testFileIRI("file:/D.ttl") ; assertTrue(uri.endsWith("D.ttl")) ; } @Test public void fileIRI_3() { String uri = testFileIRI("file://D.ttl") ; assertTrue(uri.endsWith("D.ttl")) ; } @Test public void fileIRI_4() { String iri = testFileIRI("file:///D.ttl") ; // Even on windows, this is used as-is so no drive letter. assertEquals("file:///D.ttl", iri) ; } private static String testFileIRI(String fn) { String uri1 = IRILib.filenameToIRI(fn) ; assertTrue(uri1.startsWith("file:///")) ; String uri2 = IRILib.filenameToIRI(uri1) ; assertEquals(uri1, uri2) ; return uri1 ; } @Test public void fileURL_1() { assertNotEquals(cwd, "") ; assertNotNull(cwd) ; filenameToIRI("abc", "file://" + cwd + "/abc") ; } @Test public void fileURL_2() { if ( ! isWindows ) // Windows inserts a drive letter filenameToIRI("/abc", "file:///abc") ; } @Test public void fileURL_3() { if ( isWindows ) filenameToIRI("C:/Program File/App File", "file:///C:/Program%20File/App%20File") ; else filenameToIRI("/Program File/App File", "file:///Program%20File/App%20File") ; } @Test public void fileURL_4() { if ( isWindows ) filenameToIRI("C:/Program File/App Dir/", "file:///C:/Program%20File/App%20Dir/") ; else filenameToIRI("/Program File/App Dir/", "file:///Program%20File/App%20Dir/") ; } @Test public void fileURL_5() { if ( isWindows ) filenameToIRI("C:\\Windows\\Path", "file:///C:/Windows/Path") ; else filenameToIRI("C:\\Windows\\Path", "file://" + cwd + "/C:%5CWindows%5CPath") ; } @Test public void fileURL_6() { filenameToIRI("~user", "file://" + cwd + "/~user") ; } @Test public void fileURL_7() { filenameToIRI(".", "file://" + cwd) ; } @Test public void fileURL_10() { filenameToIRI("file:abc", "file://" + cwd + "/abc") ; } @Test public void fileURL_11() { if ( ! 
isWindows ) // Windows inserts a drive letter filenameToIRI("file:/abc", "file:///abc" ) ; } @Test public void fileURL_12() { filenameToIRI("file:", "file://" + cwd) ; } @Test public void fileURL_13() { filenameToIRI("file:.", "file://" + cwd + "") ; } @Test public void fileURL_14() { String x = cwd ; if ( isWindows ) x = x.replace('\\', '/') ; x = cwd.replaceAll("/[^/]*$", "") ; filenameToIRI("file:..", "file://"+x ) ; } private static void encodeComponent(String string, String result) { String r = IRILib.encodeUriComponent(string) ; assertEquals(result, r) ; } private static void filenameToIRI(String string, String result) { String r = IRILib.filenameToIRI(string) ; assertEquals(result, r) ; } }
Remove dev code.
jena-base/src/test/java/org/apache/jena/atlas/lib/TestFilenameProcessing.java
Remove dev code.
<ide><path>ena-base/src/test/java/org/apache/jena/atlas/lib/TestFilenameProcessing.java <ide> } <ide> <ide> @Test public void fileURL_14() { <del> String x = cwd ; <del> if ( isWindows ) <del> x = x.replace('\\', '/') ; <del> x = cwd.replaceAll("/[^/]*$", "") ; <add> String x = cwd.replaceAll("/[^/]*$", "") ; <ide> filenameToIRI("file:..", "file://"+x ) ; <ide> } <ide>
Java
agpl-3.0
b4770fd644e636a43db28d2dd28f38bde137d3fe
0
kumarrus/voltdb,deerwalk/voltdb,flybird119/voltdb,migue/voltdb,zuowang/voltdb,paulmartel/voltdb,ingted/voltdb,VoltDB/voltdb,flybird119/voltdb,deerwalk/voltdb,VoltDB/voltdb,paulmartel/voltdb,simonzhangsm/voltdb,ingted/voltdb,flybird119/voltdb,ingted/voltdb,creative-quant/voltdb,zuowang/voltdb,simonzhangsm/voltdb,flybird119/voltdb,deerwalk/voltdb,ingted/voltdb,paulmartel/voltdb,VoltDB/voltdb,migue/voltdb,zuowang/voltdb,kumarrus/voltdb,simonzhangsm/voltdb,paulmartel/voltdb,flybird119/voltdb,creative-quant/voltdb,VoltDB/voltdb,paulmartel/voltdb,simonzhangsm/voltdb,migue/voltdb,kumarrus/voltdb,creative-quant/voltdb,simonzhangsm/voltdb,creative-quant/voltdb,creative-quant/voltdb,simonzhangsm/voltdb,paulmartel/voltdb,creative-quant/voltdb,paulmartel/voltdb,creative-quant/voltdb,zuowang/voltdb,deerwalk/voltdb,ingted/voltdb,migue/voltdb,deerwalk/voltdb,ingted/voltdb,migue/voltdb,kumarrus/voltdb,flybird119/voltdb,VoltDB/voltdb,kumarrus/voltdb,ingted/voltdb,kumarrus/voltdb,deerwalk/voltdb,kumarrus/voltdb,zuowang/voltdb,migue/voltdb,VoltDB/voltdb,zuowang/voltdb,deerwalk/voltdb,simonzhangsm/voltdb,zuowang/voltdb,zuowang/voltdb,creative-quant/voltdb,flybird119/voltdb,paulmartel/voltdb,kumarrus/voltdb,deerwalk/voltdb,migue/voltdb,flybird119/voltdb,simonzhangsm/voltdb,VoltDB/voltdb,ingted/voltdb,migue/voltdb
/* This file is part of VoltDB. * Copyright (C) 2008-2015 VoltDB Inc. * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as * published by the Free Software Foundation, either version 3 of the * License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with VoltDB. If not, see <http://www.gnu.org/licenses/>. */ package org.voltdb; import static org.voltdb.common.Constants.AUTH_HANDSHAKE; import static org.voltdb.common.Constants.AUTH_HANDSHAKE_VERSION; import static org.voltdb.common.Constants.AUTH_SERVICE_NAME; import java.io.EOFException; import java.io.IOException; import java.nio.ByteBuffer; import java.nio.channels.SocketChannel; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; import java.security.PrivilegedAction; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import javax.security.auth.Subject; import javax.security.auth.login.AccountExpiredException; import javax.security.auth.login.CredentialExpiredException; import javax.security.auth.login.FailedLoginException; import javax.security.auth.login.LoginContext; import javax.security.auth.login.LoginException; import org.ietf.jgss.GSSContext; import org.ietf.jgss.GSSCredential; import org.ietf.jgss.GSSException; import org.ietf.jgss.GSSManager; import org.mindrot.BCrypt; import org.voltcore.logging.Level; import org.voltcore.logging.VoltLogger; import org.voltdb.catalog.Connector; import org.voltdb.catalog.Database; import org.voltdb.catalog.Procedure; import 
org.voltdb.security.AuthenticationRequest; import org.voltdb.utils.Encoder; import org.voltdb.utils.LogKeys; import com.google_voltpatches.common.base.Charsets; import com.google_voltpatches.common.base.Throwables; import com.google_voltpatches.common.collect.ImmutableList; import com.google_voltpatches.common.collect.ImmutableMap; import com.google_voltpatches.common.collect.ImmutableSet; import java.util.EnumSet; import java.util.concurrent.TimeUnit; import org.voltcore.utils.RateLimitedLogger; import org.voltdb.client.ClientAuthHashScheme; import org.voltdb.common.Permission; /** * The AuthSystem parses authentication and permission information from the catalog and uses it to generate a representation * of the permissions assigned to users and groups. * */ public class AuthSystem { private static final VoltLogger authLogger = new VoltLogger("AUTH"); /** * JASS Login configuration entry designator */ public static final String VOLTDB_SERVICE_LOGIN_MODULE = System.getProperty("VOLTDB_SERVICE_LOGIN_MODULE", "VoltDBService"); /** * Authentication provider enumeration. It serves also as mapping mechanism * for providers, which are configured in the deployment file, and the login * packet service field. 
*/ public enum AuthProvider { HASH("hash","database"), KERBEROS("kerberos","kerberos"); private final static Map<String,AuthProvider> providerMap; private final static Map<String,AuthProvider> serviceMap; static { ImmutableMap.Builder<String, AuthProvider> pbldr = ImmutableMap.builder(); ImmutableMap.Builder<String, AuthProvider> sbldr = ImmutableMap.builder(); for (AuthProvider ap: values()) { pbldr.put(ap.provider,ap); sbldr.put(ap.service,ap); } providerMap = pbldr.build(); serviceMap = sbldr.build(); } final String provider; final String service; AuthProvider(String provider, String service) { this.provider = provider; this.service = service; } /** * @return its security provider equivalent */ public String provider() { return provider; } /** * @return its login packet service equivalent */ public String service() { return service; } public static AuthProvider fromProvider(String provider) { AuthProvider ap = providerMap.get(provider); if (ap == null) { throw new IllegalArgumentException("No provider mapping for " + provider); } return ap; } public static AuthProvider fromService(String service) { AuthProvider ap = serviceMap.get(service); if (ap == null) { throw new IllegalArgumentException("No service mapping for " + service); } return ap; } } /** * Representation of a permission group. 
* */ class AuthGroup { /** * Name of the group */ private final String m_name; /** * Set of users that are a member of this group */ private Set<AuthUser> m_users = new HashSet<AuthUser>(); private final EnumSet<Permission> m_permissions = EnumSet.noneOf(Permission.class); /** * * @param name Name of the group * @param sysproc Whether membership in this group grants permission to invoke system procedures * @param defaultproc Whether membership in this group grants permission to invoke default procedures * @param defaultprocread Whether membership in this group grants permission to invoke only read default procedures * @param adhoc Whether membership in this group grants permission to invoke adhoc queries */ private AuthGroup(String name, EnumSet<Permission> permissions) { m_name = name.intern(); m_permissions.addAll(permissions); } private void finish() { m_users = ImmutableSet.copyOf(m_users); } } /** * Representation of the permissions associated with a specific user along with a SHA-1 double hashed copy of the users * clear text password. * */ public static class AuthUser { /** * SHA-1 double hashed copy of the users clear text password */ private final byte[] m_sha1ShadowPassword; /** * SHA-2 double hashed copy of the users clear text password */ private final byte[] m_sha2ShadowPassword; /** * SHA-1 hashed and then bcrypted copy of the users clear text password */ private final String m_bcryptShadowPassword; /** * SHA-2 hashed and then bcrypted copy of the users clear text password */ private final String m_bcryptSha2ShadowPassword; /** * Name of the user */ public final String m_name; /** * Fast iterable list of groups this user is a member of. */ private List<AuthGroup> m_groups = new ArrayList<AuthGroup>(); private EnumSet<Permission> m_permissions = EnumSet.noneOf(Permission.class); private String[] m_permissions_list; /** * Fast membership check set of stored procedures this user has permission to invoke. 
* This is generated when the catalog is parsed and it includes procedures the user has permission * to invoke by virtue of group membership. The catalog entry for the stored procedure is used here. */ private Set<Procedure> m_authorizedProcedures = new HashSet<Procedure>(); /** * Set of export connectors this user is authorized to access. */ private Set<Connector> m_authorizedConnectors = new HashSet<Connector>(); /** * The constructor accepts the password as either sha1 or bcrypt. In practice * there will be only one passed in depending on the format of the password in the catalog. * The other will be null and that is used to determine how to hash the supplied password * for auth * @param shadowPassword SHA-1 double hashed copy of the users clear text password * @param name Name of the user */ private AuthUser(byte[] sha1ShadowPassword, byte[] sha2ShadowPassword, String bcryptShadowPassword, String bCryptSha2ShadowPassword, String name) { m_sha1ShadowPassword = sha1ShadowPassword; m_sha2ShadowPassword = sha2ShadowPassword; m_bcryptShadowPassword = bcryptShadowPassword; m_bcryptSha2ShadowPassword = bCryptSha2ShadowPassword; if (name != null) { m_name = name.intern(); } else { m_name = null; } } /** * Check if a user has permission to invoke the specified stored procedure * Handle both user-written procedures and default auto-generated ones. * @param proc Catalog entry for the stored procedure to check * @return true if the user has permission and false otherwise */ public boolean hasUserDefinedProcedurePermission(Procedure proc) { if (proc == null) { return false; } return hasPermission(Permission.ALLPROC) || m_authorizedProcedures.contains(proc); } /** * Check if a user has any one of given permission. * @return true if the user has permission and false otherwise */ public boolean hasPermission(Permission... perms) { for (int i = 0; i < perms.length;i++) { if (m_permissions.contains(perms[i])) { return true; } } return false; } /** * Get group names. 
* @return group name array */ public final String[] getGroupNames() { String[] groupNames = new String[m_groups.size()]; for (int i = 0; i < m_groups.size(); ++i) { groupNames[i] = m_groups.get(i).m_name; } return groupNames; } public boolean authorizeConnector(String connectorClass) { if (connectorClass == null) { return false; } for (Connector c : m_authorizedConnectors) { if (c.getLoaderclass().equals(connectorClass)) { return true; } } return false; } public boolean isAuthEnabled() { return true; } private void finish() { m_groups = ImmutableList.copyOf(m_groups); m_authorizedProcedures = ImmutableSet.copyOf(m_authorizedProcedures); m_authorizedConnectors = ImmutableSet.copyOf(m_authorizedConnectors); } } /** * Storage for user permissions keyed on the username */ private Map<String, AuthUser> m_users = new HashMap<String, AuthUser>(); /** * Storage for group permissions keyed on group name. */ private Map<String, AuthGroup> m_groups = new HashMap<String, AuthGroup>(); /** * Indicates whether security is enabled. If security is disabled all authentications will succede and all returned * AuthUsers will allow everything. */ private final boolean m_enabled; /** * The configured authentication provider */ private final AuthProvider m_authProvider; /** * VoltDB Kerberos service login context */ private final LoginContext m_loginCtx; /** * VoltDB service principal name */ private final byte [] m_principalName; private final GSSManager m_gssManager; //Auth system keeps a array of all perms used for auth disabled user not for checking permissions. private static String[] m_perm_list; AuthSystem(final Database db, boolean enabled) { AuthProvider ap = null; LoginContext loginContext = null; GSSManager gssManager = null; byte [] principal = null; //Build static list of perms auth system knows. 
m_perm_list = new String[Permission.values().length]; int idx = 0; for (Permission p : Permission.values()) { m_perm_list[idx++] = p.name(); } m_enabled = enabled; if (!m_enabled) { m_authProvider = ap; m_loginCtx = loginContext; m_principalName = principal; m_gssManager = null; return; } m_authProvider = AuthProvider.fromProvider(db.getSecurityprovider()); if (m_authProvider == AuthProvider.KERBEROS) { try { loginContext = new LoginContext(VOLTDB_SERVICE_LOGIN_MODULE); } catch (LoginException|SecurityException ex) { VoltDB.crashGlobalVoltDB( "Cannot initialize JAAS LoginContext", true, ex); } try { loginContext.login(); principal = loginContext .getSubject() .getPrincipals() .iterator().next() .getName() .getBytes(Charsets.UTF_8) ; gssManager = GSSManager.getInstance(); } catch (AccountExpiredException ex) { VoltDB.crashGlobalVoltDB( "VoltDB assigned service principal has expired", true, ex); } catch(CredentialExpiredException ex) { VoltDB.crashGlobalVoltDB( "VoltDB assigned service principal credentials have expired", true, ex); } catch(FailedLoginException ex) { VoltDB.crashGlobalVoltDB( "VoltDB failed to authenticate against kerberos", true, ex); } catch (LoginException ex) { VoltDB.crashGlobalVoltDB( "VoltDB service principal failed to login", true, ex); } catch (Exception ex) { VoltDB.crashGlobalVoltDB( "Unexpected exception occured during service authentication", true, ex); } } m_loginCtx = loginContext; m_principalName = principal; m_gssManager = gssManager; /* * First associate all users with groups and vice versa */ for (org.voltdb.catalog.User catalogUser : db.getUsers()) { //shadow are bcrypt of sha-? 
String shadowPassword = catalogUser.getShadowpassword(); String sha256shadowPassword = catalogUser.getSha256shadowpassword(); byte sha1ShadowPassword[] = null; byte sha2ShadowPassword[] = null; if (shadowPassword.length() == 40) { /* * This is an old catalog with a SHA-1 password * Need to hex decode it */ sha1ShadowPassword = Encoder.hexDecode(shadowPassword); sha2ShadowPassword = Encoder.hexDecode(sha256shadowPassword); } else if (shadowPassword.length() != 60) { /* * If not 40 should be 60 since it is bcrypt */ VoltDB.crashGlobalVoltDB( "Found a shadowPassword in the catalog that was in an unrecogized format", true, null); } final AuthUser user = new AuthUser( sha1ShadowPassword, sha2ShadowPassword, shadowPassword, sha256shadowPassword, catalogUser.getTypeName()); m_users.put(user.m_name, user); for (org.voltdb.catalog.GroupRef catalogGroupRef : catalogUser.getGroups()) { final org.voltdb.catalog.Group catalogGroup = catalogGroupRef.getGroup(); AuthGroup group = null; if (!m_groups.containsKey(catalogGroup.getTypeName())) { group = new AuthGroup(catalogGroup.getTypeName(), Permission.getPermissionSetForGroup(catalogGroup)); m_groups.put(group.m_name, group); } else { group = m_groups.get(catalogGroup.getTypeName()); } user.m_permissions.addAll(group.m_permissions); group.m_users.add(user); user.m_groups.add(group); } //Cache the list so we dont rebuild everytime this is asked. user.m_permissions_list = new String[user.m_permissions.size()]; idx = 0; for (Permission p : user.m_permissions) { user.m_permissions_list[idx++] = p.toString(); } } for (org.voltdb.catalog.Group catalogGroup : db.getGroups()) { AuthGroup group = null; if (!m_groups.containsKey(catalogGroup.getTypeName())) { group = new AuthGroup(catalogGroup.getTypeName(), Permission.getPermissionSetForGroup(catalogGroup)); m_groups.put(group.m_name, group); //A group not associated with any users? Weird stuff. 
} else { group = m_groups.get(catalogGroup.getTypeName()); } } /* * Then iterate through each procedure and and add it * to the set of procedures for each specified user and for the members of * each specified group. */ for (org.voltdb.catalog.Procedure catalogProcedure : db.getProcedures()) { for (org.voltdb.catalog.UserRef catalogUserRef : catalogProcedure.getAuthusers()) { final org.voltdb.catalog.User catalogUser = catalogUserRef.getUser(); final AuthUser user = m_users.get(catalogUser.getTypeName()); if (user == null) { //Error case. Procedure has a user listed as authorized but no such user exists } else { user.m_authorizedProcedures.add(catalogProcedure); } } for (org.voltdb.catalog.GroupRef catalogGroupRef : catalogProcedure.getAuthgroups()) { final org.voltdb.catalog.Group catalogGroup = catalogGroupRef.getGroup(); final AuthGroup group = m_groups.get(catalogGroup.getTypeName()); if (group == null) { //Error case. Procedure has a group listed as authorized but no such user exists } else { for (AuthUser user : group.m_users) { user.m_authorizedProcedures.add(catalogProcedure); } } } } m_users = ImmutableMap.copyOf(m_users); m_groups = ImmutableMap.copyOf(m_groups); for (AuthUser user : m_users.values()) { user.finish(); } for (AuthGroup group : m_groups.values()) { group.finish(); } } //Is security enabled? public boolean isSecurityEnabled() { return m_enabled; } /** * Check the username and password against the catalog. Return the appropriate permission * set for that user if the information is correct and return null otherwise. If security is disabled * an AuthUser object that grants all permissions is returned. * @param username Name of the user to authenticate * @param password SHA-1 single hashed version of the users clear text password * @return The permission set for the user if authentication succeeds or null if authentication fails. 
*/ boolean authenticate(String username, byte[] password, ClientAuthHashScheme scheme) { if (!m_enabled) { return true; } final AuthUser user = m_users.get(username); if (user == null) { authLogger.l7dlog(Level.INFO, LogKeys.auth_AuthSystem_NoSuchUser.name(), new String[] {username}, null); return false; } boolean matched = true; if (user.m_sha1ShadowPassword != null || user.m_sha2ShadowPassword != null) { MessageDigest md = null; try { md = MessageDigest.getInstance(ClientAuthHashScheme.getDigestScheme(scheme)); } catch (NoSuchAlgorithmException e) { VoltDB.crashLocalVoltDB(e.getMessage(), true, e); } byte passwordHash[] = md.digest(password); /* * A n00bs attempt at constant time comparison */ for (int ii = 0; ii < passwordHash.length; ii++) { if (passwordHash[ii] != user.m_sha1ShadowPassword[ii]){ matched = false; } } } else { String pwToCheck = (scheme == ClientAuthHashScheme.HASH_SHA1 ? user.m_bcryptShadowPassword : user.m_bcryptSha2ShadowPassword); matched = BCrypt.checkpw(Encoder.hexEncode(password), pwToCheck); } if (matched) { logAuthSuccess(username); return true; } else { authLogger.l7dlog(Level.INFO, LogKeys.auth_AuthSystem_AuthFailedPasswordMistmatch.name(), new String[] {username}, null); return false; } } public static class AuthDisabledUser extends AuthUser { public AuthDisabledUser() { super(null, null, null, null, null); } @Override public boolean hasUserDefinedProcedurePermission(Procedure proc) { return true; } @Override public boolean hasPermission(Permission... 
p) { return true; } @Override public boolean authorizeConnector(String connectorName) { return true; } @Override public boolean isAuthEnabled() { return false; } } private final AuthUser m_authDisabledUser = new AuthDisabledUser(); public AuthUser getUser(String name) { if (!m_enabled) { return m_authDisabledUser; } return m_users.get(name); } public String[] getGroupNamesForUser(String userName) { if (userName == null) { return new String[] {}; } AuthUser user = getUser(userName); if (user == null) { return new String[] {}; } return user.getGroupNames(); } //Get users permission list not god for permission checking. public String[] getUserPermissionList(String userName) { if (!m_enabled) { return m_perm_list; } if (userName == null) { return new String[] {}; } AuthUser user = getUser(userName); if (user == null) { return new String[] {}; } return user.m_permissions_list; } public class HashAuthenticationRequest extends AuthenticationRequest { private final String m_user; private final byte [] m_password; public HashAuthenticationRequest(final String user, final byte [] hash, final ClientAuthHashScheme scheme) { m_user = user; m_password = hash; } @Override protected boolean authenticateImpl(ClientAuthHashScheme scheme) throws Exception { if (!m_enabled) { m_authenticatedUser = m_user; return true; } else if (m_authProvider != AuthProvider.HASH) { return false; } final AuthUser user = m_users.get(m_user); if (user == null) { authLogger.l7dlog(Level.INFO, LogKeys.auth_AuthSystem_NoSuchUser.name(), new String[] {m_user}, null); return false; } boolean matched = true; if (user.m_sha1ShadowPassword != null || user.m_sha2ShadowPassword != null) { MessageDigest md = null; try { md = MessageDigest.getInstance(ClientAuthHashScheme.getDigestScheme(scheme)); } catch (NoSuchAlgorithmException e) { VoltDB.crashLocalVoltDB(e.getMessage(), true, e); } byte passwordHash[] = md.digest(m_password); /* * A n00bs attempt at constant time comparison */ byte shaShadowPassword[] = 
(scheme == ClientAuthHashScheme.HASH_SHA1 ? user.m_sha1ShadowPassword : user.m_sha2ShadowPassword); for (int ii = 0; ii < passwordHash.length; ii++) { if (passwordHash[ii] != shaShadowPassword[ii]){ matched = false; } } } else { String pwToCheck = (scheme == ClientAuthHashScheme.HASH_SHA1 ? user.m_bcryptShadowPassword : user.m_bcryptSha2ShadowPassword); matched = BCrypt.checkpw(Encoder.hexEncode(m_password), pwToCheck); } if (matched) { m_authenticatedUser = m_user; logAuthSuccess(m_authenticatedUser); return true; } else { authLogger.l7dlog(Level.INFO, LogKeys.auth_AuthSystem_AuthFailedPasswordMistmatch.name(), new String[] {m_user}, null); return false; } } } private static void logAuthSuccess(String user) { //Make sure its logged per user String format = "Authenticated user " + user + "%s"; RateLimitedLogger.tryLogForMessage(System.currentTimeMillis(), 60, TimeUnit.SECONDS, authLogger, Level.INFO, format, ". This message is rate limited to once every 60 seconds."); } public class KerberosAuthenticationRequest extends AuthenticationRequest { private SocketChannel m_socket; public KerberosAuthenticationRequest(final SocketChannel socket) { m_socket = socket; } @Override protected boolean authenticateImpl(ClientAuthHashScheme scheme) throws Exception { if (!m_enabled) { m_authenticatedUser = "_^_pinco_pallo_^_"; return true; } else if (m_authProvider != AuthProvider.KERBEROS) { return false; } int msgSize = 4 // message size header + 1 // protocol version + 1 // result code + 4 // service name length + m_principalName.length; final ByteBuffer bb = ByteBuffer.allocate(4096); /* * write the service principal response. 
This gives the connecting client * the service principal name form which it constructs the GSS context * used in the client/service authentication handshake */ bb.putInt(msgSize-4).put(AUTH_HANDSHAKE_VERSION).put(AUTH_SERVICE_NAME); bb.putInt(m_principalName.length); bb.put(m_principalName); bb.flip(); while (bb.hasRemaining()) { m_socket.write(bb); } String authenticatedUser = Subject.doAs(m_loginCtx.getSubject(), new PrivilegedAction<String>() { /** * Establish an authenticated GSS security context * For further information on GSS please refer to * <a href="http://en.wikipedia.org/wiki/Generic_Security_Services_Application_Program_Interface">this</a> * article on Generic Security Services Application Program Interface */ @Override public String run() { GSSContext context = null; try { // derive the credentials from the authenticated service subject context = m_gssManager.createContext((GSSCredential)null); byte [] token; while (!context.isEstablished()) { // read in the next packet size bb.clear().limit(4); while (bb.hasRemaining()) { if (m_socket.read(bb) == -1) throw new EOFException(); } bb.flip(); int msgSize = bb.getInt(); if (msgSize > bb.capacity()) { authLogger.warn("Authentication packet exceeded alloted size"); return null; } // read the initiator (client) context token bb.clear().limit(msgSize); while (bb.hasRemaining()) { if (m_socket.read(bb) == -1) throw new EOFException(); } bb.flip(); byte version = bb.get(); if (version != AUTH_HANDSHAKE_VERSION) { authLogger.warn("Encountered unexpected authentication protocol version " + version); return null; } byte tag = bb.get(); if (tag != AUTH_HANDSHAKE) { authLogger.warn("Encountered unexpected authentication protocol tag " + tag); return null; } // process the initiator (client) context token. 
If it returns a non empty token // transmit it to the initiator token = context.acceptSecContext(bb.array(), bb.arrayOffset() + bb.position(), bb.remaining()); if (token != null) { msgSize = 4 + 1 + 1 + token.length; bb.clear().limit(msgSize); bb.putInt(msgSize-4).put(AUTH_HANDSHAKE_VERSION).put(AUTH_HANDSHAKE); bb.put(token); bb.flip(); while (bb.hasRemaining()) { m_socket.write(bb); } } } // at this juncture we an established security context between // the client and this service String authenticateUserName = context.getSrcName().toString(); // check if both ends are authenticated if (!context.getMutualAuthState()) { return null; } context.dispose(); context = null; return authenticateUserName; } catch (IOException|GSSException ex) { Throwables.propagate(ex); } finally { if (context != null) try { context.dispose(); } catch (Exception ignoreIt) {} } return null; } }); if (authenticatedUser != null) { final AuthUser user = m_users.get(authenticatedUser); if (user == null) { authLogger.l7dlog(Level.INFO, LogKeys.auth_AuthSystem_NoSuchUser.name(), new String[] {authenticatedUser}, null); return false; } m_authenticatedUser = authenticatedUser; logAuthSuccess(m_authenticatedUser); } return true; } } }
src/frontend/org/voltdb/AuthSystem.java
/* This file is part of VoltDB. * Copyright (C) 2008-2015 VoltDB Inc. * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as * published by the Free Software Foundation, either version 3 of the * License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with VoltDB. If not, see <http://www.gnu.org/licenses/>. */ package org.voltdb; import static org.voltdb.common.Constants.AUTH_HANDSHAKE; import static org.voltdb.common.Constants.AUTH_HANDSHAKE_VERSION; import static org.voltdb.common.Constants.AUTH_SERVICE_NAME; import java.io.EOFException; import java.io.IOException; import java.nio.ByteBuffer; import java.nio.channels.SocketChannel; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; import java.security.PrivilegedAction; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import javax.security.auth.Subject; import javax.security.auth.login.AccountExpiredException; import javax.security.auth.login.CredentialExpiredException; import javax.security.auth.login.FailedLoginException; import javax.security.auth.login.LoginContext; import javax.security.auth.login.LoginException; import org.ietf.jgss.GSSContext; import org.ietf.jgss.GSSCredential; import org.ietf.jgss.GSSException; import org.ietf.jgss.GSSManager; import org.mindrot.BCrypt; import org.voltcore.logging.Level; import org.voltcore.logging.VoltLogger; import org.voltdb.catalog.Connector; import org.voltdb.catalog.Database; import org.voltdb.catalog.Procedure; import 
org.voltdb.security.AuthenticationRequest; import org.voltdb.utils.Encoder; import org.voltdb.utils.LogKeys; import com.google_voltpatches.common.base.Charsets; import com.google_voltpatches.common.base.Throwables; import com.google_voltpatches.common.collect.ImmutableList; import com.google_voltpatches.common.collect.ImmutableMap; import com.google_voltpatches.common.collect.ImmutableSet; import java.util.EnumSet; import java.util.concurrent.TimeUnit; import org.voltcore.utils.RateLimitedLogger; import org.voltdb.client.ClientAuthHashScheme; import org.voltdb.common.Permission; /** * The AuthSystem parses authentication and permission information from the catalog and uses it to generate a representation * of the permissions assigned to users and groups. * */ public class AuthSystem { private static final VoltLogger authLogger = new VoltLogger("AUTH"); /** * JASS Login configuration entry designator */ public static final String VOLTDB_SERVICE_LOGIN_MODULE = System.getProperty("VOLTDB_SERVICE_LOGIN_MODULE", "VoltDBService"); /** * Authentication provider enumeration. It serves also as mapping mechanism * for providers, which are configured in the deployment file, and the login * packet service field. 
*/ public enum AuthProvider { HASH("hash","database"), KERBEROS("kerberos","kerberos"); private final static Map<String,AuthProvider> providerMap; private final static Map<String,AuthProvider> serviceMap; static { ImmutableMap.Builder<String, AuthProvider> pbldr = ImmutableMap.builder(); ImmutableMap.Builder<String, AuthProvider> sbldr = ImmutableMap.builder(); for (AuthProvider ap: values()) { pbldr.put(ap.provider,ap); sbldr.put(ap.service,ap); } providerMap = pbldr.build(); serviceMap = sbldr.build(); } final String provider; final String service; AuthProvider(String provider, String service) { this.provider = provider; this.service = service; } /** * @return its security provider equivalent */ public String provider() { return provider; } /** * @return its login packet service equivalent */ public String service() { return service; } public static AuthProvider fromProvider(String provider) { AuthProvider ap = providerMap.get(provider); if (ap == null) { throw new IllegalArgumentException("No provider mapping for " + provider); } return ap; } public static AuthProvider fromService(String service) { AuthProvider ap = serviceMap.get(service); if (ap == null) { throw new IllegalArgumentException("No service mapping for " + service); } return ap; } } /** * Representation of a permission group. 
* */ class AuthGroup { /** * Name of the group */ private final String m_name; /** * Set of users that are a member of this group */ private Set<AuthUser> m_users = new HashSet<AuthUser>(); private final EnumSet<Permission> m_permissions = EnumSet.noneOf(Permission.class); /** * * @param name Name of the group * @param sysproc Whether membership in this group grants permission to invoke system procedures * @param defaultproc Whether membership in this group grants permission to invoke default procedures * @param defaultprocread Whether membership in this group grants permission to invoke only read default procedures * @param adhoc Whether membership in this group grants permission to invoke adhoc queries */ private AuthGroup(String name, EnumSet<Permission> permissions) { m_name = name.intern(); m_permissions.addAll(permissions); } private void finish() { m_users = ImmutableSet.copyOf(m_users); } } /** * Representation of the permissions associated with a specific user along with a SHA-1 double hashed copy of the users * clear text password. * */ public static class AuthUser { /** * SHA-1 double hashed copy of the users clear text password */ private final byte[] m_sha1ShadowPassword; /** * SHA-2 double hashed copy of the users clear text password */ private final byte[] m_sha2ShadowPassword; /** * SHA-1 hashed and then bcrypted copy of the users clear text password */ private final String m_bcryptShadowPassword; /** * SHA-2 hashed and then bcrypted copy of the users clear text password */ private final String m_bcryptSha2ShadowPassword; /** * Name of the user */ public final String m_name; /** * Fast iterable list of groups this user is a member of. */ private List<AuthGroup> m_groups = new ArrayList<AuthGroup>(); private EnumSet<Permission> m_permissions = EnumSet.noneOf(Permission.class); private String[] m_permissions_list; /** * Fast membership check set of stored procedures this user has permission to invoke. 
* This is generated when the catalog is parsed and it includes procedures the user has permission * to invoke by virtue of group membership. The catalog entry for the stored procedure is used here. */ private Set<Procedure> m_authorizedProcedures = new HashSet<Procedure>(); /** * Set of export connectors this user is authorized to access. */ private Set<Connector> m_authorizedConnectors = new HashSet<Connector>(); /** * The constructor accepts the password as either sha1 or bcrypt. In practice * there will be only one passed in depending on the format of the password in the catalog. * The other will be null and that is used to determine how to hash the supplied password * for auth * @param shadowPassword SHA-1 double hashed copy of the users clear text password * @param name Name of the user */ private AuthUser(byte[] sha1ShadowPassword, byte[] sha2ShadowPassword, String bcryptShadowPassword, String bCryptSha2ShadowPassword, String name) { m_sha1ShadowPassword = sha1ShadowPassword; m_sha2ShadowPassword = sha2ShadowPassword; m_bcryptShadowPassword = bcryptShadowPassword; m_bcryptSha2ShadowPassword = bCryptSha2ShadowPassword; if (name != null) { m_name = name.intern(); } else { m_name = null; } } /** * Check if a user has permission to invoke the specified stored procedure * Handle both user-written procedures and default auto-generated ones. * @param proc Catalog entry for the stored procedure to check * @return true if the user has permission and false otherwise */ public boolean hasUserDefinedProcedurePermission(Procedure proc) { if (proc == null) { return false; } return hasPermission(Permission.ALLPROC) || m_authorizedProcedures.contains(proc); } /** * Check if a user has any one of given permission. * @return true if the user has permission and false otherwise */ public boolean hasPermission(Permission... perms) { for (int i = 0; i < perms.length;i++) { if (m_permissions.contains(perms[i])) { return true; } } return false; } /** * Get group names. 
* @return group name array */ public final String[] getGroupNames() { String[] groupNames = new String[m_groups.size()]; for (int i = 0; i < m_groups.size(); ++i) { groupNames[i] = m_groups.get(i).m_name; } return groupNames; } public boolean authorizeConnector(String connectorClass) { if (connectorClass == null) { return false; } for (Connector c : m_authorizedConnectors) { if (c.getLoaderclass().equals(connectorClass)) { return true; } } return false; } public boolean isAuthEnabled() { return true; } private void finish() { m_groups = ImmutableList.copyOf(m_groups); m_authorizedProcedures = ImmutableSet.copyOf(m_authorizedProcedures); m_authorizedConnectors = ImmutableSet.copyOf(m_authorizedConnectors); } } /** * Storage for user permissions keyed on the username */ private Map<String, AuthUser> m_users = new HashMap<String, AuthUser>(); /** * Storage for group permissions keyed on group name. */ private Map<String, AuthGroup> m_groups = new HashMap<String, AuthGroup>(); /** * Indicates whether security is enabled. If security is disabled all authentications will succede and all returned * AuthUsers will allow everything. */ private final boolean m_enabled; /** * The configured authentication provider */ private final AuthProvider m_authProvider; /** * VoltDB Kerberos service login context */ private final LoginContext m_loginCtx; /** * VoltDB service principal name */ private final byte [] m_principalName; private final GSSManager m_gssManager; //Auth system keeps a array of all perms used for auth disabled user not for checking permissions. private static String[] m_perm_list; AuthSystem(final Database db, boolean enabled) { AuthProvider ap = null; LoginContext loginContext = null; GSSManager gssManager = null; byte [] principal = null; //Build static list of perms auth system knows. 
m_perm_list = new String[Permission.values().length]; int idx = 0; for (Permission p : Permission.values()) { m_perm_list[idx++] = p.name(); } m_enabled = enabled; if (!m_enabled) { m_authProvider = ap; m_loginCtx = loginContext; m_principalName = principal; m_gssManager = null; return; } m_authProvider = AuthProvider.fromProvider(db.getSecurityprovider()); if (m_authProvider == AuthProvider.KERBEROS) { try { loginContext = new LoginContext(VOLTDB_SERVICE_LOGIN_MODULE); } catch (LoginException|SecurityException ex) { VoltDB.crashGlobalVoltDB( "Cannot initialize JAAS LoginContext", true, ex); } try { loginContext.login(); principal = loginContext .getSubject() .getPrincipals() .iterator().next() .getName() .getBytes(Charsets.UTF_8) ; gssManager = GSSManager.getInstance(); } catch (AccountExpiredException ex) { VoltDB.crashGlobalVoltDB( "VoltDB assigned service principal has expired", true, ex); } catch(CredentialExpiredException ex) { VoltDB.crashGlobalVoltDB( "VoltDB assigned service principal credentials have expired", true, ex); } catch(FailedLoginException ex) { VoltDB.crashGlobalVoltDB( "VoltDB failed to authenticate against kerberos", true, ex); } catch (LoginException ex) { VoltDB.crashGlobalVoltDB( "VoltDB service principal failed to login", true, ex); } catch (Exception ex) { VoltDB.crashGlobalVoltDB( "Unexpected exception occured during service authentication", true, ex); } } m_loginCtx = loginContext; m_principalName = principal; m_gssManager = gssManager; /* * First associate all users with groups and vice versa */ for (org.voltdb.catalog.User catalogUser : db.getUsers()) { //shadow are bcrypt of sha-? 
String shadowPassword = catalogUser.getShadowpassword(); String sha256shadowPassword = catalogUser.getSha256shadowpassword(); byte sha1ShadowPassword[] = null; byte sha2ShadowPassword[] = null; if (shadowPassword.length() == 40) { /* * This is an old catalog with a SHA-1 password * Need to hex decode it */ sha1ShadowPassword = Encoder.hexDecode(shadowPassword); sha2ShadowPassword = Encoder.hexDecode(sha256shadowPassword); } else if (shadowPassword.length() != 60) { /* * If not 40 should be 60 since it is bcrypt */ VoltDB.crashGlobalVoltDB( "Found a shadowPassword in the catalog that was in an unrecogized format", true, null); } final AuthUser user = new AuthUser( sha1ShadowPassword, sha2ShadowPassword, shadowPassword, sha256shadowPassword, catalogUser.getTypeName()); m_users.put(user.m_name, user); for (org.voltdb.catalog.GroupRef catalogGroupRef : catalogUser.getGroups()) { final org.voltdb.catalog.Group catalogGroup = catalogGroupRef.getGroup(); AuthGroup group = null; if (!m_groups.containsKey(catalogGroup.getTypeName())) { group = new AuthGroup(catalogGroup.getTypeName(), Permission.getPermissionSetForGroup(catalogGroup)); m_groups.put(group.m_name, group); } else { group = m_groups.get(catalogGroup.getTypeName()); } user.m_permissions.addAll(group.m_permissions); group.m_users.add(user); user.m_groups.add(group); } //Cache the list so we dont rebuild everytime this is asked. user.m_permissions_list = new String[user.m_permissions.size()]; idx = 0; for (Permission p : user.m_permissions) { user.m_permissions_list[idx++] = p.toString(); } } for (org.voltdb.catalog.Group catalogGroup : db.getGroups()) { AuthGroup group = null; if (!m_groups.containsKey(catalogGroup.getTypeName())) { group = new AuthGroup(catalogGroup.getTypeName(), Permission.getPermissionSetForGroup(catalogGroup)); m_groups.put(group.m_name, group); //A group not associated with any users? Weird stuff. 
} else { group = m_groups.get(catalogGroup.getTypeName()); } } /* * Then iterate through each procedure and and add it * to the set of procedures for each specified user and for the members of * each specified group. */ for (org.voltdb.catalog.Procedure catalogProcedure : db.getProcedures()) { for (org.voltdb.catalog.UserRef catalogUserRef : catalogProcedure.getAuthusers()) { final org.voltdb.catalog.User catalogUser = catalogUserRef.getUser(); final AuthUser user = m_users.get(catalogUser.getTypeName()); if (user == null) { //Error case. Procedure has a user listed as authorized but no such user exists } else { user.m_authorizedProcedures.add(catalogProcedure); } } for (org.voltdb.catalog.GroupRef catalogGroupRef : catalogProcedure.getAuthgroups()) { final org.voltdb.catalog.Group catalogGroup = catalogGroupRef.getGroup(); final AuthGroup group = m_groups.get(catalogGroup.getTypeName()); if (group == null) { //Error case. Procedure has a group listed as authorized but no such user exists } else { for (AuthUser user : group.m_users) { user.m_authorizedProcedures.add(catalogProcedure); } } } } m_users = ImmutableMap.copyOf(m_users); m_groups = ImmutableMap.copyOf(m_groups); for (AuthUser user : m_users.values()) { user.finish(); } for (AuthGroup group : m_groups.values()) { group.finish(); } } //Is security enabled? public boolean isSecurityEnabled() { return m_enabled; } /** * Check the username and password against the catalog. Return the appropriate permission * set for that user if the information is correct and return null otherwise. If security is disabled * an AuthUser object that grants all permissions is returned. * @param username Name of the user to authenticate * @param password SHA-1 single hashed version of the users clear text password * @return The permission set for the user if authentication succeeds or null if authentication fails. 
*/ boolean authenticate(String username, byte[] password, ClientAuthHashScheme scheme) { if (!m_enabled) { return true; } final AuthUser user = m_users.get(username); if (user == null) { authLogger.l7dlog(Level.INFO, LogKeys.auth_AuthSystem_NoSuchUser.name(), new String[] {username}, null); return false; } boolean matched = true; if (user.m_sha1ShadowPassword != null || user.m_sha2ShadowPassword != null) { MessageDigest md = null; try { md = MessageDigest.getInstance(ClientAuthHashScheme.getDigestScheme(scheme)); } catch (NoSuchAlgorithmException e) { VoltDB.crashLocalVoltDB(e.getMessage(), true, e); } byte passwordHash[] = md.digest(password); /* * A n00bs attempt at constant time comparison */ for (int ii = 0; ii < passwordHash.length; ii++) { if (passwordHash[ii] != user.m_sha1ShadowPassword[ii]){ matched = false; } } } else { String pwToCheck = (scheme == ClientAuthHashScheme.HASH_SHA1 ? user.m_bcryptShadowPassword : user.m_bcryptSha2ShadowPassword); matched = BCrypt.checkpw(Encoder.hexEncode(password), pwToCheck); } if (matched) { logAuthSuccess(username); return true; } else { authLogger.l7dlog(Level.INFO, LogKeys.auth_AuthSystem_AuthFailedPasswordMistmatch.name(), new String[] {username}, null); return false; } } public static class AuthDisabledUser extends AuthUser { public AuthDisabledUser() { super(null, null, null, null, null); } @Override public boolean hasUserDefinedProcedurePermission(Procedure proc) { return true; } @Override public boolean hasPermission(Permission... 
p) { return true; } @Override public boolean authorizeConnector(String connectorName) { return true; } @Override public boolean isAuthEnabled() { return false; } } private final AuthUser m_authDisabledUser = new AuthDisabledUser(); public AuthUser getUser(String name) { if (!m_enabled) { return m_authDisabledUser; } return m_users.get(name); } public String[] getGroupNamesForUser(String userName) { if (userName == null) { return new String[] {}; } AuthUser user = getUser(userName); if (user == null) { return new String[] {}; } return user.getGroupNames(); } //Get users permission list not god for permission checking. public String[] getUserPermissionList(String userName) { if (!m_enabled) { return m_perm_list; } if (userName == null) { return new String[] {}; } AuthUser user = getUser(userName); if (user == null) { return new String[] {}; } return user.m_permissions_list; } public class HashAuthenticationRequest extends AuthenticationRequest { private final String m_user; private final byte [] m_password; private ClientAuthHashScheme m_hashScheme; public HashAuthenticationRequest(final String user, final byte [] hash, final ClientAuthHashScheme scheme) { m_user = user; m_password = hash; m_hashScheme = scheme; } @Override protected boolean authenticateImpl(ClientAuthHashScheme scheme) throws Exception { if (!m_enabled) { m_authenticatedUser = m_user; return true; } else if (m_authProvider != AuthProvider.HASH) { return false; } final AuthUser user = m_users.get(m_user); if (user == null) { authLogger.l7dlog(Level.INFO, LogKeys.auth_AuthSystem_NoSuchUser.name(), new String[] {m_user}, null); return false; } boolean matched = true; if (user.m_sha1ShadowPassword != null || user.m_sha2ShadowPassword != null) { MessageDigest md = null; try { md = MessageDigest.getInstance(ClientAuthHashScheme.getDigestScheme(scheme)); } catch (NoSuchAlgorithmException e) { VoltDB.crashLocalVoltDB(e.getMessage(), true, e); } byte passwordHash[] = md.digest(m_password); /* * A n00bs 
attempt at constant time comparison */ byte shaShadowPassword[] = (scheme == ClientAuthHashScheme.HASH_SHA1 ? user.m_sha1ShadowPassword : user.m_sha2ShadowPassword); for (int ii = 0; ii < passwordHash.length; ii++) { if (passwordHash[ii] != shaShadowPassword[ii]){ matched = false; } } } else { String pwToCheck = (scheme == ClientAuthHashScheme.HASH_SHA1 ? user.m_bcryptShadowPassword : user.m_bcryptSha2ShadowPassword); matched = BCrypt.checkpw(Encoder.hexEncode(m_password), pwToCheck); } if (matched) { m_authenticatedUser = m_user; logAuthSuccess(m_authenticatedUser); return true; } else { authLogger.l7dlog(Level.INFO, LogKeys.auth_AuthSystem_AuthFailedPasswordMistmatch.name(), new String[] {m_user}, null); return false; } } } private static void logAuthSuccess(String user) { //Make sure its logged per user String format = "Authenticated user " + user + "%s"; RateLimitedLogger.tryLogForMessage(System.currentTimeMillis(), 60, TimeUnit.SECONDS, authLogger, Level.INFO, format, ". This message is rate limited to once every 60 seconds."); } public class KerberosAuthenticationRequest extends AuthenticationRequest { private SocketChannel m_socket; public KerberosAuthenticationRequest(final SocketChannel socket) { m_socket = socket; } @Override protected boolean authenticateImpl(ClientAuthHashScheme scheme) throws Exception { if (!m_enabled) { m_authenticatedUser = "_^_pinco_pallo_^_"; return true; } else if (m_authProvider != AuthProvider.KERBEROS) { return false; } int msgSize = 4 // message size header + 1 // protocol version + 1 // result code + 4 // service name length + m_principalName.length; final ByteBuffer bb = ByteBuffer.allocate(4096); /* * write the service principal response. 
This gives the connecting client * the service principal name form which it constructs the GSS context * used in the client/service authentication handshake */ bb.putInt(msgSize-4).put(AUTH_HANDSHAKE_VERSION).put(AUTH_SERVICE_NAME); bb.putInt(m_principalName.length); bb.put(m_principalName); bb.flip(); while (bb.hasRemaining()) { m_socket.write(bb); } String authenticatedUser = Subject.doAs(m_loginCtx.getSubject(), new PrivilegedAction<String>() { /** * Establish an authenticated GSS security context * For further information on GSS please refer to * <a href="http://en.wikipedia.org/wiki/Generic_Security_Services_Application_Program_Interface">this</a> * article on Generic Security Services Application Program Interface */ @Override public String run() { GSSContext context = null; try { // derive the credentials from the authenticated service subject context = m_gssManager.createContext((GSSCredential)null); byte [] token; while (!context.isEstablished()) { // read in the next packet size bb.clear().limit(4); while (bb.hasRemaining()) { if (m_socket.read(bb) == -1) throw new EOFException(); } bb.flip(); int msgSize = bb.getInt(); if (msgSize > bb.capacity()) { authLogger.warn("Authentication packet exceeded alloted size"); return null; } // read the initiator (client) context token bb.clear().limit(msgSize); while (bb.hasRemaining()) { if (m_socket.read(bb) == -1) throw new EOFException(); } bb.flip(); byte version = bb.get(); if (version != AUTH_HANDSHAKE_VERSION) { authLogger.warn("Encountered unexpected authentication protocol version " + version); return null; } byte tag = bb.get(); if (tag != AUTH_HANDSHAKE) { authLogger.warn("Encountered unexpected authentication protocol tag " + tag); return null; } // process the initiator (client) context token. 
If it returns a non empty token // transmit it to the initiator token = context.acceptSecContext(bb.array(), bb.arrayOffset() + bb.position(), bb.remaining()); if (token != null) { msgSize = 4 + 1 + 1 + token.length; bb.clear().limit(msgSize); bb.putInt(msgSize-4).put(AUTH_HANDSHAKE_VERSION).put(AUTH_HANDSHAKE); bb.put(token); bb.flip(); while (bb.hasRemaining()) { m_socket.write(bb); } } } // at this juncture we an established security context between // the client and this service String authenticateUserName = context.getSrcName().toString(); // check if both ends are authenticated if (!context.getMutualAuthState()) { return null; } context.dispose(); context = null; return authenticateUserName; } catch (IOException|GSSException ex) { Throwables.propagate(ex); } finally { if (context != null) try { context.dispose(); } catch (Exception ignoreIt) {} } return null; } }); if (authenticatedUser != null) { final AuthUser user = m_users.get(authenticatedUser); if (user == null) { authLogger.l7dlog(Level.INFO, LogKeys.auth_AuthSystem_NoSuchUser.name(), new String[] {authenticatedUser}, null); return false; } m_authenticatedUser = authenticatedUser; logAuthSuccess(m_authenticatedUser); } return true; } } }
Remove unused variable.
src/frontend/org/voltdb/AuthSystem.java
Remove unused variable.
<ide><path>rc/frontend/org/voltdb/AuthSystem.java <ide> <ide> private final String m_user; <ide> private final byte [] m_password; <del> private ClientAuthHashScheme m_hashScheme; <ide> <ide> public HashAuthenticationRequest(final String user, final byte [] hash, final ClientAuthHashScheme scheme) { <ide> m_user = user; <ide> m_password = hash; <del> m_hashScheme = scheme; <ide> } <ide> <ide> @Override
Java
mpl-2.0
45f4902490fed21ddc0f5a42b10b5ed7e9603200
0
Wurst-Imperium/Wurst-Client-for-MC-1.9.X,Wurst-Imperium/Wurst-Client-for-MC-1.9.X,Wurst-Imperium/Wurst-MC-1.9
src/net/wurstclient/gui/options/zoom/GuiZoomManager.java
/* * Copyright 2014 - 2017 | Wurst-Imperium | All rights reserved. * * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ package net.wurstclient.gui.options.zoom; import org.lwjgl.input.Keyboard; import net.minecraft.client.gui.GuiButton; import net.minecraft.client.gui.GuiScreen; import net.wurstclient.WurstClient; import net.wurstclient.files.ConfigFiles; import net.wurstclient.gui.options.GuiPressAKey; import net.wurstclient.gui.options.GuiPressAKeyCallback; import net.wurstclient.options.OptionsManager; public class GuiZoomManager extends GuiScreen implements GuiPressAKeyCallback { private GuiScreen prevScreen; public GuiZoomManager(GuiScreen par1GuiScreen) { prevScreen = par1GuiScreen; } @Override public void initGui() { buttonList.clear(); buttonList.add(new GuiButton(0, width / 2 - 100, height / 4 + 144 - 16, 200, 20, "Back")); buttonList.add(new GuiButton(1, width / 2 - 79, height / 4 + 24 - 16, 158, 20, "Zoom Key: " + Keyboard .getKeyName(WurstClient.INSTANCE.options.zoom.keybind))); buttonList.add(new GuiButton(2, width / 2 - 79, height / 4 + 72 - 16, 50, 20, "More")); buttonList.add(new GuiButton(3, width / 2 - 25, height / 4 + 72 - 16, 50, 20, "Less")); buttonList.add(new GuiButton(4, width / 2 + 29, height / 4 + 72 - 16, 50, 20, "Default")); buttonList.add(new GuiButton(5, width / 2 - 79, height / 4 + 96 - 16, 158, 20, "Use Mouse Wheel: " + (WurstClient.INSTANCE.options.zoom.scroll ? "ON" : "OFF"))); WurstClient.INSTANCE.analytics.trackPageView("/options/keybind-manager", "Keybind Manager"); } /** * Called from the main game loop to update the screen. 
*/ @Override public void updateScreen() { } @Override protected void actionPerformed(GuiButton button) { if(button.enabled) switch(button.id) { case 0: // Back mc.displayGuiScreen(prevScreen); break; case 1: // Zoom Key mc.displayGuiScreen(new GuiPressAKey(this)); break; case 2: // Zoom Level More WurstClient.INSTANCE.options.zoom.level = Math.min(Math.round( WurstClient.INSTANCE.options.zoom.level * 10F + 1F) / 10F, 10F); ConfigFiles.OPTIONS.save(); break; case 3: // Zoom Level Less WurstClient.INSTANCE.options.zoom.level = Math.max(Math.round( WurstClient.INSTANCE.options.zoom.level * 10F - 1F) / 10F, 1F); ConfigFiles.OPTIONS.save(); break; case 4: // Zoom Level Default WurstClient.INSTANCE.options.zoom.level = new OptionsManager().zoom.level; ConfigFiles.OPTIONS.save(); break; case 5: // Use Mouse Wheel WurstClient.INSTANCE.options.zoom.scroll = !WurstClient.INSTANCE.options.zoom.scroll; ConfigFiles.OPTIONS.save(); buttonList.get(5).displayString = "Use Mouse Wheel: " + (WurstClient.INSTANCE.options.zoom.scroll ? "ON" : "OFF"); break; } } /** * Fired when a key is typed. This is the equivalent of * KeyListener.keyTyped(KeyEvent e). */ @Override protected void keyTyped(char par1, int par2) { } /** * Draws the screen and all the components in it. */ @Override public void drawScreen(int par1, int par2, float par3) { drawBackground(0); drawCenteredString(fontRendererObj, "Zoom Manager", width / 2, 40, 0xffffff); drawString( fontRendererObj, "Zoom Level: " + WurstClient.INSTANCE.options.zoom.level + " x normal", width / 2 - 75, height / 4 + 44, 0xcccccc); super.drawScreen(par1, par2, par3); } @Override public void setKey(String key) { WurstClient.INSTANCE.options.zoom.keybind = Keyboard.getKeyIndex(key); ConfigFiles.OPTIONS.save(); buttonList.get(1).displayString = "Zoom Key: " + key; } }
Move GuiZoomManager
src/net/wurstclient/gui/options/zoom/GuiZoomManager.java
Move GuiZoomManager
<ide><path>rc/net/wurstclient/gui/options/zoom/GuiZoomManager.java <del>/* <del> * Copyright 2014 - 2017 | Wurst-Imperium | All rights reserved. <del> * <del> * This Source Code Form is subject to the terms of the Mozilla Public <del> * License, v. 2.0. If a copy of the MPL was not distributed with this <del> * file, You can obtain one at http://mozilla.org/MPL/2.0/. <del> */ <del>package net.wurstclient.gui.options.zoom; <del> <del>import org.lwjgl.input.Keyboard; <del> <del>import net.minecraft.client.gui.GuiButton; <del>import net.minecraft.client.gui.GuiScreen; <del>import net.wurstclient.WurstClient; <del>import net.wurstclient.files.ConfigFiles; <del>import net.wurstclient.gui.options.GuiPressAKey; <del>import net.wurstclient.gui.options.GuiPressAKeyCallback; <del>import net.wurstclient.options.OptionsManager; <del> <del>public class GuiZoomManager extends GuiScreen implements GuiPressAKeyCallback <del>{ <del> private GuiScreen prevScreen; <del> <del> public GuiZoomManager(GuiScreen par1GuiScreen) <del> { <del> prevScreen = par1GuiScreen; <del> } <del> <del> @Override <del> public void initGui() <del> { <del> buttonList.clear(); <del> buttonList.add(new GuiButton(0, width / 2 - 100, height / 4 + 144 - 16, <del> 200, 20, "Back")); <del> buttonList.add(new GuiButton(1, width / 2 - 79, height / 4 + 24 - 16, <del> 158, 20, "Zoom Key: " + Keyboard <del> .getKeyName(WurstClient.INSTANCE.options.zoom.keybind))); <del> buttonList.add(new GuiButton(2, width / 2 - 79, height / 4 + 72 - 16, <del> 50, 20, "More")); <del> buttonList.add(new GuiButton(3, width / 2 - 25, height / 4 + 72 - 16, <del> 50, 20, "Less")); <del> buttonList.add(new GuiButton(4, width / 2 + 29, height / 4 + 72 - 16, <del> 50, 20, "Default")); <del> buttonList.add(new GuiButton(5, width / 2 - 79, height / 4 + 96 - 16, <del> 158, 20, "Use Mouse Wheel: " <del> + (WurstClient.INSTANCE.options.zoom.scroll ? 
"ON" : "OFF"))); <del> WurstClient.INSTANCE.analytics.trackPageView("/options/keybind-manager", <del> "Keybind Manager"); <del> } <del> <del> /** <del> * Called from the main game loop to update the screen. <del> */ <del> @Override <del> public void updateScreen() <del> { <del> <del> } <del> <del> @Override <del> protected void actionPerformed(GuiButton button) <del> { <del> if(button.enabled) <del> switch(button.id) <del> { <del> case 0: <del> // Back <del> mc.displayGuiScreen(prevScreen); <del> break; <del> case 1: <del> // Zoom Key <del> mc.displayGuiScreen(new GuiPressAKey(this)); <del> break; <del> case 2: <del> // Zoom Level More <del> WurstClient.INSTANCE.options.zoom.level = Math.min(Math.round( <del> WurstClient.INSTANCE.options.zoom.level * 10F + 1F) / 10F, <del> 10F); <del> ConfigFiles.OPTIONS.save(); <del> break; <del> case 3: <del> // Zoom Level Less <del> WurstClient.INSTANCE.options.zoom.level = Math.max(Math.round( <del> WurstClient.INSTANCE.options.zoom.level * 10F - 1F) / 10F, <del> 1F); <del> ConfigFiles.OPTIONS.save(); <del> break; <del> case 4: <del> // Zoom Level Default <del> WurstClient.INSTANCE.options.zoom.level = <del> new OptionsManager().zoom.level; <del> ConfigFiles.OPTIONS.save(); <del> break; <del> case 5: <del> // Use Mouse Wheel <del> WurstClient.INSTANCE.options.zoom.scroll = <del> !WurstClient.INSTANCE.options.zoom.scroll; <del> ConfigFiles.OPTIONS.save(); <del> buttonList.get(5).displayString = "Use Mouse Wheel: " <del> + (WurstClient.INSTANCE.options.zoom.scroll ? "ON" : "OFF"); <del> break; <del> } <del> } <del> <del> /** <del> * Fired when a key is typed. This is the equivalent of <del> * KeyListener.keyTyped(KeyEvent e). <del> */ <del> @Override <del> protected void keyTyped(char par1, int par2) <del> { <del> <del> } <del> <del> /** <del> * Draws the screen and all the components in it. 
<del> */ <del> @Override <del> public void drawScreen(int par1, int par2, float par3) <del> { <del> drawBackground(0); <del> drawCenteredString(fontRendererObj, "Zoom Manager", width / 2, 40, <del> 0xffffff); <del> drawString( <del> fontRendererObj, "Zoom Level: " <del> + WurstClient.INSTANCE.options.zoom.level + " x normal", <del> width / 2 - 75, height / 4 + 44, 0xcccccc); <del> super.drawScreen(par1, par2, par3); <del> } <del> <del> @Override <del> public void setKey(String key) <del> { <del> WurstClient.INSTANCE.options.zoom.keybind = Keyboard.getKeyIndex(key); <del> ConfigFiles.OPTIONS.save(); <del> buttonList.get(1).displayString = "Zoom Key: " + key; <del> } <del>}
Java
apache-2.0
90434a66ad26f48d9c9ccacb5d530bab1c1efc77
0
sibok666/flowable-engine,zwets/flowable-engine,lsmall/flowable-engine,zwets/flowable-engine,motorina0/flowable-engine,roberthafner/flowable-engine,martin-grofcik/flowable-engine,yvoswillens/flowable-engine,robsoncardosoti/flowable-engine,roberthafner/flowable-engine,gro-mar/flowable-engine,yvoswillens/flowable-engine,motorina0/flowable-engine,marcus-nl/flowable-engine,stefan-ziel/Activiti,lsmall/flowable-engine,stephraleigh/flowable-engine,gro-mar/flowable-engine,stefan-ziel/Activiti,marcus-nl/flowable-engine,Activiti/Activiti,robsoncardosoti/flowable-engine,dbmalkovsky/flowable-engine,paulstapleton/flowable-engine,motorina0/flowable-engine,sibok666/flowable-engine,roberthafner/flowable-engine,lsmall/flowable-engine,flowable/flowable-engine,robsoncardosoti/flowable-engine,paulstapleton/flowable-engine,paulstapleton/flowable-engine,marcus-nl/flowable-engine,sibok666/flowable-engine,roberthafner/flowable-engine,stephraleigh/flowable-engine,motorina0/flowable-engine,stephraleigh/flowable-engine,flowable/flowable-engine,stefan-ziel/Activiti,Activiti/Activiti,martin-grofcik/flowable-engine,stefan-ziel/Activiti,stephraleigh/flowable-engine,zwets/flowable-engine,zwets/flowable-engine,gro-mar/flowable-engine,dbmalkovsky/flowable-engine,yvoswillens/flowable-engine,marcus-nl/flowable-engine,robsoncardosoti/flowable-engine,paulstapleton/flowable-engine,yvoswillens/flowable-engine,martin-grofcik/flowable-engine,flowable/flowable-engine,lsmall/flowable-engine,martin-grofcik/flowable-engine,gro-mar/flowable-engine,dbmalkovsky/flowable-engine,dbmalkovsky/flowable-engine,sibok666/flowable-engine,flowable/flowable-engine
/* Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.activiti.engine.impl.bpmn.parser; import java.io.InputStream; import java.net.URL; import java.text.StringCharacterIterator; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.logging.Level; import java.util.logging.Logger; import org.activiti.engine.ActivitiException; import org.activiti.engine.delegate.ExecutionListener; import org.activiti.engine.delegate.Expression; import org.activiti.engine.delegate.TaskListener; import org.activiti.engine.impl.Condition; import org.activiti.engine.impl.bpmn.behavior.AbstractBpmnActivityBehavior; import org.activiti.engine.impl.bpmn.behavior.BoundaryEventActivityBehavior; import org.activiti.engine.impl.bpmn.behavior.BusinessRuleTaskActivityBehavior; import org.activiti.engine.impl.bpmn.behavior.CallActivityBehavior; import org.activiti.engine.impl.bpmn.behavior.CancelBoundaryEventActivityBehavior; import org.activiti.engine.impl.bpmn.behavior.CancelEndEventActivityBehavior; import org.activiti.engine.impl.bpmn.behavior.ErrorEndEventActivityBehavior; import org.activiti.engine.impl.bpmn.behavior.EventBasedGatewayActivityBehavior; import org.activiti.engine.impl.bpmn.behavior.EventSubProcessStartEventActivityBehavior; import org.activiti.engine.impl.bpmn.behavior.ExclusiveGatewayActivityBehavior; import org.activiti.engine.impl.bpmn.behavior.InclusiveGatewayActivityBehavior; import 
org.activiti.engine.impl.bpmn.behavior.IntermediateCatchEventActivitiBehaviour; import org.activiti.engine.impl.bpmn.behavior.IntermediateThrowCompensationEventActivityBehavior; import org.activiti.engine.impl.bpmn.behavior.IntermediateThrowNoneEventActivityBehavior; import org.activiti.engine.impl.bpmn.behavior.IntermediateThrowSignalEventActivityBehavior; import org.activiti.engine.impl.bpmn.behavior.MailActivityBehavior; import org.activiti.engine.impl.bpmn.behavior.ManualTaskActivityBehavior; import org.activiti.engine.impl.bpmn.behavior.MultiInstanceActivityBehavior; import org.activiti.engine.impl.bpmn.behavior.NoneEndEventActivityBehavior; import org.activiti.engine.impl.bpmn.behavior.NoneStartEventActivityBehavior; import org.activiti.engine.impl.bpmn.behavior.ParallelGatewayActivityBehavior; import org.activiti.engine.impl.bpmn.behavior.ParallelMultiInstanceBehavior; import org.activiti.engine.impl.bpmn.behavior.ReceiveTaskActivityBehavior; import org.activiti.engine.impl.bpmn.behavior.ScriptTaskActivityBehavior; import org.activiti.engine.impl.bpmn.behavior.SequentialMultiInstanceBehavior; import org.activiti.engine.impl.bpmn.behavior.ServiceTaskDelegateExpressionActivityBehavior; import org.activiti.engine.impl.bpmn.behavior.ServiceTaskExpressionActivityBehavior; import org.activiti.engine.impl.bpmn.behavior.ShellActivityBehavior; import org.activiti.engine.impl.bpmn.behavior.SubProcessActivityBehavior; import org.activiti.engine.impl.bpmn.behavior.TaskActivityBehavior; import org.activiti.engine.impl.bpmn.behavior.TerminateEndEventActivityBehavior; import org.activiti.engine.impl.bpmn.behavior.TransactionActivityBehavior; import org.activiti.engine.impl.bpmn.behavior.UserTaskActivityBehavior; import org.activiti.engine.impl.bpmn.behavior.WebServiceActivityBehavior; import org.activiti.engine.impl.bpmn.data.AbstractDataAssociation; import org.activiti.engine.impl.bpmn.data.Assignment; import org.activiti.engine.impl.bpmn.data.ClassStructureDefinition; 
import org.activiti.engine.impl.bpmn.data.Data; import org.activiti.engine.impl.bpmn.data.DataRef; import org.activiti.engine.impl.bpmn.data.IOSpecification; import org.activiti.engine.impl.bpmn.data.ItemDefinition; import org.activiti.engine.impl.bpmn.data.ItemKind; import org.activiti.engine.impl.bpmn.data.SimpleDataInputAssociation; import org.activiti.engine.impl.bpmn.data.StructureDefinition; import org.activiti.engine.impl.bpmn.data.TransformationDataOutputAssociation; import org.activiti.engine.impl.bpmn.helper.ClassDelegate; import org.activiti.engine.impl.bpmn.listener.DelegateExpressionExecutionListener; import org.activiti.engine.impl.bpmn.listener.DelegateExpressionTaskListener; import org.activiti.engine.impl.bpmn.listener.ExpressionExecutionListener; import org.activiti.engine.impl.bpmn.listener.ExpressionTaskListener; import org.activiti.engine.impl.bpmn.parser.BpmnParseListener; import org.activiti.engine.impl.bpmn.webservice.BpmnInterface; import org.activiti.engine.impl.bpmn.webservice.BpmnInterfaceImplementation; import org.activiti.engine.impl.bpmn.webservice.MessageDefinition; import org.activiti.engine.impl.bpmn.webservice.MessageImplicitDataInputAssociation; import org.activiti.engine.impl.bpmn.webservice.MessageImplicitDataOutputAssociation; import org.activiti.engine.impl.bpmn.webservice.Operation; import org.activiti.engine.impl.bpmn.webservice.OperationImplementation; import org.activiti.engine.impl.el.ExpressionManager; import org.activiti.engine.impl.el.FixedValue; import org.activiti.engine.impl.el.UelExpressionCondition; import org.activiti.engine.impl.form.DefaultStartFormHandler; import org.activiti.engine.impl.form.DefaultTaskFormHandler; import org.activiti.engine.impl.form.StartFormHandler; import org.activiti.engine.impl.form.TaskFormHandler; import org.activiti.engine.impl.jobexecutor.TimerCatchIntermediateEventJobHandler; import org.activiti.engine.impl.jobexecutor.TimerDeclarationImpl; import 
org.activiti.engine.impl.jobexecutor.TimerDeclarationType; import org.activiti.engine.impl.jobexecutor.TimerExecuteNestedActivityJobHandler; import org.activiti.engine.impl.jobexecutor.TimerStartEventJobHandler; import org.activiti.engine.impl.persistence.entity.DeploymentEntity; import org.activiti.engine.impl.persistence.entity.JobEntity; import org.activiti.engine.impl.persistence.entity.ProcessDefinitionEntity; import org.activiti.engine.impl.pvm.PvmTransition; import org.activiti.engine.impl.pvm.delegate.ActivityBehavior; import org.activiti.engine.impl.pvm.process.ActivityImpl; import org.activiti.engine.impl.pvm.process.HasDIBounds; import org.activiti.engine.impl.pvm.process.Lane; import org.activiti.engine.impl.pvm.process.LaneSet; import org.activiti.engine.impl.pvm.process.ParticipantProcess; import org.activiti.engine.impl.pvm.process.ProcessDefinitionImpl; import org.activiti.engine.impl.pvm.process.ScopeImpl; import org.activiti.engine.impl.pvm.process.TransitionImpl; import org.activiti.engine.impl.scripting.ScriptingEngines; import org.activiti.engine.impl.task.TaskDefinition; import org.activiti.engine.impl.util.ReflectUtil; import org.activiti.engine.impl.util.xml.Element; import org.activiti.engine.impl.util.xml.Parse; import org.activiti.engine.impl.variable.VariableDeclaration; import org.activiti.engine.repository.ProcessDefinition; /** * Specific parsing of one BPMN 2.0 XML file, created by the {@link BpmnParser}. 
* * @author Tom Baeyens * @author Joram Barrez * @author Christian Stettler * @author Frederik Heremans * @author Falko Menge * @author Esteban Robles * @author Daniel Meyer * @author Saeid Mirzaei */ public class BpmnParse extends Parse { protected static final Logger LOGGER = Logger.getLogger(BpmnParse.class.getName()); public static final String PROPERTYNAME_DOCUMENTATION = "documentation"; public static final String PROPERTYNAME_INITIAL = "initial"; public static final String PROPERTYNAME_INITIATOR_VARIABLE_NAME = "initiatorVariableName"; public static final String PROPERTYNAME_CONDITION = "condition"; public static final String PROPERTYNAME_CONDITION_TEXT = "conditionText"; public static final String PROPERTYNAME_VARIABLE_DECLARATIONS = "variableDeclarations"; public static final String PROPERTYNAME_TIMER_DECLARATION = "timerDeclarations"; public static final String PROPERTYNAME_ISEXPANDED = "isExpanded"; public static final String PROPERTYNAME_START_TIMER = "timerStart"; public static final String PROPERTYNAME_COMPENSATION_HANDLER_ID = "compensationHandler"; public static final String PROPERTYNAME_IS_FOR_COMPENSATION = "isForCompensation"; public static final String PROPERTYNAME_ERROR_EVENT_DEFINITIONS = "errorEventDefinitions"; public static final String PROPERTYNAME_EVENT_SUBSCRIPTION_DECLARATION = "eventDefinitions"; /* process start authorization specific finals */ protected static final String POTENTIAL_STARTER = "potentialStarter"; protected static final String CANDIDATE_STARTER_USERS_EXTENSION = "candidateStarterUsers"; protected static final String CANDIDATE_STARTER_GROUPS_EXTENSION = "candidateStarterGroups"; protected static final String ATTRIBUTEVALUE_T_FORMAL_EXPRESSION = BpmnParser.BPMN20_NS + ":tFormalExpression"; /** The deployment to which the parsed process definitions will be added. */ protected DeploymentEntity deployment; /** The end result of the parsing: a list of process definition. 
*/ protected List<ProcessDefinitionEntity> processDefinitions = new ArrayList<ProcessDefinitionEntity>(); /** Mapping of found errors in BPMN 2.0 file */ protected Map<String, Error> errors = new HashMap<String, Error>(); /** A map for storing sequence flow based on their id during parsing. */ protected Map<String, TransitionImpl> sequenceFlows; /** A list of all element IDs. This allows us to parse only what we actually support but * still validate the references among elements we do not support. */ protected List<String> elementIds = new ArrayList<String>(); /** A map for storing the process references of participants */ protected Map<String, String> participantProcesses = new HashMap<String, String>(); /** * Mapping containing values stored during the first phase of parsing since * other elements can reference these messages. * * All the map's elements are defined outside the process definition(s), which * means that this map doesn't need to be re-initialized for each new process * definition. 
*/ protected Map<String, MessageDefinition> messages = new HashMap<String, MessageDefinition>(); protected Map<String, StructureDefinition> structures = new HashMap<String, StructureDefinition>(); protected Map<String, BpmnInterfaceImplementation> interfaceImplementations = new HashMap<String, BpmnInterfaceImplementation>(); protected Map<String, OperationImplementation> operationImplementations = new HashMap<String, OperationImplementation>(); protected Map<String, ItemDefinition> itemDefinitions = new HashMap<String, ItemDefinition>(); protected Map<String, BpmnInterface> bpmnInterfaces = new HashMap<String, BpmnInterface>(); protected Map<String, Operation> operations = new HashMap<String, Operation>(); protected Map<String, SignalDefinition> signals = new HashMap<String, SignalDefinition>(); // Members protected ExpressionManager expressionManager; protected List<BpmnParseListener> parseListeners; protected Map<String, XMLImporter> importers = new HashMap<String, XMLImporter>(); protected Map<String, String> prefixs = new HashMap<String, String>(); protected String targetNamespace; /** * Constructor to be called by the {@link BpmnParser}. 
*/ public BpmnParse(BpmnParser parser) { super(parser); this.expressionManager = parser.getExpressionManager(); this.parseListeners = parser.getParseListeners(); setSchemaResource(ReflectUtil.getResource(BpmnParser.BPMN_20_SCHEMA_LOCATION).toString()); this.initializeXSDItemDefinitions(); } protected void initializeXSDItemDefinitions() { this.itemDefinitions.put("http://www.w3.org/2001/XMLSchema:string", new ItemDefinition("http://www.w3.org/2001/XMLSchema:string", new ClassStructureDefinition(String.class))); } public BpmnParse deployment(DeploymentEntity deployment) { this.deployment = deployment; return this; } @Override public BpmnParse execute() { super.execute(); // schema validation try { parseRootElement(); } catch (Exception e) { LOGGER.log(Level.SEVERE, "Unknown exception", e); // ALL unexpected exceptions should bubble up since they are not handled // accordingly by onderlying parse-methods and can't be deployed throw new ActivitiException("Error while parsing process: " + e.getMessage(), e); } finally { if (hasWarnings()) { logWarnings(); } if (hasErrors()) { throwActivitiExceptionForErrors(); } } return this; } /** * Parses the 'definitions' root element */ protected void parseRootElement() { collectElementIds(); parseDefinitionsAttributes(); parseImports(); parseItemDefinitions(); parseMessages(); parseInterfaces(); parseErrors(); parseSignals(); parseProcessDefinitions(); parseCollaboration(); // Diagram interchange parsing must be after parseProcessDefinitions, // since it depends and sets values on existing process definition objects parseDiagramInterchangeElements(); for (BpmnParseListener parseListener : parseListeners) { parseListener.parseRootElement(rootElement, getProcessDefinitions()); } } protected void collectElementIds() { rootElement.collectIds(elementIds); } protected void parseDefinitionsAttributes() { this.targetNamespace = rootElement.attribute("targetNamespace"); for (String attribute : rootElement.attributes()) { if 
(attribute.startsWith("xmlns:")) { String prefixValue = rootElement.attribute(attribute); String prefixName = attribute.substring(6); this.prefixs.put(prefixName, prefixValue); } } } protected String resolveName(String name) { if (name == null) { return null; } int indexOfP = name.indexOf(':'); if (indexOfP != -1) { String prefix = name.substring(0, indexOfP); String resolvedPrefix = this.prefixs.get(prefix); return resolvedPrefix + ":" + name.substring(indexOfP + 1); } else { return this.targetNamespace + ":" + name; } } /** * Parses the rootElement importing structures * * @param rootElement * The root element of the XML file. */ protected void parseImports() { List<Element> imports = rootElement.elements("import"); for (Element theImport : imports) { String importType = theImport.attribute("importType"); XMLImporter importer = this.getImporter(importType, theImport); if (importer == null) { addError("Could not import item of type " + importType, theImport); } else { importer.importFrom(theImport, this); } } } protected XMLImporter getImporter(String importType, Element theImport) { if (this.importers.containsKey(importType)) { return this.importers.get(importType); } else { if (importType.equals("http://schemas.xmlsoap.org/wsdl/")) { Class< ? > wsdlImporterClass; try { wsdlImporterClass = Class.forName("org.activiti.engine.impl.webservice.CxfWSDLImporter", true, Thread.currentThread().getContextClassLoader()); XMLImporter newInstance = (XMLImporter) wsdlImporterClass.newInstance(); this.importers.put(importType, newInstance); return newInstance; } catch (Exception e) { addError("Could not find importer for type " + importType, theImport); } } return null; } } /** * Parses the itemDefinitions of the given definitions file. Item definitions * are not contained within a process element, but they can be referenced from * inner process elements. * * @param definitionsElement * The root element of the XML file. 
*/
public void parseItemDefinitions() {
  for (Element itemDefinitionElement : rootElement.elements("itemDefinition")) {
    String id = itemDefinitionElement.attribute("id");
    String structureRef = this.resolveName(itemDefinitionElement.attribute("structureRef"));
    String itemKind = itemDefinitionElement.attribute("itemKind");
    StructureDefinition structure = null;

    try {
      // it is a class
      Class< ? > classStructure = ReflectUtil.loadClass(structureRef);
      structure = new ClassStructureDefinition(classStructure);
    } catch (ActivitiException e) {
      // it is a reference to a different structure
      structure = this.structures.get(structureRef);
    }

    // Item definitions are registered under a namespace-qualified id.
    ItemDefinition itemDefinition = new ItemDefinition(this.targetNamespace + ":" + id, structure);
    if (itemKind != null) {
      // NOTE(review): ItemKind.valueOf throws IllegalArgumentException for an
      // unknown itemKind value instead of reporting it via addError — confirm intended.
      itemDefinition.setItemKind(ItemKind.valueOf(itemKind));
    }
    itemDefinitions.put(itemDefinition.getId(), itemDefinition);
  }
}

/**
 * Parses the messages of the given definitions file. Messages are not
 * contained within a process element, but they can be referenced from inner
 * process elements.
 */
public void parseMessages() {
  for (Element messageElement : rootElement.elements("message")) {
    String id = messageElement.attribute("id");
    String itemRef = this.resolveName(messageElement.attribute("itemRef"));
    String name = messageElement.attribute("name");

    MessageDefinition messageDefinition = new MessageDefinition(this.targetNamespace + ":" + id, name);

    if(itemRef != null) {
      if(!this.itemDefinitions.containsKey(itemRef)) {
        // the referenced item definition must already have been parsed (parseItemDefinitions)
        addError(itemRef + " does not exist", messageElement);
      } else {
        ItemDefinition itemDefinition = this.itemDefinitions.get(itemRef);
        messageDefinition.setItemDefinition(itemDefinition);
      }
    }
    this.messages.put(messageDefinition.getId(), messageDefinition);
  }
}

/**
 * Parses the signals of the given definitions file. Signals are not
 * contained within a process element, but they can be referenced from inner
 * process elements.
 */
protected void parseSignals() {
  for (Element signalElement : rootElement.elements("signal")) {
    String id = signalElement.attribute("id");
    String signalName = signalElement.attribute("name");

    // signal names must be unique across the whole definitions file
    for (SignalDefinition signalDefinition : signals.values()) {
      if(signalDefinition.getName().equals(signalName)) {
        addError("duplicate signal name '"+signalName+"'.", signalElement);
      }
    }

    if(id == null) {
      addError("signal must have an id", signalElement);
    } else if(signalName == null) {
      addError("signal with id '"+id+"' has no name", signalElement);
    }else {
      SignalDefinition signal = new SignalDefinition();
      signal.setId(this.targetNamespace + ":" + id);
      signal.setName(signalName);
      this.signals.put(signal.getId(), signal);
    }
  }
}

/**
 * Parses the interfaces and operations defined within the root element.
 */
public void parseInterfaces() {
  for (Element interfaceElement : rootElement.elements("interface")) {

    // Create the interface
    String id = interfaceElement.attribute("id");
    String name = interfaceElement.attribute("name");
    String implementationRef = this.resolveName(interfaceElement.attribute("implementationRef"));
    BpmnInterface bpmnInterface = new BpmnInterface(this.targetNamespace + ":" + id, name);
    bpmnInterface.setImplementation(this.interfaceImplementations.get(implementationRef));

    // Handle all its operations
    for (Element operationElement : interfaceElement.elements("operation")) {
      Operation operation = parseOperation(operationElement, bpmnInterface);
      bpmnInterface.addOperation(operation);
    }

    bpmnInterfaces.put(bpmnInterface.getId(), bpmnInterface);
  }
}

/**
 * Parses a single 'operation' element of an interface and registers it in
 * the {@link #operations} map.
 *
 * @param operationElement the 'operation' XML element
 * @param bpmnInterface the interface that owns the operation
 * @return the parsed {@link Operation}, or null when the referenced
 *         in-message does not exist (an error is recorded via addError)
 */
public Operation parseOperation(Element operationElement, BpmnInterface bpmnInterface) {
  Element inMessageRefElement = operationElement.element("inMessageRef");
  String inMessageRef = this.resolveName(inMessageRefElement.getText());

  if (!this.messages.containsKey(inMessageRef)) {
    addError(inMessageRef + " does not exist", inMessageRefElement);
    return null;
  } else {
    MessageDefinition inMessage = this.messages.get(inMessageRef);
    String id = operationElement.attribute("id");
    String name = operationElement.attribute("name");
    String implementationRef = this.resolveName(operationElement.attribute("implementationRef"));

    Operation operation = new Operation(this.targetNamespace + ":" + id, name, bpmnInterface, inMessage);
    operation.setImplementation(this.operationImplementations.get(implementationRef));

    // the out-message is optional; silently skipped when the reference is unknown
    Element outMessageRefElement = operationElement.element("outMessageRef");
    if (outMessageRefElement != null) {
      String outMessageRef = this.resolveName(outMessageRefElement.getText());
      if (this.messages.containsKey(outMessageRef)) {
        MessageDefinition outMessage = this.messages.get(outMessageRef);
        operation.setOutMessage(outMessage);
      }
    }

    operations.put(operation.getId(), operation);
    return operation;
  }
}

/**
 * Parses the top-level 'error' elements and registers them by id in the
 * {@link #errors} map, so error event definitions can reference them later.
 */
public void parseErrors() {
  for (Element errorElement : rootElement.elements("error")) {
    Error error = new Error();

    String id = errorElement.attribute("id");
    if (id == null) {
      addError("'id' is mandatory on error definition", errorElement);
    }
    // note: the error is still registered (with a null id/key) after addError;
    // addError records the problem rather than aborting the parse here
    error.setId(id);

    String errorCode = errorElement.attribute("errorCode");
    if (errorCode != null) {
      error.setErrorCode(errorCode);
    }

    errors.put(id, error);
  }
}

/**
 * Parses all the process definitions defined within the 'definitions' root
 * element. Non-executable processes (isExecutable="false") are logged and
 * skipped; a missing isExecutable attribute is treated as executable.
 */
public void parseProcessDefinitions() {
  for (Element processElement : rootElement.elements("process")) {
    boolean isExecutable = true;
    String isExecutableStr = processElement.attribute("isExecutable");
    if (isExecutableStr != null) {
      if (!Boolean.parseBoolean(isExecutableStr)) {
        isExecutable = false;
        LOGGER.info("Ignoring non-executable process with id='" + processElement.attribute("id") + "'. Set the attribute isExecutable=\"true\" to deploy this process.");
      }
    } else {
      LOGGER.info("Process with id='" + processElement.attribute("id") + "' has no attribute isExecutable. Assuming it is executable. Better set the attribute explicitely, especially to be compatible with future engine versions which might change the default behavior.");
    }

    //Only process executable processes
    if (isExecutable) {
      processDefinitions.add(parseProcess(processElement));
    }
  }
}

/**
 * Parses the collaboration definition defined within the 'definitions'
 * root element and get all participants to lookup their process references
 * during DI parsing.
 */
public void parseCollaboration() {
  Element collaboration = rootElement.element("collaboration");
  if (collaboration != null) {
    for (Element participant : collaboration.elements("participant")) {
      String processRef = participant.attribute("processRef");
      if (processRef != null) {
        ProcessDefinitionImpl procDef = getProcessDefinition(processRef);
        if(procDef != null) {
          // Set participant process on the procDef, so it can get rendered later on if needed
          ParticipantProcess participantProcess = new ParticipantProcess();
          participantProcess.setId(participant.attribute("id"));
          participantProcess.setName(participant.attribute("name"));
          procDef.setParticipantProcess(participantProcess);

          participantProcesses.put(participantProcess.getId(), processRef);
        }
      }
    }
  }
}

/**
 * Parses one process (ie anything inside a &lt;process&gt; element).
 *
 * @param processElement
 *          The 'process' element.
 * @return The parsed version of the XML: a {@link ProcessDefinitionImpl}
 *         object.
 */
public ProcessDefinitionEntity parseProcess(Element processElement) {
  // reset all mappings that are related to one process definition
  sequenceFlows = new HashMap<String, TransitionImpl>();

  ProcessDefinitionEntity processDefinition = new ProcessDefinitionEntity();

  /*
   * Mapping object model - bpmn xml: processDefinition.id -> generated by
   * activiti engine processDefinition.key -> bpmn id (required)
   * processDefinition.name -> bpmn name (optional)
   */
  processDefinition.setKey(processElement.attribute("id"));
  processDefinition.setName(processElement.attribute("name"));
  processDefinition.setCategory(rootElement.attribute("targetNamespace"));
  processDefinition.setDescription(parseDocumentation(processElement));
  processDefinition.setProperty(PROPERTYNAME_DOCUMENTATION, parseDocumentation(processElement)); // Kept for backwards compatibility. See ACT-1020
  processDefinition.setTaskDefinitions(new HashMap<String, TaskDefinition>());
  processDefinition.setDeploymentId(deployment.getId());

  if (LOGGER.isLoggable(Level.FINE)) {
    LOGGER.fine("Parsing process " + processDefinition.getKey());
  }

  parseScope(processElement, processDefinition);

  // Parse any laneSets defined for this process
  parseLaneSets(processElement, processDefinition);

  for (BpmnParseListener parseListener : parseListeners) {
    parseListener.parseProcess(processElement, processDefinition);
  }

  // now we have parsed anything we can validate some stuff
  validateActivities(processDefinition.getActivities());

  return processDefinition;
}

/**
 * Parses the 'laneSet' elements of a process and adds the resulting
 * {@link LaneSet}s to the process definition.
 */
protected void parseLaneSets(Element parentElement, ProcessDefinitionEntity processDefinition) {
  List<Element> laneSets = parentElement.elements("laneSet");

  if(laneSets != null && laneSets.size() > 0) {
    for(Element laneSetElement : laneSets) {
      LaneSet newLaneSet = new LaneSet();

      newLaneSet.setId(laneSetElement.attribute("id"));
      newLaneSet.setName(laneSetElement.attribute("name"));
      parseLanes(laneSetElement, newLaneSet);

      // Finally, add the set
      processDefinition.addLaneSet(newLaneSet);
    }
  }
}
protected void parseLanes(Element laneSetElement, LaneSet laneSet) {
  List<Element> lanes = laneSetElement.elements("lane");
  if(lanes != null && lanes.size() > 0) {
    for(Element laneElement : lanes) {
      // Parse basic attributes
      Lane lane = new Lane();
      lane.setId(laneElement.attribute("id"));
      lane.setName(laneElement.attribute("name"));

      // Parse ID's of flow-nodes that live inside this lane
      List<Element> flowNodeElements = laneElement.elements("flowNodeRef");
      if(flowNodeElements != null && flowNodeElements.size() > 0) {
        for(Element flowNodeElement : flowNodeElements) {
          lane.getFlowNodeIds().add(flowNodeElement.getText());
        }
      }

      laneSet.addLane(lane);
    }
  }
}

/**
 * Parses a scope: a process, subprocess, etc.
 *
 * Note that a process definition is a scope on itself.
 *
 * @param scopeElement
 *          The XML element defining the scope
 * @param parentScope
 *          The scope that contains the nested scope.
 */
public void parseScope(Element scopeElement, ScopeImpl parentScope) {

  // Not yet supported on process level (PVM additions needed):
  // parseProperties(processElement);

  // intermediateCatchEvent elements are collected here during parseActivities
  // and parsed afterwards (see parsePostponedElements)
  HashMap<String, Element> postponedElements = new HashMap<String, Element>();

  parseStartEvents(scopeElement, parentScope);
  parseActivities(scopeElement, parentScope, postponedElements);
  parsePostponedElements(scopeElement, parentScope, postponedElements);
  parseEndEvents(scopeElement, parentScope);
  parseBoundaryEvents(scopeElement, parentScope);
  parseSequenceFlow(scopeElement, parentScope);
  parseExecutionListenersOnScope(scopeElement, parentScope);
  parseAssociations(scopeElement, parentScope);

  if(parentScope instanceof ProcessDefinition) {
    parseProcessDefinitionCustomExtensions(scopeElement, (ProcessDefinition) parentScope);
  }

  postponedElements.clear();

  IOSpecification ioSpecification = parseIOSpecification(scopeElement.element("ioSpecification"));
  parentScope.setIoSpecification(ioSpecification);
}

/**
 * Parses the elements that were postponed during parseActivities
 * (currently only intermediateCatchEvent, which must be parsed after
 * event-based gateways).
 */
protected void parsePostponedElements(Element scopeElement, ScopeImpl parentScope, HashMap<String, Element> postponedElements) {
  for (Element postponedElement : postponedElements.values()) {
    if(parentScope.findActivity(postponedElement.attribute("id")) == null) { // check whether activity is already parsed
      if(postponedElement.getTagName().equals("intermediateCatchEvent")) {
        parseIntermediateCatchEvent(postponedElement, parentScope, false);
      }
    }
  }
}

/**
 * Hook for process-definition-level custom extensions; currently only
 * start authorization (potential starters).
 */
protected void parseProcessDefinitionCustomExtensions(Element scopeElement, ProcessDefinition definition) {
  parseStartAuthorization(scopeElement, definition);
}

/**
 * Parses who is allowed to start the process: the activiti:potentialStarter
 * extension element and the activiti:candidateStarterUsers /
 * activiti:candidateStarterGroups attributes.
 */
protected void parseStartAuthorization(Element scopeElement, ProcessDefinition definition) {
  ProcessDefinitionEntity processDefinition = (ProcessDefinitionEntity) definition;

  // parse activiti:potentialStarters
  Element extentionsElement = scopeElement.element("extensionElements");
  if (extentionsElement != null) {
    List<Element> potentialStarterElements = extentionsElement.elementsNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, POTENTIAL_STARTER);

    for (Element potentialStarterElement : potentialStarterElements) {
      parsePotentialStarterResourceAssignment(potentialStarterElement, processDefinition);
    }
  }

  // parse activiti:candidateStarterUsers
  String candidateUsersString = scopeElement.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, CANDIDATE_STARTER_USERS_EXTENSION);
  if (candidateUsersString != null) {
    List<String> candidateUsers = parseCommaSeparatedList(candidateUsersString);
    for (String candidateUser : candidateUsers) {
      processDefinition.addCandidateStarterUserIdExpression(expressionManager.createExpression(candidateUser.trim()));
    }
  }

  // Candidate activiti:candidateStarterGroups
  String candidateGroupsString = scopeElement.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, CANDIDATE_STARTER_GROUPS_EXTENSION);
  if (candidateGroupsString != null) {
    List<String> candidateGroups = parseCommaSeparatedList(candidateGroupsString);
    for (String candidateGroup : candidateGroups) {
      processDefinition.addCandidateStarterGroupIdExpression(expressionManager.createExpression(candidateGroup.trim()));
    }
  }
}
protected void parsePotentialStarterResourceAssignment(Element performerElement, ProcessDefinitionEntity processDefinition) { Element raeElement = performerElement.element(RESOURCE_ASSIGNMENT_EXPR); if (raeElement != null) { Element feElement = raeElement.element(FORMAL_EXPRESSION); if (feElement != null) { List<String> assignmentExpressions = parseCommaSeparatedList(feElement.getText()); for (String assignmentExpression : assignmentExpressions) { assignmentExpression = assignmentExpression.trim(); if (assignmentExpression.startsWith(USER_PREFIX)) { String userAssignementId = getAssignmentId(assignmentExpression, USER_PREFIX); processDefinition.addCandidateStarterUserIdExpression(expressionManager.createExpression(userAssignementId)); } else if (assignmentExpression.startsWith(GROUP_PREFIX)) { String groupAssignementId = getAssignmentId(assignmentExpression, GROUP_PREFIX); processDefinition.addCandidateStarterGroupIdExpression(expressionManager.createExpression(groupAssignementId)); } else { // default: given string is a goupId, as-is. 
processDefinition.addCandidateStarterGroupIdExpression(expressionManager.createExpression(assignmentExpression)); } } } } } protected void parseAssociations(Element scopeElement, ScopeImpl parentScope) { for (Element associationElement : scopeElement.elements("association")) { String sourceRef = associationElement.attribute("sourceRef"); if(sourceRef == null) { addError("association element missing attribute 'sourceRef'", associationElement); } String targetRef = associationElement.attribute("targetRef"); if(targetRef == null) { addError("association element missing attribute 'targetRef'", associationElement); } ActivityImpl sourceActivity = parentScope.findActivity(sourceRef); ActivityImpl targetActivity = parentScope.findActivity(targetRef); // an association may reference elements that are not parsed as activities (like for instance // text annotations so do not throw an exception if sourceActivity or targetActivity are null) // However, we make sure they reference 'something': if(sourceActivity == null && !elementIds.contains(sourceRef)) { addError("Invalid reference sourceRef '"+sourceRef+"' of association element ", associationElement); } else if(targetActivity == null && !elementIds.contains(targetRef)) { addError("Invalid reference targetRef '"+targetRef+"' of association element ", associationElement); } else { if(sourceActivity != null && sourceActivity.getProperty("type").equals("compensationBoundaryCatch")) { Object isForCompensation = targetActivity.getProperty(PROPERTYNAME_IS_FOR_COMPENSATION); if(isForCompensation == null || !(Boolean) isForCompensation) { addError("compensation boundary catch must be connected to element with isForCompensation=true", associationElement); } else { ActivityImpl compensatedActivity = sourceActivity.getParentActivity(); compensatedActivity.setProperty(PROPERTYNAME_COMPENSATION_HANDLER_ID, targetActivity.getId()); } } } } } protected IOSpecification parseIOSpecification(Element ioSpecificationElement) { if 
(ioSpecificationElement == null) { return null; } IOSpecification ioSpecification = new IOSpecification(); for (Element dataInputElement : ioSpecificationElement.elements("dataInput")) { String id = dataInputElement.attribute("id"); String itemSubjectRef = this.resolveName(dataInputElement.attribute("itemSubjectRef")); ItemDefinition itemDefinition = this.itemDefinitions.get(itemSubjectRef); Data dataInput = new Data(this.targetNamespace + ":" + id, id, itemDefinition); ioSpecification.addInput(dataInput); } for (Element dataOutputElement : ioSpecificationElement.elements("dataOutput")) { String id = dataOutputElement.attribute("id"); String itemSubjectRef = this.resolveName(dataOutputElement.attribute("itemSubjectRef")); ItemDefinition itemDefinition = this.itemDefinitions.get(itemSubjectRef); Data dataOutput = new Data(this.targetNamespace + ":" + id, id, itemDefinition); ioSpecification.addOutput(dataOutput); } for (Element inputSetElement : ioSpecificationElement.elements("inputSet")) { for (Element dataInputRef : inputSetElement.elements("dataInputRefs")) { DataRef dataRef = new DataRef(dataInputRef.getText()); ioSpecification.addInputRef(dataRef); } } for (Element outputSetElement : ioSpecificationElement.elements("outputSet")) { for (Element dataInputRef : outputSetElement.elements("dataOutputRefs")) { DataRef dataRef = new DataRef(dataInputRef.getText()); ioSpecification.addOutputRef(dataRef); } } return ioSpecification; } protected AbstractDataAssociation parseDataInputAssociation(Element dataAssociationElement) { String sourceRef = null; Element sourceElement = dataAssociationElement.element("sourceRef"); if (sourceElement != null) { sourceRef = sourceElement.getText(); } String targetRef = null; Element targetElement = dataAssociationElement.element("targetRef"); if (targetElement != null) { targetRef = targetElement.getText(); } if (targetRef != null && targetRef.equals("")) { addError("targetRef is required", dataAssociationElement); } List<Element> 
assignments = dataAssociationElement.elements("assignment"); if (assignments.isEmpty()) { return new MessageImplicitDataInputAssociation(sourceRef, targetRef); } else { SimpleDataInputAssociation dataAssociation = new SimpleDataInputAssociation(sourceRef, targetRef); for (Element assigmentElement : dataAssociationElement.elements("assignment")) { if (assigmentElement.element("from") != null && assigmentElement.element("to") != null) { Expression from = this.expressionManager.createExpression(assigmentElement.element("from").getText()); Expression to = this.expressionManager.createExpression(assigmentElement.element("to").getText()); Assignment assignment = new Assignment(from, to); dataAssociation.addAssignment(assignment); } } return dataAssociation; } } /** * Parses the start events of a certain level in the process (process, * subprocess or another scope). * * @param parentElement * The 'parent' element that contains the start events (process, * subprocess). * @param scope * The {@link ScopeImpl} to which the start events must be added. 
*/ public void parseStartEvents(Element parentElement, ScopeImpl scope) { List<Element> startEventElements = parentElement.elements("startEvent"); List<ActivityImpl> startEventActivities = new ArrayList<ActivityImpl>(); for (Element startEventElement : startEventElements) { ActivityImpl startEventActivity = createActivityOnScope(startEventElement, scope); if (scope instanceof ProcessDefinitionEntity) { parseProcessDefinitionStartEvent(startEventActivity, startEventElement, parentElement, scope); startEventActivities.add(startEventActivity); } else { parseScopeStartEvent(startEventActivity, startEventElement, parentElement, scope); } for (BpmnParseListener parseListener : parseListeners) { parseListener.parseStartEvent(startEventElement, scope, startEventActivity); } parseExecutionListenersOnScope(startEventElement, startEventActivity); } if(scope instanceof ProcessDefinitionEntity) { selectInitial(startEventActivities, (ProcessDefinitionEntity) scope, parentElement); parseStartFormHandlers(startEventElements, (ProcessDefinitionEntity) scope); } } protected void selectInitial(List<ActivityImpl> startEventActivities, ProcessDefinitionEntity processDefinition, Element parentElement) { ActivityImpl initial = null; // validate that there is s single none start event / timer start event: for (ActivityImpl activityImpl : startEventActivities) { if(!activityImpl.getProperty("type").equals("messageStartEvent")) { if(initial == null) { initial = activityImpl; } else { addError("multiple none start events or timer start events not supported on process definition", parentElement); } } } // if there is a single start event, select it as initial, regardless of it's type: if(initial == null && startEventActivities.size() == 1) { initial = startEventActivities.get(0); } processDefinition.setInitial(initial); } protected void parseProcessDefinitionStartEvent(ActivityImpl startEventActivity, Element startEventElement, Element parentElement, ScopeImpl scope) { ProcessDefinitionEntity 
processDefinition = (ProcessDefinitionEntity) scope; String initiatorVariableName = startEventElement.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "initiator"); if (initiatorVariableName != null) { processDefinition.setProperty(PROPERTYNAME_INITIATOR_VARIABLE_NAME, initiatorVariableName); } // all start events share the same behavior: startEventActivity.setActivityBehavior(new NoneStartEventActivityBehavior()); Element timerEventDefinition = startEventElement.element("timerEventDefinition"); Element messageEventDefinition = startEventElement.element("messageEventDefinition"); if (timerEventDefinition != null) { parseTimerStartEventDefinition(timerEventDefinition, startEventActivity, processDefinition); } else if(messageEventDefinition != null) { EventSubscriptionDeclaration messageDefinition = parseMessageEventDefinition(messageEventDefinition); startEventActivity.setProperty("type", "messageStartEvent"); messageDefinition.setActivityId(startEventActivity.getId()); // create message event subscription: messageDefinition.setStartEvent(true); addEventSubscriptionDeclaration(messageDefinition, processDefinition, startEventElement); } } protected void parseStartFormHandlers(List<Element> startEventElements, ProcessDefinitionEntity processDefinition) { if(processDefinition.getInitial() != null) { for (Element startEventElement : startEventElements) { if(startEventElement.attribute("id").equals(processDefinition.getInitial().getId())) { StartFormHandler startFormHandler; String startFormHandlerClassName = startEventElement.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "formHandlerClass"); if (startFormHandlerClassName != null) { startFormHandler = (StartFormHandler) ReflectUtil.instantiate(startFormHandlerClassName); } else { startFormHandler = new DefaultStartFormHandler(); } startFormHandler.parseConfiguration(startEventElement, deployment, processDefinition, this); processDefinition.setStartFormHandler(startFormHandler); } } } } protected void 
parseScopeStartEvent(ActivityImpl startEventActivity, Element startEventElement, Element parentElement, ScopeImpl scope) { Object triggeredByEvent = scope.getProperty("triggeredByEvent"); boolean isTriggeredByEvent = triggeredByEvent != null && ((Boolean) triggeredByEvent == true); Element errorEventDefinition = startEventElement.element("errorEventDefinition"); Element messageEventDefinition = startEventElement.element("messageEventDefinition"); Element signalEventDefinition = startEventElement.element("signalEventDefinition"); if (isTriggeredByEvent) { // event subprocess // all start events of an event subprocess share common behavior EventSubProcessStartEventActivityBehavior activityBehavior = new EventSubProcessStartEventActivityBehavior(startEventActivity.getId()); startEventActivity.setActivityBehavior(activityBehavior); String isInterrupting = startEventElement.attribute("isInterrupting"); if(isInterrupting != null && (isInterrupting.equals("false")||isInterrupting.equals("FALSE"))) { activityBehavior.setInterrupting(false); } // the scope of the event subscription is the parent of the event // subprocess (subscription must be created when parent is initialized) ScopeImpl catchingScope = ((ActivityImpl) scope).getParent(); if (errorEventDefinition != null) { if(!activityBehavior.isInterrupting()) { addError("error start event of event subprocess must be interrupting", startEventElement); } if (scope.getProperty(PROPERTYNAME_INITIAL) == null) { scope.setProperty(PROPERTYNAME_INITIAL, startEventActivity); parseErrorStartEventDefinition(errorEventDefinition, startEventActivity, catchingScope); } else { addError("multiple start events not supported for subprocess", startEventElement); } } else if (messageEventDefinition != null) { EventSubscriptionDeclaration eventSubscriptionDeclaration = parseMessageEventDefinition(messageEventDefinition); eventSubscriptionDeclaration.setActivityId(startEventActivity.getId()); 
eventSubscriptionDeclaration.setStartEvent(false); addEventSubscriptionDeclaration(eventSubscriptionDeclaration, catchingScope, messageEventDefinition); } else if (signalEventDefinition != null) { EventSubscriptionDeclaration eventSubscriptionDeclaration = parseSignalEventDefinition(signalEventDefinition); eventSubscriptionDeclaration.setActivityId(startEventActivity.getId()); eventSubscriptionDeclaration.setStartEvent(false); addEventSubscriptionDeclaration(eventSubscriptionDeclaration, catchingScope, signalEventDefinition); } else { addError("start event of event subprocess must be of type 'error', 'message' or 'signal'", startEventElement); } } else { // "regular" subprocess if(errorEventDefinition != null) { addError("errorEventDefinition only allowed on start event if subprocess is an event subprocess", errorEventDefinition); } if(messageEventDefinition != null) { addError("messageEventDefinition only allowed on start event if subprocess is an event subprocess", messageEventDefinition); } if(signalEventDefinition != null) { addError("signalEventDefintion only allowed on start event if subprocess is an event subprocess", messageEventDefinition); } if (scope.getProperty(PROPERTYNAME_INITIAL) == null) { scope.setProperty(PROPERTYNAME_INITIAL, startEventActivity); startEventActivity.setActivityBehavior(new NoneStartEventActivityBehavior()); } else { addError("multiple start events not supported for subprocess", startEventElement); } } } protected void parseErrorStartEventDefinition(Element errorEventDefinition, ActivityImpl startEventActivity, ScopeImpl scope) { startEventActivity.setProperty("type", "errorStartEvent"); String errorRef = errorEventDefinition.attribute("errorRef"); Error error = null; ErrorEventDefinition definition = new ErrorEventDefinition(startEventActivity.getId()); if (errorRef != null) { error = errors.get(errorRef); String errorCode = error == null ? 
errorRef : error.getErrorCode(); definition.setErrorCode(errorCode); } definition.setPrecedence(10); addErrorEventDefinition(definition, scope); } protected EventSubscriptionDeclaration parseMessageEventDefinition(Element messageEventDefinition) { String messageRef = messageEventDefinition.attribute("messageRef"); if(messageRef == null) { addError("attribute 'messageRef' is required", messageEventDefinition); } MessageDefinition messageDefinition = messages.get(resolveName(messageRef)); if(messageDefinition == null) { addError("Invalid 'messageRef': no message with id '"+messageRef+"' found.", messageEventDefinition); } return new EventSubscriptionDeclaration(messageDefinition.getName(), "message"); } @SuppressWarnings("unchecked") protected void addEventSubscriptionDeclaration(EventSubscriptionDeclaration subscription, ScopeImpl scope, Element element) { List<EventSubscriptionDeclaration> eventDefinitions = (List<EventSubscriptionDeclaration>) scope.getProperty(PROPERTYNAME_EVENT_SUBSCRIPTION_DECLARATION); if(eventDefinitions == null) { eventDefinitions = new ArrayList<EventSubscriptionDeclaration>(); scope.setProperty(PROPERTYNAME_EVENT_SUBSCRIPTION_DECLARATION, eventDefinitions); } else { // if this is a message event, validate that it is the only one with the provided name for this scope if(subscription.getEventType().equals("message")) { for (EventSubscriptionDeclaration eventDefinition : eventDefinitions) { if(eventDefinition.getEventType().equals("message") && eventDefinition.getEventName().equals(subscription.getEventName()) && eventDefinition.isStartEvent() == subscription.isStartEvent()) { addError("Cannot have more than one message event subscription with name '"+subscription.getEventName()+"' for scope '"+scope.getId()+"'", element); } } } } eventDefinitions.add(subscription); } /** * Parses the activities of a certain level in the process (process, * subprocess or another scope). 
* * @param parentElement * The 'parent' element that contains the activities (process, * subprocess). * @param scopeElement * The {@link ScopeImpl} to which the activities must be added. * @param postponedElements * @param postProcessActivities */ public void parseActivities(Element parentElement, ScopeImpl scopeElement, HashMap<String, Element> postponedElements) { for (Element activityElement : parentElement.elements()) { parseActivity(activityElement, parentElement, scopeElement, postponedElements); } } protected void parseActivity(Element activityElement, Element parentElement, ScopeImpl scopeElement, HashMap<String, Element> postponedElements) { ActivityImpl activity = null; if (activityElement.getTagName().equals("exclusiveGateway")) { activity = parseExclusiveGateway(activityElement, scopeElement); } else if (activityElement.getTagName().equals("inclusiveGateway")) { activity = parseInclusiveGateway(activityElement, scopeElement); } else if (activityElement.getTagName().equals("parallelGateway")) { activity = parseParallelGateway(activityElement, scopeElement); } else if (activityElement.getTagName().equals("scriptTask")) { activity = parseScriptTask(activityElement, scopeElement); } else if (activityElement.getTagName().equals("serviceTask")) { activity = parseServiceTask(activityElement, scopeElement); } else if (activityElement.getTagName().equals("businessRuleTask")) { activity = parseBusinessRuleTask(activityElement, scopeElement); } else if (activityElement.getTagName().equals("task")) { activity = parseTask(activityElement, scopeElement); } else if (activityElement.getTagName().equals("manualTask")) { activity = parseManualTask(activityElement, scopeElement); } else if (activityElement.getTagName().equals("userTask")) { activity = parseUserTask(activityElement, scopeElement); } else if (activityElement.getTagName().equals("sendTask")) { activity = parseSendTask(activityElement, scopeElement); } else if 
(activityElement.getTagName().equals("receiveTask")) { activity = parseReceiveTask(activityElement, scopeElement); } else if (activityElement.getTagName().equals("subProcess")) { activity = parseSubProcess(activityElement, scopeElement); } else if (activityElement.getTagName().equals("callActivity")) { activity = parseCallActivity(activityElement, scopeElement); } else if (activityElement.getTagName().equals("intermediateCatchEvent")) { // postpone all intermediate catch events (required for supporting event-based gw) postponedElements.put(activityElement.attribute("id"), activityElement); } else if (activityElement.getTagName().equals("intermediateThrowEvent")) { activity = parseIntermediateThrowEvent(activityElement, scopeElement); } else if (activityElement.getTagName().equals("eventBasedGateway")) { activity = parseEventBasedGateway(activityElement, parentElement, scopeElement); } else if(activityElement.getTagName().equals("transaction")) { activity = parseTransaction(activityElement, scopeElement); } else if (activityElement.getTagName().equals("adHocSubProcess") || activityElement.getTagName().equals("complexGateway")) { addWarning("Ignoring unsupported activity type", activityElement); } // Parse stuff common to activities above if (activity != null) { parseMultiInstanceLoopCharacteristics(activityElement, activity); } } public void validateActivities(List<ActivityImpl> activities) { for (ActivityImpl activity : activities) { validateActivity(activity); // check children if it is an own scope / subprocess / ... 
if (activity.getActivities().size()>0) { validateActivities(activity.getActivities()); } } } protected void validateActivity(ActivityImpl activity) { if (activity.getActivityBehavior() instanceof ExclusiveGatewayActivityBehavior) { validateExclusiveGateway(activity); } } public void validateExclusiveGateway(ActivityImpl activity) { if (activity.getOutgoingTransitions().size()==0) { // TODO: double check if this is valid (I think in Activiti yes, since we need start events we will need an end event as well) addError("Exclusive Gateway '" + activity.getId() + "' has no outgoing sequence flows.", null); } else if (activity.getOutgoingTransitions().size()==1) { PvmTransition flow = activity.getOutgoingTransitions().get(0); Condition condition = (Condition) flow.getProperty(BpmnParse.PROPERTYNAME_CONDITION); if (condition!=null) { addError("Exclusive Gateway '" + activity.getId() + "' has only one outgoing sequence flow ('" + flow.getId() + "'). This is not allowed to have a condition.", null); } } else { String defaultSequenceFlow = (String) activity.getProperty("default"); boolean hasDefaultFlow = defaultSequenceFlow!=null && defaultSequenceFlow.length()>0; ArrayList<PvmTransition> flowsWithoutCondition = new ArrayList<PvmTransition>(); for (PvmTransition flow : activity.getOutgoingTransitions()) { Condition condition = (Condition) flow.getProperty(BpmnParse.PROPERTYNAME_CONDITION); boolean isDefaultFlow = flow.getId()!=null && flow.getId().equals(defaultSequenceFlow); boolean hasConditon = condition!=null; if (!hasConditon && !isDefaultFlow) { flowsWithoutCondition.add(flow); } if (hasConditon && isDefaultFlow) { addError("Exclusive Gateway '" + activity.getId() + "' has outgoing sequence flow '" + flow.getId() + "' which is the default flow but has a condition too.", null); } } if (hasDefaultFlow || flowsWithoutCondition.size()>1) { // if we either have a default flow (then no flows without conditions are valid at all) or if we have more than one flow without 
condition this is an error for (PvmTransition flow : flowsWithoutCondition) { addError("Exclusive Gateway '" + activity.getId() + "' has outgoing sequence flow '" + flow.getId() + "' without condition which is not the default flow.", null); } } else if (flowsWithoutCondition.size()==1) { // Havinf no default and exactly one flow without condition this is considered the default one now (to not break backward compatibility) PvmTransition flow = flowsWithoutCondition.get(0); addWarning("Exclusive Gateway '" + activity.getId() + "' has outgoing sequence flow '" + flow.getId() + "' without condition which is not the default flow. We assume it to be the default flow, but it is bad modeling practice, better set the default flow in your gateway.", null); } } } public ActivityImpl parseIntermediateCatchEvent(Element intermediateEventElement, ScopeImpl scopeElement, boolean isAfterEventBasedGateway) { ActivityImpl nestedActivity = createActivityOnScope(intermediateEventElement, scopeElement); // Catch event behavior is the same for all types nestedActivity.setActivityBehavior(new IntermediateCatchEventActivitiBehaviour()); Element timerEventDefinition = intermediateEventElement.element("timerEventDefinition"); Element signalEventDefinition = intermediateEventElement.element("signalEventDefinition"); Element messageEventDefinition = intermediateEventElement.element("messageEventDefinition"); if (timerEventDefinition != null) { parseIntemediateTimerEventDefinition(timerEventDefinition, nestedActivity, isAfterEventBasedGateway); }else if(signalEventDefinition != null) { parseIntemediateSignalEventDefinition(signalEventDefinition, nestedActivity, isAfterEventBasedGateway); }else if(messageEventDefinition != null) { parseIntemediateMessageEventDefinition(messageEventDefinition, nestedActivity, isAfterEventBasedGateway); } else { addError("Unsupported intermediate catch event type", intermediateEventElement); } for (BpmnParseListener parseListener : parseListeners) { 
      parseListener.parseIntermediateCatchEvent(intermediateEventElement, scopeElement, nestedActivity);
    }

    parseExecutionListenersOnScope(intermediateEventElement, nestedActivity);

    return nestedActivity;
  }

  /**
   * Parses an intermediate message catch event definition. After an
   * event-based gateway the subscription is registered on the parent scope
   * (keyed by activity id); otherwise the activity itself becomes a scope
   * carrying the subscription.
   */
  protected void parseIntemediateMessageEventDefinition(Element messageEventDefinition, ActivityImpl nestedActivity, boolean isAfterEventBasedGateway) {
    nestedActivity.setProperty("type", "intermediateMessageCatch");

    EventSubscriptionDeclaration messageDefinition = parseMessageEventDefinition(messageEventDefinition);
    if(isAfterEventBasedGateway) {
      messageDefinition.setActivityId(nestedActivity.getId());
      addEventSubscriptionDeclaration(messageDefinition, nestedActivity.getParent(), messageEventDefinition);
    }else {
      nestedActivity.setScope(true);
      addEventSubscriptionDeclaration(messageDefinition, nestedActivity, messageEventDefinition);
    }

    for (BpmnParseListener parseListener : parseListeners) {
      parseListener.parseIntermediateMessageCatchEventDefinition(messageEventDefinition, nestedActivity);
    }
  }

  /**
   * Parses an intermediate throw event: signal, compensation, or "none".
   * Escalation, message and link throw events are detected and rejected with
   * a parse error; anything else falls through to the none-event behavior.
   */
  public ActivityImpl parseIntermediateThrowEvent(Element intermediateEventElement, ScopeImpl scopeElement) {
    ActivityImpl nestedActivityImpl = createActivityOnScope(intermediateEventElement, scopeElement);

    ActivityBehavior activityBehavior = null;

    Element signalEventDefinitionElement = intermediateEventElement.element("signalEventDefinition");
    Element compensateEventDefinitionElement = intermediateEventElement.element("compensateEventDefinition");
    boolean otherUnsupportedThrowingIntermediateEvent =
      (intermediateEventElement.element("escalationEventDefinition") != null) || //
      (intermediateEventElement.element("messageEventDefinition") != null) || //
      (intermediateEventElement.element("linkEventDefinition") != null);
    // All other event definition types cannot be intermediate throwing (cancelEventDefinition, conditionalEventDefinition, errorEventDefinition, terminateEventDefinition, timerEventDefinition

    if(signalEventDefinitionElement != null) {
      nestedActivityImpl.setProperty("type", "intermediateSignalThrow");
      EventSubscriptionDeclaration signalDefinition = parseSignalEventDefinition(signalEventDefinitionElement);
      activityBehavior = new IntermediateThrowSignalEventActivityBehavior(signalDefinition);
    } else if(compensateEventDefinitionElement != null) {
      CompensateEventDefinition compensateEventDefinition = parseCompensateEventDefinition(compensateEventDefinitionElement, scopeElement);
      activityBehavior = new IntermediateThrowCompensationEventActivityBehavior(compensateEventDefinition);
      // IntermediateThrowNoneEventActivityBehavior
    } else if (otherUnsupportedThrowingIntermediateEvent) {
      addError("Unsupported intermediate throw event type", intermediateEventElement);
    } else { // None intermediate event
      activityBehavior = new IntermediateThrowNoneEventActivityBehavior();
    }

    for (BpmnParseListener parseListener : parseListeners) {
      parseListener.parseIntermediateThrowEvent(intermediateEventElement, scopeElement, nestedActivityImpl);
    }

    nestedActivityImpl.setActivityBehavior(activityBehavior);

    parseExecutionListenersOnScope(intermediateEventElement, nestedActivityImpl);

    return nestedActivityImpl;
  }

  /**
   * Parses a compensateEventDefinition element into a
   * {@link CompensateEventDefinition}. An 'activityRef' that does not resolve
   * to an activity in the current scope is reported as a parse error.
   * 'waitForCompletion' defaults to true.
   */
  protected CompensateEventDefinition parseCompensateEventDefinition(Element compensateEventDefinitionElement, ScopeImpl scopeElement) {
    String activityRef = compensateEventDefinitionElement.attribute("activityRef");
    boolean waitForCompletion = "true".equals(compensateEventDefinitionElement.attribute("waitForCompletion", "true"));

    if(activityRef != null) {
      if(scopeElement.findActivity(activityRef) == null) {
        addError("Invalid attribute value for 'activityRef': no activity with id '"+activityRef+"' in current scope", compensateEventDefinitionElement);
      }
    }

    CompensateEventDefinition compensateEventDefinition =  new CompensateEventDefinition();
    compensateEventDefinition.setActivityRef(activityRef);
    compensateEventDefinition.setWaitForCompletion(waitForCompletion);

    return compensateEventDefinition;
  }

  /**
   * Marks a boundary event as a compensation catch and verifies that the
   * parent scope does not already hold another compensation boundary event.
   * NOTE(review): child.getProperty("type") is assumed non-null for all
   * sibling activities here — confirm against createActivityOnScope, which
   * always sets "type".
   */
  protected void parseCatchCompensateEventDefinition(Element compensateEventDefinition, ActivityImpl activity) {
    activity.setProperty("type", "compensationBoundaryCatch");

    ScopeImpl parent = activity.getParent();
    for (ActivityImpl child : parent.getActivities()) {
      if(child.getProperty("type").equals("compensationBoundaryCatch") && child != activity ) {
        addError("multiple boundary events with compensateEventDefinition not supported on same activity", compensateEventDefinition);
      }
    }
  }

  /**
   * Marks a boundary event as a cancel catch. Only valid on a transaction
   * subprocess, and only one cancel boundary event per transaction is allowed.
   *
   * @return the behavior implementing the cancel boundary event
   */
  protected ActivityBehavior parseBoundaryCancelEventDefinition(Element cancelEventDefinition, ActivityImpl activity) {
    activity.setProperty("type", "cancelBoundaryCatch");

    ActivityImpl parent = (ActivityImpl) activity.getParent();
    if(!parent.getProperty("type").equals("transaction")) {
      addError("boundary event with cancelEventDefinition only supported on transaction subprocesses", cancelEventDefinition);
    }

    for (ActivityImpl child : parent.getActivities()) {
      if(child.getProperty("type").equals("cancelBoundaryCatch") && child != activity ) {
        addError("multiple boundary events with cancelEventDefinition not supported on same transaction subprocess", cancelEventDefinition);
      }
    }

    return new CancelBoundaryEventActivityBehavior();
  }

  /**
   * Parses loopCharacteristics (standardLoop/Multi-instance) of an activity, if
   * any is defined.
   */
  public void parseMultiInstanceLoopCharacteristics(Element activityElement, ActivityImpl activity) {

    // Only 'activities' (in the BPMN 2.0 spec meaning) can have mi
    // characteristics
    if (!(activity.getActivityBehavior() instanceof AbstractBpmnActivityBehavior)) {
      return;
    }

    Element miLoopCharacteristics = activityElement.element("multiInstanceLoopCharacteristics");
    if (miLoopCharacteristics != null) {

      MultiInstanceActivityBehavior miActivityBehavior = null;
      boolean isSequential = parseBooleanAttribute(miLoopCharacteristics.attribute("isSequential"), false);
      if (isSequential) {
        miActivityBehavior = new SequentialMultiInstanceBehavior(activity, (AbstractBpmnActivityBehavior) activity.getActivityBehavior());
      } else {
        miActivityBehavior = new ParallelMultiInstanceBehavior(activity, (AbstractBpmnActivityBehavior) activity.getActivityBehavior());
      }
      // The MI behavior wraps the original behavior; the activity becomes a scope
      // so each instance gets its own execution.
      activity.setScope(true);
      activity.setProperty("multiInstance", isSequential ? "sequential" : "parallel");
      activity.setActivityBehavior(miActivityBehavior);

      // loopCardinality
      Element loopCardinality = miLoopCharacteristics.element("loopCardinality");
      if (loopCardinality != null) {
        String loopCardinalityText = loopCardinality.getText();
        if (loopCardinalityText == null || "".equals(loopCardinalityText)) {
          addError("loopCardinality must be defined for a multiInstanceLoopCharacteristics definition ", miLoopCharacteristics);
        }
        miActivityBehavior.setLoopCardinalityExpression(expressionManager.createExpression(loopCardinalityText));
      }

      // completionCondition
      Element completionCondition = miLoopCharacteristics.element("completionCondition");
      if (completionCondition != null) {
        String completionConditionText = completionCondition.getText();
        miActivityBehavior.setCompletionConditionExpression(expressionManager.createExpression(completionConditionText));
      }

      // activiti:collection — values containing '{' are treated as expressions,
      // plain values as variable names.
      String collection = miLoopCharacteristics.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "collection");
      if (collection != null) {
        if (collection.contains("{")) {
          miActivityBehavior.setCollectionExpression(expressionManager.createExpression(collection));
        } else {
          miActivityBehavior.setCollectionVariable(collection);
        }
      }

      // loopDataInputRef — same expression-vs-variable heuristic as above;
      // note it overwrites whatever activiti:collection set.
      Element loopDataInputRef = miLoopCharacteristics.element("loopDataInputRef");
      if (loopDataInputRef != null) {
        String loopDataInputRefText = loopDataInputRef.getText();
        if (loopDataInputRefText != null) {
          if (loopDataInputRefText.contains("{")) {
            miActivityBehavior.setCollectionExpression(expressionManager.createExpression(loopDataInputRefText));
          } else {
            miActivityBehavior.setCollectionVariable(loopDataInputRefText);
          }
        }
      }

      // activiti:elementVariable
      String elementVariable = miLoopCharacteristics.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "elementVariable");
      if (elementVariable != null) {
        miActivityBehavior.setCollectionElementVariable(elementVariable);
      }

      // dataInputItem — the BPMN-standard way to name the element variable;
      // overrides activiti:elementVariable when both are present.
      Element inputDataItem = miLoopCharacteristics.element("inputDataItem");
      if (inputDataItem != null) {
        String inputDataItemName = inputDataItem.attribute("name");
        miActivityBehavior.setCollectionElementVariable(inputDataItemName);
      }

      // Validation
      if (miActivityBehavior.getLoopCardinalityExpression() == null && miActivityBehavior.getCollectionExpression() == null
              && miActivityBehavior.getCollectionVariable() == null) {
        addError("Either loopCardinality or loopDataInputRef/activiti:collection must been set", miLoopCharacteristics);
      }

      // Validation
      if (miActivityBehavior.getCollectionExpression() == null && miActivityBehavior.getCollectionVariable() == null
              && miActivityBehavior.getCollectionElementVariable() != null) {
        addError("LoopDataInputRef/activiti:collection must be set when using inputDataItem or activiti:elementVariable", miLoopCharacteristics);
      }

      for (BpmnParseListener parseListener : parseListeners) {
        parseListener.parseMultiInstanceLoopCharacteristics(activityElement, miLoopCharacteristics, activity);
      }
    }
  }

  /**
   * Parses the generic information of an activity element (id, name,
   * documentation, etc.), and creates a new {@link ActivityImpl} on the given
   * scope element.
   */
  public ActivityImpl createActivityOnScope(Element activityElement, ScopeImpl scopeElement) {
    String id = activityElement.attribute("id");
    if (LOGGER.isLoggable(Level.FINE)) {
      LOGGER.fine("Parsing activity " + id);
    }
    ActivityImpl activity = scopeElement.createActivity(id);

    // Copy the generic BPMN attributes onto the activity as properties.
    activity.setProperty("name", activityElement.attribute("name"));
    activity.setProperty("documentation", parseDocumentation(activityElement));
    activity.setProperty("default", activityElement.attribute("default"));
    activity.setProperty("type", activityElement.getTagName());
    activity.setProperty("line", activityElement.getLine());

    String isForCompensation = activityElement.attribute("isForCompensation");
    if(isForCompensation != null && (isForCompensation.equals("true")||isForCompensation.equals("TRUE"))) {
      activity.setProperty(PROPERTYNAME_IS_FOR_COMPENSATION, true);
    }

    return activity;
  }

  /**
   * Concatenates all 'documentation' child elements of the given element,
   * separated by blank lines; returns null when none are present.
   */
  public String parseDocumentation(Element element) {
    List<Element> docElements = element.elements("documentation");
    if (docElements.isEmpty()) {
      return null;
    }

    StringBuilder builder = new StringBuilder();
    for (Element e: docElements) {
      if (builder.length() != 0) {
        builder.append("\n\n");
      }
      builder.append(e.getText().trim());
    }

    return builder.toString();
  }

  /**
   * Parses an exclusive gateway declaration.
   */
  public ActivityImpl parseExclusiveGateway(Element exclusiveGwElement, ScopeImpl scope) {
    ActivityImpl activity = createActivityOnScope(exclusiveGwElement, scope);
    activity.setActivityBehavior(new ExclusiveGatewayActivityBehavior());

    parseExecutionListenersOnScope(exclusiveGwElement, activity);

    for (BpmnParseListener parseListener : parseListeners) {
      parseListener.parseExclusiveGateway(exclusiveGwElement, scope, activity);
    }
    return activity;
  }

  /**
   * Parses an inclusive gateway declaration.
   */
  public ActivityImpl parseInclusiveGateway(Element inclusiveGwElement, ScopeImpl scope) {
    ActivityImpl activity = createActivityOnScope(inclusiveGwElement, scope);
    activity.setActivityBehavior(new InclusiveGatewayActivityBehavior());

    parseExecutionListenersOnScope(inclusiveGwElement, activity);

    for (BpmnParseListener parseListener : parseListeners) {
      parseListener.parseInclusiveGateway(inclusiveGwElement, scope, activity);
    }
    return activity;
  }

  /**
   * Parses an event-based gateway. The parent element is needed because the
   * gateway's outgoing sequence flows are resolved against its siblings: each
   * target must be an intermediateCatchEvent, which is parsed in
   * "after event-based gateway" mode.
   */
  public ActivityImpl parseEventBasedGateway(Element eventBasedGwElement, Element parentElement, ScopeImpl scope) {
    ActivityImpl activity = createActivityOnScope(eventBasedGwElement, scope);
    activity.setActivityBehavior(new EventBasedGatewayActivityBehavior());
    activity.setScope(true);

    parseExecutionListenersOnScope(eventBasedGwElement, activity);

    for (BpmnParseListener parseListener : parseListeners) {
      parseListener.parseEventBasedGateway(eventBasedGwElement, scope, activity);
    }

    // find all outgoing sequence flows:
    List<Element> sequenceFlows = parentElement.elements("sequenceFlow");

    // collect all siblings in a map
    Map<String, Element> siblingsMap = new HashMap<String, Element>();
    List<Element> siblings = parentElement.elements();
    for (Element sibling : siblings) {
      siblingsMap.put(sibling.attribute("id"), sibling);
    }

    for (Element sequenceFlow : sequenceFlows) {
      String sourceRef = sequenceFlow.attribute("sourceRef");
      String targetRef = sequenceFlow.attribute("targetRef");

      if (activity.getId().equals(sourceRef)) {
        Element sibling = siblingsMap.get(targetRef);
        if (sibling != null) {
          if (sibling.getTagName().equals("intermediateCatchEvent")) {
            parseIntermediateCatchEvent(sibling, activity, true);
          } else {
            addError("Event based gateway can only be connected to elements of type intermediateCatchEvent", sibling);
          }
        }
      }
    }

    return activity;
  }

  /**
   * Parses a parallel gateway declaration.
   */
  public ActivityImpl parseParallelGateway(Element parallelGwElement, ScopeImpl scope) {
    ActivityImpl activity = createActivityOnScope(parallelGwElement, scope);
    activity.setActivityBehavior(new ParallelGatewayActivityBehavior());

    parseExecutionListenersOnScope(parallelGwElement, activity);

    for (BpmnParseListener parseListener : parseListeners) {
      parseListener.parseParallelGateway(parallelGwElement, scope, activity);
    }
    return activity;
  }

  /**
   * Parses a scriptTask declaration.
   */
  public ActivityImpl parseScriptTask(Element scriptTaskElement, ScopeImpl scope) {
    ActivityImpl activity = createActivityOnScope(scriptTaskElement, scope);

    String script = null;
    String language = null;
    String resultVariableName = null;

    Element scriptElement = scriptTaskElement.element("script");
    if (scriptElement != null) {
      script = scriptElement.getText();

      // scriptFormat attribute wins; fall back to the engine default language.
      if (language == null) {
        language = scriptTaskElement.attribute("scriptFormat");
      }

      if (language == null) {
        language = ScriptingEngines.DEFAULT_SCRIPTING_LANGUAGE;
      }

      resultVariableName = scriptTaskElement.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "resultVariable");
      if (resultVariableName == null) {
        // for backwards compatible reasons
        resultVariableName = scriptTaskElement.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "resultVariableName");
      }
    }

    activity.setAsync(isAsync(scriptTaskElement));
    activity.setExclusive(isExclusive(scriptTaskElement));

    activity.setActivityBehavior(new ScriptTaskActivityBehavior(script, language, resultVariableName));

    parseExecutionListenersOnScope(scriptTaskElement, activity);

    for (BpmnParseListener parseListener : parseListeners) {
      parseListener.parseScriptTask(scriptTaskElement, scope, activity);
    }
    return activity;
  }

  /**
   * Parses a serviceTask declaration.
*/ public ActivityImpl parseServiceTask(Element serviceTaskElement, ScopeImpl scope) { ActivityImpl activity = createActivityOnScope(serviceTaskElement, scope); String type = serviceTaskElement.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "type"); String className = serviceTaskElement.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "class"); String expression = serviceTaskElement.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "expression"); String delegateExpression = serviceTaskElement.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "delegateExpression"); String resultVariableName = serviceTaskElement.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "resultVariable"); if (resultVariableName == null) { resultVariableName = serviceTaskElement.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "resultVariableName"); } String implementation = serviceTaskElement.attribute("implementation"); String operationRef = this.resolveName(serviceTaskElement.attribute("operationRef")); activity.setAsync(isAsync(serviceTaskElement)); activity.setExclusive(isExclusive(serviceTaskElement)); if (type != null) { if (type.equalsIgnoreCase("mail")) { parseEmailServiceTask(activity, serviceTaskElement, parseFieldDeclarations(serviceTaskElement)); } else if (type.equalsIgnoreCase("mule")) { parseMuleServiceTask(activity, serviceTaskElement, parseFieldDeclarations(serviceTaskElement)); } else if (type.equalsIgnoreCase("shell")) { parseShellServiceTask(activity, serviceTaskElement, parseFieldDeclarations(serviceTaskElement)); } else { addError("Invalid usage of type attribute: '" + type + "'", serviceTaskElement); } } else if (className != null && className.trim().length() > 0) { if (resultVariableName != null) { addError("'resultVariableName' not supported for service tasks using 'class'", serviceTaskElement); } activity.setActivityBehavior(new ClassDelegate(className, parseFieldDeclarations(serviceTaskElement))); } else if (delegateExpression != null) { if 
(resultVariableName != null) { addError("'resultVariableName' not supported for service tasks using 'delegateExpression'", serviceTaskElement); } activity.setActivityBehavior(new ServiceTaskDelegateExpressionActivityBehavior(expressionManager.createExpression(delegateExpression), parseFieldDeclarations(serviceTaskElement))); } else if (expression != null && expression.trim().length() > 0) { activity.setActivityBehavior(new ServiceTaskExpressionActivityBehavior(expressionManager.createExpression(expression), resultVariableName)); } else if (implementation != null && operationRef != null && implementation.equalsIgnoreCase("##WebService")) { if (!this.operations.containsKey(operationRef)) { addError(operationRef + " does not exist", serviceTaskElement); } else { Operation operation = this.operations.get(operationRef); WebServiceActivityBehavior webServiceActivityBehavior = new WebServiceActivityBehavior(operation); Element ioSpecificationElement = serviceTaskElement.element("ioSpecification"); if (ioSpecificationElement != null) { IOSpecification ioSpecification = this.parseIOSpecification(ioSpecificationElement); webServiceActivityBehavior.setIoSpecification(ioSpecification); } for (Element dataAssociationElement : serviceTaskElement.elements("dataInputAssociation")) { AbstractDataAssociation dataAssociation = this.parseDataInputAssociation(dataAssociationElement); webServiceActivityBehavior.addDataInputAssociation(dataAssociation); } for (Element dataAssociationElement : serviceTaskElement.elements("dataOutputAssociation")) { AbstractDataAssociation dataAssociation = this.parseDataOutputAssociation(dataAssociationElement); webServiceActivityBehavior.addDataOutputAssociation(dataAssociation); } activity.setActivityBehavior(webServiceActivityBehavior); } } else { addError("One of the attributes 'class', 'delegateExpression', 'type', 'operation', or 'expression' is mandatory on serviceTask.", serviceTaskElement); } parseExecutionListenersOnScope(serviceTaskElement, 
activity); for (BpmnParseListener parseListener : parseListeners) { parseListener.parseServiceTask(serviceTaskElement, scope, activity); } return activity; } /** * Parses a businessRuleTask declaration. */ public ActivityImpl parseBusinessRuleTask(Element businessRuleTaskElement, ScopeImpl scope) { if (businessRuleTaskElement.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "class")!=null || businessRuleTaskElement.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "expression") !=null || businessRuleTaskElement.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "delegateExpression") != null) { // ACT-1164: If expression or class is set on a BusinessRuleTask it behaves like a service task // to allow implementing the rule handling yourself return parseServiceTask(businessRuleTaskElement, scope); } else { ActivityImpl activity = createActivityOnScope(businessRuleTaskElement, scope); BusinessRuleTaskActivityBehavior ruleActivity = new BusinessRuleTaskActivityBehavior(); String ruleVariableInputString = businessRuleTaskElement.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "ruleVariablesInput"); String rulesString = businessRuleTaskElement.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "rules"); String excludeString = businessRuleTaskElement.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "exclude"); String resultVariableNameString = businessRuleTaskElement.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "resultVariable"); activity.setAsync(isAsync(businessRuleTaskElement)); activity.setExclusive(isExclusive(businessRuleTaskElement)); if (resultVariableNameString == null) { resultVariableNameString = businessRuleTaskElement.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "resultVariableName"); } if (ruleVariableInputString != null) { List<String> ruleVariableInputObjects = parseCommaSeparatedList(ruleVariableInputString); for (String ruleVariableInputObject : ruleVariableInputObjects) { 
ruleActivity.addRuleVariableInputIdExpression(expressionManager.createExpression(ruleVariableInputObject.trim())); } } if (rulesString != null) { List<String> rules = parseCommaSeparatedList(rulesString); for (String rule : rules) { ruleActivity.addRuleIdExpression(expressionManager.createExpression(rule.trim())); } if (excludeString != null) { excludeString = excludeString.trim(); if ("true".equalsIgnoreCase(excludeString) == false && "false".equalsIgnoreCase(excludeString) == false) { addError("'exclude' only supports true or false for business rule tasks", businessRuleTaskElement); } else { ruleActivity.setExclude(Boolean.valueOf(excludeString.toLowerCase())); } } } else if (excludeString != null) { addError("'exclude' not supported for business rule tasks not defining 'rules'", businessRuleTaskElement); } if (resultVariableNameString != null) { resultVariableNameString = resultVariableNameString.trim(); if (resultVariableNameString.length() > 0 == false) { addError("'resultVariable' must contain a text value for business rule tasks", businessRuleTaskElement); } else { ruleActivity.setResultVariable(resultVariableNameString); } } else { ruleActivity.setResultVariable("org.activiti.engine.rules.OUTPUT"); } activity.setActivityBehavior(ruleActivity); parseExecutionListenersOnScope(businessRuleTaskElement, activity); for (BpmnParseListener parseListener : parseListeners) { parseListener.parseBusinessRuleTask(businessRuleTaskElement, scope, activity); } return activity; } } /** * Parses a sendTask declaration. 
   */
  public ActivityImpl parseSendTask(Element sendTaskElement, ScopeImpl scope) {
    ActivityImpl activity = createActivityOnScope(sendTaskElement, scope);

    activity.setAsync(isAsync(sendTaskElement));
    activity.setExclusive(isExclusive(sendTaskElement));

    // for e-mail
    String type = sendTaskElement.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "type");

    // for web service
    String implementation = sendTaskElement.attribute("implementation");
    String operationRef = this.resolveName(sendTaskElement.attribute("operationRef"));

    // for e-mail
    if (type != null) {
      if (type.equalsIgnoreCase("mail")) {
        parseEmailServiceTask(activity, sendTaskElement, parseFieldDeclarations(sendTaskElement));
      } else if (type.equalsIgnoreCase("mule")) {
        parseMuleServiceTask(activity, sendTaskElement, parseFieldDeclarations(sendTaskElement));
      } else {
        addError("Invalid usage of type attribute: '" + type + "'", sendTaskElement);
      }

      // for web service
    } else if (implementation != null && operationRef != null && implementation.equalsIgnoreCase("##WebService")) {
      if (!this.operations.containsKey(operationRef)) {
        addError(operationRef + " does not exist", sendTaskElement);
      } else {
        Operation operation = this.operations.get(operationRef);
        WebServiceActivityBehavior webServiceActivityBehavior = new WebServiceActivityBehavior(operation);

        Element ioSpecificationElement = sendTaskElement.element("ioSpecification");
        if (ioSpecificationElement != null) {
          IOSpecification ioSpecification = this.parseIOSpecification(ioSpecificationElement);
          webServiceActivityBehavior.setIoSpecification(ioSpecification);
        }

        for (Element dataAssociationElement : sendTaskElement.elements("dataInputAssociation")) {
          AbstractDataAssociation dataAssociation = this.parseDataInputAssociation(dataAssociationElement);
          webServiceActivityBehavior.addDataInputAssociation(dataAssociation);
        }

        for (Element dataAssociationElement : sendTaskElement.elements("dataOutputAssociation")) {
          AbstractDataAssociation dataAssociation = this.parseDataOutputAssociation(dataAssociationElement);
          webServiceActivityBehavior.addDataOutputAssociation(dataAssociation);
        }

        activity.setActivityBehavior(webServiceActivityBehavior);
      }
    } else {
      addError("One of the attributes 'type' or 'operation' is mandatory on sendTask.", sendTaskElement);
    }

    parseExecutionListenersOnScope(sendTaskElement, activity);

    for (BpmnParseListener parseListener : parseListeners) {
      parseListener.parseSendTask(sendTaskElement, scope, activity);
    }
    return activity;
  }

  /**
   * Parses a dataOutputAssociation: with a sourceRef child a message-implicit
   * association is built, otherwise the 'transformation' child expression is
   * used. Assumes 'targetRef' (and 'transformation' in the else-branch) are
   * present — missing elements would NPE here; TODO confirm upstream schema
   * validation guarantees them.
   */
  protected AbstractDataAssociation parseDataOutputAssociation(Element dataAssociationElement) {
    String targetRef = dataAssociationElement.element("targetRef").getText();

    if (dataAssociationElement.element("sourceRef") != null) {
      String sourceRef = dataAssociationElement.element("sourceRef").getText();
      return new MessageImplicitDataOutputAssociation(targetRef, sourceRef);
    } else {
      Expression transformation = this.expressionManager.createExpression(dataAssociationElement.element("transformation").getText());
      AbstractDataAssociation dataOutputAssociation = new TransformationDataOutputAssociation(null, targetRef, transformation);
      return dataOutputAssociation;
    }
  }

  /**
   * Wires a Mule send behavior onto the activity. The Mule behavior class is
   * looked up reflectively so the engine has no hard dependency on the Mule
   * module; a missing class is reported as a parse error.
   */
  protected void parseMuleServiceTask(ActivityImpl activity, Element serviceTaskElement, List<FieldDeclaration> fieldDeclarations) {
    try {
      Class< ? > theClass = Class.forName("org.activiti.mule.MuleSendActivitiBehavior");
      activity.setActivityBehavior((ActivityBehavior) ClassDelegate.instantiateDelegate(theClass, fieldDeclarations));
    } catch (ClassNotFoundException e) {
      addError("Could not find org.activiti.mule.MuleSendActivitiBehavior", serviceTaskElement);
    }
  }

  /**
   * Wires the built-in mail behavior onto the activity after validating its
   * field declarations.
   */
  protected void parseEmailServiceTask(ActivityImpl activity, Element serviceTaskElement, List<FieldDeclaration> fieldDeclarations) {
    validateFieldDeclarationsForEmail(serviceTaskElement, fieldDeclarations);
    activity.setActivityBehavior((MailActivityBehavior) ClassDelegate.instantiateDelegate(MailActivityBehavior.class, fieldDeclarations));
  }

  /**
   * Wires the built-in shell behavior onto the activity after validating its
   * field declarations.
   */
  protected void parseShellServiceTask(ActivityImpl activity, Element serviceTaskElement, List<FieldDeclaration> fieldDeclarations) {
    validateFieldDeclarationsForShell(serviceTaskElement, fieldDeclarations);
    activity.setActivityBehavior((ActivityBehavior) ClassDelegate.instantiateDelegate(ShellActivityBehavior.class, fieldDeclarations));
  }

  /**
   * A mail task must declare a 'to' recipient and at least one of the
   * 'text'/'html' body fields.
   */
  protected void validateFieldDeclarationsForEmail(Element serviceTaskElement, List<FieldDeclaration> fieldDeclarations) {
    boolean toDefined = false;
    boolean textOrHtmlDefined = false;
    for (FieldDeclaration fieldDeclaration : fieldDeclarations) {
      if (fieldDeclaration.getName().equals("to")) {
        toDefined = true;
      }
      if (fieldDeclaration.getName().equals("html")) {
        textOrHtmlDefined = true;
      }
      if (fieldDeclaration.getName().equals("text")) {
        textOrHtmlDefined = true;
      }
    }

    if (!toDefined) {
      addError("No recipient is defined on the mail activity", serviceTaskElement);
    }
    if (!textOrHtmlDefined) {
      addError("Text or html field should be provided", serviceTaskElement);
    }
  }

  /**
   * A shell task must declare a 'command' field, and its boolean-valued
   * fields ('wait', 'redirectError', 'cleanEnv') must be literal true/false.
   * Note this assumes every field value is a {@link FixedValue}; an
   * expression-valued field would fail the cast — TODO confirm intended.
   */
  protected void validateFieldDeclarationsForShell(Element serviceTaskElement, List<FieldDeclaration> fieldDeclarations) {
    boolean shellCommandDefined = false;

    for (FieldDeclaration fieldDeclaration : fieldDeclarations) {
      String fieldName = fieldDeclaration.getName();
      FixedValue fieldFixedValue = (FixedValue) fieldDeclaration.getValue();
      String fieldValue = fieldFixedValue.getExpressionText();

      shellCommandDefined |= fieldName.equals("command");

      if ((fieldName.equals("wait") || fieldName.equals("redirectError") || fieldName.equals("cleanEnv"))
              && !fieldValue.toLowerCase().equals("true") && !fieldValue.toLowerCase().equals("false")) {
        addError("undefined value for shell " + fieldName + " parameter :" + fieldValue.toString(), serviceTaskElement);
      }
    }

    if (!shellCommandDefined) {
      addError("No shell command is defined on the shell activity", serviceTaskElement);
    }
  }

  /**
   * Collects all activiti:field declarations below the given element. They may
   * live under an 'extensionElements' child (BPMN standard) or directly under
   * the element (custom extensions).
   */
  public List<FieldDeclaration> parseFieldDeclarations(Element element) {
    List<FieldDeclaration> fieldDeclarations = new ArrayList<FieldDeclaration>();

    Element elementWithFieldInjections = element.element("extensionElements");
    if (elementWithFieldInjections == null) { // Custom extensions will just
                                              // have the <field.. as a
                                              // subelement
      elementWithFieldInjections = element;
    }
    List<Element> fieldDeclarationElements = elementWithFieldInjections.elementsNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "field");
    if (fieldDeclarationElements != null && !fieldDeclarationElements.isEmpty()) {
      for (Element fieldDeclarationElement : fieldDeclarationElements) {
        FieldDeclaration fieldDeclaration = parseFieldDeclaration(element, fieldDeclarationElement);
        if (fieldDeclaration != null) {
          fieldDeclarations.add(fieldDeclaration);
        }
      }
    }

    return fieldDeclarations;
  }

  /**
   * Parses a single field declaration: tries the string form first, then the
   * expression form; reports an error when neither is present.
   */
  protected FieldDeclaration parseFieldDeclaration(Element serviceTaskElement, Element fieldDeclarationElement) {
    String fieldName = fieldDeclarationElement.attribute("name");

    FieldDeclaration fieldDeclaration = parseStringFieldDeclaration(fieldDeclarationElement, serviceTaskElement, fieldName);
    if (fieldDeclaration == null) {
      fieldDeclaration = parseExpressionFieldDeclaration(fieldDeclarationElement, serviceTaskElement, fieldName);
    }

    if (fieldDeclaration == null) {
      addError("One of the following is mandatory on a field declaration: one of attributes stringValue|expression " +
"or one of child elements string|expression", serviceTaskElement); } return fieldDeclaration; } protected FieldDeclaration parseStringFieldDeclaration(Element fieldDeclarationElement, Element serviceTaskElement, String fieldName) { try { String fieldValue = getStringValueFromAttributeOrElement("stringValue", "string", fieldDeclarationElement); if (fieldValue != null) { return new FieldDeclaration(fieldName, Expression.class.getName(), new FixedValue(fieldValue)); } } catch (ActivitiException ae) { if (ae.getMessage().contains("multiple elements with tag name")) { addError("Multiple string field declarations found", serviceTaskElement); } else { addError("Error when paring field declarations: " + ae.getMessage(), serviceTaskElement); } } return null; } protected FieldDeclaration parseExpressionFieldDeclaration(Element fieldDeclarationElement, Element serviceTaskElement, String fieldName) { try { String expression = getStringValueFromAttributeOrElement("expression", "expression", fieldDeclarationElement); if (expression != null && expression.trim().length() > 0) { return new FieldDeclaration(fieldName, Expression.class.getName(), expressionManager.createExpression(expression)); } } catch (ActivitiException ae) { if (ae.getMessage().contains("multiple elements with tag name")) { addError("Multiple expression field declarations found", serviceTaskElement); } else { addError("Error when paring field declarations: " + ae.getMessage(), serviceTaskElement); } } return null; } protected String getStringValueFromAttributeOrElement(String attributeName, String elementName, Element element) { String value = null; String attributeValue = element.attribute(attributeName); Element childElement = element.elementNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, elementName); String stringElementText = null; if (attributeValue != null && childElement != null) { addError("Can't use attribute '" + attributeName + "' and element '" + elementName + "' together, only use one", element); } else 
if (childElement != null) {
      stringElementText = childElement.getText();
      if (stringElementText == null || stringElementText.length() == 0) {
        addError("No valid value found in attribute '" + attributeName + "' nor element '" + elementName + "'", element);
      } else {
        // Use text of element
        value = stringElementText;
      }
    } else if (attributeValue != null && attributeValue.length() > 0) {
      // Using attribute
      value = attributeValue;
    }

    return value;
  }

  /**
   * Parses a task with no specific type (behaves as passthrough).
   */
  public ActivityImpl parseTask(Element taskElement, ScopeImpl scope) {
    ActivityImpl activity = createActivityOnScope(taskElement, scope);
    activity.setActivityBehavior(new TaskActivityBehavior());
    activity.setAsync(isAsync(taskElement));
    activity.setExclusive(isExclusive(taskElement));

    parseExecutionListenersOnScope(taskElement, activity);

    // Give registered parse listeners the chance to post-process the activity.
    for (BpmnParseListener parseListener : parseListeners) {
      parseListener.parseTask(taskElement, scope, activity);
    }
    return activity;
  }

  /**
   * Parses a manual task.
   */
  public ActivityImpl parseManualTask(Element manualTaskElement, ScopeImpl scope) {
    ActivityImpl activity = createActivityOnScope(manualTaskElement, scope);
    activity.setActivityBehavior(new ManualTaskActivityBehavior());
    parseExecutionListenersOnScope(manualTaskElement, activity);
    for (BpmnParseListener parseListener : parseListeners) {
      parseListener.parseManualTask(manualTaskElement, scope, activity);
    }
    return activity;
  }

  /**
   * Parses a receive task.
   */
  public ActivityImpl parseReceiveTask(Element receiveTaskElement, ScopeImpl scope) {
    ActivityImpl activity = createActivityOnScope(receiveTaskElement, scope);
    activity.setActivityBehavior(new ReceiveTaskActivityBehavior());
    activity.setAsync(isAsync(receiveTaskElement));
    activity.setExclusive(isExclusive(receiveTaskElement));
    parseExecutionListenersOnScope(receiveTaskElement, activity);
    for (BpmnParseListener parseListener : parseListeners) {
      parseListener.parseReceiveTask(receiveTaskElement, scope, activity);
    }
    return activity;
  }

  /* userTask specific finals */
  // BPMN element names used for resource assignment on user tasks.
  protected static final String HUMAN_PERFORMER = "humanPerformer";
  protected static final String POTENTIAL_OWNER = "potentialOwner";

  protected static final String RESOURCE_ASSIGNMENT_EXPR = "resourceAssignmentExpression";
  protected static final String FORMAL_EXPRESSION = "formalExpression";

  // Prefixes recognised inside formal expressions, e.g. user(kermit), group(admin).
  protected static final String USER_PREFIX = "user(";
  protected static final String GROUP_PREFIX = "group(";

  // Activiti extension attribute names (in the Activiti BPMN extensions namespace).
  protected static final String ASSIGNEE_EXTENSION = "assignee";
  protected static final String CANDIDATE_USERS_EXTENSION = "candidateUsers";
  protected static final String CANDIDATE_GROUPS_EXTENSION = "candidateGroups";
  protected static final String DUE_DATE_EXTENSION = "dueDate";
  protected static final String PRIORITY_EXTENSION = "priority";

  /**
   * Parses a userTask declaration.
*/ public ActivityImpl parseUserTask(Element userTaskElement, ScopeImpl scope) { ActivityImpl activity = createActivityOnScope(userTaskElement, scope); activity.setAsync(isAsync(userTaskElement)); activity.setExclusive(isExclusive(userTaskElement)); TaskDefinition taskDefinition = parseTaskDefinition(userTaskElement, activity.getId(), (ProcessDefinitionEntity) scope.getProcessDefinition()); UserTaskActivityBehavior userTaskActivity = new UserTaskActivityBehavior(expressionManager, taskDefinition); activity.setActivityBehavior(userTaskActivity); parseProperties(userTaskElement, activity); parseExecutionListenersOnScope(userTaskElement, activity); for (BpmnParseListener parseListener : parseListeners) { parseListener.parseUserTask(userTaskElement, scope, activity); } return activity; } public TaskDefinition parseTaskDefinition(Element taskElement, String taskDefinitionKey, ProcessDefinitionEntity processDefinition) { TaskFormHandler taskFormHandler; String taskFormHandlerClassName = taskElement.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "formHandlerClass"); if (taskFormHandlerClassName != null) { taskFormHandler = (TaskFormHandler) ReflectUtil.instantiate(taskFormHandlerClassName); } else { taskFormHandler = new DefaultTaskFormHandler(); } taskFormHandler.parseConfiguration(taskElement, deployment, processDefinition, this); TaskDefinition taskDefinition = new TaskDefinition(taskFormHandler); taskDefinition.setKey(taskDefinitionKey); processDefinition.getTaskDefinitions().put(taskDefinitionKey, taskDefinition); String name = taskElement.attribute("name"); if (name != null) { taskDefinition.setNameExpression(expressionManager.createExpression(name)); } String descriptionStr = parseDocumentation(taskElement); if (descriptionStr != null) { taskDefinition.setDescriptionExpression(expressionManager.createExpression(descriptionStr)); } parseHumanPerformer(taskElement, taskDefinition); parsePotentialOwner(taskElement, taskDefinition); // Activiti custom extension 
parseUserTaskCustomExtensions(taskElement, taskDefinition); return taskDefinition; } protected void parseHumanPerformer(Element taskElement, TaskDefinition taskDefinition) { List<Element> humanPerformerElements = taskElement.elements(HUMAN_PERFORMER); if (humanPerformerElements.size() > 1) { addError("Invalid task definition: multiple " + HUMAN_PERFORMER + " sub elements defined for " + taskDefinition.getNameExpression(), taskElement); } else if (humanPerformerElements.size() == 1) { Element humanPerformerElement = humanPerformerElements.get(0); if (humanPerformerElement != null) { parseHumanPerformerResourceAssignment(humanPerformerElement, taskDefinition); } } } protected void parsePotentialOwner(Element taskElement, TaskDefinition taskDefinition) { List<Element> potentialOwnerElements = taskElement.elements(POTENTIAL_OWNER); for (Element potentialOwnerElement : potentialOwnerElements) { parsePotentialOwnerResourceAssignment(potentialOwnerElement, taskDefinition); } } protected void parseHumanPerformerResourceAssignment(Element performerElement, TaskDefinition taskDefinition) { Element raeElement = performerElement.element(RESOURCE_ASSIGNMENT_EXPR); if (raeElement != null) { Element feElement = raeElement.element(FORMAL_EXPRESSION); if (feElement != null) { taskDefinition.setAssigneeExpression(expressionManager.createExpression(feElement.getText())); } } } protected void parsePotentialOwnerResourceAssignment(Element performerElement, TaskDefinition taskDefinition) { Element raeElement = performerElement.element(RESOURCE_ASSIGNMENT_EXPR); if (raeElement != null) { Element feElement = raeElement.element(FORMAL_EXPRESSION); if (feElement != null) { List<String> assignmentExpressions = parseCommaSeparatedList(feElement.getText()); for (String assignmentExpression : assignmentExpressions) { assignmentExpression = assignmentExpression.trim(); if (assignmentExpression.startsWith(USER_PREFIX)) { String userAssignementId = getAssignmentId(assignmentExpression, 
USER_PREFIX); taskDefinition.addCandidateUserIdExpression(expressionManager.createExpression(userAssignementId)); } else if (assignmentExpression.startsWith(GROUP_PREFIX)) { String groupAssignementId = getAssignmentId(assignmentExpression, GROUP_PREFIX); taskDefinition.addCandidateGroupIdExpression(expressionManager.createExpression(groupAssignementId)); } else { // default: given string is a goupId, as-is. taskDefinition.addCandidateGroupIdExpression(expressionManager.createExpression(assignmentExpression)); } } } } } protected String getAssignmentId(String expression, String prefix) { return expression.substring(prefix.length(), expression.length() - 1).trim(); } protected void parseUserTaskCustomExtensions(Element taskElement, TaskDefinition taskDefinition) { // assignee String assignee = taskElement.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, ASSIGNEE_EXTENSION); if (assignee != null) { if (taskDefinition.getAssigneeExpression() == null) { taskDefinition.setAssigneeExpression(expressionManager.createExpression(assignee)); } else { addError("Invalid usage: duplicate assignee declaration for task " + taskDefinition.getNameExpression(), taskElement); } } // Candidate users String candidateUsersString = taskElement.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, CANDIDATE_USERS_EXTENSION); if (candidateUsersString != null) { List<String> candidateUsers = parseCommaSeparatedList(candidateUsersString); for (String candidateUser : candidateUsers) { taskDefinition.addCandidateUserIdExpression(expressionManager.createExpression(candidateUser.trim())); } } // Candidate groups String candidateGroupsString = taskElement.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, CANDIDATE_GROUPS_EXTENSION); if (candidateGroupsString != null) { List<String> candidateGroups = parseCommaSeparatedList(candidateGroupsString); for (String candidateGroup : candidateGroups) { 
taskDefinition.addCandidateGroupIdExpression(expressionManager.createExpression(candidateGroup.trim())); } } // Task listeners parseTaskListeners(taskElement, taskDefinition); // Due date String dueDateExpression = taskElement.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, DUE_DATE_EXTENSION); if (dueDateExpression != null) { taskDefinition.setDueDateExpression(expressionManager.createExpression(dueDateExpression)); } // Priority final String priorityExpression = taskElement.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, PRIORITY_EXTENSION); if (priorityExpression != null) { taskDefinition.setPriorityExpression(expressionManager.createExpression(priorityExpression)); } } /** * Parses the given String as a list of comma separated entries, where an * entry can possibly be an expression that has comma's. * * If somebody is smart enough to write a regex for this, please let us know. * * @return the entries of the comma separated list, trimmed. */ protected List<String> parseCommaSeparatedList(String s) { List<String> result = new ArrayList<String>(); if (s != null && !"".equals(s)) { StringCharacterIterator iterator = new StringCharacterIterator(s); char c = iterator.first(); StringBuilder strb = new StringBuilder(); boolean insideExpression = false; while (c != StringCharacterIterator.DONE) { if (c == '{' || c == '$') { insideExpression = true; } else if (c == '}') { insideExpression = false; } else if (c == ',' && !insideExpression) { result.add(strb.toString().trim()); strb.delete(0, strb.length()); } if (c != ',' || (insideExpression)) { strb.append(c); } c = iterator.next(); } if (strb.length() > 0) { result.add(strb.toString().trim()); } } return result; } protected void parseTaskListeners(Element userTaskElement, TaskDefinition taskDefinition) { Element extentionsElement = userTaskElement.element("extensionElements"); if (extentionsElement != null) { List<Element> taskListenerElements = 
extentionsElement.elementsNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "taskListener"); for (Element taskListenerElement : taskListenerElements) { String eventName = taskListenerElement.attribute("event"); if (eventName != null) { if (TaskListener.EVENTNAME_CREATE.equals(eventName) || TaskListener.EVENTNAME_ASSIGNMENT.equals(eventName) || TaskListener.EVENTNAME_COMPLETE.equals(eventName)) { TaskListener taskListener = parseTaskListener(taskListenerElement); taskDefinition.addTaskListener(eventName, taskListener); } else { addError("Invalid eventName for taskListener: choose 'create' | 'assignment' | 'complete'", userTaskElement); } } else { addError("Event is mandatory on taskListener", userTaskElement); } } } } protected TaskListener parseTaskListener(Element taskListenerElement) { TaskListener taskListener = null; String className = taskListenerElement.attribute("class"); String expression = taskListenerElement.attribute("expression"); String delegateExpression = taskListenerElement.attribute("delegateExpression"); if (className != null) { taskListener = new ClassDelegate(className, parseFieldDeclarations(taskListenerElement)); } else if (expression != null) { taskListener = new ExpressionTaskListener(expressionManager.createExpression(expression)); } else if (delegateExpression != null) { taskListener = new DelegateExpressionTaskListener(expressionManager.createExpression(delegateExpression), parseFieldDeclarations(taskListenerElement)); } else { addError("Element 'class', 'expression' or 'delegateExpression' is mandatory on taskListener", taskListenerElement); } return taskListener; } /** * Parses the end events of a certain level in the process (process, * subprocess or another scope). * * @param parentElement * The 'parent' element that contains the end events (process, * subprocess). * @param scope * The {@link ScopeImpl} to which the end events must be added. 
*/ public void parseEndEvents(Element parentElement, ScopeImpl scope) { for (Element endEventElement : parentElement.elements("endEvent")) { ActivityImpl activity = createActivityOnScope(endEventElement, scope); Element errorEventDefinition = endEventElement.element("errorEventDefinition"); Element cancelEventDefinition = endEventElement.element("cancelEventDefinition"); Element terminateEventDefinition = endEventElement.element("terminateEventDefinition"); if (errorEventDefinition != null) { // error end event String errorRef = errorEventDefinition.attribute("errorRef"); if (errorRef == null || "".equals(errorRef)) { addError("'errorRef' attribute is mandatory on error end event", errorEventDefinition); } else { Error error = errors.get(errorRef); if (error != null && (error.getErrorCode() == null || "".equals(error.getErrorCode()))) { addError("'errorCode' is mandatory on errors referenced by throwing error event definitions, but the error '" + error.getId() + "' does not define one.", errorEventDefinition); } activity.setProperty("type", "errorEndEvent"); activity.setActivityBehavior(new ErrorEndEventActivityBehavior(error != null ? 
error.getErrorCode() : errorRef)); } } else if (cancelEventDefinition != null) { if (scope.getProperty("type")==null || !scope.getProperty("type").equals("transaction")) { addError("end event with cancelEventDefinition only supported inside transaction subprocess", cancelEventDefinition); } else { activity.setProperty("type", "cancelEndEvent"); activity.setActivityBehavior(new CancelEndEventActivityBehavior()); } } else if (terminateEventDefinition != null) { activity.setActivityBehavior(new TerminateEndEventActivityBehavior()); } else { // default: none end event activity.setActivityBehavior(new NoneEndEventActivityBehavior()); } for (BpmnParseListener parseListener : parseListeners) { parseListener.parseEndEvent(endEventElement, scope, activity); } parseExecutionListenersOnScope(endEventElement, activity); } } /** * Parses the boundary events of a certain 'level' (process, subprocess or * other scope). * * Note that the boundary events are not parsed during the parsing of the bpmn * activities, since the semantics are different (boundaryEvent needs to be * added as nested activity to the reference activity on PVM level). * * @param parentElement * The 'parent' element that contains the activities (process, * subprocess). * @param scopeElement * The {@link ScopeImpl} to which the activities must be added. 
*/
  public void parseBoundaryEvents(Element parentElement, ScopeImpl scopeElement) {
    for (Element boundaryEventElement : parentElement.elements("boundaryEvent")) {

      // The boundary event is attached to an activity, referenced by the
      // 'attachedToRef' attribute
      String attachedToRef = boundaryEventElement.attribute("attachedToRef");
      if (attachedToRef == null || attachedToRef.equals("")) {
        addError("AttachedToRef is required when using a timerEventDefinition", boundaryEventElement);
      }

      // Representation structure-wise is a nested activity in the activity to
      // which it is attached
      String id = boundaryEventElement.attribute("id");
      if (LOGGER.isLoggable(Level.FINE)) {
        LOGGER.fine("Parsing boundary event " + id);
      }

      ActivityImpl parentActivity = scopeElement.findActivity(attachedToRef);
      if (parentActivity == null) {
        addError("Invalid reference in boundary event. Make sure that the referenced activity is "
                + "defined in the same scope as the boundary event", boundaryEventElement);
      }
      ActivityImpl nestedActivity = createActivityOnScope(boundaryEventElement, parentActivity);

      // 'cancelActivity' defaults to "true": boundary events are interrupting
      // unless the model states otherwise.
      String cancelActivity = boundaryEventElement.attribute("cancelActivity", "true");
      boolean interrupting = cancelActivity.equals("true");

      // Catch event behavior is the same for most types
      ActivityBehavior behavior = null;

      // Depending on the sub-element definition, the correct activityBehavior
      // parsing is selected
      Element timerEventDefinition = boundaryEventElement.element("timerEventDefinition");
      Element errorEventDefinition = boundaryEventElement.element("errorEventDefinition");
      Element signalEventDefinition = boundaryEventElement.element("signalEventDefinition");
      Element cancelEventDefinition = boundaryEventElement.element("cancelEventDefinition");
      Element compensateEventDefinition = boundaryEventElement.element("compensateEventDefinition");
      Element messageEventDefinition = boundaryEventElement.element("messageEventDefinition");
      if (timerEventDefinition != null) {
        behavior = new BoundaryEventActivityBehavior(interrupting, nestedActivity.getId());
        parseBoundaryTimerEventDefinition(timerEventDefinition, interrupting, nestedActivity);
      } else if (errorEventDefinition != null) {
        interrupting = true; // non-interrupting not yet supported
        behavior = new BoundaryEventActivityBehavior(interrupting, nestedActivity.getId());
        parseBoundaryErrorEventDefinition(errorEventDefinition, interrupting, parentActivity, nestedActivity);
      } else if (signalEventDefinition != null) {
        behavior = new BoundaryEventActivityBehavior(interrupting, nestedActivity.getId());
        parseBoundarySignalEventDefinition(signalEventDefinition, interrupting, nestedActivity);
      } else if (cancelEventDefinition != null) {
        // always interrupting
        behavior = parseBoundaryCancelEventDefinition(cancelEventDefinition, nestedActivity);
      } else if (compensateEventDefinition != null) {
        behavior = new BoundaryEventActivityBehavior(interrupting, nestedActivity.getId());
        parseCatchCompensateEventDefinition(compensateEventDefinition, nestedActivity);
      } else if (messageEventDefinition != null) {
        behavior = new BoundaryEventActivityBehavior(interrupting, nestedActivity.getId());
        parseBoundaryMessageEventDefinition(messageEventDefinition,
interrupting, nestedActivity); } else { addError("Unsupported boundary event type", boundaryEventElement); } for (BpmnParseListener parseListener : parseListeners) { parseListener.parseBoundaryEvent(boundaryEventElement, scopeElement, nestedActivity); } nestedActivity.setActivityBehavior(behavior); } } /** * Parses a boundary timer event. The end-result will be that the given nested * activity will get the appropriate {@link ActivityBehavior}. * * @param timerEventDefinition * The XML element corresponding with the timer event details * @param interrupting * Indicates whether this timer is interrupting. * @param timerActivity * The activity which maps to the structure of the timer event on the * boundary of another activity. Note that this is NOT the activity * onto which the boundary event is attached, but a nested activity * inside this activity, specifically created for this event. */ public void parseBoundaryTimerEventDefinition(Element timerEventDefinition, boolean interrupting, ActivityImpl timerActivity) { timerActivity.setProperty("type", "boundaryTimer"); TimerDeclarationImpl timerDeclaration = parseTimer(timerEventDefinition, timerActivity, TimerExecuteNestedActivityJobHandler.TYPE); // ACT-1427 if (interrupting) { timerDeclaration.setInterruptingTimer(true); } addTimerDeclaration(timerActivity.getParent(), timerDeclaration); if (timerActivity.getParent() instanceof ActivityImpl) { ((ActivityImpl) timerActivity.getParent()).setScope(true); } for (BpmnParseListener parseListener : parseListeners) { parseListener.parseBoundaryTimerEventDefinition(timerEventDefinition, interrupting, timerActivity); } } public void parseBoundarySignalEventDefinition(Element element, boolean interrupting, ActivityImpl signalActivity) { signalActivity.setProperty("type", "boundarySignal"); EventSubscriptionDeclaration signalDefinition = parseSignalEventDefinition(element); if(signalActivity.getId() == null) { addError("boundary event has no id", element); } 
signalDefinition.setActivityId(signalActivity.getId()); addEventSubscriptionDeclaration(signalDefinition, signalActivity.getParent(), element); if (signalActivity.getParent() instanceof ActivityImpl) { ((ActivityImpl) signalActivity.getParent()).setScope(true); } for (BpmnParseListener parseListener : parseListeners) { parseListener.parseBoundarySignalEventDefinition(element, interrupting, signalActivity); } } public void parseBoundaryMessageEventDefinition(Element element, boolean interrupting, ActivityImpl messageActivity) { messageActivity.setProperty("type", "boundaryMessage"); EventSubscriptionDeclaration messageEventDefinition = parseMessageEventDefinition(element); if(messageActivity.getId() == null) { addError("boundary event has no id", element); } messageEventDefinition.setActivityId(messageActivity.getId()); addEventSubscriptionDeclaration(messageEventDefinition, messageActivity.getParent(), element); if (messageActivity.getParent() instanceof ActivityImpl) { ((ActivityImpl) messageActivity.getParent()).setScope(true); } for (BpmnParseListener parseListener : parseListeners) { parseListener.parseBoundaryMessageEventDefinition(element, interrupting, messageActivity); } } @SuppressWarnings("unchecked") protected void parseTimerStartEventDefinition(Element timerEventDefinition, ActivityImpl timerActivity, ProcessDefinitionEntity processDefinition) { timerActivity.setProperty("type", "startTimerEvent"); TimerDeclarationImpl timerDeclaration = parseTimer(timerEventDefinition, timerActivity, TimerStartEventJobHandler.TYPE); timerDeclaration.setJobHandlerConfiguration(processDefinition.getKey()); List<TimerDeclarationImpl> timerDeclarations = (List<TimerDeclarationImpl>) processDefinition.getProperty(PROPERTYNAME_START_TIMER); if (timerDeclarations == null) { timerDeclarations = new ArrayList<TimerDeclarationImpl>(); processDefinition.setProperty(PROPERTYNAME_START_TIMER, timerDeclarations); } timerDeclarations.add(timerDeclaration); } protected void 
parseIntemediateSignalEventDefinition(Element element, ActivityImpl signalActivity, boolean isAfterEventBasedGateway) {
    signalActivity.setProperty("type", "intermediateSignalCatch");

    EventSubscriptionDeclaration signalDefinition = parseSignalEventDefinition(element);
    if (isAfterEventBasedGateway) {
      // After an event-based gateway the subscription lives on the gateway's scope.
      signalDefinition.setActivityId(signalActivity.getId());
      addEventSubscriptionDeclaration(signalDefinition, signalActivity.getParent(), element);
    } else {
      signalActivity.setScope(true);
      addEventSubscriptionDeclaration(signalDefinition, signalActivity, element);
    }

    for (BpmnParseListener parseListener : parseListeners) {
      parseListener.parseIntermediateSignalCatchEventDefinition(element, signalActivity);
    }
  }

  /**
   * Parses a signalEventDefinition element into an event subscription declaration.
   *
   * @return the {@link EventSubscriptionDeclaration} for the referenced signal, or
   *         null when the mandatory 'signalRef' attribute is missing (an error is
   *         registered via addError in that case)
   */
  protected EventSubscriptionDeclaration parseSignalEventDefinition(Element signalEventDefinitionElement) {
    String signalRef = signalEventDefinitionElement.attribute("signalRef");
    if (signalRef == null) {
      addError("signalEventDefinition does not have required property 'signalRef'", signalEventDefinitionElement);
      return null;
    } else {
      SignalDefinition signalDefinition = signals.get(resolveName(signalRef));
      if (signalDefinition == null) {
        addError("Could not find signal with id '" + signalRef + "'", signalEventDefinitionElement);
      }
      // Fall back to the raw reference when the signal is unresolved: an error has
      // already been registered above, and this mirrors the errorRef fallback used
      // for error end events, avoiding a NullPointerException here.
      EventSubscriptionDeclaration signalEventDefinition =
          new EventSubscriptionDeclaration(signalDefinition != null ? signalDefinition.getName() : signalRef, "signal");

      boolean asynch = "true".equals(signalEventDefinitionElement.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "async", "false"));
      signalEventDefinition.setAsync(asynch);

      return signalEventDefinition;
    }
  }

  protected void parseIntemediateTimerEventDefinition(Element timerEventDefinition, ActivityImpl timerActivity, boolean isAfterEventBasedGateway) {
    timerActivity.setProperty("type", "intermediateTimer");
    TimerDeclarationImpl timerDeclaration = parseTimer(timerEventDefinition, timerActivity, TimerCatchIntermediateEventJobHandler.TYPE);
    if (isAfterEventBasedGateway) {
      addTimerDeclaration(timerActivity.getParent(),
timerDeclaration); }else { addTimerDeclaration(timerActivity, timerDeclaration); timerActivity.setScope(true); } for (BpmnParseListener parseListener : parseListeners) { parseListener.parseIntermediateTimerEventDefinition(timerEventDefinition, timerActivity); } } protected TimerDeclarationImpl parseTimer(Element timerEventDefinition, ScopeImpl timerActivity, String jobHandlerType) { // TimeDate TimerDeclarationType type = TimerDeclarationType.DATE; Expression expression = parseExpression(timerEventDefinition, "timeDate"); // TimeCycle if (expression == null) { type = TimerDeclarationType.CYCLE; expression = parseExpression(timerEventDefinition, "timeCycle"); } // TimeDuration if (expression == null) { type = TimerDeclarationType.DURATION; expression = parseExpression(timerEventDefinition, "timeDuration"); } // neither date, cycle or duration configured! if (expression==null) { addError("Timer needs configuration (either timeDate, timeCycle or timeDuration is needed).", timerEventDefinition); } // Parse the timer declaration // TODO move the timer declaration into the bpmn activity or next to the // TimerSession TimerDeclarationImpl timerDeclaration = new TimerDeclarationImpl(expression, type, jobHandlerType); timerDeclaration.setJobHandlerConfiguration(timerActivity.getId()); timerDeclaration.setExclusive("true".equals(timerEventDefinition.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "exclusive", String.valueOf(JobEntity.DEFAULT_EXCLUSIVE)))); return timerDeclaration; } protected Expression parseExpression(Element parent, String name) { Element value = parent.element(name); if (value != null) { String expressionText = value.getText().trim(); return expressionManager.createExpression(expressionText); } return null; } public void parseBoundaryErrorEventDefinition(Element errorEventDefinition, boolean interrupting, ActivityImpl activity, ActivityImpl nestedErrorEventActivity) { nestedErrorEventActivity.setProperty("type", "boundaryError"); ScopeImpl 
catchingScope = nestedErrorEventActivity.getParent(); ((ActivityImpl) catchingScope).setScope(true); String errorRef = errorEventDefinition.attribute("errorRef"); Error error = null; ErrorEventDefinition definition = new ErrorEventDefinition(nestedErrorEventActivity.getId()); if (errorRef != null) { error = errors.get(errorRef); definition.setErrorCode(error == null ? errorRef : error.getErrorCode()); } addErrorEventDefinition(definition, catchingScope); for (BpmnParseListener parseListener : parseListeners) { parseListener.parseBoundaryErrorEventDefinition(errorEventDefinition, interrupting, activity, nestedErrorEventActivity); } } protected void addErrorEventDefinition(ErrorEventDefinition errorEventDefinition, ScopeImpl catchingScope) { List<ErrorEventDefinition> errorEventDefinitions = (List<ErrorEventDefinition>) catchingScope.getProperty(PROPERTYNAME_ERROR_EVENT_DEFINITIONS); if(errorEventDefinitions == null) { errorEventDefinitions = new ArrayList<ErrorEventDefinition>(); catchingScope.setProperty(PROPERTYNAME_ERROR_EVENT_DEFINITIONS, errorEventDefinitions); } errorEventDefinitions.add(errorEventDefinition); Collections.sort(errorEventDefinitions, ErrorEventDefinition.comparator); } protected List<ActivityImpl> getAllChildActivitiesOfType(String type, ScopeImpl scope) { List<ActivityImpl> children = new ArrayList<ActivityImpl>(); for (ActivityImpl childActivity : scope.getActivities()) { if (type.equals(childActivity.getProperty("type"))) { children.add(childActivity); } children.addAll(getAllChildActivitiesOfType(type, childActivity)); } return children; } /** * Checks if the given activity is a child activity of the * possibleParentActivity. 
*/ protected boolean isChildActivity(ActivityImpl activityToCheck, ActivityImpl possibleParentActivity) { for (ActivityImpl child : possibleParentActivity.getActivities()) { if (child.getId().equals(activityToCheck.getId()) || isChildActivity(activityToCheck, child)) { return true; } } return false; } @SuppressWarnings("unchecked") protected void addTimerDeclaration(ScopeImpl scope, TimerDeclarationImpl timerDeclaration) { List<TimerDeclarationImpl> timerDeclarations = (List<TimerDeclarationImpl>) scope.getProperty(PROPERTYNAME_TIMER_DECLARATION); if (timerDeclarations == null) { timerDeclarations = new ArrayList<TimerDeclarationImpl>(); scope.setProperty(PROPERTYNAME_TIMER_DECLARATION, timerDeclarations); } timerDeclarations.add(timerDeclaration); } @SuppressWarnings("unchecked") protected void addVariableDeclaration(ScopeImpl scope, VariableDeclaration variableDeclaration) { List<VariableDeclaration> variableDeclarations = (List<VariableDeclaration>) scope.getProperty(PROPERTYNAME_VARIABLE_DECLARATIONS); if (variableDeclarations == null) { variableDeclarations = new ArrayList<VariableDeclaration>(); scope.setProperty(PROPERTYNAME_VARIABLE_DECLARATIONS, variableDeclarations); } variableDeclarations.add(variableDeclaration); } /** * Parses a subprocess (formally known as an embedded subprocess): a subprocess * defined within another process definition. * * @param subProcessElement * The XML element corresponding with the subprocess definition * @param scope * The current scope on which the subprocess is defined. 
*/
  public ActivityImpl parseSubProcess(Element subProcessElement, ScopeImpl scope) {
    ActivityImpl activity = createActivityOnScope(subProcessElement, scope);

    activity.setAsync(isAsync(subProcessElement));
    activity.setExclusive(isExclusive(subProcessElement));

    Boolean isTriggeredByEvent = parseBooleanAttribute(subProcessElement.attribute("triggeredByEvent"), false);
    activity.setProperty("triggeredByEvent", isTriggeredByEvent);

    // event subprocesses are not scopes
    activity.setScope(!isTriggeredByEvent);
    activity.setActivityBehavior(new SubProcessActivityBehavior());
    parseScope(subProcessElement, activity);

    for (BpmnParseListener parseListener : parseListeners) {
      parseListener.parseSubProcess(subProcessElement, scope, activity);
    }
    return activity;
  }

  /**
   * Parses a transaction subprocess; always a scope, with transactional behavior.
   */
  protected ActivityImpl parseTransaction(Element transactionElement, ScopeImpl scope) {
    ActivityImpl activity = createActivityOnScope(transactionElement, scope);

    activity.setAsync(isAsync(transactionElement));
    activity.setExclusive(isExclusive(transactionElement));

    activity.setScope(true);
    activity.setActivityBehavior(new TransactionActivityBehavior());
    parseScope(transactionElement, activity);

    for (BpmnParseListener parseListener : parseListeners) {
      parseListener.parseTransaction(transactionElement, scope, activity);
    }
    return activity;
  }

  /**
   * Parses a call activity (currently only supporting calling subprocesses).
   *
   * @param callActivityElement
   *          The XML element defining the call activity
   * @param scope
   *          The current scope on which the call activity is defined.
*/ public ActivityImpl parseCallActivity(Element callActivityElement, ScopeImpl scope) { ActivityImpl activity = createActivityOnScope(callActivityElement, scope); activity.setAsync(isAsync(callActivityElement)); activity.setExclusive(isExclusive(callActivityElement)); String calledElement = callActivityElement.attribute("calledElement"); if (calledElement == null) { addError("Missing attribute 'calledElement'", callActivityElement); } CallActivityBehavior callActivityBehaviour = null; String expressionRegex = "\\$+\\{+.+\\}"; if (calledElement != null && calledElement.matches(expressionRegex)) { callActivityBehaviour = new CallActivityBehavior(expressionManager.createExpression(calledElement)); } else { callActivityBehaviour = new CallActivityBehavior(calledElement); } Element extentionsElement = callActivityElement.element("extensionElements"); if (extentionsElement != null) { // input data elements for (Element listenerElement : extentionsElement.elementsNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "in")) { String sourceExpression = listenerElement.attribute("sourceExpression"); String target = listenerElement.attribute("target"); if (sourceExpression != null) { Expression expression = expressionManager.createExpression(sourceExpression.trim()); callActivityBehaviour.addDataInputAssociation(new SimpleDataInputAssociation(expression, target)); } else { String source = listenerElement.attribute("source"); callActivityBehaviour.addDataInputAssociation(new SimpleDataInputAssociation(source, target)); } } // output data elements for (Element listenerElement : extentionsElement.elementsNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "out")) { String sourceExpression = listenerElement.attribute("sourceExpression"); String target = listenerElement.attribute("target"); if (sourceExpression != null) { Expression expression = expressionManager.createExpression(sourceExpression.trim()); callActivityBehaviour.addDataOutputAssociation(new MessageImplicitDataOutputAssociation(target, 
expression)); } else { String source = listenerElement.attribute("source"); callActivityBehaviour.addDataOutputAssociation(new MessageImplicitDataOutputAssociation(target, source)); } } } // // parse data input and output // for (Element dataAssociationElement : // callActivityElement.elements("dataInputAssociation")) { // AbstractDataAssociation dataAssociation = // this.parseDataInputAssociation(dataAssociationElement); // callActivityBehaviour.addDataInputAssociation(dataAssociation); // } // // for (Element dataAssociationElement : // callActivityElement.elements("dataOutputAssociation")) { // AbstractDataAssociation dataAssociation = // this.parseDataOutputAssociation(dataAssociationElement); // callActivityBehaviour.addDataOutputAssociation(dataAssociation); // } activity.setScope(true); activity.setActivityBehavior(callActivityBehaviour); parseExecutionListenersOnScope(callActivityElement, activity); for (BpmnParseListener parseListener : parseListeners) { parseListener.parseCallActivity(callActivityElement, scope, activity); } return activity; } /** * Parses the properties of an element (if any) that can contain properties * (processes, activities, etc.) * * Returns true if property subelemens are found. * * @param element * The element that can contain properties. * @param activity * The activity where the property declaration is done. */ public void parseProperties(Element element, ActivityImpl activity) { List<Element> propertyElements = element.elements("property"); for (Element propertyElement : propertyElements) { parseProperty(propertyElement, activity); } } /** * Parses one property definition. * * @param propertyElement * The 'property' element that defines how a property looks like and * is handled. 
*/ public void parseProperty(Element propertyElement, ActivityImpl activity) { String id = propertyElement.attribute("id"); String name = propertyElement.attribute("name"); // If name isn't given, use the id as name if (name == null) { if (id == null) { addError("Invalid property usage on line " + propertyElement.getLine() + ": no id or name specified.", propertyElement); } else { name = id; } } String itemSubjectRef = propertyElement.attribute("itemSubjectRef"); String type = null; if (itemSubjectRef != null) { ItemDefinition itemDefinition = itemDefinitions.get(itemSubjectRef); if (itemDefinition != null) { StructureDefinition structure = itemDefinition.getStructureDefinition(); type = structure.getId(); } else { addError("Invalid itemDefinition reference: " + itemSubjectRef + " not found", propertyElement); } } parsePropertyCustomExtensions(activity, propertyElement, name, type); } /** * Parses the custom extensions for properties. * * @param activity * The activity where the property declaration is done. * @param propertyElement * The 'property' element defining the property. * @param propertyName * The name of the property. * @param propertyType * The type of the property. */ public void parsePropertyCustomExtensions(ActivityImpl activity, Element propertyElement, String propertyName, String propertyType) { if (propertyType == null) { String type = propertyElement.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "type"); propertyType = type != null ? 
type : "string"; // default is string } VariableDeclaration variableDeclaration = new VariableDeclaration(propertyName, propertyType); addVariableDeclaration(activity, variableDeclaration); activity.setScope(true); String src = propertyElement.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "src"); if (src != null) { variableDeclaration.setSourceVariableName(src); } String srcExpr = propertyElement.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "srcExpr"); if (srcExpr != null) { Expression sourceExpression = expressionManager.createExpression(srcExpr); variableDeclaration.setSourceExpression(sourceExpression); } String dst = propertyElement.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "dst"); if (dst != null) { variableDeclaration.setDestinationVariableName(dst); } String destExpr = propertyElement.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "dstExpr"); if (destExpr != null) { Expression destinationExpression = expressionManager.createExpression(destExpr); variableDeclaration.setDestinationExpression(destinationExpression); } String link = propertyElement.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "link"); if (link != null) { variableDeclaration.setLink(link); } String linkExpr = propertyElement.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "linkExpr"); if (linkExpr != null) { Expression linkExpression = expressionManager.createExpression(linkExpr); variableDeclaration.setLinkExpression(linkExpression); } for (BpmnParseListener parseListener : parseListeners) { parseListener.parseProperty(propertyElement, variableDeclaration, activity); } } /** * Parses all sequence flow of a scope. * * @param processElement * The 'process' element wherein the sequence flow are defined. * @param scope * The scope to which the sequence flow must be added. 
*/ public void parseSequenceFlow(Element processElement, ScopeImpl scope) { for (Element sequenceFlowElement : processElement.elements("sequenceFlow")) { String id = sequenceFlowElement.attribute("id"); String sourceRef = sequenceFlowElement.attribute("sourceRef"); String destinationRef = sequenceFlowElement.attribute("targetRef"); // Implicit check: sequence flow cannot cross (sub) process boundaries: we // don't do a processDefinition.findActivity here ActivityImpl sourceActivity = scope.findActivity(sourceRef); ActivityImpl destinationActivity = scope.findActivity(destinationRef); if (sourceActivity == null) { addError("Invalid source '" + sourceRef + "' of sequence flow '" + id + "'", sequenceFlowElement); } else if (destinationActivity == null) { addError("Invalid destination '" + destinationRef + "' of sequence flow '" + id + "'", sequenceFlowElement); /*} else if(sourceActivity.getActivityBehavior() instanceof EventBasedGatewayActivityBehavior) { // ignore*/ } else if(!(sourceActivity.getActivityBehavior() instanceof EventBasedGatewayActivityBehavior) && destinationActivity.getActivityBehavior() instanceof IntermediateCatchEventActivitiBehaviour && (destinationActivity.getParentActivity() != null) && (destinationActivity.getParentActivity().getActivityBehavior() instanceof EventBasedGatewayActivityBehavior)) { addError("Invalid incoming sequenceflow for intermediateCatchEvent with id '"+destinationActivity.getId()+"' connected to an event-based gateway.", sequenceFlowElement); } else { TransitionImpl transition = sourceActivity.createOutgoingTransition(id); sequenceFlows.put(id, transition); transition.setProperty("name", sequenceFlowElement.attribute("name")); transition.setProperty("documentation", parseDocumentation(sequenceFlowElement)); transition.setDestination(destinationActivity); parseSequenceFlowConditionExpression(sequenceFlowElement, transition); parseExecutionListenersOnTransition(sequenceFlowElement, transition); for (BpmnParseListener 
parseListener : parseListeners) { parseListener.parseSequenceFlow(sequenceFlowElement, scope, transition); } } } } /** * Parses a condition expression on a sequence flow. * * @param seqFlowElement * The 'sequenceFlow' element that can contain a condition. * @param seqFlow * The sequenceFlow object representation to which the condition must * be added. */ public void parseSequenceFlowConditionExpression(Element seqFlowElement, TransitionImpl seqFlow) { Element conditionExprElement = seqFlowElement.element("conditionExpression"); if (conditionExprElement != null) { String expression = conditionExprElement.getText().trim(); String type = conditionExprElement.attributeNS(BpmnParser.XSI_NS, "type"); if (type != null) { String value = type.contains(":") ? resolveName(type) : BpmnParser.BPMN20_NS + ":" + type; if (!value.equals(ATTRIBUTEVALUE_T_FORMAL_EXPRESSION)) { addError("Invalid type, only tFormalExpression is currently supported", conditionExprElement); } } Condition expressionCondition = new UelExpressionCondition(expressionManager.createExpression(expression)); seqFlow.setProperty(PROPERTYNAME_CONDITION_TEXT, expression); seqFlow.setProperty(PROPERTYNAME_CONDITION, expressionCondition); } } /** * Parses all execution-listeners on a scope. * * @param scopeElement * the XML element containing the scope definition. * @param scope * the scope to add the executionListeners to. 
* @param postProcessActivities */ public void parseExecutionListenersOnScope(Element scopeElement, ScopeImpl scope) { Element extentionsElement = scopeElement.element("extensionElements"); if (extentionsElement != null) { List<Element> listenerElements = extentionsElement.elementsNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "executionListener"); for (Element listenerElement : listenerElements) { String eventName = listenerElement.attribute("event"); if (isValidEventNameForScope(eventName, listenerElement)) { ExecutionListener listener = parseExecutionListener(listenerElement); if (listener != null) { scope.addExecutionListener(eventName, listener); } } } } } /** * Check if the given event name is valid. If not, an appropriate error is * added. */ protected boolean isValidEventNameForScope(String eventName, Element listenerElement) { if (eventName != null && eventName.trim().length() > 0) { if ("start".equals(eventName) || "end".equals(eventName)) { return true; } else { addError("Attribute 'eventName' must be one of {start|end}", listenerElement); } } else { addError("Attribute 'eventName' is mandatory on listener", listenerElement); } return false; } public void parseExecutionListenersOnTransition(Element activitiElement, TransitionImpl activity) { Element extentionsElement = activitiElement.element("extensionElements"); if (extentionsElement != null) { List<Element> listenerElements = extentionsElement.elementsNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "executionListener"); for (Element listenerElement : listenerElements) { ExecutionListener listener = parseExecutionListener(listenerElement); if (listener != null) { // Since a transition only fires event 'take', we don't parse the // eventName, it is ignored activity.addExecutionListener(listener); } } } } /** * Parses an {@link ExecutionListener} implementation for the given * executionListener element. * * @param executionListenerElement * the XML element containing the executionListener definition. 
*/ public ExecutionListener parseExecutionListener(Element executionListenerElement) { ExecutionListener executionListener = null; String className = executionListenerElement.attribute("class"); String expression = executionListenerElement.attribute("expression"); String delegateExpression = executionListenerElement.attribute("delegateExpression"); if (className != null) { executionListener = new ClassDelegate(className, parseFieldDeclarations(executionListenerElement)); } else if (expression != null) { executionListener = new ExpressionExecutionListener(expressionManager.createExpression(expression)); } else if (delegateExpression != null) { executionListener = new DelegateExpressionExecutionListener(expressionManager.createExpression(delegateExpression), parseFieldDeclarations(executionListenerElement)); } else { addError("Element 'class' or 'expression' is mandatory on executionListener", executionListenerElement); } return executionListener; } /** * Retrieves the {@link Operation} corresponding with the given operation * identifier. 
*/ public Operation getOperation(String operationId) { return operations.get(operationId); } // Diagram interchange // ///////////////////////////////////////////////////////////////// public void parseDiagramInterchangeElements() { // Multiple BPMNDiagram possible List<Element> diagrams = rootElement.elementsNS(BpmnParser.BPMN_DI_NS, "BPMNDiagram"); if (!diagrams.isEmpty()) { for (Element diagramElement : diagrams) { parseBPMNDiagram(diagramElement); } } } public void parseBPMNDiagram(Element bpmndiagramElement) { // Each BPMNdiagram needs to have exactly one BPMNPlane Element bpmnPlane = bpmndiagramElement.elementNS(BpmnParser.BPMN_DI_NS, "BPMNPlane"); if (bpmnPlane != null) { parseBPMNPlane(bpmnPlane); } } public void parseBPMNPlane(Element bpmnPlaneElement) { String bpmnElement = bpmnPlaneElement.attribute("bpmnElement"); if (bpmnElement != null && !"".equals(bpmnElement)) { // there seems to be only on process without collaboration if (getProcessDefinition(bpmnElement) != null) { getProcessDefinition(bpmnElement).setGraphicalNotationDefined(true); } List<Element> shapes = bpmnPlaneElement.elementsNS(BpmnParser.BPMN_DI_NS, "BPMNShape"); for (Element shape : shapes) { parseBPMNShape(shape); } List<Element> edges = bpmnPlaneElement.elementsNS(BpmnParser.BPMN_DI_NS, "BPMNEdge"); for (Element edge : edges) { parseBPMNEdge(edge); } } else { addError("'bpmnElement' attribute is required on BPMNPlane ", bpmnPlaneElement); } } public void parseBPMNShape(Element bpmnShapeElement) { String bpmnElement = bpmnShapeElement.attribute("bpmnElement"); if (bpmnElement != null && !"".equals(bpmnElement)) { // For collaborations, their are also shape definitions for the // participants / processes if (participantProcesses.get(bpmnElement) != null) { ProcessDefinitionEntity procDef = getProcessDefinition(participantProcesses.get(bpmnElement)); procDef.setGraphicalNotationDefined(true); // The participation that references this process, has a bounds to be rendered + a name as wel 
parseDIBounds(bpmnShapeElement, procDef.getParticipantProcess()); return; } for (ProcessDefinitionEntity processDefinition : getProcessDefinitions()) { ActivityImpl activity = processDefinition.findActivity(bpmnElement); if (activity != null) { parseDIBounds(bpmnShapeElement, activity); // collapsed or expanded String isExpanded = bpmnShapeElement.attribute("isExpanded"); if (isExpanded != null) { activity.setProperty(PROPERTYNAME_ISEXPANDED, parseBooleanAttribute(isExpanded)); } } else { Lane lane = processDefinition.getLaneForId(bpmnElement); if(lane != null) { // The shape represents a lane parseDIBounds(bpmnShapeElement, lane); } else if(!elementIds.contains(bpmnElement)) { // It might not be an activity nor a lane, but it might still reference 'something' addError("Invalid reference in 'bpmnElement' attribute, activity " + bpmnElement + "not found", bpmnShapeElement); } } } } else { addError("'bpmnElement' attribute is required on BPMNShape", bpmnShapeElement); } } protected void parseDIBounds(Element bpmnShapeElement, HasDIBounds target) { Element bounds = bpmnShapeElement.elementNS(BpmnParser.BPMN_DC_NS, "Bounds"); if (bounds != null) { target.setX(parseDoubleAttribute(bpmnShapeElement, "x", bounds.attribute("x"), true).intValue()); target.setY(parseDoubleAttribute(bpmnShapeElement, "y", bounds.attribute("y"), true).intValue()); target.setWidth(parseDoubleAttribute(bpmnShapeElement, "width", bounds.attribute("width"), true).intValue()); target.setHeight(parseDoubleAttribute(bpmnShapeElement, "height", bounds.attribute("height"), true).intValue()); } else { addError("'Bounds' element is required", bpmnShapeElement); } } public void parseBPMNEdge(Element bpmnEdgeElement) { String sequenceFlowId = bpmnEdgeElement.attribute("bpmnElement"); if (sequenceFlowId != null && !"".equals(sequenceFlowId)) { if (sequenceFlows != null && sequenceFlows.containsKey(sequenceFlowId)) { TransitionImpl sequenceFlow = sequenceFlows.get(sequenceFlowId); List<Element> 
waypointElements = bpmnEdgeElement.elementsNS(BpmnParser.OMG_DI_NS, "waypoint"); if (waypointElements.size() >= 2) { List<Integer> waypoints = new ArrayList<Integer>(); for (Element waypointElement : waypointElements) { waypoints.add(parseDoubleAttribute(waypointElement, "x", waypointElement.attribute("x"), true).intValue()); waypoints.add(parseDoubleAttribute(waypointElement, "y", waypointElement.attribute("y"), true).intValue()); } sequenceFlow.setWaypoints(waypoints); } else { addError("Minimum 2 waypoint elements must be definted for a 'BPMNEdge'", bpmnEdgeElement); } } else if(!elementIds.contains(sequenceFlowId)) { // it might not be a sequenceFlow but it might still reference 'something' addError("Invalid reference in 'bpmnElement' attribute, sequenceFlow " + sequenceFlowId + "not found", bpmnEdgeElement); } } else { addError("'bpmnElement' attribute is required on BPMNEdge", bpmnEdgeElement); } } // Getters, setters and Parser overriden operations // //////////////////////////////////////// public List<ProcessDefinitionEntity> getProcessDefinitions() { return processDefinitions; } public ProcessDefinitionEntity getProcessDefinition(String processDefinitionKey) { for (ProcessDefinitionEntity processDefinition : processDefinitions) { if (processDefinition.getKey().equals(processDefinitionKey)) { return processDefinition; } } return null; } @Override public BpmnParse name(String name) { super.name(name); return this; } @Override public BpmnParse sourceInputStream(InputStream inputStream) { super.sourceInputStream(inputStream); return this; } @Override public BpmnParse sourceResource(String resource, ClassLoader classLoader) { super.sourceResource(resource, classLoader); return this; } @Override public BpmnParse sourceResource(String resource) { super.sourceResource(resource); return this; } @Override public BpmnParse sourceString(String string) { super.sourceString(string); return this; } @Override public BpmnParse sourceUrl(String url) { super.sourceUrl(url); 
return this; } @Override public BpmnParse sourceUrl(URL url) { super.sourceUrl(url); return this; } public void addStructure(StructureDefinition structure) { this.structures.put(structure.getId(), structure); } public void addService(BpmnInterfaceImplementation bpmnInterfaceImplementation) { this.interfaceImplementations.put(bpmnInterfaceImplementation.getName(), bpmnInterfaceImplementation); } public void addOperation(OperationImplementation operationImplementation) { this.operationImplementations.put(operationImplementation.getId(), operationImplementation); } public Boolean parseBooleanAttribute(String booleanText, boolean defaultValue) { if (booleanText == null) { return defaultValue; } else { return parseBooleanAttribute(booleanText); } } public Boolean parseBooleanAttribute(String booleanText) { if ("true".equals(booleanText) || "enabled".equals(booleanText) || "on".equals(booleanText) || "active".equals(booleanText) || "yes".equals(booleanText)) { return Boolean.TRUE; } if ("false".equals(booleanText) || "disabled".equals(booleanText) || "off".equals(booleanText) || "inactive".equals(booleanText) || "no".equals(booleanText)) { return Boolean.FALSE; } return null; } public Double parseDoubleAttribute(Element element, String attributename, String doubleText, boolean required) { if (required && (doubleText == null || "".equals(doubleText))) { addError(attributename + " is required", element); } else { try { return Double.parseDouble(doubleText); } catch (NumberFormatException e) { addError("Cannot parse " + attributename + ": " + e.getMessage(), element); } } return -1.0; } protected boolean isExclusive(Element element) { return "true".equals(element.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "exclusive", String.valueOf(JobEntity.DEFAULT_EXCLUSIVE))); } protected boolean isAsync(Element element) { return "true".equals(element.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "async")); } }
modules/activiti-engine/src/main/java/org/activiti/engine/impl/bpmn/parser/BpmnParse.java
/* Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.activiti.engine.impl.bpmn.parser; import java.io.InputStream; import java.net.URL; import java.text.StringCharacterIterator; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.logging.Level; import java.util.logging.Logger; import org.activiti.engine.ActivitiException; import org.activiti.engine.delegate.ExecutionListener; import org.activiti.engine.delegate.Expression; import org.activiti.engine.delegate.TaskListener; import org.activiti.engine.impl.Condition; import org.activiti.engine.impl.bpmn.behavior.AbstractBpmnActivityBehavior; import org.activiti.engine.impl.bpmn.behavior.BoundaryEventActivityBehavior; import org.activiti.engine.impl.bpmn.behavior.BusinessRuleTaskActivityBehavior; import org.activiti.engine.impl.bpmn.behavior.CallActivityBehavior; import org.activiti.engine.impl.bpmn.behavior.CancelBoundaryEventActivityBehavior; import org.activiti.engine.impl.bpmn.behavior.CancelEndEventActivityBehavior; import org.activiti.engine.impl.bpmn.behavior.ErrorEndEventActivityBehavior; import org.activiti.engine.impl.bpmn.behavior.EventBasedGatewayActivityBehavior; import org.activiti.engine.impl.bpmn.behavior.EventSubProcessStartEventActivityBehavior; import org.activiti.engine.impl.bpmn.behavior.ExclusiveGatewayActivityBehavior; import org.activiti.engine.impl.bpmn.behavior.InclusiveGatewayActivityBehavior; import 
org.activiti.engine.impl.bpmn.behavior.IntermediateCatchEventActivitiBehaviour; import org.activiti.engine.impl.bpmn.behavior.IntermediateThrowCompensationEventActivityBehavior; import org.activiti.engine.impl.bpmn.behavior.IntermediateThrowNoneEventActivityBehavior; import org.activiti.engine.impl.bpmn.behavior.IntermediateThrowSignalEventActivityBehavior; import org.activiti.engine.impl.bpmn.behavior.MailActivityBehavior; import org.activiti.engine.impl.bpmn.behavior.ManualTaskActivityBehavior; import org.activiti.engine.impl.bpmn.behavior.MultiInstanceActivityBehavior; import org.activiti.engine.impl.bpmn.behavior.NoneEndEventActivityBehavior; import org.activiti.engine.impl.bpmn.behavior.NoneStartEventActivityBehavior; import org.activiti.engine.impl.bpmn.behavior.ParallelGatewayActivityBehavior; import org.activiti.engine.impl.bpmn.behavior.ParallelMultiInstanceBehavior; import org.activiti.engine.impl.bpmn.behavior.ReceiveTaskActivityBehavior; import org.activiti.engine.impl.bpmn.behavior.ScriptTaskActivityBehavior; import org.activiti.engine.impl.bpmn.behavior.SequentialMultiInstanceBehavior; import org.activiti.engine.impl.bpmn.behavior.ServiceTaskDelegateExpressionActivityBehavior; import org.activiti.engine.impl.bpmn.behavior.ServiceTaskExpressionActivityBehavior; import org.activiti.engine.impl.bpmn.behavior.ShellActivityBehavior; import org.activiti.engine.impl.bpmn.behavior.SubProcessActivityBehavior; import org.activiti.engine.impl.bpmn.behavior.TaskActivityBehavior; import org.activiti.engine.impl.bpmn.behavior.TerminateEndEventActivityBehavior; import org.activiti.engine.impl.bpmn.behavior.TransactionActivityBehavior; import org.activiti.engine.impl.bpmn.behavior.UserTaskActivityBehavior; import org.activiti.engine.impl.bpmn.behavior.WebServiceActivityBehavior; import org.activiti.engine.impl.bpmn.data.AbstractDataAssociation; import org.activiti.engine.impl.bpmn.data.Assignment; import org.activiti.engine.impl.bpmn.data.ClassStructureDefinition; 
import org.activiti.engine.impl.bpmn.data.Data; import org.activiti.engine.impl.bpmn.data.DataRef; import org.activiti.engine.impl.bpmn.data.IOSpecification; import org.activiti.engine.impl.bpmn.data.ItemDefinition; import org.activiti.engine.impl.bpmn.data.ItemKind; import org.activiti.engine.impl.bpmn.data.SimpleDataInputAssociation; import org.activiti.engine.impl.bpmn.data.StructureDefinition; import org.activiti.engine.impl.bpmn.data.TransformationDataOutputAssociation; import org.activiti.engine.impl.bpmn.helper.ClassDelegate; import org.activiti.engine.impl.bpmn.listener.DelegateExpressionExecutionListener; import org.activiti.engine.impl.bpmn.listener.DelegateExpressionTaskListener; import org.activiti.engine.impl.bpmn.listener.ExpressionExecutionListener; import org.activiti.engine.impl.bpmn.listener.ExpressionTaskListener; import org.activiti.engine.impl.bpmn.parser.BpmnParseListener; import org.activiti.engine.impl.bpmn.webservice.BpmnInterface; import org.activiti.engine.impl.bpmn.webservice.BpmnInterfaceImplementation; import org.activiti.engine.impl.bpmn.webservice.MessageDefinition; import org.activiti.engine.impl.bpmn.webservice.MessageImplicitDataInputAssociation; import org.activiti.engine.impl.bpmn.webservice.MessageImplicitDataOutputAssociation; import org.activiti.engine.impl.bpmn.webservice.Operation; import org.activiti.engine.impl.bpmn.webservice.OperationImplementation; import org.activiti.engine.impl.el.ExpressionManager; import org.activiti.engine.impl.el.FixedValue; import org.activiti.engine.impl.el.UelExpressionCondition; import org.activiti.engine.impl.form.DefaultStartFormHandler; import org.activiti.engine.impl.form.DefaultTaskFormHandler; import org.activiti.engine.impl.form.StartFormHandler; import org.activiti.engine.impl.form.TaskFormHandler; import org.activiti.engine.impl.jobexecutor.TimerCatchIntermediateEventJobHandler; import org.activiti.engine.impl.jobexecutor.TimerDeclarationImpl; import 
org.activiti.engine.impl.jobexecutor.TimerDeclarationType; import org.activiti.engine.impl.jobexecutor.TimerExecuteNestedActivityJobHandler; import org.activiti.engine.impl.jobexecutor.TimerStartEventJobHandler; import org.activiti.engine.impl.persistence.entity.DeploymentEntity; import org.activiti.engine.impl.persistence.entity.JobEntity; import org.activiti.engine.impl.persistence.entity.ProcessDefinitionEntity; import org.activiti.engine.impl.pvm.PvmTransition; import org.activiti.engine.impl.pvm.delegate.ActivityBehavior; import org.activiti.engine.impl.pvm.process.ActivityImpl; import org.activiti.engine.impl.pvm.process.HasDIBounds; import org.activiti.engine.impl.pvm.process.Lane; import org.activiti.engine.impl.pvm.process.LaneSet; import org.activiti.engine.impl.pvm.process.ParticipantProcess; import org.activiti.engine.impl.pvm.process.ProcessDefinitionImpl; import org.activiti.engine.impl.pvm.process.ScopeImpl; import org.activiti.engine.impl.pvm.process.TransitionImpl; import org.activiti.engine.impl.scripting.ScriptingEngines; import org.activiti.engine.impl.task.TaskDefinition; import org.activiti.engine.impl.util.ReflectUtil; import org.activiti.engine.impl.util.xml.Element; import org.activiti.engine.impl.util.xml.Parse; import org.activiti.engine.impl.variable.VariableDeclaration; import org.activiti.engine.repository.ProcessDefinition; /** * Specific parsing of one BPMN 2.0 XML file, created by the {@link BpmnParser}. 
 * 
 * @author Tom Baeyens
 * @author Joram Barrez
 * @author Christian Stettler
 * @author Frederik Heremans
 * @author Falko Menge
 * @author Esteban Robles
 * @author Daniel Meyer
 * @author Saeid Mirzaei
 */
public class BpmnParse extends Parse {

  protected static final Logger LOGGER = Logger.getLogger(BpmnParse.class.getName());

  // Keys under which parse results are stored as properties on process
  // elements (process definitions, activities and transitions).
  public static final String PROPERTYNAME_DOCUMENTATION = "documentation";
  public static final String PROPERTYNAME_INITIAL = "initial";
  public static final String PROPERTYNAME_INITIATOR_VARIABLE_NAME = "initiatorVariableName";
  public static final String PROPERTYNAME_CONDITION = "condition";
  public static final String PROPERTYNAME_CONDITION_TEXT = "conditionText";
  public static final String PROPERTYNAME_VARIABLE_DECLARATIONS = "variableDeclarations";
  public static final String PROPERTYNAME_TIMER_DECLARATION = "timerDeclarations";
  public static final String PROPERTYNAME_ISEXPANDED = "isExpanded";
  public static final String PROPERTYNAME_START_TIMER = "timerStart";
  public static final String PROPERTYNAME_COMPENSATION_HANDLER_ID = "compensationHandler";
  public static final String PROPERTYNAME_IS_FOR_COMPENSATION = "isForCompensation";
  public static final String PROPERTYNAME_ERROR_EVENT_DEFINITIONS = "errorEventDefinitions";
  public static final String PROPERTYNAME_EVENT_SUBSCRIPTION_DECLARATION = "eventDefinitions";

  /* process start authorization specific finals */
  protected static final String POTENTIAL_STARTER = "potentialStarter";
  protected static final String CANDIDATE_STARTER_USERS_EXTENSION = "candidateStarterUsers";
  protected static final String CANDIDATE_STARTER_GROUPS_EXTENSION = "candidateStarterGroups";

  // Fully qualified xsi:type value of a BPMN formal expression.
  protected static final String ATTRIBUTEVALUE_T_FORMAL_EXPRESSION = BpmnParser.BPMN20_NS + ":tFormalExpression";

  /** The deployment to which the parsed process definitions will be added. */
  protected DeploymentEntity deployment;

  /** The end result of the parsing: a list of process definition.
 */
  protected List<ProcessDefinitionEntity> processDefinitions = new ArrayList<ProcessDefinitionEntity>();

  /** Mapping of found errors in BPMN 2.0 file */
  protected Map<String, Error> errors = new HashMap<String, Error>();

  /** A map for storing sequence flow based on their id during parsing. */
  protected Map<String, TransitionImpl> sequenceFlows;

  /**
   * A list of all element IDs. This allows us to parse only what we actually
   * support but still validate the references among elements we do not
   * support.
   */
  protected List<String> elementIds = new ArrayList<String>();

  /** A map for storing the process references of participants */
  protected Map<String, String> participantProcesses = new HashMap<String, String>();

  /**
   * Mapping containing values stored during the first phase of parsing since
   * other elements can reference these messages.
   * 
   * All the map's elements are defined outside the process definition(s), which
   * means that this map doesn't need to be re-initialized for each new process
   * definition.
   */
  protected Map<String, MessageDefinition> messages = new HashMap<String, MessageDefinition>();
  protected Map<String, StructureDefinition> structures = new HashMap<String, StructureDefinition>();
  protected Map<String, BpmnInterfaceImplementation> interfaceImplementations = new HashMap<String, BpmnInterfaceImplementation>();
  protected Map<String, OperationImplementation> operationImplementations = new HashMap<String, OperationImplementation>();
  protected Map<String, ItemDefinition> itemDefinitions = new HashMap<String, ItemDefinition>();
  protected Map<String, BpmnInterface> bpmnInterfaces = new HashMap<String, BpmnInterface>();
  protected Map<String, Operation> operations = new HashMap<String, Operation>();
  protected Map<String, SignalDefinition> signals = new HashMap<String, SignalDefinition>();

  // Members
  protected ExpressionManager expressionManager;
  protected List<BpmnParseListener> parseListeners;
  // Importers keyed by importType URI; see getImporter().
  protected Map<String, XMLImporter> importers = new HashMap<String, XMLImporter>();
  // XML namespace prefix -> namespace URI, collected from xmlns: attributes.
  protected Map<String, String> prefixs = new HashMap<String, String>();
  // targetNamespace attribute of the 'definitions' root element.
  protected String targetNamespace;

  /**
   * Constructor to be called by the {@link BpmnParser}.
   */
  public BpmnParse(BpmnParser parser) {
    super(parser);
    this.expressionManager = parser.getExpressionManager();
    this.parseListeners = parser.getParseListeners();
    setSchemaResource(ReflectUtil.getResource(BpmnParser.BPMN_20_SCHEMA_LOCATION).toString());
    this.initializeXSDItemDefinitions();
  }

  // Registers the built-in xsd:string item definition so processes can
  // reference it without declaring it themselves.
  protected void initializeXSDItemDefinitions() {
    this.itemDefinitions.put("http://www.w3.org/2001/XMLSchema:string", 
            new ItemDefinition("http://www.w3.org/2001/XMLSchema:string", 
                    new ClassStructureDefinition(String.class)));
  }

  public BpmnParse deployment(DeploymentEntity deployment) {
    this.deployment = deployment;
    return this;
  }

  // Runs the whole parse: XML schema validation (in super.execute()) followed
  // by semantic parsing of the definitions document. Accumulated warnings are
  // logged and accumulated errors are turned into an exception at the end.
  @Override
  public BpmnParse execute() {
    super.execute(); // schema validation

    try {
      parseRootElement();
    } catch (Exception e) {
      LOGGER.log(Level.SEVERE, "Unknown exception", e);
      // ALL unexpected exceptions should bubble up since they are not handled
      // accordingly by underlying parse-methods and can't be deployed
      throw new ActivitiException("Error while parsing process: " + e.getMessage(), e);
    } finally {
      if (hasWarnings()) {
        logWarnings();
      }
      if (hasErrors()) {
        throwActivitiExceptionForErrors();
      }
    }

    return this;
  }

  /**
   * Parses the 'definitions' root element
   */
  protected void parseRootElement() {
    collectElementIds();
    parseDefinitionsAttributes();
    parseImports();
    parseItemDefinitions();
    parseMessages();
    parseInterfaces();
    parseErrors();
    parseSignals();
    parseProcessDefinitions();
    parseCollaboration();

    // Diagram interchange parsing must be after parseProcessDefinitions,
    // since it depends and sets values on existing process definition objects
    parseDiagramInterchangeElements();

    for (BpmnParseListener parseListener : parseListeners) {
      parseListener.parseRootElement(rootElement, getProcessDefinitions());
    }
  }

  protected void collectElementIds() {
    rootElement.collectIds(elementIds);
  }

  // Reads typeLanguage/expressionLanguage/targetNamespace from the root
  // element and collects all xmlns: prefix declarations for resolveName().
  protected void parseDefinitionsAttributes() {
    String typeLanguage = rootElement.attribute("typeLanguage");
    String expressionLanguage = rootElement.attribute("expressionLanguage");
    this.targetNamespace = rootElement.attribute("targetNamespace");

    if (typeLanguage != null) {
      if (typeLanguage.contains("XMLSchema")) {
        LOGGER.info("XMLSchema currently not supported as typeLanguage");
      }
    }

    if (expressionLanguage != null) {
      if (expressionLanguage.contains("XPath")) {
        LOGGER.info("XPath currently not supported as expressionLanguage");
      }
    }

    for (String attribute : rootElement.attributes()) {
      if (attribute.startsWith("xmlns:")) {
        String prefixValue = rootElement.attribute(attribute);
        String prefixName = attribute.substring(6);
        this.prefixs.put(prefixName, prefixValue);
      }
    }
  }

  // Resolves "prefix:localName" to "namespaceUri:localName" using the
  // collected xmlns declarations; unprefixed names resolve against the
  // targetNamespace.
  protected String resolveName(String name) {
    if (name == null) {
      return null;
    }
    int indexOfP = name.indexOf(':');
    if (indexOfP != -1) {
      String prefix = name.substring(0, indexOfP);
      String resolvedPrefix = this.prefixs.get(prefix);
      return resolvedPrefix + ":" + name.substring(indexOfP + 1);
    } else {
      return this.targetNamespace + ":" + name;
    }
  }

  /**
   * Parses the rootElement importing structures
   * 
   * @param rootElement
   *          The root element of the XML file.
   */
  protected void parseImports() {
    List<Element> imports = rootElement.elements("import");
    for (Element theImport : imports) {
      String importType = theImport.attribute("importType");
      XMLImporter importer = this.getImporter(importType, theImport);
      if (importer == null) {
        addError("Could not import item of type " + importType, theImport);
      } else {
        importer.importFrom(theImport, this);
      }
    }
  }

  // Returns a cached importer for the given importType, lazily creating one.
  // Only the WSDL import type is supported; the CXF importer is loaded
  // reflectively so the dependency stays optional. Returns null (and records
  // an error) when no importer is available.
  protected XMLImporter getImporter(String importType, Element theImport) {
    if (this.importers.containsKey(importType)) {
      return this.importers.get(importType);
    } else {
      if (importType.equals("http://schemas.xmlsoap.org/wsdl/")) {
        Class< ? > wsdlImporterClass;
        try {
          wsdlImporterClass = Class.forName("org.activiti.engine.impl.webservice.CxfWSDLImporter", true, Thread.currentThread().getContextClassLoader());
          XMLImporter newInstance = (XMLImporter) wsdlImporterClass.newInstance();
          this.importers.put(importType, newInstance);
          return newInstance;
        } catch (Exception e) {
          // NOTE(review): this message also fires when the class was found but
          // failed to instantiate — the root cause in 'e' is not reported.
          addError("Could not find importer for type " + importType, theImport);
        }
      }
      return null;
    }
  }

  /**
   * Parses the itemDefinitions of the given definitions file. Item definitions
   * are not contained within a process element, but they can be referenced from
   * inner process elements.
   * 
   * @param definitionsElement
   *          The root element of the XML file.
   */
  public void parseItemDefinitions() {
    for (Element itemDefinitionElement : rootElement.elements("itemDefinition")) {
      String id = itemDefinitionElement.attribute("id");
      String structureRef = this.resolveName(itemDefinitionElement.attribute("structureRef"));
      String itemKind = itemDefinitionElement.attribute("itemKind");
      StructureDefinition structure = null;

      try {
        // it is a class
        Class< ? > classStructure = ReflectUtil.loadClass(structureRef);
        structure = new ClassStructureDefinition(classStructure);
      } catch (ActivitiException e) {
        // it is a reference to a different structure
        structure = this.structures.get(structureRef);
      }

      ItemDefinition itemDefinition = new ItemDefinition(this.targetNamespace + ":" + id, structure);
      if (itemKind != null) {
        itemDefinition.setItemKind(ItemKind.valueOf(itemKind));
      }
      itemDefinitions.put(itemDefinition.getId(), itemDefinition);
    }
  }

  /**
   * Parses the messages of the given definitions file. Messages are not
   * contained within a process element, but they can be referenced from inner
   * process elements.
   *
   * @param definitionsElement
   *          The root element of the XML file.
   */
  public void parseMessages() {
    for (Element messageElement : rootElement.elements("message")) {
      String id = messageElement.attribute("id");
      String itemRef = this.resolveName(messageElement.attribute("itemRef"));
      String name = messageElement.attribute("name");

      // message ids are namespaced with the targetNamespace of the definitions
      MessageDefinition messageDefinition = new MessageDefinition(this.targetNamespace + ":" + id, name);

      // itemRef is optional; when present it must resolve to an itemDefinition
      // parsed earlier (see parseItemDefinitions)
      if (itemRef != null) {
        if (!this.itemDefinitions.containsKey(itemRef)) {
          addError(itemRef + " does not exist", messageElement);
        } else {
          ItemDefinition itemDefinition = this.itemDefinitions.get(itemRef);
          messageDefinition.setItemDefinition(itemDefinition);
        }
      }
      this.messages.put(messageDefinition.getId(), messageDefinition);
    }
  }

  /**
   * Parses the signals of the given definitions file. Signals are not
   * contained within a process element, but they can be referenced from inner
   * process elements.
   *
   * @param definitionsElement
   *          The root element of the XML file.
   */
  protected void parseSignals() {
    for (Element signalElement : rootElement.elements("signal")) {
      String id = signalElement.attribute("id");
      String signalName = signalElement.attribute("name");

      // signal names must be unique across the whole definitions file
      for (SignalDefinition signalDefinition : signals.values()) {
        if (signalDefinition.getName().equals(signalName)) {
          addError("duplicate signal name '" + signalName + "'.", signalElement);
        }
      }

      // both id and name are mandatory; only fully specified signals are registered
      if (id == null) {
        addError("signal must have an id", signalElement);
      } else if (signalName == null) {
        addError("signal with id '" + id + "' has no name", signalElement);
      } else {
        SignalDefinition signal = new SignalDefinition();
        // signal ids are namespaced, same scheme as messages and item definitions
        signal.setId(this.targetNamespace + ":" + id);
        signal.setName(signalName);
        this.signals.put(signal.getId(), signal);
      }
    }
  }

  /**
   * Parses the interfaces and operations defined within the root element.
* * @param definitionsElement * The root element of the XML file/ */ public void parseInterfaces() { for (Element interfaceElement : rootElement.elements("interface")) { // Create the interface String id = interfaceElement.attribute("id"); String name = interfaceElement.attribute("name"); String implementationRef = this.resolveName(interfaceElement.attribute("implementationRef")); BpmnInterface bpmnInterface = new BpmnInterface(this.targetNamespace + ":" + id, name); bpmnInterface.setImplementation(this.interfaceImplementations.get(implementationRef)); // Handle all its operations for (Element operationElement : interfaceElement.elements("operation")) { Operation operation = parseOperation(operationElement, bpmnInterface); bpmnInterface.addOperation(operation); } bpmnInterfaces.put(bpmnInterface.getId(), bpmnInterface); } } public Operation parseOperation(Element operationElement, BpmnInterface bpmnInterface) { Element inMessageRefElement = operationElement.element("inMessageRef"); String inMessageRef = this.resolveName(inMessageRefElement.getText()); if (!this.messages.containsKey(inMessageRef)) { addError(inMessageRef + " does not exist", inMessageRefElement); return null; } else { MessageDefinition inMessage = this.messages.get(inMessageRef); String id = operationElement.attribute("id"); String name = operationElement.attribute("name"); String implementationRef = this.resolveName(operationElement.attribute("implementationRef")); Operation operation = new Operation(this.targetNamespace + ":" + id, name, bpmnInterface, inMessage); operation.setImplementation(this.operationImplementations.get(implementationRef)); Element outMessageRefElement = operationElement.element("outMessageRef"); if (outMessageRefElement != null) { String outMessageRef = this.resolveName(outMessageRefElement.getText()); if (this.messages.containsKey(outMessageRef)) { MessageDefinition outMessage = this.messages.get(outMessageRef); operation.setOutMessage(outMessage); } } 
operations.put(operation.getId(), operation); return operation; } } public void parseErrors() { for (Element errorElement : rootElement.elements("error")) { Error error = new Error(); String id = errorElement.attribute("id"); if (id == null) { addError("'id' is mandatory on error definition", errorElement); } error.setId(id); String errorCode = errorElement.attribute("errorCode"); if (errorCode != null) { error.setErrorCode(errorCode); } errors.put(id, error); } } /** * Parses all the process definitions defined within the 'definitions' root * element. * * @param definitionsElement * The root element of the XML file. */ public void parseProcessDefinitions() { for (Element processElement : rootElement.elements("process")) { boolean isExecutable = true; String isExecutableStr = processElement.attribute("isExecutable"); if (isExecutableStr != null) { if (!Boolean.parseBoolean(isExecutableStr)) { isExecutable = false; LOGGER.info("Ignoring non-executable process with id='" + processElement.attribute("id") + "'. Set the attribute isExecutable=\"true\" to deploy this process."); } } else { LOGGER.info("Process with id='" + processElement.attribute("id") + "' has no attribute isExecutable. Assuming it is executable. Better set the attribute explicitely, especially to be compatible with future engine versions which might change the default behavior."); } //Only process executable processes if (isExecutable) { processDefinitions.add(parseProcess(processElement)); } } } /** * Parses the collaboration definition defined within the 'definitions' * root element and get all participants to lookup their process references * during DI parsing. 
*/ public void parseCollaboration() { Element collaboration = rootElement.element("collaboration"); if (collaboration != null) { for (Element participant : collaboration.elements("participant")) { String processRef = participant.attribute("processRef"); if (processRef != null) { ProcessDefinitionImpl procDef = getProcessDefinition(processRef); if(procDef != null) { // Set participant process on the procDef, so it can get rendered later on if needed ParticipantProcess participantProcess = new ParticipantProcess(); participantProcess.setId(participant.attribute("id")); participantProcess.setName(participant.attribute("name")); procDef.setParticipantProcess(participantProcess); participantProcesses.put(participantProcess.getId(), processRef); } } } } } /** * Parses one process (ie anything inside a &lt;process&gt; element). * * @param processElement * The 'process' element. * @return The parsed version of the XML: a {@link ProcessDefinitionImpl} * object. */ public ProcessDefinitionEntity parseProcess(Element processElement) { // reset all mappings that are related to one process definition sequenceFlows = new HashMap<String, TransitionImpl>(); ProcessDefinitionEntity processDefinition = new ProcessDefinitionEntity(); /* * Mapping object model - bpmn xml: processDefinition.id -> generated by * activiti engine processDefinition.key -> bpmn id (required) * processDefinition.name -> bpmn name (optional) */ processDefinition.setKey(processElement.attribute("id")); processDefinition.setName(processElement.attribute("name")); processDefinition.setCategory(rootElement.attribute("targetNamespace")); processDefinition.setDescription(parseDocumentation(processElement)); processDefinition.setProperty(PROPERTYNAME_DOCUMENTATION, parseDocumentation(processElement)); // Kept for backwards compatibility. 
See ACT-1020
    processDefinition.setTaskDefinitions(new HashMap<String, TaskDefinition>());
    processDefinition.setDeploymentId(deployment.getId());

    if (LOGGER.isLoggable(Level.FINE)) {
      LOGGER.fine("Parsing process " + processDefinition.getKey());
    }
    parseScope(processElement, processDefinition);

    // Parse any laneSets defined for this process
    parseLaneSets(processElement, processDefinition);

    // give registered parse listeners a chance to post-process the definition
    for (BpmnParseListener parseListener : parseListeners) {
      parseListener.parseProcess(processElement, processDefinition);
    }

    // now we have parsed anything we can validate some stuff
    validateActivities(processDefinition.getActivities());

    return processDefinition;
  }

  /**
   * Parses all laneSet child elements of the given parent element and attaches
   * the resulting {@link LaneSet} objects (including their lanes) to the
   * process definition.
   *
   * @param parentElement the element (typically the process element) to scan
   * @param processDefinition the definition the lane sets belong to
   */
  protected void parseLaneSets(Element parentElement, ProcessDefinitionEntity processDefinition) {
    List<Element> laneSets = parentElement.elements("laneSet");
    if (laneSets != null && laneSets.size() > 0) {
      for (Element laneSetElement : laneSets) {
        LaneSet newLaneSet = new LaneSet();
        newLaneSet.setId(laneSetElement.attribute("id"));
        newLaneSet.setName(laneSetElement.attribute("name"));
        parseLanes(laneSetElement, newLaneSet);
        // Finally, add the set
        processDefinition.addLaneSet(newLaneSet);
      }
    }
  }

  /**
   * Parses the lane children of a laneSet element: id, name and the ids of all
   * flow nodes referenced by each lane.
   *
   * @param laneSetElement the laneSet element to read lanes from
   * @param laneSet the {@link LaneSet} the parsed lanes are added to
   */
  protected void parseLanes(Element laneSetElement, LaneSet laneSet) {
    List<Element> lanes = laneSetElement.elements("lane");
    if (lanes != null && lanes.size() > 0) {
      for (Element laneElement : lanes) {
        // Parse basic attributes
        Lane lane = new Lane();
        lane.setId(laneElement.attribute("id"));
        lane.setName(laneElement.attribute("name"));

        // Parse ID's of flow-nodes that live inside this lane
        List<Element> flowNodeElements = laneElement.elements("flowNodeRef");
        if (flowNodeElements != null && flowNodeElements.size() > 0) {
          for (Element flowNodeElement : flowNodeElements) {
            lane.getFlowNodeIds().add(flowNodeElement.getText());
          }
        }

        laneSet.addLane(lane);
      }
    }
  }

  /**
   * Parses a scope: a process, subprocess, etc.
   *
   * Note that a process definition is a scope on itself.
* * @param scopeElement * The XML element defining the scope * @param parentScope * The scope that contains the nested scope. */ public void parseScope(Element scopeElement, ScopeImpl parentScope) { // Not yet supported on process level (PVM additions needed): // parseProperties(processElement); HashMap<String, Element> postponedElements = new HashMap<String, Element>(); parseStartEvents(scopeElement, parentScope); parseActivities(scopeElement, parentScope, postponedElements); parsePostponedElements(scopeElement, parentScope, postponedElements); parseEndEvents(scopeElement, parentScope); parseBoundaryEvents(scopeElement, parentScope); parseSequenceFlow(scopeElement, parentScope); parseExecutionListenersOnScope(scopeElement, parentScope); parseAssociations(scopeElement, parentScope); if(parentScope instanceof ProcessDefinition) { parseProcessDefinitionCustomExtensions(scopeElement, (ProcessDefinition) parentScope); } postponedElements.clear(); IOSpecification ioSpecification = parseIOSpecification(scopeElement.element("ioSpecification")); parentScope.setIoSpecification(ioSpecification); } protected void parsePostponedElements(Element scopeElement, ScopeImpl parentScope, HashMap<String, Element> postponedElements) { for (Element postponedElement : postponedElements.values()) { if(parentScope.findActivity(postponedElement.attribute("id")) == null) { // check whether activity is already parsed if(postponedElement.getTagName().equals("intermediateCatchEvent")) { parseIntermediateCatchEvent(postponedElement, parentScope, false); } } } } protected void parseProcessDefinitionCustomExtensions(Element scopeElement, ProcessDefinition definition) { parseStartAuthorization(scopeElement, definition); } protected void parseStartAuthorization(Element scopeElement, ProcessDefinition definition) { ProcessDefinitionEntity processDefinition = (ProcessDefinitionEntity) definition; // parse activiti:potentialStarters Element extentionsElement = scopeElement.element("extensionElements"); 
if (extentionsElement != null) { List<Element> potentialStarterElements = extentionsElement.elementsNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, POTENTIAL_STARTER); for (Element potentialStarterElement : potentialStarterElements) { parsePotentialStarterResourceAssignment(potentialStarterElement, processDefinition); } } // parse activiti:candidateStarterUsers String candidateUsersString = scopeElement.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, CANDIDATE_STARTER_USERS_EXTENSION); if (candidateUsersString != null) { List<String> candidateUsers = parseCommaSeparatedList(candidateUsersString); for (String candidateUser : candidateUsers) { processDefinition.addCandidateStarterUserIdExpression(expressionManager.createExpression(candidateUser.trim())); } } // Candidate activiti:candidateStarterGroups String candidateGroupsString = scopeElement.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, CANDIDATE_STARTER_GROUPS_EXTENSION); if (candidateGroupsString != null) { List<String> candidateGroups = parseCommaSeparatedList(candidateGroupsString); for (String candidateGroup : candidateGroups) { processDefinition.addCandidateStarterGroupIdExpression(expressionManager.createExpression(candidateGroup.trim())); } } } protected void parsePotentialStarterResourceAssignment(Element performerElement, ProcessDefinitionEntity processDefinition) { Element raeElement = performerElement.element(RESOURCE_ASSIGNMENT_EXPR); if (raeElement != null) { Element feElement = raeElement.element(FORMAL_EXPRESSION); if (feElement != null) { List<String> assignmentExpressions = parseCommaSeparatedList(feElement.getText()); for (String assignmentExpression : assignmentExpressions) { assignmentExpression = assignmentExpression.trim(); if (assignmentExpression.startsWith(USER_PREFIX)) { String userAssignementId = getAssignmentId(assignmentExpression, USER_PREFIX); processDefinition.addCandidateStarterUserIdExpression(expressionManager.createExpression(userAssignementId)); } else if 
(assignmentExpression.startsWith(GROUP_PREFIX)) { String groupAssignementId = getAssignmentId(assignmentExpression, GROUP_PREFIX); processDefinition.addCandidateStarterGroupIdExpression(expressionManager.createExpression(groupAssignementId)); } else { // default: given string is a goupId, as-is. processDefinition.addCandidateStarterGroupIdExpression(expressionManager.createExpression(assignmentExpression)); } } } } } protected void parseAssociations(Element scopeElement, ScopeImpl parentScope) { for (Element associationElement : scopeElement.elements("association")) { String sourceRef = associationElement.attribute("sourceRef"); if(sourceRef == null) { addError("association element missing attribute 'sourceRef'", associationElement); } String targetRef = associationElement.attribute("targetRef"); if(targetRef == null) { addError("association element missing attribute 'targetRef'", associationElement); } ActivityImpl sourceActivity = parentScope.findActivity(sourceRef); ActivityImpl targetActivity = parentScope.findActivity(targetRef); // an association may reference elements that are not parsed as activities (like for instance // text annotations so do not throw an exception if sourceActivity or targetActivity are null) // However, we make sure they reference 'something': if(sourceActivity == null && !elementIds.contains(sourceRef)) { addError("Invalid reference sourceRef '"+sourceRef+"' of association element ", associationElement); } else if(targetActivity == null && !elementIds.contains(targetRef)) { addError("Invalid reference targetRef '"+targetRef+"' of association element ", associationElement); } else { if(sourceActivity != null && sourceActivity.getProperty("type").equals("compensationBoundaryCatch")) { Object isForCompensation = targetActivity.getProperty(PROPERTYNAME_IS_FOR_COMPENSATION); if(isForCompensation == null || !(Boolean) isForCompensation) { addError("compensation boundary catch must be connected to element with isForCompensation=true", 
associationElement); } else { ActivityImpl compensatedActivity = sourceActivity.getParentActivity(); compensatedActivity.setProperty(PROPERTYNAME_COMPENSATION_HANDLER_ID, targetActivity.getId()); } } } } } protected IOSpecification parseIOSpecification(Element ioSpecificationElement) { if (ioSpecificationElement == null) { return null; } IOSpecification ioSpecification = new IOSpecification(); for (Element dataInputElement : ioSpecificationElement.elements("dataInput")) { String id = dataInputElement.attribute("id"); String itemSubjectRef = this.resolveName(dataInputElement.attribute("itemSubjectRef")); ItemDefinition itemDefinition = this.itemDefinitions.get(itemSubjectRef); Data dataInput = new Data(this.targetNamespace + ":" + id, id, itemDefinition); ioSpecification.addInput(dataInput); } for (Element dataOutputElement : ioSpecificationElement.elements("dataOutput")) { String id = dataOutputElement.attribute("id"); String itemSubjectRef = this.resolveName(dataOutputElement.attribute("itemSubjectRef")); ItemDefinition itemDefinition = this.itemDefinitions.get(itemSubjectRef); Data dataOutput = new Data(this.targetNamespace + ":" + id, id, itemDefinition); ioSpecification.addOutput(dataOutput); } for (Element inputSetElement : ioSpecificationElement.elements("inputSet")) { for (Element dataInputRef : inputSetElement.elements("dataInputRefs")) { DataRef dataRef = new DataRef(dataInputRef.getText()); ioSpecification.addInputRef(dataRef); } } for (Element outputSetElement : ioSpecificationElement.elements("outputSet")) { for (Element dataInputRef : outputSetElement.elements("dataOutputRefs")) { DataRef dataRef = new DataRef(dataInputRef.getText()); ioSpecification.addOutputRef(dataRef); } } return ioSpecification; } protected AbstractDataAssociation parseDataInputAssociation(Element dataAssociationElement) { String sourceRef = null; Element sourceElement = dataAssociationElement.element("sourceRef"); if (sourceElement != null) { sourceRef = sourceElement.getText(); 
}
    String targetRef = null;
    Element targetElement = dataAssociationElement.element("targetRef");
    if (targetElement != null) {
      targetRef = targetElement.getText();
    }
    // NOTE(review): only an *empty* targetRef triggers this error; a missing
    // <targetRef> element (targetRef == null) passes silently -- confirm intended.
    if (targetRef != null && targetRef.equals("")) {
      addError("targetRef is required", dataAssociationElement);
    }

    List<Element> assignments = dataAssociationElement.elements("assignment");
    if (assignments.isEmpty()) {
      // no explicit assignments: the whole source is implicitly mapped to the target
      return new MessageImplicitDataInputAssociation(sourceRef, targetRef);
    } else {
      SimpleDataInputAssociation dataAssociation = new SimpleDataInputAssociation(sourceRef, targetRef);

      // NOTE(review): the loop re-queries elements("assignment") instead of
      // reusing the 'assignments' list above -- same result, just redundant.
      for (Element assigmentElement : dataAssociationElement.elements("assignment")) {
        // assignments without both a 'from' and a 'to' child are silently skipped
        if (assigmentElement.element("from") != null && assigmentElement.element("to") != null) {
          Expression from = this.expressionManager.createExpression(assigmentElement.element("from").getText());
          Expression to = this.expressionManager.createExpression(assigmentElement.element("to").getText());
          Assignment assignment = new Assignment(from, to);
          dataAssociation.addAssignment(assignment);
        }
      }
      return dataAssociation;
    }
  }

  /**
   * Parses the start events of a certain level in the process (process,
   * subprocess or another scope).
   *
   * @param parentElement
   *          The 'parent' element that contains the start events (process,
   *          subprocess).
   * @param scope
   *          The {@link ScopeImpl} to which the start events must be added.
*/ public void parseStartEvents(Element parentElement, ScopeImpl scope) { List<Element> startEventElements = parentElement.elements("startEvent"); List<ActivityImpl> startEventActivities = new ArrayList<ActivityImpl>(); for (Element startEventElement : startEventElements) { ActivityImpl startEventActivity = createActivityOnScope(startEventElement, scope); if (scope instanceof ProcessDefinitionEntity) { parseProcessDefinitionStartEvent(startEventActivity, startEventElement, parentElement, scope); startEventActivities.add(startEventActivity); } else { parseScopeStartEvent(startEventActivity, startEventElement, parentElement, scope); } for (BpmnParseListener parseListener : parseListeners) { parseListener.parseStartEvent(startEventElement, scope, startEventActivity); } parseExecutionListenersOnScope(startEventElement, startEventActivity); } if(scope instanceof ProcessDefinitionEntity) { selectInitial(startEventActivities, (ProcessDefinitionEntity) scope, parentElement); parseStartFormHandlers(startEventElements, (ProcessDefinitionEntity) scope); } } protected void selectInitial(List<ActivityImpl> startEventActivities, ProcessDefinitionEntity processDefinition, Element parentElement) { ActivityImpl initial = null; // validate that there is s single none start event / timer start event: for (ActivityImpl activityImpl : startEventActivities) { if(!activityImpl.getProperty("type").equals("messageStartEvent")) { if(initial == null) { initial = activityImpl; } else { addError("multiple none start events or timer start events not supported on process definition", parentElement); } } } // if there is a single start event, select it as initial, regardless of it's type: if(initial == null && startEventActivities.size() == 1) { initial = startEventActivities.get(0); } processDefinition.setInitial(initial); } protected void parseProcessDefinitionStartEvent(ActivityImpl startEventActivity, Element startEventElement, Element parentElement, ScopeImpl scope) { ProcessDefinitionEntity 
processDefinition = (ProcessDefinitionEntity) scope; String initiatorVariableName = startEventElement.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "initiator"); if (initiatorVariableName != null) { processDefinition.setProperty(PROPERTYNAME_INITIATOR_VARIABLE_NAME, initiatorVariableName); } // all start events share the same behavior: startEventActivity.setActivityBehavior(new NoneStartEventActivityBehavior()); Element timerEventDefinition = startEventElement.element("timerEventDefinition"); Element messageEventDefinition = startEventElement.element("messageEventDefinition"); if (timerEventDefinition != null) { parseTimerStartEventDefinition(timerEventDefinition, startEventActivity, processDefinition); } else if(messageEventDefinition != null) { EventSubscriptionDeclaration messageDefinition = parseMessageEventDefinition(messageEventDefinition); startEventActivity.setProperty("type", "messageStartEvent"); messageDefinition.setActivityId(startEventActivity.getId()); // create message event subscription: messageDefinition.setStartEvent(true); addEventSubscriptionDeclaration(messageDefinition, processDefinition, startEventElement); } } protected void parseStartFormHandlers(List<Element> startEventElements, ProcessDefinitionEntity processDefinition) { if(processDefinition.getInitial() != null) { for (Element startEventElement : startEventElements) { if(startEventElement.attribute("id").equals(processDefinition.getInitial().getId())) { StartFormHandler startFormHandler; String startFormHandlerClassName = startEventElement.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "formHandlerClass"); if (startFormHandlerClassName != null) { startFormHandler = (StartFormHandler) ReflectUtil.instantiate(startFormHandlerClassName); } else { startFormHandler = new DefaultStartFormHandler(); } startFormHandler.parseConfiguration(startEventElement, deployment, processDefinition, this); processDefinition.setStartFormHandler(startFormHandler); } } } } protected void 
parseScopeStartEvent(ActivityImpl startEventActivity, Element startEventElement, Element parentElement, ScopeImpl scope) { Object triggeredByEvent = scope.getProperty("triggeredByEvent"); boolean isTriggeredByEvent = triggeredByEvent != null && ((Boolean) triggeredByEvent == true); Element errorEventDefinition = startEventElement.element("errorEventDefinition"); Element messageEventDefinition = startEventElement.element("messageEventDefinition"); Element signalEventDefinition = startEventElement.element("signalEventDefinition"); if (isTriggeredByEvent) { // event subprocess // all start events of an event subprocess share common behavior EventSubProcessStartEventActivityBehavior activityBehavior = new EventSubProcessStartEventActivityBehavior(startEventActivity.getId()); startEventActivity.setActivityBehavior(activityBehavior); String isInterrupting = startEventElement.attribute("isInterrupting"); if(isInterrupting != null && (isInterrupting.equals("false")||isInterrupting.equals("FALSE"))) { activityBehavior.setInterrupting(false); } // the scope of the event subscription is the parent of the event // subprocess (subscription must be created when parent is initialized) ScopeImpl catchingScope = ((ActivityImpl) scope).getParent(); if (errorEventDefinition != null) { if(!activityBehavior.isInterrupting()) { addError("error start event of event subprocess must be interrupting", startEventElement); } if (scope.getProperty(PROPERTYNAME_INITIAL) == null) { scope.setProperty(PROPERTYNAME_INITIAL, startEventActivity); parseErrorStartEventDefinition(errorEventDefinition, startEventActivity, catchingScope); } else { addError("multiple start events not supported for subprocess", startEventElement); } } else if (messageEventDefinition != null) { EventSubscriptionDeclaration eventSubscriptionDeclaration = parseMessageEventDefinition(messageEventDefinition); eventSubscriptionDeclaration.setActivityId(startEventActivity.getId()); 
eventSubscriptionDeclaration.setStartEvent(false); addEventSubscriptionDeclaration(eventSubscriptionDeclaration, catchingScope, messageEventDefinition); } else if (signalEventDefinition != null) { EventSubscriptionDeclaration eventSubscriptionDeclaration = parseSignalEventDefinition(signalEventDefinition); eventSubscriptionDeclaration.setActivityId(startEventActivity.getId()); eventSubscriptionDeclaration.setStartEvent(false); addEventSubscriptionDeclaration(eventSubscriptionDeclaration, catchingScope, signalEventDefinition); } else { addError("start event of event subprocess must be of type 'error', 'message' or 'signal'", startEventElement); } } else { // "regular" subprocess if(errorEventDefinition != null) { addError("errorEventDefinition only allowed on start event if subprocess is an event subprocess", errorEventDefinition); } if(messageEventDefinition != null) { addError("messageEventDefinition only allowed on start event if subprocess is an event subprocess", messageEventDefinition); } if(signalEventDefinition != null) { addError("signalEventDefintion only allowed on start event if subprocess is an event subprocess", messageEventDefinition); } if (scope.getProperty(PROPERTYNAME_INITIAL) == null) { scope.setProperty(PROPERTYNAME_INITIAL, startEventActivity); startEventActivity.setActivityBehavior(new NoneStartEventActivityBehavior()); } else { addError("multiple start events not supported for subprocess", startEventElement); } } } protected void parseErrorStartEventDefinition(Element errorEventDefinition, ActivityImpl startEventActivity, ScopeImpl scope) { startEventActivity.setProperty("type", "errorStartEvent"); String errorRef = errorEventDefinition.attribute("errorRef"); Error error = null; ErrorEventDefinition definition = new ErrorEventDefinition(startEventActivity.getId()); if (errorRef != null) { error = errors.get(errorRef); String errorCode = error == null ? 
errorRef : error.getErrorCode();
      definition.setErrorCode(errorCode);
    }
    // precedence 10: error start events of event subprocesses win over
    // lower-precedence error handlers on the same scope
    definition.setPrecedence(10);
    addErrorEventDefinition(definition, scope);
  }

  /**
   * Builds an event subscription declaration from a messageEventDefinition
   * element. The messageRef attribute is mandatory and must resolve (after
   * namespace resolution) to a message parsed by parseMessages().
   */
  protected EventSubscriptionDeclaration parseMessageEventDefinition(Element messageEventDefinition) {
    String messageRef = messageEventDefinition.attribute("messageRef");
    if (messageRef == null) {
      addError("attribute 'messageRef' is required", messageEventDefinition);
    }
    MessageDefinition messageDefinition = messages.get(resolveName(messageRef));
    if (messageDefinition == null) {
      addError("Invalid 'messageRef': no message with id '" + messageRef + "' found.", messageEventDefinition);
    }
    // NOTE(review): addError() only records the problem; when messageDefinition
    // is null the getName() call below throws a NullPointerException before the
    // collected parse errors are reported -- TODO confirm / guard.
    return new EventSubscriptionDeclaration(messageDefinition.getName(), "message");
  }

  /**
   * Registers an event subscription declaration on the given scope. For message
   * events, validates that no other subscription with the same name and the
   * same start-event flag already exists on that scope.
   */
  @SuppressWarnings("unchecked")
  protected void addEventSubscriptionDeclaration(EventSubscriptionDeclaration subscription, ScopeImpl scope, Element element) {
    // declarations are accumulated in a scope property, lazily initialized
    List<EventSubscriptionDeclaration> eventDefinitions = (List<EventSubscriptionDeclaration>) scope.getProperty(PROPERTYNAME_EVENT_SUBSCRIPTION_DECLARATION);
    if (eventDefinitions == null) {
      eventDefinitions = new ArrayList<EventSubscriptionDeclaration>();
      scope.setProperty(PROPERTYNAME_EVENT_SUBSCRIPTION_DECLARATION, eventDefinitions);
    } else {
      // if this is a message event, validate that it is the only one with the provided name for this scope
      if (subscription.getEventType().equals("message")) {
        for (EventSubscriptionDeclaration eventDefinition : eventDefinitions) {
          if (eventDefinition.getEventType().equals("message")
              && eventDefinition.getEventName().equals(subscription.getEventName())
              && eventDefinition.isStartEvent() == subscription.isStartEvent()) {
            addError("Cannot have more than one message event subscription with name '" + subscription.getEventName() + "' for scope '" + scope.getId() + "'", element);
          }
        }
      }
    }
    eventDefinitions.add(subscription);
  }

  /**
   * Parses the activities of a certain level in the process (process,
   * subprocess or another scope).
* * @param parentElement * The 'parent' element that contains the activities (process, * subprocess). * @param scopeElement * The {@link ScopeImpl} to which the activities must be added. * @param postponedElements * @param postProcessActivities */ public void parseActivities(Element parentElement, ScopeImpl scopeElement, HashMap<String, Element> postponedElements) { for (Element activityElement : parentElement.elements()) { parseActivity(activityElement, parentElement, scopeElement, postponedElements); } } protected void parseActivity(Element activityElement, Element parentElement, ScopeImpl scopeElement, HashMap<String, Element> postponedElements) { ActivityImpl activity = null; if (activityElement.getTagName().equals("exclusiveGateway")) { activity = parseExclusiveGateway(activityElement, scopeElement); } else if (activityElement.getTagName().equals("inclusiveGateway")) { activity = parseInclusiveGateway(activityElement, scopeElement); } else if (activityElement.getTagName().equals("parallelGateway")) { activity = parseParallelGateway(activityElement, scopeElement); } else if (activityElement.getTagName().equals("scriptTask")) { activity = parseScriptTask(activityElement, scopeElement); } else if (activityElement.getTagName().equals("serviceTask")) { activity = parseServiceTask(activityElement, scopeElement); } else if (activityElement.getTagName().equals("businessRuleTask")) { activity = parseBusinessRuleTask(activityElement, scopeElement); } else if (activityElement.getTagName().equals("task")) { activity = parseTask(activityElement, scopeElement); } else if (activityElement.getTagName().equals("manualTask")) { activity = parseManualTask(activityElement, scopeElement); } else if (activityElement.getTagName().equals("userTask")) { activity = parseUserTask(activityElement, scopeElement); } else if (activityElement.getTagName().equals("sendTask")) { activity = parseSendTask(activityElement, scopeElement); } else if 
(activityElement.getTagName().equals("receiveTask")) { activity = parseReceiveTask(activityElement, scopeElement); } else if (activityElement.getTagName().equals("subProcess")) { activity = parseSubProcess(activityElement, scopeElement); } else if (activityElement.getTagName().equals("callActivity")) { activity = parseCallActivity(activityElement, scopeElement); } else if (activityElement.getTagName().equals("intermediateCatchEvent")) { // postpone all intermediate catch events (required for supporting event-based gw) postponedElements.put(activityElement.attribute("id"), activityElement); } else if (activityElement.getTagName().equals("intermediateThrowEvent")) { activity = parseIntermediateThrowEvent(activityElement, scopeElement); } else if (activityElement.getTagName().equals("eventBasedGateway")) { activity = parseEventBasedGateway(activityElement, parentElement, scopeElement); } else if(activityElement.getTagName().equals("transaction")) { activity = parseTransaction(activityElement, scopeElement); } else if (activityElement.getTagName().equals("adHocSubProcess") || activityElement.getTagName().equals("complexGateway")) { addWarning("Ignoring unsupported activity type", activityElement); } // Parse stuff common to activities above if (activity != null) { parseMultiInstanceLoopCharacteristics(activityElement, activity); } } public void validateActivities(List<ActivityImpl> activities) { for (ActivityImpl activity : activities) { validateActivity(activity); // check children if it is an own scope / subprocess / ... 
if (activity.getActivities().size()>0) { validateActivities(activity.getActivities()); } } } protected void validateActivity(ActivityImpl activity) { if (activity.getActivityBehavior() instanceof ExclusiveGatewayActivityBehavior) { validateExclusiveGateway(activity); } } public void validateExclusiveGateway(ActivityImpl activity) { if (activity.getOutgoingTransitions().size()==0) { // TODO: double check if this is valid (I think in Activiti yes, since we need start events we will need an end event as well) addError("Exclusive Gateway '" + activity.getId() + "' has no outgoing sequence flows.", null); } else if (activity.getOutgoingTransitions().size()==1) { PvmTransition flow = activity.getOutgoingTransitions().get(0); Condition condition = (Condition) flow.getProperty(BpmnParse.PROPERTYNAME_CONDITION); if (condition!=null) { addError("Exclusive Gateway '" + activity.getId() + "' has only one outgoing sequence flow ('" + flow.getId() + "'). This is not allowed to have a condition.", null); } } else { String defaultSequenceFlow = (String) activity.getProperty("default"); boolean hasDefaultFlow = defaultSequenceFlow!=null && defaultSequenceFlow.length()>0; ArrayList<PvmTransition> flowsWithoutCondition = new ArrayList<PvmTransition>(); for (PvmTransition flow : activity.getOutgoingTransitions()) { Condition condition = (Condition) flow.getProperty(BpmnParse.PROPERTYNAME_CONDITION); boolean isDefaultFlow = flow.getId()!=null && flow.getId().equals(defaultSequenceFlow); boolean hasConditon = condition!=null; if (!hasConditon && !isDefaultFlow) { flowsWithoutCondition.add(flow); } if (hasConditon && isDefaultFlow) { addError("Exclusive Gateway '" + activity.getId() + "' has outgoing sequence flow '" + flow.getId() + "' which is the default flow but has a condition too.", null); } } if (hasDefaultFlow || flowsWithoutCondition.size()>1) { // if we either have a default flow (then no flows without conditions are valid at all) or if we have more than one flow without 
        // condition this is an error
        for (PvmTransition flow : flowsWithoutCondition) {
          addError("Exclusive Gateway '" + activity.getId() + "' has outgoing sequence flow '" + flow.getId() + "' without condition which is not the default flow.", null);
        }
      } else if (flowsWithoutCondition.size()==1) {
        // Having no default and exactly one flow without condition this is considered the default one now (to not break backward compatibility)
        PvmTransition flow = flowsWithoutCondition.get(0);
        addWarning("Exclusive Gateway '" + activity.getId() + "' has outgoing sequence flow '" + flow.getId() + "' without condition which is not the default flow. We assume it to be the default flow, but it is bad modeling practice, better set the default flow in your gateway.", null);
      }
    }
  }

  /**
   * Parses an intermediate catch event (timer, signal or message).
   *
   * @param isAfterEventBasedGateway whether the event directly follows an
   *          event-based gateway; the catch-event definitions then register
   *          their subscriptions on the parent scope instead of the event
   *          activity itself
   */
  public ActivityImpl parseIntermediateCatchEvent(Element intermediateEventElement, ScopeImpl scopeElement, boolean isAfterEventBasedGateway) {
    ActivityImpl nestedActivity = createActivityOnScope(intermediateEventElement, scopeElement);

    // Catch event behavior is the same for all types
    nestedActivity.setActivityBehavior(new IntermediateCatchEventActivitiBehaviour());

    Element timerEventDefinition = intermediateEventElement.element("timerEventDefinition");
    Element signalEventDefinition = intermediateEventElement.element("signalEventDefinition");
    Element messageEventDefinition = intermediateEventElement.element("messageEventDefinition");
    if (timerEventDefinition != null) {
      parseIntemediateTimerEventDefinition(timerEventDefinition, nestedActivity, isAfterEventBasedGateway);
    }else if(signalEventDefinition != null) {
      parseIntemediateSignalEventDefinition(signalEventDefinition, nestedActivity, isAfterEventBasedGateway);
    }else if(messageEventDefinition != null) {
      parseIntemediateMessageEventDefinition(messageEventDefinition, nestedActivity, isAfterEventBasedGateway);
    } else {
      addError("Unsupported intermediate catch event type", intermediateEventElement);
    }

    for (BpmnParseListener parseListener : parseListeners) {
      parseListener.parseIntermediateCatchEvent(intermediateEventElement, scopeElement, nestedActivity);
    }

    parseExecutionListenersOnScope(intermediateEventElement, nestedActivity);

    return nestedActivity;
  }

  /**
   * Parses a message intermediate catch event definition and registers the
   * corresponding message event subscription.
   */
  protected void parseIntemediateMessageEventDefinition(Element messageEventDefinition, ActivityImpl nestedActivity, boolean isAfterEventBasedGateway) {

    nestedActivity.setProperty("type", "intermediateMessageCatch");

    EventSubscriptionDeclaration messageDefinition = parseMessageEventDefinition(messageEventDefinition);
    if(isAfterEventBasedGateway) {
      // the subscription belongs to the gateway's parent scope, keyed by this activity's id
      messageDefinition.setActivityId(nestedActivity.getId());
      addEventSubscriptionDeclaration(messageDefinition, nestedActivity.getParent(), messageEventDefinition);
    }else {
      nestedActivity.setScope(true);
      addEventSubscriptionDeclaration(messageDefinition, nestedActivity, messageEventDefinition);
    }

    for (BpmnParseListener parseListener : parseListeners) {
      parseListener.parseIntermediateMessageCatchEventDefinition(messageEventDefinition, nestedActivity);
    }
  }

  /**
   * Parses an intermediate throw event (signal, compensation or none);
   * other throwing intermediate event types are reported as unsupported.
   */
  public ActivityImpl parseIntermediateThrowEvent(Element intermediateEventElement, ScopeImpl scopeElement) {
    ActivityImpl nestedActivityImpl = createActivityOnScope(intermediateEventElement, scopeElement);

    ActivityBehavior activityBehavior = null;

    Element signalEventDefinitionElement = intermediateEventElement.element("signalEventDefinition");
    Element compensateEventDefinitionElement = intermediateEventElement.element("compensateEventDefinition");

    boolean otherUnsupportedThrowingIntermediateEvent =
      (intermediateEventElement.element("escalationEventDefinition") != null) || //
      (intermediateEventElement.element("messageEventDefinition") != null) || //
      (intermediateEventElement.element("linkEventDefinition") != null);
    // All other event definition types cannot be intermediate throwing (cancelEventDefinition, conditionalEventDefinition, errorEventDefinition, terminateEventDefinition, timerEventDefinition)

    if(signalEventDefinitionElement != null) {
      nestedActivityImpl.setProperty("type",
"intermediateSignalThrow");
      EventSubscriptionDeclaration signalDefinition = parseSignalEventDefinition(signalEventDefinitionElement);
      activityBehavior = new IntermediateThrowSignalEventActivityBehavior(signalDefinition);
    } else if(compensateEventDefinitionElement != null) {
      CompensateEventDefinition compensateEventDefinition = parseCompensateEventDefinition(compensateEventDefinitionElement, scopeElement);
      activityBehavior = new IntermediateThrowCompensationEventActivityBehavior(compensateEventDefinition);
      // IntermediateThrowNoneEventActivityBehavior
    } else if (otherUnsupportedThrowingIntermediateEvent) {
      addError("Unsupported intermediate throw event type", intermediateEventElement);
    } else { // None intermediate event
      activityBehavior = new IntermediateThrowNoneEventActivityBehavior();
    }

    for (BpmnParseListener parseListener : parseListeners) {
      parseListener.parseIntermediateThrowEvent(intermediateEventElement, scopeElement, nestedActivityImpl);
    }

    nestedActivityImpl.setActivityBehavior(activityBehavior);

    parseExecutionListenersOnScope(intermediateEventElement, nestedActivityImpl);

    return nestedActivityImpl;
  }

  /**
   * Parses a compensateEventDefinition element into a
   * {@link CompensateEventDefinition}, validating that a referenced activity
   * exists in the current scope.
   */
  protected CompensateEventDefinition parseCompensateEventDefinition(Element compensateEventDefinitionElement, ScopeImpl scopeElement) {
    String activityRef = compensateEventDefinitionElement.attribute("activityRef");
    // waitForCompletion defaults to true when the attribute is absent
    boolean waitForCompletion = "true".equals(compensateEventDefinitionElement.attribute("waitForCompletion", "true"));

    if(activityRef != null) {
      if(scopeElement.findActivity(activityRef) == null) {
        addError("Invalid attribute value for 'activityRef': no activity with id '"+activityRef+"' in current scope", compensateEventDefinitionElement);
      }
    }

    CompensateEventDefinition compensateEventDefinition = new CompensateEventDefinition();
    compensateEventDefinition.setActivityRef(activityRef);
    compensateEventDefinition.setWaitForCompletion(waitForCompletion);

    return compensateEventDefinition;
  }

  /**
   * Parses a compensation boundary event definition; only one such boundary
   * event is allowed per activity.
   */
  protected void parseCatchCompensateEventDefinition(Element compensateEventDefinition, ActivityImpl activity) {
    activity.setProperty("type", "compensationBoundaryCatch");

    ScopeImpl parent = activity.getParent();
    for (ActivityImpl child : parent.getActivities()) {
      // reject a second compensation boundary event on the same parent
      if(child.getProperty("type").equals("compensationBoundaryCatch") && child != activity ) {
        addError("multiple boundary events with compensateEventDefinition not supported on same activity", compensateEventDefinition);
      }
    }
  }

  /**
   * Parses a cancel boundary event definition; only supported on transaction
   * subprocesses, and only once per transaction.
   */
  protected ActivityBehavior parseBoundaryCancelEventDefinition(Element cancelEventDefinition, ActivityImpl activity) {
    activity.setProperty("type", "cancelBoundaryCatch");

    ActivityImpl parent = (ActivityImpl) activity.getParent();
    if(!parent.getProperty("type").equals("transaction")) {
      addError("boundary event with cancelEventDefinition only supported on transaction subprocesses", cancelEventDefinition);
    }

    for (ActivityImpl child : parent.getActivities()) {
      // reject a second cancel boundary event on the same transaction
      if(child.getProperty("type").equals("cancelBoundaryCatch") && child != activity ) {
        addError("multiple boundary events with cancelEventDefinition not supported on same transaction subprocess", cancelEventDefinition);
      }
    }

    return new CancelBoundaryEventActivityBehavior();
  }

  /**
   * Parses loopCharacteristics (standardLoop/Multi-instance) of an activity, if
   * any is defined.
*/
  public void parseMultiInstanceLoopCharacteristics(Element activityElement, ActivityImpl activity) {

    // Only 'activities' (in the BPMN 2.0 spec meaning) can have mi
    // characteristics
    if (!(activity.getActivityBehavior() instanceof AbstractBpmnActivityBehavior)) {
      return;
    }

    Element miLoopCharacteristics = activityElement.element("multiInstanceLoopCharacteristics");
    if (miLoopCharacteristics != null) {

      MultiInstanceActivityBehavior miActivityBehavior = null;
      boolean isSequential = parseBooleanAttribute(miLoopCharacteristics.attribute("isSequential"), false);
      if (isSequential) {
        miActivityBehavior = new SequentialMultiInstanceBehavior(activity, (AbstractBpmnActivityBehavior) activity.getActivityBehavior());
      } else {
        miActivityBehavior = new ParallelMultiInstanceBehavior(activity, (AbstractBpmnActivityBehavior) activity.getActivityBehavior());
      }
      // the multi-instance behavior wraps the original behavior and needs its own scope
      activity.setScope(true);
      activity.setProperty("multiInstance", isSequential ? "sequential" : "parallel");
      activity.setActivityBehavior(miActivityBehavior);

      // loopCardinality
      Element loopCardinality = miLoopCharacteristics.element("loopCardinality");
      if (loopCardinality != null) {
        String loopCardinalityText = loopCardinality.getText();
        if (loopCardinalityText == null || "".equals(loopCardinalityText)) {
          addError("loopCardinality must be defined for a multiInstanceLoopCharacteristics definition ", miLoopCharacteristics);
        }
        miActivityBehavior.setLoopCardinalityExpression(expressionManager.createExpression(loopCardinalityText));
      }

      // completionCondition
      Element completionCondition = miLoopCharacteristics.element("completionCondition");
      if (completionCondition != null) {
        String completionConditionText = completionCondition.getText();
        miActivityBehavior.setCompletionConditionExpression(expressionManager.createExpression(completionConditionText));
      }

      // activiti:collection
      String collection = miLoopCharacteristics.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "collection");
      if (collection != null) {
        // '{' marks an expression; otherwise the value is a plain variable name
        if (collection.contains("{")) {
          miActivityBehavior.setCollectionExpression(expressionManager.createExpression(collection));
        } else {
          miActivityBehavior.setCollectionVariable(collection);
        }
      }

      // loopDataInputRef
      Element loopDataInputRef = miLoopCharacteristics.element("loopDataInputRef");
      if (loopDataInputRef != null) {
        String loopDataInputRefText = loopDataInputRef.getText();
        if (loopDataInputRefText != null) {
          if (loopDataInputRefText.contains("{")) {
            miActivityBehavior.setCollectionExpression(expressionManager.createExpression(loopDataInputRefText));
          } else {
            miActivityBehavior.setCollectionVariable(loopDataInputRefText);
          }
        }
      }

      // activiti:elementVariable
      String elementVariable = miLoopCharacteristics.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "elementVariable");
      if (elementVariable != null) {
        miActivityBehavior.setCollectionElementVariable(elementVariable);
      }

      // dataInputItem
      Element inputDataItem = miLoopCharacteristics.element("inputDataItem");
      if (inputDataItem != null) {
        String inputDataItemName = inputDataItem.attribute("name");
        miActivityBehavior.setCollectionElementVariable(inputDataItemName);
      }

      // Validation
      if (miActivityBehavior.getLoopCardinalityExpression() == null && miActivityBehavior.getCollectionExpression() == null
              && miActivityBehavior.getCollectionVariable() == null) {
        addError("Either loopCardinality or loopDataInputRef/activiti:collection must been set", miLoopCharacteristics);
      }

      // Validation
      if (miActivityBehavior.getCollectionExpression() == null && miActivityBehavior.getCollectionVariable() == null
              && miActivityBehavior.getCollectionElementVariable() != null) {
        addError("LoopDataInputRef/activiti:collection must be set when using inputDataItem or activiti:elementVariable", miLoopCharacteristics);
      }

      for (BpmnParseListener parseListener : parseListeners) {
        parseListener.parseMultiInstanceLoopCharacteristics(activityElement, miLoopCharacteristics, activity);
      }

    }
  }

  /**
   * Parses the generic information of an activity element (id, name,
* documentation, etc.), and creates a new {@link ActivityImpl} on the given
   * scope element.
   */
  public ActivityImpl createActivityOnScope(Element activityElement, ScopeImpl scopeElement) {
    String id = activityElement.attribute("id");
    if (LOGGER.isLoggable(Level.FINE)) {
      LOGGER.fine("Parsing activity " + id);
    }
    ActivityImpl activity = scopeElement.createActivity(id);

    activity.setProperty("name", activityElement.attribute("name"));
    activity.setProperty("documentation", parseDocumentation(activityElement));
    activity.setProperty("default", activityElement.attribute("default"));
    activity.setProperty("type", activityElement.getTagName());
    activity.setProperty("line", activityElement.getLine());

    String isForCompensation = activityElement.attribute("isForCompensation");
    if(isForCompensation != null && (isForCompensation.equals("true")||isForCompensation.equals("TRUE"))) {
      activity.setProperty(PROPERTYNAME_IS_FOR_COMPENSATION, true);
    }

    return activity;
  }

  /**
   * Concatenates the text of all 'documentation' child elements, separated
   * by blank lines; returns null if no documentation elements are present.
   */
  public String parseDocumentation(Element element) {
    List<Element> docElements = element.elements("documentation");
    if (docElements.isEmpty()) {
      return null;
    }

    StringBuilder builder = new StringBuilder();
    for (Element e: docElements) {
      if (builder.length() != 0) {
        builder.append("\n\n");
      }
      builder.append(e.getText().trim());
    }

    return builder.toString();
  }

  /**
   * Parses an exclusive gateway declaration.
   */
  public ActivityImpl parseExclusiveGateway(Element exclusiveGwElement, ScopeImpl scope) {
    ActivityImpl activity = createActivityOnScope(exclusiveGwElement, scope);
    activity.setActivityBehavior(new ExclusiveGatewayActivityBehavior());

    parseExecutionListenersOnScope(exclusiveGwElement, activity);

    for (BpmnParseListener parseListener : parseListeners) {
      parseListener.parseExclusiveGateway(exclusiveGwElement, scope, activity);
    }
    return activity;
  }

  /**
   * Parses an inclusive gateway declaration.
   */
  public ActivityImpl parseInclusiveGateway(Element inclusiveGwElement, ScopeImpl scope) {
    ActivityImpl activity = createActivityOnScope(inclusiveGwElement, scope);
    activity.setActivityBehavior(new InclusiveGatewayActivityBehavior());

    parseExecutionListenersOnScope(inclusiveGwElement, activity);

    for (BpmnParseListener parseListener : parseListeners) {
      parseListener.parseInclusiveGateway(inclusiveGwElement, scope, activity);
    }
    return activity;
  }

  /**
   * Parses an event-based gateway declaration and the intermediate catch
   * events it connects to (the only element type allowed as its successor).
   */
  public ActivityImpl parseEventBasedGateway(Element eventBasedGwElement, Element parentElement, ScopeImpl scope) {
    ActivityImpl activity = createActivityOnScope(eventBasedGwElement, scope);
    activity.setActivityBehavior(new EventBasedGatewayActivityBehavior());
    activity.setScope(true);

    parseExecutionListenersOnScope(eventBasedGwElement, activity);

    for (BpmnParseListener parseListener : parseListeners) {
      parseListener.parseEventBasedGateway(eventBasedGwElement, scope, activity);
    }

    // find all outgoing sequence flows:
    List<Element> sequenceFlows = parentElement.elements("sequenceFlow");

    // collect all siblings in a map
    Map<String, Element> siblingsMap = new HashMap<String, Element>();
    List<Element> siblings = parentElement.elements();
    for (Element sibling : siblings) {
      siblingsMap.put(sibling.attribute("id"), sibling);
    }

    for (Element sequenceFlow : sequenceFlows) {
      String sourceRef = sequenceFlow.attribute("sourceRef");
      String targetRef = sequenceFlow.attribute("targetRef");

      if (activity.getId().equals(sourceRef)) {
        Element sibling = siblingsMap.get(targetRef);
        if (sibling != null) {
          if (sibling.getTagName().equals("intermediateCatchEvent")) {
            // parse the target catch event in 'after event-based gateway' mode
            parseIntermediateCatchEvent(sibling, activity, true);
          } else {
            addError("Event based gateway can only be connected to elements of type intermediateCatchEvent", sibling);
          }
        }
      }
    }

    return activity;
  }

  /**
   * Parses a parallel gateway declaration.
   */
  public ActivityImpl parseParallelGateway(Element parallelGwElement, ScopeImpl scope) {
    ActivityImpl activity = createActivityOnScope(parallelGwElement, scope);
    activity.setActivityBehavior(new ParallelGatewayActivityBehavior());

    parseExecutionListenersOnScope(parallelGwElement, activity);

    for (BpmnParseListener parseListener : parseListeners) {
      parseListener.parseParallelGateway(parallelGwElement, scope, activity);
    }
    return activity;
  }

  /**
   * Parses a scriptTask declaration.
   */
  public ActivityImpl parseScriptTask(Element scriptTaskElement, ScopeImpl scope) {
    ActivityImpl activity = createActivityOnScope(scriptTaskElement, scope);

    String script = null;
    String language = null;
    String resultVariableName = null;

    Element scriptElement = scriptTaskElement.element("script");
    if (scriptElement != null) {
      script = scriptElement.getText();

      if (language == null) {
        language = scriptTaskElement.attribute("scriptFormat");
      }

      if (language == null) {
        // fall back to the engine-wide default scripting language
        language = ScriptingEngines.DEFAULT_SCRIPTING_LANGUAGE;
      }

      resultVariableName = scriptTaskElement.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "resultVariable");
      if (resultVariableName == null) {
        // for backwards compatible reasons
        resultVariableName = scriptTaskElement.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "resultVariableName");
      }
    }

    activity.setAsync(isAsync(scriptTaskElement));
    activity.setExclusive(isExclusive(scriptTaskElement));

    activity.setActivityBehavior(new ScriptTaskActivityBehavior(script, language, resultVariableName));

    parseExecutionListenersOnScope(scriptTaskElement, activity);

    for (BpmnParseListener parseListener : parseListeners) {
      parseListener.parseScriptTask(scriptTaskElement, scope, activity);
    }
    return activity;
  }

  /**
   * Parses a serviceTask declaration.
*/
  public ActivityImpl parseServiceTask(Element serviceTaskElement, ScopeImpl scope) {
    ActivityImpl activity = createActivityOnScope(serviceTaskElement, scope);

    // mutually exclusive implementation alternatives for the task
    String type = serviceTaskElement.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "type");
    String className = serviceTaskElement.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "class");
    String expression = serviceTaskElement.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "expression");
    String delegateExpression = serviceTaskElement.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "delegateExpression");
    String resultVariableName = serviceTaskElement.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "resultVariable");
    if (resultVariableName == null) {
      // older attribute spelling kept for backwards compatibility
      resultVariableName = serviceTaskElement.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "resultVariableName");
    }
    String implementation = serviceTaskElement.attribute("implementation");
    String operationRef = this.resolveName(serviceTaskElement.attribute("operationRef"));

    activity.setAsync(isAsync(serviceTaskElement));
    activity.setExclusive(isExclusive(serviceTaskElement));

    if (type != null) {
      // built-in task types shipped with the engine
      if (type.equalsIgnoreCase("mail")) {
        parseEmailServiceTask(activity, serviceTaskElement, parseFieldDeclarations(serviceTaskElement));
      } else if (type.equalsIgnoreCase("mule")) {
        parseMuleServiceTask(activity, serviceTaskElement, parseFieldDeclarations(serviceTaskElement));
      } else if (type.equalsIgnoreCase("shell")) {
        parseShellServiceTask(activity, serviceTaskElement, parseFieldDeclarations(serviceTaskElement));
      } else {
        addError("Invalid usage of type attribute: '" + type + "'", serviceTaskElement);
      }

    } else if (className != null && className.trim().length() > 0) {
      if (resultVariableName != null) {
        addError("'resultVariableName' not supported for service tasks using 'class'", serviceTaskElement);
      }
      activity.setActivityBehavior(new ClassDelegate(className, parseFieldDeclarations(serviceTaskElement)));

    } else if (delegateExpression != null) {
      if (resultVariableName != null) {
        addError("'resultVariableName' not supported for service tasks using 'delegateExpression'", serviceTaskElement);
      }
      activity.setActivityBehavior(new ServiceTaskDelegateExpressionActivityBehavior(expressionManager.createExpression(delegateExpression), parseFieldDeclarations(serviceTaskElement)));

    } else if (expression != null && expression.trim().length() > 0) {
      activity.setActivityBehavior(new ServiceTaskExpressionActivityBehavior(expressionManager.createExpression(expression), resultVariableName));

    } else if (implementation != null && operationRef != null && implementation.equalsIgnoreCase("##WebService")) {
      // web service implementation: wire up the operation plus its data associations
      if (!this.operations.containsKey(operationRef)) {
        addError(operationRef + " does not exist", serviceTaskElement);
      } else {
        Operation operation = this.operations.get(operationRef);
        WebServiceActivityBehavior webServiceActivityBehavior = new WebServiceActivityBehavior(operation);

        Element ioSpecificationElement = serviceTaskElement.element("ioSpecification");
        if (ioSpecificationElement != null) {
          IOSpecification ioSpecification = this.parseIOSpecification(ioSpecificationElement);
          webServiceActivityBehavior.setIoSpecification(ioSpecification);
        }

        for (Element dataAssociationElement : serviceTaskElement.elements("dataInputAssociation")) {
          AbstractDataAssociation dataAssociation = this.parseDataInputAssociation(dataAssociationElement);
          webServiceActivityBehavior.addDataInputAssociation(dataAssociation);
        }

        for (Element dataAssociationElement : serviceTaskElement.elements("dataOutputAssociation")) {
          AbstractDataAssociation dataAssociation = this.parseDataOutputAssociation(dataAssociationElement);
          webServiceActivityBehavior.addDataOutputAssociation(dataAssociation);
        }

        activity.setActivityBehavior(webServiceActivityBehavior);
      }
    } else {
      addError("One of the attributes 'class', 'delegateExpression', 'type', 'operation', or 'expression' is mandatory on serviceTask.", serviceTaskElement);
    }

    parseExecutionListenersOnScope(serviceTaskElement, activity);

    for (BpmnParseListener parseListener : parseListeners) {
      parseListener.parseServiceTask(serviceTaskElement, scope, activity);
    }
    return activity;
  }

  /**
   * Parses a businessRuleTask declaration.
   */
  public ActivityImpl parseBusinessRuleTask(Element businessRuleTaskElement, ScopeImpl scope) {
    if (businessRuleTaskElement.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "class")!=null
            || businessRuleTaskElement.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "expression") !=null
            || businessRuleTaskElement.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "delegateExpression") != null) {
      // ACT-1164: If expression or class is set on a BusinessRuleTask it behaves like a service task
      // to allow implementing the rule handling yourself
      return parseServiceTask(businessRuleTaskElement, scope);
    } else {
      ActivityImpl activity = createActivityOnScope(businessRuleTaskElement, scope);

      BusinessRuleTaskActivityBehavior ruleActivity = new BusinessRuleTaskActivityBehavior();

      String ruleVariableInputString = businessRuleTaskElement.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "ruleVariablesInput");
      String rulesString = businessRuleTaskElement.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "rules");
      String excludeString = businessRuleTaskElement.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "exclude");
      String resultVariableNameString = businessRuleTaskElement.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "resultVariable");
      activity.setAsync(isAsync(businessRuleTaskElement));
      activity.setExclusive(isExclusive(businessRuleTaskElement));

      if (resultVariableNameString == null) {
        // older attribute spelling kept for backwards compatibility
        resultVariableNameString = businessRuleTaskElement.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "resultVariableName");
      }

      if (ruleVariableInputString != null) {
        List<String> ruleVariableInputObjects = parseCommaSeparatedList(ruleVariableInputString);
        for (String ruleVariableInputObject : ruleVariableInputObjects) {
ruleActivity.addRuleVariableInputIdExpression(expressionManager.createExpression(ruleVariableInputObject.trim()));
        }
      }

      if (rulesString != null) {
        List<String> rules = parseCommaSeparatedList(rulesString);
        for (String rule : rules) {
          ruleActivity.addRuleIdExpression(expressionManager.createExpression(rule.trim()));
        }

        if (excludeString != null) {
          excludeString = excludeString.trim();
          if ("true".equalsIgnoreCase(excludeString) == false && "false".equalsIgnoreCase(excludeString) == false) {
            addError("'exclude' only supports true or false for business rule tasks", businessRuleTaskElement);
          } else {
            ruleActivity.setExclude(Boolean.valueOf(excludeString.toLowerCase()));
          }
        }

      } else if (excludeString != null) {
        // 'exclude' is only meaningful together with an explicit rule list
        addError("'exclude' not supported for business rule tasks not defining 'rules'", businessRuleTaskElement);
      }

      if (resultVariableNameString != null) {
        resultVariableNameString = resultVariableNameString.trim();
        if (resultVariableNameString.length() > 0 == false) {
          addError("'resultVariable' must contain a text value for business rule tasks", businessRuleTaskElement);
        } else {
          ruleActivity.setResultVariable(resultVariableNameString);
        }
      } else {
        // default variable name the rule task writes its output to
        ruleActivity.setResultVariable("org.activiti.engine.rules.OUTPUT");
      }

      activity.setActivityBehavior(ruleActivity);

      parseExecutionListenersOnScope(businessRuleTaskElement, activity);

      for (BpmnParseListener parseListener : parseListeners) {
        parseListener.parseBusinessRuleTask(businessRuleTaskElement, scope, activity);
      }
      return activity;
    }
  }

  /**
   * Parses a sendTask declaration.
   */
  public ActivityImpl parseSendTask(Element sendTaskElement, ScopeImpl scope) {
    ActivityImpl activity = createActivityOnScope(sendTaskElement, scope);

    activity.setAsync(isAsync(sendTaskElement));
    activity.setExclusive(isExclusive(sendTaskElement));

    // for e-mail
    String type = sendTaskElement.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "type");

    // for web service
    String implementation = sendTaskElement.attribute("implementation");
    String operationRef = this.resolveName(sendTaskElement.attribute("operationRef"));

    // for e-mail
    if (type != null) {
      if (type.equalsIgnoreCase("mail")) {
        parseEmailServiceTask(activity, sendTaskElement, parseFieldDeclarations(sendTaskElement));
      } else if (type.equalsIgnoreCase("mule")) {
        parseMuleServiceTask(activity, sendTaskElement, parseFieldDeclarations(sendTaskElement));
      } else {
        addError("Invalid usage of type attribute: '" + type + "'", sendTaskElement);
      }

      // for web service
    } else if (implementation != null && operationRef != null && implementation.equalsIgnoreCase("##WebService")) {
      if (!this.operations.containsKey(operationRef)) {
        addError(operationRef + " does not exist", sendTaskElement);
      } else {
        Operation operation = this.operations.get(operationRef);
        WebServiceActivityBehavior webServiceActivityBehavior = new WebServiceActivityBehavior(operation);

        Element ioSpecificationElement = sendTaskElement.element("ioSpecification");
        if (ioSpecificationElement != null) {
          IOSpecification ioSpecification = this.parseIOSpecification(ioSpecificationElement);
          webServiceActivityBehavior.setIoSpecification(ioSpecification);
        }

        for (Element dataAssociationElement : sendTaskElement.elements("dataInputAssociation")) {
          AbstractDataAssociation dataAssociation = this.parseDataInputAssociation(dataAssociationElement);
          webServiceActivityBehavior.addDataInputAssociation(dataAssociation);
        }

        for (Element dataAssociationElement : sendTaskElement.elements("dataOutputAssociation")) {
          AbstractDataAssociation dataAssociation = this.parseDataOutputAssociation(dataAssociationElement);
          webServiceActivityBehavior.addDataOutputAssociation(dataAssociation);
        }

        activity.setActivityBehavior(webServiceActivityBehavior);
      }
    } else {
      addError("One of the attributes 'type' or 'operation' is mandatory on sendTask.", sendTaskElement);
    }

    parseExecutionListenersOnScope(sendTaskElement, activity);

    for (BpmnParseListener parseListener : parseListeners) {
      parseListener.parseSendTask(sendTaskElement, scope, activity);
    }
    return activity;
  }

  /**
   * Parses a dataOutputAssociation: either an implicit source-to-target copy
   * (when a sourceRef is given) or a transformation expression.
   */
  protected AbstractDataAssociation parseDataOutputAssociation(Element dataAssociationElement) {
    String targetRef = dataAssociationElement.element("targetRef").getText();

    if (dataAssociationElement.element("sourceRef") != null) {
      String sourceRef = dataAssociationElement.element("sourceRef").getText();
      return new MessageImplicitDataOutputAssociation(targetRef, sourceRef);
    } else {
      Expression transformation = this.expressionManager.createExpression(dataAssociationElement.element("transformation").getText());
      AbstractDataAssociation dataOutputAssociation = new TransformationDataOutputAssociation(null, targetRef, transformation);
      return dataOutputAssociation;
    }
  }

  /**
   * Configures a mule send task; the Mule behavior class is resolved
   * reflectively so the engine has no hard dependency on the Mule module.
   */
  protected void parseMuleServiceTask(ActivityImpl activity, Element serviceTaskElement, List<FieldDeclaration> fieldDeclarations) {
    try {
      Class< ?
> theClass = Class.forName("org.activiti.mule.MuleSendActivitiBehavior");
      activity.setActivityBehavior((ActivityBehavior) ClassDelegate.instantiateDelegate(theClass, fieldDeclarations));
    } catch (ClassNotFoundException e) {
      // the Mule module is not on the classpath
      addError("Could not find org.activiti.mule.MuleSendActivitiBehavior", serviceTaskElement);
    }
  }

  /**
   * Configures a mail service task after validating its field declarations.
   */
  protected void parseEmailServiceTask(ActivityImpl activity, Element serviceTaskElement, List<FieldDeclaration> fieldDeclarations) {
    validateFieldDeclarationsForEmail(serviceTaskElement, fieldDeclarations);
    activity.setActivityBehavior((MailActivityBehavior) ClassDelegate.instantiateDelegate(MailActivityBehavior.class, fieldDeclarations));
  }

  /**
   * Configures a shell service task after validating its field declarations.
   */
  protected void parseShellServiceTask(ActivityImpl activity, Element serviceTaskElement, List<FieldDeclaration> fieldDeclarations) {
    validateFieldDeclarationsForShell(serviceTaskElement, fieldDeclarations);
    activity.setActivityBehavior((ActivityBehavior) ClassDelegate.instantiateDelegate(ShellActivityBehavior.class, fieldDeclarations));
  }

  /**
   * A mail task must declare a recipient ('to') and a body ('text' or 'html').
   */
  protected void validateFieldDeclarationsForEmail(Element serviceTaskElement, List<FieldDeclaration> fieldDeclarations) {
    boolean toDefined = false;
    boolean textOrHtmlDefined = false;
    for (FieldDeclaration fieldDeclaration : fieldDeclarations) {
      if (fieldDeclaration.getName().equals("to")) {
        toDefined = true;
      }
      if (fieldDeclaration.getName().equals("html")) {
        textOrHtmlDefined = true;
      }
      if (fieldDeclaration.getName().equals("text")) {
        textOrHtmlDefined = true;
      }
    }

    if (!toDefined) {
      addError("No recipient is defined on the mail activity", serviceTaskElement);
    }
    if (!textOrHtmlDefined) {
      addError("Text or html field should be provided", serviceTaskElement);
    }
  }

  /**
   * A shell task must declare a 'command'; its boolean parameters ('wait',
   * 'redirectError', 'cleanEnv') may only be 'true' or 'false'.
   */
  protected void validateFieldDeclarationsForShell(Element serviceTaskElement, List<FieldDeclaration> fieldDeclarations) {
    boolean shellCommandDefined = false;

    for (FieldDeclaration fieldDeclaration : fieldDeclarations) {
      String fieldName = fieldDeclaration.getName();
      FixedValue fieldFixedValue = (FixedValue) fieldDeclaration.getValue();
      String fieldValue = fieldFixedValue.getExpressionText();

      shellCommandDefined |= fieldName.equals("command");

      if ((fieldName.equals("wait") || fieldName.equals("redirectError") || fieldName.equals("cleanEnv")) && !fieldValue.toLowerCase().equals("true")
              && !fieldValue.toLowerCase().equals("false")) {
        addError("undefined value for shell " + fieldName + " parameter :" + fieldValue.toString(), serviceTaskElement);
      }

    }

    if (!shellCommandDefined) {
      addError("No shell command is defined on the shell activity", serviceTaskElement);
    }
  }

  /**
   * Collects the activiti:field declarations from the extensionElements child
   * (or, for custom extensions, directly from the given element).
   */
  public List<FieldDeclaration> parseFieldDeclarations(Element element) {
    List<FieldDeclaration> fieldDeclarations = new ArrayList<FieldDeclaration>();

    Element elementWithFieldInjections = element.element("extensionElements");
    if (elementWithFieldInjections == null) { // Custom extensions will just
                                              // have the <field.. as a
                                              // subelement
      elementWithFieldInjections = element;
    }
    List<Element> fieldDeclarationElements = elementWithFieldInjections.elementsNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "field");
    if (fieldDeclarationElements != null && !fieldDeclarationElements.isEmpty()) {

      for (Element fieldDeclarationElement : fieldDeclarationElements) {
        FieldDeclaration fieldDeclaration = parseFieldDeclaration(element, fieldDeclarationElement);
        if (fieldDeclaration != null) {
          fieldDeclarations.add(fieldDeclaration);
        }
      }
    }

    return fieldDeclarations;
  }

  /**
   * Parses one field declaration, trying the fixed-string form first and the
   * expression form second; reports an error if neither is present.
   */
  protected FieldDeclaration parseFieldDeclaration(Element serviceTaskElement, Element fieldDeclarationElement) {
    String fieldName = fieldDeclarationElement.attribute("name");

    FieldDeclaration fieldDeclaration = parseStringFieldDeclaration(fieldDeclarationElement, serviceTaskElement, fieldName);
    if (fieldDeclaration == null) {
      fieldDeclaration = parseExpressionFieldDeclaration(fieldDeclarationElement, serviceTaskElement, fieldName);
    }

    if (fieldDeclaration == null) {
      addError("One of the following is mandatory on a field declaration: one of attributes stringValue|expression " + "or one of child elements string|expression", serviceTaskElement);
    }
    return fieldDeclaration;
  }

  /**
   * Parses the fixed-string form of a field declaration ('stringValue'
   * attribute or 'string' child element); returns null if absent.
   */
  protected FieldDeclaration parseStringFieldDeclaration(Element fieldDeclarationElement, Element serviceTaskElement, String fieldName) {
    try {
      String fieldValue = getStringValueFromAttributeOrElement("stringValue", "string", fieldDeclarationElement);
      if (fieldValue != null) {
        return new FieldDeclaration(fieldName, Expression.class.getName(), new FixedValue(fieldValue));
      }
    } catch (ActivitiException ae) {
      if (ae.getMessage().contains("multiple elements with tag name")) {
        addError("Multiple string field declarations found", serviceTaskElement);
      } else {
        addError("Error when paring field declarations: " + ae.getMessage(), serviceTaskElement);
      }
    }
    return null;
  }

  /**
   * Parses the expression form of a field declaration ('expression' attribute
   * or child element); returns null if absent or blank.
   */
  protected FieldDeclaration parseExpressionFieldDeclaration(Element fieldDeclarationElement, Element serviceTaskElement, String fieldName) {
    try {
      String expression = getStringValueFromAttributeOrElement("expression", "expression", fieldDeclarationElement);
      if (expression != null && expression.trim().length() > 0) {
        return new FieldDeclaration(fieldName, Expression.class.getName(), expressionManager.createExpression(expression));
      }
    } catch (ActivitiException ae) {
      if (ae.getMessage().contains("multiple elements with tag name")) {
        addError("Multiple expression field declarations found", serviceTaskElement);
      } else {
        addError("Error when paring field declarations: " + ae.getMessage(), serviceTaskElement);
      }
    }
    return null;
  }

  /**
   * Reads a value either from the named attribute or from the text of the
   * named extension child element; declaring both at once is an error.
   */
  protected String getStringValueFromAttributeOrElement(String attributeName, String elementName, Element element) {
    String value = null;

    String attributeValue = element.attribute(attributeName);
    Element childElement = element.elementNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, elementName);
    String stringElementText = null;

    if (attributeValue != null && childElement != null) {
      addError("Can't use attribute '" + attributeName + "' and element '" + elementName + "' together, only use one", element);
    } else
if (childElement != null) { stringElementText = childElement.getText(); if (stringElementText == null || stringElementText.length() == 0) { addError("No valid value found in attribute '" + attributeName + "' nor element '" + elementName + "'", element); } else { // Use text of element value = stringElementText; } } else if (attributeValue != null && attributeValue.length() > 0) { // Using attribute value = attributeValue; } return value; } /** * Parses a task with no specific type (behaves as passthrough). */ public ActivityImpl parseTask(Element taskElement, ScopeImpl scope) { ActivityImpl activity = createActivityOnScope(taskElement, scope); activity.setActivityBehavior(new TaskActivityBehavior()); activity.setAsync(isAsync(taskElement)); activity.setExclusive(isExclusive(taskElement)); parseExecutionListenersOnScope(taskElement, activity); for (BpmnParseListener parseListener : parseListeners) { parseListener.parseTask(taskElement, scope, activity); } return activity; } /** * Parses a manual task. */ public ActivityImpl parseManualTask(Element manualTaskElement, ScopeImpl scope) { ActivityImpl activity = createActivityOnScope(manualTaskElement, scope); activity.setActivityBehavior(new ManualTaskActivityBehavior()); parseExecutionListenersOnScope(manualTaskElement, activity); for (BpmnParseListener parseListener : parseListeners) { parseListener.parseManualTask(manualTaskElement, scope, activity); } return activity; } /** * Parses a receive task. 
*/
  public ActivityImpl parseReceiveTask(Element receiveTaskElement, ScopeImpl scope) {
    ActivityImpl activity = createActivityOnScope(receiveTaskElement, scope);
    activity.setActivityBehavior(new ReceiveTaskActivityBehavior());
    activity.setAsync(isAsync(receiveTaskElement));
    activity.setExclusive(isExclusive(receiveTaskElement));

    parseExecutionListenersOnScope(receiveTaskElement, activity);

    for (BpmnParseListener parseListener : parseListeners) {
      parseListener.parseReceiveTask(receiveTaskElement, scope, activity);
    }
    return activity;
  }

  /* userTask specific finals */

  // BPMN 2.0 element names used for human task resource assignment
  protected static final String HUMAN_PERFORMER = "humanPerformer";
  protected static final String POTENTIAL_OWNER = "potentialOwner";

  protected static final String RESOURCE_ASSIGNMENT_EXPR = "resourceAssignmentExpression";
  protected static final String FORMAL_EXPRESSION = "formalExpression";

  // prefixes used inside formal expressions to distinguish user vs. group assignments
  protected static final String USER_PREFIX = "user(";
  protected static final String GROUP_PREFIX = "group(";

  // Activiti extension attribute names on userTask
  protected static final String ASSIGNEE_EXTENSION = "assignee";
  protected static final String CANDIDATE_USERS_EXTENSION = "candidateUsers";
  protected static final String CANDIDATE_GROUPS_EXTENSION = "candidateGroups";
  protected static final String DUE_DATE_EXTENSION = "dueDate";
  protected static final String PRIORITY_EXTENSION = "priority";

  /**
   * Parses a userTask declaration.
*/
  public ActivityImpl parseUserTask(Element userTaskElement, ScopeImpl scope) {
    ActivityImpl activity = createActivityOnScope(userTaskElement, scope);
    activity.setAsync(isAsync(userTaskElement));
    activity.setExclusive(isExclusive(userTaskElement));

    TaskDefinition taskDefinition = parseTaskDefinition(userTaskElement, activity.getId(), (ProcessDefinitionEntity) scope.getProcessDefinition());

    UserTaskActivityBehavior userTaskActivity = new UserTaskActivityBehavior(expressionManager, taskDefinition);
    activity.setActivityBehavior(userTaskActivity);

    parseProperties(userTaskElement, activity);
    parseExecutionListenersOnScope(userTaskElement, activity);

    for (BpmnParseListener parseListener : parseListeners) {
      parseListener.parseUserTask(userTaskElement, scope, activity);
    }
    return activity;
  }

  /**
   * Parses the task definition (form handler, name, description, assignment)
   * of a userTask and registers it on the process definition under the given key.
   */
  public TaskDefinition parseTaskDefinition(Element taskElement, String taskDefinitionKey, ProcessDefinitionEntity processDefinition) {
    TaskFormHandler taskFormHandler;
    // a custom form handler class can be plugged in via the Activiti extension attribute
    String taskFormHandlerClassName = taskElement.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "formHandlerClass");
    if (taskFormHandlerClassName != null) {
      taskFormHandler = (TaskFormHandler) ReflectUtil.instantiate(taskFormHandlerClassName);
    } else {
      taskFormHandler = new DefaultTaskFormHandler();
    }
    taskFormHandler.parseConfiguration(taskElement, deployment, processDefinition, this);

    TaskDefinition taskDefinition = new TaskDefinition(taskFormHandler);

    taskDefinition.setKey(taskDefinitionKey);
    processDefinition.getTaskDefinitions().put(taskDefinitionKey, taskDefinition);

    String name = taskElement.attribute("name");
    if (name != null) {
      taskDefinition.setNameExpression(expressionManager.createExpression(name));
    }

    String descriptionStr = parseDocumentation(taskElement);
    if (descriptionStr != null) {
      taskDefinition.setDescriptionExpression(expressionManager.createExpression(descriptionStr));
    }

    parseHumanPerformer(taskElement, taskDefinition);
    parsePotentialOwner(taskElement, taskDefinition);

    // Activiti custom extension
    parseUserTaskCustomExtensions(taskElement, taskDefinition);

    return taskDefinition;
  }

  // At most one humanPerformer sub element is allowed per task.
  protected void parseHumanPerformer(Element taskElement, TaskDefinition taskDefinition) {
    List<Element> humanPerformerElements = taskElement.elements(HUMAN_PERFORMER);

    if (humanPerformerElements.size() > 1) {
      addError("Invalid task definition: multiple " + HUMAN_PERFORMER + " sub elements defined for " + taskDefinition.getNameExpression(), taskElement);
    } else if (humanPerformerElements.size() == 1) {
      Element humanPerformerElement = humanPerformerElements.get(0);
      if (humanPerformerElement != null) {
        parseHumanPerformerResourceAssignment(humanPerformerElement, taskDefinition);
      }
    }
  }

  // Multiple potentialOwner sub elements are allowed; each contributes candidates.
  protected void parsePotentialOwner(Element taskElement, TaskDefinition taskDefinition) {
    List<Element> potentialOwnerElements = taskElement.elements(POTENTIAL_OWNER);
    for (Element potentialOwnerElement : potentialOwnerElements) {
      parsePotentialOwnerResourceAssignment(potentialOwnerElement, taskDefinition);
    }
  }

  // The humanPerformer formal expression becomes the assignee expression.
  protected void parseHumanPerformerResourceAssignment(Element performerElement, TaskDefinition taskDefinition) {
    Element raeElement = performerElement.element(RESOURCE_ASSIGNMENT_EXPR);
    if (raeElement != null) {
      Element feElement = raeElement.element(FORMAL_EXPRESSION);
      if (feElement != null) {
        taskDefinition.setAssigneeExpression(expressionManager.createExpression(feElement.getText()));
      }
    }
  }

  // potentialOwner entries become candidate users or groups, depending on the
  // 'user(...)' / 'group(...)' prefix; a bare value is treated as a group id.
  protected void parsePotentialOwnerResourceAssignment(Element performerElement, TaskDefinition taskDefinition) {
    Element raeElement = performerElement.element(RESOURCE_ASSIGNMENT_EXPR);
    if (raeElement != null) {
      Element feElement = raeElement.element(FORMAL_EXPRESSION);
      if (feElement != null) {
        List<String> assignmentExpressions = parseCommaSeparatedList(feElement.getText());
        for (String assignmentExpression : assignmentExpressions) {
          assignmentExpression = assignmentExpression.trim();
          if (assignmentExpression.startsWith(USER_PREFIX)) {
            String userAssignementId = getAssignmentId(assignmentExpression, USER_PREFIX);
            taskDefinition.addCandidateUserIdExpression(expressionManager.createExpression(userAssignementId));
          } else if (assignmentExpression.startsWith(GROUP_PREFIX)) {
            String groupAssignementId = getAssignmentId(assignmentExpression, GROUP_PREFIX);
            taskDefinition.addCandidateGroupIdExpression(expressionManager.createExpression(groupAssignementId));
          } else { // default: given string is a groupId, as-is.
            taskDefinition.addCandidateGroupIdExpression(expressionManager.createExpression(assignmentExpression));
          }
        }
      }
    }
  }

  // Strips the given prefix (e.g. "user(") and the trailing ')' from an assignment expression.
  protected String getAssignmentId(String expression, String prefix) {
    return expression.substring(prefix.length(), expression.length() - 1).trim();
  }

  // Parses the Activiti-specific userTask attributes (assignee, candidate
  // users/groups, due date, priority) and the taskListener extension elements.
  protected void parseUserTaskCustomExtensions(Element taskElement, TaskDefinition taskDefinition) {

    // assignee
    String assignee = taskElement.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, ASSIGNEE_EXTENSION);
    if (assignee != null) {
      // humanPerformer may already have set an assignee; both at once is an error
      if (taskDefinition.getAssigneeExpression() == null) {
        taskDefinition.setAssigneeExpression(expressionManager.createExpression(assignee));
      } else {
        addError("Invalid usage: duplicate assignee declaration for task " + taskDefinition.getNameExpression(), taskElement);
      }
    }

    // Candidate users
    String candidateUsersString = taskElement.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, CANDIDATE_USERS_EXTENSION);
    if (candidateUsersString != null) {
      List<String> candidateUsers = parseCommaSeparatedList(candidateUsersString);
      for (String candidateUser : candidateUsers) {
        taskDefinition.addCandidateUserIdExpression(expressionManager.createExpression(candidateUser.trim()));
      }
    }

    // Candidate groups
    String candidateGroupsString = taskElement.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, CANDIDATE_GROUPS_EXTENSION);
    if (candidateGroupsString != null) {
      List<String> candidateGroups = parseCommaSeparatedList(candidateGroupsString);
      for (String candidateGroup : candidateGroups) {
        taskDefinition.addCandidateGroupIdExpression(expressionManager.createExpression(candidateGroup.trim()));
      }
    }

    // Task listeners
    parseTaskListeners(taskElement, taskDefinition);

    // Due date
    String dueDateExpression = taskElement.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, DUE_DATE_EXTENSION);
    if (dueDateExpression != null) {
      taskDefinition.setDueDateExpression(expressionManager.createExpression(dueDateExpression));
    }

    // Priority
    final String priorityExpression = taskElement.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, PRIORITY_EXTENSION);
    if (priorityExpression != null) {
      taskDefinition.setPriorityExpression(expressionManager.createExpression(priorityExpression));
    }
  }

  /**
   * Parses the given String as a list of comma separated entries, where an
   * entry can possibly be an expression that has comma's.
   *
   * If somebody is smart enough to write a regex for this, please let us know.
   *
   * @return the entries of the comma separated list, trimmed.
   */
  protected List<String> parseCommaSeparatedList(String s) {
    List<String> result = new ArrayList<String>();
    if (s != null && !"".equals(s)) {

      StringCharacterIterator iterator = new StringCharacterIterator(s);
      char c = iterator.first();

      StringBuilder strb = new StringBuilder();
      boolean insideExpression = false;

      while (c != StringCharacterIterator.DONE) {
        if (c == '{' || c == '$') {
          // entering an expression: commas inside ${...} / #{...} are not separators
          insideExpression = true;
        } else if (c == '}') {
          insideExpression = false;
        } else if (c == ',' && !insideExpression) {
          result.add(strb.toString().trim());
          strb.delete(0, strb.length());
        }

        if (c != ',' || (insideExpression)) {
          strb.append(c);
        }

        c = iterator.next();
      }

      if (strb.length() > 0) {
        result.add(strb.toString().trim());
      }

    }
    return result;
  }

  // Parses <activiti:taskListener> extension elements; only the
  // create/assignment/complete events are accepted.
  protected void parseTaskListeners(Element userTaskElement, TaskDefinition taskDefinition) {
    Element extentionsElement = userTaskElement.element("extensionElements");
    if (extentionsElement != null) {
      List<Element> taskListenerElements = extentionsElement.elementsNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "taskListener");
      for (Element taskListenerElement : taskListenerElements) {
        String eventName = taskListenerElement.attribute("event");
        if (eventName != null) {
          if (TaskListener.EVENTNAME_CREATE.equals(eventName) || TaskListener.EVENTNAME_ASSIGNMENT.equals(eventName)
                  || TaskListener.EVENTNAME_COMPLETE.equals(eventName)) {
            TaskListener taskListener = parseTaskListener(taskListenerElement);
            taskDefinition.addTaskListener(eventName, taskListener);
          } else {
            addError("Invalid eventName for taskListener: choose 'create' | 'assignment' | 'complete'", userTaskElement);
          }
        } else {
          addError("Event is mandatory on taskListener", userTaskElement);
        }
      }
    }
  }

  // A task listener is configured by exactly one of: class, expression or delegateExpression.
  protected TaskListener parseTaskListener(Element taskListenerElement) {
    TaskListener taskListener = null;

    String className = taskListenerElement.attribute("class");
    String expression = taskListenerElement.attribute("expression");
    String delegateExpression = taskListenerElement.attribute("delegateExpression");

    if (className != null) {
      taskListener = new ClassDelegate(className, parseFieldDeclarations(taskListenerElement));
    } else if (expression != null) {
      taskListener = new ExpressionTaskListener(expressionManager.createExpression(expression));
    } else if (delegateExpression != null) {
      taskListener = new DelegateExpressionTaskListener(expressionManager.createExpression(delegateExpression), parseFieldDeclarations(taskListenerElement));
    } else {
      addError("Element 'class', 'expression' or 'delegateExpression' is mandatory on taskListener", taskListenerElement);
    }
    return taskListener;
  }

  /**
   * Parses the end events of a certain level in the process (process,
   * subprocess or another scope).
   *
   * @param parentElement
   *          The 'parent' element that contains the end events (process,
   *          subprocess).
   * @param scope
   *          The {@link ScopeImpl} to which the end events must be added.
*/
  public void parseEndEvents(Element parentElement, ScopeImpl scope) {
    for (Element endEventElement : parentElement.elements("endEvent")) {
      ActivityImpl activity = createActivityOnScope(endEventElement, scope);

      Element errorEventDefinition = endEventElement.element("errorEventDefinition");
      Element cancelEventDefinition = endEventElement.element("cancelEventDefinition");
      Element terminateEventDefinition = endEventElement.element("terminateEventDefinition");
      if (errorEventDefinition != null) { // error end event
        String errorRef = errorEventDefinition.attribute("errorRef");
        if (errorRef == null || "".equals(errorRef)) {
          addError("'errorRef' attribute is mandatory on error end event", errorEventDefinition);
        } else {
          Error error = errors.get(errorRef);
          // a thrown error must define an error code
          if (error != null && (error.getErrorCode() == null || "".equals(error.getErrorCode()))) {
            addError("'errorCode' is mandatory on errors referenced by throwing error event definitions, but the error '" + error.getId() + "' does not define one.", errorEventDefinition);
          }
          activity.setProperty("type", "errorEndEvent");
          // when errorRef does not resolve to a declared error, the raw reference is used as the error code
          activity.setActivityBehavior(new ErrorEndEventActivityBehavior(error != null ? error.getErrorCode() : errorRef));
        }
      } else if (cancelEventDefinition != null) {
        // cancel end events are only valid inside a transaction subprocess
        if (scope.getProperty("type")==null || !scope.getProperty("type").equals("transaction")) {
          addError("end event with cancelEventDefinition only supported inside transaction subprocess", cancelEventDefinition);
        } else {
          activity.setProperty("type", "cancelEndEvent");
          activity.setActivityBehavior(new CancelEndEventActivityBehavior());
        }
      } else if (terminateEventDefinition != null) {
        activity.setActivityBehavior(new TerminateEndEventActivityBehavior());
      } else { // default: none end event
        activity.setActivityBehavior(new NoneEndEventActivityBehavior());
      }

      for (BpmnParseListener parseListener : parseListeners) {
        parseListener.parseEndEvent(endEventElement, scope, activity);
      }

      parseExecutionListenersOnScope(endEventElement, activity);
    }
  }

  /**
   * Parses the boundary events of a certain 'level' (process, subprocess or
   * other scope).
   *
   * Note that the boundary events are not parsed during the parsing of the bpmn
   * activities, since the semantics are different (boundaryEvent needs to be
   * added as nested activity to the reference activity on PVM level).
   *
   * @param parentElement
   *          The 'parent' element that contains the activities (process,
   *          subprocess).
   * @param scopeElement
   *          The {@link ScopeImpl} to which the activities must be added.
*/ public void parseBoundaryEvents(Element parentElement, ScopeImpl scopeElement) { for (Element boundaryEventElement : parentElement.elements("boundaryEvent")) { // The boundary event is attached to an activity, reference by the // 'attachedToRef' attribute String attachedToRef = boundaryEventElement.attribute("attachedToRef"); if (attachedToRef == null || attachedToRef.equals("")) { addError("AttachedToRef is required when using a timerEventDefinition", boundaryEventElement); } // Representation structure-wise is a nested activity in the activity to // which its attached String id = boundaryEventElement.attribute("id"); if (LOGGER.isLoggable(Level.FINE)) { LOGGER.fine("Parsing boundary event " + id); } ActivityImpl parentActivity = scopeElement.findActivity(attachedToRef); if (parentActivity == null) { addError("Invalid reference in boundary event. Make sure that the referenced activity is " + "defined in the same scope as the boundary event", boundaryEventElement); } ActivityImpl nestedActivity = createActivityOnScope(boundaryEventElement, parentActivity); String cancelActivity = boundaryEventElement.attribute("cancelActivity", "true"); boolean interrupting = cancelActivity.equals("true") ? 
true : false; // Catch event behavior is the same for most types ActivityBehavior behavior = null; // Depending on the sub-element definition, the correct activityBehavior // parsing is selected Element timerEventDefinition = boundaryEventElement.element("timerEventDefinition"); Element errorEventDefinition = boundaryEventElement.element("errorEventDefinition"); Element signalEventDefinition = boundaryEventElement.element("signalEventDefinition"); Element cancelEventDefinition = boundaryEventElement.element("cancelEventDefinition"); Element compensateEventDefinition = boundaryEventElement.element("compensateEventDefinition"); Element messageEventDefinition = boundaryEventElement.element("messageEventDefinition"); if (timerEventDefinition != null) { behavior = new BoundaryEventActivityBehavior(interrupting, nestedActivity.getId()); parseBoundaryTimerEventDefinition(timerEventDefinition, interrupting, nestedActivity); } else if (errorEventDefinition != null) { interrupting = true; // non-interrupting not yet supported behavior = new BoundaryEventActivityBehavior(interrupting, nestedActivity.getId()); parseBoundaryErrorEventDefinition(errorEventDefinition, interrupting, parentActivity, nestedActivity); } else if (signalEventDefinition != null) { behavior = new BoundaryEventActivityBehavior(interrupting, nestedActivity.getId()); parseBoundarySignalEventDefinition(signalEventDefinition, interrupting, nestedActivity); } else if (cancelEventDefinition != null) { // always interrupting behavior = parseBoundaryCancelEventDefinition(cancelEventDefinition, nestedActivity); } else if(compensateEventDefinition != null) { behavior = new BoundaryEventActivityBehavior(interrupting, nestedActivity.getId()); parseCatchCompensateEventDefinition(compensateEventDefinition, nestedActivity); } else if(messageEventDefinition != null) { behavior = new BoundaryEventActivityBehavior(interrupting, nestedActivity.getId()); parseBoundaryMessageEventDefinition(messageEventDefinition, 
interrupting, nestedActivity); } else { addError("Unsupported boundary event type", boundaryEventElement); } for (BpmnParseListener parseListener : parseListeners) { parseListener.parseBoundaryEvent(boundaryEventElement, scopeElement, nestedActivity); } nestedActivity.setActivityBehavior(behavior); } } /** * Parses a boundary timer event. The end-result will be that the given nested * activity will get the appropriate {@link ActivityBehavior}. * * @param timerEventDefinition * The XML element corresponding with the timer event details * @param interrupting * Indicates whether this timer is interrupting. * @param timerActivity * The activity which maps to the structure of the timer event on the * boundary of another activity. Note that this is NOT the activity * onto which the boundary event is attached, but a nested activity * inside this activity, specifically created for this event. */ public void parseBoundaryTimerEventDefinition(Element timerEventDefinition, boolean interrupting, ActivityImpl timerActivity) { timerActivity.setProperty("type", "boundaryTimer"); TimerDeclarationImpl timerDeclaration = parseTimer(timerEventDefinition, timerActivity, TimerExecuteNestedActivityJobHandler.TYPE); // ACT-1427 if (interrupting) { timerDeclaration.setInterruptingTimer(true); } addTimerDeclaration(timerActivity.getParent(), timerDeclaration); if (timerActivity.getParent() instanceof ActivityImpl) { ((ActivityImpl) timerActivity.getParent()).setScope(true); } for (BpmnParseListener parseListener : parseListeners) { parseListener.parseBoundaryTimerEventDefinition(timerEventDefinition, interrupting, timerActivity); } } public void parseBoundarySignalEventDefinition(Element element, boolean interrupting, ActivityImpl signalActivity) { signalActivity.setProperty("type", "boundarySignal"); EventSubscriptionDeclaration signalDefinition = parseSignalEventDefinition(element); if(signalActivity.getId() == null) { addError("boundary event has no id", element); } 
signalDefinition.setActivityId(signalActivity.getId()); addEventSubscriptionDeclaration(signalDefinition, signalActivity.getParent(), element); if (signalActivity.getParent() instanceof ActivityImpl) { ((ActivityImpl) signalActivity.getParent()).setScope(true); } for (BpmnParseListener parseListener : parseListeners) { parseListener.parseBoundarySignalEventDefinition(element, interrupting, signalActivity); } } public void parseBoundaryMessageEventDefinition(Element element, boolean interrupting, ActivityImpl messageActivity) { messageActivity.setProperty("type", "boundaryMessage"); EventSubscriptionDeclaration messageEventDefinition = parseMessageEventDefinition(element); if(messageActivity.getId() == null) { addError("boundary event has no id", element); } messageEventDefinition.setActivityId(messageActivity.getId()); addEventSubscriptionDeclaration(messageEventDefinition, messageActivity.getParent(), element); if (messageActivity.getParent() instanceof ActivityImpl) { ((ActivityImpl) messageActivity.getParent()).setScope(true); } for (BpmnParseListener parseListener : parseListeners) { parseListener.parseBoundaryMessageEventDefinition(element, interrupting, messageActivity); } } @SuppressWarnings("unchecked") protected void parseTimerStartEventDefinition(Element timerEventDefinition, ActivityImpl timerActivity, ProcessDefinitionEntity processDefinition) { timerActivity.setProperty("type", "startTimerEvent"); TimerDeclarationImpl timerDeclaration = parseTimer(timerEventDefinition, timerActivity, TimerStartEventJobHandler.TYPE); timerDeclaration.setJobHandlerConfiguration(processDefinition.getKey()); List<TimerDeclarationImpl> timerDeclarations = (List<TimerDeclarationImpl>) processDefinition.getProperty(PROPERTYNAME_START_TIMER); if (timerDeclarations == null) { timerDeclarations = new ArrayList<TimerDeclarationImpl>(); processDefinition.setProperty(PROPERTYNAME_START_TIMER, timerDeclarations); } timerDeclarations.add(timerDeclaration); } protected void 
parseIntemediateSignalEventDefinition(Element element, ActivityImpl signalActivity, boolean isAfterEventBasedGateway) { signalActivity.setProperty("type", "intermediateSignalCatch"); EventSubscriptionDeclaration signalDefinition = parseSignalEventDefinition(element); if(isAfterEventBasedGateway) { signalDefinition.setActivityId(signalActivity.getId()); addEventSubscriptionDeclaration(signalDefinition, signalActivity.getParent(), element); }else { signalActivity.setScope(true); addEventSubscriptionDeclaration(signalDefinition, signalActivity, element); } for (BpmnParseListener parseListener : parseListeners) { parseListener.parseIntermediateSignalCatchEventDefinition(element, signalActivity); } } protected EventSubscriptionDeclaration parseSignalEventDefinition(Element signalEventDefinitionElement) { String signalRef = signalEventDefinitionElement.attribute("signalRef"); if (signalRef == null) { addError("signalEventDefinition does not have required property 'signalRef'", signalEventDefinitionElement); return null; } else { SignalDefinition signalDefinition = signals.get(resolveName(signalRef)); if (signalDefinition == null) { addError("Could not find signal with id '" + signalRef + "'", signalEventDefinitionElement); } EventSubscriptionDeclaration signalEventDefinition = new EventSubscriptionDeclaration(signalDefinition.getName(), "signal"); boolean asynch = "true".equals(signalEventDefinitionElement.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "async", "false")); signalEventDefinition.setAsync(asynch); return signalEventDefinition; } } protected void parseIntemediateTimerEventDefinition(Element timerEventDefinition, ActivityImpl timerActivity, boolean isAfterEventBasedGateway) { timerActivity.setProperty("type", "intermediateTimer"); TimerDeclarationImpl timerDeclaration = parseTimer(timerEventDefinition, timerActivity, TimerCatchIntermediateEventJobHandler.TYPE); if(isAfterEventBasedGateway) { addTimerDeclaration(timerActivity.getParent(), 
timerDeclaration); }else { addTimerDeclaration(timerActivity, timerDeclaration); timerActivity.setScope(true); } for (BpmnParseListener parseListener : parseListeners) { parseListener.parseIntermediateTimerEventDefinition(timerEventDefinition, timerActivity); } } protected TimerDeclarationImpl parseTimer(Element timerEventDefinition, ScopeImpl timerActivity, String jobHandlerType) { // TimeDate TimerDeclarationType type = TimerDeclarationType.DATE; Expression expression = parseExpression(timerEventDefinition, "timeDate"); // TimeCycle if (expression == null) { type = TimerDeclarationType.CYCLE; expression = parseExpression(timerEventDefinition, "timeCycle"); } // TimeDuration if (expression == null) { type = TimerDeclarationType.DURATION; expression = parseExpression(timerEventDefinition, "timeDuration"); } // neither date, cycle or duration configured! if (expression==null) { addError("Timer needs configuration (either timeDate, timeCycle or timeDuration is needed).", timerEventDefinition); } // Parse the timer declaration // TODO move the timer declaration into the bpmn activity or next to the // TimerSession TimerDeclarationImpl timerDeclaration = new TimerDeclarationImpl(expression, type, jobHandlerType); timerDeclaration.setJobHandlerConfiguration(timerActivity.getId()); timerDeclaration.setExclusive("true".equals(timerEventDefinition.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "exclusive", String.valueOf(JobEntity.DEFAULT_EXCLUSIVE)))); return timerDeclaration; } protected Expression parseExpression(Element parent, String name) { Element value = parent.element(name); if (value != null) { String expressionText = value.getText().trim(); return expressionManager.createExpression(expressionText); } return null; } public void parseBoundaryErrorEventDefinition(Element errorEventDefinition, boolean interrupting, ActivityImpl activity, ActivityImpl nestedErrorEventActivity) { nestedErrorEventActivity.setProperty("type", "boundaryError"); ScopeImpl 
catchingScope = nestedErrorEventActivity.getParent(); ((ActivityImpl) catchingScope).setScope(true); String errorRef = errorEventDefinition.attribute("errorRef"); Error error = null; ErrorEventDefinition definition = new ErrorEventDefinition(nestedErrorEventActivity.getId()); if (errorRef != null) { error = errors.get(errorRef); definition.setErrorCode(error == null ? errorRef : error.getErrorCode()); } addErrorEventDefinition(definition, catchingScope); for (BpmnParseListener parseListener : parseListeners) { parseListener.parseBoundaryErrorEventDefinition(errorEventDefinition, interrupting, activity, nestedErrorEventActivity); } } protected void addErrorEventDefinition(ErrorEventDefinition errorEventDefinition, ScopeImpl catchingScope) { List<ErrorEventDefinition> errorEventDefinitions = (List<ErrorEventDefinition>) catchingScope.getProperty(PROPERTYNAME_ERROR_EVENT_DEFINITIONS); if(errorEventDefinitions == null) { errorEventDefinitions = new ArrayList<ErrorEventDefinition>(); catchingScope.setProperty(PROPERTYNAME_ERROR_EVENT_DEFINITIONS, errorEventDefinitions); } errorEventDefinitions.add(errorEventDefinition); Collections.sort(errorEventDefinitions, ErrorEventDefinition.comparator); } protected List<ActivityImpl> getAllChildActivitiesOfType(String type, ScopeImpl scope) { List<ActivityImpl> children = new ArrayList<ActivityImpl>(); for (ActivityImpl childActivity : scope.getActivities()) { if (type.equals(childActivity.getProperty("type"))) { children.add(childActivity); } children.addAll(getAllChildActivitiesOfType(type, childActivity)); } return children; } /** * Checks if the given activity is a child activity of the * possibleParentActivity. 
*/ protected boolean isChildActivity(ActivityImpl activityToCheck, ActivityImpl possibleParentActivity) { for (ActivityImpl child : possibleParentActivity.getActivities()) { if (child.getId().equals(activityToCheck.getId()) || isChildActivity(activityToCheck, child)) { return true; } } return false; } @SuppressWarnings("unchecked") protected void addTimerDeclaration(ScopeImpl scope, TimerDeclarationImpl timerDeclaration) { List<TimerDeclarationImpl> timerDeclarations = (List<TimerDeclarationImpl>) scope.getProperty(PROPERTYNAME_TIMER_DECLARATION); if (timerDeclarations == null) { timerDeclarations = new ArrayList<TimerDeclarationImpl>(); scope.setProperty(PROPERTYNAME_TIMER_DECLARATION, timerDeclarations); } timerDeclarations.add(timerDeclaration); } @SuppressWarnings("unchecked") protected void addVariableDeclaration(ScopeImpl scope, VariableDeclaration variableDeclaration) { List<VariableDeclaration> variableDeclarations = (List<VariableDeclaration>) scope.getProperty(PROPERTYNAME_VARIABLE_DECLARATIONS); if (variableDeclarations == null) { variableDeclarations = new ArrayList<VariableDeclaration>(); scope.setProperty(PROPERTYNAME_VARIABLE_DECLARATIONS, variableDeclarations); } variableDeclarations.add(variableDeclaration); } /** * Parses a subprocess (formally known as an embedded subprocess): a subprocess * defined within another process definition. * * @param subProcessElement * The XML element corresponding with the subprocess definition * @param scope * The current scope on which the subprocess is defined. 
*/
  public ActivityImpl parseSubProcess(Element subProcessElement, ScopeImpl scope) {
    ActivityImpl activity = createActivityOnScope(subProcessElement, scope);

    activity.setAsync(isAsync(subProcessElement));
    activity.setExclusive(isExclusive(subProcessElement));

    Boolean isTriggeredByEvent = parseBooleanAttribute(subProcessElement.attribute("triggeredByEvent"), false);
    activity.setProperty("triggeredByEvent", isTriggeredByEvent);

    // event subprocesses are not scopes
    activity.setScope(!isTriggeredByEvent);
    activity.setActivityBehavior(new SubProcessActivityBehavior());
    parseScope(subProcessElement, activity);

    for (BpmnParseListener parseListener : parseListeners) {
      parseListener.parseSubProcess(subProcessElement, scope, activity);
    }
    return activity;
  }

  /**
   * Parses a transaction subprocess; transactions are always scopes.
   */
  protected ActivityImpl parseTransaction(Element transactionElement, ScopeImpl scope) {
    ActivityImpl activity = createActivityOnScope(transactionElement, scope);

    activity.setAsync(isAsync(transactionElement));
    activity.setExclusive(isExclusive(transactionElement));

    activity.setScope(true);
    activity.setActivityBehavior(new TransactionActivityBehavior());
    parseScope(transactionElement, activity);

    for (BpmnParseListener parseListener : parseListeners) {
      parseListener.parseTransaction(transactionElement, scope, activity);
    }
    return activity;
  }

  /**
   * Parses a call activity (currently only supporting calling subprocesses).
   *
   * @param callActivityElement
   *          The XML element defining the call activity
   * @param scope
   *          The current scope on which the call activity is defined.
*/
  public ActivityImpl parseCallActivity(Element callActivityElement, ScopeImpl scope) {
    ActivityImpl activity = createActivityOnScope(callActivityElement, scope);

    activity.setAsync(isAsync(callActivityElement));
    activity.setExclusive(isExclusive(callActivityElement));

    String calledElement = callActivityElement.attribute("calledElement");
    if (calledElement == null) {
      addError("Missing attribute 'calledElement'", callActivityElement);
    }

    CallActivityBehavior callActivityBehaviour = null;
    // calledElement may be a fixed process definition key or an expression (e.g. ${...})
    String expressionRegex = "\\$+\\{+.+\\}";
    if (calledElement != null && calledElement.matches(expressionRegex)) {
      callActivityBehaviour = new CallActivityBehavior(expressionManager.createExpression(calledElement));
    } else {
      callActivityBehaviour = new CallActivityBehavior(calledElement);
    }

    Element extentionsElement = callActivityElement.element("extensionElements");
    if (extentionsElement != null) {
      // input data elements: <activiti:in source=".."/sourceExpression=".." target=".."/>
      for (Element listenerElement : extentionsElement.elementsNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "in")) {
        String sourceExpression = listenerElement.attribute("sourceExpression");
        String target = listenerElement.attribute("target");
        if (sourceExpression != null) {
          Expression expression = expressionManager.createExpression(sourceExpression.trim());
          callActivityBehaviour.addDataInputAssociation(new SimpleDataInputAssociation(expression, target));
        } else {
          String source = listenerElement.attribute("source");
          callActivityBehaviour.addDataInputAssociation(new SimpleDataInputAssociation(source, target));
        }
      }

      // output data elements: <activiti:out source=".."/sourceExpression=".." target=".."/>
      for (Element listenerElement : extentionsElement.elementsNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "out")) {
        String sourceExpression = listenerElement.attribute("sourceExpression");
        String target = listenerElement.attribute("target");
        if (sourceExpression != null) {
          Expression expression = expressionManager.createExpression(sourceExpression.trim());
          callActivityBehaviour.addDataOutputAssociation(new MessageImplicitDataOutputAssociation(target, expression));
        } else {
          String source = listenerElement.attribute("source");
          callActivityBehaviour.addDataOutputAssociation(new MessageImplicitDataOutputAssociation(target, source));
        }
      }
    }

    // // parse data input and output
    // for (Element dataAssociationElement :
    // callActivityElement.elements("dataInputAssociation")) {
    // AbstractDataAssociation dataAssociation =
    // this.parseDataInputAssociation(dataAssociationElement);
    // callActivityBehaviour.addDataInputAssociation(dataAssociation);
    // }
    //
    // for (Element dataAssociationElement :
    // callActivityElement.elements("dataOutputAssociation")) {
    // AbstractDataAssociation dataAssociation =
    // this.parseDataOutputAssociation(dataAssociationElement);
    // callActivityBehaviour.addDataOutputAssociation(dataAssociation);
    // }

    activity.setScope(true);
    activity.setActivityBehavior(callActivityBehaviour);

    parseExecutionListenersOnScope(callActivityElement, activity);

    for (BpmnParseListener parseListener : parseListeners) {
      parseListener.parseCallActivity(callActivityElement, scope, activity);
    }
    return activity;
  }

  /**
   * Parses the properties of an element (if any) that can contain properties
   * (processes, activities, etc.)
   *
   * @param element
   *          The element that can contain properties.
   * @param activity
   *          The activity where the property declaration is done.
   */
  public void parseProperties(Element element, ActivityImpl activity) {
    List<Element> propertyElements = element.elements("property");
    for (Element propertyElement : propertyElements) {
      parseProperty(propertyElement, activity);
    }
  }

  /**
   * Parses one property definition.
   *
   * @param propertyElement
   *          The 'property' element that defines how a property looks like and
   *          is handled.
*/ public void parseProperty(Element propertyElement, ActivityImpl activity) { String id = propertyElement.attribute("id"); String name = propertyElement.attribute("name"); // If name isn't given, use the id as name if (name == null) { if (id == null) { addError("Invalid property usage on line " + propertyElement.getLine() + ": no id or name specified.", propertyElement); } else { name = id; } } String itemSubjectRef = propertyElement.attribute("itemSubjectRef"); String type = null; if (itemSubjectRef != null) { ItemDefinition itemDefinition = itemDefinitions.get(itemSubjectRef); if (itemDefinition != null) { StructureDefinition structure = itemDefinition.getStructureDefinition(); type = structure.getId(); } else { addError("Invalid itemDefinition reference: " + itemSubjectRef + " not found", propertyElement); } } parsePropertyCustomExtensions(activity, propertyElement, name, type); } /** * Parses the custom extensions for properties. * * @param activity * The activity where the property declaration is done. * @param propertyElement * The 'property' element defining the property. * @param propertyName * The name of the property. * @param propertyType * The type of the property. */ public void parsePropertyCustomExtensions(ActivityImpl activity, Element propertyElement, String propertyName, String propertyType) { if (propertyType == null) { String type = propertyElement.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "type"); propertyType = type != null ? 
type : "string"; // default is string } VariableDeclaration variableDeclaration = new VariableDeclaration(propertyName, propertyType); addVariableDeclaration(activity, variableDeclaration); activity.setScope(true); String src = propertyElement.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "src"); if (src != null) { variableDeclaration.setSourceVariableName(src); } String srcExpr = propertyElement.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "srcExpr"); if (srcExpr != null) { Expression sourceExpression = expressionManager.createExpression(srcExpr); variableDeclaration.setSourceExpression(sourceExpression); } String dst = propertyElement.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "dst"); if (dst != null) { variableDeclaration.setDestinationVariableName(dst); } String destExpr = propertyElement.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "dstExpr"); if (destExpr != null) { Expression destinationExpression = expressionManager.createExpression(destExpr); variableDeclaration.setDestinationExpression(destinationExpression); } String link = propertyElement.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "link"); if (link != null) { variableDeclaration.setLink(link); } String linkExpr = propertyElement.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "linkExpr"); if (linkExpr != null) { Expression linkExpression = expressionManager.createExpression(linkExpr); variableDeclaration.setLinkExpression(linkExpression); } for (BpmnParseListener parseListener : parseListeners) { parseListener.parseProperty(propertyElement, variableDeclaration, activity); } } /** * Parses all sequence flow of a scope. * * @param processElement * The 'process' element wherein the sequence flow are defined. * @param scope * The scope to which the sequence flow must be added. 
*/ public void parseSequenceFlow(Element processElement, ScopeImpl scope) { for (Element sequenceFlowElement : processElement.elements("sequenceFlow")) { String id = sequenceFlowElement.attribute("id"); String sourceRef = sequenceFlowElement.attribute("sourceRef"); String destinationRef = sequenceFlowElement.attribute("targetRef"); // Implicit check: sequence flow cannot cross (sub) process boundaries: we // don't do a processDefinition.findActivity here ActivityImpl sourceActivity = scope.findActivity(sourceRef); ActivityImpl destinationActivity = scope.findActivity(destinationRef); if (sourceActivity == null) { addError("Invalid source '" + sourceRef + "' of sequence flow '" + id + "'", sequenceFlowElement); } else if (destinationActivity == null) { addError("Invalid destination '" + destinationRef + "' of sequence flow '" + id + "'", sequenceFlowElement); /*} else if(sourceActivity.getActivityBehavior() instanceof EventBasedGatewayActivityBehavior) { // ignore*/ } else if(!(sourceActivity.getActivityBehavior() instanceof EventBasedGatewayActivityBehavior) && destinationActivity.getActivityBehavior() instanceof IntermediateCatchEventActivitiBehaviour && (destinationActivity.getParentActivity() != null) && (destinationActivity.getParentActivity().getActivityBehavior() instanceof EventBasedGatewayActivityBehavior)) { addError("Invalid incoming sequenceflow for intermediateCatchEvent with id '"+destinationActivity.getId()+"' connected to an event-based gateway.", sequenceFlowElement); } else { TransitionImpl transition = sourceActivity.createOutgoingTransition(id); sequenceFlows.put(id, transition); transition.setProperty("name", sequenceFlowElement.attribute("name")); transition.setProperty("documentation", parseDocumentation(sequenceFlowElement)); transition.setDestination(destinationActivity); parseSequenceFlowConditionExpression(sequenceFlowElement, transition); parseExecutionListenersOnTransition(sequenceFlowElement, transition); for (BpmnParseListener 
parseListener : parseListeners) { parseListener.parseSequenceFlow(sequenceFlowElement, scope, transition); } } } } /** * Parses a condition expression on a sequence flow. * * @param seqFlowElement * The 'sequenceFlow' element that can contain a condition. * @param seqFlow * The sequenceFlow object representation to which the condition must * be added. */ public void parseSequenceFlowConditionExpression(Element seqFlowElement, TransitionImpl seqFlow) { Element conditionExprElement = seqFlowElement.element("conditionExpression"); if (conditionExprElement != null) { String expression = conditionExprElement.getText().trim(); String type = conditionExprElement.attributeNS(BpmnParser.XSI_NS, "type"); if (type != null) { String value = type.contains(":") ? resolveName(type) : BpmnParser.BPMN20_NS + ":" + type; if (!value.equals(ATTRIBUTEVALUE_T_FORMAL_EXPRESSION)) { addError("Invalid type, only tFormalExpression is currently supported", conditionExprElement); } } Condition expressionCondition = new UelExpressionCondition(expressionManager.createExpression(expression)); seqFlow.setProperty(PROPERTYNAME_CONDITION_TEXT, expression); seqFlow.setProperty(PROPERTYNAME_CONDITION, expressionCondition); } } /** * Parses all execution-listeners on a scope. * * @param scopeElement * the XML element containing the scope definition. * @param scope * the scope to add the executionListeners to. 
* @param postProcessActivities */ public void parseExecutionListenersOnScope(Element scopeElement, ScopeImpl scope) { Element extentionsElement = scopeElement.element("extensionElements"); if (extentionsElement != null) { List<Element> listenerElements = extentionsElement.elementsNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "executionListener"); for (Element listenerElement : listenerElements) { String eventName = listenerElement.attribute("event"); if (isValidEventNameForScope(eventName, listenerElement)) { ExecutionListener listener = parseExecutionListener(listenerElement); if (listener != null) { scope.addExecutionListener(eventName, listener); } } } } } /** * Check if the given event name is valid. If not, an appropriate error is * added. */ protected boolean isValidEventNameForScope(String eventName, Element listenerElement) { if (eventName != null && eventName.trim().length() > 0) { if ("start".equals(eventName) || "end".equals(eventName)) { return true; } else { addError("Attribute 'eventName' must be one of {start|end}", listenerElement); } } else { addError("Attribute 'eventName' is mandatory on listener", listenerElement); } return false; } public void parseExecutionListenersOnTransition(Element activitiElement, TransitionImpl activity) { Element extentionsElement = activitiElement.element("extensionElements"); if (extentionsElement != null) { List<Element> listenerElements = extentionsElement.elementsNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "executionListener"); for (Element listenerElement : listenerElements) { ExecutionListener listener = parseExecutionListener(listenerElement); if (listener != null) { // Since a transition only fires event 'take', we don't parse the // eventName, it is ignored activity.addExecutionListener(listener); } } } } /** * Parses an {@link ExecutionListener} implementation for the given * executionListener element. * * @param executionListenerElement * the XML element containing the executionListener definition. 
*/ public ExecutionListener parseExecutionListener(Element executionListenerElement) { ExecutionListener executionListener = null; String className = executionListenerElement.attribute("class"); String expression = executionListenerElement.attribute("expression"); String delegateExpression = executionListenerElement.attribute("delegateExpression"); if (className != null) { executionListener = new ClassDelegate(className, parseFieldDeclarations(executionListenerElement)); } else if (expression != null) { executionListener = new ExpressionExecutionListener(expressionManager.createExpression(expression)); } else if (delegateExpression != null) { executionListener = new DelegateExpressionExecutionListener(expressionManager.createExpression(delegateExpression), parseFieldDeclarations(executionListenerElement)); } else { addError("Element 'class' or 'expression' is mandatory on executionListener", executionListenerElement); } return executionListener; } /** * Retrieves the {@link Operation} corresponding with the given operation * identifier. 
*/ public Operation getOperation(String operationId) { return operations.get(operationId); } // Diagram interchange // ///////////////////////////////////////////////////////////////// public void parseDiagramInterchangeElements() { // Multiple BPMNDiagram possible List<Element> diagrams = rootElement.elementsNS(BpmnParser.BPMN_DI_NS, "BPMNDiagram"); if (!diagrams.isEmpty()) { for (Element diagramElement : diagrams) { parseBPMNDiagram(diagramElement); } } } public void parseBPMNDiagram(Element bpmndiagramElement) { // Each BPMNdiagram needs to have exactly one BPMNPlane Element bpmnPlane = bpmndiagramElement.elementNS(BpmnParser.BPMN_DI_NS, "BPMNPlane"); if (bpmnPlane != null) { parseBPMNPlane(bpmnPlane); } } public void parseBPMNPlane(Element bpmnPlaneElement) { String bpmnElement = bpmnPlaneElement.attribute("bpmnElement"); if (bpmnElement != null && !"".equals(bpmnElement)) { // there seems to be only on process without collaboration if (getProcessDefinition(bpmnElement) != null) { getProcessDefinition(bpmnElement).setGraphicalNotationDefined(true); } List<Element> shapes = bpmnPlaneElement.elementsNS(BpmnParser.BPMN_DI_NS, "BPMNShape"); for (Element shape : shapes) { parseBPMNShape(shape); } List<Element> edges = bpmnPlaneElement.elementsNS(BpmnParser.BPMN_DI_NS, "BPMNEdge"); for (Element edge : edges) { parseBPMNEdge(edge); } } else { addError("'bpmnElement' attribute is required on BPMNPlane ", bpmnPlaneElement); } } public void parseBPMNShape(Element bpmnShapeElement) { String bpmnElement = bpmnShapeElement.attribute("bpmnElement"); if (bpmnElement != null && !"".equals(bpmnElement)) { // For collaborations, their are also shape definitions for the // participants / processes if (participantProcesses.get(bpmnElement) != null) { ProcessDefinitionEntity procDef = getProcessDefinition(participantProcesses.get(bpmnElement)); procDef.setGraphicalNotationDefined(true); // The participation that references this process, has a bounds to be rendered + a name as wel 
parseDIBounds(bpmnShapeElement, procDef.getParticipantProcess()); return; } for (ProcessDefinitionEntity processDefinition : getProcessDefinitions()) { ActivityImpl activity = processDefinition.findActivity(bpmnElement); if (activity != null) { parseDIBounds(bpmnShapeElement, activity); // collapsed or expanded String isExpanded = bpmnShapeElement.attribute("isExpanded"); if (isExpanded != null) { activity.setProperty(PROPERTYNAME_ISEXPANDED, parseBooleanAttribute(isExpanded)); } } else { Lane lane = processDefinition.getLaneForId(bpmnElement); if(lane != null) { // The shape represents a lane parseDIBounds(bpmnShapeElement, lane); } else if(!elementIds.contains(bpmnElement)) { // It might not be an activity nor a lane, but it might still reference 'something' addError("Invalid reference in 'bpmnElement' attribute, activity " + bpmnElement + "not found", bpmnShapeElement); } } } } else { addError("'bpmnElement' attribute is required on BPMNShape", bpmnShapeElement); } } protected void parseDIBounds(Element bpmnShapeElement, HasDIBounds target) { Element bounds = bpmnShapeElement.elementNS(BpmnParser.BPMN_DC_NS, "Bounds"); if (bounds != null) { target.setX(parseDoubleAttribute(bpmnShapeElement, "x", bounds.attribute("x"), true).intValue()); target.setY(parseDoubleAttribute(bpmnShapeElement, "y", bounds.attribute("y"), true).intValue()); target.setWidth(parseDoubleAttribute(bpmnShapeElement, "width", bounds.attribute("width"), true).intValue()); target.setHeight(parseDoubleAttribute(bpmnShapeElement, "height", bounds.attribute("height"), true).intValue()); } else { addError("'Bounds' element is required", bpmnShapeElement); } } public void parseBPMNEdge(Element bpmnEdgeElement) { String sequenceFlowId = bpmnEdgeElement.attribute("bpmnElement"); if (sequenceFlowId != null && !"".equals(sequenceFlowId)) { if (sequenceFlows != null && sequenceFlows.containsKey(sequenceFlowId)) { TransitionImpl sequenceFlow = sequenceFlows.get(sequenceFlowId); List<Element> 
waypointElements = bpmnEdgeElement.elementsNS(BpmnParser.OMG_DI_NS, "waypoint"); if (waypointElements.size() >= 2) { List<Integer> waypoints = new ArrayList<Integer>(); for (Element waypointElement : waypointElements) { waypoints.add(parseDoubleAttribute(waypointElement, "x", waypointElement.attribute("x"), true).intValue()); waypoints.add(parseDoubleAttribute(waypointElement, "y", waypointElement.attribute("y"), true).intValue()); } sequenceFlow.setWaypoints(waypoints); } else { addError("Minimum 2 waypoint elements must be definted for a 'BPMNEdge'", bpmnEdgeElement); } } else if(!elementIds.contains(sequenceFlowId)) { // it might not be a sequenceFlow but it might still reference 'something' addError("Invalid reference in 'bpmnElement' attribute, sequenceFlow " + sequenceFlowId + "not found", bpmnEdgeElement); } } else { addError("'bpmnElement' attribute is required on BPMNEdge", bpmnEdgeElement); } } // Getters, setters and Parser overriden operations // //////////////////////////////////////// public List<ProcessDefinitionEntity> getProcessDefinitions() { return processDefinitions; } public ProcessDefinitionEntity getProcessDefinition(String processDefinitionKey) { for (ProcessDefinitionEntity processDefinition : processDefinitions) { if (processDefinition.getKey().equals(processDefinitionKey)) { return processDefinition; } } return null; } @Override public BpmnParse name(String name) { super.name(name); return this; } @Override public BpmnParse sourceInputStream(InputStream inputStream) { super.sourceInputStream(inputStream); return this; } @Override public BpmnParse sourceResource(String resource, ClassLoader classLoader) { super.sourceResource(resource, classLoader); return this; } @Override public BpmnParse sourceResource(String resource) { super.sourceResource(resource); return this; } @Override public BpmnParse sourceString(String string) { super.sourceString(string); return this; } @Override public BpmnParse sourceUrl(String url) { super.sourceUrl(url); 
return this; } @Override public BpmnParse sourceUrl(URL url) { super.sourceUrl(url); return this; } public void addStructure(StructureDefinition structure) { this.structures.put(structure.getId(), structure); } public void addService(BpmnInterfaceImplementation bpmnInterfaceImplementation) { this.interfaceImplementations.put(bpmnInterfaceImplementation.getName(), bpmnInterfaceImplementation); } public void addOperation(OperationImplementation operationImplementation) { this.operationImplementations.put(operationImplementation.getId(), operationImplementation); } public Boolean parseBooleanAttribute(String booleanText, boolean defaultValue) { if (booleanText == null) { return defaultValue; } else { return parseBooleanAttribute(booleanText); } } public Boolean parseBooleanAttribute(String booleanText) { if ("true".equals(booleanText) || "enabled".equals(booleanText) || "on".equals(booleanText) || "active".equals(booleanText) || "yes".equals(booleanText)) { return Boolean.TRUE; } if ("false".equals(booleanText) || "disabled".equals(booleanText) || "off".equals(booleanText) || "inactive".equals(booleanText) || "no".equals(booleanText)) { return Boolean.FALSE; } return null; } public Double parseDoubleAttribute(Element element, String attributename, String doubleText, boolean required) { if (required && (doubleText == null || "".equals(doubleText))) { addError(attributename + " is required", element); } else { try { return Double.parseDouble(doubleText); } catch (NumberFormatException e) { addError("Cannot parse " + attributename + ": " + e.getMessage(), element); } } return -1.0; } protected boolean isExclusive(Element element) { return "true".equals(element.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "exclusive", String.valueOf(JobEntity.DEFAULT_EXCLUSIVE))); } protected boolean isAsync(Element element) { return "true".equals(element.attributeNS(BpmnParser.ACTIVITI_BPMN_EXTENSIONS_NS, "async")); } }
Removed typeLanguage warning who everybody hates anyway.
modules/activiti-engine/src/main/java/org/activiti/engine/impl/bpmn/parser/BpmnParse.java
Removed typeLanguage warning who everybody hates anyway.
<ide><path>odules/activiti-engine/src/main/java/org/activiti/engine/impl/bpmn/parser/BpmnParse.java <ide> } <ide> <ide> protected void parseDefinitionsAttributes() { <del> String typeLanguage = rootElement.attribute("typeLanguage"); <del> String expressionLanguage = rootElement.attribute("expressionLanguage"); <ide> this.targetNamespace = rootElement.attribute("targetNamespace"); <del> <del> if (typeLanguage != null) { <del> if (typeLanguage.contains("XMLSchema")) { <del> LOGGER.info("XMLSchema currently not supported as typeLanguage"); <del> } <del> } <del> <del> if (expressionLanguage != null) { <del> if (expressionLanguage.contains("XPath")) { <del> LOGGER.info("XPath currently not supported as expressionLanguage"); <del> } <del> } <ide> <ide> for (String attribute : rootElement.attributes()) { <ide> if (attribute.startsWith("xmlns:")) {
JavaScript
agpl-3.0
44217c84a34b724d8782742087feb6fc4998e4e3
0
icescrum/iceScrum,icescrum/iceScrum
/* * Copyright (c) 2015 Kagilum SAS. * * This file is part of iceScrum. * * iceScrum is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as published by * the Free Software Foundation, either version 3 of the License. * * iceScrum is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with iceScrum. If not, see <http://www.gnu.org/licenses/>. * * Authors: * * Vincent Barrier ([email protected]) * Nicolas Noullet ([email protected]) * */ // Try to dectect as early as possible that the root misses as slash // to trigger a redirect and lose as little time as possible (function() { if (window.location.hash == '') { var fullPath = window.location.href; if (fullPath[fullPath.length - 1] != '/' && fullPath.indexOf('/?') == -1) { if (fullPath.indexOf('?') > -1) { fullPath = fullPath.replace('?', '/?'); } else { fullPath = fullPath + '/' } window.location.replace(fullPath); throw new Error("Stopping page loading because a forward slash is missing, redirecting to the proper URL..."); } } })(); angular.module('isApp', [ 'ngRoute', 'ngAnimate', 'ngSanitize', 'controllers', 'services', 'filters', 'directives', 'ui.router', 'ui.router.stateHelper', 'ui.bootstrap', 'ui.select', 'monospaced.elastic', 'cfp.hotkeys', 'colorpicker.module', 'mgo-angular-wizard', 'ngPasswordStrength', 'flow', 'ngPDFViewer', 'remoteValidation', 'FBAngular', 'angular-extended-notifications', 'as.sortable', 'angular.atmosphere', 'nvd3' ]).config(['stateHelperProvider', '$httpProvider', '$urlRouterProvider', '$stateProvider', function(stateHelperProvider, $httpProvider, $urlRouterProvider, $stateProvider) { $httpProvider.interceptors.push([ '$injector', function($injector) { return 
$injector.get('AuthInterceptor'); } ]); $stateProvider.decorator('parent', function(state, parentFn) { state.self.$$state = function() { return state; }; state.self.isSetAuthorize = function() { return angular.isDefined(state.data) && angular.isDefined(state.data.authorize); }; return parentFn(state); }); $httpProvider.defaults.headers.common["X-Requested-With"] = 'XMLHttpRequest'; $urlRouterProvider.when('', '/'); var getDetailsModalState = function(detailsType, options) { return _.merge({ name: detailsType, url: '/' + detailsType, abstract: true, resolve: { modalHolder: [function() { return {}; }] }, onEnter: ['$state', '$uibModal', 'modalHolder', function($state, $uibModal, modalHolder) { var goToCaller = function(reason) { if (reason !== true) { $state.go(($state.params[detailsType + 'TabId'] ? '^.' : '') + '^.^'); } }; modalHolder.modal = $uibModal.open({ templateUrl: 'details.modal.html', controller: ['$scope', function($scope) { $scope.detailsType = detailsType; $scope.isModal = true; }] }); modalHolder.modal.result.then(goToCaller, goToCaller); }], onExit: ['modalHolder', function(modalHolder) { modalHolder.modal.dismiss(true) }] }, options); }; var getTaskDetailsState = function(viewContext) { var options = { name: 'details', url: "/{taskId:int}", resolve: { detailsTask: ['$stateParams', 'taskContext', function($stateParams, taskContext) { return _.find(taskContext.tasks, {id: $stateParams.taskId}) }] }, views: {}, children: [ { name: 'tab', url: "/{taskTabId:(?:comments|activities)}", resolve: { data: ['$stateParams', 'ActivityService', 'CommentService', 'detailsTask', function($stateParams, ActivityService, CommentService, detailsTask) { if ($stateParams.taskTabId == 'comments') { return CommentService.list(detailsTask); } else if ($stateParams.taskTabId == 'activities') { return ActivityService.activities(detailsTask, false); } return null; }], //we add data to wait for dynamic resolution - not used only for story.xxxx to be loaded selected: ['data', 
'detailsTask', function(data, detailsTask) { return detailsTask; }] }, views: { "details-tab": { templateUrl: function($stateParams) { var tpl; if ($stateParams.taskTabId == 'comments') { tpl = 'comment.list.html'; } else if ($stateParams.taskTabId == 'activities') { tpl = 'activity.list.html'; } return tpl; }, controller: ['$scope', '$controller', '$stateParams', 'selected', function($scope, $controller, $stateParams, selected) { $scope.selected = selected; if ($stateParams.taskTabId == 'activities') { $controller('activityCtrl', {$scope: $scope, selected: selected}); } }] } } } ] }; options.views['details' + (viewContext ? viewContext : '')] = { templateUrl: 'task.details.html', controller: 'taskDetailsCtrl' }; return options; }; var getFeatureDetailsState = function(viewContext, isModal) { var options = { name: 'details', url: "/{featureId:int}", resolve: { //we add features to wait for dynamic resolution from parent state detailsFeature: ['FeatureService', '$stateParams', 'features', function(FeatureService, $stateParams, features) { return FeatureService.get($stateParams.featureId); }] }, views: {}, children: [ { name: 'tab', url: "/{featureTabId:stories}", resolve: { selected: ['StoryService', 'detailsFeature', function(StoryService, detailsFeature) { return StoryService.listByType(detailsFeature).then(function() { return detailsFeature; }); }] }, views: { "details-tab": { templateUrl: 'nested.stories.html', controller: ['$scope', 'selected', function($scope, selected) { $scope.selected = selected; }] } } } ] }; options.views['details' + (viewContext ? 
viewContext : '')] = { templateUrl: 'feature.details.html', controller: 'featureDetailsCtrl' }; if (!isModal) { options.children[0].children = [ getDetailsModalState('story', { children: [getStoryDetailsState('@', true)] }) ]; } return options; }; var getStoryDetailsState = function(viewContext, isModal) { var options = { name: 'details', url: "/{storyId:int}", resolve: { detailsStory: ['StoryService', '$stateParams', function(StoryService, $stateParams) { return StoryService.get($stateParams.storyId); }] }, views: {}, children: [ { name: 'tab', url: "/{storyTabId:(?:tests|tasks|comments|activities)}", resolve: { data: ['$stateParams', 'AcceptanceTestService', 'CommentService', 'TaskService', 'ActivityService', 'detailsStory', function($stateParams, AcceptanceTestService, CommentService, TaskService, ActivityService, detailsStory) { if ($stateParams.storyTabId == 'tests') { return AcceptanceTestService.list(detailsStory); } else if ($stateParams.storyTabId == 'tasks') { return TaskService.list(detailsStory); } else if ($stateParams.storyTabId == 'comments') { return CommentService.list(detailsStory); } else if ($stateParams.storyTabId == 'activities') { return ActivityService.activities(detailsStory, false); } return null; }], //we add data to wait for dynamic resolution - not used only for story.xxxx to be loaded selected: ['data', 'detailsStory', function(data, detailsStory) { return detailsStory; }] }, views: { "details-tab": { templateUrl: function($stateParams) { var tpl; if ($stateParams.storyTabId == 'tests') { tpl = 'story.acceptanceTests.html'; } else if ($stateParams.storyTabId == 'tasks') { tpl = 'story.tasks.html'; } else if ($stateParams.storyTabId == 'comments') { tpl = 'comment.list.html'; } else if ($stateParams.storyTabId == 'activities') { tpl = 'activity.list.html'; } return tpl; }, controller: ['$scope', '$controller', '$stateParams', 'selected', function($scope, $controller, $stateParams, selected) { $scope.selected = selected; if 
($stateParams.storyTabId == 'activities') { $controller('activityCtrl', {$scope: $scope, selected: selected}); } else if ($stateParams.storyTabId == 'tasks') { $controller('taskStoryCtrl', {$scope: $scope}); } }] } } } ] }; options.views['details' + (viewContext ? viewContext : '')] = { templateUrl: 'story.details.html', controller: 'storyDetailsCtrl' }; if (!isModal) { options.children[0].children = [ getDetailsModalState('task', { resolve: { taskContext: ['selected', function(selected) { return selected; }] }, children: [getTaskDetailsState('@')] }) ]; options.children.push(getDetailsModalState('feature', { resolve: { features: ['FeatureService', function(FeatureService) { return FeatureService.list(); }] }, children: [getFeatureDetailsState('@', true)] })); } return options; }; stateHelperProvider .state({ name: 'root', url: '/', controller: ['$state', function($state) { var isInProject = window.location.pathname.indexOf('/p/') != -1; $state.go(isInProject ? 'project' : 'home'); }] }) .state({ name: 'home', // should not be acceded directly, called by 'root' templateUrl: 'home.html', controller: 'homeCtrl' }) .state({ name: 'userregister', url: "/user/register/:token", params: {token: {value: null}}, // doesn't work currently but it should, see https://github.com/angular-ui/ui-router/pull/1032 & https://github.com/angular-ui/ui-router/issues/1652 onEnter: ["$state", "$uibModal", "$rootScope", function($state, $uibModal, $rootScope) { $uibModal.open({ keyboard: false, templateUrl: $rootScope.serverUrl + '/user/register', controller: 'registerCtrl' }).result.then(function(username) { $state.transitionTo('root'); $rootScope.showAuthModal(username); }, function() { $state.transitionTo('root'); }); }] }) .state({ name: 'userretrieve', url: "/user/retrieve", onEnter: ["$state", "$uibModal", "$rootScope", function($state, $uibModal, $rootScope) { $uibModal.open({ templateUrl: $rootScope.serverUrl + '/user/retrieve', size: 'sm', controller: 'retrieveCtrl' 
}).result.then(function() { $state.transitionTo('root'); }, function() { $state.transitionTo('root'); }); }] }) .state({ name: 'project', // should not be acceded directly, called by 'root' templateUrl: 'openWindow/project', controller: 'projectCtrl' }) .state({ name: 'newProject', url: "/project/new", onEnter: ["$state", "$uibModal", "$rootScope", function($state, $uibModal, $rootScope) { $uibModal.open({ keyboard: false, backdrop: 'static', templateUrl: $rootScope.serverUrl + "/project/add", size: 'lg', controller: 'newProjectCtrl' }).result.then(function() { $state.transitionTo('root'); }, function() { $state.transitionTo('root'); }); }] }) .state({ name: 'backlog', url: "/backlog", templateUrl: 'openWindow/backlog', controller: 'backlogCtrl', //example //todo remove once it works well data: { authorize: { roles: ['authenticated'] } }, resolve: { backlogs: ['BacklogService', function(BacklogService) { return BacklogService.list(); }] }, children: [ { name: 'new', url: "/new", views: { "details": { templateUrl: 'story.new.html', controller: 'storyNewCtrl' } } }, { name: 'multiple', url: "/{listId:[0-9]+(?:[\,][0-9]+)+}", resolve: { listId: ['$stateParams', function($stateParams) { return $stateParams.listId.split(','); }] }, views: { "details": { templateUrl: 'story.multiple.html', controller: 'storyMultipleCtrl' } }, children: [ getDetailsModalState('feature', { resolve: { features: ['FeatureService', function(FeatureService) { return FeatureService.list(); }] }, children: [getFeatureDetailsState('@', true)] }) ] }, getStoryDetailsState() ] }) .state({ name: 'feature', url: "/feature", templateUrl: 'openWindow/feature', controller: 'featuresCtrl', resolve: { features: ['FeatureService', function(FeatureService) { return FeatureService.list(); }] }, children: [ { name: 'new', url: '/new', views: { "details": { templateUrl: 'feature.new.html', controller: 'featureNewCtrl' } } }, { name: 'multiple', url: "/{listId:[0-9]+(?:[\,][0-9]+)+}", resolve: { listId: 
['$stateParams', function($stateParams) { return $stateParams.listId.split(','); }] }, views: { "details": { templateUrl: 'feature.multiple.html', controller: 'featureMultipleCtrl' } } }, getFeatureDetailsState() ] }) .state({ name: 'planning', url: "/planning", templateUrl: 'openWindow/planning', controller: 'planningCtrl', resolve: { project: ['Session', function(Session) { return Session.getProjectPromise(); }], releases: ['$q', 'ReleaseService', 'SprintService', 'project', function($q, ReleaseService, SprintService, project) { return ReleaseService.list(project).then(function(releases) { // Wait for releases return $q.all(_.map(releases, SprintService.list)).then(function() { // Wait for sprints return releases; // Finally resolve the releases }); }); }] }, children: [ { name: 'new', url: "/new", views: { "details": { templateUrl: 'release.new.html', controller: 'releaseNewCtrl' } } }, { name: 'release', url: "/{releaseId:int}", resolve: { detailsRelease: ['$stateParams', 'releases', function($stateParams, releases) { return _.find(releases, {id: $stateParams.releaseId}) }], sprints: ['detailsRelease', function(detailsRelease) { return detailsRelease.sprints; }] }, children: [ { name: 'details', url: "/details", views: { "details@planning": { templateUrl: 'release.details.html', controller: 'releaseDetailsCtrl' } } }, { name: 'story', url: "/story", children: [getStoryDetailsState('@planning')] }, { name: 'sprint', url: "/sprint", children: [ { name: 'new', url: "/new", views: { "details@planning": { templateUrl: 'sprint.new.html', controller: 'sprintNewCtrl' } } }, { name: 'withId', url: "/{sprintId:int}", resolve: { detailsSprint: ['$stateParams', 'detailsRelease', function($stateParams, detailsRelease) { return _.find(detailsRelease.sprints, {id: $stateParams.sprintId}); }] }, children: [ { name: 'details', url: "/details", views: { "details@planning": { templateUrl: 'sprint.details.html', controller: 'sprintDetailsCtrl' } } }, { name: 'story', url: 
"/story", children: [getStoryDetailsState('@planning')] } ] }, { name: 'multiple', url: "/{sprintListId:[0-9]+(?:[\,][0-9]+)+}", children: [ { name: 'details', url: "/details", views: { "details@planning": { templateUrl: 'sprint.multiple.html', controller: 'sprintMultipleCtrl' } } }, { name: 'story', url: "/story", children: [getStoryDetailsState('@planning')] } ] } ] } ] } ] }) .state({ name: 'taskBoard', url: "/taskBoard/{sprintId:int}", params: { sprintId: {value: null, squash: true} }, templateUrl: 'openWindow/taskBoard', controller: 'taskBoardCtrl', resolve: { project: ['Session', function(Session) { return Session.getProjectPromise(); }], sprint: ['$stateParams', '$q', 'SprintService', 'StoryService', 'TaskService', 'project', function($stateParams, $q, SprintService, StoryService, TaskService, project) { var promise = !$stateParams.sprintId ? SprintService.getCurrentOrNextSprint(project) : SprintService.get($stateParams.sprintId, project); return promise.then(function(sprint) { return sprint.id == undefined ? 
undefined : StoryService.listByType(sprint).then(function() { return TaskService.list(sprint).then(function() { return sprint; }); }); }) }] }, children: [ { name: 'details', url: "/details", resolve: { detailsSprint: ['sprint', function(sprint) { return sprint; }] }, views: { "details": { templateUrl: 'sprint.details.html', controller: 'sprintDetailsCtrl' } } }, { name: 'task', url: "/task", resolve: { taskContext: ['sprint', function(sprint) { return sprint; }] }, children: [ { name: 'new', url: "/new", params: { taskTemplate: null }, views: { "details@taskBoard": { templateUrl: 'task.new.html', controller: 'taskNewCtrl' } } }, getTaskDetailsState('@taskBoard') ] }, { name: 'story', url: "/story", children: [getStoryDetailsState('@taskBoard')] } ] }); }]) .config(['flowFactoryProvider', function(flowFactoryProvider) { flowFactoryProvider.defaults = { target: 'attachment/save', //only one at the time => prevent staleObjectException simultaneousUploads: 1 }; flowFactoryProvider.on('catchAll', function(event) { console.log('catchAll', arguments); }); }]) .config(['notificationsProvider', function(notificationsProvider) { notificationsProvider.setDefaults({ faIcons: true, closeOnRouteChange: 'state', duration: 4500 }); }]) .config(['$uibTooltipProvider', function($uibTooltipProvider) { $uibTooltipProvider.options({appendToBody: true}); }]) .config(['uibDatepickerConfig', function(uibDatepickerConfig) { angular.extend(uibDatepickerConfig, { startingDay: 1 // TODO make it i18n }); }]) .config(['uibDatepickerPopupConfig', function(uibDatepickerPopupConfig) { angular.extend(uibDatepickerPopupConfig, { showButtonBar: false, datepickerPopup: 'dd/MM/yyyy' // TODO make it i18n }); }]) .config(['uiSelectConfig', function(uiSelectConfig) { uiSelectConfig.theme = 'select2'; uiSelectConfig.appendToBody = true; uiSelectConfig.searchEnabled = false; }]) .config(['$animateProvider', function($animateProvider) { $animateProvider.classNameFilter(/ng-animate-enabled/); }]) 
.factory('AuthInterceptor', ['$rootScope', '$q', 'SERVER_ERRORS', function($rootScope, $q, SERVER_ERRORS) { return { responseError: function(response) { if (response.status === 401) { $rootScope.$broadcast(SERVER_ERRORS.notAuthenticated, response); } else if (response.status === 403) { $rootScope.$broadcast(SERVER_ERRORS.notAuthorized, response); } else if (response.status === 419 || response.status === 440) { $rootScope.$broadcast(SERVER_ERRORS.sessionTimeout, response); } else if (response.status > 399 && response.status <= 499) { $rootScope.$broadcast(SERVER_ERRORS.clientError, response); } else if (response.status > 499) { $rootScope.$broadcast(SERVER_ERRORS.serverError, response); } return $q.reject(response); } }; }]) .factory('UserTimeZone', function() { return jstz.determine(); }) .run(['Session', '$rootScope', '$timeout', '$state', '$uibModal', '$filter', '$document', '$window', '$interval', 'notifications', function(Session, $rootScope, $timeout, $state, $uibModal, $filter, $document, $window, $interval, notifications) { //used to handle click with shortcut hotkeys $rootScope.hotkeyClick = function(event, hotkey) { if (hotkey.el && (hotkey.el.is("a") || hotkey.el.is("button"))) { event.preventDefault(); $timeout(function() { hotkey.el.click(); }); } }; var $download; $rootScope.downloadFile = function(url) { if ($download) { $download.attr('src', url); } else { $download = $('<iframe>', {id: 'idown', src: url}).hide().appendTo('body'); } }; $rootScope.message = function(code, args) { var text = messages[code] ? 
messages[code] : code; angular.forEach(args, function(arg, index) { var placeholderMatcher = new RegExp('\\{' + index + '\\}', 'g'); text = text.replace(placeholderMatcher, arg); }); return text; }; $rootScope.notifySuccess = function(code, options) { return notifications.success('', $rootScope.message(code), options); }; $rootScope.notifyError = function(code, options) { return notifications.error('', $rootScope.message(code), options); }; $rootScope.notifyWarning = function(code, options) { return notifications.warning('', $rootScope.message(code), options); }; $rootScope.inEditingMode = false; $rootScope.setInEditingMode = function(inEditingMode) { $rootScope.inEditingMode = inEditingMode; }; $rootScope.isInEditingMode = function() { return $rootScope.inEditingMode; }; $rootScope.resetFormValidation = function(form) { if (form) { form.$setPristine(); form.$setUntouched(); } }; $rootScope.confirm = function(options) { var callCallback = function() { if (options.args) { options.callback.apply(options.callback, options.args); } else { options.callback(); } }; if (options.condition !== false) { var modal = $uibModal.open({ templateUrl: 'confirm.modal.html', size: 'sm', controller: ["$scope", "hotkeys", function($scope, hotkeys) { $scope.message = options.message; $scope.submit = function() { callCallback(); $scope.$close(true); }; // Required because there is not input so the form cannot be submitted by "return" hotkeys.bindTo($scope).add({ combo: 'return', callback: $scope.submit }); }] }); var callCloseCallback = function(confirmed) { if (!confirmed && options.closeCallback) { options.closeCallback(); } }; modal.result.then(callCloseCallback, callCloseCallback); } else { callCallback(); } }; $rootScope.showCopyModal = function(title, value) { $uibModal.open({ templateUrl: 'copy.html', size: 'sm', controller: ["$scope", function($scope) { $scope.title = title; $scope.value = value; }] }); }; $rootScope.immutableAddDaysToDate = function(date, days) { var newDate = 
new Date(date); newDate.setDate(date.getDate() + days); return newDate; }; $rootScope.immutableAddMonthsToDate = function(date, months) { var newDate = new Date(date); newDate.setMonth(date.getMonth() + months); return newDate; }; $rootScope.showAuthModal = function(username, loginSuccess, loginFailure) { var childScope = $rootScope.$new(); if (username) { childScope.username = username; } var loginCallback = null; if (loginSuccess) { childScope.loginCallback = true; loginCallback = function(loggedIn) { if (loggedIn) { Session.create().then(function() { loginSuccess(); }); } else { loginFailure(); } }; } $uibModal.open({ keyboard: false, templateUrl: $rootScope.serverUrl + '/login/auth', controller: 'loginCtrl', scope: childScope, size: 'sm' }).result.then(loginCallback); }; $rootScope.durationBetweenDates = function(startDateString, endDateString) { var duration = new Date(endDateString) - new Date(startDateString); return Math.floor(duration / (1000 * 3600 * 24)) + 1; }; $rootScope.openProject = function(project) { document.location = $rootScope.serverUrl + '/p/' + project.pkey + '/'; }; $rootScope.openDatepicker = function($event, holder) { $event.preventDefault(); $event.stopPropagation(); if (holder) { holder.opened = true; } }; // TODO Change ugly hack $rootScope.serverUrl = icescrum.grailsServer; $rootScope.integerSuite = []; for (var i = 0; i < 100; i++) { $rootScope.integerSuite.push(i); } $rootScope.integerSuiteNullable = ['?'].concat($rootScope.integerSuite); $rootScope.fibonacciSuite = [0, 1, 2, 3, 5, 8, 13, 21, 34]; $rootScope.fibonacciSuiteNullable = ['?'].concat($rootScope.fibonacciSuite); $rootScope.app = { asList: false, loading: true, loadingPercent: 0, isFullScreen: false, selectableMultiple: false }; // To be able to track state in views $rootScope.$state = $state; var messages = {}; $rootScope.initMessages = function(initMessages) { messages = initMessages; }; $rootScope.applicationMenus = []; $rootScope.initApplicationMenus = 
function(initMenus) { $rootScope.applicationMenus = initMenus; var menusByVisibility = _.groupBy(initMenus, 'visible'); $rootScope.menus = { visible: _.sortBy(menusByVisibility[true], 'position'), hidden: _.sortBy(menusByVisibility[false], 'position') } }; $rootScope.sortableScrollOptions = function(scrollableContainerSelector) { if (!scrollableContainerSelector) { scrollableContainerSelector = '.panel-body'; } var scrollSpeed = 0; var destScrollableContainer; // Store the dest container because it cannot be retrieved (mouse must be on panel to get the element) and used (mouse is out of panel when we must scroll) in the same move var scheduledScroll = null; var cancelScheduledScroll = function() { scrollSpeed = 0; if (scheduledScroll) { $interval.cancel(scheduledScroll); scheduledScroll = null; } }; return { dragMove: function(itemPosition, containment, eventObj) { $rootScope.app.sortableMoving = true; if (eventObj) { // This HORRIBLE SOUP isolated in a private function gets the dest panel body and stores it in a captured variable. // There may be a better way but it is the way ng-sortable does it (function(eventObj) { var destX = eventObj.pageX - $document[0].documentElement.scrollLeft; var destY = eventObj.pageY - ($window.pageYOffset || $document[0].documentElement.scrollTop); $document[0].elementFromPoint(destX, destY); // This is done twice on purpose, ng-sortable does it like that (don't know why though...) 
var destElement = angular.element($document[0].elementFromPoint(destX, destY)); // Gets the DOM element under the cursor function fetchScope(element) { var scope; while (!scope && element.length) { scope = element.data('_scope'); if (!scope) { element = element.parent(); } } return scope; } var destScope = fetchScope(destElement); // Retrieve the closest scope from the DOM element if (destScope) { destScrollableContainer = angular.element(destScope.element).closest(scrollableContainerSelector)[0]; // Store the dest scrollable container for later use } })(eventObj); // Retrieve scrollable container, very likely stored during a previous move, and scroll if needed (for the moment scroll occurs only when moving) if (destScrollableContainer) { var marginAroundCursor = 30; var targetY = eventObj.pageY - ($window.pageYOffset || $document[0].documentElement.scrollTop); var containerRect = destScrollableContainer.getBoundingClientRect(); var topDifference = containerRect.top - targetY + marginAroundCursor; var bottomDifference = containerRect.bottom - targetY - marginAroundCursor; var cursorUpperThanPanel = topDifference > 0; var cursorLowerThanPanel = bottomDifference < 0; if (cursorUpperThanPanel || cursorLowerThanPanel) { var computeSpeed = function(difference) { return Math.floor(difference / 4); // Magic formula }; scrollSpeed = cursorUpperThanPanel ? 
computeSpeed(topDifference) : computeSpeed(bottomDifference); var moveScroll = function() { destScrollableContainer.scrollTop = destScrollableContainer.scrollTop - scrollSpeed; }; moveScroll(); // With the solution above, scroll occurs only when moving the cursor so we define a recurring callback to sustain the scroll when not moving if (!scheduledScroll) { var timeInterval = 4; // 4 ms scheduledScroll between each automatic scroll scheduledScroll = $interval(moveScroll, timeInterval); } } else if (scheduledScroll != null) { cancelScheduledScroll(); } } } }, dragEnd: function() { $rootScope.app.sortableMoving = false; cancelScheduledScroll(); // Prevent persistent scroll in case of release out of sortable container } } }; $rootScope.$on('$stateChangeStart', function(event, toState, toParams, fromState, fromParams, options) { if (!event.defaultPrevented) { var state = toState.$$state(); authorized = true; if (state.isSetAuthorize()) { var authorized = false; _.every(state.data.authorize.roles, function(role) { authorized = role.indexOf('!') > -1 ? 
!Session[role.substring(role.indexOf('!') + 1)]() : (Session[role]() === true); return authorized; }); } if (!authorized) { event.preventDefault(); if (!Session.authenticated()) { $rootScope.showAuthModal('', function() { $state.go(toState.name); }); } else { $state.go(fromState.name); } } } }); }]) .constant('SERVER_ERRORS', { loginFailed: 'auth-login-failed', sessionTimeout: 'auth-session-timeout', notAuthenticated: 'auth-not-authenticated', notAuthorized: 'auth-not-authorized', clientError: 'client-error', serverError: 'server-error' }) .constant('BacklogCodes', { SANDBOX: 'sandbox', BACKLOG: 'backlog', DONE: 'done', ALL: 'all', SPRINT: 'sprint' }) .constant('StoryStatesByName', { "SUGGESTED": 1, "ACCEPTED": 2, "ESTIMATED": 3, "PLANNED": 4, "IN_PROGRESS": 5, "DONE": 7, "ICEBOX": -1 }) .constant('TaskStatesByName', { "WAIT": 0, "IN_PROGRESS": 1, "DONE": 2 }) .constant('AcceptanceTestStatesByName', { "TOCHECK": 1, "FAILED": 5, "SUCCESS": 10 }) .constant('SprintStatesByName', { "WAIT": 1, "IN_PROGRESS": 2, "DONE": 3 }) .constant('FeatureStatesByName', { "WAIT": 0, "IN_PROGRESS": 1, "DONE": 2 }) .constant('ReleaseStatesByName', { "WAIT": 1, "IN_PROGRESS": 2, "DONE": 3 }) .constant('MoodFeelingsByName', { "GOOD": 2, "MEH": 1, "BAD": 0 }) .constant('USER_ROLES', { // TODO consider deleting (used only for dev user role switch) PO_SM: 'PO_SM', PO: 'PO', SM: 'SM', TM: 'TM', SH: 'SH' }) .constant('IceScrumEventType', { CREATE: 'CREATE', UPDATE: 'UPDATE', DELETE: 'DELETE' }) .constant('TaskConstants', { ORDER_BY: [function(task) { return -task.type }, 'parentStory.rank', 'state', 'rank'] });
// grails-app/assets/javascripts/app/app.js
/* * Copyright (c) 2015 Kagilum SAS. * * This file is part of iceScrum. * * iceScrum is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as published by * the Free Software Foundation, either version 3 of the License. * * iceScrum is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with iceScrum. If not, see <http://www.gnu.org/licenses/>. * * Authors: * * Vincent Barrier ([email protected]) * Nicolas Noullet ([email protected]) * */ // Try to dectect as early as possible that the root misses as slash // to trigger a redirect and lose as little time as possible (function() { if (window.location.hash == '') { var fullPath = window.location.href; if (fullPath[fullPath.length - 1] != '/' && fullPath.indexOf('/?') == -1) { if (fullPath.indexOf('?') > -1) { fullPath = fullPath.replace('?', '/?'); } else { fullPath = fullPath + '/' } window.location.replace(fullPath); throw new Error("Stopping page loading because a forward slash is missing, redirecting to the proper URL..."); } } })(); angular.module('isApp', [ 'ngRoute', 'ngAnimate', 'ngSanitize', 'controllers', 'services', 'filters', 'directives', 'ui.router', 'ui.router.stateHelper', 'ui.bootstrap', 'ui.select', 'monospaced.elastic', 'cfp.hotkeys', 'colorpicker.module', 'mgo-angular-wizard', 'ngPasswordStrength', 'flow', 'ngPDFViewer', 'remoteValidation', 'FBAngular', 'angular-extended-notifications', 'as.sortable', 'angular.atmosphere', 'nvd3' ]).config(['stateHelperProvider', '$httpProvider', '$urlRouterProvider', '$stateProvider', function(stateHelperProvider, $httpProvider, $urlRouterProvider, $stateProvider) { $httpProvider.interceptors.push([ '$injector', function($injector) { return 
$injector.get('AuthInterceptor'); } ]); $stateProvider.decorator('parent', function (state, parentFn) { state.self.$$state = function () { return state; }; state.self.isSetAuthorize = function () { return angular.isDefined(state.data) && angular.isDefined(state.data.authorize); }; return parentFn(state); }); $httpProvider.defaults.headers.common["X-Requested-With"] = 'XMLHttpRequest'; $urlRouterProvider.when('', '/'); var getDetailsModalState = function(detailsType, options) { return _.merge({ name: detailsType, url: '/' + detailsType, abstract: true, resolve: { modalHolder: [function() { return {}; }] }, onEnter: ['$state', '$uibModal', 'modalHolder', function($state, $uibModal, modalHolder) { var goToCaller = function(reason) { if (reason !== true) { $state.go(($state.params[detailsType + 'TabId'] ? '^.' : '') + '^.^'); } }; modalHolder.modal = $uibModal.open({ templateUrl: 'details.modal.html', controller: ['$scope', function($scope) { $scope.detailsType = detailsType; $scope.isModal = true; }] }); modalHolder.modal.result.then(goToCaller, goToCaller); }], onExit: ['modalHolder', function(modalHolder) { modalHolder.modal.dismiss(true) }] }, options); }; var getTaskDetailsState = function(viewContext) { var options = { name: 'details', url: "/{taskId:int}", resolve: { detailsTask: ['$stateParams', 'taskContext', function($stateParams, taskContext) { return _.find(taskContext.tasks, {id: $stateParams.taskId}) }] }, views: {}, children: [ { name: 'tab', url: "/{taskTabId:(?:comments|activities)}", resolve: { data: ['$stateParams', 'ActivityService', 'CommentService', 'detailsTask', function($stateParams, ActivityService, CommentService, detailsTask) { if ($stateParams.taskTabId == 'comments') { return CommentService.list(detailsTask); } else if ($stateParams.taskTabId == 'activities') { return ActivityService.activities(detailsTask, false); } return null; }], //we add data to wait for dynamic resolution - not used only for story.xxxx to be loaded selected: ['data', 
'detailsTask', function(data, detailsTask) { return detailsTask; }] }, views: { "details-tab": { templateUrl: function($stateParams) { var tpl; if ($stateParams.taskTabId == 'comments') { tpl = 'comment.list.html'; } else if ($stateParams.taskTabId == 'activities') { tpl = 'activity.list.html'; } return tpl; }, controller: ['$scope', '$controller', '$stateParams', 'selected', function($scope, $controller, $stateParams, selected) { $scope.selected = selected; if ($stateParams.taskTabId == 'activities') { $controller('activityCtrl', {$scope: $scope, selected: selected}); } }] } } } ] }; options.views['details' + (viewContext ? viewContext : '')] = { templateUrl: 'task.details.html', controller: 'taskDetailsCtrl' }; return options; }; var getFeatureDetailsState = function(viewContext, isModal) { var options = { name: 'details', url: "/{featureId:int}", resolve: { //we add features to wait for dynamic resolution from parent state detailsFeature: ['FeatureService', '$stateParams', 'features', function(FeatureService, $stateParams, features) { return FeatureService.get($stateParams.featureId); }] }, views: {}, children: [ { name: 'tab', url: "/{featureTabId:stories}", resolve: { selected: ['StoryService', 'detailsFeature', function(StoryService, detailsFeature) { return StoryService.listByType(detailsFeature).then(function() { return detailsFeature; }); }] }, views: { "details-tab": { templateUrl: 'nested.stories.html', controller: ['$scope', 'selected', function($scope, selected) { $scope.selected = selected; }] } } } ] }; options.views['details' + (viewContext ? 
viewContext : '')] = { templateUrl: 'feature.details.html', controller: 'featureDetailsCtrl' }; if (!isModal) { options.children[0].children = [ getDetailsModalState('story', { children: [getStoryDetailsState('@', true)] }) ]; } return options; }; var getStoryDetailsState = function(viewContext, isModal) { var options = { name: 'details', url: "/{storyId:int}", resolve: { detailsStory: ['StoryService', '$stateParams', function(StoryService, $stateParams) { return StoryService.get($stateParams.storyId); }] }, views: {}, children: [ { name: 'tab', url: "/{storyTabId:(?:tests|tasks|comments|activities)}", resolve: { data: ['$stateParams', 'AcceptanceTestService', 'CommentService', 'TaskService', 'ActivityService', 'detailsStory', function($stateParams, AcceptanceTestService, CommentService, TaskService, ActivityService, detailsStory) { if ($stateParams.storyTabId == 'tests') { return AcceptanceTestService.list(detailsStory); } else if ($stateParams.storyTabId == 'tasks') { return TaskService.list(detailsStory); } else if ($stateParams.storyTabId == 'comments') { return CommentService.list(detailsStory); } else if ($stateParams.storyTabId == 'activities') { return ActivityService.activities(detailsStory, false); } return null; }], //we add data to wait for dynamic resolution - not used only for story.xxxx to be loaded selected: ['data', 'detailsStory', function(data, detailsStory) { return detailsStory; }] }, views: { "details-tab": { templateUrl: function($stateParams) { var tpl; if ($stateParams.storyTabId == 'tests') { tpl = 'story.acceptanceTests.html'; } else if ($stateParams.storyTabId == 'tasks') { tpl = 'story.tasks.html'; } else if ($stateParams.storyTabId == 'comments') { tpl = 'comment.list.html'; } else if ($stateParams.storyTabId == 'activities') { tpl = 'activity.list.html'; } return tpl; }, controller: ['$scope', '$controller', '$stateParams', 'selected', function($scope, $controller, $stateParams, selected) { $scope.selected = selected; if 
($stateParams.storyTabId == 'activities') { $controller('activityCtrl', {$scope: $scope, selected: selected}); } else if ($stateParams.storyTabId == 'tasks') { $controller('taskStoryCtrl', {$scope: $scope}); } }] } } } ] }; options.views['details' + (viewContext ? viewContext : '')] = { templateUrl: 'story.details.html', controller: 'storyDetailsCtrl' }; if (!isModal) { options.children[0].children = [ getDetailsModalState('task', { resolve: { taskContext: ['selected', function(selected) { return selected; }] }, children: [getTaskDetailsState('@')] }) ]; options.children.push(getDetailsModalState('feature', { resolve: { features: ['FeatureService', function(FeatureService) { return FeatureService.list(); }] }, children: [getFeatureDetailsState('@', true)] })); } return options; }; stateHelperProvider .state({ name: 'root', url: '/', controller: ['$state', function($state) { var isInProject = window.location.pathname.indexOf('/p/') != -1; $state.go(isInProject ? 'project' : 'home'); }] }) .state({ name: 'home', // should not be acceded directly, called by 'root' templateUrl: 'home.html', controller: 'homeCtrl' }) .state({ name: 'userregister', url: "/user/register/:token", params: {token: {value: null}}, // doesn't work currently but it should, see https://github.com/angular-ui/ui-router/pull/1032 & https://github.com/angular-ui/ui-router/issues/1652 onEnter: ["$state", "$uibModal", "$rootScope", function($state, $uibModal, $rootScope) { $uibModal.open({ keyboard: false, templateUrl: $rootScope.serverUrl + '/user/register', controller: 'registerCtrl' }).result.then(function(username) { $state.transitionTo('root'); $rootScope.showAuthModal(username); }, function() { $state.transitionTo('root'); }); }] }) .state({ name: 'userretrieve', url: "/user/retrieve", onEnter: ["$state", "$uibModal", "$rootScope", function($state, $uibModal, $rootScope) { $uibModal.open({ templateUrl: $rootScope.serverUrl + '/user/retrieve', size: 'sm', controller: 'retrieveCtrl' 
}).result.then(function() { $state.transitionTo('root'); }, function() { $state.transitionTo('root'); }); }] }) .state({ name: 'project', // should not be acceded directly, called by 'root' templateUrl: 'openWindow/project', controller: 'projectCtrl' }) .state({ name: 'newProject', url: "/project/new", onEnter: ["$state", "$uibModal", "$rootScope", function($state, $uibModal, $rootScope) { $uibModal.open({ keyboard: false, backdrop: 'static', templateUrl: $rootScope.serverUrl + "/project/add", size: 'lg', controller: 'newProjectCtrl' }).result.then(function() { $state.transitionTo('root'); }, function() { $state.transitionTo('root'); }); }] }) .state({ name: 'backlog', url: "/backlog", templateUrl: 'openWindow/backlog', controller: 'backlogCtrl', //example //todo remove once it works well data: { authorize: { roles: ['authenticated'] } }, resolve: { backlogs: ['BacklogService', function(BacklogService) { return BacklogService.list(); }] }, children: [ { name: 'new', url: "/new", views: { "details": { templateUrl: 'story.new.html', controller: 'storyNewCtrl' } } }, { name: 'multiple', url: "/{listId:[0-9]+(?:[\,][0-9]+)+}", resolve: { listId: ['$stateParams', function($stateParams) { return $stateParams.listId.split(','); }] }, views: { "details": { templateUrl: 'story.multiple.html', controller: 'storyMultipleCtrl' } }, children: [ getDetailsModalState('feature', { resolve: { features: ['FeatureService', function(FeatureService) { return FeatureService.list(); }] }, children: [getFeatureDetailsState('@', true)] }) ] }, getStoryDetailsState() ] }) .state({ name: 'feature', url: "/feature", templateUrl: 'openWindow/feature', controller: 'featuresCtrl', resolve: { features: ['FeatureService', function(FeatureService) { return FeatureService.list(); }] }, children: [ { name: 'new', url: '/new', views: { "details": { templateUrl: 'feature.new.html', controller: 'featureNewCtrl' } } }, { name: 'multiple', url: "/{listId:[0-9]+(?:[\,][0-9]+)+}", resolve: { listId: 
['$stateParams', function($stateParams) { return $stateParams.listId.split(','); }] }, views: { "details": { templateUrl: 'feature.multiple.html', controller: 'featureMultipleCtrl' } } }, getFeatureDetailsState() ] }) .state({ name: 'planning', url: "/planning", templateUrl: 'openWindow/planning', controller: 'planningCtrl', resolve: { project: ['Session', function(Session) { return Session.getProjectPromise(); }], releases: ['$q', 'ReleaseService', 'SprintService', 'project', function($q, ReleaseService, SprintService, project) { return ReleaseService.list(project).then(function(releases) { // Wait for releases return $q.all(_.map(releases, SprintService.list)).then(function() { // Wait for sprints return releases; // Finally resolve the releases }); }); }] }, children: [ { name: 'new', url: "/new", views: { "details": { templateUrl: 'release.new.html', controller: 'releaseNewCtrl' } } }, { name: 'release', url: "/{releaseId:int}", resolve: { detailsRelease: ['$stateParams', 'releases', function($stateParams, releases) { return _.find(releases, {id: $stateParams.releaseId}) }], sprints: ['detailsRelease', function(detailsRelease) { return detailsRelease.sprints; }] }, children: [ { name: 'details', url: "/details", views: { "details@planning": { templateUrl: 'release.details.html', controller: 'releaseDetailsCtrl' } } }, { name: 'story', url: "/story", children: [getStoryDetailsState('@planning')] }, { name: 'sprint', url: "/sprint", children: [ { name: 'new', url: "/new", views: { "details@planning": { templateUrl: 'sprint.new.html', controller: 'sprintNewCtrl' } } }, { name: 'withId', url: "/{sprintId:int}", resolve: { detailsSprint: ['$stateParams', 'detailsRelease', function($stateParams, detailsRelease) { return _.find(detailsRelease.sprints, {id: $stateParams.sprintId}); }] }, children: [ { name: 'details', url: "/details", views: { "details@planning": { templateUrl: 'sprint.details.html', controller: 'sprintDetailsCtrl' } } }, { name: 'story', url: 
"/story", children: [getStoryDetailsState('@planning')] } ] }, { name: 'multiple', url: "/{sprintListId:[0-9]+(?:[\,][0-9]+)+}", children: [ { name: 'details', url: "/details", views: { "details@planning": { templateUrl: 'sprint.multiple.html', controller: 'sprintMultipleCtrl' } } }, { name: 'story', url: "/story", children: [getStoryDetailsState('@planning')] } ] } ] } ] } ] }) .state({ name: 'taskBoard', url: "/taskBoard/{sprintId:int}", params: { sprintId: {value: null, squash: true} }, templateUrl: 'openWindow/taskBoard', controller: 'taskBoardCtrl', resolve: { project: ['Session', function(Session) { return Session.getProjectPromise(); }], sprint: ['$stateParams', '$q', 'SprintService', 'StoryService', 'TaskService', 'project', function($stateParams, $q, SprintService, StoryService, TaskService, project) { var promise = !$stateParams.sprintId ? SprintService.getCurrentOrNextSprint(project) : SprintService.get($stateParams.sprintId, project); return promise.then(function(sprint) { return sprint.id == undefined ? 
undefined : StoryService.listByType(sprint).then(function() { return TaskService.list(sprint).then(function() { return sprint; }); }); }) }] }, children: [ { name: 'details', url: "/details", resolve: { detailsSprint: ['sprint', function(sprint) { return sprint; }] }, views: { "details": { templateUrl: 'sprint.details.html', controller: 'sprintDetailsCtrl' } } }, { name: 'task', url: "/task", resolve: { taskContext: ['sprint', function(sprint) { return sprint; }] }, children: [ { name: 'new', url: "/new", params: { taskTemplate: null }, views: { "details@taskBoard": { templateUrl: 'task.new.html', controller: 'taskNewCtrl' } } }, getTaskDetailsState('@taskBoard') ] }, { name: 'story', url: "/story", children: [getStoryDetailsState('@taskBoard')] } ] }); }]) .config(['flowFactoryProvider', function(flowFactoryProvider) { flowFactoryProvider.defaults = { target: 'attachment/save', //only one at the time => prevent staleObjectException simultaneousUploads: 1 }; flowFactoryProvider.on('catchAll', function(event) { console.log('catchAll', arguments); }); }]) .config(['notificationsProvider', function(notificationsProvider) { notificationsProvider.setDefaults({ faIcons: true, closeOnRouteChange: 'state', duration: 4500 }); }]) .config(['$uibTooltipProvider', function($uibTooltipProvider) { $uibTooltipProvider.options({appendToBody: true}); }]) .config(['uibDatepickerConfig', function(uibDatepickerConfig) { angular.extend(uibDatepickerConfig, { startingDay: 1 // TODO make it i18n }); }]) .config(['uibDatepickerPopupConfig', function(uibDatepickerPopupConfig) { angular.extend(uibDatepickerPopupConfig, { showButtonBar: false, datepickerPopup: 'dd/MM/yyyy' // TODO make it i18n }); }]) .config(['uiSelectConfig', function(uiSelectConfig) { uiSelectConfig.theme = 'select2'; uiSelectConfig.appendToBody = true; uiSelectConfig.searchEnabled = false; }]) .config(['$animateProvider', function($animateProvider) { $animateProvider.classNameFilter(/ng-animate-enabled/); }]) 
.factory('AuthInterceptor', ['$rootScope', '$q', 'SERVER_ERRORS', function($rootScope, $q, SERVER_ERRORS) { return { responseError: function(response) { if (response.status === 401) { $rootScope.$broadcast(SERVER_ERRORS.notAuthenticated, response); } else if (response.status === 403) { $rootScope.$broadcast(SERVER_ERRORS.notAuthorized, response); } else if (response.status === 419 || response.status === 440) { $rootScope.$broadcast(SERVER_ERRORS.sessionTimeout, response); } else if (response.status > 399 && response.status <= 499) { $rootScope.$broadcast(SERVER_ERRORS.clientError, response); } else if (response.status > 499) { $rootScope.$broadcast(SERVER_ERRORS.serverError, response); } return $q.reject(response); } }; }]) .factory('UserTimeZone', function() { return jstz.determine(); }) .run(['Session', '$rootScope', '$timeout', '$state', '$uibModal', '$filter', '$document', '$window', '$interval', 'notifications', function(Session, $rootScope, $timeout, $state, $uibModal, $filter, $document, $window, $interval, notifications) { //used to handle click with shortcut hotkeys $rootScope.hotkeyClick = function(event, hotkey) { if (hotkey.el && (hotkey.el.is("a") || hotkey.el.is("button"))) { event.preventDefault(); $timeout(function() { hotkey.el.click(); }); } }; var $download; $rootScope.downloadFile = function(url) { if ($download) { $download.attr('src', url); } else { $download = $('<iframe>', {id: 'idown', src: url}).hide().appendTo('body'); } }; $rootScope.message = function(code, args) { var text = messages[code] ? 
messages[code] : code; angular.forEach(args, function(arg, index) { var placeholderMatcher = new RegExp('\\{' + index + '\\}', 'g'); text = text.replace(placeholderMatcher, arg); }); return text; }; $rootScope.notifySuccess = function(code, options) { return notifications.success('', $rootScope.message(code), options); }; $rootScope.notifyError = function(code, options) { return notifications.error('', $rootScope.message(code), options); }; $rootScope.notifyWarning = function(code, options) { return notifications.warning('', $rootScope.message(code), options); }; $rootScope.inEditingMode = false; $rootScope.setInEditingMode = function(inEditingMode) { $rootScope.inEditingMode = inEditingMode; }; $rootScope.isInEditingMode = function() { return $rootScope.inEditingMode; }; $rootScope.resetFormValidation = function(form) { if (form) { form.$setPristine(); form.$setUntouched(); } }; $rootScope.confirm = function(options) { var callCallback = function() { if (options.args) { options.callback.apply(options.callback, options.args); } else { options.callback(); } }; if (options.condition !== false) { var modal = $uibModal.open({ templateUrl: 'confirm.modal.html', size: 'sm', controller: ["$scope", "hotkeys", function($scope, hotkeys) { $scope.message = options.message; $scope.submit = function() { callCallback(); $scope.$close(true); }; // Required because there is not input so the form cannot be submitted by "return" hotkeys.bindTo($scope).add({ combo: 'return', callback: $scope.submit }); }] }); var callCloseCallback = function(confirmed) { if (!confirmed && options.closeCallback) { options.closeCallback(); } }; modal.result.then(callCloseCallback, callCloseCallback); } else { callCallback(); } }; $rootScope.showCopyModal = function(title, value) { $uibModal.open({ templateUrl: 'copy.html', size: 'sm', controller: ["$scope", function($scope) { $scope.title = title; $scope.value = value; }] }); }; $rootScope.immutableAddDaysToDate = function(date, days) { var newDate = 
new Date(date); newDate.setDate(date.getDate() + days); return newDate; }; $rootScope.immutableAddMonthsToDate = function(date, months) { var newDate = new Date(date); newDate.setMonth(date.getMonth() + months); return newDate; }; $rootScope.showAuthModal = function(username, loginSuccess, loginFailure) { var childScope = $rootScope.$new(); if (username) { childScope.username = username; } var loginCallback = null; if(loginSuccess){ childScope.loginCallback = true; loginCallback = function(loggedIn){ if(loggedIn){ Session.create().then(function(){ loginSuccess(); }); } else { loginFailure(); } }; } $uibModal.open({ keyboard: false, templateUrl: $rootScope.serverUrl + '/login/auth', controller: 'loginCtrl', scope: childScope, size: 'sm' }).result.then(loginCallback); }; $rootScope.durationBetweenDates = function(startDateString, endDateString) { var duration = new Date(endDateString) - new Date(startDateString); return Math.floor(duration / (1000 * 3600 * 24)) + 1; }; $rootScope.openProject = function(project) { document.location = $rootScope.serverUrl + '/p/' + project.pkey + '/'; }; $rootScope.openDatepicker = function($event, holder) { $event.preventDefault(); $event.stopPropagation(); if (holder) { holder.opened = true; } }; // TODO Change ugly hack $rootScope.serverUrl = icescrum.grailsServer; $rootScope.integerSuite = []; for (var i = 0; i < 100; i++) { $rootScope.integerSuite.push(i); } $rootScope.integerSuiteNullable = ['?'].concat($rootScope.integerSuite); $rootScope.fibonacciSuite = [0, 1, 2, 3, 5, 8, 13, 21, 34]; $rootScope.fibonacciSuiteNullable = ['?'].concat($rootScope.fibonacciSuite); $rootScope.app = { asList: false, loading: true, loadingPercent: 0, isFullScreen: false, selectableMultiple: false }; // To be able to track state in views $rootScope.$state = $state; var messages = {}; $rootScope.initMessages = function(initMessages) { messages = initMessages; }; $rootScope.applicationMenus = []; $rootScope.initApplicationMenus = function(initMenus) { 
$rootScope.applicationMenus = initMenus; var menusByVisibility = _.groupBy(initMenus, 'visible'); $rootScope.menus = { visible: _.sortBy(menusByVisibility[true], 'position'), hidden: _.sortBy(menusByVisibility[false], 'position') } }; $rootScope.sortableScrollOptions = function(scrollableContainerSelector) { if (!scrollableContainerSelector) { scrollableContainerSelector = '.panel-body'; } var scrollSpeed = 0; var destScrollableContainer; // Store the dest container because it cannot be retrieved (mouse must be on panel to get the element) and used (mouse is out of panel when we must scroll) in the same move var scheduledScroll = null; var cancelScheduledScroll = function() { scrollSpeed = 0; if (scheduledScroll) { $interval.cancel(scheduledScroll); scheduledScroll = null; } }; return { dragMove: function(itemPosition, containment, eventObj) { $rootScope.app.sortableMoving = true; if (eventObj) { // This HORRIBLE SOUP isolated in a private function gets the dest panel body and stores it in a captured variable. // There may be a better way but it is the way ng-sortable does it (function(eventObj) { var destX = eventObj.pageX - $document[0].documentElement.scrollLeft; var destY = eventObj.pageY - ($window.pageYOffset || $document[0].documentElement.scrollTop); $document[0].elementFromPoint(destX, destY); // This is done twice on purpose, ng-sortable does it like that (don't know why though...) 
var destElement = angular.element($document[0].elementFromPoint(destX, destY)); // Gets the DOM element under the cursor function fetchScope(element) { var scope; while (!scope && element.length) { scope = element.data('_scope'); if (!scope) { element = element.parent(); } } return scope; } var destScope = fetchScope(destElement); // Retrieve the closest scope from the DOM element if (destScope) { destScrollableContainer = angular.element(destScope.element).closest(scrollableContainerSelector)[0]; // Store the dest scrollable container for later use } })(eventObj); // Retrieve scrollable container, very likely stored during a previous move, and scroll if needed (for the moment scroll occurs only when moving) if (destScrollableContainer) { var marginAroundCursor = 30; var targetY = eventObj.pageY - ($window.pageYOffset || $document[0].documentElement.scrollTop); var containerRect = destScrollableContainer.getBoundingClientRect(); var topDifference = containerRect.top - targetY + marginAroundCursor; var bottomDifference = containerRect.bottom - targetY - marginAroundCursor; var cursorUpperThanPanel = topDifference > 0; var cursorLowerThanPanel = bottomDifference < 0; if (cursorUpperThanPanel || cursorLowerThanPanel) { var computeSpeed = function(difference) { return Math.floor(difference / 4); // Magic formula }; scrollSpeed = cursorUpperThanPanel ? 
computeSpeed(topDifference) : computeSpeed(bottomDifference); var moveScroll = function() { destScrollableContainer.scrollTop = destScrollableContainer.scrollTop - scrollSpeed; }; moveScroll(); // With the solution above, scroll occurs only when moving the cursor so we define a recurring callback to sustain the scroll when not moving if (!scheduledScroll) { var timeInterval = 4; // 4 ms scheduledScroll between each automatic scroll scheduledScroll = $interval(moveScroll, timeInterval); } } else if (scheduledScroll != null) { cancelScheduledScroll(); } } } }, dragEnd: function() { $rootScope.app.sortableMoving = false; cancelScheduledScroll(); // Prevent persistent scroll in case of release out of sortable container } } }; $rootScope.$on('$stateChangeStart', function (event, toState, toParams, fromState, fromParams, options) { if (!event.defaultPrevented) { var state = toState.$$state(); authorized = true; if (state.isSetAuthorize()) { var authorized = false; _.every(state.data.authorize.roles, function (role) { authorized = role.indexOf('!') > -1 ? 
!Session[role.substring(role.indexOf('!') + 1)]() : (Session[role]() === true); return authorized; }); } if (!authorized) { event.preventDefault(); if (!Session.authenticated()) { $rootScope.showAuthModal('',function(){ $state.go(toState.name); }); } else { $state.go(fromState.name); } } } }); }]) .constant('SERVER_ERRORS', { loginFailed: 'auth-login-failed', sessionTimeout: 'auth-session-timeout', notAuthenticated: 'auth-not-authenticated', notAuthorized: 'auth-not-authorized', clientError: 'client-error', serverError: 'server-error' }) .constant('BacklogCodes', { SANDBOX: 'sandbox', BACKLOG: 'backlog', DONE: 'done', ALL: 'all', SPRINT: 'sprint' }) .constant('StoryStatesByName', { "SUGGESTED": 1, "ACCEPTED": 2, "ESTIMATED": 3, "PLANNED": 4, "IN_PROGRESS": 5, "DONE": 7, "ICEBOX": -1 }) .constant('TaskStatesByName', { "WAIT": 0, "IN_PROGRESS": 1, "DONE": 2 }) .constant('AcceptanceTestStatesByName', { "TOCHECK": 1, "FAILED": 5, "SUCCESS": 10 }) .constant('SprintStatesByName', { "WAIT": 1, "IN_PROGRESS": 2, "DONE": 3 }) .constant('FeatureStatesByName', { "WAIT": 0, "IN_PROGRESS": 1, "DONE": 2 }) .constant('ReleaseStatesByName', { "WAIT": 1, "IN_PROGRESS": 2, "DONE": 3 }) .constant('MoodFeelingsByName', { "GOOD": 2, "MEH": 1, "BAD": 0 }) .constant('USER_ROLES', { // TODO consider deleting (used only for dev user role switch) PO_SM: 'PO_SM', PO: 'PO', SM: 'SM', TM: 'TM', SH: 'SH' }) .constant('IceScrumEventType', { CREATE: 'CREATE', UPDATE: 'UPDATE', DELETE: 'DELETE' }) .constant('TaskConstants', { ORDER_BY:[function(task) { return - task.type }, 'parentStory.rank', 'state', 'rank'] });
Formatting
grails-app/assets/javascripts/app/app.js
Formatting
<ide><path>rails-app/assets/javascripts/app/app.js <ide> } <ide> ]); <ide> <del> $stateProvider.decorator('parent', function (state, parentFn) { <del> state.self.$$state = function () { <add> $stateProvider.decorator('parent', function(state, parentFn) { <add> state.self.$$state = function() { <ide> return state; <ide> }; <del> <del> state.self.isSetAuthorize = function () { <add> state.self.isSetAuthorize = function() { <ide> return angular.isDefined(state.data) && angular.isDefined(state.data.authorize); <ide> }; <ide> return parentFn(state); <ide> childScope.username = username; <ide> } <ide> var loginCallback = null; <del> if(loginSuccess){ <add> if (loginSuccess) { <ide> childScope.loginCallback = true; <del> loginCallback = function(loggedIn){ <del> if(loggedIn){ <del> Session.create().then(function(){ <add> loginCallback = function(loggedIn) { <add> if (loggedIn) { <add> Session.create().then(function() { <ide> loginSuccess(); <ide> }); <ide> } else { <ide> } <ide> }; <ide> <del> $rootScope.$on('$stateChangeStart', function (event, toState, toParams, fromState, fromParams, options) { <add> $rootScope.$on('$stateChangeStart', function(event, toState, toParams, fromState, fromParams, options) { <ide> if (!event.defaultPrevented) { <ide> var state = toState.$$state(); <ide> authorized = true; <ide> if (state.isSetAuthorize()) { <ide> var authorized = false; <del> _.every(state.data.authorize.roles, function (role) { <add> _.every(state.data.authorize.roles, function(role) { <ide> authorized = role.indexOf('!') > -1 ? 
!Session[role.substring(role.indexOf('!') + 1)]() : (Session[role]() === true); <ide> return authorized; <ide> }); <ide> if (!authorized) { <ide> event.preventDefault(); <ide> if (!Session.authenticated()) { <del> $rootScope.showAuthModal('',function(){ <add> $rootScope.showAuthModal('', function() { <ide> $state.go(toState.name); <ide> }); <ide> } else { <ide> DELETE: 'DELETE' <ide> }) <ide> .constant('TaskConstants', { <del> ORDER_BY:[function(task) { return - task.type }, 'parentStory.rank', 'state', 'rank'] <add> ORDER_BY: [function(task) { return -task.type }, 'parentStory.rank', 'state', 'rank'] <ide> });
Java
apache-2.0
07e41c0e45566389fa5fde689dc061475682183c
0
asanka88/VelocityTemplateMediator
package org.asanka.dev; import org.apache.axiom.om.OMElement; import org.apache.commons.lang.StringUtils; import org.apache.synapse.Mediator; import org.apache.synapse.config.xml.AbstractMediatorFactory; import org.apache.synapse.config.xml.XMLConfigConstants; import org.apache.synapse.deployers.SynapseArtifactDeploymentException; import org.apache.synapse.util.xpath.SynapseXPath; import org.jaxen.JaxenException; import javax.xml.namespace.QName; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import java.util.Properties; /** * Created by asanka on 3/7/16. */ public class VelocityTemplateMediatorFactory extends AbstractMediatorFactory { public static final QName propertyTemplateElement=new QName(XMLConfigConstants.SYNAPSE_NAMESPACE,"velocityTemplate"); public static final QName formatElement=new QName(XMLConfigConstants.SYNAPSE_NAMESPACE,"format"); public static final QName argumentListElement =new QName(XMLConfigConstants.SYNAPSE_NAMESPACE,"args"); public static final QName argumentElement =new QName(XMLConfigConstants.SYNAPSE_NAMESPACE,"arg"); public static final QName targetElement =new QName(XMLConfigConstants.SYNAPSE_NAMESPACE,"target"); public static final QName expressionAttribute=new QName("expression"); public static final QName nameAttribute =new QName("name"); public static final QName scopeAttribute=new QName("scope"); public static final QName propertyTypeAttribute=new QName("property-type"); public static final QName mediaTypeAttribute=new QName("media-type"); public static final QName targetType=new QName("target-type"); @Override protected Mediator createSpecificMediator(OMElement omElement, Properties properties) { VelocityTemplateMediator mediator=new VelocityTemplateMediator(); String mediaTypeAttrValue = omElement.getAttributeValue(mediaTypeAttribute); String mediaType = StringUtils.isEmpty(mediaTypeAttrValue)?"xml":mediaTypeAttrValue; mediator.setMediaType(mediaType);//setting media type OMElement format = 
omElement.getFirstChildWithName(formatElement); if(format == null || (StringUtils.equals("xml",mediaType)&& format.getFirstElement()==null) || (StringUtils.equals("json",mediaType)&& StringUtils.isEmpty(format.getText()))){ //meets failure condition //format element is null or //if xml this doesn't have xml template body or //if json this doesn't have json string throw new SynapseArtifactDeploymentException("Template format is empty in PropertyTemplate Mediator"); } //if media type is xml then the template body is first element of the format element //other wise it is json, then it is json string wrapped by format element String templateBody=(StringUtils.equals("xml",mediaType))?format.getFirstElement().toString():format.getText(); mediator.setBody(templateBody); OMElement argumentList = omElement.getFirstChildWithName(argumentListElement); Iterator<OMElement> argumentsIterator = argumentList.getChildrenWithName(argumentElement); Map<String,SynapseXPath> synXpathMap= new HashMap<String, SynapseXPath>(); while(argumentsIterator.hasNext()){ OMElement argument = argumentsIterator.next(); String name = argument.getAttributeValue(nameAttribute); String xpathExpression = argument.getAttributeValue(expressionAttribute); if(StringUtils.isEmpty(xpathExpression) || StringUtils.isEmpty(name)){ throw new SynapseArtifactDeploymentException("expression or name attribute is missing in the arg element"); } try { synXpathMap.put(name,new SynapseXPath(xpathExpression)); } catch (JaxenException e) { handleException("Error while constructing xpath from argument "+xpathExpression); } } mediator.setxPathExpressions(synXpathMap); OMElement targetEle = omElement.getFirstChildWithName(targetElement); if(targetElement==null){ throw new SynapseArtifactDeploymentException("Target element is missing in the Template Mediator"); } String targetTypeValue =targetEle.getAttributeValue(targetType); targetTypeValue=(StringUtils.isEmpty(targetTypeValue))?"body":targetTypeValue; 
mediator.setTargetType(targetTypeValue); if(StringUtils.equalsIgnoreCase("property",targetTypeValue)){ //if the target type is property then property name is mandotary String propertyName = targetEle.getAttributeValue(nameAttribute); String propertyType = targetEle.getAttributeValue(propertyTypeAttribute); if(StringUtils.isEmpty(propertyName)){ throw new SynapseArtifactDeploymentException("property name attribute is required in Template Mediator," + " when the type is property"); } String scope = targetEle.getAttributeValue(scopeAttribute); scope=(StringUtils.isEmpty(scope))?"synapse":scope; propertyType=(StringUtils.isEmpty(propertyType))?"string":propertyType; mediator.setPropertyName(propertyName); mediator.setScope(scope); mediator.setPropertyType(propertyType); } return mediator; } public QName getTagQName() { return propertyTemplateElement; } }
src/main/java/org/asanka/dev/VelocityTemplateMediatorFactory.java
package org.asanka.dev; import org.apache.axiom.om.OMElement; import org.apache.commons.lang.StringUtils; import org.apache.synapse.Mediator; import org.apache.synapse.config.xml.AbstractMediatorFactory; import org.apache.synapse.config.xml.XMLConfigConstants; import org.apache.synapse.deployers.SynapseArtifactDeploymentException; import org.apache.synapse.util.xpath.SynapseXPath; import org.jaxen.JaxenException; import javax.xml.namespace.QName; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import java.util.Properties; /** * Created by asanka on 3/7/16. */ public class VelocityTemplateMediatorFactory extends AbstractMediatorFactory { public static final QName propertyTemplateElement=new QName(XMLConfigConstants.SYNAPSE_NAMESPACE,"velocityTemplate"); public static final QName formatElement=new QName(XMLConfigConstants.SYNAPSE_NAMESPACE,"format"); public static final QName argumentListElement =new QName(XMLConfigConstants.SYNAPSE_NAMESPACE,"args"); public static final QName argumentElement =new QName(XMLConfigConstants.SYNAPSE_NAMESPACE,"arg"); public static final QName targetElement =new QName(XMLConfigConstants.SYNAPSE_NAMESPACE,"target"); public static final QName expressionAttribute=new QName("expression"); public static final QName nameAttribute =new QName("name"); public static final QName scopeAttribute=new QName("scope"); public static final QName propertyTypeAttribute=new QName("property-type"); public static final QName mediaTypeAttribute=new QName("media-type"); public static final QName targetType=new QName("target-type"); @Override protected Mediator createSpecificMediator(OMElement omElement, Properties properties) { VelocityTemplateMediator mediator=new VelocityTemplateMediator(); String mediaTypeAttrValue = omElement.getAttributeValue(mediaTypeAttribute); String mediaType = StringUtils.isEmpty(mediaTypeAttrValue)?"xml":mediaTypeAttrValue; mediator.setMediaType(mediaType);//setting media type OMElement format = 
omElement.getFirstChildWithName(formatElement); if(format == null || (StringUtils.equals("xml",mediaType)&& format.getFirstElement()==null) || (StringUtils.equals("json",mediaType)&& StringUtils.isEmpty(format.getText()))){ //meets failure condition //format element is null or //if xml this doesn't have xml template body or //if json this doesn't have json string throw new SynapseArtifactDeploymentException("Template format is empty in PropertyTemplate Mediator"); } //if media type is xml then the template body is first element of the format element //other wise it is json, then it is json string wrapped by format element String templateBody=(StringUtils.equals("xml",mediaType))?format.getFirstElement().toString():format.getText(); mediator.setBody(templateBody); OMElement argumentList = omElement.getFirstChildWithName(argumentListElement); Iterator<OMElement> argumentsIterator = argumentList.getChildrenWithName(argumentElement); Map<String,SynapseXPath> synXpathMap= new HashMap<String, SynapseXPath>(); while(argumentsIterator.hasNext()){ OMElement argument = argumentsIterator.next(); String name = argument.getAttributeValue(nameAttribute); String xpathExpression = argument.getAttributeValue(expressionAttribute); if(StringUtils.isEmpty(xpathExpression) || StringUtils.isEmpty(name)){ throw new SynapseArtifactDeploymentException("expression or name attribute is missing in the arg element"); } try { synXpathMap.put(name,new SynapseXPath(xpathExpression)); } catch (JaxenException e) { handleException("Error while construcing xpaths from argument "+xpathExpression); } } mediator.setxPathExpressions(synXpathMap); OMElement targetEle = omElement.getFirstChildWithName(targetElement); if(targetElement==null){ throw new SynapseArtifactDeploymentException("Target element is missing in the Template Mediator"); } String targetTypeValue =targetEle.getAttributeValue(targetType); targetTypeValue=(StringUtils.isEmpty(targetTypeValue))?"body":targetTypeValue; 
mediator.setTargetType(targetTypeValue); if(StringUtils.equalsIgnoreCase("property",targetTypeValue)){ //if the target type is property then property name is mandotary String propertyName = targetEle.getAttributeValue(nameAttribute); String propertyType = targetEle.getAttributeValue(propertyTypeAttribute); if(StringUtils.isEmpty(propertyName)){ throw new SynapseArtifactDeploymentException("property name attribute is required in Template Mediator," + " when the type is property"); } String scope = targetEle.getAttributeValue(scopeAttribute); scope=(StringUtils.isEmpty(scope))?"synapse":scope; propertyType=(StringUtils.isEmpty(propertyType))?"string":propertyType; mediator.setPropertyName(propertyName); mediator.setScope(scope); mediator.setPropertyType(propertyType); } return mediator; } public QName getTagQName() { return propertyTemplateElement; } }
Fixing a typo
src/main/java/org/asanka/dev/VelocityTemplateMediatorFactory.java
Fixing a typo
<ide><path>rc/main/java/org/asanka/dev/VelocityTemplateMediatorFactory.java <ide> try { <ide> synXpathMap.put(name,new SynapseXPath(xpathExpression)); <ide> } catch (JaxenException e) { <del> handleException("Error while construcing xpaths from argument "+xpathExpression); <add> handleException("Error while constructing xpath from argument "+xpathExpression); <ide> } <ide> } <ide>
Java
apache-2.0
c29c9e6241da29622364483a38bfc1185754837e
0
jalkjaer/voldemort,voldemort/voldemort,medallia/voldemort,dallasmarlow/voldemort,medallia/voldemort,rickbw/voldemort,gnb/voldemort,PratikDeshpande/voldemort,PratikDeshpande/voldemort,dallasmarlow/voldemort,voldemort/voldemort,bhasudha/voldemort,jeffpc/voldemort,FelixGV/voldemort,jwlent55/voldemort,HB-SI/voldemort,bhasudha/voldemort,rickbw/voldemort,PratikDeshpande/voldemort,dallasmarlow/voldemort,HB-SI/voldemort,medallia/voldemort,LeoYao/voldemort,gnb/voldemort,dallasmarlow/voldemort,jalkjaer/voldemort,mabh/voldemort,gnb/voldemort,PratikDeshpande/voldemort,jalkjaer/voldemort,jwlent55/voldemort,bitti/voldemort,bitti/voldemort,voldemort/voldemort,cshaxu/voldemort,stotch/voldemort,arunthirupathi/voldemort,medallia/voldemort,squarY/voldemort,gnb/voldemort,bhasudha/voldemort,medallia/voldemort,bitti/voldemort,FelixGV/voldemort,arunthirupathi/voldemort,birendraa/voldemort,squarY/voldemort,rickbw/voldemort,squarY/voldemort,jeffpc/voldemort,LeoYao/voldemort,rickbw/voldemort,bhasudha/voldemort,arunthirupathi/voldemort,mabh/voldemort,jalkjaer/voldemort,HB-SI/voldemort,mabh/voldemort,gnb/voldemort,jeffpc/voldemort,stotch/voldemort,cshaxu/voldemort,birendraa/voldemort,jeffpc/voldemort,voldemort/voldemort,jalkjaer/voldemort,jwlent55/voldemort,mabh/voldemort,FelixGV/voldemort,FelixGV/voldemort,voldemort/voldemort,squarY/voldemort,PratikDeshpande/voldemort,jwlent55/voldemort,cshaxu/voldemort,jalkjaer/voldemort,FelixGV/voldemort,cshaxu/voldemort,FelixGV/voldemort,birendraa/voldemort,HB-SI/voldemort,squarY/voldemort,birendraa/voldemort,null-exception/voldemort,birendraa/voldemort,bitti/voldemort,jwlent55/voldemort,PratikDeshpande/voldemort,arunthirupathi/voldemort,dallasmarlow/voldemort,jeffpc/voldemort,rickbw/voldemort,LeoYao/voldemort,squarY/voldemort,squarY/voldemort,voldemort/voldemort,FelixGV/voldemort,bhasudha/voldemort,null-exception/voldemort,bitti/voldemort,cshaxu/voldemort,rickbw/voldemort,null-exception/voldemort,null-exception/voldemort,bitti/voldemort,birendraa/voldemor
t,mabh/voldemort,HB-SI/voldemort,jalkjaer/voldemort,arunthirupathi/voldemort,null-exception/voldemort,jwlent55/voldemort,jeffpc/voldemort,LeoYao/voldemort,LeoYao/voldemort,dallasmarlow/voldemort,arunthirupathi/voldemort,voldemort/voldemort,stotch/voldemort,bitti/voldemort,bhasudha/voldemort,stotch/voldemort,stotch/voldemort,LeoYao/voldemort,HB-SI/voldemort,medallia/voldemort,stotch/voldemort,arunthirupathi/voldemort,mabh/voldemort,null-exception/voldemort,cshaxu/voldemort,gnb/voldemort
/* * Copyright 2010 LinkedIn, Inc * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package voldemort.store.slop; import java.util.Collection; import java.util.Collections; import java.util.List; import voldemort.cluster.Cluster; import voldemort.cluster.Node; import voldemort.cluster.Zone; import com.google.common.collect.Lists; /** * A strategy that hands a hint off to any random live node in the ring */ public class HandoffToAnyStrategy implements HintedHandoffStrategy { private final List<Node> nodes; private final Collection<Zone> zones; private final boolean enableZoneRouting; private final int clientZoneId; /** * Creates a to-any handoff strategy instance * * @param cluster The cluster * @param enableZoneRouting Is zone routing enabled * @param clientZoneId Client zone id */ public HandoffToAnyStrategy(Cluster cluster, boolean enableZoneRouting, int clientZoneId) { this.nodes = Lists.newArrayList(cluster.getNodes()); this.zones = cluster.getZones(); this.enableZoneRouting = enableZoneRouting; this.clientZoneId = clientZoneId; } public List<Node> routeHint(Node origin) { List<Node> prefList = Lists.newArrayListWithCapacity(nodes.size()); int originZoneId = origin.getZoneId(); for(Node node: nodes) { if(node.getId() != origin.getId()) { if(enableZoneRouting && zones.size() > 1) { if(originZoneId == clientZoneId) { if(node.getZoneId() != clientZoneId) continue; } else { if(node.getZoneId() == originZoneId) continue; } } prefList.add(node); } } Collections.shuffle(prefList); 
return prefList; } @Override public String toString() { return "HandoffToAllStrategy(" + nodes + ")"; } }
src/java/voldemort/store/slop/HandoffToAnyStrategy.java
/* * Copyright 2010 LinkedIn, Inc * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package voldemort.store.slop; import java.util.Collection; import java.util.Collections; import java.util.List; import voldemort.cluster.Cluster; import voldemort.cluster.Node; import voldemort.cluster.Zone; import com.google.common.collect.Lists; /** * A strategy that hands a hint off to any random live node in the ring */ public class HandoffToAnyStrategy implements HintedHandoffStrategy { private final List<Node> nodes; private final Collection<Zone> zones; private final boolean enableZoneRouting; private final int clientZoneId; /** * Creates a to-any handoff strategy instance * * @param cluster The cluster * @param enableZoneRouting Is zone routing enabled * @param clientZoneId Client zone id */ public HandoffToAnyStrategy(Cluster cluster, boolean enableZoneRouting, int clientZoneId) { this.nodes = Lists.newArrayList(cluster.getNodes()); this.zones = cluster.getZones(); this.enableZoneRouting = enableZoneRouting; this.clientZoneId = clientZoneId; } public List<Node> routeHint(Node origin) { List<Node> prefList = Lists.newArrayListWithCapacity(nodes.size()); int originZoneId = origin.getZoneId(); // Adding shuffle before rather than after since then it prefers nodes // closeby Collections.shuffle(prefList); for(Node node: nodes) { if(node.getId() != origin.getId()) { if(enableZoneRouting && zones.size() > 1) { if(originZoneId == clientZoneId) { if(node.getZoneId() != clientZoneId) continue; } 
else { if(node.getZoneId() == originZoneId) continue; } } prefList.add(node); } } return prefList; } @Override public String toString() { return "HandoffToAllStrategy(" + nodes + ")"; } }
Fixed bug in HandoffToAny
src/java/voldemort/store/slop/HandoffToAnyStrategy.java
Fixed bug in HandoffToAny
<ide><path>rc/java/voldemort/store/slop/HandoffToAnyStrategy.java <ide> List<Node> prefList = Lists.newArrayListWithCapacity(nodes.size()); <ide> int originZoneId = origin.getZoneId(); <ide> <del> // Adding shuffle before rather than after since then it prefers nodes <del> // closeby <del> Collections.shuffle(prefList); <ide> for(Node node: nodes) { <ide> if(node.getId() != origin.getId()) { <ide> if(enableZoneRouting && zones.size() > 1) { <ide> prefList.add(node); <ide> } <ide> } <add> Collections.shuffle(prefList); <ide> return prefList; <ide> } <ide>
Java
bsd-3-clause
e28f9736c635a6c9ae0fc29ad78fd4590282e71f
0
UCDenver-ccp/ccp-nlp,UCDenver-ccp/ccp-nlp
/** * */ package edu.ucdenver.ccp.nlp.ext.uima.annotators.entitynormalization.protein.evaluation; import java.io.File; import org.apache.uima.analysis_engine.AnalysisEngineDescription; import org.apache.uima.analysis_engine.AnalysisEngineProcessException; import org.apache.uima.resource.ResourceInitializationException; import org.apache.uima.resource.metadata.TypeSystemDescription; import org.junit.Ignore; import org.junit.Test; import org.uimafit.factory.AnalysisEngineFactory; import org.uimafit.factory.TypeSystemDescriptionFactory; import edu.ucdenver.ccp.common.test.DefaultTestCase; import edu.ucdenver.ccp.nlp.core.uima.util.AnalysisEngineType; import edu.ucdenver.ccp.nlp.core.uima.util.TypeSystemUtil; import edu.ucdenver.ccp.nlp.ext.uima.annotators.entitynormalization.gene.evaluation.BioCreative3GnEvaluationPipeline; import edu.ucdenver.ccp.nlp.ext.uima.annotators.entitynormalization.protein.ProOntologyProteinNormalization_AAE; import edu.ucdenver.ccp.nlp.ext.uima.annotators.sentencedetectors.LingPipeSentenceDetector_AE; import edu.ucdenver.ccp.nlp.ext.uima.annotators.sentencedetectors.SentenceDetector_AE; /** * Evaluates the HomologeneGroupGeneNormalization pipeline against gold standard corpora * * @author Center for Computational Pharmacology, UC Denver; [email protected] * */ @Ignore("need to either convert from entrez gene to pro ID, or use a different gold standard") public class ProteinOntologyNormalizationAE_Evaluation extends DefaultTestCase { private static final TypeSystemDescription TSD = TypeSystemDescriptionFactory.createTypeSystemDescription( TypeSystemUtil.CCP_TYPE_SYSTEM, "edu.ucdenver.ccp.nlp.ext.uima.syntax.TypeSystem"); private static final File PRO_DICTIONARY_DIRECTORY = new File( "/data/NOT_BACKED_UP/projects/btrc/pro-dictionary/dictionary"); @Test public void evaluateOnBioCreative3TrainingDataSet() throws AnalysisEngineProcessException, ResourceInitializationException { AnalysisEngineDescription sentenceDetectorDescription = 
AnalysisEngineFactory.createPrimitiveDescription( LingPipeSentenceDetector_AE.class, TSD, SentenceDetector_AE.PARAM_TREAT_LINE_BREAKS_AS_SENTENCE_BOUNDARIES, true); BioCreative3GnEvaluationPipeline evalPipeline = new BioCreative3GnEvaluationPipeline(TSD); evalPipeline.evaluateNormalizationPipeline(ProOntologyProteinNormalization_AAE.getAggregateDescription(TSD, PRO_DICTIONARY_DIRECTORY, sentenceDetectorDescription), AnalysisEngineType.AGGREGATE); } }
ccp-nlp-ext-uima-annotators-entitynormalization/src/test/java/edu/ucdenver/ccp/nlp/ext/uima/annotators/entitynormalization/protein/evaluation/ProteinOntologyNormalizationAE_Evaluation.java
/** * */ package edu.ucdenver.ccp.nlp.ext.uima.annotators.entitynormalization.protein.evaluation; import java.io.File; import org.apache.uima.analysis_engine.AnalysisEngineProcessException; import org.apache.uima.resource.ResourceInitializationException; import org.apache.uima.resource.metadata.TypeSystemDescription; import org.junit.Ignore; import org.junit.Test; import org.uimafit.factory.TypeSystemDescriptionFactory; import edu.ucdenver.ccp.common.test.DefaultTestCase; import edu.ucdenver.ccp.nlp.core.uima.util.AnalysisEngineType; import edu.ucdenver.ccp.nlp.core.uima.util.TypeSystemUtil; import edu.ucdenver.ccp.nlp.ext.uima.annotators.entitynormalization.gene.evaluation.BioCreative3GnEvaluationPipeline; import edu.ucdenver.ccp.nlp.ext.uima.annotators.entitynormalization.protein.ProOntologyProteinNormalization_AAE; /** * Evaluates the HomologeneGroupGeneNormalization pipeline against gold standard corpora * * @author Center for Computational Pharmacology, UC Denver; [email protected] * */ @Ignore("need to either convert from entrez gene to pro ID, or use a different gold standard") public class ProteinOntologyNormalizationAE_Evaluation extends DefaultTestCase { private static final TypeSystemDescription TSD = TypeSystemDescriptionFactory.createTypeSystemDescription( TypeSystemUtil.CCP_TYPE_SYSTEM, "edu.ucdenver.ccp.nlp.ext.uima.syntax.TypeSystem"); private static final File PRO_DICTIONARY_DIRECTORY = new File( "/data/NOT_BACKED_UP/projects/btrc/pro-dictionary/dictionary"); @Test public void evaluateOnBioCreative3TrainingDataSet() throws AnalysisEngineProcessException, ResourceInitializationException { BioCreative3GnEvaluationPipeline evalPipeline = new BioCreative3GnEvaluationPipeline(TSD); evalPipeline.evaluateNormalizationPipeline( ProOntologyProteinNormalization_AAE.getAggregateDescription(TSD, PRO_DICTIONARY_DIRECTORY), AnalysisEngineType.AGGREGATE); } }
IN PROGRESS - issue BEL-1: PRO normalization over BEL small corpus http://compbio.ucdenver.edu:8080/browse/BEL-1 Made the sentence detector descriptor a parameter for the getAggregateDescription() method
ccp-nlp-ext-uima-annotators-entitynormalization/src/test/java/edu/ucdenver/ccp/nlp/ext/uima/annotators/entitynormalization/protein/evaluation/ProteinOntologyNormalizationAE_Evaluation.java
IN PROGRESS - issue BEL-1: PRO normalization over BEL small corpus http://compbio.ucdenver.edu:8080/browse/BEL-1
<ide><path>cp-nlp-ext-uima-annotators-entitynormalization/src/test/java/edu/ucdenver/ccp/nlp/ext/uima/annotators/entitynormalization/protein/evaluation/ProteinOntologyNormalizationAE_Evaluation.java <ide> <ide> import java.io.File; <ide> <add>import org.apache.uima.analysis_engine.AnalysisEngineDescription; <ide> import org.apache.uima.analysis_engine.AnalysisEngineProcessException; <ide> import org.apache.uima.resource.ResourceInitializationException; <ide> import org.apache.uima.resource.metadata.TypeSystemDescription; <ide> import org.junit.Ignore; <ide> import org.junit.Test; <add>import org.uimafit.factory.AnalysisEngineFactory; <ide> import org.uimafit.factory.TypeSystemDescriptionFactory; <ide> <ide> import edu.ucdenver.ccp.common.test.DefaultTestCase; <ide> import edu.ucdenver.ccp.nlp.core.uima.util.TypeSystemUtil; <ide> import edu.ucdenver.ccp.nlp.ext.uima.annotators.entitynormalization.gene.evaluation.BioCreative3GnEvaluationPipeline; <ide> import edu.ucdenver.ccp.nlp.ext.uima.annotators.entitynormalization.protein.ProOntologyProteinNormalization_AAE; <add>import edu.ucdenver.ccp.nlp.ext.uima.annotators.sentencedetectors.LingPipeSentenceDetector_AE; <add>import edu.ucdenver.ccp.nlp.ext.uima.annotators.sentencedetectors.SentenceDetector_AE; <ide> <ide> /** <ide> * Evaluates the HomologeneGroupGeneNormalization pipeline against gold standard corpora <ide> @Test <ide> public void evaluateOnBioCreative3TrainingDataSet() throws AnalysisEngineProcessException, <ide> ResourceInitializationException { <add> AnalysisEngineDescription sentenceDetectorDescription = AnalysisEngineFactory.createPrimitiveDescription( <add> LingPipeSentenceDetector_AE.class, TSD, <add> SentenceDetector_AE.PARAM_TREAT_LINE_BREAKS_AS_SENTENCE_BOUNDARIES, true); <ide> BioCreative3GnEvaluationPipeline evalPipeline = new BioCreative3GnEvaluationPipeline(TSD); <del> evalPipeline.evaluateNormalizationPipeline( <del> ProOntologyProteinNormalization_AAE.getAggregateDescription(TSD, 
PRO_DICTIONARY_DIRECTORY), <del> AnalysisEngineType.AGGREGATE); <add> evalPipeline.evaluateNormalizationPipeline(ProOntologyProteinNormalization_AAE.getAggregateDescription(TSD, <add> PRO_DICTIONARY_DIRECTORY, sentenceDetectorDescription), AnalysisEngineType.AGGREGATE); <ide> } <ide> <ide> }
Java
apache-2.0
7220202a17bb05cf215a1077617a294feb216fa5
0
LLParse/hdfs,mesosphere/hdfs,jan-zajic/mesos-hdfs,dmitrypekar/hdfs,nalingarg2/hdfs,jmlvanre/hdfs,jan-zajic/mesos-hbase,mesosphere/hdfs,Banno/hdfs,jan-zajic/mesos-hdfs,smorin/hdfs,smorin/hdfs,jan-zajic/mesos-hbase,Banno/hdfs,LLParse/hdfs,dmitrypekar/hdfs,edgefox/hdfs,kensipe/hdfs,andrewrothstein/hdfs,ryane/hdfs,kensipe/hdfs,nalingarg2/hdfs,andrewrothstein/hdfs,edgefox/hdfs,jmlvanre/hdfs,ryane/hdfs
package org.apache.mesos.hdfs.state; import com.google.protobuf.InvalidProtocolBufferException; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.mesos.MesosNativeLibrary; import org.apache.mesos.Protos; import org.apache.mesos.Protos.FrameworkID; import org.apache.mesos.hdfs.config.SchedulerConf; import org.apache.mesos.hdfs.util.HDFSConstants; import org.apache.mesos.state.Variable; import org.apache.mesos.state.ZooKeeperState; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; public class PersistentState { public static final Log log = LogFactory.getLog(PersistentState.class); private static String FRAMEWORK_ID_KEY = "frameworkId"; private static String NAMENODES_KEY = "nameNodes"; private static String JOURNALNODES_KEY = "journalNodes"; private static String DATANODES_KEY = "dataNodes"; private ZooKeeperState zkState; public PersistentState(SchedulerConf conf) { MesosNativeLibrary.load(conf.getNativeLibrary()); this.zkState = new ZooKeeperState(conf.getStateZkServers(), conf.getStateZkTimeout(), TimeUnit.MILLISECONDS, "/hdfs-mesos/" + conf.getFrameworkName()); } public FrameworkID getFrameworkID() throws InterruptedException, ExecutionException, InvalidProtocolBufferException { byte[] existingFrameworkId = zkState.fetch(FRAMEWORK_ID_KEY).get().value(); if (existingFrameworkId.length > 0) { return FrameworkID.parseFrom(existingFrameworkId); } else { return null; } } public void setFrameworkId(FrameworkID frameworkId) throws InterruptedException, ExecutionException { Variable value = zkState.fetch(FRAMEWORK_ID_KEY).get(); value = 
value.mutate(frameworkId.toByteArray()); zkState.store(value).get(); } public List<String> getDeadJournalNodes() { HashMap<String, String> journalNodes = getJournalNodes(); Set<String> journalHosts = journalNodes.keySet(); List<String> deadJournalHosts = new ArrayList<>(); for (String journalHost: journalHosts) { if (journalNodes.get(journalHost) == null) { deadJournalHosts.add(journalHost); } } return deadJournalHosts; } public List<String> getDeadNameNodes() { HashMap<String, String> nameNodes = getNameNodes(); Set<String> nameHosts = nameNodes.keySet(); List<String> deadNameHosts = new ArrayList<>(); for (String nameHost : nameHosts) { if (nameNodes.get(nameHost) == null) { deadNameHosts.add(nameHost); } } return deadNameHosts; } public List<String> getDeadDataNodes() { HashMap<String, String> dataNodes = getDataNodes(); Set<String> dataHosts = dataNodes.keySet(); List<String> deadDataHosts = new ArrayList<>(); for (String dataHost : dataHosts) { if (dataNodes.get(dataHost) == null) { deadDataHosts.add(dataHost); } } return deadDataHosts; } // TODO (nicgrayson) add tests with in-memory state implementation for zookeeper public HashMap<String, String> getJournalNodes() { return getHashMap(JOURNALNODES_KEY); } public HashMap<String, String> getNameNodes() { return getHashMap(NAMENODES_KEY); } public HashMap<String, String> getDataNodes() { return getHashMap(DATANODES_KEY); } public Collection<String> getAllTaskIds() { HashMap<String, String> allTasksIds = getJournalNodes(); allTasksIds.putAll(getNameNodes()); allTasksIds.putAll(getDataNodes()); return allTasksIds.values(); } public void addHdfsNode(Protos.TaskID taskId, String hostname, String taskName) { switch (taskName) { case HDFSConstants.NAME_NODE_ID : HashMap<String, String> nameNodes = getNameNodes(); nameNodes.put(hostname, taskId.getValue()); setNameNodes(nameNodes); break; case HDFSConstants.JOURNAL_NODE_ID : HashMap<String, String> journalNodes = getJournalNodes(); journalNodes.put(hostname, 
taskId.getValue()); setJournalNodes(journalNodes); break; case HDFSConstants.DATA_NODE_ID : HashMap<String, String> dataNodes = getDataNodes(); dataNodes.put(hostname, taskId.getValue()); setDataNodes(dataNodes); break; case HDFSConstants.ZKFC_NODE_ID : break; default : log.error("Task name unknown"); } } // TODO (elingg) optimize this method/ Possibly index by task id instead of hostname/ // Possibly call removeTask(slaveId, taskId) to avoid iterating through all maps public void removeTaskId(String taskId) { HashMap<String, String> journalNodes = getJournalNodes(); if (journalNodes.values().contains(taskId)) { for (Map.Entry<String, String> entry : journalNodes.entrySet()) { if (entry.getValue() != null && entry.getValue().equals(taskId)) { journalNodes.put(entry.getKey(), null); setJournalNodes(journalNodes); return; } } } HashMap<String, String> nameNodes = getNameNodes(); if (nameNodes.values().contains(taskId)) { for (Map.Entry<String, String> entry : nameNodes.entrySet()) { if (entry.getValue() != null && entry.getValue().equals(taskId)) { nameNodes.put(entry.getKey(), null); setNameNodes(nameNodes); return; } } } HashMap<String, String> dataNodes = getDataNodes(); if (dataNodes.values().contains(taskId)) { for (Map.Entry<String, String> entry : dataNodes.entrySet()) { if (entry.getValue() != null && entry.getValue().equals(taskId)) { dataNodes.put(entry.getKey(), null); setDataNodes(dataNodes); return; } } } } public boolean journalNodeRunningOnSlave(String hostname) { return getJournalNodes().containsKey(hostname); } public boolean nameNodeRunningOnSlave(String hostname) { return getNameNodes().containsKey(hostname); } public boolean dataNodeRunningOnSlave(String hostname) { return getDataNodes().containsKey(hostname); } private void setNameNodes(HashMap<String, String> nameNodes) { try { set(NAMENODES_KEY, nameNodes); } catch (Exception e) { log.error("Error while setting namenodes in persistent state", e); } } private void setJournalNodes(HashMap<String, 
String> journalNodes) { try { set(JOURNALNODES_KEY, journalNodes); } catch (Exception e) { log.error("Error while setting journalnodes in persistent state", e); } } private void setDataNodes(HashMap<String, String> dataNodes) { try { set(DATANODES_KEY, dataNodes); } catch (Exception e) { log.error("Error while setting datanodes in persistent state", e); } } private HashMap<String, String> getHashMap(String key) { try { HashMap<String, String> nodesMap = get(key); if (nodesMap == null) { return new HashMap<>(); } return nodesMap; } catch (Exception e) { log.error(String.format("Error while getting %s in persistent state", key), e); return new HashMap<>(); } } /** * Get serializable object from store. * * @return serialized object or null if none * @throws ExecutionException * @throws InterruptedException * @throws IOException * @throws ClassNotFoundException */ @SuppressWarnings("unchecked") private <T extends Object> T get(String key) throws InterruptedException, ExecutionException, IOException, ClassNotFoundException { byte[] existingNodes = zkState.fetch(key).get().value(); if (existingNodes.length > 0) { ByteArrayInputStream bis = new ByteArrayInputStream(existingNodes); ObjectInputStream in = null; try { in = new ObjectInputStream(bis); return (T) in.readObject(); } finally { try { bis.close(); } finally { if (in != null) { in.close(); } } } } else { return null; } } /** * Set serializable object in store * * @throws ExecutionException * @throws InterruptedException * @throws IOException */ private <T extends Object> void set(String key, T object) throws InterruptedException, ExecutionException, IOException { Variable value = zkState.fetch(key).get(); ByteArrayOutputStream bos = new ByteArrayOutputStream(); ObjectOutputStream out = null; try { out = new ObjectOutputStream(bos); out.writeObject(object); value = value.mutate(bos.toByteArray()); zkState.store(value).get(); } finally { try { if (out != null) { out.close(); } } finally { bos.close(); } } } }
src/main/java/org/apache/mesos/hdfs/state/PersistentState.java
package org.apache.mesos.hdfs.state; import com.google.protobuf.InvalidProtocolBufferException; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.mesos.MesosNativeLibrary; import org.apache.mesos.Protos; import org.apache.mesos.Protos.FrameworkID; import org.apache.mesos.hdfs.config.SchedulerConf; import org.apache.mesos.hdfs.util.HDFSConstants; import org.apache.mesos.state.Variable; import org.apache.mesos.state.ZooKeeperState; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; public class PersistentState { public static final Log log = LogFactory.getLog(PersistentState.class); private static String FRAMEWORK_ID_KEY = "frameworkId"; private static String NAMENODES_KEY = "nameNodes"; private static String JOURNALNODES_KEY = "journalNodes"; private static String DATANODES_KEY = "dataNodes"; private ZooKeeperState zkState; public PersistentState(SchedulerConf conf) { MesosNativeLibrary.load(conf.getNativeLibrary()); this.zkState = new ZooKeeperState(conf.getStateZkServers(), conf.getStateZkTimeout(), TimeUnit.MILLISECONDS, "/hdfs-mesos/" + conf.getFrameworkName()); } public FrameworkID getFrameworkID() throws InterruptedException, ExecutionException, InvalidProtocolBufferException { byte[] existingFrameworkId = zkState.fetch(FRAMEWORK_ID_KEY).get().value(); if (existingFrameworkId.length > 0) { return FrameworkID.parseFrom(existingFrameworkId); } else { return null; } } public void setFrameworkId(FrameworkID frameworkId) throws InterruptedException, ExecutionException { Variable value = zkState.fetch(FRAMEWORK_ID_KEY).get(); value = 
value.mutate(frameworkId.toByteArray()); zkState.store(value).get(); } public List<String> getDeadJournalNodes() { HashMap<String, String> journalNodes = getJournalNodes(); Set<String> journalHosts = journalNodes.keySet(); List<String> deadJournalHosts = new ArrayList<>(); for (String journalHost: journalHosts) { if (journalNodes.get(journalHost) == null) { deadJournalHosts.add(journalHost); } } return deadJournalHosts; } public List<String> getDeadNameNodes() { HashMap<String, String> nameNodes = getNameNodes(); Set<String> nameHosts = nameNodes.keySet(); List<String> deadNameHosts = new ArrayList<>(); for (String nameHost : nameHosts) { if (nameNodes.get(nameHost) == null) { deadNameHosts.add(nameHost); } } return deadNameHosts; } public List<String> getDeadDataNodes() { HashMap<String, String> dataNodes = getDataNodes(); Set<String> dataHosts = dataNodes.keySet(); List<String> deadDataHosts = new ArrayList<>(); for (String dataHost : dataHosts) { if (dataNodes.get(dataHost) == null) { deadDataHosts.add(dataHost); } } return deadDataHosts; } // TODO (nicgrayson) add tests with in-memory state implementation for zookeeper public HashMap<String, String> getJournalNodes() { return getHashMap(JOURNALNODES_KEY); } public HashMap<String, String> getNameNodes() { return getHashMap(NAMENODES_KEY); } public HashMap<String, String> getDataNodes() { return getHashMap(DATANODES_KEY); } public Collection<String> getAllTaskIds() { HashMap<String, String> allTasksIds = getJournalNodes(); allTasksIds.putAll(getNameNodes()); allTasksIds.putAll(getDataNodes()); return allTasksIds.values(); } public void addHdfsNode(Protos.TaskID taskId, String hostname, String taskName) { switch (taskName) { case HDFSConstants.NAME_NODE_ID : HashMap<String, String> nameNodes = getNameNodes(); nameNodes.put(hostname, taskId.getValue()); setNameNodes(nameNodes); break; case HDFSConstants.JOURNAL_NODE_ID : HashMap<String, String> journalNodes = getJournalNodes(); journalNodes.put(hostname, 
taskId.getValue()); setJournalNodes(journalNodes); break; case HDFSConstants.DATA_NODE_ID : HashMap<String, String> dataNodes = getDataNodes(); dataNodes.put(hostname, taskId.getValue()); setDataNodes(dataNodes); break; case HDFSConstants.ZKFC_NODE_ID : break; default : log.error("Task name unknown"); } } // TODO (elingg) optimize this method/ Possibly index by task id instead of hostname/ // Possibly call removeTask(slaveId, taskId) to avoid iterating through all maps public void removeTaskId(String taskId) { HashMap<String, String> journalNodes = getJournalNodes(); if (journalNodes.values().contains(taskId)) { for (Map.Entry<String, String> entry : journalNodes.entrySet()) { if (entry.getValue() != null && entry.getValue().equals(taskId)) { journalNodes.put(entry.getKey(), null); setJournalNodes(journalNodes); return; } } } HashMap<String, String> nameNodes = getNameNodes(); if (nameNodes.values().contains(taskId)) { for (Map.Entry<String, String> entry : nameNodes.entrySet()) { if (entry.getValue() != null && entry.getValue().equals(taskId)) { nameNodes.put(entry.getKey(), null); setNameNodes(nameNodes); return; } } } HashMap<String, String> dataNodes = getDataNodes(); if (dataNodes.values().contains(taskId)) { for (Map.Entry<String, String> entry : dataNodes.entrySet()) { if (entry.getValue() != null && entry.getValue().equals(taskId)) { dataNodes.put(entry.getKey(), null); setDataNodes(dataNodes); return; } } } } public boolean journalNodeRunningOnSlave(String hostname) { return getJournalNodes().keySet().contains(hostname); } public boolean nameNodeRunningOnSlave(String hostname) { return getNameNodes().keySet().contains(hostname); } public boolean dataNodeRunningOnSlave(String hostname) { return getDataNodes().keySet().contains(hostname); } private void setNameNodes(HashMap<String, String> nameNodes) { try { set(NAMENODES_KEY, nameNodes); } catch (Exception e) { log.error("Error while setting namenodes in persistent state", e); } } private void 
setJournalNodes(HashMap<String, String> journalNodes) { try { set(JOURNALNODES_KEY, journalNodes); } catch (Exception e) { log.error("Error while setting journalnodes in persistent state", e); } } private void setDataNodes(HashMap<String, String> dataNodes) { try { set(DATANODES_KEY, dataNodes); } catch (Exception e) { log.error("Error while setting datanodes in persistent state", e); } } private HashMap<String, String> getHashMap(String key) { try { HashMap<String, String> nodesMap = get(key); if (nodesMap == null) { return new HashMap<>(); } return nodesMap; } catch (Exception e) { log.error(String.format("Error while getting %s in persistent state", key), e); return new HashMap<>(); } } /** * Get serializable object from store. * * @return serialized object or null if none * @throws ExecutionException * @throws InterruptedException * @throws IOException * @throws ClassNotFoundException */ @SuppressWarnings("unchecked") private <T extends Object> T get(String key) throws InterruptedException, ExecutionException, IOException, ClassNotFoundException { byte[] existingNodes = zkState.fetch(key).get().value(); if (existingNodes.length > 0) { ByteArrayInputStream bis = new ByteArrayInputStream(existingNodes); ObjectInputStream in = null; try { in = new ObjectInputStream(bis); return (T) in.readObject(); } finally { try { bis.close(); } finally { if (in != null) { in.close(); } } } } else { return null; } } /** * Set serializable object in store * * @throws ExecutionException * @throws InterruptedException * @throws IOException */ private <T extends Object> void set(String key, T object) throws InterruptedException, ExecutionException, IOException { Variable value = zkState.fetch(key).get(); ByteArrayOutputStream bos = new ByteArrayOutputStream(); ObjectOutputStream out = null; try { out = new ObjectOutputStream(bos); out.writeObject(object); value = value.mutate(bos.toByteArray()); zkState.store(value).get(); } finally { try { if (out != null) { out.close(); } } 
finally { bos.close(); } } } }
keyset -> containsKey
src/main/java/org/apache/mesos/hdfs/state/PersistentState.java
keyset -> containsKey
<ide><path>rc/main/java/org/apache/mesos/hdfs/state/PersistentState.java <ide> } <ide> <ide> public boolean journalNodeRunningOnSlave(String hostname) { <del> return getJournalNodes().keySet().contains(hostname); <add> return getJournalNodes().containsKey(hostname); <ide> } <ide> <ide> public boolean nameNodeRunningOnSlave(String hostname) { <del> return getNameNodes().keySet().contains(hostname); <add> return getNameNodes().containsKey(hostname); <ide> } <ide> <ide> public boolean dataNodeRunningOnSlave(String hostname) { <del> return getDataNodes().keySet().contains(hostname); <add> return getDataNodes().containsKey(hostname); <ide> } <ide> <ide> private void setNameNodes(HashMap<String, String> nameNodes) {
Java
apache-2.0
880aa3204ec718172e04b08b69b62f260686db4c
0
arthurdm/microprofile-open-api
/** * Copyright (c) 2017 Contributors to the Eclipse Foundation * <p> * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.eclipse.microprofile.openapi.tck; import java.util.Map; import org.eclipse.microprofile.openapi.OASFactory; import org.eclipse.microprofile.openapi.models.Constructible; import org.eclipse.microprofile.openapi.models.info.License; import org.jboss.arquillian.container.test.api.Deployment; import org.jboss.arquillian.testng.Arquillian; import org.jboss.shrinkwrap.api.ShrinkWrap; import org.jboss.shrinkwrap.api.spec.WebArchive; import org.testng.annotations.Test; public class OASFactoryErrorTest extends Arquillian { public interface MyConstructible extends Constructible {} public interface MyLicense extends License {} public abstract class MyAbstractLicenseImpl implements License {} public final class MyLicenseImpl implements License { @Override public Map<String, Object> getExtensions() { return null; } @Override public License addExtension(String name, Object value) { return null; } @Override public void setExtensions(Map<String, Object> extensions) {} @Override public String getName() { return null; } @Override public void setName(String name) {} @Override public License name(String name) { return null; } @Override public String getUrl() { return null; } @Override public void setUrl(String url) {} @Override public License url(String url) { return null; } } @Deployment public static WebArchive createDeployment() { return 
ShrinkWrap.create(WebArchive.class); } @Test(expectedExceptions = { NullPointerException.class }) public void nullValueTest() { @SuppressWarnings("unused") final Object o = OASFactory.createObject(null); } @Test(expectedExceptions = { IllegalArgumentException.class }) public void baseInterfaceTest() { @SuppressWarnings("unused") final Constructible c = OASFactory.createObject(Constructible.class); } @Test(expectedExceptions = { IllegalArgumentException.class }) public void extendedBaseInterfaceTest() { @SuppressWarnings("unused") final MyConstructible m = OASFactory.createObject(MyConstructible.class); } @Test(expectedExceptions = { IllegalArgumentException.class }) public void extendedInterfaceTest() { @SuppressWarnings("unused") final MyLicense m = OASFactory.createObject(MyLicense.class); } @Test(expectedExceptions = { IllegalArgumentException.class }) public void customAbstractClassTest() { @SuppressWarnings("unused") final MyAbstractLicenseImpl m = OASFactory.createObject(MyAbstractLicenseImpl.class); } @Test(expectedExceptions = { IllegalArgumentException.class }) public void customClassTest() { @SuppressWarnings("unused") final MyLicenseImpl m = OASFactory.createObject(MyLicenseImpl.class); } }
tck/src/main/java/org/eclipse/microprofile/openapi/tck/OASFactoryErrorTest.java
/** * Copyright (c) 2017 Contributors to the Eclipse Foundation * <p> * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.eclipse.microprofile.openapi.tck; import java.util.Map; import org.eclipse.microprofile.openapi.OASFactory; import org.eclipse.microprofile.openapi.models.Constructible; import org.eclipse.microprofile.openapi.models.info.License; import org.jboss.arquillian.container.test.api.Deployment; import org.jboss.arquillian.testng.Arquillian; import org.jboss.shrinkwrap.api.ShrinkWrap; import org.jboss.shrinkwrap.api.spec.WebArchive; import org.testng.annotations.Test; public class OASFactoryErrorTest extends Arquillian { public interface MyConstructible extends Constructible {} public interface MyLicense extends License {} public abstract class MyAbstractLicenseImpl implements License {} public final class MyLicenseImpl implements License { @Override public Map<String, Object> getExtensions() { return null; } @Override public void addExtension(String name, Object value) {} @Override public void setExtensions(Map<String, Object> extensions) {} @Override public String getName() { return null; } @Override public void setName(String name) {} @Override public License name(String name) { return null; } @Override public String getUrl() { return null; } @Override public void setUrl(String url) {} @Override public License url(String url) { return null; } } @Deployment public static WebArchive createDeployment() { return ShrinkWrap.create(WebArchive.class); } 
@Test(expectedExceptions = { NullPointerException.class }) public void nullValueTest() { @SuppressWarnings("unused") final Object o = OASFactory.createObject(null); } @Test(expectedExceptions = { IllegalArgumentException.class }) public void baseInterfaceTest() { @SuppressWarnings("unused") final Constructible c = OASFactory.createObject(Constructible.class); } @Test(expectedExceptions = { IllegalArgumentException.class }) public void extendedBaseInterfaceTest() { @SuppressWarnings("unused") final MyConstructible m = OASFactory.createObject(MyConstructible.class); } @Test(expectedExceptions = { IllegalArgumentException.class }) public void extendedInterfaceTest() { @SuppressWarnings("unused") final MyLicense m = OASFactory.createObject(MyLicense.class); } @Test(expectedExceptions = { IllegalArgumentException.class }) public void customAbstractClassTest() { @SuppressWarnings("unused") final MyAbstractLicenseImpl m = OASFactory.createObject(MyAbstractLicenseImpl.class); } @Test(expectedExceptions = { IllegalArgumentException.class }) public void customClassTest() { @SuppressWarnings("unused") final MyLicenseImpl m = OASFactory.createObject(MyLicenseImpl.class); } }
Fix compile error Signed-off-by: Jeremie Bresson <[email protected]>
tck/src/main/java/org/eclipse/microprofile/openapi/tck/OASFactoryErrorTest.java
Fix compile error
<ide><path>ck/src/main/java/org/eclipse/microprofile/openapi/tck/OASFactoryErrorTest.java <ide> return null; <ide> } <ide> @Override <del> public void addExtension(String name, Object value) {} <add> public License addExtension(String name, Object value) { <add> return null; <add> } <ide> @Override <ide> public void setExtensions(Map<String, Object> extensions) {} <ide> @Override
Java
apache-2.0
eb7bbfbb83c911883ddda69aff9a6ff9b7b79b7b
0
SpineEventEngine/base,SpineEventEngine/base,SpineEventEngine/base
/* * Copyright 2018, TeamDev. All rights reserved. * * Redistribution and use in source and/or binary forms, with or without * modification, must retain the above copyright notice and the following * disclaimer. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package io.spine.tools.compiler.validation; import io.spine.code.Indent; import io.spine.code.proto.MessageType; import io.spine.test.tools.validation.builder.VbtProject; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.junitpioneer.jupiter.TempDirectory; import java.io.File; import java.nio.file.Path; import static org.junit.jupiter.api.Assertions.assertTrue; @ExtendWith(TempDirectory.class) @DisplayName("VBuilderCode should") class VBuilderCodeTest { private File targetDir; @BeforeEach void setUp(@TempDirectory.TempDir Path tempDirPath) { targetDir = tempDirPath.toFile(); } @Test void writeTopLevelMessage() { MessageType type = MessageType.of(VbtProject.getDescriptor()); VBuilderCode code = new VBuilderCode(targetDir, Indent.of4(), type); File created = code.write(); assertTrue(created.exists()); } }
tools/model-compiler/src/test/java/io/spine/tools/compiler/validation/VBuilderCodeTest.java
/* * Copyright 2018, TeamDev. All rights reserved. * * Redistribution and use in source and/or binary forms, with or without * modification, must retain the above copyright notice and the following * disclaimer. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package io.spine.tools.compiler.validation; import io.spine.code.Indent; import io.spine.code.proto.MessageType; import io.spine.test.tools.validation.builder.VbtProject; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.junitpioneer.jupiter.TempDirectory; import java.io.File; import java.nio.file.Path; import static org.junit.jupiter.api.Assertions.assertTrue; @ExtendWith(TempDirectory.class) @DisplayName("VBuilderCode should") class VBuilderCodeTest { private File targetDir; @BeforeEach void setUp(@TempDirectory.TempDir Path tempDirPath) { targetDir = tempDirPath.toFile(); } @Test @Disabled("until merging of descriptor set files is available for tests") void writeTopLevelMessage() { MessageType type = MessageType.of(VbtProject.getDescriptor()); VBuilderCode code = new VBuilderCode(targetDir, Indent.of4(), type); File 
created = code.write(); assertTrue(created.exists()); } }
Enable test back
tools/model-compiler/src/test/java/io/spine/tools/compiler/validation/VBuilderCodeTest.java
Enable test back
<ide><path>ools/model-compiler/src/test/java/io/spine/tools/compiler/validation/VBuilderCodeTest.java <ide> import io.spine.code.proto.MessageType; <ide> import io.spine.test.tools.validation.builder.VbtProject; <ide> import org.junit.jupiter.api.BeforeEach; <del>import org.junit.jupiter.api.Disabled; <ide> import org.junit.jupiter.api.DisplayName; <ide> import org.junit.jupiter.api.Test; <ide> import org.junit.jupiter.api.extension.ExtendWith; <ide> } <ide> <ide> @Test <del> @Disabled("until merging of descriptor set files is available for tests") <ide> void writeTopLevelMessage() { <ide> MessageType type = MessageType.of(VbtProject.getDescriptor()); <ide>
Java
apache-2.0
5bb281c57c1818106f25904d75b00deff96c092f
0
Cubiccl/RPGProject
package main.graphics.states; import java.awt.Color; import java.awt.Font; import java.awt.FontFormatException; import java.awt.FontMetrics; import java.awt.Graphics; import java.awt.event.KeyEvent; import java.io.FileNotFoundException; import java.io.IOException; import java.net.MalformedURLException; import main.game.input.KeyCustom; import main.graphics.textures.FontBuilder; import main.RPGProject; public class MenuState extends State { private static final Color DEFAULT = Color.WHITE, SELECTED = Color.RED, BACKGROUND = Color.BLACK; private static final String START = "Start this awesome game !", QUIT = "Quit"; private static final Font FONT = new Font("Impact", Font.PLAIN, 30); private int selected; public MenuState() { this.selected = 0; } @Override public void render(Graphics g) { int width = RPGProject.getWindow().getWidth(); int height = RPGProject.getWindow().getHeight(); g.setFont(FONT); try { g.setFont(main.graphics.textures.FontBuilder.createfont("res/Ruritania.ttf")); } catch (FileNotFoundException e) { // TODO Auto-generated catch block e.printStackTrace(); } catch (MalformedURLException e) { // TODO Auto-generated catch block e.printStackTrace(); } catch (FontFormatException e) { // TODO Auto-generated catch block e.printStackTrace(); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } FontMetrics metrics = g.getFontMetrics(); g.setColor(BACKGROUND); g.fillRect(0, 0, width, height); g.setColor(DEFAULT); if (this.selected == 0) g.setColor(SELECTED); g.drawString(START, width / 2 - metrics.stringWidth(START) / 2, height / 3); g.setColor(DEFAULT); if (this.selected == 1) g.setColor(SELECTED); g.drawString(QUIT, width / 2 - metrics.stringWidth(QUIT) / 2, height * 2 / 3); } @Override public void update() { if (RPGProject.getGame().getKeyManager() .isKeyPressedInstant(KeyCustom.up.getKeyCode())) this.selected--; if (RPGProject.getGame().getKeyManager() .isKeyPressedInstant(KeyCustom.down.getKeyCode())) this.selected++; if 
(this.selected > 1) this.selected = 1; if (this.selected < 0) this.selected = 0; if (RPGProject.getGame().getKeyManager() .isKeyPressedInstant(KeyCustom.enter.getKeyCode())) { switch (this.selected) { case 0: RPGProject.getGame().setState(StateManager.GAME); break; case 1: System.exit(0); break; default: break; } } } }
src/main/graphics/states/MenuState.java
package main.graphics.states; import java.awt.Color; import java.awt.Font; import java.awt.FontFormatException; import java.awt.FontMetrics; import java.awt.Graphics; import java.awt.event.KeyEvent; import java.io.FileNotFoundException; import java.io.IOException; import java.net.MalformedURLException; import main.graphics.textures.FontBuilder; import main.RPGProject; public class MenuState extends State { private static final Color DEFAULT = Color.WHITE, SELECTED = Color.RED, BACKGROUND = Color.BLACK; private static final String START = "Start this awesome game !", QUIT = "Quit"; private static final Font FONT = new Font("Impact", Font.PLAIN, 30); private int selected; public MenuState() { this.selected = 0; } @Override public void render(Graphics g) { int width = RPGProject.getWindow().getWidth(); int height = RPGProject.getWindow().getHeight(); g.setFont(FONT); try { g.setFont(main.graphics.textures.FontBuilder.createfont("res/Ruritania.ttf")); } catch (FileNotFoundException e) { // TODO Auto-generated catch block e.printStackTrace(); } catch (MalformedURLException e) { // TODO Auto-generated catch block e.printStackTrace(); } catch (FontFormatException e) { // TODO Auto-generated catch block e.printStackTrace(); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } FontMetrics metrics = g.getFontMetrics(); g.setColor(BACKGROUND); g.fillRect(0, 0, width, height); g.setColor(DEFAULT); if (this.selected == 0) g.setColor(SELECTED); g.drawString(START, width / 2 - metrics.stringWidth(START) / 2, height / 3); g.setColor(DEFAULT); if (this.selected == 1) g.setColor(SELECTED); g.drawString(QUIT, width / 2 - metrics.stringWidth(QUIT) / 2, height * 2 / 3); } @Override public void update() { if (RPGProject.getGame().getKeyManager() .isKeyPressedInstant(KeyEvent.VK_UP)) this.selected--; if (RPGProject.getGame().getKeyManager() .isKeyPressedInstant(KeyEvent.VK_DOWN)) this.selected++; if (this.selected > 1) this.selected = 1; if (this.selected < 
0) this.selected = 0; if (RPGProject.getGame().getKeyManager() .isKeyPressedInstant(KeyEvent.VK_ENTER)) { switch (this.selected) { case 0: RPGProject.getGame().setState(StateManager.GAME); break; case 1: System.exit(0); break; default: break; } } } }
Update 2 keyCustom
src/main/graphics/states/MenuState.java
Update 2 keyCustom
<ide><path>rc/main/graphics/states/MenuState.java <ide> import java.io.IOException; <ide> import java.net.MalformedURLException; <ide> <add>import main.game.input.KeyCustom; <ide> import main.graphics.textures.FontBuilder; <ide> import main.RPGProject; <ide> <ide> public void update() { <ide> <ide> if (RPGProject.getGame().getKeyManager() <del> .isKeyPressedInstant(KeyEvent.VK_UP)) <add> .isKeyPressedInstant(KeyCustom.up.getKeyCode())) <ide> this.selected--; <ide> if (RPGProject.getGame().getKeyManager() <del> .isKeyPressedInstant(KeyEvent.VK_DOWN)) <add> .isKeyPressedInstant(KeyCustom.down.getKeyCode())) <ide> this.selected++; <ide> <ide> if (this.selected > 1) <ide> this.selected = 0; <ide> <ide> if (RPGProject.getGame().getKeyManager() <del> .isKeyPressedInstant(KeyEvent.VK_ENTER)) { <add> .isKeyPressedInstant(KeyCustom.enter.getKeyCode())) { <ide> switch (this.selected) { <ide> case 0: <ide> RPGProject.getGame().setState(StateManager.GAME);
Java
mit
cbbbf604df7efa6d752da5b133a5af131b89a5b8
0
flintproject/Flint,flintproject/Flint
/* -*- Mode: Java; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- vim:set ts=4 sw=4 sts=4 et: */ package jp.oist.flint.form; import jp.oist.flint.backend.ModelLoader; import jp.oist.flint.command.Session; import jp.oist.flint.control.FileChooser; import jp.oist.flint.control.ModelFileTransferHandler; import jp.oist.flint.dao.SimulationDao; import jp.oist.flint.dao.TaskDao; import jp.oist.flint.desktop.Desktop; import jp.oist.flint.desktop.Document; import jp.oist.flint.desktop.IDesktopListener; import jp.oist.flint.executor.PhspProgressMonitor; import jp.oist.flint.executor.PhspSimulator; import jp.oist.flint.executor.SimulatorService; import jp.oist.flint.form.job.IProgressManager; import jp.oist.flint.form.sub.SubFrame; import jp.oist.flint.job.Progress; import jp.oist.flint.k3.K3Client; import jp.oist.flint.k3.K3Request; import jp.oist.flint.k3.K3RequestBuilder; import jp.oist.flint.phsp.PhspException; import jp.oist.flint.phsp.PhspReader; import jp.oist.flint.phsp.PhspReaderListener; import jp.oist.flint.phsp.PhspWriter; import jp.oist.flint.rpc.ICallee; import jp.oist.flint.sedml.ISimulationConfiguration; import jp.oist.flint.sedml.ISimulationConfigurationList; import jp.oist.flint.sedml.SedmlException; import jp.oist.flint.sedml.SedmlReader; import jp.oist.flint.sedml.SedmlWriter; import jp.oist.flint.util.Utility; import com.google.protobuf.ByteString; import org.apache.log4j.Logger; import org.xml.sax.SAXException; import java.awt.BorderLayout; import java.awt.Dimension; import java.awt.HeadlessException; import java.beans.PropertyChangeEvent; import java.beans.PropertyChangeListener; import java.beans.PropertyVetoException; import java.io.BufferedReader; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStreamReader; import java.net.URL; import java.nio.charset.StandardCharsets; import java.nio.file.Path; import java.nio.file.Paths; import java.sql.SQLException; 
import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Date; import java.util.List; import java.util.Map; import java.util.concurrent.ExecutionException; import java.util.prefs.BackingStoreException; import java.util.prefs.Preferences; import javax.swing.ImageIcon; import javax.swing.JComponent; import javax.swing.JFileChooser; import javax.swing.JOptionPane; import javax.swing.JPanel; import javax.swing.JSplitPane; import javax.swing.SwingUtilities; import javax.swing.SwingWorker; import javax.swing.WindowConstants; import javax.swing.filechooser.FileNameExtensionFilter; import javax.xml.parsers.ParserConfigurationException; import javax.xml.transform.TransformerException; /** * This is the class of the main window. */ public class MainFrame extends javax.swing.JFrame implements ICallee, IDesktopListener, IMenuDelegator, ISimulationConfigurationList, IFrame { static { try { jp.oist.flint.plotter.PlotterLoader.register("gnuplot", jp.oist.flint.gnuplot.Plotter.class); } catch (BackingStoreException bse) { Logger.getRootLogger().error(bse.getMessage()); } } public final static int WIDTH = 800; public final static int HEIGHT = 600; public final static int MIN_WIDTH = 800; public final static int MIN_HEIGHT = 600; private final Desktop mDesktop; private final Session mSession; private File mPhspFile = null; private PhspSimulator mSimulator = null; private ProgressPane mProgressPane; private ControlPane mControlPane; public MainFrame(Desktop desktop, Session session) throws IOException { super(); mDesktop = desktop; mSession = session; URL iconUrl = getClass().getResource("/jp/oist/flint/image/icon.png"); setIconImage(new ImageIcon(iconUrl).getImage()); setTransferHandler(new ModelFileTransferHandler(this)); initComponents(); } private void initComponents () { setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE); setTitle("Flint"); setMinimumSize(new Dimension(MIN_WIDTH, MIN_HEIGHT)); setMaximumSize(new Dimension( Short.MAX_VALUE, 
Short.MAX_VALUE )); setSize(new Dimension (WIDTH, HEIGHT)); setPreferredSize(new Dimension(WIDTH, HEIGHT)); setLocationRelativeTo(null); MenuBar menuBar = MenuBar.getInstance(); menuBar.setDelegator(this); setJMenuBar(menuBar); mDesktop.addListener(menuBar); setContentPane(createContentPane()); pack(); } private JComponent createContentPane () { final JSplitPane contentPane = new JSplitPane(JSplitPane.HORIZONTAL_SPLIT); contentPane.setDividerSize(10); contentPane.setDividerLocation(638); contentPane.setOneTouchExpandable(true); final JPanel peripheralPane = new JPanel(new BorderLayout()); peripheralPane.setEnabled(false); peripheralPane.setMaximumSize(new Dimension(Short.MAX_VALUE, Short.MAX_VALUE)); peripheralPane.setMinimumSize(new Dimension(150, Short.MAX_VALUE)); contentPane.setLeftComponent(mDesktop.getPane()); contentPane.setRightComponent(peripheralPane); mProgressPane = ProgressPane.getInstance(); mProgressPane.setMaximumSize(new Dimension(Short.MAX_VALUE, Short.MAX_VALUE)); mProgressPane.setMinimumSize(new Dimension(0, 0)); mProgressPane.setPreferredSize(new Dimension(150, 510)); mDesktop.addListener(mProgressPane); mControlPane = ControlPane.getInstance(); mControlPane.setMaximumSize(new Dimension(Short.MAX_VALUE, 60)); mControlPane.setMinimumSize(new Dimension(0, 60)); mControlPane.setPreferredSize(new Dimension(150, 60)); peripheralPane.add(mProgressPane, BorderLayout.CENTER); peripheralPane.add(mControlPane, BorderLayout.SOUTH); return contentPane; } private void simulationRun () { try { for (SubFrame subFrame : getSubFrames()) subFrame.reloadJobViewer(); // TOOD for (ProgressCell cell : mProgressPane.getListCells()) cell.progressStarted(); SimulatorService service = new SimulatorService(this); final PhspSimulator simulator = new PhspSimulator(service, this, mDesktop); final PhspProgressMonitor monitor = new PhspProgressMonitor(simulator); simulator.addSimulationListener(new PhspSimulator.Listener() { @Override public void 
onSimulationStarted(PhspSimulator.Event evt) { mProgressPane.repaint(); } @Override public void onSimulationExited(PhspSimulator.Event evt) { mProgressPane.repaint(); PhspSimulator simulator = (PhspSimulator)evt.getSource(); try { monitor.stop(); //TODO ProgressPane progressView; Boolean result = simulator.get(); if (result) { JOptionPane.showMessageDialog(MainFrame.this, "Simulation completed", "Simulation completed", JOptionPane.PLAIN_MESSAGE); } else { } } catch (InterruptedException | ExecutionException | HeadlessException ex) { File logFile = simulator.getLogFile(); StringBuilder sb = new StringBuilder(); if (logFile != null) { try (FileInputStream fis = new FileInputStream(logFile); InputStreamReader isr = new InputStreamReader(fis, StandardCharsets.UTF_8); BufferedReader reader = new BufferedReader(isr)) { String line; while ((line = reader.readLine()) != null) { sb.append(line).append(System.getProperty("line.separator")); } } catch (IOException ex1) { Logger.getRootLogger().error(ex1.getMessage()); } } String detail = sb.toString(); MessageDialog.showMessageDialog(MainFrame.this, "The following error occurred during simulation:", detail, "Error on simulation", JOptionPane.ERROR_MESSAGE, null, new Object[]{" OK "}); } } }); for (SubFrame subFrame : getSubFrames()) simulator.addSimulationListener(subFrame); simulator.addSimulationListener(MenuBar.getInstance()); simulator.addSimulationListener(mControlPane); monitor.addPropertyChangeListener(new PropertyChangeListener(){ @Override public void propertyChange(PropertyChangeEvent e) { String propertyName = e.getPropertyName(); if ("progress".equals(propertyName)) { if (e instanceof PhspProgressMonitor.Event) { final PhspProgressMonitor.Event evt = (PhspProgressMonitor.Event)e; SwingUtilities.invokeLater(new Runnable() { @Override public void run() { String modelPath = (String)evt.getClientProperty("modelPath"); SubFrame subFrame = findSubFrame(modelPath); SimulationDao simulationDao = 
simulator.getSimulationDao(); TaskDao taskDao = simulationDao.obtainTask(new File(subFrame.getRelativeModelPath())); Progress progress = (Progress)evt.getNewValue(); Map<String, Number> target = (Map<String, Number>)evt.getClientProperty("target"); IProgressManager progressMgr = subFrame.getProgressManager(); int index = progressMgr.indexOf(target); progressMgr.setProgress(index, progress); if (taskDao.isCancelled()) progressMgr.setCancelled(index, rootPaneCheckingEnabled); int taskProgress = taskDao.getProgress(); ProgressCell cell = mProgressPane.getListCellOfModel(new File(modelPath)); String status; if (taskDao.isFinished()) { status = (taskDao.isCancelled())? "finished" : "completed"; cell.progressFinished(status, 0, 100, taskProgress); } else if (taskDao.isStarted()) { status = (taskDao.isCancelled())? "cancelling..." : taskProgress + " %"; cell.setProgress(status, 0, 100, taskProgress); } } }); } } } }); simulator.execute(); mSimulator = simulator; monitor.start(); } catch (IOException | ParserConfigurationException | PhspException | SQLException | SedmlException | TransformerException ex) { showErrorDialog(ex.getMessage(), "ERROR"); } } public boolean openPhsp(final File phspFile) { List<SubFrame> subFrames = getSubFrames(); int numberOfSubView = subFrames.size(); if (numberOfSubView > 0) { int ans = JOptionPane.showConfirmDialog(this, "Is it ok to close editing files?", "Close a file", JOptionPane.YES_NO_OPTION); if (ans != JOptionPane.YES_OPTION) return false; closeAll(); } mPhspFile = phspFile; try { setEditable(false); ModelLoaderLogger logger = new ModelLoaderLogger(mDesktop); PhspReader phspLoader = new PhspReader(phspFile); phspLoader.addPropertyChangeListener(new PhspReaderListener(logger)); phspLoader.addPropertyChangeListener(new PropertyChangeListener() { @Override public void propertyChange(PropertyChangeEvent evt) { String propertyName = evt.getPropertyName(); Object newValue = evt.getNewValue(); if ("state".equals(propertyName) && 
SwingWorker.StateValue.DONE.equals(newValue)) { mSession.updateRecentModels(phspFile); setEditable(true); } } }); phspLoader.execute(); return true; } catch (IOException ex) { setEditable(true); showErrorDialog(ex.getMessage(), "Error on load file of phsp"); return false; } } public boolean closeModel (SubFrame subFrame) { if (mSimulator != null && mSimulator.isStarted()) { JOptionPane.showMessageDialog(this, "Could not close the model. \n" + "It's running the simulation yet.", "Error on close the model", JOptionPane.INFORMATION_MESSAGE); return false; } if (subFrame == null) return false; if (!subFrame.isClosed()) mDesktop.removeDocument(subFrame.getDocument()); return true; } public void closeAll () { for (SubFrame subFrame : getSubFrames()) closeModel(subFrame); } public SubFrame getSelectedSubFrame () { return (SubFrame) mDesktop.getPane().getSelectedFrame(); } public List<SubFrame> getSubFrames() { return mDesktop.getSubFrames(); } public void setProgress(Object key, int progress) { if (key instanceof String) { String modelPath = (String)key; ProgressCell cell = mProgressPane.getListCellOfModel(new File(modelPath)); String msg = progress + " %"; cell.setProgress(msg, 0, 100, progress); return; } throw new IllegalArgumentException("key must be set model file path."); } public void setEditable (boolean editable) { List<SubFrame> subFrames = getSubFrames(); for (SubFrame subFrame : subFrames) subFrame.setEditable(editable); mControlPane.setSimulationRunEnabled(editable); } /* * Implements MenuBar Delegater */ @Override public void openPerformed (Object source) { FileChooser fc = new FileChooser(this, "Open model", FileChooser.Mode.LOAD, mSession.getLastPath()); JFileChooser jfc = fc.getJFileChooser(); if (jfc != null) { jfc.setAcceptAllFileFilterUsed(false); FileNameExtensionFilter xmlFilter = new FileNameExtensionFilter("XML files (*.xml)", "xml"); FileNameExtensionFilter modelFilter = new FileNameExtensionFilter("Model files (*.isml, *.phml, *.phz, *.sbml)", 
"isml", "phml", "phz", "sbml"); FileNameExtensionFilter phspFilter = new FileNameExtensionFilter("PHSP files (*.phsp)", "phsp"); jfc.addChoosableFileFilter(modelFilter); jfc.addChoosableFileFilter(phspFilter); jfc.addChoosableFileFilter(xmlFilter); jfc.addChoosableFileFilter(jfc.getAcceptAllFileFilter()); jfc.setFileFilter(modelFilter); } if (!fc.showDialog()) return; openModel(fc.getSelectedFile()); } @Override public void recentModelPerformed (Object source, File f) { openModel(f); } @Override public void closePerformed (Object source) { closeModel(getSelectedSubFrame()); } @Override public void loadConfigurationPerformed (Object source) { Preferences prefs = Preferences.userRoot().node("/jp/oist/flint"); String defaultPath = prefs.get("defaultConfigurationPath", ""); FileChooser fc = new FileChooser(this, "Open SED-ML file", FileChooser.Mode.LOAD, defaultPath); JFileChooser jfc = fc.getJFileChooser(); if (jfc != null) { jfc.setAcceptAllFileFilterUsed(false); FileNameExtensionFilter sedmlFilter = new FileNameExtensionFilter("SED-ML files (*.sedml, *.xml)", "sedml", "xml"); jfc.addChoosableFileFilter(sedmlFilter); jfc.addChoosableFileFilter(jfc.getAcceptAllFileFilter()); jfc.setFileFilter(sedmlFilter); } if (!fc.showDialog()) return; File file = fc.getSelectedFile(); SedmlReader reader = new SedmlReader(file); try { if (reader.parse()) { prefs.put("defaultConfigurationPath", file.getParent()); ISimulationConfigurationList configs = (ISimulationConfigurationList)reader.getHandler(); for (SubFrame subFrame : getSubFrames()) { ISimulationConfiguration config = configs.getConfigurationByModelPath(subFrame.getRelativeModelPath()); subFrame.load(config); } } } catch (IOException | ParserConfigurationException | SAXException e) { showErrorDialog(e.getMessage(), "Error on load configuration"); } } @Override public void saveConfigurationPerformed (Object source) { FileChooser fc = new FileChooser(this, "Select SED-ML file", FileChooser.Mode.SAVE, ""); if (fc.showDialog()) 
{ final File file = fc.getSelectedFile(); if (file.exists()) { int ans = JOptionPane.showConfirmDialog(this, "Is it OK to replace the existing file?", "Replace the existing file?", JOptionPane.YES_NO_OPTION); if (ans != JOptionPane.YES_OPTION) return; } try (FileOutputStream fos = new FileOutputStream(file)) { SedmlWriter writer = new SedmlWriter(true); writer.writeSimulationConfiguration(this, fos); } catch (HeadlessException e) { showErrorDialog(e.getMessage(), "Error on saving configuration"); return; } catch (IOException | ArithmeticException | SedmlException e) { showErrorDialog(e.getMessage(), "Error on saving configuration"); return; } JOptionPane.showMessageDialog(this, "Saved configuration as " + file.getPath()); } } @Override public void saveAsPhspPerformed (Object source) { try { if (mDesktop.isEmpty()) { showErrorDialog("Please open the phml/sbml model.", "Error on saving phsp"); return; } FileChooser fc = new FileChooser(this, "Select PHSP file", FileChooser.Mode.SAVE, ""); JFileChooser jfc = fc.getJFileChooser(); FileNameExtensionFilter xmlFilter = new FileNameExtensionFilter("XML files (*.xml)", "xml"); FileNameExtensionFilter modelFilter = new FileNameExtensionFilter("PHSP files (*.phsp)", "phsp"); jfc.addChoosableFileFilter(modelFilter); jfc.addChoosableFileFilter(xmlFilter); jfc.addChoosableFileFilter(jfc.getAcceptAllFileFilter()); jfc.setFileFilter(modelFilter); File initFile = (mPhspFile == null)? 
new File("Untitled.phsp"): mPhspFile; jfc.setSelectedFile(initFile); if (fc.showDialog()) { File file = jfc.getSelectedFile(); if (!file.getName().endsWith(".phsp")) file = new File(file.getPath()+".phsp"); if (file.exists()) { int ans = JOptionPane.showConfirmDialog(this, "Is it OK to replace the existing file?", "Replace the existing file?", JOptionPane.YES_NO_OPTION); if (ans != JOptionPane.YES_OPTION) return; } setEnabled(false); try (FileOutputStream fos = new FileOutputStream(file)) { PhspWriter writer = new PhspWriter(); writer.write(mDesktop, fos, false); } JOptionPane.showMessageDialog(this, "Saved phsp as " + file.getPath()); } } catch (IOException | ParserConfigurationException | PhspException | TransformerException ex) { showErrorDialog(ex.getMessage(), "Saving as PHSP failed"); } finally { setEnabled(true); } } @Override public void exitPerformed (Object source) { if (mSimulator != null) mSimulator.cancel(true); System.exit(0); } @Override public void copyPerformed (Object source) { SubFrame subFrame = getSelectedSubFrame(); subFrame.copy(); } @Override public void cutPerformed (Object source) { SubFrame subFrame = getSelectedSubFrame(); subFrame.cut(); } @Override public void preferencePerformed (Object source) { PreferenceDialog ad = new PreferenceDialog(this, true); ad.setVisible(true); } @Override public void simulationRunPerformed(Object source) { simulationRun(); } @Override public void sendToK3Performed(Object source) { Preferences prefs = Preferences.userRoot().node("/jp/oist/flint/session/k3"); final String encryptedUserId = prefs.get("encryptedUserId", null); final String encryptedPassword = prefs.get("encryptedPassword", null); if (encryptedUserId == null || encryptedUserId.isEmpty() || encryptedPassword == null || encryptedPassword.isEmpty()) { StringBuilder sb = new StringBuilder("Please specify your account of Flint K3."); sb.append(System.getProperty("line.separator")); sb.append("(Edit -> Preference -> K3)"); 
showErrorDialog(sb.toString(), "Error on preference"); return; } SubFrame subFrame = getSelectedSubFrame(); final Object retval = JOptionPane.showInputDialog(this, "New job's title:", "New job's title", JOptionPane.QUESTION_MESSAGE, null, null, subFrame.getModelFile().getName()); if (retval == null) return; SwingWorker<Integer, Void> worker = new SwingWorker<Integer, Void>() { @Override protected Integer doInBackground() throws Exception { SubFrame subFrame = getSelectedSubFrame(); String jobName = (String)retval; String userId = Utility.decrypt(encryptedUserId); String passwd = Utility.decrypt(encryptedPassword); K3RequestBuilder reqBuilder = new K3RequestBuilder( subFrame.getModelFile(), subFrame); K3Request request = reqBuilder.build(jobName, userId, passwd); K3Client k3 = new K3Client(); return k3.submit(request); } @Override protected void done () { try { int jobId = get(); String message = String.format("Submitted successfully your job to Flint K3 (Job ID : %d)", jobId); String title = "Job submitted to Flint K3"; JOptionPane.showMessageDialog(MainFrame.this, message, title, JOptionPane.INFORMATION_MESSAGE); } catch (InterruptedException | ExecutionException ex) { showErrorDialog(ex.getMessage(), "Error on comminicating with Flint K3"); } } }; worker.execute(); } @Override public void aboutPerformed (Object source) { AboutDialog ad = new AboutDialog(this); ad.setVisible(true); } /* * Implements IFrame */ @Override public void showErrorDialog(String message, String title) { JOptionPane.showMessageDialog(this, message, title, JOptionPane.ERROR_MESSAGE); } @Override public void showErrorDialog(ByteString message, String title) { JOptionPane.showMessageDialog(this, message.toStringUtf8(), title, JOptionPane.ERROR_MESSAGE); } @Override public void appendLog(String s) { Date date = new Date(); SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); String d = sdf.format(date); Logger.getRootLogger().error("[" + d + "] " + s + 
System.getProperty("line.separator")); } public void notifyK3Enabled() { List<SubFrame> subFrames = getSubFrames(); for (SubFrame subFrame : subFrames) { subFrame.notifyK3Enabled(); } } public boolean openModel (final File file) { if (file == null) { // just ignore it return false; } if (!file.exists()) { String separator = System.getProperty("line.separator"); String msg = String.format("The file named \"%s\" " + separator + "does not exist.", file.getPath()); showErrorDialog(msg, "Error on opening model"); return false; } // check xml format String format = Utility.detectXMLFormat(file); if ("phsp".equals(format)) return openPhsp(file); // check if the file is opened. for (SubFrame child : getSubFrames()) { if (child.getModelFile().getPath().equals(file.getPath())) { try { child.setSelected(true); } catch (PropertyVetoException ex) { // ignored } return true; } } String path; try { path = file.getCanonicalPath(); } catch (IOException ex) { showErrorDialog("could not get canonical path : " + file.toString(), "Error on opening model"); return false; } if (!file.isFile()) { showErrorDialog("could not get canonical path : " + file.toString(), "Error on opening model"); return false; } int len = (int)file.length(); if (len == 0) { showErrorDialog("file has length 0 : " + path, "Error on opening model"); return false; } ModelLoaderLogger logger = new ModelLoaderLogger(mDesktop); setEditable(false); ModelLoader loader = new ModelLoader(file); loader.addPropertyChangeListener(new ModelFileLoaderListener(logger, loader)); loader.addPropertyChangeListener(new ModelLoaderProgressDialog(this, path)); loader.addPropertyChangeListener(new PropertyChangeListener() { @Override public void propertyChange(PropertyChangeEvent evt) { String propertyName = evt.getPropertyName(); Object newValue = evt.getNewValue(); if ("state".equals(propertyName) && SwingWorker.StateValue.DONE.equals(newValue)) { mSession.updateRecentModels(file); setEditable(true); } } }); loader.execute(); return 
true; } @Override public void openModel(final String name) { SwingUtilities.invokeLater(new Runnable() { @Override public void run() { Path path = Paths.get(name); openModel(path.toFile()); } }); } public void setPlotterSettingTabEnabled(String defaultPlotter) { SubFrame subFrame = this.getSelectedSubFrame(); if (subFrame != null) { subFrame.setPlotterSettingTabEnabled("gnuplot".equals(defaultPlotter)); } } /* * implements ISimulationConfigurationList */ @Override public ISimulationConfiguration getConfiguration(int index) { return getSubFrames().get(index); } @Override public ISimulationConfiguration getConfigurationByModelPath(String modelPath) { return findSubFrame(modelPath); } public SubFrame findSubFrame (String modelPath) { for (SubFrame subFrame : getSubFrames()) { if (modelPath.equals(subFrame.getRelativeModelPath())) return subFrame; } return null; } @Override public List<ISimulationConfiguration> toList() { List<ISimulationConfiguration> configs = new ArrayList<>(); for (SubFrame subFrame : getSubFrames()) configs.add(subFrame); return configs; } @Override public int getConfigurationCount () { return mDesktop.getSize(); } /* IDesktopListener */ @Override public void documentAdded(Document doc) { Preferences prefs = Preferences.userRoot().node("/jp/oist/flint"); String defaultPlotter = prefs.get("defaultPlotter", ""); setPlotterSettingTabEnabled(defaultPlotter); } @Override public void documentRemoved(Document doc, boolean empty) { if (empty) { setEditable(false); requestFocus(); } } }
flint/src/jp/oist/flint/form/MainFrame.java
/* -*- Mode: Java; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- vim:set ts=4 sw=4 sts=4 et: */ package jp.oist.flint.form; import jp.oist.flint.backend.ModelLoader; import jp.oist.flint.command.Session; import jp.oist.flint.control.FileChooser; import jp.oist.flint.control.ModelFileTransferHandler; import jp.oist.flint.dao.SimulationDao; import jp.oist.flint.dao.TaskDao; import jp.oist.flint.desktop.Desktop; import jp.oist.flint.desktop.Document; import jp.oist.flint.desktop.IDesktopListener; import jp.oist.flint.executor.PhspProgressMonitor; import jp.oist.flint.executor.PhspSimulator; import jp.oist.flint.executor.SimulatorService; import jp.oist.flint.form.job.IProgressManager; import jp.oist.flint.form.sub.SubFrame; import jp.oist.flint.job.Progress; import jp.oist.flint.k3.K3Client; import jp.oist.flint.k3.K3Request; import jp.oist.flint.k3.K3RequestBuilder; import jp.oist.flint.phsp.PhspException; import jp.oist.flint.phsp.PhspReader; import jp.oist.flint.phsp.PhspReaderListener; import jp.oist.flint.phsp.PhspWriter; import jp.oist.flint.rpc.ICallee; import jp.oist.flint.sedml.ISimulationConfiguration; import jp.oist.flint.sedml.ISimulationConfigurationList; import jp.oist.flint.sedml.SedmlException; import jp.oist.flint.sedml.SedmlReader; import jp.oist.flint.sedml.SedmlWriter; import jp.oist.flint.util.Utility; import com.google.protobuf.ByteString; import org.apache.log4j.Logger; import org.xml.sax.SAXException; import java.awt.BorderLayout; import java.awt.Dimension; import java.awt.HeadlessException; import java.beans.PropertyChangeEvent; import java.beans.PropertyChangeListener; import java.beans.PropertyVetoException; import java.io.BufferedReader; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStreamReader; import java.net.URL; import java.nio.charset.StandardCharsets; import java.nio.file.Path; import java.nio.file.Paths; import java.sql.SQLException; 
import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Date; import java.util.List; import java.util.Map; import java.util.concurrent.ExecutionException; import java.util.prefs.BackingStoreException; import java.util.prefs.Preferences; import javax.swing.ImageIcon; import javax.swing.JComponent; import javax.swing.JFileChooser; import javax.swing.JOptionPane; import javax.swing.JPanel; import javax.swing.JSplitPane; import javax.swing.SwingUtilities; import javax.swing.SwingWorker; import javax.swing.WindowConstants; import javax.swing.event.EventListenerList; import javax.swing.filechooser.FileNameExtensionFilter; import javax.xml.parsers.ParserConfigurationException; import javax.xml.transform.TransformerException; /** * This is the class of the main window. */ public class MainFrame extends javax.swing.JFrame implements ICallee, IDesktopListener, IMenuDelegator, ISimulationConfigurationList, IFrame { static { try { jp.oist.flint.plotter.PlotterLoader.register("gnuplot", jp.oist.flint.gnuplot.Plotter.class); } catch (BackingStoreException bse) { Logger.getRootLogger().error(bse.getMessage()); } } public final static int WIDTH = 800; public final static int HEIGHT = 600; public final static int MIN_WIDTH = 800; public final static int MIN_HEIGHT = 600; private final Desktop mDesktop; private final Session mSession; private final EventListenerList mEventListenerList; private File mPhspFile = null; private PhspSimulator mSimulator = null; private ProgressPane mProgressPane; private ControlPane mControlPane; public MainFrame(Desktop desktop, Session session) throws IOException { super(); mDesktop = desktop; mSession = session; mEventListenerList = new EventListenerList(); URL iconUrl = getClass().getResource("/jp/oist/flint/image/icon.png"); setIconImage(new ImageIcon(iconUrl).getImage()); setTransferHandler(new ModelFileTransferHandler(this)); initComponents(); } private void initComponents () { 
setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE); setTitle("Flint"); setMinimumSize(new Dimension(MIN_WIDTH, MIN_HEIGHT)); setMaximumSize(new Dimension( Short.MAX_VALUE, Short.MAX_VALUE )); setSize(new Dimension (WIDTH, HEIGHT)); setPreferredSize(new Dimension(WIDTH, HEIGHT)); setLocationRelativeTo(null); MenuBar menuBar = MenuBar.getInstance(); menuBar.setDelegator(this); setJMenuBar(menuBar); mDesktop.addListener(menuBar); setContentPane(createContentPane()); pack(); } private JComponent createContentPane () { final JSplitPane contentPane = new JSplitPane(JSplitPane.HORIZONTAL_SPLIT); contentPane.setDividerSize(10); contentPane.setDividerLocation(638); contentPane.setOneTouchExpandable(true); final JPanel peripheralPane = new JPanel(new BorderLayout()); peripheralPane.setEnabled(false); peripheralPane.setMaximumSize(new Dimension(Short.MAX_VALUE, Short.MAX_VALUE)); peripheralPane.setMinimumSize(new Dimension(150, Short.MAX_VALUE)); contentPane.setLeftComponent(mDesktop.getPane()); contentPane.setRightComponent(peripheralPane); mProgressPane = ProgressPane.getInstance(); mProgressPane.setMaximumSize(new Dimension(Short.MAX_VALUE, Short.MAX_VALUE)); mProgressPane.setMinimumSize(new Dimension(0, 0)); mProgressPane.setPreferredSize(new Dimension(150, 510)); mDesktop.addListener(mProgressPane); mControlPane = ControlPane.getInstance(); mControlPane.setMaximumSize(new Dimension(Short.MAX_VALUE, 60)); mControlPane.setMinimumSize(new Dimension(0, 60)); mControlPane.setPreferredSize(new Dimension(150, 60)); peripheralPane.add(mProgressPane, BorderLayout.CENTER); peripheralPane.add(mControlPane, BorderLayout.SOUTH); return contentPane; } private void simulationRun () { try { for (SubFrame subFrame : getSubFrames()) subFrame.reloadJobViewer(); // TOOD for (ProgressCell cell : mProgressPane.getListCells()) cell.progressStarted(); SimulatorService service = new SimulatorService(this); final PhspSimulator simulator = new PhspSimulator(service, this, mDesktop); final 
PhspProgressMonitor monitor = new PhspProgressMonitor(simulator); simulator.addSimulationListener(new PhspSimulator.Listener() { @Override public void onSimulationStarted(PhspSimulator.Event evt) { mProgressPane.repaint(); } @Override public void onSimulationExited(PhspSimulator.Event evt) { mProgressPane.repaint(); PhspSimulator simulator = (PhspSimulator)evt.getSource(); try { monitor.stop(); //TODO ProgressPane progressView; Boolean result = simulator.get(); if (result) { JOptionPane.showMessageDialog(MainFrame.this, "Simulation completed", "Simulation completed", JOptionPane.PLAIN_MESSAGE); } else { } } catch (InterruptedException | ExecutionException | HeadlessException ex) { File logFile = simulator.getLogFile(); StringBuilder sb = new StringBuilder(); if (logFile != null) { try (FileInputStream fis = new FileInputStream(logFile); InputStreamReader isr = new InputStreamReader(fis, StandardCharsets.UTF_8); BufferedReader reader = new BufferedReader(isr)) { String line; while ((line = reader.readLine()) != null) { sb.append(line).append(System.getProperty("line.separator")); } } catch (IOException ex1) { Logger.getRootLogger().error(ex1.getMessage()); } } String detail = sb.toString(); MessageDialog.showMessageDialog(MainFrame.this, "The following error occurred during simulation:", detail, "Error on simulation", JOptionPane.ERROR_MESSAGE, null, new Object[]{" OK "}); } } }); for (SubFrame subFrame : getSubFrames()) simulator.addSimulationListener(subFrame); simulator.addSimulationListener(MenuBar.getInstance()); simulator.addSimulationListener(mControlPane); monitor.addPropertyChangeListener(new PropertyChangeListener(){ @Override public void propertyChange(PropertyChangeEvent e) { String propertyName = e.getPropertyName(); if ("progress".equals(propertyName)) { if (e instanceof PhspProgressMonitor.Event) { final PhspProgressMonitor.Event evt = (PhspProgressMonitor.Event)e; SwingUtilities.invokeLater(new Runnable() { @Override public void run() { String 
modelPath = (String)evt.getClientProperty("modelPath"); SubFrame subFrame = findSubFrame(modelPath); SimulationDao simulationDao = simulator.getSimulationDao(); TaskDao taskDao = simulationDao.obtainTask(new File(subFrame.getRelativeModelPath())); Progress progress = (Progress)evt.getNewValue(); Map<String, Number> target = (Map<String, Number>)evt.getClientProperty("target"); IProgressManager progressMgr = subFrame.getProgressManager(); int index = progressMgr.indexOf(target); progressMgr.setProgress(index, progress); if (taskDao.isCancelled()) progressMgr.setCancelled(index, rootPaneCheckingEnabled); int taskProgress = taskDao.getProgress(); ProgressCell cell = mProgressPane.getListCellOfModel(new File(modelPath)); String status; if (taskDao.isFinished()) { status = (taskDao.isCancelled())? "finished" : "completed"; cell.progressFinished(status, 0, 100, taskProgress); } else if (taskDao.isStarted()) { status = (taskDao.isCancelled())? "cancelling..." : taskProgress + " %"; cell.setProgress(status, 0, 100, taskProgress); } } }); } } } }); simulator.execute(); mSimulator = simulator; monitor.start(); } catch (IOException | ParserConfigurationException | PhspException | SQLException | SedmlException | TransformerException ex) { showErrorDialog(ex.getMessage(), "ERROR"); } } public boolean openPhsp(final File phspFile) { List<SubFrame> subFrames = getSubFrames(); int numberOfSubView = subFrames.size(); if (numberOfSubView > 0) { int ans = JOptionPane.showConfirmDialog(this, "Is it ok to close editing files?", "Close a file", JOptionPane.YES_NO_OPTION); if (ans != JOptionPane.YES_OPTION) return false; closeAll(); } mPhspFile = phspFile; try { setEditable(false); ModelLoaderLogger logger = new ModelLoaderLogger(mDesktop); PhspReader phspLoader = new PhspReader(phspFile); phspLoader.addPropertyChangeListener(new PhspReaderListener(logger)); phspLoader.addPropertyChangeListener(new PropertyChangeListener() { @Override public void propertyChange(PropertyChangeEvent evt) { 
String propertyName = evt.getPropertyName(); Object newValue = evt.getNewValue(); if ("state".equals(propertyName) && SwingWorker.StateValue.DONE.equals(newValue)) { mSession.updateRecentModels(phspFile); setEditable(true); } } }); phspLoader.execute(); return true; } catch (IOException ex) { setEditable(true); showErrorDialog(ex.getMessage(), "Error on load file of phsp"); return false; } } public boolean closeModel (SubFrame subFrame) { if (mSimulator != null && mSimulator.isStarted()) { JOptionPane.showMessageDialog(this, "Could not close the model. \n" + "It's running the simulation yet.", "Error on close the model", JOptionPane.INFORMATION_MESSAGE); return false; } if (subFrame == null) return false; if (!subFrame.isClosed()) mDesktop.removeDocument(subFrame.getDocument()); return true; } public void closeAll () { for (SubFrame subFrame : getSubFrames()) closeModel(subFrame); } public SubFrame getSelectedSubFrame () { return (SubFrame) mDesktop.getPane().getSelectedFrame(); } public List<SubFrame> getSubFrames() { return mDesktop.getSubFrames(); } public void setProgress(Object key, int progress) { if (key instanceof String) { String modelPath = (String)key; ProgressCell cell = mProgressPane.getListCellOfModel(new File(modelPath)); String msg = progress + " %"; cell.setProgress(msg, 0, 100, progress); return; } throw new IllegalArgumentException("key must be set model file path."); } public void setEditable (boolean editable) { List<SubFrame> subFrames = getSubFrames(); for (SubFrame subFrame : subFrames) subFrame.setEditable(editable); mControlPane.setSimulationRunEnabled(editable); } /* * Implements MenuBar Delegater */ @Override public void openPerformed (Object source) { FileChooser fc = new FileChooser(this, "Open model", FileChooser.Mode.LOAD, mSession.getLastPath()); JFileChooser jfc = fc.getJFileChooser(); if (jfc != null) { jfc.setAcceptAllFileFilterUsed(false); FileNameExtensionFilter xmlFilter = new FileNameExtensionFilter("XML files (*.xml)", 
"xml"); FileNameExtensionFilter modelFilter = new FileNameExtensionFilter("Model files (*.isml, *.phml, *.phz, *.sbml)", "isml", "phml", "phz", "sbml"); FileNameExtensionFilter phspFilter = new FileNameExtensionFilter("PHSP files (*.phsp)", "phsp"); jfc.addChoosableFileFilter(modelFilter); jfc.addChoosableFileFilter(phspFilter); jfc.addChoosableFileFilter(xmlFilter); jfc.addChoosableFileFilter(jfc.getAcceptAllFileFilter()); jfc.setFileFilter(modelFilter); } if (!fc.showDialog()) return; openModel(fc.getSelectedFile()); } @Override public void recentModelPerformed (Object source, File f) { openModel(f); } @Override public void closePerformed (Object source) { closeModel(getSelectedSubFrame()); } @Override public void loadConfigurationPerformed (Object source) { Preferences prefs = Preferences.userRoot().node("/jp/oist/flint"); String defaultPath = prefs.get("defaultConfigurationPath", ""); FileChooser fc = new FileChooser(this, "Open SED-ML file", FileChooser.Mode.LOAD, defaultPath); JFileChooser jfc = fc.getJFileChooser(); if (jfc != null) { jfc.setAcceptAllFileFilterUsed(false); FileNameExtensionFilter sedmlFilter = new FileNameExtensionFilter("SED-ML files (*.sedml, *.xml)", "sedml", "xml"); jfc.addChoosableFileFilter(sedmlFilter); jfc.addChoosableFileFilter(jfc.getAcceptAllFileFilter()); jfc.setFileFilter(sedmlFilter); } if (!fc.showDialog()) return; File file = fc.getSelectedFile(); SedmlReader reader = new SedmlReader(file); try { if (reader.parse()) { prefs.put("defaultConfigurationPath", file.getParent()); ISimulationConfigurationList configs = (ISimulationConfigurationList)reader.getHandler(); for (SubFrame subFrame : getSubFrames()) { ISimulationConfiguration config = configs.getConfigurationByModelPath(subFrame.getRelativeModelPath()); subFrame.load(config); } } } catch (IOException | ParserConfigurationException | SAXException e) { showErrorDialog(e.getMessage(), "Error on load configuration"); } } @Override public void saveConfigurationPerformed (Object 
source) { FileChooser fc = new FileChooser(this, "Select SED-ML file", FileChooser.Mode.SAVE, ""); if (fc.showDialog()) { final File file = fc.getSelectedFile(); if (file.exists()) { int ans = JOptionPane.showConfirmDialog(this, "Is it OK to replace the existing file?", "Replace the existing file?", JOptionPane.YES_NO_OPTION); if (ans != JOptionPane.YES_OPTION) return; } try (FileOutputStream fos = new FileOutputStream(file)) { SedmlWriter writer = new SedmlWriter(true); writer.writeSimulationConfiguration(this, fos); } catch (HeadlessException e) { showErrorDialog(e.getMessage(), "Error on saving configuration"); return; } catch (IOException | ArithmeticException | SedmlException e) { showErrorDialog(e.getMessage(), "Error on saving configuration"); return; } JOptionPane.showMessageDialog(this, "Saved configuration as " + file.getPath()); } } @Override public void saveAsPhspPerformed (Object source) { try { if (mDesktop.isEmpty()) { showErrorDialog("Please open the phml/sbml model.", "Error on saving phsp"); return; } FileChooser fc = new FileChooser(this, "Select PHSP file", FileChooser.Mode.SAVE, ""); JFileChooser jfc = fc.getJFileChooser(); FileNameExtensionFilter xmlFilter = new FileNameExtensionFilter("XML files (*.xml)", "xml"); FileNameExtensionFilter modelFilter = new FileNameExtensionFilter("PHSP files (*.phsp)", "phsp"); jfc.addChoosableFileFilter(modelFilter); jfc.addChoosableFileFilter(xmlFilter); jfc.addChoosableFileFilter(jfc.getAcceptAllFileFilter()); jfc.setFileFilter(modelFilter); File initFile = (mPhspFile == null)? 
new File("Untitled.phsp"): mPhspFile; jfc.setSelectedFile(initFile); if (fc.showDialog()) { File file = jfc.getSelectedFile(); if (!file.getName().endsWith(".phsp")) file = new File(file.getPath()+".phsp"); if (file.exists()) { int ans = JOptionPane.showConfirmDialog(this, "Is it OK to replace the existing file?", "Replace the existing file?", JOptionPane.YES_NO_OPTION); if (ans != JOptionPane.YES_OPTION) return; } setEnabled(false); try (FileOutputStream fos = new FileOutputStream(file)) { PhspWriter writer = new PhspWriter(); writer.write(mDesktop, fos, false); } JOptionPane.showMessageDialog(this, "Saved phsp as " + file.getPath()); } } catch (IOException | ParserConfigurationException | PhspException | TransformerException ex) { showErrorDialog(ex.getMessage(), "Saving as PHSP failed"); } finally { setEnabled(true); } } @Override public void exitPerformed (Object source) { if (mSimulator != null) mSimulator.cancel(true); System.exit(0); } @Override public void copyPerformed (Object source) { SubFrame subFrame = getSelectedSubFrame(); subFrame.copy(); } @Override public void cutPerformed (Object source) { SubFrame subFrame = getSelectedSubFrame(); subFrame.cut(); } @Override public void preferencePerformed (Object source) { PreferenceDialog ad = new PreferenceDialog(this, true); ad.setVisible(true); } @Override public void simulationRunPerformed(Object source) { simulationRun(); } @Override public void sendToK3Performed(Object source) { Preferences prefs = Preferences.userRoot().node("/jp/oist/flint/session/k3"); final String encryptedUserId = prefs.get("encryptedUserId", null); final String encryptedPassword = prefs.get("encryptedPassword", null); if (encryptedUserId == null || encryptedUserId.isEmpty() || encryptedPassword == null || encryptedPassword.isEmpty()) { StringBuilder sb = new StringBuilder("Please specify your account of Flint K3."); sb.append(System.getProperty("line.separator")); sb.append("(Edit -> Preference -> K3)"); 
showErrorDialog(sb.toString(), "Error on preference"); return; } SubFrame subFrame = getSelectedSubFrame(); final Object retval = JOptionPane.showInputDialog(this, "New job's title:", "New job's title", JOptionPane.QUESTION_MESSAGE, null, null, subFrame.getModelFile().getName()); if (retval == null) return; SwingWorker<Integer, Void> worker = new SwingWorker<Integer, Void>() { @Override protected Integer doInBackground() throws Exception { SubFrame subFrame = getSelectedSubFrame(); String jobName = (String)retval; String userId = Utility.decrypt(encryptedUserId); String passwd = Utility.decrypt(encryptedPassword); K3RequestBuilder reqBuilder = new K3RequestBuilder( subFrame.getModelFile(), subFrame); K3Request request = reqBuilder.build(jobName, userId, passwd); K3Client k3 = new K3Client(); return k3.submit(request); } @Override protected void done () { try { int jobId = get(); String message = String.format("Submitted successfully your job to Flint K3 (Job ID : %d)", jobId); String title = "Job submitted to Flint K3"; JOptionPane.showMessageDialog(MainFrame.this, message, title, JOptionPane.INFORMATION_MESSAGE); } catch (InterruptedException | ExecutionException ex) { showErrorDialog(ex.getMessage(), "Error on comminicating with Flint K3"); } } }; worker.execute(); } @Override public void aboutPerformed (Object source) { AboutDialog ad = new AboutDialog(this); ad.setVisible(true); } /* * Implements IFrame */ @Override public void showErrorDialog(String message, String title) { JOptionPane.showMessageDialog(this, message, title, JOptionPane.ERROR_MESSAGE); } @Override public void showErrorDialog(ByteString message, String title) { JOptionPane.showMessageDialog(this, message.toStringUtf8(), title, JOptionPane.ERROR_MESSAGE); } @Override public void appendLog(String s) { Date date = new Date(); SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); String d = sdf.format(date); Logger.getRootLogger().error("[" + d + "] " + s + 
System.getProperty("line.separator")); } public void notifyK3Enabled() { List<SubFrame> subFrames = getSubFrames(); for (SubFrame subFrame : subFrames) { subFrame.notifyK3Enabled(); } } public boolean openModel (final File file) { if (file == null) { // just ignore it return false; } if (!file.exists()) { String separator = System.getProperty("line.separator"); String msg = String.format("The file named \"%s\" " + separator + "does not exist.", file.getPath()); showErrorDialog(msg, "Error on opening model"); return false; } // check xml format String format = Utility.detectXMLFormat(file); if ("phsp".equals(format)) return openPhsp(file); // check if the file is opened. for (SubFrame child : getSubFrames()) { if (child.getModelFile().getPath().equals(file.getPath())) { try { child.setSelected(true); } catch (PropertyVetoException ex) { // ignored } return true; } } String path; try { path = file.getCanonicalPath(); } catch (IOException ex) { showErrorDialog("could not get canonical path : " + file.toString(), "Error on opening model"); return false; } if (!file.isFile()) { showErrorDialog("could not get canonical path : " + file.toString(), "Error on opening model"); return false; } int len = (int)file.length(); if (len == 0) { showErrorDialog("file has length 0 : " + path, "Error on opening model"); return false; } ModelLoaderLogger logger = new ModelLoaderLogger(mDesktop); setEditable(false); ModelLoader loader = new ModelLoader(file); loader.addPropertyChangeListener(new ModelFileLoaderListener(logger, loader)); loader.addPropertyChangeListener(new ModelLoaderProgressDialog(this, path)); loader.addPropertyChangeListener(new PropertyChangeListener() { @Override public void propertyChange(PropertyChangeEvent evt) { String propertyName = evt.getPropertyName(); Object newValue = evt.getNewValue(); if ("state".equals(propertyName) && SwingWorker.StateValue.DONE.equals(newValue)) { mSession.updateRecentModels(file); setEditable(true); } } }); loader.execute(); return 
true; } @Override public void openModel(final String name) { SwingUtilities.invokeLater(new Runnable() { @Override public void run() { Path path = Paths.get(name); openModel(path.toFile()); } }); } public void setPlotterSettingTabEnabled(String defaultPlotter) { SubFrame subFrame = this.getSelectedSubFrame(); if (subFrame != null) { subFrame.setPlotterSettingTabEnabled("gnuplot".equals(defaultPlotter)); } } /* * implements ISimulationConfigurationList */ @Override public ISimulationConfiguration getConfiguration(int index) { return getSubFrames().get(index); } @Override public ISimulationConfiguration getConfigurationByModelPath(String modelPath) { return findSubFrame(modelPath); } public SubFrame findSubFrame (String modelPath) { for (SubFrame subFrame : getSubFrames()) { if (modelPath.equals(subFrame.getRelativeModelPath())) return subFrame; } return null; } @Override public List<ISimulationConfiguration> toList() { List<ISimulationConfiguration> configs = new ArrayList<>(); for (SubFrame subFrame : getSubFrames()) configs.add(subFrame); return configs; } @Override public int getConfigurationCount () { return mDesktop.getSize(); } /* IDesktopListener */ @Override public void documentAdded(Document doc) { Preferences prefs = Preferences.userRoot().node("/jp/oist/flint"); String defaultPlotter = prefs.get("defaultPlotter", ""); setPlotterSettingTabEnabled(defaultPlotter); } @Override public void documentRemoved(Document doc, boolean empty) { if (empty) { setEditable(false); requestFocus(); } } }
drop unused field
flint/src/jp/oist/flint/form/MainFrame.java
drop unused field
<ide><path>lint/src/jp/oist/flint/form/MainFrame.java <ide> import javax.swing.SwingUtilities; <ide> import javax.swing.SwingWorker; <ide> import javax.swing.WindowConstants; <del>import javax.swing.event.EventListenerList; <ide> import javax.swing.filechooser.FileNameExtensionFilter; <ide> import javax.xml.parsers.ParserConfigurationException; <ide> import javax.xml.transform.TransformerException; <ide> <ide> private final Session mSession; <ide> <del> private final EventListenerList mEventListenerList; <del> <ide> private File mPhspFile = null; <ide> <ide> private PhspSimulator mSimulator = null; <ide> super(); <ide> mDesktop = desktop; <ide> mSession = session; <del> mEventListenerList = new EventListenerList(); <ide> URL iconUrl = getClass().getResource("/jp/oist/flint/image/icon.png"); <ide> setIconImage(new ImageIcon(iconUrl).getImage()); <ide> setTransferHandler(new ModelFileTransferHandler(this));
Java
apache-2.0
7e691897adafa5e5e847c259a88dfc81b8cec880
0
apache/commons-compress,apache/commons-compress,apache/commons-compress
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.commons.compress.archivers; import java.io.InputStream; import java.io.OutputStream; import java.util.Set; /** * Creates Archive {@link ArchiveInputStream}s and {@link ArchiveOutputStream}s. * * @since 1.13 */ public interface ArchiveStreamProvider { /** * Creates an archive input stream from an archiver name and an input * stream. * * @param name * the archive name, i.e. 
* {@value org.apache.commons.compress.archivers.ArchiveStreamFactory#AR}, * {@value org.apache.commons.compress.archivers.ArchiveStreamFactory#ARJ}, * {@value org.apache.commons.compress.archivers.ArchiveStreamFactory#ZIP}, * {@value org.apache.commons.compress.archivers.ArchiveStreamFactory#TAR}, * {@value org.apache.commons.compress.archivers.ArchiveStreamFactory#JAR}, * {@value org.apache.commons.compress.archivers.ArchiveStreamFactory#CPIO}, * {@value org.apache.commons.compress.archivers.ArchiveStreamFactory#DUMP} * or * {@value org.apache.commons.compress.archivers.ArchiveStreamFactory#SEVEN_Z} * @param in * the input stream * @param encoding * encoding name or null for the default * @return the archive input stream * @throws ArchiveException * if the archiver name is not known * @throws StreamingNotSupportedException * if the format cannot be read from a stream * @throws IllegalArgumentException * if the archiver name or stream is null */ ArchiveInputStream createArchiveInputStream(final String name, final InputStream in, final String encoding) throws ArchiveException; /** * Creates an archive output stream from an archiver name and an output * stream. * * @param archiverName * the archive name, i.e. 
* {@value org.apache.commons.compress.archivers.ArchiveStreamFactory#AR}, * {@value org.apache.commons.compress.archivers.ArchiveStreamFactory#ZIP}, * {@value org.apache.commons.compress.archivers.ArchiveStreamFactory#TAR}, * {@value org.apache.commons.compress.archivers.ArchiveStreamFactory#JAR} * or * {@value org.apache.commons.compress.archivers.ArchiveStreamFactory#CPIO} * @param out * the output stream * @param encoding * encoding name or null for the default * @return the archive output stream * @throws ArchiveException * if the archiver name is not known * @throws StreamingNotSupportedException * if the format cannot be written to a stream * @throws IllegalArgumentException * if the archiver name or stream is null */ ArchiveOutputStream createArchiveOutputStream(final String name, final OutputStream out, final String encoding) throws ArchiveException; /** * Gets all the input stream archive names for this provider * * @return all the input archive names for this provider */ Set<String> getInputStreamArchiveNames(); /** * Gets all the output stream archive names for this provider * * @return all the output archive names for this provider */ Set<String> getOutputStreamArchiveNames(); }
src/main/java/org/apache/commons/compress/archivers/ArchiveStreamProvider.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.commons.compress.archivers; import java.io.InputStream; import java.io.OutputStream; import java.util.Set; /** * Creates Archive {@link ArchiveInputStream}s and {@link ArchiveOutputStream}s. * * @since 1.13 */ public interface ArchiveStreamProvider { /** * Creates an archive input stream from an archiver name and an input * stream. * * @param archiverName * the archive name, i.e. {@value #AR}, {@value #ARJ}, * {@value #ZIP}, {@value #TAR}, {@value #JAR}, {@value #CPIO}, * {@value #DUMP} or {@value #SEVEN_Z} * @param in * the input stream * @param encoding, * or null for the default * @return the archive input stream * @throws ArchiveException * if the archiver name is not known * @throws StreamingNotSupportedException * if the format cannot be read from a stream * @throws IllegalArgumentException * if the archiver name or stream is null */ ArchiveInputStream createArchiveInputStream(final String name, final InputStream in, final String encoding) throws ArchiveException; /** * Creates an archive output stream from an archiver name and an output * stream. * * @param archiverName * the archive name, i.e. 
{@value #AR}, {@value #ZIP}, * {@value #TAR}, {@value #JAR} or {@value #CPIO} * @param out * the output stream * @param encoding, * or null for the default * @return the archive output stream * @throws ArchiveException * if the archiver name is not known * @throws StreamingNotSupportedException * if the format cannot be written to a stream * @throws IllegalArgumentException * if the archiver name or stream is null */ ArchiveOutputStream createArchiveOutputStream(final String name, final OutputStream out, final String encoding) throws ArchiveException; /** * Gets all the input stream archive names for this provider * * @return all the input archive names for this provider */ Set<String> getInputStreamArchiveNames(); /** * Gets all the output stream archive names for this provider * * @return all the output archive names for this provider */ Set<String> getOutputStreamArchiveNames(); }
Fix Javadoc 8 errors.
src/main/java/org/apache/commons/compress/archivers/ArchiveStreamProvider.java
Fix Javadoc 8 errors.
<ide><path>rc/main/java/org/apache/commons/compress/archivers/ArchiveStreamProvider.java <ide> * Creates an archive input stream from an archiver name and an input <ide> * stream. <ide> * <del> * @param archiverName <del> * the archive name, i.e. {@value #AR}, {@value #ARJ}, <del> * {@value #ZIP}, {@value #TAR}, {@value #JAR}, {@value #CPIO}, <del> * {@value #DUMP} or {@value #SEVEN_Z} <add> * @param name <add> * the archive name, i.e. <add> * {@value org.apache.commons.compress.archivers.ArchiveStreamFactory#AR}, <add> * {@value org.apache.commons.compress.archivers.ArchiveStreamFactory#ARJ}, <add> * {@value org.apache.commons.compress.archivers.ArchiveStreamFactory#ZIP}, <add> * {@value org.apache.commons.compress.archivers.ArchiveStreamFactory#TAR}, <add> * {@value org.apache.commons.compress.archivers.ArchiveStreamFactory#JAR}, <add> * {@value org.apache.commons.compress.archivers.ArchiveStreamFactory#CPIO}, <add> * {@value org.apache.commons.compress.archivers.ArchiveStreamFactory#DUMP} <add> * or <add> * {@value org.apache.commons.compress.archivers.ArchiveStreamFactory#SEVEN_Z} <ide> * @param in <ide> * the input stream <del> * @param encoding, <del> * or null for the default <add> * @param encoding <add> * encoding name or null for the default <ide> * @return the archive input stream <ide> * @throws ArchiveException <ide> * if the archiver name is not known <ide> * stream. <ide> * <ide> * @param archiverName <del> * the archive name, i.e. {@value #AR}, {@value #ZIP}, <del> * {@value #TAR}, {@value #JAR} or {@value #CPIO} <add> * the archive name, i.e. 
<add> * {@value org.apache.commons.compress.archivers.ArchiveStreamFactory#AR}, <add> * {@value org.apache.commons.compress.archivers.ArchiveStreamFactory#ZIP}, <add> * {@value org.apache.commons.compress.archivers.ArchiveStreamFactory#TAR}, <add> * {@value org.apache.commons.compress.archivers.ArchiveStreamFactory#JAR} <add> * or <add> * {@value org.apache.commons.compress.archivers.ArchiveStreamFactory#CPIO} <ide> * @param out <ide> * the output stream <del> * @param encoding, <del> * or null for the default <add> * @param encoding <add> * encoding name or null for the default <ide> * @return the archive output stream <ide> * @throws ArchiveException <ide> * if the archiver name is not known
Java
mit
5b7763e4bac320ba586bbed338517f25b59c6336
0
ewanld/fjdbc-codegen
package fjdbc.codegen; import java.io.Closeable; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.io.Writer; import java.sql.SQLException; import java.sql.Types; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Map; import org.apache.commons.lang.StringUtils; import com.github.stream4j.Function; import com.github.stream4j.Stream; import fjdbc.codegen.DbUtil.ColumnDescriptor; import fjdbc.codegen.DbUtil.TableDescriptor; public class CodeGenerator { private final DbUtil dbUtil; private final Map<Integer, JdbcType> jdbcTypeMap; private final String packageName; private final String outputDir; //writers private Writer dto; private Writer sequences; public CodeGenerator(DbUtil dbUtil, String outputDir, String packageName) { this.dbUtil = dbUtil; this.outputDir = outputDir; this.packageName = packageName; // see http://www.tutorialspoint.com/jdbc/jdbc-data-types.htm // see http://docs.oracle.com/javase/1.5.0/docs/guide/jdbc/getstart/mapping.html final Collection<JdbcType> jdbcTypes = new ArrayList<JdbcType>(); //@formatter:off jdbcTypes.add(new JdbcType(Types.VARCHAR , "String" , "String" , "FieldString" )); jdbcTypes.add(new JdbcType(Types.CHAR , "String" , "String" , "FieldString" )); jdbcTypes.add(new JdbcType(Types.LONGNVARCHAR, "String" , "String" , "FieldString" )); jdbcTypes.add(new JdbcType(Types.BIT , "boolean" , "Boolean" , "FieldString" )); // TODO jdbcTypes.add(new JdbcType(Types.NUMERIC , "java.math.BigDecimal", "BigDecimal", "FieldBigDecimal")); jdbcTypes.add(new JdbcType(Types.DECIMAL , "java.math.BigDecimal", "BigDecimal", "FieldBigDecimal")); jdbcTypes.add(new JdbcType(Types.TINYINT , "byte" , "Byte" , "FieldBigDecimal")); jdbcTypes.add(new JdbcType(Types.SMALLINT , "short" , "Short" , "FieldBigDecimal")); jdbcTypes.add(new JdbcType(Types.INTEGER , "int" , "Int" , "FieldBigDecimal")); jdbcTypes.add(new JdbcType(Types.BIGINT , "long" , 
"Long" , "FieldBigDecimal")); jdbcTypes.add(new JdbcType(Types.REAL , "float" , "Float" , "FieldString" )); // TODO jdbcTypes.add(new JdbcType(Types.DOUBLE , "double" , "Double" , "FieldString" )); // TODO jdbcTypes.add(new JdbcType(Types.VARBINARY , "byte[]" , "Bytes" , "FieldString" )); // TODO jdbcTypes.add(new JdbcType(Types.BINARY , "byte[]" , "Bytes" , "FieldString" )); // TODO jdbcTypes.add(new JdbcType(Types.DATE , "java.sql.Date" , "Date" , "FieldString" )); // TODO jdbcTypes.add(new JdbcType(Types.TIME , "java.sql.Time" , "Time" , "FieldString" )); // TODO jdbcTypes.add(new JdbcType(Types.TIMESTAMP , "java.sql.Timestamp" , "Timestamp" , "FieldTimestamp" )); jdbcTypes.add(new JdbcType(Types.CLOB , "java.sql.Clob" , "Clob" , "FieldString" )); // TODO jdbcTypes.add(new JdbcType(Types.BLOB , "java.sql.Blob" , "Blob" , "FieldString" )); // TODO jdbcTypes.add(new JdbcType(Types.ARRAY , "java.sql.Array" , "ARRAY" , "FieldString" )); // TODO jdbcTypes.add(new JdbcType(Types.REF , "java.sql.Ref" , "Ref" , "FieldString" )); // TODO jdbcTypes.add(new JdbcType(Types.STRUCT , "java.sql.Struct" , "Struct" , "FieldString" )); // TODO jdbcTypes.add(new JdbcType(Types.OTHER , "Object" , "Object" , "FieldString" )); //@formatter:on jdbcTypeMap = Stream.of(jdbcTypes).toMap(JdbcType.getJdbcType); } private void write_dto(String format, Object... args) throws IOException { final String s = args.length == 0 ? format : String.format(format, args); dto.write(s); dto.write("\n"); } public void gen_dto_header() throws IOException { write_dto("package %s;", packageName); write_dto(""); } public JdbcType getJdbcType(int type) { final JdbcType jdbcType = jdbcTypeMap.get(type); if (jdbcType == null) System.out.println("Warning: unknown jdbc type: " + type); final JdbcType res = jdbcType == null ? 
new JdbcType(type, "Object", "Object", "FieldObject") : jdbcType; return res; } public void gen() throws SQLException, IOException { final String sourceDir = outputDir + "/" + packageName.replace('.', '/'); new File(sourceDir).mkdirs(); final TablesGenerator tbl = new TablesGenerator(new FileWriter(sourceDir + "/Tables.java")); this.dto = new FileWriter(sourceDir + "/Dto.java"); this.sequences = new FileWriter(sourceDir + "/Sequences.java"); final Collection<TableDescriptor> _tables = dbUtil.searchTables(); tbl.gen_header(); gen_dto_header(); //@formatter:off // class Dto write_dto("public class Dto {"); // class Tables tbl.write("public class Tables {"); // fields for (final TableDescriptor table : _tables) { tbl.write(" public final %s_Dao %s;", table.getName(), table.getName().toLowerCase()); } tbl.write(" "); // constructor Tables tbl.write(" public Tables(Connection cnx) {"); for (final TableDescriptor table : _tables) { tbl.write(" %s = new %s_Dao(cnx);", table.getName().toLowerCase(), table.getName()); } tbl.write(" }"); for (final TableDescriptor table : _tables) { final Collection<ColumnDescriptor> columns = dbUtil.searchColumns(table.getName()); // class TABLE write_dto(" public static class %s {", table.getName()); // field column from class TABLE for (final ColumnDescriptor col : columns) { final JdbcType type = getJdbcType(col.getType()); if (type == null) throw new RuntimeException(String.format("Unknown type: %s", col.getType())); write_dto(" public %s %s;", type.getJavaType(), col.getName().toLowerCase()); } // TABLE constructor final List<String> colDefs = Stream.of(columns).map(new Function<ColumnDescriptor, String>() { @Override public String apply(ColumnDescriptor t) { final JdbcType jdbcType = getJdbcType(t.getType()); final String javaType = jdbcType == null ? 
"Object" : jdbcType.getJavaType(); return String.format("%s %s", javaType, t.getName().toLowerCase()); } }).toList(); write_dto(" public %s(%s) {", table.getName(), StringUtils.join(colDefs.iterator(), ", ")); for (final ColumnDescriptor col : columns) { write_dto(" this.%s = %s;", col.getName().toLowerCase(), col.getName().toLowerCase()); } write_dto(" }"); write_dto(" }\n"); // class TABLE_Dao tbl.write(" public static class %s_Dao extends Dao {", table.getName()); tbl.write(" private Connection cnx;"); // enum Field for (final ColumnDescriptor col : columns) { final JdbcType type = getJdbcType(col.getType()); tbl.write(" public final %s %s = new %s(\"%s\");", type.getFieldClassName(), col.getName().toLowerCase(), type.getFieldClassName(), col.getName()); } tbl.write(" "); tbl.gen_TABLE_Dao(table); tbl.gen_search(table, columns); tbl.gen_search2(table); if (!table.isReadOnly()) { tbl.gen_update(table); tbl.gen_delete(table); tbl.gen_merge(table, columns); tbl.gen_insert(table, columns); tbl.gen_insert2(table, columns); tbl.gen_insertBatch(table, columns); } // end class TABLE_Dao tbl.write(" }\n"); } // end class Tables tbl.write("}\n"); // end class Dto write_dto("}\n"); //@formatter:on tbl.close(); dto.close(); sequences.close(); } private class TablesGenerator implements Closeable { private final Writer wrapped; public TablesGenerator(Writer wrapped) { this.wrapped = wrapped; } public void write(String format, Object... args) throws IOException { final String s = args.length == 0 ? 
format : String.format(format, args); wrapped.write(s); wrapped.write("\n"); } public void gen_header() throws IOException { write("package %s;", packageName); write(""); write("import java.util.List;"); write("import java.util.Collection;"); write("import java.util.ArrayList;"); write("import java.sql.*;"); write("import com.github.stream4j.Consumer;"); write("import com.github.stream4j.Stream;"); write("import fjdbc.codegen.DaoUtil;"); write("import fjdbc.codegen.DaoUtil.*;"); write("import fjdbc.codegen.Condition;"); write("import fjdbc.codegen.SqlFragment;"); write("import fjdbc.codegen.SqlExpr;"); write("import %s.Dto.*;", packageName); write(""); } public void gen_insert2(final TableDescriptor table, final Collection<ColumnDescriptor> columns) throws IOException { final List<String> colNames = Stream.of(columns).map(ColumnDescriptor.getName).toList(); //@formatter:off write(" public int insert("); boolean first = true; for (final ColumnDescriptor col : columns) { final JdbcType type = getJdbcType(col.getType()); write(" %s SqlExpr<%s> _%s", first ? " " : ",", type.getJavaType(), col.getName().toLowerCase()); first = false; } write(" ) {"); write(" PreparedStatement st = null;"); write(" final StringBuilder sql = new StringBuilder(\"insert into %s(%s) values(\");", table.getName(), StringUtils.join(colNames.iterator(), ", ")); first = true; for (final ColumnDescriptor col : columns) { write(" sql.%sappend(_%s.toSql());", first ? 
"" : "append(\", \").", col.getName().toLowerCase()); first = false; } write(" sql.append(\")\");"); write(" try {"); write(" st = cnx.prepareStatement(sql.toString());"); write(" Sequence parameterIndex = new Sequence(1);"); for (final ColumnDescriptor col : columns) { write(" _%s.bind(st, parameterIndex);", col.getName().toLowerCase()); } write(" final int nRows = st.executeUpdate();"); write(" cnx.commit();"); write(" return nRows;"); write(" } catch (SQLException e) {"); write(" throw new RuntimeException(e);"); write(" } finally {"); write(" DaoUtil.close(st);"); write(" }"); write(" }"); //@formatter:on } public void gen_insert(final TableDescriptor table, final Collection<ColumnDescriptor> columns) throws IOException { final List<String> colNames = Stream.of(columns).map(ColumnDescriptor.getName).toList(); //@formatter:off write(" public int insert(%s _value) {", table.getName()); write(" PreparedStatement st = null;"); write(" final String sql = \"insert into %s(%s) values(%s)\";", table.getName(), StringUtils.join(colNames.iterator(), ", "), StringUtils.join(Collections.nCopies(columns.size(), "?").iterator(), ", ")); write(" try {"); write(" st = cnx.prepareStatement(sql);"); int index = 1; for (final ColumnDescriptor col : columns) { final JdbcType type = getJdbcType(col.getType()); write(" st.%-13s(%3s, _value.%s);", type.getSetterMethodName(), index++, col.getName() .toLowerCase()); } write(" final int nRows = st.executeUpdate();"); write(" cnx.commit();"); write(" return nRows;"); write(" } catch (SQLException e) {"); write(" throw new RuntimeException(e);"); write(" } finally {"); write(" DaoUtil.close(st);"); write(" }"); write(" }"); //@formatter:on } public void gen_merge(final TableDescriptor table, final Collection<ColumnDescriptor> columns) throws IOException { final List<String> colNames = Stream.of(columns).map(ColumnDescriptor.getName).toList(); Collection<ColumnDescriptor> pk = 
Stream.of(columns).filter(ColumnDescriptor.isPrimaryKey).toList(); if (pk.size() == 0) pk = columns; final List<ColumnDescriptor> nonPk = Stream.of(columns).filter(ColumnDescriptor.isPrimaryKey.negate()) .toList(); final List<String> pkAssignments = Stream.of(pk).map(new Function<ColumnDescriptor, String>() { @Override public String apply(ColumnDescriptor t) { return t.getName() + " = ?"; } }).toList(); final List<String> nonPkAssignments = Stream.of(nonPk).map(new Function<ColumnDescriptor, String>() { @Override public String apply(ColumnDescriptor t) { return t.getName() + " = ?"; } }).toList(); //@formatter:off write(" public int merge(%s _value) {", table.getName()); write(" final String sql ="); write(" \" merge into %s using dual on (%s)\"", table.getName(), StringUtils.join(pkAssignments.iterator(), " and ")); if (pk.size() < columns.size()) { write(" + \" when matched then update set %s\"", StringUtils.join(nonPkAssignments.iterator(), ", ")); } write(" + \" when not matched then insert (%s) values (%s)\";", StringUtils.join(colNames.iterator(), ", "), StringUtils.join(Collections.nCopies(columns.size(), "?").iterator(), ", ")); write(" PreparedStatement st = null;"); write(" try {"); write(" st = cnx.prepareStatement(sql);"); int index = 1; for (final ColumnDescriptor col : pk) { final JdbcType type = getJdbcType(col.getType()); write(" st.%-13s(%3s, _value.%s);", type.getSetterMethodName(), index++, col.getName() .toLowerCase()); } if (pk.size() < columns.size()) { for (final ColumnDescriptor col : nonPk) { final JdbcType type = getJdbcType(col.getType()); write(" st.%-13s(%3s, _value.%s);", type.getSetterMethodName(), index++, col.getName() .toLowerCase()); } } for (final ColumnDescriptor col : columns) { final JdbcType type = getJdbcType(col.getType()); write(" st.%-13s(%3s, _value.%s);", type.getSetterMethodName(), index++, col.getName() .toLowerCase()); } write(" final int nRows = st.executeUpdate();"); write(" cnx.commit();"); write(" return 
nRows;"); write(" } catch (SQLException e) {"); write(" throw new RuntimeException(e);"); write(" } finally {"); write(" DaoUtil.close(st);"); write(" }"); write(" }\n"); //@formatter:on } public void gen_delete(final TableDescriptor table) throws IOException { //@formatter:off write(" public int delete(Condition condition) {"); write(" int res = DaoUtil.delete(cnx, \"%s\", condition);", table.getName()); write(" return res;"); write(" }\n"); //@formatter:ofn } public void gen_update(final TableDescriptor table) throws IOException { //@formatter:off write(" public int update(Collection<UpdateSetClause> updates, Condition condition) {", table.getName()); write(" assert updates != null;"); write(" assert updates.size() >= 1;"); write(" PreparedStatement st = null;"); write(" final StringBuilder sql = new StringBuilder();"); write(" sql.append(\"update %s set \");", table.getName()); write(" final List<String> updates_str = Stream.of(updates).map(SqlFragment.toSql).toList();"); write(" sql.append(DaoUtil.join(updates_str.iterator(), \", \"));"); write(" if (condition != null) sql.append(\" where \").append(condition.toSql());"); write(" final Sequence parameterIndex = new Sequence(1);"); write(" try {"); write(" st = cnx.prepareStatement(sql.toString());", table.getName()); write(" for (UpdateSetClause update : updates) {"); write(" update.bind(st, parameterIndex);"); write(" }"); write(" if (condition != null) condition.bind(st, parameterIndex);", table.getName()); write(" final int nRows = st.executeUpdate();"); write(" cnx.commit();"); write(" return nRows;"); write(" } catch (SQLException e) {"); write(" throw new RuntimeException(e);"); write(" } finally {"); write(" DaoUtil.close(st);"); write(" }"); write(" }\n"); //@formatter:on } public void gen_search2(final TableDescriptor table) throws IOException { //@formatter:off write(" public List<%s> search(Condition condition, Collection<OrderByClause> orderBy) {", table.getName(), table.getName(), table.getName()); 
write(" List<%s> res = new ArrayList<%s>();", table.getName(), table.getName()); write(" search(condition, orderBy, DaoUtil.toList(res));"); write(" return res;"); write(" }\n"); //@formatter:on } public void gen_search(final TableDescriptor table, final Collection<ColumnDescriptor> columns) throws IOException { final List<String> colNames = Stream.of(columns).map(ColumnDescriptor.getName).toList(); //@formatter:off write(" public void search(Condition condition, Collection<OrderByClause> orderBy, Consumer<%s> callback) {", table.getName(), table.getName(), table.getName()); write(" PreparedStatement st = null;"); write(" final StringBuilder query = new StringBuilder();"); write(" query.append(\"select * from %s\");", table.getName()); write(" if (condition != null) query.append(\" where \").append(condition.toSql());"); write(" if (orderBy != null) {"); write(" query.append(\" order by \");"); write(" final List<String> orderBy_str = Stream.of(orderBy).map(OrderByClause.toSql).toList();"); write(" query.append(DaoUtil.join(orderBy_str.iterator(), \", \"));"); write(" }"); write(" try {"); write(" st = cnx.prepareStatement(query.toString());", table.getName()); write(" if (condition != null) condition.bind(st, new Sequence(1));", table.getName()); write(" final ResultSet rs = st.executeQuery();"); write(" while(rs.next()) {"); for (final ColumnDescriptor col : columns) { final JdbcType type = getJdbcType(col.getType()); write(" final %-10s %-20s = rs.%-13s(\"%s\");", type.getJavaType(), col.getName(), type.getGetterMethodName(), col.getName()); } write(" final %s obj = new %s(%s);", table.getName(), table.getName(), StringUtils.join(colNames.iterator(), ", ")); write(" callback.accept(obj);"); write(" }"); write(" rs.close();"); write(" } catch (SQLException e) {"); write(" throw new RuntimeException(e);"); write(" } finally {"); write(" DaoUtil.close(st);"); write(" }"); write(" }\n"); //@formatter:on } public void gen_TABLE_Dao(final TableDescriptor table) throws 
IOException { //@formatter:off write(" public %s_Dao(Connection cnx) {", table.getName()); write(" super(cnx, \"%s\");", table.getName()); write(" this.cnx = cnx;"); write(" }\n"); //@formatter:on } public void gen_insertBatch(final TableDescriptor table, final Collection<ColumnDescriptor> columns) throws IOException { final List<String> colNames = Stream.of(columns).map(ColumnDescriptor.getName).toList(); //@formatter:off write(" public int[] insertBatch(Iterable<%s> _values) {", table.getName()); write(" PreparedStatement st = null;"); write(" final String sql = \"insert into %s(%s) values(%s)\";", table.getName(), StringUtils.join(colNames.iterator(), ", "), StringUtils.join(Collections.nCopies(columns.size(), "?").iterator(), ", ")); write(" try {"); write(" st = cnx.prepareStatement(sql);"); write(" for (%s _value : _values) {", table.getName()); int index = 1; for (final ColumnDescriptor col : columns) { final JdbcType type = getJdbcType(col.getType()); write(" st.%-13s(%3s, _value.%s);", type.getSetterMethodName(), index++, col.getName() .toLowerCase()); } write(" st.addBatch();"); write(" }"); write(" final int[] nRows = st.executeBatch();"); write(" cnx.commit();"); write(" return nRows;"); write(" } catch (SQLException e) {"); write(" throw new RuntimeException(e);"); write(" } finally {"); write(" DaoUtil.close(st);"); write(" }"); write(" }"); //@formatter:on } @Override public void close() throws IOException { wrapped.close(); } } private static class JdbcType { private final int jdbcType; private final String javaType; private final String identifier; private final String fieldClassName; public JdbcType(int jdbcType, String javaType, String identifier, String fieldClassName) { this.jdbcType = jdbcType; this.javaType = javaType; this.identifier = identifier; this.fieldClassName = fieldClassName; } public int getJdbcType() { return jdbcType; } public String getJavaType() { return javaType; } public static final Function<JdbcType, Integer> getJdbcType = 
new Function<JdbcType, Integer>() { @Override public Integer apply(JdbcType t) { return t.getJdbcType(); } }; public String getSetterMethodName() { return String.format("set%s", identifier); } public String getGetterMethodName() { return String.format("get%s", identifier); } public String getFieldClassName() { return fieldClassName; } } }
src/fjdbc/codegen/CodeGenerator.java
package fjdbc.codegen; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.io.Writer; import java.sql.SQLException; import java.sql.Types; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Map; import org.apache.commons.lang.StringUtils; import com.github.stream4j.Function; import com.github.stream4j.Stream; import fjdbc.codegen.DbUtil.ColumnDescriptor; import fjdbc.codegen.DbUtil.TableDescriptor; public class CodeGenerator { private final DbUtil dbUtil; private final Map<Integer, JdbcType> jdbcTypeMap; private final String packageName; private final String outputDir; //writers private Writer tables; private Writer dto; private Writer sequences; public CodeGenerator(DbUtil dbUtil, String outputDir, String packageName) { this.dbUtil = dbUtil; this.outputDir = outputDir; this.packageName = packageName; // see http://www.tutorialspoint.com/jdbc/jdbc-data-types.htm // see http://docs.oracle.com/javase/1.5.0/docs/guide/jdbc/getstart/mapping.html final Collection<JdbcType> jdbcTypes = new ArrayList<JdbcType>(); //@formatter:off jdbcTypes.add(new JdbcType(Types.VARCHAR , "String" , "String" , "FieldString" )); jdbcTypes.add(new JdbcType(Types.CHAR , "String" , "String" , "FieldString" )); jdbcTypes.add(new JdbcType(Types.LONGNVARCHAR, "String" , "String" , "FieldString" )); jdbcTypes.add(new JdbcType(Types.BIT , "boolean" , "Boolean" , "FieldString" )); // TODO jdbcTypes.add(new JdbcType(Types.NUMERIC , "java.math.BigDecimal", "BigDecimal", "FieldBigDecimal")); jdbcTypes.add(new JdbcType(Types.DECIMAL , "java.math.BigDecimal", "BigDecimal", "FieldBigDecimal")); jdbcTypes.add(new JdbcType(Types.TINYINT , "byte" , "Byte" , "FieldBigDecimal")); jdbcTypes.add(new JdbcType(Types.SMALLINT , "short" , "Short" , "FieldBigDecimal")); jdbcTypes.add(new JdbcType(Types.INTEGER , "int" , "Int" , "FieldBigDecimal")); jdbcTypes.add(new JdbcType(Types.BIGINT , "long" , "Long" 
, "FieldBigDecimal")); jdbcTypes.add(new JdbcType(Types.REAL , "float" , "Float" , "FieldString" )); // TODO jdbcTypes.add(new JdbcType(Types.DOUBLE , "double" , "Double" , "FieldString" )); // TODO jdbcTypes.add(new JdbcType(Types.VARBINARY , "byte[]" , "Bytes" , "FieldString" )); // TODO jdbcTypes.add(new JdbcType(Types.BINARY , "byte[]" , "Bytes" , "FieldString" )); // TODO jdbcTypes.add(new JdbcType(Types.DATE , "java.sql.Date" , "Date" , "FieldString" )); // TODO jdbcTypes.add(new JdbcType(Types.TIME , "java.sql.Time" , "Time" , "FieldString" )); // TODO jdbcTypes.add(new JdbcType(Types.TIMESTAMP , "java.sql.Timestamp" , "Timestamp" , "FieldTimestamp" )); jdbcTypes.add(new JdbcType(Types.CLOB , "java.sql.Clob" , "Clob" , "FieldString" )); // TODO jdbcTypes.add(new JdbcType(Types.BLOB , "java.sql.Blob" , "Blob" , "FieldString" )); // TODO jdbcTypes.add(new JdbcType(Types.ARRAY , "java.sql.Array" , "ARRAY" , "FieldString" )); // TODO jdbcTypes.add(new JdbcType(Types.REF , "java.sql.Ref" , "Ref" , "FieldString" )); // TODO jdbcTypes.add(new JdbcType(Types.STRUCT , "java.sql.Struct" , "Struct" , "FieldString" )); // TODO jdbcTypes.add(new JdbcType(Types.OTHER , "Object" , "Object" , "FieldString" )); //@formatter:on jdbcTypeMap = Stream.of(jdbcTypes).toMap(JdbcType.getJdbcType); } private void write_dto(String format, Object... args) throws IOException { final String s = args.length == 0 ? format : String.format(format, args); dto.write(s); dto.write("\n"); } public void gen_dto_header() throws IOException { write_dto("package %s;", packageName); write_dto(""); } public JdbcType getJdbcType(int type) { final JdbcType jdbcType = jdbcTypeMap.get(type); if (jdbcType == null) System.out.println("Warning: unknown jdbc type: " + type); final JdbcType res = jdbcType == null ? 
new JdbcType(type, "Object", "Object", "FieldObject") : jdbcType; return res; } public void gen() throws SQLException, IOException { final String sourceDir = outputDir + "/" + packageName.replace('.', '/'); new File(sourceDir).mkdirs(); this.tables = new FileWriter(sourceDir + "/Tables.java"); final TablesGenerator tbl = new TablesGenerator(tables); this.dto = new FileWriter(sourceDir + "/Dto.java"); this.sequences = new FileWriter(sourceDir + "/Sequences.java"); final Collection<TableDescriptor> _tables = dbUtil.searchTables(); tbl.gen_dao_header(); gen_dto_header(); //@formatter:off // class Dto write_dto("public class Dto {"); // class Tables tbl.write_tables("public class Tables {"); // fields for (final TableDescriptor table : _tables) { tbl.write_tables(" public final %s_Dao %s;", table.getName(), table.getName().toLowerCase()); } tbl.write_tables(" "); // constructor Tables tbl.write_tables(" public Tables(Connection cnx) {"); for (final TableDescriptor table : _tables) { tbl.write_tables(" %s = new %s_Dao(cnx);", table.getName().toLowerCase(), table.getName()); } tbl.write_tables(" }"); for (final TableDescriptor table : _tables) { final Collection<ColumnDescriptor> columns = dbUtil.searchColumns(table.getName()); // class TABLE write_dto(" public static class %s {", table.getName()); // field column from class TABLE for (final ColumnDescriptor col : columns) { final JdbcType type = getJdbcType(col.getType()); if (type == null) throw new RuntimeException(String.format("Unknown type: %s", col.getType())); write_dto(" public %s %s;", type.getJavaType(), col.getName().toLowerCase()); } // TABLE constructor final List<String> colDefs = Stream.of(columns).map(new Function<ColumnDescriptor, String>() { @Override public String apply(ColumnDescriptor t) { final JdbcType jdbcType = getJdbcType(t.getType()); final String javaType = jdbcType == null ? 
"Object" : jdbcType.getJavaType(); return String.format("%s %s", javaType, t.getName().toLowerCase()); } }).toList(); write_dto(" public %s(%s) {", table.getName(), StringUtils.join(colDefs.iterator(), ", ")); for (final ColumnDescriptor col : columns) { write_dto(" this.%s = %s;", col.getName().toLowerCase(), col.getName().toLowerCase()); } write_dto(" }"); write_dto(" }\n"); // class TABLE_Dao tbl.write_tables(" public static class %s_Dao extends Dao {", table.getName()); tbl.write_tables(" private Connection cnx;"); // enum Field for (final ColumnDescriptor col : columns) { final JdbcType type = getJdbcType(col.getType()); tbl.write_tables(" public final %s %s = new %s(\"%s\");", type.getFieldClassName(), col.getName().toLowerCase(), type.getFieldClassName(), col.getName()); } tbl.write_tables(" "); tbl.gen_TABLE_Dao(table); tbl.gen_search(table, columns); tbl.gen_search2(table); if (!table.isReadOnly()) { tbl.gen_update(table); tbl.gen_delete(table); tbl.gen_merge(table, columns); tbl.gen_insert(table, columns); tbl.gen_insert2(table, columns); tbl.gen_insertBatch(table, columns); } // end class TABLE_Dao tbl.write_tables(" }\n"); } // end class Tables tbl.write_tables("}\n"); // end class Dto write_dto("}\n"); //@formatter:on tables.close(); dto.close(); sequences.close(); } private class TablesGenerator { private final Writer wrapped; public TablesGenerator(Writer wrapped) { this.wrapped = wrapped; } public void write_tables(String format, Object... args) throws IOException { final String s = args.length == 0 ? 
format : String.format(format, args); wrapped.write(s); wrapped.write("\n"); } public void gen_dao_header() throws IOException { write_tables("package %s;", packageName); write_tables(""); write_tables("import java.util.List;"); write_tables("import java.util.Collection;"); write_tables("import java.util.ArrayList;"); write_tables("import java.sql.*;"); write_tables("import com.github.stream4j.Consumer;"); write_tables("import com.github.stream4j.Stream;"); write_tables("import fjdbc.codegen.DaoUtil;"); write_tables("import fjdbc.codegen.DaoUtil.*;"); write_tables("import fjdbc.codegen.Condition;"); write_tables("import fjdbc.codegen.SqlFragment;"); write_tables("import fjdbc.codegen.SqlExpr;"); write_tables("import %s.Dto.*;", packageName); write_tables(""); } public void gen_insert2(final TableDescriptor table, final Collection<ColumnDescriptor> columns) throws IOException { final List<String> colNames = Stream.of(columns).map(ColumnDescriptor.getName).toList(); //@formatter:off write_tables(" public int insert("); boolean first = true; for (final ColumnDescriptor col : columns) { final JdbcType type = getJdbcType(col.getType()); write_tables(" %s SqlExpr<%s> _%s", first ? " " : ",", type.getJavaType(), col.getName().toLowerCase()); first = false; } write_tables(" ) {"); write_tables(" PreparedStatement st = null;"); write_tables(" final StringBuilder sql = new StringBuilder(\"insert into %s(%s) values(\");", table.getName(), StringUtils.join(colNames.iterator(), ", ")); first = true; for (final ColumnDescriptor col : columns) { write_tables(" sql.%sappend(_%s.toSql());", first ? 
"" : "append(\", \").", col.getName().toLowerCase()); first = false; } write_tables(" sql.append(\")\");"); write_tables(" try {"); write_tables(" st = cnx.prepareStatement(sql.toString());"); write_tables(" Sequence parameterIndex = new Sequence(1);"); for (final ColumnDescriptor col : columns) { write_tables(" _%s.bind(st, parameterIndex);", col.getName().toLowerCase()); } write_tables(" final int nRows = st.executeUpdate();"); write_tables(" cnx.commit();"); write_tables(" return nRows;"); write_tables(" } catch (SQLException e) {"); write_tables(" throw new RuntimeException(e);"); write_tables(" } finally {"); write_tables(" DaoUtil.close(st);"); write_tables(" }"); write_tables(" }"); //@formatter:on } public void gen_insert(final TableDescriptor table, final Collection<ColumnDescriptor> columns) throws IOException { final List<String> colNames = Stream.of(columns).map(ColumnDescriptor.getName).toList(); //@formatter:off write_tables(" public int insert(%s _value) {", table.getName()); write_tables(" PreparedStatement st = null;"); write_tables(" final String sql = \"insert into %s(%s) values(%s)\";", table.getName(), StringUtils.join(colNames.iterator(), ", "), StringUtils.join(Collections.nCopies(columns.size(), "?").iterator(), ", ")); write_tables(" try {"); write_tables(" st = cnx.prepareStatement(sql);"); int index = 1; for (final ColumnDescriptor col : columns) { final JdbcType type = getJdbcType(col.getType()); write_tables(" st.%-13s(%3s, _value.%s);", type.getSetterMethodName(), index++, col.getName() .toLowerCase()); } write_tables(" final int nRows = st.executeUpdate();"); write_tables(" cnx.commit();"); write_tables(" return nRows;"); write_tables(" } catch (SQLException e) {"); write_tables(" throw new RuntimeException(e);"); write_tables(" } finally {"); write_tables(" DaoUtil.close(st);"); write_tables(" }"); write_tables(" }"); //@formatter:on } public void gen_merge(final TableDescriptor table, final Collection<ColumnDescriptor> columns) 
throws IOException { final List<String> colNames = Stream.of(columns).map(ColumnDescriptor.getName).toList(); Collection<ColumnDescriptor> pk = Stream.of(columns).filter(ColumnDescriptor.isPrimaryKey).toList(); if (pk.size() == 0) pk = columns; final List<ColumnDescriptor> nonPk = Stream.of(columns).filter(ColumnDescriptor.isPrimaryKey.negate()) .toList(); final List<String> pkAssignments = Stream.of(pk).map(new Function<ColumnDescriptor, String>() { @Override public String apply(ColumnDescriptor t) { return t.getName() + " = ?"; } }).toList(); final List<String> nonPkAssignments = Stream.of(nonPk).map(new Function<ColumnDescriptor, String>() { @Override public String apply(ColumnDescriptor t) { return t.getName() + " = ?"; } }).toList(); //@formatter:off write_tables(" public int merge(%s _value) {", table.getName()); write_tables(" final String sql ="); write_tables(" \" merge into %s using dual on (%s)\"", table.getName(), StringUtils.join(pkAssignments.iterator(), " and ")); if (pk.size() < columns.size()) { write_tables(" + \" when matched then update set %s\"", StringUtils.join(nonPkAssignments.iterator(), ", ")); } write_tables(" + \" when not matched then insert (%s) values (%s)\";", StringUtils.join(colNames.iterator(), ", "), StringUtils.join(Collections.nCopies(columns.size(), "?").iterator(), ", ")); write_tables(" PreparedStatement st = null;"); write_tables(" try {"); write_tables(" st = cnx.prepareStatement(sql);"); int index = 1; for (final ColumnDescriptor col : pk) { final JdbcType type = getJdbcType(col.getType()); write_tables(" st.%-13s(%3s, _value.%s);", type.getSetterMethodName(), index++, col.getName() .toLowerCase()); } if (pk.size() < columns.size()) { for (final ColumnDescriptor col : nonPk) { final JdbcType type = getJdbcType(col.getType()); write_tables(" st.%-13s(%3s, _value.%s);", type.getSetterMethodName(), index++, col.getName() .toLowerCase()); } } for (final ColumnDescriptor col : columns) { final JdbcType type = 
getJdbcType(col.getType()); write_tables(" st.%-13s(%3s, _value.%s);", type.getSetterMethodName(), index++, col.getName() .toLowerCase()); } write_tables(" final int nRows = st.executeUpdate();"); write_tables(" cnx.commit();"); write_tables(" return nRows;"); write_tables(" } catch (SQLException e) {"); write_tables(" throw new RuntimeException(e);"); write_tables(" } finally {"); write_tables(" DaoUtil.close(st);"); write_tables(" }"); write_tables(" }\n"); //@formatter:on } public void gen_delete(final TableDescriptor table) throws IOException { //@formatter:off write_tables(" public int delete(Condition condition) {"); write_tables(" int res = DaoUtil.delete(cnx, \"%s\", condition);", table.getName()); write_tables(" return res;"); write_tables(" }\n"); //@formatter:ofn } public void gen_update(final TableDescriptor table) throws IOException { //@formatter:off write_tables(" public int update(Collection<UpdateSetClause> updates, Condition condition) {", table.getName()); write_tables(" assert updates != null;"); write_tables(" assert updates.size() >= 1;"); write_tables(" PreparedStatement st = null;"); write_tables(" final StringBuilder sql = new StringBuilder();"); write_tables(" sql.append(\"update %s set \");", table.getName()); write_tables(" final List<String> updates_str = Stream.of(updates).map(SqlFragment.toSql).toList();"); write_tables(" sql.append(DaoUtil.join(updates_str.iterator(), \", \"));"); write_tables(" if (condition != null) sql.append(\" where \").append(condition.toSql());"); write_tables(" final Sequence parameterIndex = new Sequence(1);"); write_tables(" try {"); write_tables(" st = cnx.prepareStatement(sql.toString());", table.getName()); write_tables(" for (UpdateSetClause update : updates) {"); write_tables(" update.bind(st, parameterIndex);"); write_tables(" }"); write_tables(" if (condition != null) condition.bind(st, parameterIndex);", table.getName()); write_tables(" final int nRows = st.executeUpdate();"); write_tables(" 
cnx.commit();"); write_tables(" return nRows;"); write_tables(" } catch (SQLException e) {"); write_tables(" throw new RuntimeException(e);"); write_tables(" } finally {"); write_tables(" DaoUtil.close(st);"); write_tables(" }"); write_tables(" }\n"); //@formatter:on } public void gen_search2(final TableDescriptor table) throws IOException { //@formatter:off write_tables(" public List<%s> search(Condition condition, Collection<OrderByClause> orderBy) {", table.getName(), table.getName(), table.getName()); write_tables(" List<%s> res = new ArrayList<%s>();", table.getName(), table.getName()); write_tables(" search(condition, orderBy, DaoUtil.toList(res));"); write_tables(" return res;"); write_tables(" }\n"); //@formatter:on } public void gen_search(final TableDescriptor table, final Collection<ColumnDescriptor> columns) throws IOException { final List<String> colNames = Stream.of(columns).map(ColumnDescriptor.getName).toList(); //@formatter:off write_tables(" public void search(Condition condition, Collection<OrderByClause> orderBy, Consumer<%s> callback) {", table.getName(), table.getName(), table.getName()); write_tables(" PreparedStatement st = null;"); write_tables(" final StringBuilder query = new StringBuilder();"); write_tables(" query.append(\"select * from %s\");", table.getName()); write_tables(" if (condition != null) query.append(\" where \").append(condition.toSql());"); write_tables(" if (orderBy != null) {"); write_tables(" query.append(\" order by \");"); write_tables(" final List<String> orderBy_str = Stream.of(orderBy).map(OrderByClause.toSql).toList();"); write_tables(" query.append(DaoUtil.join(orderBy_str.iterator(), \", \"));"); write_tables(" }"); write_tables(" try {"); write_tables(" st = cnx.prepareStatement(query.toString());", table.getName()); write_tables(" if (condition != null) condition.bind(st, new Sequence(1));", table.getName()); write_tables(" final ResultSet rs = st.executeQuery();"); write_tables(" while(rs.next()) {"); for 
(final ColumnDescriptor col : columns) { final JdbcType type = getJdbcType(col.getType()); write_tables(" final %-10s %-20s = rs.%-13s(\"%s\");", type.getJavaType(), col.getName(), type.getGetterMethodName(), col.getName()); } write_tables(" final %s obj = new %s(%s);", table.getName(), table.getName(), StringUtils.join(colNames.iterator(), ", ")); write_tables(" callback.accept(obj);"); write_tables(" }"); write_tables(" rs.close();"); write_tables(" } catch (SQLException e) {"); write_tables(" throw new RuntimeException(e);"); write_tables(" } finally {"); write_tables(" DaoUtil.close(st);"); write_tables(" }"); write_tables(" }\n"); //@formatter:on } public void gen_TABLE_Dao(final TableDescriptor table) throws IOException { //@formatter:off write_tables(" public %s_Dao(Connection cnx) {", table.getName()); write_tables(" super(cnx, \"%s\");", table.getName()); write_tables(" this.cnx = cnx;"); write_tables(" }\n"); //@formatter:on } public void gen_insertBatch(final TableDescriptor table, final Collection<ColumnDescriptor> columns) throws IOException { final List<String> colNames = Stream.of(columns).map(ColumnDescriptor.getName).toList(); //@formatter:off write_tables(" public int[] insertBatch(Iterable<%s> _values) {", table.getName()); write_tables(" PreparedStatement st = null;"); write_tables(" final String sql = \"insert into %s(%s) values(%s)\";", table.getName(), StringUtils.join(colNames.iterator(), ", "), StringUtils.join(Collections.nCopies(columns.size(), "?").iterator(), ", ")); write_tables(" try {"); write_tables(" st = cnx.prepareStatement(sql);"); write_tables(" for (%s _value : _values) {", table.getName()); int index = 1; for (final ColumnDescriptor col : columns) { final JdbcType type = getJdbcType(col.getType()); write_tables(" st.%-13s(%3s, _value.%s);", type.getSetterMethodName(), index++, col.getName() .toLowerCase()); } write_tables(" st.addBatch();"); write_tables(" }"); write_tables(" final int[] nRows = st.executeBatch();"); 
write_tables(" cnx.commit();"); write_tables(" return nRows;"); write_tables(" } catch (SQLException e) {"); write_tables(" throw new RuntimeException(e);"); write_tables(" } finally {"); write_tables(" DaoUtil.close(st);"); write_tables(" }"); write_tables(" }"); //@formatter:on } } private static class JdbcType { private final int jdbcType; private final String javaType; private final String identifier; private final String fieldClassName; public JdbcType(int jdbcType, String javaType, String identifier, String fieldClassName) { this.jdbcType = jdbcType; this.javaType = javaType; this.identifier = identifier; this.fieldClassName = fieldClassName; } public int getJdbcType() { return jdbcType; } public String getJavaType() { return javaType; } public static final Function<JdbcType, Integer> getJdbcType = new Function<JdbcType, Integer>() { @Override public Integer apply(JdbcType t) { return t.getJdbcType(); } }; public String getSetterMethodName() { return String.format("set%s", identifier); } public String getGetterMethodName() { return String.format("get%s", identifier); } public String getFieldClassName() { return fieldClassName; } } }
refactor
src/fjdbc/codegen/CodeGenerator.java
refactor
<ide><path>rc/fjdbc/codegen/CodeGenerator.java <ide> package fjdbc.codegen; <ide> <add>import java.io.Closeable; <ide> import java.io.File; <ide> import java.io.FileWriter; <ide> import java.io.IOException; <ide> private final String outputDir; <ide> <ide> //writers <del> private Writer tables; <ide> private Writer dto; <ide> private Writer sequences; <ide> <ide> final String sourceDir = outputDir + "/" + packageName.replace('.', '/'); <ide> new File(sourceDir).mkdirs(); <ide> <del> this.tables = new FileWriter(sourceDir + "/Tables.java"); <del> final TablesGenerator tbl = new TablesGenerator(tables); <add> final TablesGenerator tbl = new TablesGenerator(new FileWriter(sourceDir + "/Tables.java")); <ide> this.dto = new FileWriter(sourceDir + "/Dto.java"); <ide> this.sequences = new FileWriter(sourceDir + "/Sequences.java"); <ide> <ide> final Collection<TableDescriptor> _tables = dbUtil.searchTables(); <ide> <del> tbl.gen_dao_header(); <add> tbl.gen_header(); <ide> gen_dto_header(); <ide> <ide> //@formatter:off <ide> write_dto("public class Dto {"); <ide> <ide> // class Tables <del> tbl.write_tables("public class Tables {"); <add> tbl.write("public class Tables {"); <ide> <ide> // fields <ide> for (final TableDescriptor table : _tables) { <del> tbl.write_tables(" public final %s_Dao %s;", table.getName(), table.getName().toLowerCase()); <del> } <del> tbl.write_tables(" "); <add> tbl.write(" public final %s_Dao %s;", table.getName(), table.getName().toLowerCase()); <add> } <add> tbl.write(" "); <ide> <ide> // constructor Tables <del> tbl.write_tables(" public Tables(Connection cnx) {"); <add> tbl.write(" public Tables(Connection cnx) {"); <ide> for (final TableDescriptor table : _tables) { <del> tbl.write_tables(" %s = new %s_Dao(cnx);", table.getName().toLowerCase(), table.getName()); <del> } <del> tbl.write_tables(" }"); <add> tbl.write(" %s = new %s_Dao(cnx);", table.getName().toLowerCase(), table.getName()); <add> } <add> tbl.write(" }"); <ide> <ide> for (final 
TableDescriptor table : _tables) { <ide> final Collection<ColumnDescriptor> columns = dbUtil.searchColumns(table.getName()); <ide> write_dto(" }\n"); <ide> <ide> // class TABLE_Dao <del> tbl.write_tables(" public static class %s_Dao extends Dao {", table.getName()); <del> tbl.write_tables(" private Connection cnx;"); <add> tbl.write(" public static class %s_Dao extends Dao {", table.getName()); <add> tbl.write(" private Connection cnx;"); <ide> <ide> // enum Field <ide> for (final ColumnDescriptor col : columns) { <ide> final JdbcType type = getJdbcType(col.getType()); <del> tbl.write_tables(" public final %s %s = new %s(\"%s\");", type.getFieldClassName(), col.getName().toLowerCase(), type.getFieldClassName(), col.getName()); <del> } <del> tbl.write_tables(" "); <add> tbl.write(" public final %s %s = new %s(\"%s\");", type.getFieldClassName(), col.getName().toLowerCase(), type.getFieldClassName(), col.getName()); <add> } <add> tbl.write(" "); <ide> <ide> tbl.gen_TABLE_Dao(table); <ide> tbl.gen_search(table, columns); <ide> } <ide> <ide> // end class TABLE_Dao <del> tbl.write_tables(" }\n"); <add> tbl.write(" }\n"); <ide> } <ide> <ide> // end class Tables <del> tbl.write_tables("}\n"); <add> tbl.write("}\n"); <ide> <ide> // end class Dto <ide> write_dto("}\n"); <ide> //@formatter:on <ide> <del> tables.close(); <add> tbl.close(); <ide> dto.close(); <ide> sequences.close(); <ide> } <ide> <del> private class TablesGenerator { <add> private class TablesGenerator implements Closeable { <ide> private final Writer wrapped; <ide> <ide> public TablesGenerator(Writer wrapped) { <ide> this.wrapped = wrapped; <ide> } <ide> <del> public void write_tables(String format, Object... args) throws IOException { <add> public void write(String format, Object... args) throws IOException { <ide> final String s = args.length == 0 ? 
format : String.format(format, args); <ide> wrapped.write(s); <ide> wrapped.write("\n"); <ide> } <ide> <del> public void gen_dao_header() throws IOException { <del> write_tables("package %s;", packageName); <del> write_tables(""); <del> write_tables("import java.util.List;"); <del> write_tables("import java.util.Collection;"); <del> write_tables("import java.util.ArrayList;"); <del> write_tables("import java.sql.*;"); <del> write_tables("import com.github.stream4j.Consumer;"); <del> write_tables("import com.github.stream4j.Stream;"); <del> write_tables("import fjdbc.codegen.DaoUtil;"); <del> write_tables("import fjdbc.codegen.DaoUtil.*;"); <del> write_tables("import fjdbc.codegen.Condition;"); <del> write_tables("import fjdbc.codegen.SqlFragment;"); <del> write_tables("import fjdbc.codegen.SqlExpr;"); <del> write_tables("import %s.Dto.*;", packageName); <del> write_tables(""); <add> public void gen_header() throws IOException { <add> write("package %s;", packageName); <add> write(""); <add> write("import java.util.List;"); <add> write("import java.util.Collection;"); <add> write("import java.util.ArrayList;"); <add> write("import java.sql.*;"); <add> write("import com.github.stream4j.Consumer;"); <add> write("import com.github.stream4j.Stream;"); <add> write("import fjdbc.codegen.DaoUtil;"); <add> write("import fjdbc.codegen.DaoUtil.*;"); <add> write("import fjdbc.codegen.Condition;"); <add> write("import fjdbc.codegen.SqlFragment;"); <add> write("import fjdbc.codegen.SqlExpr;"); <add> write("import %s.Dto.*;", packageName); <add> write(""); <ide> } <ide> <ide> public void gen_insert2(final TableDescriptor table, final Collection<ColumnDescriptor> columns) <ide> final List<String> colNames = Stream.of(columns).map(ColumnDescriptor.getName).toList(); <ide> <ide> //@formatter:off <del> write_tables(" public int insert("); <add> write(" public int insert("); <ide> boolean first = true; <ide> for (final ColumnDescriptor col : columns) { <ide> final JdbcType type = 
getJdbcType(col.getType()); <del> write_tables(" %s SqlExpr<%s> _%s", first ? " " : ",", type.getJavaType(), col.getName().toLowerCase()); <add> write(" %s SqlExpr<%s> _%s", first ? " " : ",", type.getJavaType(), col.getName().toLowerCase()); <ide> first = false; <ide> } <del> write_tables(" ) {"); <del> write_tables(" PreparedStatement st = null;"); <del> write_tables(" final StringBuilder sql = new StringBuilder(\"insert into %s(%s) values(\");", table.getName(), StringUtils.join(colNames.iterator(), ", ")); <add> write(" ) {"); <add> write(" PreparedStatement st = null;"); <add> write(" final StringBuilder sql = new StringBuilder(\"insert into %s(%s) values(\");", table.getName(), StringUtils.join(colNames.iterator(), ", ")); <ide> first = true; <ide> for (final ColumnDescriptor col : columns) { <del> write_tables(" sql.%sappend(_%s.toSql());", first ? "" : "append(\", \").", col.getName().toLowerCase()); <add> write(" sql.%sappend(_%s.toSql());", first ? "" : "append(\", \").", col.getName().toLowerCase()); <ide> first = false; <ide> } <del> write_tables(" sql.append(\")\");"); <del> write_tables(" try {"); <del> write_tables(" st = cnx.prepareStatement(sql.toString());"); <del> write_tables(" Sequence parameterIndex = new Sequence(1);"); <del> for (final ColumnDescriptor col : columns) { <del> write_tables(" _%s.bind(st, parameterIndex);", col.getName().toLowerCase()); <del> } <del> write_tables(" final int nRows = st.executeUpdate();"); <del> write_tables(" cnx.commit();"); <del> write_tables(" return nRows;"); <del> write_tables(" } catch (SQLException e) {"); <del> write_tables(" throw new RuntimeException(e);"); <del> write_tables(" } finally {"); <del> write_tables(" DaoUtil.close(st);"); <del> write_tables(" }"); <del> write_tables(" }"); <add> write(" sql.append(\")\");"); <add> write(" try {"); <add> write(" st = cnx.prepareStatement(sql.toString());"); <add> write(" Sequence parameterIndex = new Sequence(1);"); <add> for (final ColumnDescriptor col : 
columns) { <add> write(" _%s.bind(st, parameterIndex);", col.getName().toLowerCase()); <add> } <add> write(" final int nRows = st.executeUpdate();"); <add> write(" cnx.commit();"); <add> write(" return nRows;"); <add> write(" } catch (SQLException e) {"); <add> write(" throw new RuntimeException(e);"); <add> write(" } finally {"); <add> write(" DaoUtil.close(st);"); <add> write(" }"); <add> write(" }"); <ide> //@formatter:on <ide> } <ide> <ide> final List<String> colNames = Stream.of(columns).map(ColumnDescriptor.getName).toList(); <ide> <ide> //@formatter:off <del> write_tables(" public int insert(%s _value) {", table.getName()); <del> write_tables(" PreparedStatement st = null;"); <del> write_tables(" final String sql = \"insert into %s(%s) values(%s)\";", table.getName(), StringUtils.join(colNames.iterator(), ", "), StringUtils.join(Collections.nCopies(columns.size(), "?").iterator(), ", ")); <del> write_tables(" try {"); <del> write_tables(" st = cnx.prepareStatement(sql);"); <add> write(" public int insert(%s _value) {", table.getName()); <add> write(" PreparedStatement st = null;"); <add> write(" final String sql = \"insert into %s(%s) values(%s)\";", table.getName(), StringUtils.join(colNames.iterator(), ", "), StringUtils.join(Collections.nCopies(columns.size(), "?").iterator(), ", ")); <add> write(" try {"); <add> write(" st = cnx.prepareStatement(sql);"); <ide> int index = 1; <ide> for (final ColumnDescriptor col : columns) { <ide> final JdbcType type = getJdbcType(col.getType()); <del> write_tables(" st.%-13s(%3s, _value.%s);", type.getSetterMethodName(), index++, col.getName() .toLowerCase()); <del> } <del> write_tables(" final int nRows = st.executeUpdate();"); <del> write_tables(" cnx.commit();"); <del> write_tables(" return nRows;"); <del> write_tables(" } catch (SQLException e) {"); <del> write_tables(" throw new RuntimeException(e);"); <del> write_tables(" } finally {"); <del> write_tables(" DaoUtil.close(st);"); <del> write_tables(" }"); <del> 
write_tables(" }"); <add> write(" st.%-13s(%3s, _value.%s);", type.getSetterMethodName(), index++, col.getName() .toLowerCase()); <add> } <add> write(" final int nRows = st.executeUpdate();"); <add> write(" cnx.commit();"); <add> write(" return nRows;"); <add> write(" } catch (SQLException e) {"); <add> write(" throw new RuntimeException(e);"); <add> write(" } finally {"); <add> write(" DaoUtil.close(st);"); <add> write(" }"); <add> write(" }"); <ide> //@formatter:on <ide> } <ide> <ide> }).toList(); <ide> <ide> //@formatter:off <del> write_tables(" public int merge(%s _value) {", table.getName()); <del> write_tables(" final String sql ="); <del> write_tables(" \" merge into %s using dual on (%s)\"", table.getName(), StringUtils.join(pkAssignments.iterator(), " and ")); <add> write(" public int merge(%s _value) {", table.getName()); <add> write(" final String sql ="); <add> write(" \" merge into %s using dual on (%s)\"", table.getName(), StringUtils.join(pkAssignments.iterator(), " and ")); <ide> if (pk.size() < columns.size()) { <del> write_tables(" + \" when matched then update set %s\"", StringUtils.join(nonPkAssignments.iterator(), ", ")); <del> } <del> write_tables(" + \" when not matched then insert (%s) values (%s)\";", StringUtils.join(colNames.iterator(), ", "), StringUtils.join(Collections.nCopies(columns.size(), "?").iterator(), ", ")); <del> write_tables(" PreparedStatement st = null;"); <del> write_tables(" try {"); <del> write_tables(" st = cnx.prepareStatement(sql);"); <add> write(" + \" when matched then update set %s\"", StringUtils.join(nonPkAssignments.iterator(), ", ")); <add> } <add> write(" + \" when not matched then insert (%s) values (%s)\";", StringUtils.join(colNames.iterator(), ", "), StringUtils.join(Collections.nCopies(columns.size(), "?").iterator(), ", ")); <add> write(" PreparedStatement st = null;"); <add> write(" try {"); <add> write(" st = cnx.prepareStatement(sql);"); <ide> int index = 1; <ide> for (final ColumnDescriptor col : 
pk) { <ide> final JdbcType type = getJdbcType(col.getType()); <del> write_tables(" st.%-13s(%3s, _value.%s);", type.getSetterMethodName(), index++, col.getName() .toLowerCase()); <add> write(" st.%-13s(%3s, _value.%s);", type.getSetterMethodName(), index++, col.getName() .toLowerCase()); <ide> } <ide> if (pk.size() < columns.size()) { <ide> for (final ColumnDescriptor col : nonPk) { <ide> final JdbcType type = getJdbcType(col.getType()); <del> write_tables(" st.%-13s(%3s, _value.%s);", type.getSetterMethodName(), index++, col.getName() .toLowerCase()); <del> } <del> } <del> for (final ColumnDescriptor col : columns) { <del> final JdbcType type = getJdbcType(col.getType()); <del> write_tables(" st.%-13s(%3s, _value.%s);", type.getSetterMethodName(), index++, col.getName() .toLowerCase()); <del> } <del> write_tables(" final int nRows = st.executeUpdate();"); <del> write_tables(" cnx.commit();"); <del> write_tables(" return nRows;"); <del> write_tables(" } catch (SQLException e) {"); <del> write_tables(" throw new RuntimeException(e);"); <del> write_tables(" } finally {"); <del> write_tables(" DaoUtil.close(st);"); <del> write_tables(" }"); <del> write_tables(" }\n"); <add> write(" st.%-13s(%3s, _value.%s);", type.getSetterMethodName(), index++, col.getName() .toLowerCase()); <add> } <add> } <add> for (final ColumnDescriptor col : columns) { <add> final JdbcType type = getJdbcType(col.getType()); <add> write(" st.%-13s(%3s, _value.%s);", type.getSetterMethodName(), index++, col.getName() .toLowerCase()); <add> } <add> write(" final int nRows = st.executeUpdate();"); <add> write(" cnx.commit();"); <add> write(" return nRows;"); <add> write(" } catch (SQLException e) {"); <add> write(" throw new RuntimeException(e);"); <add> write(" } finally {"); <add> write(" DaoUtil.close(st);"); <add> write(" }"); <add> write(" }\n"); <ide> //@formatter:on <ide> } <ide> <ide> public void gen_delete(final TableDescriptor table) throws IOException { <ide> //@formatter:off <del> 
write_tables(" public int delete(Condition condition) {"); <del> write_tables(" int res = DaoUtil.delete(cnx, \"%s\", condition);", table.getName()); <del> write_tables(" return res;"); <del> write_tables(" }\n"); <add> write(" public int delete(Condition condition) {"); <add> write(" int res = DaoUtil.delete(cnx, \"%s\", condition);", table.getName()); <add> write(" return res;"); <add> write(" }\n"); <ide> //@formatter:ofn <ide> } <ide> <ide> public void gen_update(final TableDescriptor table) throws IOException { <ide> //@formatter:off <del> write_tables(" public int update(Collection<UpdateSetClause> updates, Condition condition) {", table.getName()); <del> write_tables(" assert updates != null;"); <del> write_tables(" assert updates.size() >= 1;"); <del> write_tables(" PreparedStatement st = null;"); <del> write_tables(" final StringBuilder sql = new StringBuilder();"); <del> write_tables(" sql.append(\"update %s set \");", table.getName()); <del> write_tables(" final List<String> updates_str = Stream.of(updates).map(SqlFragment.toSql).toList();"); <del> write_tables(" sql.append(DaoUtil.join(updates_str.iterator(), \", \"));"); <del> write_tables(" if (condition != null) sql.append(\" where \").append(condition.toSql());"); <del> write_tables(" final Sequence parameterIndex = new Sequence(1);"); <del> write_tables(" try {"); <del> write_tables(" st = cnx.prepareStatement(sql.toString());", table.getName()); <del> write_tables(" for (UpdateSetClause update : updates) {"); <del> write_tables(" update.bind(st, parameterIndex);"); <del> write_tables(" }"); <del> write_tables(" if (condition != null) condition.bind(st, parameterIndex);", table.getName()); <del> write_tables(" final int nRows = st.executeUpdate();"); <del> write_tables(" cnx.commit();"); <del> write_tables(" return nRows;"); <del> write_tables(" } catch (SQLException e) {"); <del> write_tables(" throw new RuntimeException(e);"); <del> write_tables(" } finally {"); <del> write_tables(" 
DaoUtil.close(st);"); <del> write_tables(" }"); <del> write_tables(" }\n"); <add> write(" public int update(Collection<UpdateSetClause> updates, Condition condition) {", table.getName()); <add> write(" assert updates != null;"); <add> write(" assert updates.size() >= 1;"); <add> write(" PreparedStatement st = null;"); <add> write(" final StringBuilder sql = new StringBuilder();"); <add> write(" sql.append(\"update %s set \");", table.getName()); <add> write(" final List<String> updates_str = Stream.of(updates).map(SqlFragment.toSql).toList();"); <add> write(" sql.append(DaoUtil.join(updates_str.iterator(), \", \"));"); <add> write(" if (condition != null) sql.append(\" where \").append(condition.toSql());"); <add> write(" final Sequence parameterIndex = new Sequence(1);"); <add> write(" try {"); <add> write(" st = cnx.prepareStatement(sql.toString());", table.getName()); <add> write(" for (UpdateSetClause update : updates) {"); <add> write(" update.bind(st, parameterIndex);"); <add> write(" }"); <add> write(" if (condition != null) condition.bind(st, parameterIndex);", table.getName()); <add> write(" final int nRows = st.executeUpdate();"); <add> write(" cnx.commit();"); <add> write(" return nRows;"); <add> write(" } catch (SQLException e) {"); <add> write(" throw new RuntimeException(e);"); <add> write(" } finally {"); <add> write(" DaoUtil.close(st);"); <add> write(" }"); <add> write(" }\n"); <ide> //@formatter:on <ide> } <ide> <ide> public void gen_search2(final TableDescriptor table) throws IOException { <ide> //@formatter:off <del> write_tables(" public List<%s> search(Condition condition, Collection<OrderByClause> orderBy) {", table.getName(), table.getName(), table.getName()); <del> write_tables(" List<%s> res = new ArrayList<%s>();", table.getName(), table.getName()); <del> write_tables(" search(condition, orderBy, DaoUtil.toList(res));"); <del> write_tables(" return res;"); <del> write_tables(" }\n"); <add> write(" public List<%s> search(Condition 
condition, Collection<OrderByClause> orderBy) {", table.getName(), table.getName(), table.getName()); <add> write(" List<%s> res = new ArrayList<%s>();", table.getName(), table.getName()); <add> write(" search(condition, orderBy, DaoUtil.toList(res));"); <add> write(" return res;"); <add> write(" }\n"); <ide> //@formatter:on <ide> } <ide> <ide> throws IOException { <ide> final List<String> colNames = Stream.of(columns).map(ColumnDescriptor.getName).toList(); <ide> //@formatter:off <del> write_tables(" public void search(Condition condition, Collection<OrderByClause> orderBy, Consumer<%s> callback) {", table.getName(), table.getName(), table.getName()); <del> write_tables(" PreparedStatement st = null;"); <del> write_tables(" final StringBuilder query = new StringBuilder();"); <del> write_tables(" query.append(\"select * from %s\");", table.getName()); <del> write_tables(" if (condition != null) query.append(\" where \").append(condition.toSql());"); <del> write_tables(" if (orderBy != null) {"); <del> write_tables(" query.append(\" order by \");"); <del> write_tables(" final List<String> orderBy_str = Stream.of(orderBy).map(OrderByClause.toSql).toList();"); <del> write_tables(" query.append(DaoUtil.join(orderBy_str.iterator(), \", \"));"); <del> write_tables(" }"); <del> write_tables(" try {"); <del> write_tables(" st = cnx.prepareStatement(query.toString());", table.getName()); <del> write_tables(" if (condition != null) condition.bind(st, new Sequence(1));", table.getName()); <del> write_tables(" final ResultSet rs = st.executeQuery();"); <del> write_tables(" while(rs.next()) {"); <del> for (final ColumnDescriptor col : columns) { <del> final JdbcType type = getJdbcType(col.getType()); <del> write_tables(" final %-10s %-20s = rs.%-13s(\"%s\");", type.getJavaType(), col.getName(), type.getGetterMethodName(), col.getName()); <del> } <del> write_tables(" final %s obj = new %s(%s);", table.getName(), table.getName(), StringUtils.join(colNames.iterator(), ", ")); 
<del> write_tables(" callback.accept(obj);"); <del> write_tables(" }"); <del> write_tables(" rs.close();"); <del> write_tables(" } catch (SQLException e) {"); <del> write_tables(" throw new RuntimeException(e);"); <del> write_tables(" } finally {"); <del> write_tables(" DaoUtil.close(st);"); <del> write_tables(" }"); <del> write_tables(" }\n"); <add> write(" public void search(Condition condition, Collection<OrderByClause> orderBy, Consumer<%s> callback) {", table.getName(), table.getName(), table.getName()); <add> write(" PreparedStatement st = null;"); <add> write(" final StringBuilder query = new StringBuilder();"); <add> write(" query.append(\"select * from %s\");", table.getName()); <add> write(" if (condition != null) query.append(\" where \").append(condition.toSql());"); <add> write(" if (orderBy != null) {"); <add> write(" query.append(\" order by \");"); <add> write(" final List<String> orderBy_str = Stream.of(orderBy).map(OrderByClause.toSql).toList();"); <add> write(" query.append(DaoUtil.join(orderBy_str.iterator(), \", \"));"); <add> write(" }"); <add> write(" try {"); <add> write(" st = cnx.prepareStatement(query.toString());", table.getName()); <add> write(" if (condition != null) condition.bind(st, new Sequence(1));", table.getName()); <add> write(" final ResultSet rs = st.executeQuery();"); <add> write(" while(rs.next()) {"); <add> for (final ColumnDescriptor col : columns) { <add> final JdbcType type = getJdbcType(col.getType()); <add> write(" final %-10s %-20s = rs.%-13s(\"%s\");", type.getJavaType(), col.getName(), type.getGetterMethodName(), col.getName()); <add> } <add> write(" final %s obj = new %s(%s);", table.getName(), table.getName(), StringUtils.join(colNames.iterator(), ", ")); <add> write(" callback.accept(obj);"); <add> write(" }"); <add> write(" rs.close();"); <add> write(" } catch (SQLException e) {"); <add> write(" throw new RuntimeException(e);"); <add> write(" } finally {"); <add> write(" DaoUtil.close(st);"); <add> write(" }"); 
<add> write(" }\n"); <ide> //@formatter:on <ide> } <ide> <ide> public void gen_TABLE_Dao(final TableDescriptor table) throws IOException { <ide> //@formatter:off <del> write_tables(" public %s_Dao(Connection cnx) {", table.getName()); <del> write_tables(" super(cnx, \"%s\");", table.getName()); <del> write_tables(" this.cnx = cnx;"); <del> write_tables(" }\n"); <add> write(" public %s_Dao(Connection cnx) {", table.getName()); <add> write(" super(cnx, \"%s\");", table.getName()); <add> write(" this.cnx = cnx;"); <add> write(" }\n"); <ide> //@formatter:on <ide> } <ide> <ide> throws IOException { <ide> final List<String> colNames = Stream.of(columns).map(ColumnDescriptor.getName).toList(); <ide> //@formatter:off <del> write_tables(" public int[] insertBatch(Iterable<%s> _values) {", table.getName()); <del> write_tables(" PreparedStatement st = null;"); <del> write_tables(" final String sql = \"insert into %s(%s) values(%s)\";", table.getName(), StringUtils.join(colNames.iterator(), ", "), StringUtils.join(Collections.nCopies(columns.size(), "?").iterator(), ", ")); <del> write_tables(" try {"); <del> write_tables(" st = cnx.prepareStatement(sql);"); <del> write_tables(" for (%s _value : _values) {", table.getName()); <add> write(" public int[] insertBatch(Iterable<%s> _values) {", table.getName()); <add> write(" PreparedStatement st = null;"); <add> write(" final String sql = \"insert into %s(%s) values(%s)\";", table.getName(), StringUtils.join(colNames.iterator(), ", "), StringUtils.join(Collections.nCopies(columns.size(), "?").iterator(), ", ")); <add> write(" try {"); <add> write(" st = cnx.prepareStatement(sql);"); <add> write(" for (%s _value : _values) {", table.getName()); <ide> int index = 1; <ide> for (final ColumnDescriptor col : columns) { <ide> final JdbcType type = getJdbcType(col.getType()); <del> write_tables(" st.%-13s(%3s, _value.%s);", type.getSetterMethodName(), index++, col.getName() .toLowerCase()); <del> } <del> write_tables(" st.addBatch();"); 
<del> write_tables(" }"); <del> write_tables(" final int[] nRows = st.executeBatch();"); <del> write_tables(" cnx.commit();"); <del> write_tables(" return nRows;"); <del> write_tables(" } catch (SQLException e) {"); <del> write_tables(" throw new RuntimeException(e);"); <del> write_tables(" } finally {"); <del> write_tables(" DaoUtil.close(st);"); <del> write_tables(" }"); <del> write_tables(" }"); <del> //@formatter:on <add> write(" st.%-13s(%3s, _value.%s);", type.getSetterMethodName(), index++, col.getName() .toLowerCase()); <add> } <add> write(" st.addBatch();"); <add> write(" }"); <add> write(" final int[] nRows = st.executeBatch();"); <add> write(" cnx.commit();"); <add> write(" return nRows;"); <add> write(" } catch (SQLException e) {"); <add> write(" throw new RuntimeException(e);"); <add> write(" } finally {"); <add> write(" DaoUtil.close(st);"); <add> write(" }"); <add> write(" }"); <add> //@formatter:on <add> } <add> <add> @Override <add> public void close() throws IOException { <add> wrapped.close(); <ide> } <ide> } <ide>
Java
bsd-3-clause
636ff5b5b3295536749745621ac8d952df3dac00
0
CyanogenMod/android_external_owasp_sanitizer,thiz11/platform_external_owasp_sanitizer,xin3liang/platform_external_owasp_sanitizer,android-ia/platform_external_owasp_sanitizer,Omegaphora/external_owasp_sanitizer,android-ia/platform_external_owasp_sanitizer,thiz11/platform_external_owasp_sanitizer,geekboxzone/mmallow_external_owasp_sanitizer,geekboxzone/lollipop_external_owasp_sanitizer,geekboxzone/mmallow_external_owasp_sanitizer,geekboxzone/lollipop_external_owasp_sanitizer,Omegaphora/external_owasp_sanitizer,geekboxzone/lollipop_external_owasp_sanitizer,CyanogenMod/android_external_owasp_sanitizer,android-ia/platform_external_owasp_sanitizer,Omegaphora/external_owasp_sanitizer,geekboxzone/mmallow_external_owasp_sanitizer,xin3liang/platform_external_owasp_sanitizer
// Copyright (c) 2011, Mike Samuel // All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions // are met: // // Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // Redistributions in binary form must reproduce the above copyright // notice, this list of conditions and the following disclaimer in the // documentation and/or other materials provided with the distribution. // Neither the name of the OWASP nor the names of its contributors may // be used to endorse or promote products derived from this software // without specific prior written permission. // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS // FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE // COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, // INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, // BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; // LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER // CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT // LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN // ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE // POSSIBILITY OF SUCH DAMAGE. package org.owasp.html; import java.util.LinkedList; import java.util.List; import javax.annotation.Nullable; import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.Lists; /** * Consumes an HTML stream, and dispatches events to a policy object which * decides which elements and attributes to allow. 
*/ public final class HtmlSanitizer { /** * Receives events based on the HTML stream, and applies a policy to decide * what HTML constructs to allow. * Typically, implementations use an {@link HtmlStreamRenderer} to produce * the sanitized output. * * <p> * <b>Implementations of this class are in the TCB.</b></p> */ @TCB public interface Policy extends HtmlStreamEventReceiver { /** * Called when an HTML tag like {@code <foo bar=baz>} is seen in the input. * * @param elementName a normalized (lower-case for non-namespaced names) * element name. * @param attrs a list of alternating attribute name and value pairs. * For efficiency, this list may be mutated by this during this method * call, but ownership reverts to the caller on method exit. * The values are raw -- HTML entities have been decoded. * Specifically, implementations are allowed to use a list iterator * and remove all disallowed attributes, add necessary attributes, and * then pass the list to an {@link HtmlStreamRenderer}. */ void openTag(String elementName, List<String> attrs); /** * Called when an HTML tag like {@code </foo>} is seen in the input. * * @param elementName a normalized (lower-case for non-namespaced names) * element name. */ void closeTag(String elementName); /** * Called when textual content is seen. * @param textChunk raw content -- HTML entities have been decoded. */ void text(String textChunk); } /** * Sanitizes the given HTML by applying the given policy to it. * * <p> * This method is not in the TCB. * * <p> * This method has no return value since policies are assumed to render things * they accept and do nothing on things they reject. * Use {@link HtmlStreamRenderer} to render content to an output buffer. * * @param html A snippet of HTML to sanitize. {@code null} is treated as the * empty string and will not result in a {@code NullPointerException}. * @param policy The Policy that will receive events based on the tokens in * html. 
Typically, this policy ends up routing the events to an * {@link HtmlStreamRenderer} after filtering. * {@link HtmlPolicyBuilder} provides an easy way to create policies. */ public static void sanitize(@Nullable String html, final Policy policy) { if (html == null) { html = ""; } HtmlStreamEventReceiver balancer = new TagBalancingHtmlStreamEventReceiver( policy); balancer.openDocument(); HtmlLexer lexer = new HtmlLexer(html); // Use a linked list so that policies can use Iterator.remove() in an O(1) // way. LinkedList<String> attrs = Lists.newLinkedList(); while (lexer.hasNext()) { HtmlToken token = lexer.next(); switch (token.type) { case TEXT: balancer.text(decodeHtml(html.substring(token.start, token.end))); break; case UNESCAPED: balancer.text(html.substring(token.start, token.end)); break; case TAGBEGIN: if (html.charAt(token.start + 1) == '/') { // A close tag. balancer.closeTag(HtmlLexer.canonicalName( html.substring(token.start + 2, token.end))); while (lexer.hasNext() && lexer.next().type != HtmlTokenType.TAGEND) { // skip tokens until we see a ">" } } else { attrs.clear(); boolean attrsReadyForName = true; tagBody: while (lexer.hasNext()) { HtmlToken tagBodyToken = lexer.next(); switch (tagBodyToken.type) { case ATTRNAME: if (!attrsReadyForName) { // Last attribute added was valueless. attrs.add(attrs.getLast()); } else { attrsReadyForName = false; } attrs.add(HtmlLexer.canonicalName( html.substring(tagBodyToken.start, tagBodyToken.end))); break; case ATTRVALUE: attrs.add(decodeHtml(stripQuotes( html.substring(tagBodyToken.start, tagBodyToken.end)))); attrsReadyForName = true; break; case TAGEND: break tagBody; default: // Just drop anything not recognized } } if (!attrsReadyForName) { attrs.add(attrs.getLast()); } balancer.openTag( HtmlLexer.canonicalName( html.substring(token.start + 1, token.end)), attrs); } break; default: // Ignore comments, directives, and other stuff that shouldn't show // up in the output. 
break; } } balancer.closeDocument(); } private static String stripQuotes(String encodedAttributeValue) { int n = encodedAttributeValue.length(); if (n > 0) { char last = encodedAttributeValue.charAt(n - 1); if (last == '"' || last == '\'') { int start = 0; if (n != 1 && last == encodedAttributeValue.charAt(0)) { start = 1; } else { // Browsers deal with missing left quotes : <img src=foo.png"> // but generally do not deal with missing right : <img src="foo.png> } return encodedAttributeValue.substring(start, n - 1); } } return encodedAttributeValue; } @VisibleForTesting static String decodeHtml(String s) { int amp = s.indexOf('&'); if (amp < 0) { return s; } int pos = 0; int n = s.length(); StringBuilder sb = new StringBuilder(n); int end; do { long endAndCodepoint = HtmlEntities.decodeEntityAt(s, amp, n); end = (int) (endAndCodepoint >>> 32); int codepoint = (int) endAndCodepoint; sb.append(s, pos, amp).appendCodePoint(codepoint); pos = end; } while ((amp = s.indexOf('&', end)) >= 0); return sb.append(s, pos, n).toString(); } }
src/main/org/owasp/html/HtmlSanitizer.java
// Copyright (c) 2011, Mike Samuel // All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions // are met: // // Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // Redistributions in binary form must reproduce the above copyright // notice, this list of conditions and the following disclaimer in the // documentation and/or other materials provided with the distribution. // Neither the name of the OWASP nor the names of its contributors may // be used to endorse or promote products derived from this software // without specific prior written permission. // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS // FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE // COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, // INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, // BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; // LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER // CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT // LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN // ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE // POSSIBILITY OF SUCH DAMAGE. package org.owasp.html; import java.util.LinkedList; import java.util.List; import javax.annotation.Nullable; import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.Lists; /** * Consumes an HTML stream, and dispatches events to a policy object which * decides which elements and attributes to allow. 
*/ public final class HtmlSanitizer { /** * Receives events based on the HTML stream, and applies a policy to decide * what HTML constructs to allow. * Typically, implementations use an {@link HtmlStreamRenderer} to produce * the sanitized output. * * <p> * <b>Implementations of this class are in the TCB.</b></p> */ @TCB public interface Policy extends HtmlStreamEventReceiver { /** * Called when an HTML tag like {@code <foo bar=baz>} is seen in the input. * * @param elementName a normalized (lower-case for non-namespaced names) * element name. * @param attrs a list of alternating attribute name and value pairs. * For efficiency, this list may be mutated by this during this method * call, but ownership reverts to the caller on method exit. * The values are raw -- HTML entities have been decoded. * Specifically, implementations are allowed to use a list iterator * and remove all disallowed attributes, add necessary attributes, and * then pass the list to an {@link HtmlStreamRenderer}. */ void openTag(String elementName, List<String> attrs); /** * Called when an HTML tag like {@code </foo>} is seen in the input. * * @param elementName a normalized (lower-case for non-namespaced names) * element name. */ void closeTag(String elementName); /** * Called when textual content is seen. * @param textChunk raw content -- HTML entities have been decoded. */ void text(String textChunk); } /** * Sanitizes the given HTML by applying the given policy to it. * * <p> * This method is not in the TCB. * * <p> * This method has no return value since policies are assumed to render things * they accept and do nothing on things they reject. * Use {@link HtmlStreamRenderer} to render content to an output buffer. * * @param html A snippet of HTML to sanitize. {@code null} is treated as the * empty string and will not result in a {@code NullPointerException}. * @param policy The Policy that will receive events based on the tokens in * html. 
Typically, this policy ends up routing the events to an * {@link HtmlStreamRenderer} after filtering. * {@link HtmlPolicyBuilder} provides an easy way to create policies. */ public static void sanitize(@Nullable String html, final Policy policy) { HtmlStreamEventReceiver balancer = new TagBalancingHtmlStreamEventReceiver( policy); balancer.openDocument(); HtmlLexer lexer = new HtmlLexer(html != null ? html : ""); // Use a linked list so that policies can use Iterator.remove() in an O(1) // way. LinkedList<String> attrs = Lists.newLinkedList(); while (lexer.hasNext()) { HtmlToken token = lexer.next(); switch (token.type) { case TEXT: balancer.text(decodeHtml(html.substring(token.start, token.end))); break; case UNESCAPED: balancer.text(html.substring(token.start, token.end)); break; case TAGBEGIN: if (html.charAt(token.start + 1) == '/') { // A close tag. balancer.closeTag(HtmlLexer.canonicalName( html.substring(token.start + 2, token.end))); while (lexer.hasNext() && lexer.next().type != HtmlTokenType.TAGEND) { // skip tokens until we see a ">" } } else { attrs.clear(); boolean attrsReadyForName = true; tagBody: while (lexer.hasNext()) { HtmlToken tagBodyToken = lexer.next(); switch (tagBodyToken.type) { case ATTRNAME: if (!attrsReadyForName) { // Last attribute added was valueless. attrs.add(attrs.getLast()); } else { attrsReadyForName = false; } attrs.add(HtmlLexer.canonicalName( html.substring(tagBodyToken.start, tagBodyToken.end))); break; case ATTRVALUE: attrs.add(decodeHtml(stripQuotes( html.substring(tagBodyToken.start, tagBodyToken.end)))); attrsReadyForName = true; break; case TAGEND: break tagBody; default: // Just drop anything not recognized } } if (!attrsReadyForName) { attrs.add(attrs.getLast()); } balancer.openTag( HtmlLexer.canonicalName( html.substring(token.start + 1, token.end)), attrs); } break; default: // Ignore comments, directives, and other stuff that shouldn't show // up in the output. 
break; } } balancer.closeDocument(); } private static String stripQuotes(String encodedAttributeValue) { int n = encodedAttributeValue.length(); if (n > 0) { char last = encodedAttributeValue.charAt(n - 1); if (last == '"' || last == '\'') { int start = 0; if (n != 1 && last == encodedAttributeValue.charAt(0)) { start = 1; } else { // Browsers deal with missing left quotes : <img src=foo.png"> // but generally do not deal with missing right : <img src="foo.png> } return encodedAttributeValue.substring(start, n - 1); } } return encodedAttributeValue; } @VisibleForTesting static String decodeHtml(String s) { int amp = s.indexOf('&'); if (amp < 0) { return s; } int pos = 0; int n = s.length(); StringBuilder sb = new StringBuilder(n); int end; do { long endAndCodepoint = HtmlEntities.decodeEntityAt(s, amp, n); end = (int) (endAndCodepoint >>> 32); int codepoint = (int) endAndCodepoint; sb.append(s, pos, amp).appendCodePoint(codepoint); pos = end; } while ((amp = s.indexOf('&', end)) >= 0); return sb.append(s, pos, n).toString(); } }
am 75d905c9: Simplified null parameter handling in HtmlSanitizer.sanitize to present a consistently non-null html parameter to the whole function body. If html is null, the loop will be entered but there\'s no need to confuse the JIT with calls to substring on a value * commit '75d905c90100b9b05602b1878f847142e39836aa': Simplified null parameter handling in HtmlSanitizer.sanitize to present a consistently non-null html parameter to the whole function body. If html is null, the loop will be entered but there's no need to confuse the JIT with calls to substring on a value that's been checked for null earlier in the method.
src/main/org/owasp/html/HtmlSanitizer.java
am 75d905c9: Simplified null parameter handling in HtmlSanitizer.sanitize to present a consistently non-null html parameter to the whole function body. If html is null, the loop will be entered but there\'s no need to confuse the JIT with calls to substring on a value
<ide><path>rc/main/org/owasp/html/HtmlSanitizer.java <ide> * {@link HtmlPolicyBuilder} provides an easy way to create policies. <ide> */ <ide> public static void sanitize(@Nullable String html, final Policy policy) { <add> if (html == null) { html = ""; } <add> <ide> HtmlStreamEventReceiver balancer = new TagBalancingHtmlStreamEventReceiver( <ide> policy); <ide> <ide> balancer.openDocument(); <ide> <del> HtmlLexer lexer = new HtmlLexer(html != null ? html : ""); <add> HtmlLexer lexer = new HtmlLexer(html); <ide> // Use a linked list so that policies can use Iterator.remove() in an O(1) <ide> // way. <ide> LinkedList<String> attrs = Lists.newLinkedList();
Java
apache-2.0
7cd2e07b622633380741db7ffe8a77174b172e9d
0
gchq/stroom,gchq/stroom,gchq/stroom,gchq/stroom,gchq/stroom,gchq/stroom
package stroom.core.query; import stroom.datasource.api.v2.AbstractField; import stroom.docref.DocRef; import stroom.feed.api.FeedStore; import stroom.meta.api.MetaService; import stroom.meta.shared.MetaFields; import stroom.meta.shared.Status; import stroom.pipeline.PipelineStore; import stroom.query.shared.FetchSuggestionsRequest; import stroom.security.api.SecurityContext; import stroom.task.api.TaskContext; import stroom.task.api.TaskContextFactory; import stroom.util.filter.QuickFilterPredicateFactory; import org.jetbrains.annotations.NotNull; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ExecutionException; import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.Stream; import javax.inject.Inject; import javax.inject.Singleton; @Singleton @SuppressWarnings("unused") public class SuggestionsServiceImpl implements SuggestionsService { private static final Logger LOGGER = LoggerFactory.getLogger(SuggestionsServiceImpl.class); private static final int LIMIT = 20; private final MetaService metaService; private final PipelineStore pipelineStore; private final SecurityContext securityContext; private final FeedStore feedStore; private final TaskContextFactory taskContextFactory; // This may need changing if we have suggestions that are not for the stream store data source private final Map<String, Function<String, List<String>>> fieldNameToFunctionMap = Map.of( MetaFields.FEED.getName(), this::createFeedList, MetaFields.PIPELINE.getName(), this::createPipelineList, MetaFields.TYPE.getName(), this::createStreamTypeList, MetaFields.STATUS.getName(), this::createStatusList); @SuppressWarnings("unused") @Inject SuggestionsServiceImpl(final MetaService metaService, final PipelineStore pipelineStore, 
final SecurityContext securityContext, final FeedStore feedStore, final TaskContextFactory taskContextFactory) { this.metaService = metaService; this.pipelineStore = pipelineStore; this.securityContext = securityContext; this.feedStore = feedStore; this.taskContextFactory = taskContextFactory; } @Override public List<String> fetch(final FetchSuggestionsRequest request) { return securityContext.secureResult(() -> { List<String> result = Collections.emptyList(); if (request.getDataSource() != null) { if (MetaFields.STREAM_STORE_DOC_REF.equals(request.getDataSource())) { final String fieldName = request.getField().getName(); final Function<String, List<String>> suggestionFunc = fieldNameToFunctionMap.get(fieldName); if (suggestionFunc != null) { result = suggestionFunc.apply(request.getText()); } } } return result; }); } private boolean matchesMetaField(final FetchSuggestionsRequest request, final AbstractField fieldToMatch) { Objects.requireNonNull(fieldToMatch); return fieldToMatch.getName().equals(request.getField().getName()); } @NotNull private List<String> createPipelineList(final String userInput) { final List<String> result; final Stream<String> stream = pipelineStore.list().stream() .map(DocRef::getName); result = QuickFilterPredicateFactory.filterStream(userInput, stream) .limit(LIMIT) .collect(Collectors.toList()); return result; } @NotNull private List<String> createStatusList(final String userInput) { final List<String> result; Stream<String> stream = Arrays.stream(Status.values()) .map(Status::getDisplayValue); result = QuickFilterPredicateFactory.filterStream(userInput, stream) .limit(LIMIT) .collect(Collectors.toList()); return result; } private List<String> createFeedList(final String userInput) { // TODO this seems pretty inefficient as each call hits the db to get ALL feeds // then limits/filters in java. 
Needs to work off a cached feed name list return taskContextFactory.contextResult("Get all feed names", parentTaskContext -> createFeedList(parentTaskContext, userInput)).get(); } private List<String> createFeedList(final TaskContext parentTaskContext, final String userInput) { // To get a list of feed names we need to combine the names from the meta service // and the feed store. Meta service only has feeds which have data, but may contain // feeds that have been deleted as docs. final CompletableFuture<Set<String>> metaFeedsFuture = CompletableFuture.supplyAsync( taskContextFactory.contextResult( "Get meta feed names", taskContext -> metaService.getFeeds())); final CompletableFuture<List<String>> docFeedsFuture = CompletableFuture.supplyAsync( taskContextFactory.contextResult( "Get doc feed names", taskContext -> feedStore.list() .stream() .map(DocRef::getName) .collect(Collectors.toList()))); try { // Make async calls to get the two lists then combine return metaFeedsFuture .thenCombine(docFeedsFuture, (metaFeedNames, docFeedNames) -> QuickFilterPredicateFactory.filterStream( userInput, Stream.concat(metaFeedNames.stream(), docFeedNames.stream()) .parallel()) .distinct() .limit(LIMIT) .collect(Collectors.toList())) .get(); } catch (InterruptedException e) { Thread.currentThread().interrupt(); LOGGER.error("Thread interrupted", e); return Collections.emptyList(); } catch (ExecutionException e) { throw new RuntimeException("Error getting feed name suggestions: " + e.getMessage(), e); } } private List<String> createStreamTypeList(final String userInput) { // TODO this seems pretty inefficient as each call hits the db to get ALL feeds // then limits/filters in java. Needs to work off a cached feed name list return QuickFilterPredicateFactory.filterStream( userInput, metaService.getTypes().parallelStream()) .limit(LIMIT) .collect(Collectors.toList()); } }
stroom-core/src/main/java/stroom/core/query/SuggestionsServiceImpl.java
package stroom.core.query; import stroom.datasource.api.v2.AbstractField; import stroom.docref.DocRef; import stroom.feed.api.FeedStore; import stroom.meta.api.MetaService; import stroom.meta.shared.MetaFields; import stroom.meta.shared.Status; import stroom.pipeline.PipelineStore; import stroom.query.shared.FetchSuggestionsRequest; import stroom.security.api.SecurityContext; import stroom.task.api.TaskContext; import stroom.task.api.TaskContextFactory; import stroom.util.filter.QuickFilterPredicateFactory; import org.jetbrains.annotations.NotNull; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ExecutionException; import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.Stream; import javax.inject.Inject; import javax.inject.Singleton; @Singleton @SuppressWarnings("unused") public class SuggestionsServiceImpl implements SuggestionsService { private static final Logger LOGGER = LoggerFactory.getLogger(SuggestionsServiceImpl.class); private static final int LIMIT = 20; private final MetaService metaService; private final PipelineStore pipelineStore; private final SecurityContext securityContext; private final FeedStore feedStore; private final TaskContextFactory taskContextFactory; // This may need changing if we have suggestions that are not for the stream store data source private final Map<String, Function<String, List<String>>> fieldNameToFunctionMap = Map.of( MetaFields.FEED.getName(), this::createFeedList, MetaFields.PIPELINE.getName(), this::createPipelineList, MetaFields.TYPE.getName(), this::createStreamTypeList, MetaFields.STATUS.getName(), this::createStatusList); @SuppressWarnings("unused") @Inject SuggestionsServiceImpl(final MetaService metaService, final PipelineStore pipelineStore, 
final SecurityContext securityContext, final FeedStore feedStore, final TaskContextFactory taskContextFactory) { this.metaService = metaService; this.pipelineStore = pipelineStore; this.securityContext = securityContext; this.feedStore = feedStore; this.taskContextFactory = taskContextFactory; } @Override public List<String> fetch(final FetchSuggestionsRequest request) { return securityContext.secureResult(() -> { List<String> result = Collections.emptyList(); if (request.getDataSource() != null) { if (MetaFields.STREAM_STORE_DOC_REF.equals(request.getDataSource())) { final String fieldName = request.getField().getName(); final Function<String, List<String>> suggestionFunc = fieldNameToFunctionMap.get(fieldName); if (suggestionFunc != null) { result = suggestionFunc.apply(request.getText()); } } } return result; }); } private boolean matchesMetaField(final FetchSuggestionsRequest request, final AbstractField fieldToMatch) { Objects.requireNonNull(fieldToMatch); return fieldToMatch.getName().equals(request.getField().getName()); } @NotNull private List<String> createPipelineList(final String userInput) { final List<String> result; final Stream<String> stream = pipelineStore.list().stream() .map(DocRef::getName); result = QuickFilterPredicateFactory.filterStream(userInput, stream) .limit(LIMIT) .collect(Collectors.toList()); return result; } @NotNull private List<String> createStatusList(final String userInput) { final List<String> result; Stream<String> stream = Arrays.stream(Status.values()) .map(Status::getDisplayValue); result = QuickFilterPredicateFactory.filterStream(userInput, stream) .limit(LIMIT) .collect(Collectors.toList()); return result; } private List<String> createFeedList(final String userInput) { // TODO this seems pretty inefficient as each call hits the db to get ALL feeds // then limits/filters in java. 
Needs to work off a cached feed name list return taskContextFactory.contextResult("Get all feed names", parentTaskContext -> createFeedList(parentTaskContext, userInput)).get(); } private List<String> createFeedList(final TaskContext parentTaskContext, final String userInput) { // To get a list of feed names we need to combine the names from the meta service // and the feed store. Meta service only has feeds which have data, but may contain // feeds that have been deleted as docs. final CompletableFuture<Set<String>> metaFeedsFuture = CompletableFuture.supplyAsync( taskContextFactory.contextResult( "Get meta feed names", taskContext -> metaService.getFeeds())); final CompletableFuture<List<String>> docFeedsFuture = CompletableFuture.supplyAsync( taskContextFactory.contextResult( "Get doc feed names", taskContext -> feedStore.list() .stream() .map(DocRef::getName) .collect(Collectors.toList()))); try { // Make async calls to get the two lists then combine return metaFeedsFuture .thenCombine(docFeedsFuture, (metaFeedNames, docFeedNames) -> QuickFilterPredicateFactory.filterStream( userInput, Stream.concat(metaFeedNames.stream(), docFeedNames.stream()) .parallel()) .limit(LIMIT) .collect(Collectors.toList())) .get(); } catch (InterruptedException e) { Thread.currentThread().interrupt(); LOGGER.error("Thread interrupted", e); return Collections.emptyList(); } catch (ExecutionException e) { throw new RuntimeException("Error getting feed name suggestions: " + e.getMessage(), e); } } private List<String> createStreamTypeList(final String userInput) { // TODO this seems pretty inefficient as each call hits the db to get ALL feeds // then limits/filters in java. Needs to work off a cached feed name list return QuickFilterPredicateFactory.filterStream( userInput, metaService.getTypes().parallelStream()) .limit(LIMIT) .collect(Collectors.toList()); } }
gh-2155 Add removed distinct clause
stroom-core/src/main/java/stroom/core/query/SuggestionsServiceImpl.java
gh-2155 Add removed distinct clause
<ide><path>troom-core/src/main/java/stroom/core/query/SuggestionsServiceImpl.java <ide> userInput, <ide> Stream.concat(metaFeedNames.stream(), docFeedNames.stream()) <ide> .parallel()) <add> .distinct() <ide> .limit(LIMIT) <ide> .collect(Collectors.toList())) <ide> .get();
Java
mit
352f5c851f012dba2e93fd9064e5ff1c8e6b0924
0
elBukkit/MagicPlugin,elBukkit/MagicPlugin,elBukkit/MagicLib,elBukkit/MagicPlugin
package com.elmakers.mine.bukkit.utility;

import com.elmakers.mine.bukkit.api.action.CastContext;
import com.elmakers.mine.bukkit.api.block.UndoList;
import com.elmakers.mine.bukkit.api.magic.Mage;
import com.elmakers.mine.bukkit.api.spell.TargetType;
import org.bukkit.ChatColor;
import org.bukkit.Location;
import org.bukkit.Material;
import org.bukkit.World;
import org.bukkit.block.Block;
import org.bukkit.block.BlockFace;
import org.bukkit.command.BlockCommandSender;
import org.bukkit.command.CommandSender;
import org.bukkit.configuration.ConfigurationSection;
import org.bukkit.entity.Entity;
import org.bukkit.entity.LivingEntity;
import org.bukkit.metadata.FixedMetadataValue;
import org.bukkit.plugin.Plugin;
import org.bukkit.util.BlockIterator;
import org.bukkit.util.Vector;

import java.lang.ref.WeakReference;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import java.util.UUID;

/**
 * Stateful helper that resolves what a spell cast is aimed at: a block, an
 * entity, or a miss.
 *
 * <p>Typical lifecycle (as exercised by this class's own methods): configure via
 * {@link #processParameters(ConfigurationSection)}, begin a cast with
 * {@link #start(Location)}, then call {@link #target(CastContext, double)} to
 * obtain a {@code Target}. Block targeting walks a {@link BlockIterator} along
 * the source's line of sight; entity targeting queries nearby entities and
 * scores them (see {@link #getAllTargetEntities(CastContext, double)}).
 *
 * <p>NOTE(review): {@code Target}, {@code CastContext}, {@code BoundingBox},
 * {@code CompatibilityUtils} and {@code ConfigurationUtils} are project types
 * defined elsewhere; comments below only describe how this class uses them.
 */
public class Targeting {
    // --- Per-cast state, cleared by reset() ---
    private TargetingResult result = TargetingResult.NONE;
    private Location source = null;
    private Target target = null;
    private List<Target> targets = null;               // cached result of getAllTargetEntities for this cast
    private TargetType targetType = TargetType.NONE;
    private BlockIterator blockIterator = null;        // lazily created line-of-sight walker
    private Block currentBlock = null;
    private Block previousBlock = null;
    private Block previousPreviousBlock = null;
    private Vector targetLocationOffset;               // "otx"/"oty"/"otz" offset applied to the final target
    private Vector targetDirectionOverride;            // "otdx"/"otdy"/"otdz" direction forced onto the target
    private String targetLocationWorldName;            // "otworld": world override for the target location

    // --- Entity scoring weights (used in the non-hitbox Target constructor) ---
    protected float distanceWeight = 1;
    protected float fovWeight = 4;
    protected int npcWeight = -1;
    // NOTE(review): mageWeight is passed to the Target score constructor but is
    // never read from parameters in processParameters — confirm if intentional.
    protected int mageWeight = 5;
    protected int playerWeight = 4;
    protected int livingEntityWeight = 3;

    // --- Configuration (set by processParameters or setters) ---
    private boolean ignoreBlocks = false;              // skip block targeting entirely
    private int targetBreakableDepth = 2;              // recursion cap per neighbor in breakBlockRecursively
    private double hitboxPadding = 0;                  // extra padding around entity hitboxes
    private double hitboxBlockPadding = 0;             // extra padding around block hitboxes
    private double rangeQueryPadding = 1;              // padding added to the entity range query
    private boolean useHitbox = true;                  // hitbox intersection vs. FOV cone scoring
    private double fov = 0.3;
    private double closeRange = 0;
    private double closeFOV = 0;
    private double yOffset = 0;                        // vertical offset fed to the BlockIterator
    private boolean targetSpaceRequired = false;       // require a standable 2-block space at the target
    private int targetMinOffset = 0;                   // skip this many candidate blocks before targeting
    private Set<UUID> ignoreEntities = null;           // entities excluded from targeting, by UUID

    /** Outcome of the most recent targeting pass. */
    public enum TargetingResult {
        NONE,
        BLOCK,
        ENTITY,
        MISS
    };

    /**
     * Clear all per-cast state so this instance can be reused for a new cast.
     * Note this also resets targetSpaceRequired, targetMinOffset and yOffset,
     * but leaves processParameters-driven configuration intact.
     */
    public void reset() {
        result = TargetingResult.NONE;
        source = null;
        target = null;
        targets = null;
        blockIterator = null;
        currentBlock = null;
        previousBlock = null;
        previousPreviousBlock = null;
        targetSpaceRequired = false;
        targetMinOffset = 0;
        yOffset = 0;
    }

    /**
     * Lazily create the line-of-sight BlockIterator starting at the given
     * location, clamping Y into the world's valid [0, maxHeight] range first.
     *
     * @param location starting point of the trace (cloned before mutation)
     * @param range    maximum trace distance in blocks (rounded up)
     * @return true if an iterator is available, false if construction failed
     */
    protected boolean initializeBlockIterator(Location location, double range) {
        if (blockIterator != null) {
            return true;
        }
        // Clamp the start position into the world's vertical bounds; the
        // original location is cloned so the caller's object is not mutated.
        if (location.getBlockY() < 0) {
            location = location.clone();
            location.setY(0);
        }
        int maxHeight = CompatibilityUtils.getMaxHeight(location.getWorld());
        if (location.getBlockY() > maxHeight) {
            location = location.clone();
            location.setY(maxHeight);
        }
        try {
            blockIterator = new BlockIterator(location, yOffset, (int)Math.ceil(range));
        } catch (Exception ex) {
            if (Target.DEBUG_TARGETING) {
                org.bukkit.Bukkit.getLogger().warning("Exception creating BlockIterator");
                ex.printStackTrace();
            }
            // This seems to happen randomly, like when you use the same target.
            // Very annoying, and I now kind of regret switching to BlockIterator.
            // At any rate, we're going to just re-use the last target block and
            // cross our fingers!
            return false;
        }
        return true;
    }

    /**
     * Return the current target, creating one at the given default location
     * if no target has been resolved yet.
     */
    public Target getOrCreateTarget(Location defaultLocation) {
        if (target == null) {
            target = new Target(defaultLocation);
        }
        return target;
    }

    /** @return the most recently resolved target, or null if none. */
    public Target getTarget() {
        return target;
    }

    /** @return true if a target has been resolved for this cast. */
    public boolean hasTarget() {
        return target != null;
    }

    /** Require that the target block has standable space (see findTargetBlock). */
    public void setTargetSpaceRequired(boolean required) {
        targetSpaceRequired = required;
    }

    /** Skip the first {@code offset} candidate blocks along the trace. */
    public void setTargetMinOffset(int offset) {
        targetMinOffset = offset;
    }

    /** Force the target to a specific block, using the block-hitbox settings. */
    public void targetBlock(Location source, Block block) {
        target = new Target(source, block, useHitbox, hitboxBlockPadding);
    }

    /** Vertical offset applied when the BlockIterator is created. */
    public void setYOffset(int offset) {
        yOffset = offset;
    }

    /**
     * Move "steps" forward along line of vision and returns the block there
     *
     * Also shifts the previous/previousPrevious block history used by
     * findTargetBlock to back up on a miss.
     *
     * @return The block at the new location
     */
    protected Block getNextBlock() {
        previousPreviousBlock = previousBlock;
        previousBlock = currentBlock;
        if (blockIterator == null || !blockIterator.hasNext()) {
            currentBlock = null;
        } else {
            currentBlock = blockIterator.next();
        }

        return currentBlock;
    }

    /**
     * Returns the current block along the line of vision
     *
     * @return The block
     */
    public Block getCurBlock() {
        return currentBlock;
    }

    /**
     * Returns the previous block along the line of vision
     *
     * @return The block
     */
    public Block getPreviousBlock() {
        return previousBlock;
    }

    /** Field-of-view threshold used by non-hitbox entity scoring. */
    public void setFOV(double fov) {
        this.fov = fov;
    }

    /** Distance under which the relaxed closeFOV threshold applies. */
    public void setCloseRange(double closeRange) {
        this.closeRange = closeRange;
    }

    /** FOV threshold used inside closeRange. */
    public void setCloseFOV(double closeFOV) {
        this.closeFOV = closeFOV;
    }

    /** Toggle hitbox-intersection targeting vs. FOV cone scoring. */
    public void setUseHitbox(boolean useHitbox) {
        this.useHitbox = useHitbox;
    }

    /** @return the configured target type. */
    public TargetType getTargetType() {
        return targetType;
    }

    /** Set the target type directly (see also parseTargetType). */
    public void setTargetType(TargetType type) {
        targetType = type;
    }

    /** Begin a new targeting pass from the given source location (cloned). */
    public void start(Location source) {
        reset();
        this.source = source.clone();
    }

    /**
     * Resolve the target for this cast, then apply any configured location
     * offset, direction override and world override, and emit debug output
     * at mage debug level > 6.
     *
     * @param context the cast context; its eye location is used if no source was set
     * @param range   maximum targeting range
     * @return the resolved target (never null)
     */
    public Target target(CastContext context, double range) {
        if (source == null) {
            source = context.getEyeLocation();
        }
        target = findTarget(context, range);

        // Post-process: apply configured offset / direction / world overrides.
        if (targetLocationOffset != null) {
            target.add(targetLocationOffset);
        }
        if (targetDirectionOverride != null) {
            target.setDirection(targetDirectionOverride);
        }
        if (targetLocationWorldName != null && targetLocationWorldName.length() > 0) {
            Location location = target.getLocation();
            if (location != null) {
                World targetWorld = location.getWorld();
                target.setWorld(ConfigurationUtils.overrideWorld(targetLocationWorldName, targetWorld, context.getController().canCreateWorlds()));
            }
        }
        Mage mage = context.getMage();
        if (mage != null && mage.getDebugLevel() > 6) {
            Location targetLocation = target.getLocation();
            String message = ChatColor.GREEN + "Targeted from " + ChatColor.GRAY + source.getBlockX() + ChatColor.DARK_GRAY + "," + ChatColor.GRAY + source.getBlockY() + ChatColor.DARK_GRAY + "," + ChatColor.GRAY + source.getBlockZ() + ChatColor.DARK_GREEN + " with range of " + ChatColor.GREEN + range + ChatColor.DARK_GREEN + ": " + ChatColor.GOLD + result;
            Entity targetEntity = target.getEntity();
            if (targetEntity != null) {
                message = message + ChatColor.DARK_GREEN + " (" + ChatColor.YELLOW + targetEntity.getType() + ChatColor.DARK_GREEN + ")";
            }
            if (targetLocation != null) {
                message = message + ChatColor.DARK_GREEN + " (" + ChatColor.LIGHT_PURPLE + targetLocation.getBlock().getType() + ChatColor.DARK_GREEN + ")";
                message = message + ChatColor.DARK_GREEN + " at " + ChatColor.GRAY + targetLocation.getBlockX() + ChatColor.DARK_GRAY + "," + ChatColor.GRAY + targetLocation.getBlockY() + ChatColor.DARK_GRAY + "," + ChatColor.GRAY + targetLocation.getBlockZ();
            }
            mage.sendDebugMessage(message);
        }
        return target;
    }

    /**
     * Returns the block at the cursor, or null if out of range
     *
     * Core targeting dispatch: handles SELF/NONE short-circuits, then block
     * targeting, then entity targeting, and finally arbitrates between the
     * closest block hit and the best-scoring entity. Sets {@link #result}
     * as a side effect on most paths.
     *
     * @return The target block
     */
    protected Target findTarget(CastContext context, double range) {
        if (targetType == TargetType.NONE) {
            return new Target(source);
        }
        boolean isBlock = targetType == TargetType.BLOCK || targetType == TargetType.SELECT;

        // SELF targeting: prefer the caster entity, then a command block
        // sender, then the source block, then the bare source location.
        Mage mage = context.getMage();
        final Entity mageEntity = mage.getEntity();
        if (targetType == TargetType.SELF && mageEntity != null) {
            result = TargetingResult.ENTITY;
            return new Target(source, mageEntity);
        }

        CommandSender sender = mage.getCommandSender();
        if (targetType == TargetType.SELF && mageEntity == null && sender != null && (sender instanceof BlockCommandSender)) {
            BlockCommandSender commandBlock = (BlockCommandSender)mage.getCommandSender();
            return new Target(commandBlock.getBlock().getLocation(), commandBlock.getBlock());
        }

        if (targetType == TargetType.SELF && source != null) {
            return new Target(source, source.getBlock());
        }

        if (targetType == TargetType.SELF) {
            return new Target(source);
        }

        Block block = null;
        if (!ignoreBlocks) {
            findTargetBlock(context, range);
            block = currentBlock;
        }

        if (isBlock) {
            return new Target(source, block, useHitbox, hitboxBlockPadding);
        }

        Target targetBlock = block == null ? null : new Target(source, block, useHitbox, hitboxBlockPadding);

        // Don't target entities beyond the block we just hit
        if (targetBlock != null && source != null && source.getWorld().equals(block.getWorld())) {
            range = Math.min(range, source.distance(targetBlock.getLocation()));
        }

        // Pick the closest candidate entity
        Target entityTarget = null;
        List<Target> scored = getAllTargetEntities(context, range);
        if (scored.size() > 0) {
            entityTarget = scored.get(0);
        }

        // Don't allow targeting entities in an area you couldn't cast the spell in
        if (entityTarget != null && !context.canCast(entityTarget.getLocation())) {
            entityTarget = null;
        }
        if (targetBlock != null && !context.canCast(targetBlock.getLocation())) {
            result = TargetingResult.MISS;
            targetBlock = null;
        }

        if (targetType == TargetType.OTHER_ENTITY && entityTarget == null) {
            result = TargetingResult.MISS;
            return new Target(source);
        }

        // ANY_ENTITY falls back to the caster when nothing else was hit.
        if (targetType == TargetType.ANY_ENTITY && entityTarget == null) {
            result = TargetingResult.ENTITY;
            return new Target(source, mageEntity);
        }

        if (entityTarget == null && targetType == TargetType.ANY && mageEntity != null) {
            result = TargetingResult.ENTITY;
            return new Target(source, mageEntity, targetBlock == null ? null : targetBlock.getBlock());
        }

        // Both a block and an entity were hit: keep whichever is closer.
        if (targetBlock != null && entityTarget != null) {
            if (targetBlock.getDistanceSquared() < entityTarget.getDistanceSquared()) {
                entityTarget = null;
            } else {
                targetBlock = null;
            }
        }

        if (entityTarget != null) {
            result = TargetingResult.ENTITY;
            return entityTarget;
        } else if (targetBlock != null) {
            return targetBlock;
        }

        result = TargetingResult.MISS;
        return new Target(source);
    }

    /**
     * Walk the line of sight and leave the hit block (or last block before a
     * miss) in {@link #currentBlock}, setting {@link #result} to BLOCK or MISS.
     * Honors targetMinOffset (skip N candidates) and targetSpaceRequired
     * (stop only where a 2-block standable space exists).
     */
    protected void findTargetBlock(CastContext context, double range) {
        if (source == null) {
            return;
        }
        currentBlock = source.getBlock();
        if (context.isTargetable(currentBlock)) {
            result = TargetingResult.BLOCK;
            return;
        }

        // Pre-check for no block movement
        Location targetLocation = source.clone().add(source.getDirection().multiply(range));
        if (targetLocation.getBlockX() == source.getBlockX() && targetLocation.getBlockY() == source.getBlockY() && targetLocation.getBlockZ() == source.getBlockZ()) {
            result = TargetingResult.MISS;
            return;
        }

        if (!initializeBlockIterator(source, range)) {
            return;
        }
        Block block = getNextBlock();
        result = TargetingResult.BLOCK;
        while (block != null) {
            if (targetMinOffset <= 0) {
                if (targetSpaceRequired) {
                    // Stop at the first non-pass-through block, or at a spot
                    // where this block and the one above it are standable.
                    if (!context.allowPassThrough(block.getType())) {
                        break;
                    }
                    if (context.isOkToStandIn(block.getType()) && context.isOkToStandIn(block.getRelative(BlockFace.UP).getType())) {
                        break;
                    }
                } else if (context.isTargetable(block)) {
                    break;
                }
            } else {
                targetMinOffset--;
            }
            block = getNextBlock();
        }
        if (block == null) {
            // Ran out of range: back up one step so currentBlock is the last
            // real block we visited.
            result = TargetingResult.MISS;
            currentBlock = previousBlock;
            previousBlock = previousPreviousBlock;
        }
    }

    /**
     * Find and score all targetable entities within range, sorted best-first.
     * The result is cached in {@link #targets} for the rest of this cast.
     * Entities in {@link #ignoreEntities} and (unless the context targets the
     * caster) the source entity itself are skipped.
     *
     * @return sorted list of candidate targets; may be empty, never null
     */
    public List<Target> getAllTargetEntities(CastContext context, double range) {
        Entity sourceEntity = context.getEntity();
        Mage mage = context.getMage();

        if (targets != null) {
            return targets;
        }
        targets = new ArrayList<Target>();

        // A fuzzy optimization range-check. A hard range limit is enforced in the final target consolidator
        double rangeSquaredPadded = (range + 1) * (range + 1);

        List<Entity> entities = null;
        boolean debugMessage = true;
        if (source == null && sourceEntity != null) {
            // No explicit source: query around the source entity and derive
            // the source location from its eye location when possible.
            range = Math.min(range + hitboxPadding + rangeQueryPadding, CompatibilityUtils.MAX_ENTITY_RANGE);
            entities = sourceEntity.getNearbyEntities(range, range, range);
            if (sourceEntity instanceof LivingEntity) {
                source = ((LivingEntity)sourceEntity).getEyeLocation();
            } else {
                source = sourceEntity.getLocation();
            }
        } else if (source != null) {
            Vector queryRange = null;
            Location sourceLocation = source;
            if (useHitbox) {
                // Hitbox mode: query a padded bounding box that spans from the
                // source to the end of the aim ray, centered on its midpoint.
                range = Math.min(range, CompatibilityUtils.MAX_ENTITY_RANGE);
                Vector direction = source.getDirection();
                Location targetLocation = source.clone().add(direction.multiply(range));
                BoundingBox bounds = new BoundingBox(source.toVector(), targetLocation.toVector());
                bounds.expand(hitboxPadding + rangeQueryPadding);
                Vector center = bounds.center();
                sourceLocation = new Location(source.getWorld(), center.getX(), center.getY(), center.getZ());
                queryRange = bounds.size();
            } else {
                // FOV mode: query a simple cube around the source.
                queryRange = new Vector(range * 2, range * 2, range * 2);
                sourceLocation = source;
            }
            if (mage != null && mage.getDebugLevel() > 8) {
                mage.sendDebugMessage(ChatColor.GREEN + "Targeting entities from " + ChatColor.GRAY + source.getBlockX() + ChatColor.DARK_GRAY + "," + ChatColor.GRAY + source.getBlockY() + ChatColor.DARK_GRAY + "," + ChatColor.GRAY + source.getBlockZ() + " via bounding box " + ChatColor.GRAY + (int)Math.ceil(queryRange.getX()) + ChatColor.DARK_GRAY + "," + ChatColor.GRAY + (int)Math.ceil(queryRange.getY()) + ChatColor.DARK_GRAY + "," + ChatColor.GRAY + (int)Math.ceil(queryRange.getZ()) + ChatColor.DARK_GREEN + " with range of " + ChatColor.GREEN + range);
                debugMessage = false;
            }
            entities = CompatibilityUtils.getNearbyEntities(sourceLocation, queryRange.getX() / 2, queryRange.getY() / 2, queryRange.getZ() / 2);
        }
        if (debugMessage && mage != null && mage.getDebugLevel() > 8) {
            mage.sendDebugMessage(ChatColor.GREEN + "Targeting entities from " + ChatColor.GRAY + source.getBlockX() + ChatColor.DARK_GRAY + "," + ChatColor.GRAY + source.getBlockY() + ChatColor.DARK_GRAY + "," + ChatColor.GRAY + source.getBlockZ() + ChatColor.DARK_GREEN + " with range of " + ChatColor.GREEN + range);
        }
        if (entities == null) return targets;
        for (Entity entity : entities) {
            // Filter: explicitly ignored UUIDs, the caster (unless allowed),
            // wrong world, out of (padded) range, or disallowed by the context.
            if (ignoreEntities != null && ignoreEntities.contains(entity.getUniqueId())) continue;
            if (sourceEntity != null && entity.equals(sourceEntity) && !context.getTargetsCaster()) continue;
            Location entityLocation = entity instanceof LivingEntity ? ((LivingEntity)entity).getEyeLocation() : entity.getLocation();
            if (!entityLocation.getWorld().equals(source.getWorld())) continue;
            if (entityLocation.distanceSquared(source) > rangeSquaredPadded) continue;
            if (!context.canTarget(entity)) continue;
            // Score the candidate; only positive scores are kept.
            Target newScore = null;
            if (useHitbox) {
                newScore = new Target(source, entity, (int)Math.ceil(range), useHitbox, hitboxPadding);
            } else {
                newScore = new Target(source, entity, (int)Math.ceil(range), fov, closeRange, closeFOV, distanceWeight, fovWeight, mageWeight, npcWeight, playerWeight, livingEntityWeight);
            }
            if (newScore.getScore() > 0) {
                if (mage != null && mage.getDebugLevel() > 5) {
                    mage.sendDebugMessage(ChatColor.DARK_GREEN + "Target " + ChatColor.GREEN + entity.getType() + ChatColor.DARK_GREEN + ": " + ChatColor.YELLOW + newScore.getScore());
                }
                targets.add(newScore);
            }
        }

        Collections.sort(targets);
        return targets;
    }

    /**
     * Parse a TargetType from its (case-insensitive) name; falls back to
     * NONE on null or unrecognized input.
     */
    public void parseTargetType(String targetTypeName) {
        targetType = TargetType.NONE;
        if (targetTypeName != null) {
            try {
                targetType = TargetType.valueOf(targetTypeName.toUpperCase());
            } catch (Exception ex) {
                targetType = TargetType.NONE;
            }
        }
    }

    /**
     * Load targeting configuration from a parameter section. Defaults match
     * the field initializers except where noted (e.g. close_range defaults
     * to 1 here but the field initializer is 0). "tmo" is a short alias that
     * overrides "target_min_offset". Clears any previously configured
     * location offset and direction override before re-reading them.
     */
    public void processParameters(ConfigurationSection parameters) {
        parseTargetType(parameters.getString("target"));
        // Hitbox targeting is the default unless an explicit fov is configured.
        useHitbox = parameters.getBoolean("hitbox", !parameters.contains("fov"));
        hitboxPadding = parameters.getDouble("hitbox_size", 0);
        hitboxBlockPadding = parameters.getDouble("hitbox_block_size", 0);
        rangeQueryPadding = parameters.getDouble("range_padding", 1);
        fov = parameters.getDouble("fov", 0.3);
        closeRange = parameters.getDouble("close_range", 1);
        closeFOV = parameters.getDouble("close_fov", 0.5);
        distanceWeight = (float)parameters.getDouble("distance_weight", 1);
        fovWeight = (float)parameters.getDouble("fov_weight", 4);
        npcWeight = parameters.getInt("npc_weight", -1);
        playerWeight = parameters.getInt("player_weight", 4);
        livingEntityWeight = parameters.getInt("entity_weight", 3);
        targetMinOffset = parameters.getInt("target_min_offset", 0);
        targetMinOffset = parameters.getInt("tmo", targetMinOffset);
        ignoreBlocks = parameters.getBoolean("ignore_blocks", false);
        targetBreakableDepth = parameters.getInt("target_breakable_depth", 2);

        targetLocationOffset = null;
        targetDirectionOverride = null;

        // Optional target location offset: present if any of otx/oty/otz is set.
        Double otxValue = ConfigurationUtils.getDouble(parameters, "otx", null);
        Double otyValue = ConfigurationUtils.getDouble(parameters, "oty", null);
        Double otzValue = ConfigurationUtils.getDouble(parameters, "otz", null);
        if (otxValue != null || otzValue != null || otyValue != null) {
            targetLocationOffset = new Vector(
                    (otxValue == null ? 0 : otxValue),
                    (otyValue == null ? 0 : otyValue),
                    (otzValue == null ? 0 : otzValue));
        }
        targetLocationWorldName = parameters.getString("otworld");

        // Optional target direction override: present if any of otdx/otdy/otdz is set.
        Double tdxValue = ConfigurationUtils.getDouble(parameters, "otdx", null);
        Double tdyValue = ConfigurationUtils.getDouble(parameters, "otdy", null);
        Double tdzValue = ConfigurationUtils.getDouble(parameters, "otdz", null);
        if (tdxValue != null || tdzValue != null || tdyValue != null) {
            targetDirectionOverride = new Vector(
                    (tdxValue == null ? 0 : tdxValue),
                    (tdyValue == null ? 0 : tdyValue),
                    (tdzValue == null ? 0 : tdzValue));
        }
    }

    /** @return the outcome of the most recent targeting pass. */
    public TargetingResult getResult() {
        return result;
    }

    /**
     * Collect up to targetCount entity targets (best-scored first) into the
     * given collection as weak references. A negative targetCount means "as
     * many as the collection already holds" — NOTE(review): this reads
     * entities.size() from the output collection; confirm callers rely on
     * that convention.
     */
    public void getTargetEntities(CastContext context, double range, int targetCount, Collection<WeakReference<Entity>> entities) {
        List<Target> candidates = getAllTargetEntities(context, range);
        if (targetCount < 0) {
            targetCount = entities.size();
        }

        for (int i = 0; i < targetCount && i < candidates.size(); i++) {
            Target target = candidates.get(i);
            entities.add(new WeakReference<Entity>(target.getEntity()));
        }
    }

    /**
     * Break the given block (set to AIR) and recursively break its six
     * neighbors, depth-limited both by the remaining depth budget and by
     * targetBreakableDepth per branch. Plays break effects and records the
     * block in any undo list covering its location.
     *
     * @return the number of blocks broken
     */
    protected int breakBlockRecursively(CastContext context, Block block, int depth) {
        if (depth <= 0) return 0;
        if (!context.isBreakable(block)) return 0;

        // Play break FX
        Location blockLocation = block.getLocation();
        Location effectLocation = blockLocation.add(0.5, 0.5, 0.5);
        context.playEffects("break", 1, context.getLocation(), null, effectLocation, null);

        // TODO: Re-examime this?
        UndoList undoList = com.elmakers.mine.bukkit.block.UndoList.getUndoList(blockLocation);
        if (undoList != null) {
            undoList.add(block);
        }

        context.clearBreakable(block);
        context.clearReflective(block);
        block.setType(Material.AIR);
        int broken = 1;
        if (depth > broken) {
            broken += breakBlockRecursively(context, block.getRelative(BlockFace.UP), Math.min(targetBreakableDepth, depth - broken));
            broken += breakBlockRecursively(context, block.getRelative(BlockFace.DOWN), Math.min(targetBreakableDepth, depth - broken));
            broken += breakBlockRecursively(context, block.getRelative(BlockFace.EAST), Math.min(targetBreakableDepth, depth - broken));
            broken += breakBlockRecursively(context, block.getRelative(BlockFace.WEST), Math.min(targetBreakableDepth, depth - broken));
            broken += breakBlockRecursively(context, block.getRelative(BlockFace.NORTH), Math.min(targetBreakableDepth, depth - broken));
            broken += breakBlockRecursively(context, block.getRelative(BlockFace.SOUTH), Math.min(targetBreakableDepth, depth - broken));
        }
        return broken;
    }

    /**
     * Attempt to break a breakable block. An amount in (0, 1] is treated as a
     * probability of breaking one block; an amount > 1 is used directly.
     * NOTE(review): the result of the ternary is cast to int before being
     * stored in a double, so fractional amounts > 1 are truncated — confirm
     * whether that truncation is intentional.
     *
     * @return the number of blocks broken (0 if the block is not breakable)
     */
    public int breakBlock(CastContext context, Block block, double amount) {
        if (amount <= 0) return 0;
        Double breakableAmount = context.getBreakable(block);
        if (breakableAmount == null) return 0;

        double breakable = (int)(amount > 1 ? amount : (context.getRandom().nextDouble() < amount ? 1 : 0));
        if (breakable <= 0) return 0;
        return breakBlockRecursively(context, block, (int)Math.ceil(breakableAmount + breakable - 1));
    }

    /** Mark an entity as tracked via "tracking" metadata owned by the plugin. */
    public static void track(Plugin plugin, Entity tracked) {
        tracked.setMetadata("tracking", new FixedMetadataValue(plugin, true));
    }

    /**
     * Record a hit on a tracked entity: stores the hit target (as a weak
     * reference) in "hit" metadata, or a null marker if there was no target
     * and no hit has been recorded yet.
     *
     * @return false if the entity is null or was never marked via track()
     */
    public static boolean checkTracking(Plugin plugin, Entity tracked, Entity target) {
        if (tracked == null || !tracked.hasMetadata("tracking")) {
            return false;
        }
        if (target != null) {
            tracked.setMetadata("hit", new FixedMetadataValue(plugin, new WeakReference<Entity>(target)));
        } else if (!tracked.hasMetadata("hit")) {
            tracked.setMetadata("hit", new FixedMetadataValue(plugin, null));
        }

        return true;
    }

    /** Exclude the given entity UUIDs from all subsequent entity targeting. */
    public void setIgnoreEntities(Set<UUID> ignoreEntities) {
        this.ignoreEntities = ignoreEntities;
    }
}
src/main/java/com/elmakers/mine/bukkit/utility/Targeting.java
package com.elmakers.mine.bukkit.utility; import com.elmakers.mine.bukkit.api.action.CastContext; import com.elmakers.mine.bukkit.api.block.UndoList; import com.elmakers.mine.bukkit.api.magic.Mage; import com.elmakers.mine.bukkit.api.spell.TargetType; import org.bukkit.ChatColor; import org.bukkit.Location; import org.bukkit.Material; import org.bukkit.World; import org.bukkit.block.Block; import org.bukkit.block.BlockFace; import org.bukkit.command.BlockCommandSender; import org.bukkit.command.CommandSender; import org.bukkit.configuration.ConfigurationSection; import org.bukkit.entity.Entity; import org.bukkit.entity.LivingEntity; import org.bukkit.metadata.FixedMetadataValue; import org.bukkit.plugin.Plugin; import org.bukkit.util.BlockIterator; import org.bukkit.util.Vector; import java.lang.ref.WeakReference; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.List; public class Targeting { private TargetingResult result = TargetingResult.NONE; private Location source = null; private Target target = null; private List<Target> targets = null; private TargetType targetType = TargetType.NONE; private BlockIterator blockIterator = null; private Block currentBlock = null; private Block previousBlock = null; private Block previousPreviousBlock = null; private Vector targetLocationOffset; private Vector targetDirectionOverride; private String targetLocationWorldName; protected float distanceWeight = 1; protected float fovWeight = 4; protected int npcWeight = -1; protected int mageWeight = 5; protected int playerWeight = 4; protected int livingEntityWeight = 3; private boolean ignoreBlocks = false; private int targetBreakableDepth = 2; private double hitboxPadding = 0; private double hitboxBlockPadding = 0; private double rangeQueryPadding = 1; private boolean useHitbox = true; private double fov = 0.3; private double closeRange = 0; private double closeFOV = 0; private double yOffset = 0; private boolean 
targetSpaceRequired = false; private int targetMinOffset = 0; public enum TargetingResult { NONE, BLOCK, ENTITY, MISS }; public void reset() { result = TargetingResult.NONE; source = null; target = null; targets = null; blockIterator = null; currentBlock = null; previousBlock = null; previousPreviousBlock = null; targetSpaceRequired = false; targetMinOffset = 0; yOffset = 0; } protected boolean initializeBlockIterator(Location location, double range) { if (blockIterator != null) { return true; } if (location.getBlockY() < 0) { location = location.clone(); location.setY(0); } int maxHeight = CompatibilityUtils.getMaxHeight(location.getWorld()); if (location.getBlockY() > maxHeight) { location = location.clone(); location.setY(maxHeight); } try { blockIterator = new BlockIterator(location, yOffset, (int)Math.ceil(range)); } catch (Exception ex) { if (Target.DEBUG_TARGETING) { org.bukkit.Bukkit.getLogger().warning("Exception creating BlockIterator"); ex.printStackTrace(); } // This seems to happen randomly, like when you use the same target. // Very annoying, and I now kind of regret switching to BlockIterator. // At any rate, we're going to just re-use the last target block and // cross our fingers! 
return false; } return true; } public Target getOrCreateTarget(Location defaultLocation) { if (target == null) { target = new Target(defaultLocation); } return target; } public Target getTarget() { return target; } public boolean hasTarget() { return target != null; } public void setTargetSpaceRequired(boolean required) { targetSpaceRequired = required; } public void setTargetMinOffset(int offset) { targetMinOffset = offset; } public void targetBlock(Location source, Block block) { target = new Target(source, block, useHitbox, hitboxBlockPadding); } public void setYOffset(int offset) { yOffset = offset; } /** * Move "steps" forward along line of vision and returns the block there * * @return The block at the new location */ protected Block getNextBlock() { previousPreviousBlock = previousBlock; previousBlock = currentBlock; if (blockIterator == null || !blockIterator.hasNext()) { currentBlock = null; } else { currentBlock = blockIterator.next(); } return currentBlock; } /** * Returns the current block along the line of vision * * @return The block */ public Block getCurBlock() { return currentBlock; } /** * Returns the previous block along the line of vision * * @return The block */ public Block getPreviousBlock() { return previousBlock; } public void setFOV(double fov) { this.fov = fov; } public void setCloseRange(double closeRange) { this.closeRange = closeRange; } public void setCloseFOV(double closeFOV) { this.closeFOV = closeFOV; } public void setUseHitbox(boolean useHitbox) { this.useHitbox = useHitbox; } public TargetType getTargetType() { return targetType; } public void setTargetType(TargetType type) { targetType = type; } public void start(Location source) { reset(); this.source = source.clone(); } public Target target(CastContext context, double range) { if (source == null) { source = context.getEyeLocation(); } target = findTarget(context, range); if (targetLocationOffset != null) { target.add(targetLocationOffset); } if (targetDirectionOverride != 
null) { target.setDirection(targetDirectionOverride); } if (targetLocationWorldName != null && targetLocationWorldName.length() > 0) { Location location = target.getLocation(); if (location != null) { World targetWorld = location.getWorld(); target.setWorld(ConfigurationUtils.overrideWorld(targetLocationWorldName, targetWorld, context.getController().canCreateWorlds())); } } Mage mage = context.getMage(); if (mage != null && mage.getDebugLevel() > 6) { Location targetLocation = target.getLocation(); String message = ChatColor.GREEN + "Targeted from " + ChatColor.GRAY + source.getBlockX() + ChatColor.DARK_GRAY + "," + ChatColor.GRAY + source.getBlockY() + ChatColor.DARK_GRAY + "," + ChatColor.GRAY + source.getBlockZ() + ChatColor.DARK_GREEN + " with range of " + ChatColor.GREEN + range + ChatColor.DARK_GREEN + ": " + ChatColor.GOLD + result; Entity targetEntity = target.getEntity(); if (targetEntity != null) { message = message + ChatColor.DARK_GREEN + " (" + ChatColor.YELLOW + targetEntity.getType() + ChatColor.DARK_GREEN + ")"; } if (targetLocation != null) { message = message + ChatColor.DARK_GREEN + " (" + ChatColor.LIGHT_PURPLE + targetLocation.getBlock().getType() + ChatColor.DARK_GREEN + ")"; message = message + ChatColor.DARK_GREEN + " at " + ChatColor.GRAY + targetLocation.getBlockX() + ChatColor.DARK_GRAY + "," + ChatColor.GRAY + targetLocation.getBlockY() + ChatColor.DARK_GRAY + "," + ChatColor.GRAY + targetLocation.getBlockZ(); } mage.sendDebugMessage(message); } return target; } /** * Returns the block at the cursor, or null if out of range * * @return The target block */ protected Target findTarget(CastContext context, double range) { if (targetType == TargetType.NONE) { return new Target(source); } boolean isBlock = targetType == TargetType.BLOCK || targetType == TargetType.SELECT; Mage mage = context.getMage(); final Entity mageEntity = mage.getEntity(); if (targetType == TargetType.SELF && mageEntity != null) { result = TargetingResult.ENTITY; 
return new Target(source, mageEntity); } CommandSender sender = mage.getCommandSender(); if (targetType == TargetType.SELF && mageEntity == null && sender != null && (sender instanceof BlockCommandSender)) { BlockCommandSender commandBlock = (BlockCommandSender)mage.getCommandSender(); return new Target(commandBlock.getBlock().getLocation(), commandBlock.getBlock()); } if (targetType == TargetType.SELF && source != null) { return new Target(source, source.getBlock()); } if (targetType == TargetType.SELF) { return new Target(source); } Block block = null; if (!ignoreBlocks) { findTargetBlock(context, range); block = currentBlock; } if (isBlock) { return new Target(source, block, useHitbox, hitboxBlockPadding); } Target targetBlock = block == null ? null : new Target(source, block, useHitbox, hitboxBlockPadding); // Don't target entities beyond the block we just hit if (targetBlock != null && source != null && source.getWorld().equals(block.getWorld())) { range = Math.min(range, source.distance(targetBlock.getLocation())); } // Pick the closest candidate entity Target entityTarget = null; List<Target> scored = getAllTargetEntities(context, range); if (scored.size() > 0) { entityTarget = scored.get(0); } // Don't allow targeting entities in an area you couldn't cast the spell in if (entityTarget != null && !context.canCast(entityTarget.getLocation())) { entityTarget = null; } if (targetBlock != null && !context.canCast(targetBlock.getLocation())) { result = TargetingResult.MISS; targetBlock = null; } if (targetType == TargetType.OTHER_ENTITY && entityTarget == null) { result = TargetingResult.MISS; return new Target(source); } if (targetType == TargetType.ANY_ENTITY && entityTarget == null) { result = TargetingResult.ENTITY; return new Target(source, mageEntity); } if (entityTarget == null && targetType == TargetType.ANY && mageEntity != null) { result = TargetingResult.ENTITY; return new Target(source, mageEntity, targetBlock == null ? 
null : targetBlock.getBlock()); } if (targetBlock != null && entityTarget != null) { if (targetBlock.getDistanceSquared() < entityTarget.getDistanceSquared()) { entityTarget = null; } else { targetBlock = null; } } if (entityTarget != null) { result = TargetingResult.ENTITY; return entityTarget; } else if (targetBlock != null) { return targetBlock; } result = TargetingResult.MISS; return new Target(source); } protected void findTargetBlock(CastContext context, double range) { if (source == null) { return; } currentBlock = source.getBlock(); if (context.isTargetable(currentBlock)) { result = TargetingResult.BLOCK; return; } // Pre-check for no block movement Location targetLocation = source.clone().add(source.getDirection().multiply(range)); if (targetLocation.getBlockX() == source.getBlockX() && targetLocation.getBlockY() == source.getBlockY() && targetLocation.getBlockZ() == source.getBlockZ()) { result = TargetingResult.MISS; return; } if (!initializeBlockIterator(source, range)) { return; } Block block = getNextBlock(); result = TargetingResult.BLOCK; while (block != null) { if (targetMinOffset <= 0) { if (targetSpaceRequired) { if (!context.allowPassThrough(block.getType())) { break; } if (context.isOkToStandIn(block.getType()) && context.isOkToStandIn(block.getRelative(BlockFace.UP).getType())) { break; } } else if (context.isTargetable(block)) { break; } } else { targetMinOffset--; } block = getNextBlock(); } if (block == null) { result = TargetingResult.MISS; currentBlock = previousBlock; previousBlock = previousPreviousBlock; } } public List<Target> getAllTargetEntities(CastContext context, double range) { Entity sourceEntity = context.getEntity(); Mage mage = context.getMage(); if (targets != null) { return targets; } targets = new ArrayList<Target>(); // A fuzzy optimization range-check. 
A hard range limit is enforced in the final target consolidator double rangeSquaredPadded = (range + 1) * (range + 1); List<Entity> entities = null; boolean debugMessage = true; if (source == null && sourceEntity != null) { range = Math.min(range + hitboxPadding + rangeQueryPadding, CompatibilityUtils.MAX_ENTITY_RANGE); entities = sourceEntity.getNearbyEntities(range, range, range); if (sourceEntity instanceof LivingEntity) { source = ((LivingEntity)sourceEntity).getEyeLocation(); } else { source = sourceEntity.getLocation(); } } else if (source != null) { Vector queryRange = null; Location sourceLocation = source; if (useHitbox) { range = Math.min(range, CompatibilityUtils.MAX_ENTITY_RANGE); Vector direction = source.getDirection(); Location targetLocation = source.clone().add(direction.multiply(range)); BoundingBox bounds = new BoundingBox(source.toVector(), targetLocation.toVector()); bounds.expand(hitboxPadding + rangeQueryPadding); Vector center = bounds.center(); sourceLocation = new Location(source.getWorld(), center.getX(), center.getY(), center.getZ()); queryRange = bounds.size(); } else { queryRange = new Vector(range * 2, range * 2, range * 2); sourceLocation = source; } if (mage != null && mage.getDebugLevel() > 8) { mage.sendDebugMessage(ChatColor.GREEN + "Targeting entities from " + ChatColor.GRAY + source.getBlockX() + ChatColor.DARK_GRAY + "," + ChatColor.GRAY + source.getBlockY() + ChatColor.DARK_GRAY + "," + ChatColor.GRAY + source.getBlockZ() + " via bounding box " + ChatColor.GRAY + (int)Math.ceil(queryRange.getX()) + ChatColor.DARK_GRAY + "," + ChatColor.GRAY + (int)Math.ceil(queryRange.getY()) + ChatColor.DARK_GRAY + "," + ChatColor.GRAY + (int)Math.ceil(queryRange.getZ()) + ChatColor.DARK_GREEN + " with range of " + ChatColor.GREEN + range); debugMessage = false; } entities = CompatibilityUtils.getNearbyEntities(sourceLocation, queryRange.getX() / 2, queryRange.getY() / 2, queryRange.getZ() / 2); } if (debugMessage && mage != null && 
mage.getDebugLevel() > 8) { mage.sendDebugMessage(ChatColor.GREEN + "Targeting entities from " + ChatColor.GRAY + source.getBlockX() + ChatColor.DARK_GRAY + "," + ChatColor.GRAY + source.getBlockY() + ChatColor.DARK_GRAY + "," + ChatColor.GRAY + source.getBlockZ() + ChatColor.DARK_GREEN + " with range of " + ChatColor.GREEN + range); } if (entities == null) return targets; for (Entity entity : entities) { if (sourceEntity != null && entity.equals(sourceEntity) && !context.getTargetsCaster()) continue; Location entityLocation = entity instanceof LivingEntity ? ((LivingEntity)entity).getEyeLocation() : entity.getLocation(); if (!entityLocation.getWorld().equals(source.getWorld())) continue; if (entityLocation.distanceSquared(source) > rangeSquaredPadded) continue; if (!context.canTarget(entity)) continue; Target newScore = null; if (useHitbox) { newScore = new Target(source, entity, (int)Math.ceil(range), useHitbox, hitboxPadding); } else { newScore = new Target(source, entity, (int)Math.ceil(range), fov, closeRange, closeFOV, distanceWeight, fovWeight, mageWeight, npcWeight, playerWeight, livingEntityWeight); } if (newScore.getScore() > 0) { if (mage != null && mage.getDebugLevel() > 5) { mage.sendDebugMessage(ChatColor.DARK_GREEN + "Target " + ChatColor.GREEN + entity.getType() + ChatColor.DARK_GREEN + ": " + ChatColor.YELLOW + newScore.getScore()); } targets.add(newScore); } } Collections.sort(targets); return targets; } public void parseTargetType(String targetTypeName) { targetType = TargetType.NONE; if (targetTypeName != null) { try { targetType = TargetType.valueOf(targetTypeName.toUpperCase()); } catch (Exception ex) { targetType = TargetType.NONE; } } } public void processParameters(ConfigurationSection parameters) { parseTargetType(parameters.getString("target")); useHitbox = parameters.getBoolean("hitbox", !parameters.contains("fov")); hitboxPadding = parameters.getDouble("hitbox_size", 0); hitboxBlockPadding = parameters.getDouble("hitbox_block_size", 0); 
rangeQueryPadding = parameters.getDouble("range_padding", 1); fov = parameters.getDouble("fov", 0.3); closeRange = parameters.getDouble("close_range", 1); closeFOV = parameters.getDouble("close_fov", 0.5); distanceWeight = (float)parameters.getDouble("distance_weight", 1); fovWeight = (float)parameters.getDouble("fov_weight", 4); npcWeight = parameters.getInt("npc_weight", -1); playerWeight = parameters.getInt("player_weight", 4); livingEntityWeight = parameters.getInt("entity_weight", 3); targetMinOffset = parameters.getInt("target_min_offset", 0); targetMinOffset = parameters.getInt("tmo", targetMinOffset); ignoreBlocks = parameters.getBoolean("ignore_blocks", false); targetBreakableDepth = parameters.getInt("target_breakable_depth", 2); targetLocationOffset = null; targetDirectionOverride = null; Double otxValue = ConfigurationUtils.getDouble(parameters, "otx", null); Double otyValue = ConfigurationUtils.getDouble(parameters, "oty", null); Double otzValue = ConfigurationUtils.getDouble(parameters, "otz", null); if (otxValue != null || otzValue != null || otyValue != null) { targetLocationOffset = new Vector( (otxValue == null ? 0 : otxValue), (otyValue == null ? 0 : otyValue), (otzValue == null ? 0 : otzValue)); } targetLocationWorldName = parameters.getString("otworld"); Double tdxValue = ConfigurationUtils.getDouble(parameters, "otdx", null); Double tdyValue = ConfigurationUtils.getDouble(parameters, "otdy", null); Double tdzValue = ConfigurationUtils.getDouble(parameters, "otdz", null); if (tdxValue != null || tdzValue != null || tdyValue != null) { targetDirectionOverride = new Vector( (tdxValue == null ? 0 : tdxValue), (tdyValue == null ? 0 : tdyValue), (tdzValue == null ? 
0 : tdzValue)); } } public TargetingResult getResult() { return result; } public void getTargetEntities(CastContext context, double range, int targetCount, Collection<WeakReference<Entity>> entities) { List<Target> candidates = getAllTargetEntities(context, range); if (targetCount < 0) { targetCount = entities.size(); } for (int i = 0; i < targetCount && i < candidates.size(); i++) { Target target = candidates.get(i); entities.add(new WeakReference<Entity>(target.getEntity())); } } protected int breakBlockRecursively(CastContext context, Block block, int depth) { if (depth <= 0) return 0; if (!context.isBreakable(block)) return 0; // Play break FX Location blockLocation = block.getLocation(); Location effectLocation = blockLocation.add(0.5, 0.5, 0.5); context.playEffects("break", 1, context.getLocation(), null, effectLocation, null); // TODO: Re-examime this? UndoList undoList = com.elmakers.mine.bukkit.block.UndoList.getUndoList(blockLocation); if (undoList != null) { undoList.add(block); } context.clearBreakable(block); context.clearReflective(block); block.setType(Material.AIR); int broken = 1; if (depth > broken) { broken += breakBlockRecursively(context, block.getRelative(BlockFace.UP), Math.min(targetBreakableDepth, depth - broken)); broken += breakBlockRecursively(context, block.getRelative(BlockFace.DOWN), Math.min(targetBreakableDepth, depth - broken)); broken += breakBlockRecursively(context, block.getRelative(BlockFace.EAST), Math.min(targetBreakableDepth, depth - broken)); broken += breakBlockRecursively(context, block.getRelative(BlockFace.WEST), Math.min(targetBreakableDepth, depth - broken)); broken += breakBlockRecursively(context, block.getRelative(BlockFace.NORTH), Math.min(targetBreakableDepth, depth - broken)); broken += breakBlockRecursively(context, block.getRelative(BlockFace.SOUTH), Math.min(targetBreakableDepth, depth - broken)); } return broken; } public int breakBlock(CastContext context, Block block, double amount) { if (amount <= 0) 
return 0; Double breakableAmount = context.getBreakable(block); if (breakableAmount == null) return 0; double breakable = (int)(amount > 1 ? amount : (context.getRandom().nextDouble() < amount ? 1 : 0)); if (breakable <= 0) return 0; return breakBlockRecursively(context, block, (int)Math.ceil(breakableAmount + breakable - 1)); } public static void track(Plugin plugin, Entity tracked) { tracked.setMetadata("tracking", new FixedMetadataValue(plugin, true)); } public static boolean checkTracking(Plugin plugin, Entity tracked, Entity target) { if (tracked == null || !tracked.hasMetadata("tracking")) { return false; } if (target != null) { tracked.setMetadata("hit", new FixedMetadataValue(plugin, new WeakReference<Entity>(target))); } else if (!tracked.hasMetadata("hit")) { tracked.setMetadata("hit", new FixedMetadataValue(plugin, null)); } return true; } }
Add ability to ignore specific entities by UUID when targeting
src/main/java/com/elmakers/mine/bukkit/utility/Targeting.java
Add ability to ignore specific entities by UUID when targeting
<ide><path>rc/main/java/com/elmakers/mine/bukkit/utility/Targeting.java <ide> import java.util.Collection; <ide> import java.util.Collections; <ide> import java.util.List; <add>import java.util.Set; <add>import java.util.UUID; <ide> <ide> public class Targeting { <ide> private TargetingResult result = TargetingResult.NONE; <ide> private double yOffset = 0; <ide> private boolean targetSpaceRequired = false; <ide> private int targetMinOffset = 0; <add> private Set<UUID> ignoreEntities = null; <ide> <ide> public enum TargetingResult { <ide> NONE, <ide> if (entities == null) return targets; <ide> for (Entity entity : entities) <ide> { <add> if (ignoreEntities != null && ignoreEntities.contains(entity.getUniqueId())) continue; <ide> if (sourceEntity != null && entity.equals(sourceEntity) && !context.getTargetsCaster()) continue; <ide> Location entityLocation = entity instanceof LivingEntity ? ((LivingEntity)entity).getEyeLocation() : entity.getLocation(); <ide> if (!entityLocation.getWorld().equals(source.getWorld())) continue; <ide> <ide> return true; <ide> } <add> <add> public void setIgnoreEntities(Set<UUID> ignoreEntities) { <add> this.ignoreEntities = ignoreEntities; <add> } <ide> }
JavaScript
mit
5d5bd8e991d6965ae603c507409962a9345601a4
0
phetsims/sun,phetsims/sun,phetsims/sun
// Copyright 2013-2015, University of Colorado Boulder /** * Check box. * * @author Chris Malley (PixelZoom, Inc.) */ define( function( require ) { 'use strict'; // modules var ButtonListener = require( 'SCENERY/input/ButtonListener' ); var FontAwesomeNode = require( 'SUN/FontAwesomeNode' ); var inherit = require( 'PHET_CORE/inherit' ); var Node = require( 'SCENERY/nodes/Node' ); var Rectangle = require( 'SCENERY/nodes/Rectangle' ); var Text = require( 'SCENERY/nodes/Text' ); var AccessiblePeer = require( 'SCENERY/accessibility/AccessiblePeer' ); // constants var DISABLED_OPACITY = 0.3; /** * @param {Node} content * @param {Property.<boolean>} property * @constructor * @param {Object} [options] */ function CheckBox( content, property, options ) { // @public (together) Store for dispose(); Use a unique name to reduce the risk of collisions with parent/child classes // Made public for together so that clients can access the checkbox value and change it through the together API this.checkBoxValueProperty = property; options = _.extend( { spacing: 5, boxWidth: 21, cursor: 'pointer', checkBoxColor: 'black', checkBoxColorBackground: 'white', tabIndex: 0, focusable: true, tandem: null, /* * {function( {Node} checkBox, {boolean} enabled ) } * Strategy for controlling the check box's appearance, excluding any content. * This can be a stock strategy from this file or custom. * To create a custom one, model it off of the stock strategies defined in this file. */ checkBoxAppearanceStrategy: CheckBox.fadeCheckBoxWhenDisabled, /* * {function( {Node} content, {boolean} enabled )} * Strategy for controlling the appearance of the content based on the check box's state. * This can be a stock strategy from this file, or custom. * To create a custom one, model it off of the stock version(s) defined in this file. 
*/ contentAppearanceStrategy: CheckBox.fadeContentWhenDisabled }, options ); var thisNode = this; Node.call( this ); thisNode.content = content; // @private thisNode.checkBoxAppearanceStrategy = options.checkBoxAppearanceStrategy; // @private thisNode.contentAppearanceStrategy = options.contentAppearanceStrategy; // @private thisNode._enabled = true; // @private // Make the background. Until we are creating our own shapes, just // put a rectangle behind the font awesome check box icons. thisNode.backgroundNode = new Rectangle( 0, -options.boxWidth, options.boxWidth * 0.95, options.boxWidth * 0.95, options.boxWidth * 0.2, options.boxWidth * 0.2, { fill: options.checkBoxColorBackground } ); // @private thisNode.uncheckedNode = new FontAwesomeNode( 'check_empty', { fill: options.checkBoxColor } ); var iconScale = options.boxWidth / thisNode.uncheckedNode.width; thisNode.uncheckedNode.scale( iconScale ); // @private thisNode.checkedNode = new FontAwesomeNode( 'check', { scale: iconScale, fill: options.checkBoxColor } ); // @private this.checkBoxNode = new Node( { children: [ thisNode.backgroundNode, thisNode.checkedNode, thisNode.uncheckedNode ] } ); thisNode.addChild( this.checkBoxNode ); thisNode.addChild( content ); content.left = thisNode.checkedNode.right + options.spacing; content.centerY = thisNode.checkedNode.centerY; // put a rectangle on top of everything to prevent dead zones when clicking thisNode.addChild( new Rectangle( thisNode.left, thisNode.top, thisNode.width, thisNode.height ) ); content.pickable = false; // since there's a pickable rectangle on top of content // @private interactivity this.fire = function() { if ( thisNode._enabled ) { var oldValue = property.value; var newValue = !property.value; thisNode.trigger2( 'startedCallbacksForToggled', oldValue, newValue ); property.value = newValue; thisNode.trigger0( 'endedCallbacksForToggled' ); } }; this.checkBoxButtonListener = new ButtonListener( { fire: this.fire } ); thisNode.addInputListener( 
this.checkBoxButtonListener ); // sync with property this.checkBoxCheckedListener = function( checked ) { thisNode.checkedNode.visible = checked; thisNode.uncheckedNode.visible = !checked; _.each( thisNode.instances, function( instance ) { //Make sure accessibility is enabled, then apply the change to the peer _.each( instance.peers, function( peer ) { peer.element.setAttribute( 'checked', checked ); } ); } ); }; property.link( this.checkBoxCheckedListener ); // Apply additional options thisNode.mutate( options ); // Tandem support // Give it a novel name to reduce the risk of parent or child collisions this.checkBoxTandem = options.tandem; this.checkBoxTandem && this.checkBoxTandem.addInstance( this ); // Accessibility support this.setAccessibleContent( { createPeer: function( accessibleInstance ) { return new CheckBoxAccessiblePeer( accessibleInstance, thisNode.fire, options.accessibleLabel ); } } ); } inherit( Node, CheckBox, { dispose: function() { this.checkBoxTandem && this.checkBoxTandem.removeInstance( this ); this.checkBoxValueProperty.unlink( this.checkBoxCheckedListener ); this.removeInputListener( this.checkBoxButtonListener ); }, get checkBoxColorBackground() { return this.backgroundNode.fill; }, set checkBoxColorBackground( value ) { this.backgroundNode.fill = value; }, get checkBoxColor() { return this.checkedNode.fill; }, set checkBoxColor( value ) { this.checkedNode.fill = this.uncheckedNode.fill = value; }, /** * Is the check box enabled? * @returns {boolean} */ getEnabled: function() { return this._enabled; }, get enabled() { return this.getEnabled(); }, /** * Sets whether the check box is enabled. 
* @param {boolean} value */ setEnabled: function( value ) { this._enabled = this.pickable = value; this.checkBoxAppearanceStrategy( this.checkBoxNode, value ); this.contentAppearanceStrategy( this.content, value ); }, set enabled( value ) { this.setEnabled( value ); } }, { /** * Default for options.checkBoxAppearanceStrategy, fades the check box by changing opacity. * @param {Node} checkBoxNode the check box * @param {boolean} enabled * @static */ fadeCheckBoxWhenDisabled: function( checkBoxNode, enabled ) { checkBoxNode.opacity = enabled ? 1 : DISABLED_OPACITY; }, /** * Default for options.contentAppearanceStrategy, fades the content by changing opacity. * @param {Node} content the content that appears next to the check box * @param {boolean} enabled * @static */ fadeContentWhenDisabled: function( content, enabled ) { content.opacity = enabled ? 1 : DISABLED_OPACITY; }, /** * Factory method, creates a check box with a text label and optional icon. * @param {string} text * @param {Object} textOptions options passed to scenery.Text constructor * @param {Property.<boolean>} property * @param {Object} [checkBoxOptions] options passed to CheckBox constructor * @returns {CheckBox} * @static */ createTextCheckBox: function( text, textOptions, property, checkBoxOptions ) { textOptions = textOptions || {}; checkBoxOptions = _.extend( { icon: null, // an optional node, added to the right of the text iconSpacing: 15 }, checkBoxOptions ); var content = new Node(); // text var textNode = new Text( text, textOptions ); content.addChild( textNode ); // optional icon if ( checkBoxOptions.icon ) { content.addChild( checkBoxOptions.icon ); //TODO support different layouts of text and image? checkBoxOptions.icon.left = textNode.right + checkBoxOptions.iconSpacing; checkBoxOptions.icon.centerY = textNode.centerY; } return new CheckBox( content, property, checkBoxOptions ); } } ); /** * An accessible peer for creating a check box element in the Parallel DOM. 
* See https://github.com/phetsims/scenery/issues/461 * * @param {AccessibleInstance} accessibleInstance * @param {function} fire - listener function fired by this checkbox * @param {string} accessibleLabel - invisible string description for accessible technologies */ function CheckBoxAccessiblePeer( accessibleInstance, fire, accessibleLabel ) { this.initialize( accessibleInstance, fire, accessibleLabel ); } inherit( AccessiblePeer, CheckBoxAccessiblePeer, { /** * Initialize dom element and its attributes for the accessible check box peer of the parallel DOM. * * @param {AccessibleInstance} accessibleInstance * @param {function} fire * @param {string} accessibleLabel */ initialize: function( accessibleInstance, fire, accessibleLabel ) { var trail = accessibleInstance.trail; // will look like <input id="checkBoxId" value="check box value" type="checkbox">Check Box Name<br> this.domElement = document.createElement( 'input' ); // @private this.initializeAccessiblePeer( accessibleInstance, this.domElement ); this.domElement.type = 'checkbox'; // if an accessible label has been passed in, add it as a label to the dom element if ( accessibleLabel ) { var uniqueId = trail.getUniqueId(); this.domElement.id = 'checkBox-' + uniqueId; var checkBoxLabel = document.createElement( 'label' ); checkBoxLabel.setAttribute( 'for', this.domElement.id ); checkBoxLabel.innerText = accessibleLabel; this.domElement.appendChild( checkBoxLabel ); } this.domElement.tabIndex = '0'; this.domElement.addEventListener( 'click', function() { fire(); } ); }, /** * Dispose function for the accessible check box. */ dispose: function() { // TODO } } ); return CheckBox; } );
js/CheckBox.js
// Copyright 2013-2015, University of Colorado Boulder /** * Check box. * * @author Chris Malley (PixelZoom, Inc.) */ define( function( require ) { 'use strict'; // modules var ButtonListener = require( 'SCENERY/input/ButtonListener' ); var FontAwesomeNode = require( 'SUN/FontAwesomeNode' ); var inherit = require( 'PHET_CORE/inherit' ); var Node = require( 'SCENERY/nodes/Node' ); var Rectangle = require( 'SCENERY/nodes/Rectangle' ); var Text = require( 'SCENERY/nodes/Text' ); var AccessiblePeer = require( 'SCENERY/accessibility/AccessiblePeer' ); // constants var DISABLED_OPACITY = 0.3; /** * @param {Node} content * @param {Property.<boolean>} property * @constructor * @param {Object} [options] */ function CheckBox( content, property, options ) { // Store for dispose(); Use a unique name to reduce the risk of collisions with parent/child classes this.checkBoxValueProperty = property; options = _.extend( { spacing: 5, boxWidth: 21, cursor: 'pointer', checkBoxColor: 'black', checkBoxColorBackground: 'white', tabIndex: 0, focusable: true, tandem: null, /* * {function( {Node} checkBox, {boolean} enabled ) } * Strategy for controlling the check box's appearance, excluding any content. * This can be a stock strategy from this file or custom. * To create a custom one, model it off of the stock strategies defined in this file. */ checkBoxAppearanceStrategy: CheckBox.fadeCheckBoxWhenDisabled, /* * {function( {Node} content, {boolean} enabled )} * Strategy for controlling the appearance of the content based on the check box's state. * This can be a stock strategy from this file, or custom. * To create a custom one, model it off of the stock version(s) defined in this file. 
*/ contentAppearanceStrategy: CheckBox.fadeContentWhenDisabled }, options ); var thisNode = this; Node.call( this ); thisNode.content = content; // @private thisNode.checkBoxAppearanceStrategy = options.checkBoxAppearanceStrategy; // @private thisNode.contentAppearanceStrategy = options.contentAppearanceStrategy; // @private thisNode._enabled = true; // @private // Make the background. Until we are creating our own shapes, just // put a rectangle behind the font awesome check box icons. thisNode.backgroundNode = new Rectangle( 0, -options.boxWidth, options.boxWidth * 0.95, options.boxWidth * 0.95, options.boxWidth * 0.2, options.boxWidth * 0.2, { fill: options.checkBoxColorBackground } ); // @private thisNode.uncheckedNode = new FontAwesomeNode( 'check_empty', { fill: options.checkBoxColor } ); var iconScale = options.boxWidth / thisNode.uncheckedNode.width; thisNode.uncheckedNode.scale( iconScale ); // @private thisNode.checkedNode = new FontAwesomeNode( 'check', { scale: iconScale, fill: options.checkBoxColor } ); // @private this.checkBoxNode = new Node( { children: [ thisNode.backgroundNode, thisNode.checkedNode, thisNode.uncheckedNode ] } ); thisNode.addChild( this.checkBoxNode ); thisNode.addChild( content ); content.left = thisNode.checkedNode.right + options.spacing; content.centerY = thisNode.checkedNode.centerY; // put a rectangle on top of everything to prevent dead zones when clicking thisNode.addChild( new Rectangle( thisNode.left, thisNode.top, thisNode.width, thisNode.height ) ); content.pickable = false; // since there's a pickable rectangle on top of content // @private interactivity this.fire = function() { if ( thisNode._enabled ) { var oldValue = property.value; var newValue = !property.value; thisNode.trigger2( 'startedCallbacksForToggled', oldValue, newValue ); property.value = newValue; thisNode.trigger0( 'endedCallbacksForToggled' ); } }; this.checkBoxButtonListener = new ButtonListener( { fire: this.fire } ); thisNode.addInputListener( 
this.checkBoxButtonListener ); // sync with property this.checkBoxCheckedListener = function( checked ) { thisNode.checkedNode.visible = checked; thisNode.uncheckedNode.visible = !checked; _.each( thisNode.instances, function( instance ) { //Make sure accessibility is enabled, then apply the change to the peer _.each( instance.peers, function( peer ) { peer.element.setAttribute( 'checked', checked ); } ); } ); }; property.link( this.checkBoxCheckedListener ); // Apply additional options thisNode.mutate( options ); // Tandem support // Give it a novel name to reduce the risk of parent or child collisions this.checkBoxTandem = options.tandem; this.checkBoxTandem && this.checkBoxTandem.addInstance( this ); // Accessibility support this.setAccessibleContent( { createPeer: function( accessibleInstance ) { return new CheckBoxAccessiblePeer( accessibleInstance, thisNode.fire, options.accessibleLabel ); } } ); } inherit( Node, CheckBox, { dispose: function() { this.checkBoxTandem && this.checkBoxTandem.removeInstance( this ); this.checkBoxValueProperty.unlink( this.checkBoxCheckedListener ); this.removeInputListener( this.checkBoxButtonListener ); }, get checkBoxColorBackground() { return this.backgroundNode.fill; }, set checkBoxColorBackground( value ) { this.backgroundNode.fill = value; }, get checkBoxColor() { return this.checkedNode.fill; }, set checkBoxColor( value ) { this.checkedNode.fill = this.uncheckedNode.fill = value; }, /** * Is the check box enabled? * @returns {boolean} */ getEnabled: function() { return this._enabled; }, get enabled() { return this.getEnabled(); }, /** * Sets whether the check box is enabled. 
* @param {boolean} value */ setEnabled: function( value ) { this._enabled = this.pickable = value; this.checkBoxAppearanceStrategy( this.checkBoxNode, value ); this.contentAppearanceStrategy( this.content, value ); }, set enabled( value ) { this.setEnabled( value ); } }, { /** * Default for options.checkBoxAppearanceStrategy, fades the check box by changing opacity. * @param {Node} checkBoxNode the check box * @param {boolean} enabled * @static */ fadeCheckBoxWhenDisabled: function( checkBoxNode, enabled ) { checkBoxNode.opacity = enabled ? 1 : DISABLED_OPACITY; }, /** * Default for options.contentAppearanceStrategy, fades the content by changing opacity. * @param {Node} content the content that appears next to the check box * @param {boolean} enabled * @static */ fadeContentWhenDisabled: function( content, enabled ) { content.opacity = enabled ? 1 : DISABLED_OPACITY; }, /** * Factory method, creates a check box with a text label and optional icon. * @param {string} text * @param {Object} textOptions options passed to scenery.Text constructor * @param {Property.<boolean>} property * @param {Object} [checkBoxOptions] options passed to CheckBox constructor * @returns {CheckBox} * @static */ createTextCheckBox: function( text, textOptions, property, checkBoxOptions ) { textOptions = textOptions || {}; checkBoxOptions = _.extend( { icon: null, // an optional node, added to the right of the text iconSpacing: 15 }, checkBoxOptions ); var content = new Node(); // text var textNode = new Text( text, textOptions ); content.addChild( textNode ); // optional icon if ( checkBoxOptions.icon ) { content.addChild( checkBoxOptions.icon ); //TODO support different layouts of text and image? checkBoxOptions.icon.left = textNode.right + checkBoxOptions.iconSpacing; checkBoxOptions.icon.centerY = textNode.centerY; } return new CheckBox( content, property, checkBoxOptions ); } } ); /** * An accessible peer for creating a check box element in the Parallel DOM. 
* See https://github.com/phetsims/scenery/issues/461 * * @param {AccessibleInstance} accessibleInstance * @param {function} fire - listener function fired by this checkbox * @param {string} accessibleLabel - invisible string description for accessible technologies */ function CheckBoxAccessiblePeer( accessibleInstance, fire, accessibleLabel ) { this.initialize( accessibleInstance, fire, accessibleLabel ); } inherit( AccessiblePeer, CheckBoxAccessiblePeer, { /** * Initialize dom element and its attributes for the accessible check box peer of the parallel DOM. * * @param {AccessibleInstance} accessibleInstance * @param {function} fire * @param {string} accessibleLabel */ initialize: function( accessibleInstance, fire, accessibleLabel ) { var trail = accessibleInstance.trail; // will look like <input id="checkBoxId" value="check box value" type="checkbox">Check Box Name<br> this.domElement = document.createElement( 'input' ); // @private this.initializeAccessiblePeer( accessibleInstance, this.domElement ); this.domElement.type = 'checkbox'; // if an accessible label has been passed in, add it as a label to the dom element if( accessibleLabel ) { var uniqueId = trail.getUniqueId(); this.domElement.id = 'checkBox-' + uniqueId; var checkBoxLabel = document.createElement( 'label' ); checkBoxLabel.setAttribute( 'for', this.domElement.id ); checkBoxLabel.innerText = accessibleLabel; this.domElement.appendChild( checkBoxLabel ); } this.domElement.tabIndex = '0'; this.domElement.addEventListener( 'click', function() { fire(); } ); }, /** * Dispose function for the accessible check box. */ dispose: function() { // TODO } } ); return CheckBox; } );
Added docs regarding visibility of a member variable
js/CheckBox.js
Added docs regarding visibility of a member variable
<ide><path>s/CheckBox.js <ide> */ <ide> function CheckBox( content, property, options ) { <ide> <del> // Store for dispose(); Use a unique name to reduce the risk of collisions with parent/child classes <add> // @public (together) Store for dispose(); Use a unique name to reduce the risk of collisions with parent/child classes <add> // Made public for together so that clients can access the checkbox value and change it through the together API <ide> this.checkBoxValueProperty = property; <ide> <ide> options = _.extend( { <ide> this.domElement.type = 'checkbox'; <ide> <ide> // if an accessible label has been passed in, add it as a label to the dom element <del> if( accessibleLabel ) { <add> if ( accessibleLabel ) { <ide> var uniqueId = trail.getUniqueId(); <ide> this.domElement.id = 'checkBox-' + uniqueId; <ide>