Column        Feature type    Values / range
lang          stringclasses   2 values
license       stringclasses   13 values
stderr        stringlengths   0 – 343
commit        stringlengths   40 – 40
returncode    int64           0 – 128
repos         stringlengths   6 – 87.7k
new_contents  stringlengths   0 – 6.23M
new_file      stringlengths   3 – 311
old_contents  stringlengths   0 – 6.23M
message       stringlengths   6 – 9.1k
old_file      stringlengths   3 – 311
subject       stringlengths   0 – 4k
git_diff      stringlengths   0 – 6.31M
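Each block below is one row of the dataset: a file's contents before and after a single commit, together with the commit metadata and the stored diff. As a minimal sketch of how rows with this schema could be read, assuming the data is hosted on the Hugging Face Hub and loadable with the `datasets` library, the snippet below streams records and prints a few fields. The dataset path "user/commit-diffs" is a hypothetical placeholder, not the real identifier; the column names follow the schema above.

```python
# Minimal sketch of iterating rows with the schema listed above.
# "user/commit-diffs" is a hypothetical placeholder, not the dataset's real path.
from datasets import load_dataset

ds = load_dataset("user/commit-diffs", split="train", streaming=True)

for row in ds:
    # Each record pairs a file before and after one commit, plus the diff text.
    print(row["lang"], row["license"], row["commit"][:8], row["returncode"])
    print("file:", row["old_file"], "->", row["new_file"])
    print("subject:", row["subject"])
    print(row["git_diff"][:300])  # head of the stored diff
    break
```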
JavaScript
mit
5075ad33323ded0915a279969ef125d6fd2efd09
0
git-clay/jaywalk-slapp-app,git-clay/jaywalk-slapp-app
'use strict' const slapp = require('../slackSetup.js').slapp const getRadius = require('../radius.js').getRadius const tinyurl = require('tinyurl'); const request = require('request') //db imports const firebase = require('../firebaseSetup.js'), db = firebase.db, snaps = firebase.snaps, tags = firebase.tags, users = firebase.users let options = { url: "https://beepboophq.com/api/v1/chronos/tasks", method: "POST", headers: { authorization: "Bearer "+process.env.BEEPBOOP_TOKEN, 'content-type': 'application/json', 'cache-control': 'no-cache' }, body: { "method": "POST", "schedule": "", "url":"https://hooks.slack.com/services/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", "payload":{ "text": "" } }, json:true }; function setCron(options,answer,time,msg){ options.body.payload.text = `If you would like ${answer} suggestions type /${answer}` request(options, function (error, response, body) { if (error) throw new Error(error); // msg.say(JSON.stringify(response)) msg.say(`${answer} set at ${time}`) }); } let notify = function() { let randomNum = 0; slapp.command('/jaywalkNotify', (msg, text) => { let state = { requested: Date.now() } msg .say({ text: '', attachments: [{ text: 'What notifications do you want on this channel?', fallback: 'Set timed events.', callback_id: 'doit_confirm_callback', actions: [{ name: 'answer', text: 'Coffee Spots', type: 'button', value: 'Breakfast' }, { name: 'answer', text: 'Lunch', type: 'button', value: 'Lunch' }, { name: 'answer', text: 'Dinner', type: 'button', value: 'Dinner' }, { name: 'answer', text: 'Happy Hour', type: 'button', value: 'Happy Hour' }, { name: 'answer', text: 'Weekend fun', type: 'button', value: 'Local Bar' } ] }] }) .route('scheduler', state, 60) //expires after 60 sec }) .route('scheduler', (msg, state) => { let answer = msg.body.actions[0].value // user may not have typed text as their next action, ask again and re-route // if (!randSnap || !randTag) { // return msg // .say("Whoops, you just have to pick a button...") // .say('Click a button!') // .route('getid1', state) // } if(answer == 'Breakfast'){ // posts a /command VV - needs a stable auth token though // https://slack.com/api/chat.command?token=xoxp-72951567362-161234057782-169954154770-3be4727d4a5086453d9f42c5bb2af872&channel=C4T6LKUP7&command=%2Ftest&as_user=true&pretty=1 // https://slack.com/api/chat.command?token=xoxp-72951567362-161234057782-178935658950-f3ba1bd9902f7494344d472e1f123882&channel=C4T6LKUP7&command=%2Ftest&pretty=1 //need to handle timezones.... /* ! 
Based on UTC time (GMT +06:00:00) or utc is denver+6hours min 0-59 hour 0-23 day of month 1-31 month 1-12 day of week 0-6 (sun-sat) year 2016-9999 */ options.body.schedule = "0 13 * * 1-5 *" // mon-fri @ 7:00am gmt let time = "mon-fri @ 7:00am gmt" setCron(options,answer,time,msg) }else if(answer == 'Lunch'){ options.body.schedule = "30 17 * * 1-5 *" // mon-fri @ 11:30am gmt let time = "mon-fri @ 11:30am gmt" setCron(options,answer,time,msg) }else if(answer == 'Dinner'){ options.body.schedule = "30 23 * * 1-5 *" // mon-fri @ 5:30pm gmt let time = "mon-fri @ 5:30pm gmt" setCron(options,answer,time,msg) }else if(answer == 'Happy Hour'){ options.body.schedule = "30 22 * * 1-5 *" // mon-fri @ 4:30pm gmt let time = "mon-fri @ 4:30pm gmt" setCron(options,answer,time,msg) }else if(answer == 'Local Bar'){ options.body.schedule = "0 02 * * 0,5,6 *" // thurs-sat @ 8:00pm gmt let time = "mon-fri @ 8:00pm gmt" setCron(options,answer,time,msg) }else{ //handle error return msg .say("Whoops, you just have to pick a button...") .say('Click a button!') .route('getDbinfo', state) } }) } module.exports = { notify: notify() }
slashCommands/notifySlash.js
'use strict' const slapp = require('../slackSetup.js').slapp const getRadius = require('../radius.js').getRadius const tinyurl = require('tinyurl'); const request = require('request') //db imports const firebase = require('../firebaseSetup.js'), db = firebase.db, snaps = firebase.snaps, tags = firebase.tags, users = firebase.users let options = { url: "https://beepboophq.com/api/v1/chronos/tasks", method: "POST", headers: { authorization: "Bearer "+process.env.BEEPBOOP_TOKEN, 'content-type': 'application/json', 'cache-control': 'no-cache' }, body: { "method": "POST", "schedule": "", "url":"https://hooks.slack.com/services/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", "payload":{ "text": "" } }, json:true }; function setCron(options,answer,time){ options.body.payload.text = `If you would like ${answer} suggestions type /${answer}` request(options, function (error, response, body) { if (error) throw new Error(error); // msg.say(JSON.stringify(response)) msg.say(`${answer} set at ${time}`) }); } let notify = function() { let randomNum = 0; slapp.command('/jaywalkNotify', (msg, text) => { let state = { requested: Date.now() } msg .say({ text: '', attachments: [{ text: 'What notifications do you want on this channel?', fallback: 'Set timed events.', callback_id: 'doit_confirm_callback', actions: [{ name: 'answer', text: 'Coffee Spots', type: 'button', value: 'Breakfast' }, { name: 'answer', text: 'Lunch', type: 'button', value: 'Lunch' }, { name: 'answer', text: 'Dinner', type: 'button', value: 'Dinner' }, { name: 'answer', text: 'Happy Hour', type: 'button', value: 'Happy Hour' }, { name: 'answer', text: 'Weekend fun', type: 'button', value: 'Local Bar' } ] }] }) .route('scheduler', state, 60) //expires after 60 sec }) .route('scheduler', (msg, state) => { let answer = msg.body.actions[0].value // user may not have typed text as their next action, ask again and re-route // if (!randSnap || !randTag) { // return msg // .say("Whoops, you just have to pick a button...") // .say('Click a button!') // .route('getid1', state) // } if(answer == 'Breakfast'){ // posts a /command VV - needs a stable auth token though // https://slack.com/api/chat.command?token=xoxp-72951567362-161234057782-169954154770-3be4727d4a5086453d9f42c5bb2af872&channel=C4T6LKUP7&command=%2Ftest&as_user=true&pretty=1 // https://slack.com/api/chat.command?token=xoxp-72951567362-161234057782-178935658950-f3ba1bd9902f7494344d472e1f123882&channel=C4T6LKUP7&command=%2Ftest&pretty=1 //need to handle timezones.... /* ! 
Based on UTC time (GMT +06:00:00) or utc is denver+6hours min 0-59 hour 0-23 day of month 1-31 month 1-12 day of week 0-6 (sun-sat) year 2016-9999 */ options.body.schedule = "0 13 * * 1-5 *" // mon-fri @ 7:00am gmt let time = "mon-fri @ 7:00am gmt" setCron(options,answer,time) }else if(answer == 'Lunch'){ options.body.schedule = "30 17 * * 1-5 *" // mon-fri @ 11:30am gmt let time = "mon-fri @ 11:30am gmt" setCron(options,answer,time) }else if(answer == 'Dinner'){ options.body.schedule = "30 23 * * 1-5 *" // mon-fri @ 5:30pm gmt let time = "mon-fri @ 5:30pm gmt" setCron(options,answer,time) }else if(answer == 'Happy Hour'){ options.body.schedule = "30 22 * * 1-5 *" // mon-fri @ 4:30pm gmt let time = "mon-fri @ 4:30pm gmt" setCron(options,answer,time) }else if(answer == 'Local Bar'){ options.body.schedule = "0 02 * * 0,5,6 *" // thurs-sat @ 8:00pm gmt let time = "mon-fri @ 8:00pm gmt" setCron(options,answer,time) }else{ //handle error return msg .say("Whoops, you just have to pick a button...") .say('Click a button!') .route('getDbinfo', state) } }) } module.exports = { notify: notify() }
msg is now defined
slashCommands/notifySlash.js
msg is now defined
<ide><path>lashCommands/notifySlash.js <ide> }, json:true <ide> }; <ide> <del>function setCron(options,answer,time){ <add>function setCron(options,answer,time,msg){ <ide> options.body.payload.text = `If you would like ${answer} suggestions type /${answer}` <ide> request(options, function (error, response, body) { <ide> if (error) throw new Error(error); <ide> */ <ide> options.body.schedule = "0 13 * * 1-5 *" // mon-fri @ 7:00am gmt <ide> let time = "mon-fri @ 7:00am gmt" <del> setCron(options,answer,time) <add> setCron(options,answer,time,msg) <ide> }else if(answer == 'Lunch'){ <ide> options.body.schedule = "30 17 * * 1-5 *" // mon-fri @ 11:30am gmt <ide> let time = "mon-fri @ 11:30am gmt" <del> setCron(options,answer,time) <add> setCron(options,answer,time,msg) <ide> }else if(answer == 'Dinner'){ <ide> options.body.schedule = "30 23 * * 1-5 *" // mon-fri @ 5:30pm gmt <ide> let time = "mon-fri @ 5:30pm gmt" <del> setCron(options,answer,time) <add> setCron(options,answer,time,msg) <ide> }else if(answer == 'Happy Hour'){ <ide> options.body.schedule = "30 22 * * 1-5 *" // mon-fri @ 4:30pm gmt <ide> let time = "mon-fri @ 4:30pm gmt" <del> setCron(options,answer,time) <add> setCron(options,answer,time,msg) <ide> }else if(answer == 'Local Bar'){ <ide> options.body.schedule = "0 02 * * 0,5,6 *" // thurs-sat @ 8:00pm gmt <ide> let time = "mon-fri @ 8:00pm gmt" <del> setCron(options,answer,time) <add> setCron(options,answer,time,msg) <ide> }else{ //handle error <ide> return msg <ide> .say("Whoops, you just have to pick a button...")
JavaScript
isc
b6130c8eb6545bc1feecde2539bbea513a14068f
0
pfryga/carouselApi
var express = require('express'); var bodyParser = require('body-parser'); var mobiusModule = require('./lib/mobiusClient'); var mongooseModule = require('./lib/mongooseClient'); var appConfig = require('./config/config.json'); var router = express.Router(); var mobiusIns = mobiusModule(appConfig.mobius); var mongooseIns = mongooseModule(appConfig.mongo); var mongoose = mongooseIns.init(); var app = express(); app.use(bodyParser.urlencoded({ extended: false })); // models var Carousel = mongoose.model('Carousel', { key: String, name: String, department: String }); var Department = mongoose.model('Department', { key: String, name: String }); var Offer = mongoose.model('Offer', { department: String, carousel: String, id: String, name: String, mainImage: String }); // status router.get('/status/ping', function(req, res) { res.send('pong') }) // carousels router.get('/carousels/:department', function(req, res) { Carousel.where({ department: req.params.department }).find(function (err, elem) { if (err) return res.send(err); if (elem) { var response = { 'collection': req.params.department, 'items': elem }; res.send(response); } }); }); router.post('/carousels', function(req, res) { var carousel = new Carousel({ key: req.body.key, name: req.body.name, department: req.body.department }); carousel.save(function () { res.send('created!'); }); }); router.delete('/carousels', function(req, res) { Carousel.findOneAndRemove({ key: req.body.key, department: req.body.department }, function () { res.send('removed!'); }); }); // departments router.get('/departments', function(req, res) { Department.find(function (err, elem) { if (err) return res.send(err); if (elem) { var response = { 'collection': req.params.department, 'items': elem }; res.send(response); } }); }); router.post('/departments', function(req, res) { var department = new Department({ key: req.body.key, name: req.body.name }); department.save(function () { res.send('created!'); }); }); router.delete('/departments', function(req, res) { Department.findOneAndRemove({ key: req.body.key }, function () { res.send('removed!'); }); }); // offers router.get('/getOffer/:id', function(req, res) { var offerId = req.params.id; mobiusIns.getOffer(offerId, function (error, data) { if (error) { res.status(404).send(); } else { res.send(data); } }); }); router.get('/getOffers/:department/:carousel', function(req, res) { Offer.where({ department: req.params.department, carousel: req.params.carousel }).find(function (err, elem) { if (err) return res.send(err); if (elem) { var response = { 'collection': req.params.department, 'items': elem }; res.send(response); } }); }); router.post('/addOffer/:id', function(req, res) { var offerId = req.params.id; mobiusIns.getOffer(offerId, function (error, data) { if (error) { res.status(404).send(); } else { var offer = new Offer({ department: req.body.department, carousel: req.body.carousel, id: data.id, name: data.name, mainImage: data.mainImage.small }); offer.save(function () { res.send('created!'); }); } }); }); router.delete('/removeOffer/:id', function(req, res) { Offer.findOneAndRemove({ id: req.params.id, department: req.body.department, carousel: req.body.carousel }, function () { res.send('removed!'); }); }); app.use('/', router); app.listen(8080, function () { console.log('server started!'); });
app.js
var express = require('express'); var bodyParser = require('body-parser'); var mobiusModule = require('./lib/mobiusClient'); var mongooseModule = require('./lib/mongooseClient'); var appConfig = require('./config/config.json'); var router = express.Router(); var mobiusIns = mobiusModule(appConfig.mobius); var mongooseIns = mongooseModule(appConfig.mongo); var mongoose = mongooseIns.init(); var app = express(); app.use(bodyParser.urlencoded({ extended: false })); // models var Carousel = mongoose.model('Carousel', { key: String, name: String, department: String }); var Department = mongoose.model('Department', { key: String, name: String }); // status router.get('/status/ping', function(req, res) { res.send('pong') }) // getOffer router.get('/getOffer/:id', function(req, res) { var offerId = req.params.id; mobiusIns.getOffer(offerId, function (error, offer) { if (error) { res.status(404).send(); } else { res.send(offer); } }); }); // carousels router.get('/carousels/:department', function(req, res) { Carousel.where({ department: req.params.department }).find(function (err, elem) { if (err) return res.send(err); if (elem) { var response = { 'collection': req.params.department, 'items': elem }; res.send(response); } }); }); router.post('/carousels', function(req, res) { var carousel = new Carousel({ key: req.body.key, name: req.body.name, department: req.body.department }); carousel.save(function () { res.send('created!'); }); }); router.delete('/carousels', function(req, res) { Carousel.findOneAndRemove({ key: req.body.key, department: req.body.department }, function () { res.send('removed!'); }); }); // departments router.get('/departments', function(req, res) { Department.find(function (err, elem) { if (err) return res.send(err); if (elem) { var response = { 'collection': req.params.department, 'items': elem }; res.send(response); } }); }); router.post('/departments', function(req, res) { var department = new Department({ key: req.body.key, name: req.body.name }); department.save(function () { res.send('created!'); }); }); router.delete('/departments', function(req, res) { Department.findOneAndRemove({ key: req.body.key }, function () { res.send('removed!'); }); }); app.use('/', router); app.listen(8080, function () { console.log('server started!'); });
offers endpoints
app.js
offers endpoints
<ide><path>pp.js <ide> name: String <ide> }); <ide> <add>var Offer = mongoose.model('Offer', { <add> department: String, <add> carousel: String, <add> id: String, <add> name: String, <add> mainImage: String <add>}); <add> <ide> // status <ide> <ide> router.get('/status/ping', function(req, res) { <ide> res.send('pong') <ide> }) <del> <del>// getOffer <del> <del>router.get('/getOffer/:id', function(req, res) { <del> var offerId = req.params.id; <del> <del> mobiusIns.getOffer(offerId, function (error, offer) { <del> if (error) { <del> res.status(404).send(); <del> } else { <del> res.send(offer); <del> } <del> }); <del>}); <ide> <ide> // carousels <ide> <ide> }); <ide> }); <ide> <add>// offers <add> <add>router.get('/getOffer/:id', function(req, res) { <add> var offerId = req.params.id; <add> <add> mobiusIns.getOffer(offerId, function (error, data) { <add> if (error) { <add> res.status(404).send(); <add> } else { <add> res.send(data); <add> } <add> }); <add>}); <add> <add>router.get('/getOffers/:department/:carousel', function(req, res) { <add> Offer.where({ <add> department: req.params.department, <add> carousel: req.params.carousel <add> }).find(function (err, elem) { <add> if (err) return res.send(err); <add> if (elem) { <add> var response = { <add> 'collection': req.params.department, <add> 'items': elem <add> }; <add> res.send(response); <add> } <add> }); <add>}); <add> <add>router.post('/addOffer/:id', function(req, res) { <add> var offerId = req.params.id; <add> <add> mobiusIns.getOffer(offerId, function (error, data) { <add> if (error) { <add> res.status(404).send(); <add> } else { <add> var offer = new Offer({ <add> department: req.body.department, <add> carousel: req.body.carousel, <add> id: data.id, <add> name: data.name, <add> mainImage: data.mainImage.small <add> }); <add> <add> offer.save(function () { <add> res.send('created!'); <add> }); <add> } <add> }); <add>}); <add> <add>router.delete('/removeOffer/:id', function(req, res) { <add> Offer.findOneAndRemove({ <add> id: req.params.id, <add> department: req.body.department, <add> carousel: req.body.carousel <add> }, function () { <add> res.send('removed!'); <add> }); <add>}); <add> <ide> app.use('/', router); <ide> <ide> app.listen(8080, function () {
JavaScript
agpl-3.0
4958c2730e815de6c0e61ed6e638aba70a4012ae
0
moxiecode/moxie,moxiecode/moxie,moxiecode/moxie,moxiecode/moxie
/** * FileInput.js * * Copyright 2013, Moxiecode Systems AB * Released under GPL License. * * License: http://www.plupload.com/license * Contributing: http://www.plupload.com/contributing */ define('moxie/file/FileInput', [ 'moxie/core/utils/Basic', 'moxie/core/utils/Mime', 'moxie/core/utils/Dom', 'moxie/core/Exceptions', 'moxie/core/EventTarget', 'moxie/core/I18n', 'moxie/file/File', 'moxie/runtime/RuntimeClient' ], function(Basic, Mime, Dom, x, EventTarget, I18n, File, RuntimeClient) { /** Provides a convenient way to create cross-browser file-picker. Generates file selection dialog on click, converts selected files to _File_ objects, to be used in conjunction with _Image_, preloaded in memory with _FileReader_ or uploaded to a server through _XMLHttpRequest_. @class FileInput @constructor @extends EventTarget @uses RuntimeClient @param {Object|String|DOMElement} options If options is string or node, argument is considered as _browse\_button_. @param {String|DOMElement} options.browse_button DOM Element to turn into file picker. @param {Array} [options.accept] Array of mime types to accept. By default accepts all. @param {String} [options.file='file'] Name of the file field (not the filename). @param {Boolean} [options.multiple=false] Enable selection of multiple files. @param {Boolean} [options.directory=false] Turn file input into the folder input (cannot be both at the same time). @param {String|DOMElement} [options.container] DOM Element to use as a container for file-picker. Defaults to parentNode for _browse\_button_. @param {Object|String} [options.required_caps] Set of required capabilities, that chosen runtime must support. @example <div id="container"> <a id="file-picker" href="javascript:;">Browse...</a> </div> <script> var fileInput = new mOxie.FileInput({ browse_button: 'file-picker', // or document.getElementById('file-picker') container: 'container', accept: [ {title: "Image files", extensions: "jpg,gif,png"} // accept only images ], multiple: true // allow multiple file selection }); fileInput.onchange = function(e) { // do something to files array console.info(e.target.files); // or this.files or fileInput.files }; fileInput.init(); // initialize </script> */ var dispatches = [ /** Dispatched when runtime is connected and file-picker is ready to be used. @event ready @param {Object} event */ 'ready', /** Dispatched right after [ready](#event_ready) event, and whenever [refresh()](#method_refresh) is invoked. Check [corresponding documentation entry](#method_refresh) for more info. @event refresh @param {Object} event */ /** Dispatched when selection of files in the dialog is complete. @event change @param {Object} event */ 'change', 'cancel', // TODO: might be useful /** Dispatched when mouse cursor enters file-picker area. Can be used to style element accordingly. @event mouseenter @param {Object} event */ 'mouseenter', /** Dispatched when mouse cursor leaves file-picker area. Can be used to style element accordingly. @event mouseleave @param {Object} event */ 'mouseleave', /** Dispatched when functional mouse button is pressed on top of file-picker area. @event mousedown @param {Object} event */ 'mousedown', /** Dispatched when functional mouse button is released on top of file-picker area. 
@event mouseup @param {Object} event */ 'mouseup' ]; function FileInput(options) { var self = this, container, browseButton, defaults; // if flat argument passed it should be browse_button id if (Basic.inArray(Basic.typeOf(options), ['string', 'node']) !== -1) { options = { browse_button : options }; } // this will help us to find proper default container browseButton = Dom.get(options.browse_button); if (!browseButton) { // browse button is required throw new x.DOMException(x.DOMException.NOT_FOUND_ERR); } // figure out the options defaults = { accept: [{ title: I18n.translate('All Files'), extensions: '*' }], name: 'file', multiple: false, required_caps: false, container: browseButton.parentNode || document.body }; options = typeof(options) === 'object' ? Basic.extend({}, defaults, options) : defaults; // normalize accept option (could be list of mime types or array of title/extensions pairs) if (typeof(options.accept) === 'string') { options.accept = Mime.mimes2extList(options.accept); } container = Dom.get(options.container); // make sure we have container if (!container) { container = document.body; } // make container relative, if it's not if (Dom.getStyle(container, 'position') === 'static') { container.style.position = 'relative'; } container = browseButton = null; // IE RuntimeClient.call(self); Basic.extend(self, { /** Unique id of the component @property uid @protected @readOnly @type {String} @default UID */ uid: Basic.guid('uid_'), /** Unique id of the connected runtime, if any. @property ruid @protected @type {String} */ ruid: null, /** Array of selected mOxie.File objects @property files @type {Array} @default null */ files: null, /** Initializes the file-picker, connects it to runtime and dispatches event ready when done. @method init */ init: function() { self.convertEventPropsToHandlers(dispatches); self.bind('RuntimeInit', function(e, runtime) { self.ruid = runtime.uid; self.bind("Ready", function() { self.trigger("Refresh"); }, 999); self.bind("Change", function() { var files = runtime.exec.call(self, 'FileInput', 'getFiles'); self.files = []; Basic.each(files, function(file) { self.files.push(new File(self.ruid, file)); }); }, 999); // re-position and resize shim container self.bind('Refresh', function() { var pos, size, browseButton, shimContainer; browseButton = Dom.get(options.browse_button); shimContainer = Dom.get(runtime.shimid); // do not use runtime.getShimContainer(), since it will create container if it doesn't exist if (browseButton) { pos = Dom.getPos(browseButton, Dom.get(options.container)); size = Dom.getSize(browseButton); if (shimContainer) { Basic.extend(shimContainer.style, { top : pos.y + 'px', left : pos.x + 'px', width : size.w + 'px', height : size.h + 'px' }); } } shimContainer = browseButton = null; }); runtime.exec.call(self, 'FileInput', 'init', options); }); // runtime needs: options.required_features, options.runtime_order and options.container self.connectRuntime(options); // throws RuntimeError }, /** Disables file-picker element, so that it doesn't react to mouse clicks. @method disable @param {Boolean} [state=true] Disable component if - true, enable if - false */ disable: function(state) { var runtime = this.getRuntime(); if (runtime) { runtime.exec.call(this, 'FileInput', 'disable', Basic.typeOf(state) === 'undefined' ? true : state); } }, /** Reposition and resize dialog trigger to match the position and size of browse_button element. 
@method refresh */ refresh: function() { self.trigger("Refresh"); } }); } FileInput.prototype = EventTarget.instance; return FileInput; });
src/javascript/file/FileInput.js
/** * FileInput.js * * Copyright 2013, Moxiecode Systems AB * Released under GPL License. * * License: http://www.plupload.com/license * Contributing: http://www.plupload.com/contributing */ define('moxie/file/FileInput', [ 'moxie/core/utils/Basic', 'moxie/core/utils/Mime', 'moxie/core/utils/Dom', 'moxie/core/Exceptions', 'moxie/core/EventTarget', 'moxie/core/I18n', 'moxie/file/File', 'moxie/runtime/RuntimeClient' ], function(Basic, Mime, Dom, x, EventTarget, I18n, File, RuntimeClient) { /** Provides a convenient way to create cross-browser file-picker. Generates file selection dialog on click, converts selected files to mOxie.File objects, to be used in conjunction with _mOxie.Image_, preloaded in memory with _mOxie.FileReader_ or uploaded to a server through _mOxie.XMLHttpRequest_. @class FileInput @constructor @extends EventTarget @uses RuntimeClient @param {Object|String|DOMElement} options If options is string or node, argument is considered as options.browse_button @param {String|DOMElement} options.browse_button DOM Element to turn into file picker @param {Array} [options.accept] Array of mime types to accept. By default accepts all @param {String} [options.file='file'] Name of the file field (not the filename) @param {Boolean} [options.multiple=false] Enable selection of multiple files @param {Boolean} [options.directory=false] Turn file input into the folder input (cannot be both at the same time) @param {String|DOMElement} [options.container] DOM Element to use as a container for file-picker. Defaults to parentNode for options.browse_button @param {Object|String} [options.required_caps] Set of required capabilities, that chosen runtime must support @example <div id="container"> <a id="file-picker" href="javascript:;">Browse...</a> </div> <script> var fileInput = new mOxie.FileInput({ browse_button: 'file-picker', // or document.getElementById('file-picker') container: 'container', accept: [ {title: "Image files", extensions: "jpg,gif,png"} // accept only images ], multiple: true // allow multiple file selection }); fileInput.onchange = function(e) { // do something to files array console.info(e.target.files); // or this.files or fileInput.files }; fileInput.init(); // initialize </script> */ var dispatches = [ /** Dispatched when runtime is connected and file-picker is ready to be used. @event ready @param {Object} event */ 'ready', /** Dispatched right after [ready](#event_ready) event, and whenever [refresh()](#method_refresh) is invoked. Check [corresponding documentation entry](#method_refresh) for more info. @event refresh @param {Object} event */ /** Dispatched when selection of files in the dialog is complete. @event change @param {Object} event */ 'change', 'cancel', // TODO: might be useful /** Dispatched when mouse cursor enters file-picker area. Can be used to style element accordingly. @event mouseenter @param {Object} event */ 'mouseenter', /** Dispatched when mouse cursor leaves file-picker area. Can be used to style element accordingly. @event mouseleave @param {Object} event */ 'mouseleave', /** Dispatched when functional mouse button is pressed on top of file-picker area. @event mousedown @param {Object} event */ 'mousedown', /** Dispatched when functional mouse button is released on top of file-picker area. 
@event mouseup @param {Object} event */ 'mouseup' ]; function FileInput(options) { var self = this, container, browseButton, defaults; // if flat argument passed it should be browse_button id if (Basic.inArray(Basic.typeOf(options), ['string', 'node']) !== -1) { options = { browse_button : options }; } // this will help us to find proper default container browseButton = Dom.get(options.browse_button); if (!browseButton) { // browse button is required throw new x.DOMException(x.DOMException.NOT_FOUND_ERR); } // figure out the options defaults = { accept: [{ title: I18n.translate('All Files'), extensions: '*' }], name: 'file', multiple: false, required_caps: false, container: browseButton.parentNode || document.body }; options = typeof(options) === 'object' ? Basic.extend({}, defaults, options) : defaults; // normalize accept option (could be list of mime types or array of title/extensions pairs) if (typeof(options.accept) === 'string') { options.accept = Mime.mimes2extList(options.accept); } container = Dom.get(options.container); // make sure we have container if (!container) { container = document.body; } // make container relative, if it's not if (Dom.getStyle(container, 'position') === 'static') { container.style.position = 'relative'; } container = browseButton = null; // IE RuntimeClient.call(self); Basic.extend(self, { /** Unique id of the component @property uid @protected @readOnly @type {String} @default UID */ uid: Basic.guid('uid_'), /** Unique id of the connected runtime, if any. @property ruid @protected @type {String} */ ruid: null, /** Array of selected mOxie.File objects @property files @type {Array} @default null */ files: null, /** Initializes the file-picker, connects it to runtime and dispatches event ready when done. @method init */ init: function() { self.convertEventPropsToHandlers(dispatches); self.bind('RuntimeInit', function(e, runtime) { self.ruid = runtime.uid; self.bind("Ready", function() { self.trigger("Refresh"); }, 999); self.bind("Change", function() { var files = runtime.exec.call(self, 'FileInput', 'getFiles'); self.files = []; Basic.each(files, function(file) { self.files.push(new File(self.ruid, file)); }); }, 999); // re-position and resize shim container self.bind('Refresh', function() { var pos, size, browseButton, shimContainer; browseButton = Dom.get(options.browse_button); shimContainer = Dom.get(runtime.shimid); // do not use runtime.getShimContainer(), since it will create container if it doesn't exist if (browseButton) { pos = Dom.getPos(browseButton, Dom.get(options.container)); size = Dom.getSize(browseButton); if (shimContainer) { Basic.extend(shimContainer.style, { top : pos.y + 'px', left : pos.x + 'px', width : size.w + 'px', height : size.h + 'px' }); } } shimContainer = browseButton = null; }); runtime.exec.call(self, 'FileInput', 'init', options); }); // runtime needs: options.required_features, options.runtime_order and options.container self.connectRuntime(options); // throws RuntimeError }, /** Disables file-picker element, so that it doesn't react to mouse clicks. @method disable @param {Boolean} [state=true] Disable component if - true, enable if - false */ disable: function(state) { var runtime = this.getRuntime(); if (runtime) { runtime.exec.call(this, 'FileInput', 'disable', Basic.typeOf(state) === 'undefined' ? true : state); } }, /** Reposition and resize dialog trigger to match the position and size of browse_button element. 
@method refresh */ refresh: function() { self.trigger("Refresh"); } }); } FileInput.prototype = EventTarget.instance; return FileInput; });
FileInput: Small update to comments.
src/javascript/file/FileInput.js
FileInput: Small update to comments.
<ide><path>rc/javascript/file/FileInput.js <ide> ], function(Basic, Mime, Dom, x, EventTarget, I18n, File, RuntimeClient) { <ide> /** <ide> Provides a convenient way to create cross-browser file-picker. Generates file selection dialog on click, <del> converts selected files to mOxie.File objects, to be used in conjunction with _mOxie.Image_, preloaded in memory <del> with _mOxie.FileReader_ or uploaded to a server through _mOxie.XMLHttpRequest_. <add> converts selected files to _File_ objects, to be used in conjunction with _Image_, preloaded in memory <add> with _FileReader_ or uploaded to a server through _XMLHttpRequest_. <ide> <ide> @class FileInput <ide> @constructor <ide> @extends EventTarget <ide> @uses RuntimeClient <del> @param {Object|String|DOMElement} options If options is string or node, argument is considered as options.browse_button <del> @param {String|DOMElement} options.browse_button DOM Element to turn into file picker <del> @param {Array} [options.accept] Array of mime types to accept. By default accepts all <del> @param {String} [options.file='file'] Name of the file field (not the filename) <del> @param {Boolean} [options.multiple=false] Enable selection of multiple files <del> @param {Boolean} [options.directory=false] Turn file input into the folder input (cannot be both at the same time) <del> @param {String|DOMElement} [options.container] DOM Element to use as a container for file-picker. Defaults to parentNode for options.browse_button <del> @param {Object|String} [options.required_caps] Set of required capabilities, that chosen runtime must support <add> @param {Object|String|DOMElement} options If options is string or node, argument is considered as _browse\_button_. <add> @param {String|DOMElement} options.browse_button DOM Element to turn into file picker. <add> @param {Array} [options.accept] Array of mime types to accept. By default accepts all. <add> @param {String} [options.file='file'] Name of the file field (not the filename). <add> @param {Boolean} [options.multiple=false] Enable selection of multiple files. <add> @param {Boolean} [options.directory=false] Turn file input into the folder input (cannot be both at the same time). <add> @param {String|DOMElement} [options.container] DOM Element to use as a container for file-picker. Defaults to parentNode <add> for _browse\_button_. <add> @param {Object|String} [options.required_caps] Set of required capabilities, that chosen runtime must support. <ide> <ide> @example <ide> <div id="container">
Java
agpl-3.0
39e004b4aa77798ae9a7309f7bb8486bd9ef91e0
0
duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test
a9bf596c-2e61-11e5-9284-b827eb9e62be
hello.java
a9b9f382-2e61-11e5-9284-b827eb9e62be
a9bf596c-2e61-11e5-9284-b827eb9e62be
hello.java
a9bf596c-2e61-11e5-9284-b827eb9e62be
<ide><path>ello.java <del>a9b9f382-2e61-11e5-9284-b827eb9e62be <add>a9bf596c-2e61-11e5-9284-b827eb9e62be
Java
apache-2.0
a4d364d00767a5331db828929cf04da38b96e11e
0
darranl/directory-server,darranl/directory-server,apache/directory-server,drankye/directory-server,drankye/directory-server,apache/directory-server
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * */ package org.apache.directory.server.core.authn; import java.net.SocketAddress; import org.apache.directory.api.ldap.model.constants.AuthenticationLevel; import org.apache.directory.api.ldap.model.exception.LdapNoPermissionException; import org.apache.directory.server.core.api.LdapPrincipal; import org.apache.directory.server.core.api.interceptor.context.BindOperationContext; import org.apache.directory.server.i18n.I18n; import org.apache.mina.core.session.IoSession; /** * An {@link Authenticator} that handles anonymous connections * (type <tt>'none'</tt>). * * @author <a href="mailto:[email protected]">Apache Directory Project</a> */ public class AnonymousAuthenticator extends AbstractAuthenticator { /** * Creates a new instance. */ public AnonymousAuthenticator() { super( AuthenticationLevel.NONE ); } /** * If the context is not configured to allow anonymous connections, * this method throws a {@link javax.naming.NoPermissionException}. */ public LdapPrincipal authenticate( BindOperationContext bindContext ) throws LdapNoPermissionException { // We only allow Anonymous binds if the service allows them if ( getDirectoryService().isAllowAnonymousAccess() ) { LOG.info( "Authentication as anonymous" ); LdapPrincipal principal = getDirectoryService().getAdminSession().getAnonymousPrincipal(); IoSession session = bindContext.getIoSession(); if ( session != null ) { SocketAddress clientAddress = session.getRemoteAddress(); principal.setClientAddress( clientAddress ); SocketAddress serverAddress = session.getServiceAddress(); principal.setServerAddress( serverAddress ); } return principal; } else { LOG.info( "Cannot authenticate as anonymous, the server does not allow it" ); throw new LdapNoPermissionException( I18n.err( I18n.ERR_228 ) ); } } }
interceptors/authn/src/main/java/org/apache/directory/server/core/authn/AnonymousAuthenticator.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * */ package org.apache.directory.server.core.authn; import java.net.SocketAddress; import org.apache.directory.api.ldap.model.constants.AuthenticationLevel; import org.apache.directory.api.ldap.model.exception.LdapNoPermissionException; import org.apache.directory.server.core.api.LdapPrincipal; import org.apache.directory.server.core.api.interceptor.context.BindOperationContext; import org.apache.directory.server.i18n.I18n; import org.apache.mina.core.session.IoSession; /** * An {@link Authenticator} that handles anonymous connections * (type <tt>'none'</tt>). * * @author <a href="mailto:[email protected]">Apache Directory Project</a> */ public class AnonymousAuthenticator extends AbstractAuthenticator { /** * Creates a new instance. */ public AnonymousAuthenticator() { super( AuthenticationLevel.NONE ); } /** * If the context is not configured to allow anonymous connections, * this method throws a {@link javax.naming.NoPermissionException}. */ public LdapPrincipal authenticate( BindOperationContext bindContext ) throws LdapNoPermissionException { // We only allow Anonymous binds if the service allows them if ( getDirectoryService().isAllowAnonymousAccess() ) { LdapPrincipal principal = getDirectoryService().getAdminSession().getAnonymousPrincipal(); IoSession session = bindContext.getIoSession(); if ( session != null ) { SocketAddress clientAddress = session.getRemoteAddress(); principal.setClientAddress( clientAddress ); SocketAddress serverAddress = session.getServiceAddress(); principal.setServerAddress( serverAddress ); } return principal; } else { LOG.info( "Cannot authenticate as anonymous, the server does not allow it" ); throw new LdapNoPermissionException( I18n.err( I18n.ERR_228 ) ); } } }
Added a log to tell when some anonymous bind is done git-svn-id: 90776817adfbd895fc5cfa90f675377e0a62e745@1602462 13f79535-47bb-0310-9956-ffa450edef68
interceptors/authn/src/main/java/org/apache/directory/server/core/authn/AnonymousAuthenticator.java
Added a log to tell when some anonymous bind is done
<ide><path>nterceptors/authn/src/main/java/org/apache/directory/server/core/authn/AnonymousAuthenticator.java <ide> // We only allow Anonymous binds if the service allows them <ide> if ( getDirectoryService().isAllowAnonymousAccess() ) <ide> { <add> LOG.info( "Authentication as anonymous" ); <ide> LdapPrincipal principal = getDirectoryService().getAdminSession().getAnonymousPrincipal(); <ide> <ide> IoSession session = bindContext.getIoSession();
Java
apache-2.0
a10e3e53bb8a44ece42ebda78f8ed44a07173f1c
0
browep/MaterialDesignLibrary
package com.gc.materialdesign.views; import com.gc.materialdesign.R; import com.gc.materialdesign.utils.Utils; import android.content.Context; import android.content.res.TypedArray; import android.graphics.Canvas; import android.graphics.Color; import android.graphics.Paint; import android.graphics.Typeface; import android.util.AttributeSet; import android.util.Log; import android.util.TypedValue; import android.widget.RelativeLayout; import android.widget.TextView; public class ButtonFlat extends Button { TextView textButton; public ButtonFlat(Context context, AttributeSet attrs) { super(context, attrs); } protected void setDefaultProperties(){ minHeight = 36; minWidth = 88; rippleSize = 3; // Min size setMinimumHeight(Utils.dpToPx(minHeight, getResources())); setMinimumWidth(Utils.dpToPx(minWidth, getResources())); setBackgroundResource(R.drawable.background_transparent); } @Override protected void setAttributes(AttributeSet attrs) { // Set text button String text = null; int textResource = attrs.getAttributeResourceValue(ANDROIDXML,"text",-1); if(textResource != -1){ text = getResources().getString(textResource); }else{ text = attrs.getAttributeValue(ANDROIDXML,"text"); } int[] textSizeAttr = new int[] { android.R.attr.textSize }; TypedArray typedArray = getContext().obtainStyledAttributes(attrs, textSizeAttr ); float textSize = typedArray.getDimension(0, 24); Log.d("ButtonFlat", "textSize: " + textSize); if(text != null){ textButton = new TextView(getContext()); textButton.setText(text.toUpperCase()); textButton.setTextColor(backgroundColor); textButton.setTypeface(null, Typeface.BOLD); textButton.setTextSize(TypedValue.COMPLEX_UNIT_PX, textSize); RelativeLayout.LayoutParams params = new RelativeLayout.LayoutParams(LayoutParams.WRAP_CONTENT,LayoutParams.WRAP_CONTENT); params.addRule(RelativeLayout.CENTER_IN_PARENT, RelativeLayout.TRUE); textButton.setLayoutParams(params); addView(textButton); } int bacgroundColor = attrs.getAttributeResourceValue(ANDROIDXML,"background",-1); if(bacgroundColor != -1){ setBackgroundColor(getResources().getColor(bacgroundColor)); }else{ // Color by hexadecimal // Color by hexadecimal background = attrs.getAttributeIntValue(ANDROIDXML, "background", -1); if (background != -1) setBackgroundColor(background); } } @Override protected void onDraw(Canvas canvas) { super.onDraw(canvas); if (x != -1) { Paint paint = new Paint(); paint.setAntiAlias(true); paint.setColor(makePressColor()); canvas.drawCircle(x, y, radius, paint); if(radius > getHeight()/rippleSize) radius += rippleSpeed; if(radius >= getWidth()){ x = -1; y = -1; radius = getHeight()/rippleSize; if(onClickListener != null&& clickAfterRipple) onClickListener.onClick(this); } invalidate(); } } /** * Make a dark color to ripple effect * @return */ @Override protected int makePressColor(){ return Color.parseColor("#88DDDDDD"); } public void setText(String text){ textButton.setText(text.toUpperCase()); } // Set color of background public void setBackgroundColor(int color){ backgroundColor = color; if(isEnabled()) beforeBackground = backgroundColor; textButton.setTextColor(color); } @Override public TextView getTextView() { return textButton; } public String getText(){ return textButton.getText().toString(); } }
MaterialDesign/src/com/gc/materialdesign/views/ButtonFlat.java
package com.gc.materialdesign.views; import com.gc.materialdesign.R; import com.gc.materialdesign.utils.Utils; import android.content.Context; import android.graphics.Canvas; import android.graphics.Color; import android.graphics.Paint; import android.graphics.Typeface; import android.util.AttributeSet; import android.widget.RelativeLayout; import android.widget.TextView; public class ButtonFlat extends Button { TextView textButton; public ButtonFlat(Context context, AttributeSet attrs) { super(context, attrs); } protected void setDefaultProperties(){ minHeight = 36; minWidth = 88; rippleSize = 3; // Min size setMinimumHeight(Utils.dpToPx(minHeight, getResources())); setMinimumWidth(Utils.dpToPx(minWidth, getResources())); setBackgroundResource(R.drawable.background_transparent); } @Override protected void setAttributes(AttributeSet attrs) { // Set text button String text = null; int textResource = attrs.getAttributeResourceValue(ANDROIDXML,"text",-1); if(textResource != -1){ text = getResources().getString(textResource); }else{ text = attrs.getAttributeValue(ANDROIDXML,"text"); } if(text != null){ textButton = new TextView(getContext()); textButton.setText(text.toUpperCase()); textButton.setTextColor(backgroundColor); textButton.setTypeface(null, Typeface.BOLD); RelativeLayout.LayoutParams params = new RelativeLayout.LayoutParams(LayoutParams.WRAP_CONTENT,LayoutParams.WRAP_CONTENT); params.addRule(RelativeLayout.CENTER_IN_PARENT, RelativeLayout.TRUE); textButton.setLayoutParams(params); addView(textButton); } int bacgroundColor = attrs.getAttributeResourceValue(ANDROIDXML,"background",-1); if(bacgroundColor != -1){ setBackgroundColor(getResources().getColor(bacgroundColor)); }else{ // Color by hexadecimal // Color by hexadecimal background = attrs.getAttributeIntValue(ANDROIDXML, "background", -1); if (background != -1) setBackgroundColor(background); } } @Override protected void onDraw(Canvas canvas) { super.onDraw(canvas); if (x != -1) { Paint paint = new Paint(); paint.setAntiAlias(true); paint.setColor(makePressColor()); canvas.drawCircle(x, y, radius, paint); if(radius > getHeight()/rippleSize) radius += rippleSpeed; if(radius >= getWidth()){ x = -1; y = -1; radius = getHeight()/rippleSize; if(onClickListener != null&& clickAfterRipple) onClickListener.onClick(this); } invalidate(); } } /** * Make a dark color to ripple effect * @return */ @Override protected int makePressColor(){ return Color.parseColor("#88DDDDDD"); } public void setText(String text){ textButton.setText(text.toUpperCase()); } // Set color of background public void setBackgroundColor(int color){ backgroundColor = color; if(isEnabled()) beforeBackground = backgroundColor; textButton.setTextColor(color); } @Override public TextView getTextView() { return textButton; } public String getText(){ return textButton.getText().toString(); } }
use textSize from attrs
MaterialDesign/src/com/gc/materialdesign/views/ButtonFlat.java
use textSize from attrs
<ide><path>aterialDesign/src/com/gc/materialdesign/views/ButtonFlat.java <ide> import com.gc.materialdesign.utils.Utils; <ide> <ide> import android.content.Context; <add>import android.content.res.TypedArray; <ide> import android.graphics.Canvas; <ide> import android.graphics.Color; <ide> import android.graphics.Paint; <ide> import android.graphics.Typeface; <ide> import android.util.AttributeSet; <add>import android.util.Log; <add>import android.util.TypedValue; <ide> import android.widget.RelativeLayout; <ide> import android.widget.TextView; <ide> <ide> }else{ <ide> text = attrs.getAttributeValue(ANDROIDXML,"text"); <ide> } <del> if(text != null){ <add> <add> int[] textSizeAttr = new int[] { android.R.attr.textSize }; <add> TypedArray typedArray = getContext().obtainStyledAttributes(attrs, textSizeAttr ); <add> float textSize = typedArray.getDimension(0, 24); <add> Log.d("ButtonFlat", "textSize: " + textSize); <add> if(text != null){ <ide> textButton = new TextView(getContext()); <ide> textButton.setText(text.toUpperCase()); <ide> textButton.setTextColor(backgroundColor); <ide> textButton.setTypeface(null, Typeface.BOLD); <add> textButton.setTextSize(TypedValue.COMPLEX_UNIT_PX, textSize); <ide> RelativeLayout.LayoutParams params = new RelativeLayout.LayoutParams(LayoutParams.WRAP_CONTENT,LayoutParams.WRAP_CONTENT); <ide> params.addRule(RelativeLayout.CENTER_IN_PARENT, RelativeLayout.TRUE); <ide> textButton.setLayoutParams(params);
Java
apache-2.0
dc3cc1f245c0cab397ccdf0bf2a416b53b9cee98
0
qos-ch/reload4j,qos-ch/reload4j,qos-ch/reload4j
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.log4j; import java.util.Hashtable; import org.apache.log4j.helpers.ThreadLocalMap; /** * The MDC class is similar to the {@link NDC} class except that it is based on * a map instead of a stack. It provides <em>mapped diagnostic contexts</em>. A * <em>Mapped Diagnostic Context</em>, or MDC in short, is an instrument for * distinguishing interleaved log output from different sources. Log output is * typically interleaved when a server handles multiple clients * near-simultaneously. * * <p> * <b><em>The MDC is managed on a per thread basis</em></b>. A child thread * automatically inherits a <em>copy</em> of the mapped diagnostic context of * its parent. * * <p> * The MDC class requires JDK 1.2 or above. Under JDK 1.1 the MDC will always * return empty values but otherwise will not affect or harm your application. * * @since 1.2 * * @author Ceki G&uuml;lc&uuml; */ public class MDC { final static MDC mdc = new MDC(); static final int HT_SIZE = 7; // boolean java1; Object tlm; private MDC() { tlm = new ThreadLocalMap(); } /** * Put a context value (the <code>o</code> parameter) as identified with the * <code>key</code> parameter into the current thread's context map. * * <p> * If the current thread does not have a context map it is created as a side * effect. * */ static public void put(String key, Object o) { if (mdc != null) { mdc.put0(key, o); } } /** * Get the context identified by the <code>key</code> parameter. * * <p> * This method has no side effects. */ static public Object get(String key) { if (mdc != null) { return mdc.get0(key); } return null; } /** * Remove the the context identified by the <code>key</code> parameter. * */ static public void remove(String key) { if (mdc != null) { mdc.remove0(key); } } /** * Get the current thread's MDC as a hashtable. This method is intended to be * used internally. */ public static Hashtable getContext() { if (mdc != null) { return mdc.getContext0(); } else { return null; } } /** * Remove all values from the MDC. 
* * @since 1.2.16 */ public static void clear() { if (mdc != null) { mdc.clear0(); } } private void put0(String key, Object o) { if (tlm == null) { return; } else { Hashtable ht = (Hashtable) ((ThreadLocalMap) tlm).get(); if (ht == null) { ht = new Hashtable(HT_SIZE); ((ThreadLocalMap) tlm).set(ht); } ht.put(key, o); } } private Object get0(String key) { if (tlm == null) { return null; } else { Hashtable ht = (Hashtable) ((ThreadLocalMap) tlm).get(); if (ht != null && key != null) { return ht.get(key); } else { return null; } } } private void remove0(String key) { if (tlm != null) { Hashtable ht = (Hashtable) ((ThreadLocalMap) tlm).get(); if (ht != null) { ht.remove(key); // clean up if this was the last key if (ht.isEmpty()) { clear0(); } } } } private Hashtable getContext0() { if (tlm == null) { return null; } else { return (Hashtable) ((ThreadLocalMap) tlm).get(); } } private void clear0() { if (tlm != null) { Hashtable ht = (Hashtable) ((ThreadLocalMap) tlm).get(); if (ht != null) { ht.clear(); } ((ThreadLocalMap) tlm).remove(); } } }
src/main/java/org/apache/log4j/MDC.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.log4j; import java.util.Hashtable; import org.apache.log4j.helpers.ThreadLocalMap; /** * The MDC class is similar to the {@link NDC} class except that it is based on * a map instead of a stack. It provides <em>mapped diagnostic contexts</em>. A * <em>Mapped Diagnostic Context</em>, or MDC in short, is an instrument for * distinguishing interleaved log output from different sources. Log output is * typically interleaved when a server handles multiple clients * near-simultaneously. * * <p> * <b><em>The MDC is managed on a per thread basis</em></b>. A child thread * automatically inherits a <em>copy</em> of the mapped diagnostic context of * its parent. * * <p> * The MDC class requires JDK 1.2 or above. Under JDK 1.1 the MDC will always * return empty values but otherwise will not affect or harm your application. * * @since 1.2 * * @author Ceki G&uuml;lc&uuml; */ public class MDC { final static MDC mdc = new MDC(); static final int HT_SIZE = 7; // boolean java1; Object tlm; private MDC() { tlm = new ThreadLocalMap(); } /** * Put a context value (the <code>o</code> parameter) as identified with the * <code>key</code> parameter into the current thread's context map. * * <p> * If the current thread does not have a context map it is created as a side * effect. * */ static public void put(String key, Object o) { if (mdc != null) { mdc.put0(key, o); } } /** * Get the context identified by the <code>key</code> parameter. * * <p> * This method has no side effects. */ static public Object get(String key) { if (mdc != null) { return mdc.get0(key); } return null; } /** * Remove the the context identified by the <code>key</code> parameter. * */ static public void remove(String key) { if (mdc != null) { mdc.remove0(key); } } /** * Get the current thread's MDC as a hashtable. This method is intended to be * used internally. */ public static Hashtable getContext() { if (mdc != null) { return mdc.getContext0(); } else { return null; } } /** * Remove all values from the MDC. 
* * @since 1.2.16 */ public static void clear() { if (mdc != null) { mdc.clear0(); } } private void put0(String key, Object o) { if (tlm == null) { return; } else { Hashtable ht = (Hashtable) ((ThreadLocalMap) tlm).get(); if (ht == null) { ht = new Hashtable(HT_SIZE); ((ThreadLocalMap) tlm).set(ht); } ht.put(key, o); } } private Object get0(String key) { if (tlm == null) { return null; } else { Hashtable ht = (Hashtable) ((ThreadLocalMap) tlm).get(); if (ht != null && key != null) { return ht.get(key); } else { return null; } } } private void remove0(String key) { if (tlm != null) { Hashtable ht = (Hashtable) ((ThreadLocalMap) tlm).get(); if (ht != null) { ht.remove(key); // clean up if this was the last key if (ht.isEmpty()) { clear0(); } } } } private Hashtable getContext0() { if (tlm == null) { return null; } else { return (Hashtable) ((ThreadLocalMap) tlm).get(); } } private void clear0() { if (tlm != null) { Hashtable ht = (Hashtable) ((ThreadLocalMap) tlm).get(); if (ht != null) { ht.clear(); } (ThreadLocalMap) tlm).remove(); } } }
fix typo Signed-off-by: Ceki Gulcu <[email protected]>
src/main/java/org/apache/log4j/MDC.java
fix typo
<ide><path>rc/main/java/org/apache/log4j/MDC.java <ide> ht.clear(); <ide> } <ide> <del> (ThreadLocalMap) tlm).remove(); <add> ((ThreadLocalMap) tlm).remove(); <ide> } <ide> } <ide>
Java
mit
fa6035bda6c606868977179534cb941f26fbdb92
0
facebook/react-native,hammerandchisel/react-native,exponent/react-native,pandiaraj44/react-native,exponentjs/react-native,exponentjs/react-native,javache/react-native,myntra/react-native,exponentjs/react-native,janicduplessis/react-native,hammerandchisel/react-native,myntra/react-native,javache/react-native,facebook/react-native,hammerandchisel/react-native,janicduplessis/react-native,javache/react-native,myntra/react-native,myntra/react-native,janicduplessis/react-native,javache/react-native,hammerandchisel/react-native,janicduplessis/react-native,janicduplessis/react-native,facebook/react-native,exponent/react-native,exponentjs/react-native,facebook/react-native,arthuralee/react-native,facebook/react-native,arthuralee/react-native,exponentjs/react-native,hoangpham95/react-native,hoangpham95/react-native,javache/react-native,pandiaraj44/react-native,javache/react-native,facebook/react-native,pandiaraj44/react-native,myntra/react-native,facebook/react-native,exponent/react-native,janicduplessis/react-native,hoangpham95/react-native,arthuralee/react-native,hoangpham95/react-native,myntra/react-native,pandiaraj44/react-native,myntra/react-native,javache/react-native,myntra/react-native,exponent/react-native,pandiaraj44/react-native,janicduplessis/react-native,hammerandchisel/react-native,janicduplessis/react-native,pandiaraj44/react-native,hammerandchisel/react-native,hoangpham95/react-native,javache/react-native,myntra/react-native,arthuralee/react-native,pandiaraj44/react-native,exponent/react-native,exponent/react-native,exponentjs/react-native,exponentjs/react-native,facebook/react-native,exponent/react-native,hoangpham95/react-native,hoangpham95/react-native,exponentjs/react-native,exponent/react-native,hammerandchisel/react-native,arthuralee/react-native,facebook/react-native,javache/react-native,hammerandchisel/react-native,pandiaraj44/react-native,hoangpham95/react-native
// Copyright (c) Facebook, Inc. and its affiliates. // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. package com.facebook.react.uimanager; import android.content.Context; import android.support.v4.view.AccessibilityDelegateCompat; import android.support.v4.view.ViewCompat; import android.support.v4.view.accessibility.AccessibilityNodeInfoCompat; import android.view.View; import com.facebook.react.R; import java.util.Locale; import javax.annotation.Nullable; /** * Utility class that handles the addition of a "role" for accessibility to either a View or * AccessibilityNodeInfo. */ public class AccessibilityDelegateUtil { /** * These roles are defined by Google's TalkBack screen reader, and this list should be kept up to * date with their implementation. Details can be seen in their source code here: * * <p>https://github.com/google/talkback/blob/master/utils/src/main/java/Role.java */ public enum AccessibilityRole { NONE(null), BUTTON("android.widget.Button"), LINK("android.widget.ViewGroup"), SEARCH("android.widget.EditText"), IMAGE("android.widget.ImageView"), IMAGEBUTTON("android.widget.ImageView"), KEYBOARDKEY("android.inputmethodservice.Keyboard$Key"), TEXT("android.widget.ViewGroup"), ADJUSTABLE("android.widget.SeekBar"), SUMMARY("android.widget.ViewGroup"), HEADER("android.widget.ViewGroup"); @Nullable private final String mValue; AccessibilityRole(String type) { mValue = type; } @Nullable public String getValue() { return mValue; } public static AccessibilityRole fromValue(String value) { for (AccessibilityRole role : AccessibilityRole.values()) { if (role.name().equalsIgnoreCase(value)) { return role; } } throw new IllegalArgumentException("Invalid accessibility role value: " + value); } } private AccessibilityDelegateUtil() { // No instances } public static void setDelegate(final View view) { final String accessibilityHint = (String) view.getTag(R.id.accessibility_hint); final AccessibilityRole accessibilityRole = (AccessibilityRole) view.getTag(R.id.accessibility_role); // if a view already has an accessibility delegate, replacing it could cause problems, // so leave it alone. 
if (!ViewCompat.hasAccessibilityDelegate(view) && (accessibilityHint != null || accessibilityRole != null)) { ViewCompat.setAccessibilityDelegate( view, new AccessibilityDelegateCompat() { @Override public void onInitializeAccessibilityNodeInfo( View host, AccessibilityNodeInfoCompat info) { super.onInitializeAccessibilityNodeInfo(host, info); setRole(info, accessibilityRole, view.getContext()); if (!(accessibilityHint == null)) { String contentDescription=(String)info.getContentDescription(); if (contentDescription != null) { contentDescription = contentDescription + ", " + accessibilityHint; info.setContentDescription(contentDescription); } else { info.setContentDescription(accessibilityHint); } } } }); } } /** * Strings for setting the Role Description in english */ //TODO: Eventually support for other languages on talkback public static void setRole(AccessibilityNodeInfoCompat nodeInfo, AccessibilityRole role, final Context context) { if (role == null) { role = AccessibilityRole.NONE; } nodeInfo.setClassName(role.getValue()); if (Locale.getDefault().getLanguage().equals(new Locale("en").getLanguage())) { if (role.equals(AccessibilityRole.LINK)) { nodeInfo.setRoleDescription(context.getString(R.string.link_description)); } if (role.equals(AccessibilityRole.SEARCH)) { nodeInfo.setRoleDescription(context.getString(R.string.search_description)); } if (role.equals(AccessibilityRole.IMAGE)) { nodeInfo.setRoleDescription(context.getString(R.string.image_description)); } if (role.equals(AccessibilityRole.IMAGEBUTTON)) { nodeInfo.setRoleDescription(context.getString(R.string.image_button_description)); } if (role.equals(AccessibilityRole.ADJUSTABLE)) { nodeInfo.setRoleDescription(context.getString(R.string.adjustable_description)); } } if (role.equals(AccessibilityRole.IMAGEBUTTON)) { nodeInfo.setClickable(true); } } }
ReactAndroid/src/main/java/com/facebook/react/uimanager/AccessibilityDelegateUtil.java
// Copyright (c) Facebook, Inc. and its affiliates. // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. package com.facebook.react.uimanager; import android.content.Context; import android.support.v4.view.AccessibilityDelegateCompat; import android.support.v4.view.ViewCompat; import android.support.v4.view.accessibility.AccessibilityNodeInfoCompat; import android.view.View; import com.facebook.react.R; import java.util.Locale; import javax.annotation.Nullable; /** * Utility class that handles the addition of a "role" for accessibility to either a View or * AccessibilityNodeInfo. */ public class AccessibilityDelegateUtil { /** * These roles are defined by Google's TalkBack screen reader, and this list should be kept up to * date with their implementation. Details can be seen in their source code here: * * <p>https://github.com/google/talkback/blob/master/utils/src/main/java/Role.java */ public enum AccessibilityRole { NONE(null), BUTTON("android.widget.Button"), LINK("android.widget.ViewGroup"), SEARCH("android.widget.EditText"), IMAGE("android.widget.ImageView"), IMAGEBUTTON("android.widget.ImageView"), KEYBOARDKEY("android.inputmethodservice.Keyboard$Key"), TEXT("android.widget.ViewGroup"), ADJUSTABLE("android.widget.SeekBar"), SUMMARY("android.widget.ViewGroup"), HEADER("android.widget.ViewGroup"); @Nullable private final String mValue; AccessibilityRole(String type) { mValue = type; } @Nullable public String getValue() { return mValue; } public static AccessibilityRole fromValue(String value) { for (AccessibilityRole role : AccessibilityRole.values()) { if (role.name().equalsIgnoreCase(value)) { return role; } } throw new IllegalArgumentException("Invalid accessibility role value: " + value); } } private AccessibilityDelegateUtil() { // No instances } public static void setDelegate(final View view) { final String accessibilityHint = (String) view.getTag(R.id.accessibility_hint); final AccessibilityRole accessibilityRole = (AccessibilityRole) view.getTag(R.id.accessibility_role); // if a view already has an accessibility delegate, replacing it could cause problems, // so leave it alone. 
if (!ViewCompat.hasAccessibilityDelegate(view) && accessibilityHint != null && accessibilityRole != null) { ViewCompat.setAccessibilityDelegate( view, new AccessibilityDelegateCompat() { @Override public void onInitializeAccessibilityNodeInfo( View host, AccessibilityNodeInfoCompat info) { super.onInitializeAccessibilityNodeInfo(host, info); setRole(info, accessibilityRole, view.getContext()); if (!(accessibilityHint == null)) { String contentDescription=(String)info.getContentDescription(); if (contentDescription != null) { contentDescription = contentDescription + ", " + accessibilityHint; info.setContentDescription(contentDescription); } else { info.setContentDescription(accessibilityHint); } } } }); } } /** * Strings for setting the Role Description in english */ //TODO: Eventually support for other languages on talkback public static void setRole(AccessibilityNodeInfoCompat nodeInfo, AccessibilityRole role, final Context context) { if (role == null) { role = AccessibilityRole.NONE; } nodeInfo.setClassName(role.getValue()); if (Locale.getDefault().getLanguage().equals(new Locale("en").getLanguage())) { if (role.equals(AccessibilityRole.LINK)) { nodeInfo.setRoleDescription(context.getString(R.string.link_description)); } if (role.equals(AccessibilityRole.SEARCH)) { nodeInfo.setRoleDescription(context.getString(R.string.search_description)); } if (role.equals(AccessibilityRole.IMAGE)) { nodeInfo.setRoleDescription(context.getString(R.string.image_description)); } if (role.equals(AccessibilityRole.IMAGEBUTTON)) { nodeInfo.setRoleDescription(context.getString(R.string.image_button_description)); } if (role.equals(AccessibilityRole.ADJUSTABLE)) { nodeInfo.setRoleDescription(context.getString(R.string.adjustable_description)); } } if (role.equals(AccessibilityRole.IMAGEBUTTON)) { nodeInfo.setClickable(true); } } }
Fix accessibility role/label Summary: D10138128 had some shamefully wrong boolean logic to determine whether we should customize the accessibility delegate. Previously, we did it if BOTH the accessibility label AND role were present. We should actually do it if EITHER are present. Reviewed By: mdvacca Differential Revision: D10182135 fbshipit-source-id: 209a8ab43f5869762843fe878cfd59a7b9b5ab1a
ReactAndroid/src/main/java/com/facebook/react/uimanager/AccessibilityDelegateUtil.java
Fix accessibility role/label
<ide><path>eactAndroid/src/main/java/com/facebook/react/uimanager/AccessibilityDelegateUtil.java <ide> // if a view already has an accessibility delegate, replacing it could cause problems, <ide> // so leave it alone. <ide> if (!ViewCompat.hasAccessibilityDelegate(view) && <del> accessibilityHint != null && <del> accessibilityRole != null) { <add> (accessibilityHint != null || accessibilityRole != null)) { <ide> ViewCompat.setAccessibilityDelegate( <ide> view, <ide> new AccessibilityDelegateCompat() {
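The commit message above boils down to replacing an AND with an OR when deciding whether to install the custom accessibility delegate. The change can be distilled into a small stand-alone predicate (hypothetical helper names; not part of the React Native sources):

public final class DelegatePredicateSketch {

    private DelegatePredicateSketch() {
    }

    // Old behaviour: a delegate was installed only when BOTH hint and role were present.
    static boolean shouldSetDelegateOld(String hint, Object role) {
        return hint != null && role != null;
    }

    // Fixed behaviour: a delegate is installed when EITHER is present.
    static boolean shouldSetDelegateNew(String hint, Object role) {
        return hint != null || role != null;
    }

    public static void main(String[] args) {
        // A hint without a role was silently ignored before the fix.
        System.out.println(shouldSetDelegateOld("Opens settings", null)); // false
        System.out.println(shouldSetDelegateNew("Opens settings", null)); // true
    }
}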
Java
agpl-3.0
4ca5dbb2e07a3ae5b9453615c020e4b30e1c323d
0
CecileBONIN/Silverpeas-Core,SilverDav/Silverpeas-Core,ebonnet/Silverpeas-Core,ebonnet/Silverpeas-Core,SilverDav/Silverpeas-Core,NicolasEYSSERIC/Silverpeas-Core,CecileBONIN/Silverpeas-Core,Silverpeas/Silverpeas-Core,mmoqui/Silverpeas-Core,auroreallibe/Silverpeas-Core,ebonnet/Silverpeas-Core,NicolasEYSSERIC/Silverpeas-Core,ebonnet/Silverpeas-Core,SilverTeamWork/Silverpeas-Core,SilverTeamWork/Silverpeas-Core,NicolasEYSSERIC/Silverpeas-Core,mmoqui/Silverpeas-Core,CecileBONIN/Silverpeas-Core,CecileBONIN/Silverpeas-Core,CecileBONIN/Silverpeas-Core,mmoqui/Silverpeas-Core,Silverpeas/Silverpeas-Core,ebonnet/Silverpeas-Core,NicolasEYSSERIC/Silverpeas-Core,SilverYoCha/Silverpeas-Core,SilverTeamWork/Silverpeas-Core,SilverYoCha/Silverpeas-Core,SilverDav/Silverpeas-Core,Silverpeas/Silverpeas-Core,auroreallibe/Silverpeas-Core,auroreallibe/Silverpeas-Core,SilverYoCha/Silverpeas-Core,ebonnet/Silverpeas-Core
/** * Copyright (C) 2000 - 2012 Silverpeas * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as * published by the Free Software Foundation, either version 3 of the * License, or (at your option) any later version. * * As a special exception to the terms and conditions of version 3.0 of * the GPL, you may redistribute this Program in connection with Free/Libre * Open Source Software ("FLOSS") applications as described in Silverpeas's * FLOSS exception. You should have received a copy of the text describing * the FLOSS exception, and it is also available here: * "http://www.silverpeas.org/legal/licensing" * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package com.silverpeas.socialnetwork.myContactProfil.servlets; import com.silverpeas.directory.model.Member; import com.silverpeas.socialnetwork.myContactProfil.control.MyContactProfilSessionController; import com.stratelia.silverpeas.peasCore.ComponentContext; import com.stratelia.silverpeas.peasCore.MainSessionController; import com.stratelia.silverpeas.peasCore.servlets.ComponentRequestRouter; import com.stratelia.webactiv.beans.admin.UserDetail; import javax.servlet.http.HttpServletRequest; import java.util.ArrayList; import java.util.List; import java.util.Random; /** * @author azzedine */ public class MyContactProfilRequestRouter extends ComponentRequestRouter<MyContactProfilSessionController> { private static final long serialVersionUID = 1L; private final int NUMBER_CONTACTS_TO_DISPLAY = 3; /** * get Session ControlBeanName * @return String */ @Override public String getSessionControlBeanName() { return "myContactProfil"; } /** * create ComponentSession Controller * @param mainSessionCtrl * @param componentContext * @return ComponentSessionController */ @Override public MyContactProfilSessionController createComponentSessionController( MainSessionController mainSessionCtrl, ComponentContext componentContext) { return new MyContactProfilSessionController(mainSessionCtrl, componentContext); } /** *get Destination * @param function * @param sc * @param request * @return */ @Override public String getDestination(String function, MyContactProfilSessionController sc, HttpServletRequest request) { String destination = "#"; String userId = request.getParameter("userId"); if (function.equalsIgnoreCase("Infos")) { request.setAttribute("View", function); destination = "/socialNetwork/jsp/myContactProfil/myContactProfile.jsp"; } else if ("Main".equalsIgnoreCase(function)) { request.setAttribute("View", "Wall"); destination = "/socialNetwork/jsp/myContactProfil/myContactProfile.jsp"; } request.setAttribute("UserFull", sc.getUserFull(userId)); request.setAttribute("Member", new Member(sc.getUserDetail(userId))); List<String> contactIds = sc.getContactsIdsForUser(userId); request.setAttribute("Contacts", chooseContactsToDisplay(contactIds, sc)); request.setAttribute("ContactsNumber", contactIds.size()); contactIds = sc.getCommonContactsIdsForUser(userId); request.setAttribute("CommonContacts", chooseContactsToDisplay(contactIds, sc)); request.setAttribute("CommonContactsNumber", contactIds.size()); return destination; } 
/** * methode to choose (x) contacts for display it in the page profil x is the number of contacts * the methode use Random rule * @param contactIds * @return List<SNContactUser> */ private List<UserDetail> chooseContactsToDisplay(List<String> contactIds, MyContactProfilSessionController sc) { List<UserDetail> contacts = new ArrayList<UserDetail>(); int numberOfContactsTodisplay = sc.getSettings().getInteger("numberOfContactsTodisplay", NUMBER_CONTACTS_TO_DISPLAY); if (contactIds.size() <= numberOfContactsTodisplay) { for (String contactId : contactIds) { contacts.add(sc.getUserDetail(contactId)); } } else { Random random = new Random(); int indexContactsChoosed = (random.nextInt(contactIds.size())); for (int i = 0; i < numberOfContactsTodisplay; i++) { String contactId = contactIds.get((indexContactsChoosed + i) % numberOfContactsTodisplay); contacts.add(sc.getUserDetail(contactId)); } } return contacts; } }
war-core/src/main/java/com/silverpeas/socialnetwork/myContactProfil/servlets/MyContactProfilRequestRouter.java
/** * Copyright (C) 2000 - 2012 Silverpeas * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as * published by the Free Software Foundation, either version 3 of the * License, or (at your option) any later version. * * As a special exception to the terms and conditions of version 3.0 of * the GPL, you may redistribute this Program in connection with Free/Libre * Open Source Software ("FLOSS") applications as described in Silverpeas's * FLOSS exception. You should have received a copy of the text describing * the FLOSS exception, and it is also available here: * "http://www.silverpeas.org/legal/licensing" * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package com.silverpeas.socialnetwork.myContactProfil.servlets; import com.silverpeas.directory.model.Member; import com.silverpeas.socialnetwork.myContactProfil.control.MyContactProfilSessionController; import com.stratelia.silverpeas.peasCore.ComponentContext; import com.stratelia.silverpeas.peasCore.MainSessionController; import com.stratelia.silverpeas.peasCore.servlets.ComponentRequestRouter; import com.stratelia.webactiv.beans.admin.UserDetail; import javax.servlet.http.HttpServletRequest; import java.util.ArrayList; import java.util.List; import java.util.Random; /** * @author azzedine */ public class MyContactProfilRequestRouter extends ComponentRequestRouter<MyContactProfilSessionController> { private static final long serialVersionUID = 1L; private final int NUMBER_CONTACTS_TO_DISPLAY = 3; /** * get Session ControlBeanName * @return String */ @Override public String getSessionControlBeanName() { return "myContactProfil"; } /** * create ComponentSession Controller * @param mainSessionCtrl * @param componentContext * @return ComponentSessionController */ @Override public MyContactProfilSessionController createComponentSessionController( MainSessionController mainSessionCtrl, ComponentContext componentContext) { return new MyContactProfilSessionController(mainSessionCtrl, componentContext); } /** *get Destination * @param function * @param sc * @param request * @return */ @Override public String getDestination(String function, MyContactProfilSessionController sc, HttpServletRequest request) { String destination = "#"; String userId = request.getParameter("userId"); if (function.equalsIgnoreCase("Infos")) { request.setAttribute("View", function); destination = "/socialnetwork/jsp/myContactProfil/myContactProfile.jsp"; } else if ("Main".equalsIgnoreCase(function)) { request.setAttribute("View", "Wall"); destination = "/socialnetwork/jsp/myContactProfil/myContactProfile.jsp"; } request.setAttribute("UserFull", sc.getUserFull(userId)); request.setAttribute("Member", new Member(sc.getUserDetail(userId))); List<String> contactIds = sc.getContactsIdsForUser(userId); request.setAttribute("Contacts", chooseContactsToDisplay(contactIds, sc)); request.setAttribute("ContactsNumber", contactIds.size()); contactIds = sc.getCommonContactsIdsForUser(userId); request.setAttribute("CommonContacts", chooseContactsToDisplay(contactIds, sc)); request.setAttribute("CommonContactsNumber", contactIds.size()); return destination; } 
/** * methode to choose (x) contacts for display it in the page profil x is the number of contacts * the methode use Random rule * @param contactIds * @return List<SNContactUser> */ private List<UserDetail> chooseContactsToDisplay(List<String> contactIds, MyContactProfilSessionController sc) { List<UserDetail> contacts = new ArrayList<UserDetail>(); int numberOfContactsTodisplay = sc.getSettings().getInteger("numberOfContactsTodisplay", NUMBER_CONTACTS_TO_DISPLAY); if (contactIds.size() <= numberOfContactsTodisplay) { for (String contactId : contactIds) { contacts.add(sc.getUserDetail(contactId)); } } else { Random random = new Random(); int indexContactsChoosed = (random.nextInt(contactIds.size())); for (int i = 0; i < numberOfContactsTodisplay; i++) { String contactId = contactIds.get((indexContactsChoosed + i) % numberOfContactsTodisplay); contacts.add(sc.getUserDetail(contactId)); } } return contacts; } }
fixing bug #3547 JSP directories are case sensitive. Replacing socialnetwork by socialNetwork.
war-core/src/main/java/com/silverpeas/socialnetwork/myContactProfil/servlets/MyContactProfilRequestRouter.java
fixing bug #3547 JSP directories are case sensitive. Replacing socialnetwork by socialNetwork.
<ide><path>ar-core/src/main/java/com/silverpeas/socialnetwork/myContactProfil/servlets/MyContactProfilRequestRouter.java <ide> String userId = request.getParameter("userId"); <ide> if (function.equalsIgnoreCase("Infos")) { <ide> request.setAttribute("View", function); <del> destination = "/socialnetwork/jsp/myContactProfil/myContactProfile.jsp"; <add> destination = "/socialNetwork/jsp/myContactProfil/myContactProfile.jsp"; <ide> } else if ("Main".equalsIgnoreCase(function)) { <ide> request.setAttribute("View", "Wall"); <del> destination = "/socialnetwork/jsp/myContactProfil/myContactProfile.jsp"; <add> destination = "/socialNetwork/jsp/myContactProfil/myContactProfile.jsp"; <ide> } <ide> <ide> request.setAttribute("UserFull", sc.getUserFull(userId));
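The fix above is a case correction in a JSP path that appears twice in getDestination(); servlet containers resolve such paths case-sensitively on most file systems. One way to keep the two occurrences from drifting apart again is to hoist the literal into a single constant — a hypothetical refactoring sketched here for illustration, not what the commit itself does:

public class ProfileDestinationSketch {

    // Single source of truth for the JSP location; the directory name is case-sensitive.
    private static final String PROFILE_JSP =
            "/socialNetwork/jsp/myContactProfil/myContactProfile.jsp";

    public String getDestination(String function) {
        if ("Infos".equalsIgnoreCase(function) || "Main".equalsIgnoreCase(function)) {
            return PROFILE_JSP;
        }
        return "#";
    }

    public static void main(String[] args) {
        ProfileDestinationSketch router = new ProfileDestinationSketch();
        System.out.println(router.getDestination("Infos")); // -> /socialNetwork/jsp/...
    }
}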
Java
lgpl-2.1
d8c194a76cb149f5c8ffa71e98090784976d1c80
0
cytoscape/cytoscape-impl,cytoscape/cytoscape-impl,cytoscape/cytoscape-impl,cytoscape/cytoscape-impl,cytoscape/cytoscape-impl
package org.cytoscape.work.internal.tunables; /* * #%L * Cytoscape Work Swing Impl (work-swing-impl) * $Id:$ * $HeadURL:$ * %% * Copyright (C) 2006 - 2013 The Cytoscape Consortium * %% * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as * published by the Free Software Foundation, either version 2.1 of the * License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Lesser Public License for more details. * * You should have received a copy of the GNU General Lesser Public * License along with this program. If not, see * <http://www.gnu.org/licenses/lgpl-2.1.html>. * #L% */ import static org.cytoscape.work.internal.tunables.utils.GUIDefaults.setTooltip; import static org.cytoscape.work.internal.tunables.utils.GUIDefaults.updateFieldPanel; import java.awt.Window; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.event.MouseAdapter; import java.awt.event.MouseEvent; import java.lang.reflect.Field; import java.lang.reflect.Method; import javax.swing.JCheckBox; import javax.swing.JLabel; import javax.swing.JOptionPane; import javax.swing.SwingUtilities; import org.cytoscape.work.Tunable; import org.cytoscape.work.swing.AbstractGUITunableHandler; import org.cytoscape.work.swing.DirectlyPresentableTunableHandler; /** * Handler for the type <i>Boolean</i> of <code>Tunable</code> * @author pasteur */ public class BooleanHandler extends AbstractGUITunableHandler implements ActionListener, DirectlyPresentableTunableHandler { private JCheckBox checkBox; private JOptionPane optionPane; private boolean useOptionPane; private int selectedOption; /** * Constructs the <code>GUIHandler</code> for the <code>Boolean</code> type * * It creates the Swing component for this Object (JCheckBox) with its description/initial state, and displays it * * @param f field that has been annotated * @param o object contained in <code>f</code> * @param t tunable associated to <code>f</code> */ public BooleanHandler(Field f, Object o, Tunable t) { super(f, o, t); init(); } public BooleanHandler(final Method getter, final Method setter, final Object instance, final Tunable tunable) { super(getter, setter, instance, tunable); init(); } private void init() { // setup GUI checkBox = new JCheckBox(); checkBox.setSelected(getBoolean()); checkBox.addActionListener(this); final JLabel label = new JLabel(getDescription()); label.addMouseListener(new MouseAdapter() { @Override public void mouseReleased(final MouseEvent e) { if (checkBox.isEnabled() && SwingUtilities.isLeftMouseButton(e)) { checkBox.doClick(); checkBox.requestFocusInWindow(); } } }); updateFieldPanel(panel, label, checkBox, horizontal); setTooltip(getTooltip(), label, checkBox); } private boolean getBoolean() { try { return (Boolean)getValue(); } catch (final Exception e) { e.printStackTrace(); return false; } } @Override public void update(){ boolean b; try { b = (Boolean) getValue(); checkBox.setSelected(b); } catch(Exception e){ e.printStackTrace(); } } @Override public boolean isForcedToSetDirectly() { return getParams().getProperty("ForceSetDirectly", "false").equalsIgnoreCase("true"); } @Override public boolean setTunableDirectly(Window possibleParent) { selectedOption = setOptionPaneGUI(possibleParent); useOptionPane = true; handle(); 
useOptionPane = false; return selectedOption != JOptionPane.CANCEL_OPTION; } @SuppressWarnings("static-access") private int setOptionPaneGUI(Window possibleParent) { return optionPane.showOptionDialog( possibleParent, getDescription(), getParams().getProperty("ForceSetTitle", " "), JOptionPane.OK_CANCEL_OPTION, JOptionPane.PLAIN_MESSAGE, null, null, null ); } /** * To set the current value represented in the <code>GUIHandler</code> (in a <code>JCheckBox</code>) * to the value of this <code>Boolean</code> object */ @Override public void handle() { try { final Boolean setting; if (useOptionPane) setting = selectedOption == JOptionPane.YES_OPTION ? true : false; else setting = checkBox.isSelected(); setValue(setting); } catch (Exception e) { e.printStackTrace(); } } /** * To get the state of the value of the <code>BooleanHandler</code> : <code>true</code> or <code>false</code> */ @Override public String getState() { return String.valueOf(checkBox.isSelected()); } @Override public void actionPerformed(ActionEvent e) { handle(); } }
work-swing-impl/impl/src/main/java/org/cytoscape/work/internal/tunables/BooleanHandler.java
package org.cytoscape.work.internal.tunables; /* * #%L * Cytoscape Work Swing Impl (work-swing-impl) * $Id:$ * $HeadURL:$ * %% * Copyright (C) 2006 - 2013 The Cytoscape Consortium * %% * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as * published by the Free Software Foundation, either version 2.1 of the * License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Lesser Public License for more details. * * You should have received a copy of the GNU General Lesser Public * License along with this program. If not, see * <http://www.gnu.org/licenses/lgpl-2.1.html>. * #L% */ import static org.cytoscape.work.internal.tunables.utils.GUIDefaults.setTooltip; import static org.cytoscape.work.internal.tunables.utils.GUIDefaults.updateFieldPanel; import java.awt.Window; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.lang.reflect.Field; import java.lang.reflect.Method; import javax.swing.JCheckBox; import javax.swing.JOptionPane; import org.cytoscape.work.Tunable; import org.cytoscape.work.swing.AbstractGUITunableHandler; import org.cytoscape.work.swing.DirectlyPresentableTunableHandler; /** * Handler for the type <i>Boolean</i> of <code>Tunable</code> * @author pasteur */ public class BooleanHandler extends AbstractGUITunableHandler implements ActionListener, DirectlyPresentableTunableHandler { private JCheckBox checkBox; private JOptionPane optionPane; private boolean useOptionPane; private int selectedOption; /** * Constructs the <code>GUIHandler</code> for the <code>Boolean</code> type * * It creates the Swing component for this Object (JCheckBox) with its description/initial state, and displays it * * @param f field that has been annotated * @param o object contained in <code>f</code> * @param t tunable associated to <code>f</code> */ public BooleanHandler(Field f, Object o, Tunable t) { super(f, o, t); init(); } public BooleanHandler(final Method getter, final Method setter, final Object instance, final Tunable tunable) { super(getter, setter, instance, tunable); init(); } private void init() { String description = getDescription().trim(); if (description.endsWith(":")) description = description.substring(0, description.length() - 1); //setup GUI checkBox = new JCheckBox(description); checkBox.setSelected(getBoolean()); checkBox.addActionListener(this); updateFieldPanel(panel, checkBox, horizontal); setTooltip(getTooltip(), checkBox); } private boolean getBoolean() { try { return (Boolean)getValue(); } catch (final Exception e) { e.printStackTrace(); return false; } } @Override public void update(){ boolean b; try { b = (Boolean) getValue(); checkBox.setSelected(b); } catch(Exception e){ e.printStackTrace(); } } @Override public boolean isForcedToSetDirectly() { return getParams().getProperty("ForceSetDirectly", "false").equalsIgnoreCase("true"); } @Override public boolean setTunableDirectly(Window possibleParent) { selectedOption = setOptionPaneGUI(possibleParent); useOptionPane = true; handle(); useOptionPane = false; return selectedOption != JOptionPane.CANCEL_OPTION; } @SuppressWarnings("static-access") private int setOptionPaneGUI(Window possibleParent) { return optionPane.showOptionDialog( possibleParent, getDescription(), getParams().getProperty("ForceSetTitle", " "), 
JOptionPane.OK_CANCEL_OPTION, JOptionPane.PLAIN_MESSAGE, null, null, null ); } /** * To set the current value represented in the <code>GUIHandler</code> (in a <code>JCheckBox</code>) * to the value of this <code>Boolean</code> object */ @Override public void handle() { try { final Boolean setting; if (useOptionPane) setting = selectedOption == JOptionPane.YES_OPTION ? true : false; else setting = checkBox.isSelected(); setValue(setting); } catch (Exception e) { e.printStackTrace(); } } /** * To get the state of the value of the <code>BooleanHandler</code> : <code>true</code> or <code>false</code> */ @Override public String getState() { return String.valueOf(checkBox.isSelected()); } @Override public void actionPerformed(ActionEvent e) { handle(); } }
Refs #3094: Check-box labels are now left-justified (to prevent too much empty space in Tunable forms) and are still clickable.
work-swing-impl/impl/src/main/java/org/cytoscape/work/internal/tunables/BooleanHandler.java
Refs #3094: Check-box labels are now left-justified (to prevent too much empty space in Tunable forms) and are still clickable.
<ide><path>ork-swing-impl/impl/src/main/java/org/cytoscape/work/internal/tunables/BooleanHandler.java <ide> import java.awt.Window; <ide> import java.awt.event.ActionEvent; <ide> import java.awt.event.ActionListener; <add>import java.awt.event.MouseAdapter; <add>import java.awt.event.MouseEvent; <ide> import java.lang.reflect.Field; <ide> import java.lang.reflect.Method; <ide> <ide> import javax.swing.JCheckBox; <add>import javax.swing.JLabel; <ide> import javax.swing.JOptionPane; <add>import javax.swing.SwingUtilities; <ide> <ide> import org.cytoscape.work.Tunable; <ide> import org.cytoscape.work.swing.AbstractGUITunableHandler; <ide> } <ide> <ide> private void init() { <del> String description = getDescription().trim(); <del> <del> if (description.endsWith(":")) <del> description = description.substring(0, description.length() - 1); <del> <del> //setup GUI <del> checkBox = new JCheckBox(description); <add> // setup GUI <add> checkBox = new JCheckBox(); <ide> checkBox.setSelected(getBoolean()); <ide> checkBox.addActionListener(this); <add> <add> final JLabel label = new JLabel(getDescription()); <add> label.addMouseListener(new MouseAdapter() { <add> @Override <add> public void mouseReleased(final MouseEvent e) { <add> if (checkBox.isEnabled() && SwingUtilities.isLeftMouseButton(e)) { <add> checkBox.doClick(); <add> checkBox.requestFocusInWindow(); <add> } <add> } <add> }); <ide> <del> updateFieldPanel(panel, checkBox, horizontal); <del> setTooltip(getTooltip(), checkBox); <add> updateFieldPanel(panel, label, checkBox, horizontal); <add> setTooltip(getTooltip(), label, checkBox); <ide> } <ide> <ide> private boolean getBoolean() {
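The Cytoscape change above keeps the check box unlabeled and moves the description into a JLabel whose left-clicks are forwarded to the box. The same interaction pattern in a self-contained Swing program (plain Swing only; the label text is invented and none of the Cytoscape tunable classes are used):

import java.awt.FlowLayout;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import javax.swing.JCheckBox;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.SwingUtilities;

public class ClickableLabelSketch {

    public static void main(String[] args) {
        SwingUtilities.invokeLater(() -> {
            final JCheckBox checkBox = new JCheckBox();
            final JLabel label = new JLabel("Apply force-directed layout");

            // Clicking the label toggles the box, mirroring the BooleanHandler change above.
            label.addMouseListener(new MouseAdapter() {
                @Override
                public void mouseReleased(MouseEvent e) {
                    if (checkBox.isEnabled() && SwingUtilities.isLeftMouseButton(e)) {
                        checkBox.doClick();
                        checkBox.requestFocusInWindow();
                    }
                }
            });

            JFrame frame = new JFrame("Clickable label");
            frame.setLayout(new FlowLayout());
            frame.add(checkBox);
            frame.add(label);
            frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
            frame.pack();
            frame.setVisible(true);
        });
    }
}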
Java
lgpl-2.1
0605fba6b294e0e27fcdec84af20dcde327abc0a
0
deegree/deegree3,deegree/deegree3,deegree/deegree3,deegree/deegree3,deegree/deegree3
//$HeadURL$ /*---------------------------------------------------------------------------- This file is part of deegree, http://deegree.org/ Copyright (C) 2001-2009 by: Department of Geography, University of Bonn and lat/lon GmbH This library is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version. This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. You should have received a copy of the GNU Lesser General Public License along with this library; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA Contact information: lat/lon GmbH Aennchenstr. 19, 53177 Bonn Germany http://lat-lon.de/ Department of Geography, University of Bonn Prof. Dr. Klaus Greve Postfach 1147, 53001 Bonn Germany http://www.geographie.uni-bonn.de/deegree/ e-mail: [email protected] ----------------------------------------------------------------------------*/ package org.deegree.services.wms.model.layers; import static org.deegree.cs.coordinatesystems.GeographicCRS.WGS84; import static org.deegree.services.wms.MapService.prepareImage; import static org.deegree.services.wms.model.Dimension.parseTyped; import static org.slf4j.LoggerFactory.getLogger; import java.awt.Graphics2D; import java.awt.image.BufferedImage; import java.io.StringReader; import java.text.ParseException; import java.util.Collection; import java.util.Date; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.ListIterator; import java.util.Map; import java_cup.runtime.Symbol; import org.deegree.commons.annotations.LoggingNotes; import org.deegree.commons.utils.DoublePair; import org.deegree.commons.utils.Pair; import org.deegree.commons.utils.Triple; import org.deegree.cs.CRSUtils; import org.deegree.cs.coordinatesystems.ICRS; import org.deegree.cs.exceptions.TransformationException; import org.deegree.cs.exceptions.UnknownCRSException; import org.deegree.cs.persistence.CRSManager; import org.deegree.feature.Feature; import org.deegree.feature.FeatureCollection; import org.deegree.feature.types.FeatureType; import org.deegree.filter.XPathEvaluator; import org.deegree.geometry.Envelope; import org.deegree.geometry.Geometry; import org.deegree.geometry.GeometryFactory; import org.deegree.geometry.GeometryTransformer; import org.deegree.protocol.wms.dims.DimensionLexer; import org.deegree.protocol.wms.dims.parser; import org.deegree.rendering.r2d.Renderer; import org.deegree.rendering.r2d.TextRenderer; import org.deegree.rendering.r2d.se.unevaluated.Style; import org.deegree.rendering.r2d.styling.Styling; import org.deegree.rendering.r2d.styling.TextStyling; import org.deegree.services.jaxb.wms.AbstractLayerType; import org.deegree.services.jaxb.wms.BoundingBoxType; import org.deegree.services.jaxb.wms.DimensionType; import org.deegree.services.jaxb.wms.LanguageStringType; import org.deegree.services.wms.MapService; import org.deegree.services.wms.WMSException.InvalidDimensionValue; import org.deegree.services.wms.WMSException.MissingDimensionValue; import org.deegree.services.wms.controller.ops.GetFeatureInfo; import org.deegree.services.wms.controller.ops.GetMap; import 
org.deegree.services.wms.model.Dimension; import org.slf4j.Logger; /** * <code>Layer</code> * * @author <a href="mailto:[email protected]">Andreas Schmitz</a> * @author last edited by: $Author$ * * @version $Revision$, $Date$ */ @LoggingNotes(warn = "logs information about dimension handling") public abstract class Layer { private static final Logger LOG = getLogger( Layer.class ); private String name; private String title; private String abstract_; private LinkedList<LanguageStringType> keywords; private Envelope bbox; private LinkedList<ICRS> srs; private DoublePair scaleHint; private LinkedList<Layer> children; private Layer parent; Dimension<Date> time; HashMap<String, Dimension<Object>> dimensions = new HashMap<String, Dimension<Object>>(); private String internalName; private boolean queryable = true; protected MapService service; protected Layer( MapService service, String name, String title, Layer parent ) { this.service = service; this.name = name; this.title = title; this.parent = parent; keywords = new LinkedList<LanguageStringType>(); srs = new LinkedList<ICRS>(); children = new LinkedList<Layer>(); } protected Layer( MapService service, AbstractLayerType layer, Layer parent ) { this.service = service; name = layer.getName(); title = layer.getTitle(); abstract_ = layer.getAbstract(); keywords = layer.getKeywords() == null ? new LinkedList<LanguageStringType>() : new LinkedList<LanguageStringType>( layer.getKeywords().getKeyword() ); bbox = parseBoundingBox( layer.getBoundingBox() ); srs = parseCoordinateSystems( layer.getCRS() ); if ( srs == null ) { srs = new LinkedList<ICRS>(); } this.parent = parent; children = new LinkedList<Layer>(); if ( layer.isQueryable() != null ) { queryable = layer.isQueryable(); } for ( DimensionType type : layer.getDimension() ) { parser parser = new parser( new DimensionLexer( new StringReader( type.getExtent() ) ) ); parser defaultParser = null; if ( type.getDefaultValue() != null ) { defaultParser = new parser( new DimensionLexer( new StringReader( type.getDefaultValue() ) ) ); } LinkedList<?> list; LinkedList<?> defaultList; try { Symbol sym = parser.parse(); if ( sym.value instanceof Exception ) { final String msg = ( (Exception) sym.value ).getMessage(); LOG.warn( "The dimension '{}' has not been added for layer '{}' because the error" + " '{}' occurred while parsing the extent/default values.", new Object[] { type.getName(), name, msg } ); continue; } list = (LinkedList<?>) sym.value; if ( defaultParser != null ) { sym = defaultParser.parse(); if ( sym.value instanceof Exception ) { final String msg = ( (Exception) sym.value ).getMessage(); LOG.warn( "The dimension '{}' has not been added for layer '{}' because the error" + " '{}' occurred while parsing the extent/default values.", new Object[] { type.getName(), name, msg } ); continue; } } defaultList = (LinkedList<?>) sym.value; } catch ( Exception e ) { LOG.warn( "The dimension '{}' has not been added for layer '{}' because the error" + " '{}' occurred while parsing the extent/default values.", new Object[] { type.getName(), name, e.getLocalizedMessage() } ); continue; } if ( type.isIsTime() ) { try { boolean current = ( type.isCurrent() != null ) && type.isCurrent(); boolean nearest = ( type.isNearestValue() != null ) && type.isNearestValue(); boolean multiple = ( type.isMultipleValues() != null ) && type.isMultipleValues(); time = new Dimension<Date>( "time", (List<?>) parseTyped( defaultList, true ), current, nearest, multiple, "ISO8601", null, type.getProperty(), (List<?>) parseTyped( 
list, true ) ); } catch ( ParseException e ) { LOG.warn( "The TIME dimension has not been added for layer {} because the error" + " '{}' occurred while parsing the extent/default values.", name, e.getLocalizedMessage() ); } } else if ( type.isIsElevation() ) { try { boolean nearest = ( type.isNearestValue() != null ) && type.isNearestValue(); boolean multiple = ( type.isMultipleValues() != null ) && type.isMultipleValues(); dimensions.put( "elevation", new Dimension<Object>( "elevation", (List<?>) parseTyped( defaultList, false ), false, nearest, multiple, type.getUnits(), type.getUnitSymbol() == null ? "m" : type.getUnitSymbol(), type.getProperty(), (List<?>) parseTyped( list, false ) ) ); } catch ( ParseException e ) { // does not happen, as we're not parsing with time == true } } else { try { boolean nearest = ( type.isNearestValue() != null ) && type.isNearestValue(); boolean multiple = ( type.isMultipleValues() != null ) && type.isMultipleValues(); Dimension<Object> dim = new Dimension<Object>( type.getName(), (List<?>) parseTyped( type.getDefaultValue(), false ), false, nearest, multiple, type.getUnits(), type.getUnitSymbol(), type.getProperty(), (List<?>) parseTyped( list, false ) ); dimensions.put( type.getName(), dim ); } catch ( ParseException e ) { // does not happen, as we're not parsing with time == true } } } } /** * @param name */ public void setInternalName( String name ) { internalName = name; } private static Envelope parseBoundingBox( BoundingBoxType box ) { Envelope bbox = null; if ( box != null ) { Double[] points = box.getLowerCorner().toArray( new Double[] {} ); double[] min = new double[points.length]; for ( int i = 0; i < min.length; ++i ) { min[i] = points[i]; } points = box.getUpperCorner().toArray( new Double[] {} ); double[] max = new double[points.length]; for ( int i = 0; i < max.length; ++i ) { max[i] = points[i]; } bbox = new GeometryFactory().createEnvelope( min, max, CRSManager.getCRSRef( WGS84 ) ); } return bbox; } private LinkedList<ICRS> parseCoordinateSystems( String crs ) { LinkedList<ICRS> list = new LinkedList<ICRS>(); if ( crs == null ) { return list; } for ( String c : crs.split( "\\s" ) ) { if ( !c.isEmpty() ) { list.add( CRSManager.getCRSRef( c ) ); } } return list; } /** * @param f * @param evaluator * @param style * @param renderer * @param textRenderer * @param scale * @param resolution */ public static void render( final Feature f, final XPathEvaluator<Feature> evaluator, final Style style, final Renderer renderer, final TextRenderer textRenderer, final double scale, final double resolution ) { Style s = style; if ( s == null ) { s = new Style(); } s = s.filter( scale ); LinkedList<Triple<Styling, LinkedList<Geometry>, String>> evalds = s.evaluate( f, evaluator ); for ( Triple<Styling, LinkedList<Geometry>, String> evald : evalds ) { // boolean invisible = true; // inner: for ( Geometry g : evald.second ) { // if ( g instanceof Point || g instanceof MultiPoint ) { // invisible = false; // break inner; // } // if ( !( g.getEnvelope().getSpan0() < resolution && g.getEnvelope().getSpan1() < resolution ) ) { // invisible = false; // break inner; // } // } // if ( !invisible ) { if ( evald.first instanceof TextStyling ) { textRenderer.render( (TextStyling) evald.first, evald.third, evald.second ); } else { renderer.render( evald.first, evald.second ); } // } else { // LOG.debug( "Skipping invisible feature." ); // } } } /** * @return the name */ public String getName() { return name; } /** * @return the internal name (used for style lookup etc.) 
*/ public String getInternalName() { return internalName == null ? name : internalName; } /** * @param name * the name to set */ public void setName( String name ) { this.name = name; } /** * @return the title */ public String getTitle() { return title; } /** * @param title * the title to set */ public void setTitle( String title ) { this.title = title; } /** * @return the abstract_ */ public String getAbstract() { return abstract_; } /** * @param abstract_ * the abstract_ to set */ public void setAbstract( String abstract_ ) { this.abstract_ = abstract_; } /** * @return the live keywords list */ public LinkedList<LanguageStringType> getKeywords() { return keywords; } /** * @param keywords * the keywords to set (will be copied) */ public void setKeywords( Collection<LanguageStringType> keywords ) { this.keywords = new LinkedList<LanguageStringType>( keywords ); } /** * @return the bbox */ public Envelope getBbox() { try { Envelope bbox = this.bbox; if ( bbox != null && bbox.getCoordinateDimension() <= 1 ) { bbox = null; } if ( bbox != null && bbox.getCoordinateSystem() != CRSUtils.EPSG_4326 ) { bbox = new GeometryTransformer( CRSUtils.EPSG_4326 ).transform( bbox ); } if ( children != null && !children.isEmpty() ) { for ( Layer l : children ) { Envelope lbox = l.getBbox(); if ( lbox != null && lbox.getCoordinateDimension() <= 1 ) { lbox = null; } if ( lbox != null ) { lbox = new GeometryTransformer( CRSUtils.EPSG_4326 ).transform( lbox ); if ( bbox == null ) { bbox = lbox; } else { bbox = bbox.merge( lbox ); } } } } return bbox; } catch ( TransformationException e ) { LOG.info( "A transformation was not possible. Most probably a bug in your setup. Message was '{}'.", e.getLocalizedMessage() ); LOG.trace( "Stack trace:", e ); } catch ( UnknownCRSException e ) { LOG.info( "A crs was not known. Most probably a bug of some kind. 
Message was '{}'.", e.getLocalizedMessage() ); LOG.trace( "Stack trace:", e ); } return null; } /** * @param bbox * the bbox to set */ public void setBbox( Envelope bbox ) { this.bbox = bbox; } /** * @return the live list of srs */ public LinkedList<ICRS> getSrs() { return srs; } /** * @param srs * the srs to set (will be copied) */ public void setSrs( Collection<ICRS> srs ) { this.srs = new LinkedList<ICRS>( srs ); } /** * @return the scaleHint, SLD style */ public DoublePair getScaleHint() { return scaleHint; } /** * @param scaleHint * the scaleHint to set, SLD style */ public void setScaleHint( DoublePair scaleHint ) { this.scaleHint = scaleHint; } /** * @return the parent layer, or null */ public Layer getParent() { return parent; } /** * @param parent * the parent to set */ public void setParent( Layer parent ) { this.parent = parent; } /** * @return the live list of children */ public LinkedList<Layer> getChildren() { return children; } /** * @param children * the new children (will be copied) */ public void setChildren( List<Layer> children ) { this.children = new LinkedList<Layer>( children ); } /** * @param gm * @param style * @return a buffered image containing the map, and warning headers * @throws MissingDimensionValue * @throws InvalidDimensionValue */ public Pair<BufferedImage, LinkedList<String>> paintMap( GetMap gm, Style style ) throws MissingDimensionValue, InvalidDimensionValue { BufferedImage img = prepareImage( gm ); Graphics2D g = img.createGraphics(); LinkedList<String> list = paintMap( g, gm, style ); g.dispose(); return new Pair<BufferedImage, LinkedList<String>>( img, list ); } /** * @param g * @param gm * @param style * @return a list of warning headers (currently only used for dimension warnings) * @throws MissingDimensionValue * @throws InvalidDimensionValue */ public abstract LinkedList<String> paintMap( Graphics2D g, GetMap gm, Style style ) throws MissingDimensionValue, InvalidDimensionValue; /** * @param fi * @param style * @return a collection of matching features and a list of warning headers (currently only used for dimension * warnings) * @throws MissingDimensionValue * @throws InvalidDimensionValue */ public abstract Pair<FeatureCollection, LinkedList<String>> getFeatures( GetFeatureInfo fi, Style style ) throws MissingDimensionValue, InvalidDimensionValue; /** * @param name * @return null, or the layer with the given name */ public Layer getChild( String name ) { for ( Layer l : children ) { if ( l.getName() != null && l.getName().equals( name ) ) { return l; } } return null; } /** * */ public void close() { // nothing to do here } /** * @param layer */ public void addOrReplace( Layer layer ) { ListIterator<Layer> iter = children.listIterator(); while ( iter.hasNext() ) { Layer next = iter.next(); if ( next.name != null && next.name.equals( layer.getName() ) ) { next.close(); iter.set( layer ); return; } } children.add( layer ); } /** * @return true if it's data source is currently available */ public boolean isAvailable() { return true; } /** * @param l */ public void remove( Layer l ) { children.remove( l ); } /** * @return the feature type, or null, if not applicable */ public abstract FeatureType getFeatureType(); /** * @return all dimensions including time and elevation (if applicable) */ public Map<String, Dimension<?>> getDimensions() { HashMap<String, Dimension<?>> dims = new HashMap<String, Dimension<?>>(); if ( time != null ) { dims.put( "time", time ); } dims.putAll( dimensions ); return dims; } /** * @return false, if queryable has been 
turned off */ public boolean isQueryable() { return queryable; } }
deegree-services/deegree-services-wms/src/main/java/org/deegree/services/wms/model/layers/Layer.java
//$HeadURL$ /*---------------------------------------------------------------------------- This file is part of deegree, http://deegree.org/ Copyright (C) 2001-2009 by: Department of Geography, University of Bonn and lat/lon GmbH This library is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version. This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. You should have received a copy of the GNU Lesser General Public License along with this library; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA Contact information: lat/lon GmbH Aennchenstr. 19, 53177 Bonn Germany http://lat-lon.de/ Department of Geography, University of Bonn Prof. Dr. Klaus Greve Postfach 1147, 53001 Bonn Germany http://www.geographie.uni-bonn.de/deegree/ e-mail: [email protected] ----------------------------------------------------------------------------*/ package org.deegree.services.wms.model.layers; import static org.deegree.cs.coordinatesystems.GeographicCRS.WGS84; import static org.deegree.services.wms.MapService.prepareImage; import static org.deegree.services.wms.model.Dimension.parseTyped; import static org.slf4j.LoggerFactory.getLogger; import java.awt.Graphics2D; import java.awt.image.BufferedImage; import java.io.StringReader; import java.text.ParseException; import java.util.Collection; import java.util.Date; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.ListIterator; import java.util.Map; import java_cup.runtime.Symbol; import org.deegree.commons.annotations.LoggingNotes; import org.deegree.commons.utils.DoublePair; import org.deegree.commons.utils.Pair; import org.deegree.commons.utils.Triple; import org.deegree.cs.CRSUtils; import org.deegree.cs.coordinatesystems.ICRS; import org.deegree.cs.exceptions.TransformationException; import org.deegree.cs.exceptions.UnknownCRSException; import org.deegree.cs.persistence.CRSManager; import org.deegree.feature.Feature; import org.deegree.feature.FeatureCollection; import org.deegree.feature.types.FeatureType; import org.deegree.filter.XPathEvaluator; import org.deegree.geometry.Envelope; import org.deegree.geometry.Geometry; import org.deegree.geometry.GeometryFactory; import org.deegree.geometry.GeometryTransformer; import org.deegree.protocol.wms.dims.DimensionLexer; import org.deegree.protocol.wms.dims.parser; import org.deegree.rendering.r2d.Renderer; import org.deegree.rendering.r2d.TextRenderer; import org.deegree.rendering.r2d.se.unevaluated.Style; import org.deegree.rendering.r2d.styling.Styling; import org.deegree.rendering.r2d.styling.TextStyling; import org.deegree.services.jaxb.wms.AbstractLayerType; import org.deegree.services.jaxb.wms.BoundingBoxType; import org.deegree.services.jaxb.wms.DimensionType; import org.deegree.services.jaxb.wms.LanguageStringType; import org.deegree.services.wms.MapService; import org.deegree.services.wms.WMSException.InvalidDimensionValue; import org.deegree.services.wms.WMSException.MissingDimensionValue; import org.deegree.services.wms.controller.ops.GetFeatureInfo; import org.deegree.services.wms.controller.ops.GetMap; import 
org.deegree.services.wms.model.Dimension; import org.slf4j.Logger; /** * <code>Layer</code> * * @author <a href="mailto:[email protected]">Andreas Schmitz</a> * @author last edited by: $Author$ * * @version $Revision$, $Date$ */ @LoggingNotes(warn = "logs information about dimension handling") public abstract class Layer { private static final Logger LOG = getLogger( Layer.class ); private String name; private String title; private String abstract_; private LinkedList<LanguageStringType> keywords; private Envelope bbox; private LinkedList<ICRS> srs; private DoublePair scaleHint; private LinkedList<Layer> children; private Layer parent; Dimension<Date> time; HashMap<String, Dimension<Object>> dimensions = new HashMap<String, Dimension<Object>>(); private String internalName; private boolean queryable = true; protected MapService service; protected Layer( MapService service, String name, String title, Layer parent ) { this.service = service; this.name = name; this.title = title; this.parent = parent; keywords = new LinkedList<LanguageStringType>(); srs = new LinkedList<ICRS>(); children = new LinkedList<Layer>(); } protected Layer( MapService service, AbstractLayerType layer, Layer parent ) { this.service = service; name = layer.getName(); title = layer.getTitle(); abstract_ = layer.getAbstract(); keywords = layer.getKeywords() == null ? new LinkedList<LanguageStringType>() : new LinkedList<LanguageStringType>( layer.getKeywords().getKeyword() ); bbox = parseBoundingBox( layer.getBoundingBox() ); srs = parseCoordinateSystems( layer.getCRS() ); if ( srs == null ) { srs = new LinkedList<ICRS>(); } this.parent = parent; children = new LinkedList<Layer>(); if ( layer.isQueryable() != null ) { queryable = layer.isQueryable(); } for ( DimensionType type : layer.getDimension() ) { parser parser = new parser( new DimensionLexer( new StringReader( type.getExtent() ) ) ); parser defaultParser = null; if ( type.getDefaultValue() != null ) { defaultParser = new parser( new DimensionLexer( new StringReader( type.getDefaultValue() ) ) ); } LinkedList<?> list; LinkedList<?> defaultList; try { Symbol sym = parser.parse(); if ( sym.value instanceof Exception ) { final String msg = ( (Exception) sym.value ).getMessage(); LOG.warn( "The dimension '{}' has not been added for layer '{}' because the error" + " '{}' occurred while parsing the extent/default values.", new Object[] { type.getName(), name, msg } ); continue; } list = (LinkedList<?>) sym.value; if ( defaultParser != null ) { sym = defaultParser.parse(); if ( sym.value instanceof Exception ) { final String msg = ( (Exception) sym.value ).getMessage(); LOG.warn( "The dimension '{}' has not been added for layer '{}' because the error" + " '{}' occurred while parsing the extent/default values.", new Object[] { type.getName(), name, msg } ); continue; } } defaultList = (LinkedList<?>) sym.value; } catch ( Exception e ) { LOG.warn( "The dimension '{}' has not been added for layer '{}' because the error" + " '{}' occurred while parsing the extent/default values.", new Object[] { type.getName(), name, e.getLocalizedMessage() } ); continue; } if ( type.isIsTime() ) { try { boolean current = ( type.isCurrent() != null ) && type.isCurrent(); boolean nearest = ( type.isNearestValue() != null ) && type.isNearestValue(); boolean multiple = ( type.isMultipleValues() != null ) && type.isMultipleValues(); time = new Dimension<Date>( "time", (List<?>) parseTyped( defaultList, true ), current, nearest, multiple, "ISO8601", null, type.getProperty(), (List<?>) parseTyped( 
list, true ) ); } catch ( ParseException e ) { LOG.warn( "The TIME dimension has not been added for layer {} because the error" + " '{}' occurred while parsing the extent/default values.", name, e.getLocalizedMessage() ); } } else if ( type.isIsElevation() ) { try { boolean nearest = ( type.isNearestValue() != null ) && type.isNearestValue(); boolean multiple = ( type.isMultipleValues() != null ) && type.isMultipleValues(); dimensions.put( "elevation", new Dimension<Object>( "elevation", (List<?>) parseTyped( defaultList, false ), false, nearest, multiple, type.getUnits(), type.getUnitSymbol() == null ? "m" : type.getUnitSymbol(), type.getProperty(), (List<?>) parseTyped( list, false ) ) ); } catch ( ParseException e ) { // does not happen, as we're not parsing with time == true } } else { try { boolean nearest = ( type.isNearestValue() != null ) && type.isNearestValue(); boolean multiple = ( type.isMultipleValues() != null ) && type.isMultipleValues(); Dimension<Object> dim = new Dimension<Object>( type.getName(), (List<?>) parseTyped( type.getDefaultValue(), false ), false, nearest, multiple, type.getUnits(), type.getUnitSymbol(), type.getProperty(), (List<?>) parseTyped( list, false ) ); dimensions.put( type.getName(), dim ); } catch ( ParseException e ) { // does not happen, as we're not parsing with time == true } } } } /** * @param name */ public void setInternalName( String name ) { internalName = name; } private static Envelope parseBoundingBox( BoundingBoxType box ) { Envelope bbox = null; if ( box != null ) { Double[] points = box.getLowerCorner().toArray( new Double[] {} ); double[] min = new double[points.length]; for ( int i = 0; i < min.length; ++i ) { min[i] = points[i]; } points = box.getUpperCorner().toArray( new Double[] {} ); double[] max = new double[points.length]; for ( int i = 0; i < max.length; ++i ) { max[i] = points[i]; } bbox = new GeometryFactory().createEnvelope( min, max, CRSManager.getCRSRef( WGS84 ) ); } return bbox; } private LinkedList<ICRS> parseCoordinateSystems( String crs ) { LinkedList<ICRS> list = new LinkedList<ICRS>(); if ( crs == null ) { return list; } for ( String c : crs.split( "\\s" ) ) { list.add( CRSManager.getCRSRef( c ) ); } return list; } /** * @param f * @param evaluator * @param style * @param renderer * @param textRenderer * @param scale * @param resolution */ public static void render( final Feature f, final XPathEvaluator<Feature> evaluator, final Style style, final Renderer renderer, final TextRenderer textRenderer, final double scale, final double resolution ) { Style s = style; if ( s == null ) { s = new Style(); } s = s.filter( scale ); LinkedList<Triple<Styling, LinkedList<Geometry>, String>> evalds = s.evaluate( f, evaluator ); for ( Triple<Styling, LinkedList<Geometry>, String> evald : evalds ) { // boolean invisible = true; // inner: for ( Geometry g : evald.second ) { // if ( g instanceof Point || g instanceof MultiPoint ) { // invisible = false; // break inner; // } // if ( !( g.getEnvelope().getSpan0() < resolution && g.getEnvelope().getSpan1() < resolution ) ) { // invisible = false; // break inner; // } // } // if ( !invisible ) { if ( evald.first instanceof TextStyling ) { textRenderer.render( (TextStyling) evald.first, evald.third, evald.second ); } else { renderer.render( evald.first, evald.second ); } // } else { // LOG.debug( "Skipping invisible feature." ); // } } } /** * @return the name */ public String getName() { return name; } /** * @return the internal name (used for style lookup etc.) 
*/ public String getInternalName() { return internalName == null ? name : internalName; } /** * @param name * the name to set */ public void setName( String name ) { this.name = name; } /** * @return the title */ public String getTitle() { return title; } /** * @param title * the title to set */ public void setTitle( String title ) { this.title = title; } /** * @return the abstract_ */ public String getAbstract() { return abstract_; } /** * @param abstract_ * the abstract_ to set */ public void setAbstract( String abstract_ ) { this.abstract_ = abstract_; } /** * @return the live keywords list */ public LinkedList<LanguageStringType> getKeywords() { return keywords; } /** * @param keywords * the keywords to set (will be copied) */ public void setKeywords( Collection<LanguageStringType> keywords ) { this.keywords = new LinkedList<LanguageStringType>( keywords ); } /** * @return the bbox */ public Envelope getBbox() { try { Envelope bbox = this.bbox; if ( bbox != null && bbox.getCoordinateDimension() <= 1 ) { bbox = null; } if ( bbox != null && bbox.getCoordinateSystem() != CRSUtils.EPSG_4326 ) { bbox = new GeometryTransformer( CRSUtils.EPSG_4326 ).transform( bbox ); } if ( children != null && !children.isEmpty() ) { for ( Layer l : children ) { Envelope lbox = l.getBbox(); if ( lbox != null && lbox.getCoordinateDimension() <= 1 ) { lbox = null; } if ( lbox != null ) { lbox = new GeometryTransformer( CRSUtils.EPSG_4326 ).transform( lbox ); if ( bbox == null ) { bbox = lbox; } else { bbox = bbox.merge( lbox ); } } } } return bbox; } catch ( TransformationException e ) { LOG.info( "A transformation was not possible. Most probably a bug in your setup. Message was '{}'.", e.getLocalizedMessage() ); LOG.trace( "Stack trace:", e ); } catch ( UnknownCRSException e ) { LOG.info( "A crs was not known. Most probably a bug of some kind. 
Message was '{}'.", e.getLocalizedMessage() ); LOG.trace( "Stack trace:", e ); } return null; } /** * @param bbox * the bbox to set */ public void setBbox( Envelope bbox ) { this.bbox = bbox; } /** * @return the live list of srs */ public LinkedList<ICRS> getSrs() { return srs; } /** * @param srs * the srs to set (will be copied) */ public void setSrs( Collection<ICRS> srs ) { this.srs = new LinkedList<ICRS>( srs ); } /** * @return the scaleHint, SLD style */ public DoublePair getScaleHint() { return scaleHint; } /** * @param scaleHint * the scaleHint to set, SLD style */ public void setScaleHint( DoublePair scaleHint ) { this.scaleHint = scaleHint; } /** * @return the parent layer, or null */ public Layer getParent() { return parent; } /** * @param parent * the parent to set */ public void setParent( Layer parent ) { this.parent = parent; } /** * @return the live list of children */ public LinkedList<Layer> getChildren() { return children; } /** * @param children * the new children (will be copied) */ public void setChildren( List<Layer> children ) { this.children = new LinkedList<Layer>( children ); } /** * @param gm * @param style * @return a buffered image containing the map, and warning headers * @throws MissingDimensionValue * @throws InvalidDimensionValue */ public Pair<BufferedImage, LinkedList<String>> paintMap( GetMap gm, Style style ) throws MissingDimensionValue, InvalidDimensionValue { BufferedImage img = prepareImage( gm ); Graphics2D g = img.createGraphics(); LinkedList<String> list = paintMap( g, gm, style ); g.dispose(); return new Pair<BufferedImage, LinkedList<String>>( img, list ); } /** * @param g * @param gm * @param style * @return a list of warning headers (currently only used for dimension warnings) * @throws MissingDimensionValue * @throws InvalidDimensionValue */ public abstract LinkedList<String> paintMap( Graphics2D g, GetMap gm, Style style ) throws MissingDimensionValue, InvalidDimensionValue; /** * @param fi * @param style * @return a collection of matching features and a list of warning headers (currently only used for dimension * warnings) * @throws MissingDimensionValue * @throws InvalidDimensionValue */ public abstract Pair<FeatureCollection, LinkedList<String>> getFeatures( GetFeatureInfo fi, Style style ) throws MissingDimensionValue, InvalidDimensionValue; /** * @param name * @return null, or the layer with the given name */ public Layer getChild( String name ) { for ( Layer l : children ) { if ( l.getName() != null && l.getName().equals( name ) ) { return l; } } return null; } /** * */ public void close() { // nothing to do here } /** * @param layer */ public void addOrReplace( Layer layer ) { ListIterator<Layer> iter = children.listIterator(); while ( iter.hasNext() ) { Layer next = iter.next(); if ( next.name != null && next.name.equals( layer.getName() ) ) { next.close(); iter.set( layer ); return; } } children.add( layer ); } /** * @return true if it's data source is currently available */ public boolean isAvailable() { return true; } /** * @param l */ public void remove( Layer l ) { children.remove( l ); } /** * @return the feature type, or null, if not applicable */ public abstract FeatureType getFeatureType(); /** * @return all dimensions including time and elevation (if applicable) */ public Map<String, Dimension<?>> getDimensions() { HashMap<String, Dimension<?>> dims = new HashMap<String, Dimension<?>>(); if ( time != null ) { dims.put( "time", time ); } dims.putAll( dimensions ); return dims; } /** * @return false, if queryable has been 
turned off */ public boolean isQueryable() { return queryable; } }
fixed error when parsing crs list in config
deegree-services/deegree-services-wms/src/main/java/org/deegree/services/wms/model/layers/Layer.java
fixed error when parsing crs list in config
<ide><path>eegree-services/deegree-services-wms/src/main/java/org/deegree/services/wms/model/layers/Layer.java <ide> } <ide> <ide> for ( String c : crs.split( "\\s" ) ) { <del> list.add( CRSManager.getCRSRef( c ) ); <add> if ( !c.isEmpty() ) { <add> list.add( CRSManager.getCRSRef( c ) ); <add> } <ide> } <ide> <ide> return list;
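The guard added in this diff matters because String.split("\\s") keeps leading and interior empty tokens. A small standalone illustration of that behavior (plain JDK only; the sample CRS codes are placeholders and no deegree classes are involved):

public class CrsSplitDemo {
    public static void main(String[] args) {
        // A whitespace-separated CRS list as it might appear in an XML config,
        // with leading whitespace and a line break plus indentation between entries.
        String crs = " EPSG:4326\n  EPSG:25832";
        for (String c : crs.split("\\s")) {
            // Without the isEmpty() check added above, the empty tokens printed here
            // would be passed on to CRSManager.getCRSRef(...), yielding broken CRS references.
            System.out.println("[" + c + "] empty=" + c.isEmpty());
        }
    }
}

Skipping the empty tokens leaves only the two real CRS identifiers in the returned list.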
Java
apache-2.0
error: pathspec 'src/me/lexjoy/utils/MatchUtils.java' did not match any file(s) known to git
8a8313173c258eb3f5426302e4da94a5cb0a996d
1
lexjoy/lex-utils
package me.lexjoy.utils; import java.util.Collection; public class MatchUtils { public static boolean isEmpty(Object o) { if (o == null) { return true; } Class<?> oType = o.getClass(); if (oType.isAssignableFrom(String.class)) { return "".equals(o); } if (oType.isArray()) { return isEqual(ReflectUtils.getFieldValue(o, "length"), 0); } if (oType.isAssignableFrom(Collection.class)) { return Collection.class.cast(o).isEmpty(); } return false; } public static boolean checkEmpty(Object... args) { if (args == null || args.length == 0) { return true; } for (Object o : args) { if (isEmpty(o)) { continue; } return false; } return true; } public static boolean isEqual(Object e1, Object e2) { return e1 == null ? e2 == null : e1.equals(e2); } private MatchUtils() {} }
src/me/lexjoy/utils/MatchUtils.java
Create MatchUtils.java
src/me/lexjoy/utils/MatchUtils.java
Create MatchUtils.java
<ide><path>rc/me/lexjoy/utils/MatchUtils.java <add>package me.lexjoy.utils; <add> <add>import java.util.Collection; <add> <add>public class MatchUtils { <add> <add> public static boolean isEmpty(Object o) { <add> if (o == null) { <add> return true; <add> } <add> Class<?> oType = o.getClass(); <add> <add> if (oType.isAssignableFrom(String.class)) { <add> return "".equals(o); <add> } <add> if (oType.isArray()) { <add> return isEqual(ReflectUtils.getFieldValue(o, "length"), 0); <add> } <add> if (oType.isAssignableFrom(Collection.class)) { <add> return Collection.class.cast(o).isEmpty(); <add> } <add> return false; <add> } <add> <add> public static boolean checkEmpty(Object... args) { <add> if (args == null || args.length == 0) { <add> return true; <add> } <add> for (Object o : args) { <add> if (isEmpty(o)) { <add> continue; <add> } <add> return false; <add> } <add> return true; <add> } <add> <add> public static boolean isEqual(Object e1, Object e2) { <add> return e1 == null ? e2 == null : e1.equals(e2); <add> } <add> <add> private MatchUtils() {} <add> <add>}
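A brief usage sketch for the class introduced above (an assumption: MatchUtils and the ReflectUtils helper it references are available on the classpath from the same repository; only the null and String code paths are exercised here):

import me.lexjoy.utils.MatchUtils;

public class MatchUtilsDemo {
    public static void main(String[] args) {
        // isEmpty: null and the empty string are treated as empty.
        System.out.println(MatchUtils.isEmpty(null));         // true
        System.out.println(MatchUtils.isEmpty(""));           // true
        System.out.println(MatchUtils.isEmpty("text"));       // false

        // checkEmpty: true only when every argument is empty (or no arguments are given).
        System.out.println(MatchUtils.checkEmpty(null, ""));  // true
        System.out.println(MatchUtils.checkEmpty("", "x"));   // false

        // isEqual: null-safe equality check.
        System.out.println(MatchUtils.isEqual(null, null));   // true
        System.out.println(MatchUtils.isEqual("a", "b"));     // false
    }
}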
Java
mit
96f0c79f1b24a9fc11a3b4a85b47cf92271ee718
0
devgateway/ocvn,devgateway/oc-explorer,devgateway/ocua,devgateway/ocvn,devgateway/oc-explorer,devgateway/dg-toolkit,devgateway/oc-explorer,devgateway/ocua,devgateway/dg-toolkit,devgateway/dg-toolkit,devgateway/ocua,devgateway/ocvn
package org.devgateway.toolkit.web.spring.util; import org.devgateway.toolkit.web.spring.AsyncControllerLookupService; /** * @see AsyncControllerLookupService * @author mihai * * @param <RET> * @param <BEANPARAM> */ public abstract class AsyncBeanParamControllerMethodCallable<RET, BEANPARAM> { public abstract RET invokeControllerMethod(final BEANPARAM filter); }
web/src/main/java/org/devgateway/toolkit/web/spring/util/AsyncBeanParamControllerMethodCallable.java
package org.devgateway.toolkit.web.spring.util; import org.devgateway.toolkit.web.spring.AsyncControllerLookupService; /** * @see AsyncControllerLookupService * @author mihai * * @param <RET> * @param <BEANPARAM> */ public abstract class AsyncBeanParamControllerMethodCallable<RET, BEANPARAM> { public abstract RET invokeControllerMethod(BEANPARAM filter); }
added final #108
web/src/main/java/org/devgateway/toolkit/web/spring/util/AsyncBeanParamControllerMethodCallable.java
added final #108
<ide><path>eb/src/main/java/org/devgateway/toolkit/web/spring/util/AsyncBeanParamControllerMethodCallable.java <ide> * @param <BEANPARAM> <ide> */ <ide> public abstract class AsyncBeanParamControllerMethodCallable<RET, BEANPARAM> { <del> public abstract RET invokeControllerMethod(BEANPARAM filter); <add> public abstract RET invokeControllerMethod(final BEANPARAM filter); <ide> }
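For context, a minimal sketch of what the added final keyword does and does not do (illustrative only; a checkstyle FinalParameters rule is a plausible but unconfirmed motivation for #108):

public class FinalParamDemo {
    // 'final' on a parameter is not part of the method signature: it does not
    // constrain callers or overriding methods, it only forbids reassignment of
    // the parameter inside this method body.
    static int lengthOf(final String filter) {
        // filter = null;  // would not compile: cannot assign a value to a final parameter
        return filter == null ? 0 : filter.length();
    }

    public static void main(String[] args) {
        System.out.println(lengthOf("abc")); // 3
    }
}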
JavaScript
mit
20a9c3332e1f88b5f03119f747c0ada7e646c470
0
vusion/proto-ui,vusion/proto-ui
import Popper from '@vusion/popper.js'; import MEmitter from '../m-emitter.vue'; import ev from '../../utils/event'; export const MPopper = { name: 'm-popper', mixins: [MEmitter], isPopper: true, props: { opened: { type: Boolean, default: false }, trigger: { type: String, default: 'click' }, triggerElement: { type: [String, HTMLElement, Function], default: 'reference' }, reference: { type: [String, HTMLElement, Function], default: 'context-parent', validator: (value) => { if (typeof value !== 'string') return true; else return ['parent', '$parent', 'context-parent', 'prev', 'next'].includes(value); } }, placement: { type: String, default: 'bottom-start', validator: (value) => /^(top|bottom|left|right)(-start|-end)?$/.test(value), }, hoverDelay: { type: Number, default: 0 }, hideDelay: { type: Number, default: 0 }, appendTo: { type: String, default: 'body', validator: (value) => ['body', 'reference'].includes(value) }, boundariesElement: { default: 'window' }, arrowElement: { type: String, default: '[u-arrow]' }, escapeWithReference: { type: Boolean, default: true }, followCursor: { type: [Boolean, Number, Object], default: false }, offset: { type: [Number, String], default: 0 }, options: { type: Object, default() { return { modifiers: {}, }; }, }, disabled: { type: Boolean, default: false }, }, data() { return { currentOpened: this.opened, referenceEl: undefined, triggers: [], // 所有的触发器,为了方便,第一项始终为默认的 // popper: undefined, // 在出现滚动条的时候 需要特殊处理下 offEvents: [], }; }, computed: { currentFollowCursor() { if (typeof this.followCursor === 'object') return this.followCursor; else { let followCursor; if (typeof this.followCursor === 'boolean') followCursor = { offsetX: 10, offsetY: 10 }; else if (typeof this.followCursor === 'number') followCursor = { offsetX: this.followCursor, offsetY: this.followCursor }; if (this.placement.startsWith('top')) followCursor.offsetY = -followCursor.offsetY; if (this.placement.startsWith('left')) followCursor.offsetX = -followCursor.offsetX; if (this.placement === 'top' || this.placement === 'bottom') followCursor.offsetX = 0; if (this.placement === 'top-end' || this.placement === 'bottom-end') followCursor.offsetX = -followCursor.offsetX; if (this.placement === 'left' || this.placement === 'right') followCursor.offsetY = 0; if (this.placement === 'left-end' || this.placement === 'right-end') followCursor.offsetY = -followCursor.offsetY; return followCursor; } }, }, watch: { opened(opened) { this.currentOpened = opened; }, currentOpened(currentOpened) { // 不直接用样式的显隐,而是用 popper 的 create 和 destroy,是因为 popper 有可能是从不同的地方触发的,reference 对象会变 if (currentOpened) { this.createPopper(); this.$emit('open', undefined, this); } else { this.destroyPopper(); this.$emit('close', undefined, this); } }, reference() { /** * 问题:现在的 popper 不支持动态改变 reference,导致 popper 的位置显示有问题 * 解决方法:暂时在 popper.js 文档中未找到理想的解决方案,采取先删除 popper,再新创建 popper 的方法修复位置问题, * 后面需要研究下 popper.js 的源码 */ this.destroyPopper(); this.referenceEl = this.getReferenceEl(); this.createPopper(); }, }, mounted() { // 字符串类型的 reference 只有首次获取是有效的,因为之后节点会被插到别的地方 this.referenceEl = this.getReferenceEl(); const triggerEl = this.getTriggerEl(this.referenceEl); this.addTrigger(triggerEl, this.trigger); this.currentOpened && this.createPopper(); }, beforeDestroy() { this.destroyPopper(); // 取消绑定事件 this.offEvents.forEach((off) => off()); }, methods: { getOptions() { const options = Object.assign({}, this.options, { placement: this.placement, }); // 自定义options 传入offset值情况 if (!options.modifiers.offset && this.offset) { 
options.modifiers.offset = { offset: this.offset, }; } options.escapeWithReference = this.escapeWithReference; options.modifiers.arrow = { element: this.arrowElement }; options.modifiers.preventOverflow = { boundariesElement: this.boundariesElement }; return options; }, getReferenceEl() { if (this.reference instanceof HTMLElement) return this.reference; else if (this.reference instanceof Function) return this.reference(this.$el); else if (this.$el) { if (this.reference === 'parent') return this.$el.parentElement; else if (this.reference === '$parent') return this.$parent.$el; else if (this.reference === 'context-parent') { // 求上下文中的 parent if (this.$parent === this.$vnode.context) return this.$el.parentElement; // Vue 的 vnode.parent 没有连接起来,需要自己找,不知道有没有更好的方法 let parentVNode = this.$parent._vnode; while (parentVNode && !parentVNode.children.includes(this.$vnode)) parentVNode = parentVNode.children.find((child) => child.elm.contains(this.$el)); // if (!parentVNode) if (parentVNode.context === this.$vnode.context) return parentVNode.elm; // 否则,找第一个上下文一致的组件 let parentVM = this.$parent; while (parentVM && parentVM.$vnode.context !== this.$vnode.context) parentVM = parentVM.$parent; return parentVM.$el; } else if (this.reference === 'prev') return this.$el.previousElementSibling; else if (this.reference === 'next') return this.$el.nextElementSibling; } }, getTriggerEl(referenceEl) { if (this.triggerElement === 'reference') return referenceEl; else if (this.triggerElement instanceof HTMLElement) return this.triggerElement; else if (this.triggerElement instanceof Function) return this.triggerElement(referenceEl); }, /** * 添加触发器时,绑定事件 */ addTrigger(el, event) { const popperEl = this.$el; // @TODO: support directives const arr = event.split('.'); event = arr[0]; this.triggers.push({ el, event }); // 绑定事件 this.followCursor && this.offEvents.push(ev.on(document, 'mousemove', (e) => this.updatePositionByCursor(e, el))); if (event === 'click') this.offEvents.push(ev.on(el, 'click', (e) => { if (arr[1] === 'stop') e.stopPropagation(); else if (arr[1] === 'prevent') e.preventDefault(); this.toggle(); this.followCursor && this.$nextTick(() => this.updatePositionByCursor(e, el)); })); else if (event === 'hover') { let timer; this.offEvents.push(ev.on(el, 'mouseenter', (e) => { timer = clearTimeout(timer); setTimeout(() => { this.open(); this.followCursor && this.$nextTick(() => this.updatePositionByCursor(e, el)); }, this.hoverDelay); })); this.offEvents.push(ev.on(document, 'mouseover', (e) => { if (this.currentOpened && !timer && !el.contains(e.target) && !popperEl.contains(e.target)) timer = setTimeout(() => this.close(), this.hideDelay); })); } else if (event === 'double-click') this.offEvents.push(ev.on(el, 'dblclick', (e) => { this.toggle(); this.followCursor && this.$nextTick(() => this.updatePositionByCursor(e, el)); })); else if (event === 'right-click') { this.offEvents.push(ev.on(el, 'contextmenu', (e) => { e.preventDefault(); this.toggle(); this.followCursor && this.$nextTick(() => this.updatePositionByCursor(e, el)); })); } // @TODO: 有没有必要搞 focus-in this.offEvents.push(ev.on(document, 'mousedown', (e) => { !el.contains(e.target) && !popperEl.contains(e.target) && this.close(); })); }, createPopper() { const referenceEl = this.referenceEl; const popperEl = this.$el; if (this.appendTo === 'body') document.body.appendChild(popperEl); else if (this.appendTo === 'reference') referenceEl.appendChild(popperEl); const options = this.getOptions(); this.popper = new Popper(referenceEl, popperEl, options); 
}, update() { this.popper && this.popper.update(); }, scheduleUpdate() { this.popper && this.popper.scheduleUpdate(); }, destroyPopper() { const referenceEl = this.referenceEl; const popperEl = this.$el; if (this.appendTo === 'body') popperEl.parentElement === document.body && document.body.removeChild(popperEl); else if (this.appendTo === 'reference') popperEl.parentElement === referenceEl && referenceEl.removeChild(popperEl); this.popper && this.popper.destroy(); this.popper = undefined; }, updatePositionByCursor(e, el) { // @TODO: 两种 offset 属性有些冗余 if (e.target !== el || !this.popper) return; const top = e.clientY + this.currentFollowCursor.offsetY; const left = e.clientX + this.currentFollowCursor.offsetX; const right = e.clientX + this.currentFollowCursor.offsetX; const bottom = e.clientY + this.currentFollowCursor.offsetY; this.popper.reference = { getBoundingClientRect: () => ({ width: 0, height: 0, top, left, right, bottom, }), clientWidth: 0, clientHeight: 0, }; this.popper.scheduleUpdate(); }, open() { // Check if enabled if (this.disabled) return; // Prevent replication if (this.currentOpened) return; // Emit a `before-` event with preventDefault() if (this.$emitPrevent('before-open', undefined, this)) return; // Assign and sync `opened` this.currentOpened = true; this.$emit('update:opened', true, this); // Emit `after-` events // this.$emit('open', undefined, this); }, close() { // Check if enabled if (this.disabled) return; // Prevent replication if (!this.currentOpened) return; // Emit a `before-` event with preventDefault() if (this.$emitPrevent('before-close', undefined, this)) return; // Assign and sync `opened` this.currentOpened = false; this.$emit('update:opened', false, this); // Emit `after-` events // this.$emit('close', undefined, this); }, toggle(opened) { // Method overloading if (opened === undefined) opened = !this.currentOpened; // @deprecated start if (this.disabled) return; const oldOpened = this.currentOpened; if (opened === oldOpened) return; if (this.$emitPrevent('before-toggle', { opened }, this)) return; opened ? this.open() : this.close(); this.$emit('toggle', { opened }, this); // @deprecated end }, }, }; export default MPopper;
src/components/m-popper.vue/index.js
import Popper from '@vusion/popper.js'; import MEmitter from '../m-emitter.vue'; import ev from '../../utils/event'; export const MPopper = { name: 'm-popper', mixins: [MEmitter], isPopper: true, props: { opened: { type: Boolean, default: false }, trigger: { type: String, default: 'click' }, triggerElement: { type: [String, HTMLElement, Function], default: 'reference' }, reference: { type: [String, HTMLElement, Function], default: 'context-parent', validator: (value) => { if (typeof value !== 'string') return true; else return ['parent', '$parent', 'context-parent', 'prev', 'next'].includes(value); } }, placement: { type: String, default: 'bottom-start', validator: (value) => /^(top|bottom|left|right)(-start|-end)?$/.test(value), }, hoverDelay: { type: Number, default: 0 }, hideDelay: { type: Number, default: 0 }, appendTo: { type: String, default: 'body', validator: (value) => ['body', 'reference'].includes(value) }, boundariesElement: { default: 'window' }, arrowElement: { type: String, default: '[u-arrow]' }, escapeWithReference: { type: Boolean, default: true }, followCursor: { type: [Boolean, Number, Object], default: false }, offset: { type: [Number, String], default: 0 }, options: { type: Object, default() { return { modifiers: {}, }; }, }, disabled: { type: Boolean, default: false }, }, data() { return { currentOpened: this.opened, referenceEl: undefined, triggers: [], // 所有的触发器,为了方便,第一项始终为默认的 // popper: undefined, // 在出现滚动条的时候 需要特殊处理下 offEvents: [], }; }, computed: { currentFollowCursor() { if (typeof this.followCursor === 'object') return this.followCursor; else { let followCursor; if (typeof this.followCursor === 'boolean') followCursor = { offsetX: 10, offsetY: 10 }; else if (typeof this.followCursor === 'number') followCursor = { offsetX: this.followCursor, offsetY: this.followCursor }; if (this.placement.startsWith('top')) followCursor.offsetY = -followCursor.offsetY; if (this.placement.startsWith('left')) followCursor.offsetX = -followCursor.offsetX; if (this.placement === 'top' || this.placement === 'bottom') followCursor.offsetX = 0; if (this.placement === 'top-end' || this.placement === 'bottom-end') followCursor.offsetX = -followCursor.offsetX; if (this.placement === 'left' || this.placement === 'right') followCursor.offsetY = 0; if (this.placement === 'left-end' || this.placement === 'right-end') followCursor.offsetY = -followCursor.offsetY; return followCursor; } }, }, watch: { opened(opened) { this.currentOpened = opened; }, currentOpened(currentOpened) { // 不直接用样式的显隐,而是用 popper 的 create 和 destroy,是因为 popper 有可能是从不同的地方触发的,reference 对象会变 if (currentOpened) { this.createPopper(); this.$emit('open', undefined, this); } else { this.destroyPopper(); this.$emit('close', undefined, this); } }, reference() { this.referenceEl = this.getReferenceEl(); }, }, mounted() { // 字符串类型的 reference 只有首次获取是有效的,因为之后节点会被插到别的地方 this.referenceEl = this.getReferenceEl(); const triggerEl = this.getTriggerEl(this.referenceEl); this.addTrigger(triggerEl, this.trigger); this.currentOpened && this.createPopper(); }, beforeDestroy() { this.destroyPopper(); // 取消绑定事件 this.offEvents.forEach((off) => off()); }, methods: { getOptions() { const options = Object.assign({}, this.options, { placement: this.placement, }); // 自定义options 传入offset值情况 if (!options.modifiers.offset && this.offset) { options.modifiers.offset = { offset: this.offset, }; } options.escapeWithReference = this.escapeWithReference; options.modifiers.arrow = { element: this.arrowElement }; options.modifiers.preventOverflow = { 
boundariesElement: this.boundariesElement }; return options; }, getReferenceEl() { if (this.reference instanceof HTMLElement) return this.reference; else if (this.reference instanceof Function) return this.reference(this.$el); else if (this.$el) { if (this.reference === 'parent') return this.$el.parentElement; else if (this.reference === '$parent') return this.$parent.$el; else if (this.reference === 'context-parent') { // 求上下文中的 parent if (this.$parent === this.$vnode.context) return this.$el.parentElement; // Vue 的 vnode.parent 没有连接起来,需要自己找,不知道有没有更好的方法 let parentVNode = this.$parent._vnode; while (parentVNode && !parentVNode.children.includes(this.$vnode)) parentVNode = parentVNode.children.find((child) => child.elm.contains(this.$el)); // if (!parentVNode) if (parentVNode.context === this.$vnode.context) return parentVNode.elm; // 否则,找第一个上下文一致的组件 let parentVM = this.$parent; while (parentVM && parentVM.$vnode.context !== this.$vnode.context) parentVM = parentVM.$parent; return parentVM.$el; } else if (this.reference === 'prev') return this.$el.previousElementSibling; else if (this.reference === 'next') return this.$el.nextElementSibling; } }, getTriggerEl(referenceEl) { if (this.triggerElement === 'reference') return referenceEl; else if (this.triggerElement instanceof HTMLElement) return this.triggerElement; else if (this.triggerElement instanceof Function) return this.triggerElement(referenceEl); }, /** * 添加触发器时,绑定事件 */ addTrigger(el, event) { const popperEl = this.$el; // @TODO: support directives const arr = event.split('.'); event = arr[0]; this.triggers.push({ el, event }); // 绑定事件 this.followCursor && this.offEvents.push(ev.on(document, 'mousemove', (e) => this.updatePositionByCursor(e, el))); if (event === 'click') this.offEvents.push(ev.on(el, 'click', (e) => { if (arr[1] === 'stop') e.stopPropagation(); else if (arr[1] === 'prevent') e.preventDefault(); this.toggle(); this.followCursor && this.$nextTick(() => this.updatePositionByCursor(e, el)); })); else if (event === 'hover') { let timer; this.offEvents.push(ev.on(el, 'mouseenter', (e) => { timer = clearTimeout(timer); setTimeout(() => { this.open(); this.followCursor && this.$nextTick(() => this.updatePositionByCursor(e, el)); }, this.hoverDelay); })); this.offEvents.push(ev.on(document, 'mouseover', (e) => { if (this.currentOpened && !timer && !el.contains(e.target) && !popperEl.contains(e.target)) timer = setTimeout(() => this.close(), this.hideDelay); })); } else if (event === 'double-click') this.offEvents.push(ev.on(el, 'dblclick', (e) => { this.toggle(); this.followCursor && this.$nextTick(() => this.updatePositionByCursor(e, el)); })); else if (event === 'right-click') { this.offEvents.push(ev.on(el, 'contextmenu', (e) => { e.preventDefault(); this.toggle(); this.followCursor && this.$nextTick(() => this.updatePositionByCursor(e, el)); })); } // @TODO: 有没有必要搞 focus-in this.offEvents.push(ev.on(document, 'mousedown', (e) => { !el.contains(e.target) && !popperEl.contains(e.target) && this.close(); })); }, createPopper() { const referenceEl = this.referenceEl; const popperEl = this.$el; if (this.appendTo === 'body') document.body.appendChild(popperEl); else if (this.appendTo === 'reference') referenceEl.appendChild(popperEl); const options = this.getOptions(); this.popper = new Popper(referenceEl, popperEl, options); }, update() { this.popper && this.popper.update(); }, scheduleUpdate() { this.popper && this.popper.scheduleUpdate(); }, destroyPopper() { const referenceEl = this.referenceEl; const popperEl = this.$el; if 
(this.appendTo === 'body') popperEl.parentElement === document.body && document.body.removeChild(popperEl); else if (this.appendTo === 'reference') popperEl.parentElement === referenceEl && referenceEl.removeChild(popperEl); this.popper && this.popper.destroy(); this.popper = undefined; }, updatePositionByCursor(e, el) { // @TODO: 两种 offset 属性有些冗余 if (e.target !== el || !this.popper) return; const top = e.clientY + this.currentFollowCursor.offsetY; const left = e.clientX + this.currentFollowCursor.offsetX; const right = e.clientX + this.currentFollowCursor.offsetX; const bottom = e.clientY + this.currentFollowCursor.offsetY; this.popper.reference = { getBoundingClientRect: () => ({ width: 0, height: 0, top, left, right, bottom, }), clientWidth: 0, clientHeight: 0, }; this.popper.scheduleUpdate(); }, open() { // Check if enabled if (this.disabled) return; // Prevent replication if (this.currentOpened) return; // Emit a `before-` event with preventDefault() if (this.$emitPrevent('before-open', undefined, this)) return; // Assign and sync `opened` this.currentOpened = true; this.$emit('update:opened', true, this); // Emit `after-` events // this.$emit('open', undefined, this); }, close() { // Check if enabled if (this.disabled) return; // Prevent replication if (!this.currentOpened) return; // Emit a `before-` event with preventDefault() if (this.$emitPrevent('before-close', undefined, this)) return; // Assign and sync `opened` this.currentOpened = false; this.$emit('update:opened', false, this); // Emit `after-` events // this.$emit('close', undefined, this); }, toggle(opened) { // Method overloading if (opened === undefined) opened = !this.currentOpened; // @deprecated start if (this.disabled) return; const oldOpened = this.currentOpened; if (opened === oldOpened) return; if (this.$emitPrevent('before-toggle', { opened }, this)) return; opened ? this.open() : this.close(); this.$emit('toggle', { opened }, this); // @deprecated end }, }, }; export default MPopper;
:bug: Fix popper reference dynamic change problem
src/components/m-popper.vue/index.js
:bug: Fix popper reference dynamic change problem
<ide><path>rc/components/m-popper.vue/index.js <ide> } <ide> }, <ide> reference() { <add> /** <add> * 问题:现在的 popper 不支持动态改变 reference,导致 popper 的位置显示有问题 <add> * 解决方法:暂时在 popper.js 文档中未找到理想的解决方案,采取先删除 popper,再新创建 popper 的方法修复位置问题, <add> * 后面需要研究下 popper.js 的源码 <add> */ <add> this.destroyPopper(); <ide> this.referenceEl = this.getReferenceEl(); <add> this.createPopper(); <ide> }, <ide> }, <ide> mounted() {
Java
apache-2.0
70c645a7ff7400d0207a9453eb7e1def0782107d
0
apache/commons-text,apache/commons-text
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.text.matcher; /** * Determines if a character array portion matches. * * @since 1.3 */ public interface StringMatcher { /** * Returns the number of matching characters, zero for no match. * <p> * This method is called to check for a match. The parameter {@code pos} represents the current position to be * checked in the string {@code buffer} (a character array which must not be changed). The API guarantees that * {@code pos} is a valid index for {@code buffer}. * </p> * <p> * The matching code may check one character or many. It may check characters preceding {@code pos} as well as those * after. * </p> * <p> * It must return zero for no match, or a positive number if a match was found. The number indicates the number of * characters that matched. * </p> * * @param buffer the text content to match against, do not change * @param pos the starting position for the match, valid for buffer * @return The number of matching characters, zero for no match * @since 1.9 */ default int isMatch(final char[] buffer, final int pos) { return isMatch(buffer, pos, 0, buffer.length); } /** * Returns the number of matching characters, zero for no match. * <p> * This method is called to check for a match. The parameter {@code pos} represents the current position to be * checked in the string {@code buffer} (a character array which must not be changed). The API guarantees that * {@code pos} is a valid index for {@code buffer}. * </p> * <p> * The matching code may check one character or many. It may check characters preceding {@code pos} as well as those * after. * </p> * <p> * It must return zero for no match, or a positive number if a match was found. The number indicates the number of * characters that matched. * </p> * * @param buffer the text content to match against, do not change * @param pos the starting position for the match, valid for buffer * @return The number of matching characters, zero for no match * @since 1.9 */ default int isMatch(final CharSequence buffer, final int pos) { return isMatch(buffer, pos, 0, buffer.length()); } /** * Returns the number of matching characters, {@code 0} if there is no match. * <p> * This method is called to check for a match against a source {@code buffer}. The parameter {@code start} * represents the start position to be checked in the {@code buffer} (a character array which MUST not be * changed). The implementation SHOULD guarantees that {@code start} is a valid index in {@code buffer}. * </p> * <p> * The character array may be larger than the active area to be matched. Only values in the buffer between the * specified indices may be accessed, in other words: {@code bufferStart <= start < bufferEnd}. * </p> * <p> * The matching code may check one character or many. 
It may check characters preceding {@code start} as well as * those after, so long as no checks exceed the bounds specified. * </p> * <p> * It must return zero for no match, or a positive number if a match was found. The number indicates the number of * characters that matched. * </p> * * @param buffer the source text to search, do not change. * @param start the starting position for the match, valid in {@code buffer}. * @param bufferStart the first active index in the buffer, valid in {@code buffer}. * @param bufferEnd the end index (exclusive) of the active buffer, valid in {@code buffer}. * @return The number of matching characters, zero if there is no match. */ int isMatch(char[] buffer, int start, int bufferStart, int bufferEnd); /** * Returns the number of matching characters, {@code 0} if there is no match. * <p> * This method is called to check for a match against a source {@code buffer}. The parameter {@code start} * represents the start position to be checked in the {@code buffer} (a character array which MUST not be * changed). The implementation SHOULD guarantees that {@code start} is a valid index in {@code buffer}. * </p> * <p> * The character array may be larger than the active area to be matched. Only values in the buffer between the * specified indices may be accessed, in other words: {@code bufferStart <= start < bufferEnd}. * </p> * <p> * The matching code may check one character or many. It may check characters preceding {@code start} as well as * those after, so long as no checks exceed the bounds specified. * </p> * <p> * It must return zero for no match, or a positive number if a match was found. The number indicates the number of * characters that matched. * </p> * * @param buffer the source text to search, do not change. * @param start the starting position for the match, valid in {@code buffer}. * @param bufferStart the first active index in the buffer, valid in {@code buffer}. * @param bufferEnd the end index (exclusive) of the active buffer, valid in {@code buffer}. * @return The number of matching characters, zero if there is no match. */ default int isMatch(final CharSequence buffer, final int start, final int bufferStart, final int bufferEnd) { return isMatch(AbstractStringMatcher.toCharArray(buffer), start, bufferEnd, bufferEnd); } /** * Returns the size of the matching string. Defaults to 0. * * @return the size of the matching string. * @since 1.9 */ default int size() { return 0; } }
src/main/java/org/apache/commons/text/matcher/StringMatcher.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.text.matcher; /** * Determines if a character array portion matches. * * @since 1.3 */ public interface StringMatcher { /** * Returns the number of matching characters, zero for no match. * <p> * This method is called to check for a match. The parameter {@code pos} represents the current position to be * checked in the string {@code buffer} (a character array which must not be changed). The API guarantees that * {@code pos} is a valid index for {@code buffer}. * </p> * <p> * The matching code may check one character or many. It may check characters preceding {@code pos} as well as those * after. * </p> * <p> * It must return zero for no match, or a positive number if a match was found. The number indicates the number of * characters that matched. * </p> * * @param buffer the text content to match against, do not change * @param pos the starting position for the match, valid for buffer * @return The number of matching characters, zero for no match * @since 1.9 */ default int isMatch(final char[] buffer, final int pos) { return isMatch(buffer, pos, 0, buffer.length); } /** * Returns the number of matching characters, zero for no match. * <p> * This method is called to check for a match. The parameter {@code pos} represents the current position to be * checked in the string {@code buffer} (a character array which must not be changed). The API guarantees that * {@code pos} is a valid index for {@code buffer}. * </p> * <p> * The matching code may check one character or many. It may check characters preceding {@code pos} as well as those * after. * </p> * <p> * It must return zero for no match, or a positive number if a match was found. The number indicates the number of * characters that matched. * </p> * * @param buffer the text content to match against, do not change * @param pos the starting position for the match, valid for buffer * @return The number of matching characters, zero for no match * @since 1.9 */ default int isMatch(final CharSequence buffer, final int pos) { return isMatch(buffer, pos, 0, buffer.length()); } /** * Returns the number of matching characters, {@code 0} if there is no match. * <p> * This method is called to check for a match against a source {@code buffer}. The parameter {@code start} * represents the start position to be checked in the array {@code buffer} (a character array which MUST not be * changed). The implementation SHOULD guarantees that {@code start} is a valid index in {@code buffer}. * </p> * <p> * The character array may be larger than the active area to be matched. Only values in the buffer between the * specified indices may be accessed, in other words: {@code bufferStart <= start < bufferEnd}. 
* </p> * <p> * The matching code may check one character or many. It may check characters preceding {@code start} as well as * those after, so long as no checks exceed the bounds specified. * </p> * <p> * It must return zero for no match, or a positive number if a match was found. The number indicates the number of * characters that matched. * </p> * * @param buffer the source text to search, do not change. * @param start the starting position for the match, valid in {@code buffer}. * @param bufferStart the first active index in the buffer, valid in {@code buffer}. * @param bufferEnd the end index (exclusive) of the active buffer, valid in {@code buffer}. * @return The number of matching characters, zero if there is no match. */ int isMatch(char[] buffer, int start, int bufferStart, int bufferEnd); /** * Returns the number of matching characters, {@code 0} if there is no match. * <p> * This method is called to check for a match against a source {@code buffer}. The parameter {@code start} * represents the start position to be checked in the array {@code buffer} (a character array which MUST not be * changed). The implementation SHOULD guarantees that {@code start} is a valid index in {@code buffer}. * </p> * <p> * The character array may be larger than the active area to be matched. Only values in the buffer between the * specified indices may be accessed, in other words: {@code bufferStart <= start < bufferEnd}. * </p> * <p> * The matching code may check one character or many. It may check characters preceding {@code start} as well as * those after, so long as no checks exceed the bounds specified. * </p> * <p> * It must return zero for no match, or a positive number if a match was found. The number indicates the number of * characters that matched. * </p> * * @param buffer the source text to search, do not change. * @param start the starting position for the match, valid in {@code buffer}. * @param bufferStart the first active index in the buffer, valid in {@code buffer}. * @param bufferEnd the end index (exclusive) of the active buffer, valid in {@code buffer}. * @return The number of matching characters, zero if there is no match. */ default int isMatch(final CharSequence buffer, final int start, final int bufferStart, final int bufferEnd) { return isMatch(AbstractStringMatcher.toCharArray(buffer), start, bufferEnd, bufferEnd); } /** * Returns the size of the matching string. Defaults to 0. * * @return the size of the matching string. * @since 1.9 */ default int size() { return 0; } }
Javadoc.
src/main/java/org/apache/commons/text/matcher/StringMatcher.java
Javadoc.
<ide><path>rc/main/java/org/apache/commons/text/matcher/StringMatcher.java <ide> * Returns the number of matching characters, {@code 0} if there is no match. <ide> * <p> <ide> * This method is called to check for a match against a source {@code buffer}. The parameter {@code start} <del> * represents the start position to be checked in the array {@code buffer} (a character array which MUST not be <add> * represents the start position to be checked in the {@code buffer} (a character array which MUST not be <ide> * changed). The implementation SHOULD guarantees that {@code start} is a valid index in {@code buffer}. <ide> * </p> <ide> * <p> <ide> * Returns the number of matching characters, {@code 0} if there is no match. <ide> * <p> <ide> * This method is called to check for a match against a source {@code buffer}. The parameter {@code start} <del> * represents the start position to be checked in the array {@code buffer} (a character array which MUST not be <add> * represents the start position to be checked in the {@code buffer} (a character array which MUST not be <ide> * changed). The implementation SHOULD guarantees that {@code start} is a valid index in {@code buffer}. <ide> * </p> <ide> * <p>
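A short usage sketch of the isMatch contract documented above (assumes Apache Commons Text at a version where the default char[]/int overload exists, i.e. the methods tagged @since 1.9; the semicolon matcher is an illustrative custom implementation, not a factory method of the library):

import org.apache.commons.text.matcher.StringMatcher;

public class SemicolonMatcherDemo {
    // Only the four-argument isMatch is abstract, so the interface can be implemented
    // with a lambda: return the number of matched characters, or zero for no match,
    // without reading outside the [bufferStart, bufferEnd) window.
    static final StringMatcher SEMICOLON = (buffer, start, bufferStart, bufferEnd) ->
            buffer[start] == ';' ? 1 : 0;

    public static void main(String[] args) {
        char[] text = "a;b".toCharArray();
        System.out.println(SEMICOLON.isMatch(text, 1)); // 1 -> one matching character
        System.out.println(SEMICOLON.isMatch(text, 0)); // 0 -> no match
    }
}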
Java
bsd-3-clause
d5a4b8463c7ff4564fd0a2505bcbb2ed739d54f4
0
GabrielDancause/jbooktrader,GabrielDancause/jbooktrader,mkoistinen/JBookTrader,mkoistinen/JBookTrader,mkoistinen/JBookTrader,GabrielDancause/jbooktrader
package com.jbooktrader.platform.strategy; import com.ib.client.*; import com.jbooktrader.platform.commission.*; import com.jbooktrader.platform.indicator.*; import com.jbooktrader.platform.marketbook.*; import com.jbooktrader.platform.model.*; import com.jbooktrader.platform.model.ModelListener.*; import com.jbooktrader.platform.optimizer.*; import com.jbooktrader.platform.performance.*; import com.jbooktrader.platform.position.*; import com.jbooktrader.platform.report.*; import com.jbooktrader.platform.schedule.*; import java.util.*; /** * Base class for all classes that implement trading strategies. */ public abstract class Strategy implements Comparable<Strategy> { private final StrategyParams params; private final EventReport eventReport; private final Dispatcher dispatcher; private final String name; private MarketBook marketBook; private Contract contract; private TradingSchedule tradingSchedule; private PositionManager positionManager; private PerformanceManager performanceManager; private StrategyReportManager strategyReportManager; private IndicatorManager indicatorManager; private double bidAskSpread; private Date lastContractCheck; private static int HOUR_IN_MS = 1000 * 60 * 60; /** * Framework calls this method when a new snapshot of the limit order book is taken. */ public abstract void onBookSnapshot(); /** * Framework calls this method to set strategy parameter ranges and values. */ protected abstract void setParams(); /** * Framework calls this method to instantiate indicators. */ public abstract void setIndicators(); /** * Framework calls this to check and replace contract if needed * optional override */ public abstract Contract getNewContract(); protected Strategy(StrategyParams params) { this.params = params; if (params.size() == 0) { setParams(); } name = getClass().getSimpleName(); dispatcher = Dispatcher.getInstance(); eventReport = dispatcher.getEventReport(); } public void setMarketBook(MarketBook marketBook) { this.marketBook = marketBook; } public void setIndicatorManager(IndicatorManager indicatorManager) { this.indicatorManager = indicatorManager; indicatorManager.setMarketBook(marketBook); } protected void setPosition(int position) { positionManager.setTargetPosition(position); } public double getBidAskSpread() { return bidAskSpread; } public void closePosition() { setPosition(0); if (positionManager.getCurrentPosition() != 0) { Mode mode = dispatcher.getMode(); if (mode == Mode.ForwardTest || mode == Mode.Trade) { String msg = "End of trading interval. 
Closing current position."; eventReport.report(name, msg); } positionManager.trade(); } } public StrategyParams getParams() { return params; } protected int getParam(String name) { return params.get(name).getValue(); } protected void addParam(String name, int min, int max, int step, int value) { params.add(name, min, max, step, value); } public PositionManager getPositionManager() { return positionManager; } public PerformanceManager getPerformanceManager() { return performanceManager; } public StrategyReportManager getStrategyReportManager() { return strategyReportManager; } public IndicatorManager getIndicatorManager() { return indicatorManager; } public TradingSchedule getTradingSchedule() { return tradingSchedule; } protected Indicator addIndicator(Indicator indicator) { return indicatorManager.addIndicator(indicator); } protected void setStrategy(Contract contract, TradingSchedule tradingSchedule, int multiplier, Commission commission, double bidAskSpread) { this.contract = contract; lastContractCheck = new Date(); // set this, as our contract is valid now, just created contract.m_multiplier = String.valueOf(multiplier); this.tradingSchedule = tradingSchedule; performanceManager = new PerformanceManager(this, multiplier, commission); positionManager = new PositionManager(this); strategyReportManager = new StrategyReportManager(this); marketBook = dispatcher.getTrader().getAssistant().createMarketBook(this); this.bidAskSpread = bidAskSpread; } public MarketBook getMarketBook() { return marketBook; } public Contract getContract() { return contract; } public String getSymbol() { String symbol = contract.m_symbol; if (contract.m_currency != null) { symbol += "." + contract.m_currency; } return symbol; } public String getName() { return name; } public void processInstant(boolean isInSchedule) { if (isInSchedule) { if (indicatorManager.hasValidIndicators()) { onBookSnapshot(); positionManager.trade(); } } else { closePosition();// force flat position // check if we have a current position, because it may take time to close, if zero, replace contract if needed if (positionManager.getCurrentPosition() == 0) { // checking only after trading hours, and after positions are all closed Date now = new Date(); if (lastContractCheck == null || (lastContractCheck.getTime() < now.getTime() - HOUR_IN_MS)) { Contract newContract = getNewContract(); lastContractCheck = now; if (!contract.m_expiry.equals(newContract.m_expiry)) { // need to switch contracts, should be ok, we have no open positions and we are not in side trading hours String msg = "Switching Contract from" + contract.m_symbol + "-" + contract.m_expiry + "to " + newContract.m_symbol + "-" + newContract.m_expiry; eventReport.report(name, msg); contract = newContract; } } } } } public void process() { if (!marketBook.isEmpty()) { indicatorManager.updateIndicators(); MarketSnapshot marketSnapshot = marketBook.getSnapshot(); long instant = marketSnapshot.getTime(); processInstant(tradingSchedule.contains(instant)); performanceManager.updatePositionValue(marketSnapshot.getPrice(), positionManager.getCurrentPosition()); dispatcher.fireModelChanged(Event.StrategyUpdate, this); } } @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append(" ").append(name).append(" ["); sb.append(contract.m_symbol).append("-"); sb.append(contract.m_secType).append("-"); sb.append(contract.m_exchange).append("]"); return sb.toString(); } public int compareTo(Strategy other) { return name.compareTo(other.name); } }
source/com/jbooktrader/platform/strategy/Strategy.java
package com.jbooktrader.platform.strategy; import com.ib.client.*; import com.jbooktrader.platform.commission.*; import com.jbooktrader.platform.indicator.*; import com.jbooktrader.platform.marketbook.*; import com.jbooktrader.platform.model.*; import com.jbooktrader.platform.model.ModelListener.*; import com.jbooktrader.platform.optimizer.*; import com.jbooktrader.platform.performance.*; import com.jbooktrader.platform.position.*; import com.jbooktrader.platform.report.*; import com.jbooktrader.platform.schedule.*; import java.util.Date; import java.util.logging.Level; import java.util.logging.Logger; /** * Base class for all classes that implement trading strategies. */ public abstract class Strategy implements Comparable<Strategy> { private final StrategyParams params; private final EventReport eventReport; private final Dispatcher dispatcher; private final String name; private MarketBook marketBook; private Contract contract; private TradingSchedule tradingSchedule; private PositionManager positionManager; private PerformanceManager performanceManager; private StrategyReportManager strategyReportManager; private IndicatorManager indicatorManager; private double bidAskSpread; private Date lastContractCheck; private static int HOUR_IN_MS = 1000*60*60; private final static Logger LOGGER = Logger.getLogger(Strategy.class.getName()); /** * Framework calls this method when a new snapshot of the limit order book is taken. */ public abstract void onBookSnapshot(); /** * Framework calls this method to set strategy parameter ranges and values. */ protected abstract void setParams(); /** * Framework calls this method to instantiate indicators. */ public abstract void setIndicators(); /** * Framework calls this to check and replace contract if needed * optional override */ public Contract getNewContract() { String msg = "getNewContract was not implemented by Strategy, override getNewContract to be sure you end up with proper contract every day."; eventReport.report(name, msg); return contract; // default impl returns existing contract, no check is done } protected Strategy(StrategyParams params) { this.params = params; if (params.size() == 0) { setParams(); } name = getClass().getSimpleName(); dispatcher = Dispatcher.getInstance(); eventReport = dispatcher.getEventReport(); } public void setMarketBook(MarketBook marketBook) { this.marketBook = marketBook; } public void setIndicatorManager(IndicatorManager indicatorManager) { this.indicatorManager = indicatorManager; indicatorManager.setMarketBook(marketBook); } protected void setPosition(int position) { positionManager.setTargetPosition(position); } public double getBidAskSpread() { return bidAskSpread; } public void closePosition() { setPosition(0); if (positionManager.getCurrentPosition() != 0) { Mode mode = dispatcher.getMode(); if (mode == Mode.ForwardTest || mode == Mode.Trade) { String msg = "End of trading interval. 
Closing current position."; eventReport.report(name, msg); } positionManager.trade(); } } public StrategyParams getParams() { return params; } protected int getParam(String name) { return params.get(name).getValue(); } protected void addParam(String name, int min, int max, int step, int value) { params.add(name, min, max, step, value); } public PositionManager getPositionManager() { return positionManager; } public PerformanceManager getPerformanceManager() { return performanceManager; } public StrategyReportManager getStrategyReportManager() { return strategyReportManager; } public IndicatorManager getIndicatorManager() { return indicatorManager; } public TradingSchedule getTradingSchedule() { return tradingSchedule; } protected Indicator addIndicator(Indicator indicator) { return indicatorManager.addIndicator(indicator); } protected void setStrategy(Contract contract, TradingSchedule tradingSchedule, int multiplier, Commission commission, double bidAskSpread) { this.contract = contract; lastContractCheck = new Date(); // set this, as our contract is valid now, just created contract.m_multiplier = String.valueOf(multiplier); this.tradingSchedule = tradingSchedule; performanceManager = new PerformanceManager(this, multiplier, commission); positionManager = new PositionManager(this); strategyReportManager = new StrategyReportManager(this); marketBook = dispatcher.getTrader().getAssistant().createMarketBook(this); this.bidAskSpread = bidAskSpread; } public MarketBook getMarketBook() { return marketBook; } public Contract getContract() { return contract; } public String getSymbol() { String symbol = contract.m_symbol; if (contract.m_currency != null) { symbol += "." + contract.m_currency; } return symbol; } public String getName() { return name; } public void processInstant(boolean isInSchedule) { if (isInSchedule) { if (indicatorManager.hasValidIndicators()) { onBookSnapshot(); positionManager.trade(); } } else { closePosition();// force flat position // check if we have a current position, because it may take time to close, if zero, replace contract if needed if (positionManager.getCurrentPosition() == 0) { // checking only after trading hours, and after positions are all closed Date now = new Date(); if (lastContractCheck == null || (lastContractCheck.getTime() < now.getTime() - HOUR_IN_MS)) { LOGGER.log(Level.FINEST,"Checking contract"); Contract newContract = getNewContract(); lastContractCheck = now; if (!contract.m_expiry.equals(newContract.m_expiry)) { // need to switch contracts, should be ok, we have no open positions and we are not in side trading hours String msg = "Switching Contract from" + contract.m_symbol+"-"+contract.m_expiry + "to " + newContract.m_symbol+"-"+newContract.m_expiry; eventReport.report(name, msg); contract = newContract; } } } } } public void process() { if (!marketBook.isEmpty()) { indicatorManager.updateIndicators(); MarketSnapshot marketSnapshot = marketBook.getSnapshot(); long instant = marketSnapshot.getTime(); processInstant(tradingSchedule.contains(instant)); performanceManager.updatePositionValue(marketSnapshot.getPrice(), positionManager.getCurrentPosition()); dispatcher.fireModelChanged(Event.StrategyUpdate, this); } } @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append(" ").append(name).append(" ["); sb.append(contract.m_symbol).append("-"); sb.append(contract.m_secType).append("-"); sb.append(contract.m_exchange).append("]"); return sb.toString(); } public int compareTo(Strategy other) { return 
name.compareTo(other.name); } }
Made getNewContract() abstract.
source/com/jbooktrader/platform/strategy/Strategy.java
Made getNewContract() abstract.
<ide><path>ource/com/jbooktrader/platform/strategy/Strategy.java <ide> import com.jbooktrader.platform.report.*; <ide> import com.jbooktrader.platform.schedule.*; <ide> <del>import java.util.Date; <del>import java.util.logging.Level; <del>import java.util.logging.Logger; <add>import java.util.*; <ide> <ide> /** <ide> * Base class for all classes that implement trading strategies. <ide> private IndicatorManager indicatorManager; <ide> private double bidAskSpread; <ide> private Date lastContractCheck; <del> <del> private static int HOUR_IN_MS = 1000*60*60; <del> <del> private final static Logger LOGGER = Logger.getLogger(Strategy.class.getName()); <add> private static int HOUR_IN_MS = 1000 * 60 * 60; <ide> <ide> /** <ide> * Framework calls this method when a new snapshot of the limit order book is taken. <ide> * Framework calls this to check and replace contract if needed <ide> * optional override <ide> */ <del> public Contract getNewContract() { <del> String msg = "getNewContract was not implemented by Strategy, override getNewContract to be sure you end up with proper contract every day."; <del> eventReport.report(name, msg); <del> return contract; // default impl returns existing contract, no check is done <del> } <add> public abstract Contract getNewContract(); <ide> <ide> protected Strategy(StrategyParams params) { <ide> this.params = params; <ide> } else { <ide> closePosition();// force flat position <ide> // check if we have a current position, because it may take time to close, if zero, replace contract if needed <del> if (positionManager.getCurrentPosition() == 0) <del> { <add> if (positionManager.getCurrentPosition() == 0) { <ide> // checking only after trading hours, and after positions are all closed <ide> Date now = new Date(); <ide> if (lastContractCheck == null || (lastContractCheck.getTime() < now.getTime() - HOUR_IN_MS)) { <del> LOGGER.log(Level.FINEST,"Checking contract"); <ide> Contract newContract = getNewContract(); <ide> lastContractCheck = now; <ide> if (!contract.m_expiry.equals(newContract.m_expiry)) { <ide> // need to switch contracts, should be ok, we have no open positions and we are not in side trading hours <del> String msg = "Switching Contract from" + contract.m_symbol+"-"+contract.m_expiry + "to " + newContract.m_symbol+"-"+newContract.m_expiry; <add> String msg = "Switching Contract from" + contract.m_symbol + "-" + contract.m_expiry + "to " + newContract.m_symbol + "-" + newContract.m_expiry; <ide> eventReport.report(name, msg); <ide> contract = newContract; <ide> }
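Since getNewContract() is now abstract, every concrete strategy must supply a contract-rolling implementation. A minimal sketch of what that could look like (assumes the JBookTrader framework and the IB client classes on the classpath; the symbol, exchange, expiry value, and class name are illustrative assumptions, and the usual setStrategy(...) wiring in the constructor is omitted for brevity):

import com.ib.client.Contract;
import com.jbooktrader.platform.optimizer.StrategyParams;
import com.jbooktrader.platform.strategy.Strategy;

public class SampleRollingStrategy extends Strategy {

    public SampleRollingStrategy(StrategyParams params) {
        super(params);
        // A real strategy would also call setStrategy(contract, tradingSchedule,
        // multiplier, commission, bidAskSpread) here.
    }

    @Override
    protected void setParams() {
        // No tunable parameters in this sketch.
    }

    @Override
    public void setIndicators() {
        // No indicators in this sketch.
    }

    @Override
    public void onBookSnapshot() {
        // Stay flat; a real strategy would derive a target position here.
        setPosition(0);
    }

    @Override
    public Contract getNewContract() {
        // Return the contract the strategy should be trading now. Outside trading
        // hours, once the position is flat, the framework compares m_expiry with
        // the current contract and switches when it differs.
        Contract contract = new Contract();
        contract.m_symbol = "ES";          // hypothetical future
        contract.m_secType = "FUT";
        contract.m_exchange = "GLOBEX";
        contract.m_expiry = "20120615";    // placeholder front-month expiry
        return contract;
    }
}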
Java
apache-2.0
735bcf85e9b59a71981babe2f5da51978d61d8d3
0
apurtell/hbase,mahak/hbase,Apache9/hbase,ndimiduk/hbase,apurtell/hbase,mahak/hbase,mahak/hbase,ndimiduk/hbase,ndimiduk/hbase,mahak/hbase,apurtell/hbase,ndimiduk/hbase,ndimiduk/hbase,ndimiduk/hbase,Apache9/hbase,Apache9/hbase,apurtell/hbase,apurtell/hbase,mahak/hbase,Apache9/hbase,apurtell/hbase,mahak/hbase,mahak/hbase,apurtell/hbase,Apache9/hbase,Apache9/hbase,apurtell/hbase,Apache9/hbase,apurtell/hbase,mahak/hbase,apurtell/hbase,Apache9/hbase,ndimiduk/hbase,Apache9/hbase,ndimiduk/hbase,ndimiduk/hbase,ndimiduk/hbase,mahak/hbase,Apache9/hbase,mahak/hbase
/* * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.master; import static org.apache.hadoop.hbase.util.ConcurrentMapUtils.computeIfAbsent; import java.io.IOException; import java.net.InetAddress; import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.concurrent.ConcurrentNavigableMap; import java.util.concurrent.ConcurrentSkipListMap; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.Predicate; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.ClockOutOfSyncException; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.NotServingRegionException; import org.apache.hadoop.hbase.RegionMetrics; import org.apache.hadoop.hbase.ScheduledChore; import org.apache.hadoop.hbase.ServerMetrics; import org.apache.hadoop.hbase.ServerMetricsBuilder; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.YouAreDeadException; import org.apache.hadoop.hbase.client.AsyncClusterConnection; import org.apache.hadoop.hbase.client.AsyncRegionServerAdmin; import org.apache.hadoop.hbase.client.RegionInfo; import org.apache.hadoop.hbase.ipc.RemoteWithExtrasException; import org.apache.hadoop.hbase.master.assignment.RegionStates; import org.apache.hadoop.hbase.master.procedure.ServerCrashProcedure; import org.apache.hadoop.hbase.monitoring.MonitoredTask; import org.apache.hadoop.hbase.procedure2.Procedure; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.CommonFSUtils; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.FutureUtils; import org.apache.hadoop.hbase.zookeeper.ZKUtil; import org.apache.hadoop.hbase.zookeeper.ZKWatcher; import org.apache.hadoop.hbase.zookeeper.ZNodePaths; import org.apache.yetus.audience.InterfaceAudience; import org.apache.zookeeper.KeeperException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hbase.thirdparty.com.google.protobuf.ByteString; import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter; import org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionStoreSequenceIds; import org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId; import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.FlushedRegionSequenceId; import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.FlushedSequenceId; import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.FlushedStoreSequenceId; import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest; /** * The ServerManager class manages info about region servers. * <p> * Maintains lists of online and dead servers. Processes the startups, * shutdowns, and deaths of region servers. * <p> * Servers are distinguished in two different ways. A given server has a * location, specified by hostname and port, and of which there can only be one * online at any given time. A server instance is specified by the location * (hostname and port) as well as the startcode (timestamp from when the server * was started). This is used to differentiate a restarted instance of a given * server from the original instance. * <p> * If a sever is known not to be running any more, it is called dead. The dead * server needs to be handled by a ServerShutdownHandler. If the handler is not * enabled yet, the server can't be handled right away so it is queued up. * After the handler is enabled, the server will be submitted to a handler to handle. * However, the handler may be just partially enabled. If so, * the server cannot be fully processed, and be queued up for further processing. * A server is fully processed only after the handler is fully enabled * and has completed the handling. */ @InterfaceAudience.Private public class ServerManager { public static final String WAIT_ON_REGIONSERVERS_MAXTOSTART = "hbase.master.wait.on.regionservers.maxtostart"; public static final String WAIT_ON_REGIONSERVERS_MINTOSTART = "hbase.master.wait.on.regionservers.mintostart"; public static final String WAIT_ON_REGIONSERVERS_TIMEOUT = "hbase.master.wait.on.regionservers.timeout"; public static final String WAIT_ON_REGIONSERVERS_INTERVAL = "hbase.master.wait.on.regionservers.interval"; /** * see HBASE-20727 * if set to true, flushedSequenceIdByRegion and storeFlushedSequenceIdsByRegion * will be persisted to HDFS and loaded when master restart to speed up log split */ public static final String PERSIST_FLUSHEDSEQUENCEID = "hbase.master.persist.flushedsequenceid.enabled"; public static final boolean PERSIST_FLUSHEDSEQUENCEID_DEFAULT = true; public static final String FLUSHEDSEQUENCEID_FLUSHER_INTERVAL = "hbase.master.flushedsequenceid.flusher.interval"; public static final int FLUSHEDSEQUENCEID_FLUSHER_INTERVAL_DEFAULT = 3 * 60 * 60 * 1000; // 3 hours public static final String MAX_CLOCK_SKEW_MS = "hbase.master.maxclockskew"; private static final Logger LOG = LoggerFactory.getLogger(ServerManager.class); // Set if we are to shutdown the cluster. private AtomicBoolean clusterShutdown = new AtomicBoolean(false); /** * The last flushed sequence id for a region. */ private final ConcurrentNavigableMap<byte[], Long> flushedSequenceIdByRegion = new ConcurrentSkipListMap<>(Bytes.BYTES_COMPARATOR); private boolean persistFlushedSequenceId = true; private volatile boolean isFlushSeqIdPersistInProgress = false; /** File on hdfs to store last flushed sequence id of regions */ private static final String LAST_FLUSHED_SEQ_ID_FILE = ".lastflushedseqids"; private FlushedSequenceIdFlusher flushedSeqIdFlusher; /** * The last flushed sequence id for a store in a region. 
*/ private final ConcurrentNavigableMap<byte[], ConcurrentNavigableMap<byte[], Long>> storeFlushedSequenceIdsByRegion = new ConcurrentSkipListMap<>(Bytes.BYTES_COMPARATOR); /** Map of registered servers to their current load */ private final ConcurrentNavigableMap<ServerName, ServerMetrics> onlineServers = new ConcurrentSkipListMap<>(); /** List of region servers that should not get any more new regions. */ private final ArrayList<ServerName> drainingServers = new ArrayList<>(); private final MasterServices master; private final DeadServer deadservers = new DeadServer(); private final long maxSkew; private final long warningSkew; /** Listeners that are called on server events. */ private List<ServerListener> listeners = new CopyOnWriteArrayList<>(); /** * Constructor. */ public ServerManager(final MasterServices master) { this.master = master; Configuration c = master.getConfiguration(); maxSkew = c.getLong(MAX_CLOCK_SKEW_MS, 30000); warningSkew = c.getLong("hbase.master.warningclockskew", 10000); persistFlushedSequenceId = c.getBoolean(PERSIST_FLUSHEDSEQUENCEID, PERSIST_FLUSHEDSEQUENCEID_DEFAULT); } /** * Add the listener to the notification list. * @param listener The ServerListener to register */ public void registerListener(final ServerListener listener) { this.listeners.add(listener); } /** * Remove the listener from the notification list. * @param listener The ServerListener to unregister */ public boolean unregisterListener(final ServerListener listener) { return this.listeners.remove(listener); } /** * Let the server manager know a new regionserver has come online * @param request the startup request * @param versionNumber the version number of the new regionserver * @param version the version of the new regionserver, could contain strings like "SNAPSHOT" * @param ia the InetAddress from which request is received * @return The ServerName we know this server as. * @throws IOException */ ServerName regionServerStartup(RegionServerStartupRequest request, int versionNumber, String version, InetAddress ia) throws IOException { // Test for case where we get a region startup message from a regionserver // that has been quickly restarted but whose znode expiration handler has // not yet run, or from a server whose fail we are currently processing. // Test its host+port combo is present in serverAddressToServerInfo. If it // is, reject the server and trigger its expiration. The next time it comes // in, it should have been removed from serverAddressToServerInfo and queued // for processing by ProcessServerShutdown. final String hostname = request.hasUseThisHostnameInstead() ? 
request.getUseThisHostnameInstead() : ia.getHostName(); ServerName sn = ServerName.valueOf(hostname, request.getPort(), request.getServerStartCode()); checkClockSkew(sn, request.getServerCurrentTime()); checkIsDead(sn, "STARTUP"); if (!checkAndRecordNewServer(sn, ServerMetricsBuilder.of(sn, versionNumber, version))) { LOG.warn( "THIS SHOULD NOT HAPPEN, RegionServerStartup" + " could not record the server: " + sn); } return sn; } /** * Updates last flushed sequence Ids for the regions on server sn * @param sn * @param hsl */ private void updateLastFlushedSequenceIds(ServerName sn, ServerMetrics hsl) { for (Entry<byte[], RegionMetrics> entry : hsl.getRegionMetrics().entrySet()) { byte[] encodedRegionName = Bytes.toBytes(RegionInfo.encodeRegionName(entry.getKey())); Long existingValue = flushedSequenceIdByRegion.get(encodedRegionName); long l = entry.getValue().getCompletedSequenceId(); // Don't let smaller sequence ids override greater sequence ids. if (LOG.isTraceEnabled()) { LOG.trace(Bytes.toString(encodedRegionName) + ", existingValue=" + existingValue + ", completeSequenceId=" + l); } if (existingValue == null || (l != HConstants.NO_SEQNUM && l > existingValue)) { flushedSequenceIdByRegion.put(encodedRegionName, l); } else if (l != HConstants.NO_SEQNUM && l < existingValue) { LOG.warn("RegionServer " + sn + " indicates a last flushed sequence id (" + l + ") that is less than the previous last flushed sequence id (" + existingValue + ") for region " + Bytes.toString(entry.getKey()) + " Ignoring."); } ConcurrentNavigableMap<byte[], Long> storeFlushedSequenceId = computeIfAbsent(storeFlushedSequenceIdsByRegion, encodedRegionName, () -> new ConcurrentSkipListMap<>(Bytes.BYTES_COMPARATOR)); for (Entry<byte[], Long> storeSeqId : entry.getValue().getStoreSequenceId().entrySet()) { byte[] family = storeSeqId.getKey(); existingValue = storeFlushedSequenceId.get(family); l = storeSeqId.getValue(); if (LOG.isTraceEnabled()) { LOG.trace(Bytes.toString(encodedRegionName) + ", family=" + Bytes.toString(family) + ", existingValue=" + existingValue + ", completeSequenceId=" + l); } // Don't let smaller sequence ids override greater sequence ids. if (existingValue == null || (l != HConstants.NO_SEQNUM && l > existingValue.longValue())) { storeFlushedSequenceId.put(family, l); } } } } public void regionServerReport(ServerName sn, ServerMetrics sl) throws YouAreDeadException { checkIsDead(sn, "REPORT"); if (null == this.onlineServers.replace(sn, sl)) { // Already have this host+port combo and its just different start code? // Just let the server in. Presume master joining a running cluster. // recordNewServer is what happens at the end of reportServerStartup. // The only thing we are skipping is passing back to the regionserver // the ServerName to use. Here we presume a master has already done // that so we'll press on with whatever it gave us for ServerName. if (!checkAndRecordNewServer(sn, sl)) { LOG.info("RegionServerReport ignored, could not record the server: " + sn); return; // Not recorded, so no need to move on } } updateLastFlushedSequenceIds(sn, sl); } /** * Check is a server of same host and port already exists, * if not, or the existed one got a smaller start code, record it. 
* * @param serverName the server to check and record * @param sl the server load on the server * @return true if the server is recorded, otherwise, false */ boolean checkAndRecordNewServer(final ServerName serverName, final ServerMetrics sl) { ServerName existingServer = null; synchronized (this.onlineServers) { existingServer = findServerWithSameHostnamePortWithLock(serverName); if (existingServer != null && (existingServer.getStartcode() > serverName.getStartcode())) { LOG.info("Server serverName=" + serverName + " rejected; we already have " + existingServer.toString() + " registered with same hostname and port"); return false; } recordNewServerWithLock(serverName, sl); } // Tell our listeners that a server was added if (!this.listeners.isEmpty()) { for (ServerListener listener : this.listeners) { listener.serverAdded(serverName); } } // Note that we assume that same ts means same server, and don't expire in that case. // TODO: ts can theoretically collide due to clock shifts, so this is a bit hacky. if (existingServer != null && (existingServer.getStartcode() < serverName.getStartcode())) { LOG.info("Triggering server recovery; existingServer " + existingServer + " looks stale, new server:" + serverName); expireServer(existingServer); } return true; } /** * Find out the region servers crashed between the crash of the previous master instance and the * current master instance and schedule SCP for them. * <p/> * Since the {@code RegionServerTracker} has already helped us to construct the online servers set * by scanning zookeeper, now we can compare the online servers with {@code liveServersFromWALDir} * to find out whether there are servers which are already dead. * <p/> * Must be called inside the initialization method of {@code RegionServerTracker} to avoid * concurrency issue. * @param deadServersFromPE the region servers which already have a SCP associated. * @param liveServersFromWALDir the live region servers from wal directory. */ void findDeadServersAndProcess(Set<ServerName> deadServersFromPE, Set<ServerName> liveServersFromWALDir) { deadServersFromPE.forEach(deadservers::putIfAbsent); liveServersFromWALDir.stream().filter(sn -> !onlineServers.containsKey(sn)) .forEach(this::expireServer); } /** * Checks if the clock skew between the server and the master. If the clock skew exceeds the * configured max, it will throw an exception; if it exceeds the configured warning threshold, * it will log a warning but start normally. * @param serverName Incoming servers's name * @param serverCurrentTime * @throws ClockOutOfSyncException if the skew exceeds the configured max value */ private void checkClockSkew(final ServerName serverName, final long serverCurrentTime) throws ClockOutOfSyncException { long skew = Math.abs(EnvironmentEdgeManager.currentTime() - serverCurrentTime); if (skew > maxSkew) { String message = "Server " + serverName + " has been " + "rejected; Reported time is too far out of sync with master. " + "Time difference of " + skew + "ms > max allowed of " + maxSkew + "ms"; LOG.warn(message); throw new ClockOutOfSyncException(message); } else if (skew > warningSkew){ String message = "Reported time for server " + serverName + " is out of sync with master " + "by " + skew + "ms. (Warning threshold is " + warningSkew + "ms; " + "error threshold is " + maxSkew + "ms)"; LOG.warn(message); } } /** * Called when RegionServer first reports in for duty and thereafter each * time it heartbeats to make sure it is has not been figured for dead. 
* If this server is on the dead list, reject it with a YouAreDeadException. * If it was dead but came back with a new start code, remove the old entry * from the dead list. * @param what START or REPORT */ private void checkIsDead(final ServerName serverName, final String what) throws YouAreDeadException { if (this.deadservers.isDeadServer(serverName)) { // Exact match: host name, port and start code all match with existing one of the // dead servers. So, this server must be dead. Tell it to kill itself. String message = "Server " + what + " rejected; currently processing " + serverName + " as dead server"; LOG.debug(message); throw new YouAreDeadException(message); } // Remove dead server with same hostname and port of newly checking in rs after master // initialization. See HBASE-5916 for more information. if ((this.master == null || this.master.isInitialized()) && this.deadservers.cleanPreviousInstance(serverName)) { // This server has now become alive after we marked it as dead. // We removed it's previous entry from the dead list to reflect it. LOG.debug("{} {} came back up, removed it from the dead servers list", what, serverName); } } /** * Assumes onlineServers is locked. * @return ServerName with matching hostname and port. */ private ServerName findServerWithSameHostnamePortWithLock( final ServerName serverName) { ServerName end = ServerName.valueOf(serverName.getHostname(), serverName.getPort(), Long.MAX_VALUE); ServerName r = onlineServers.lowerKey(end); if (r != null) { if (ServerName.isSameAddress(r, serverName)) { return r; } } return null; } /** * Adds the onlineServers list. onlineServers should be locked. * @param serverName The remote servers name. */ void recordNewServerWithLock(final ServerName serverName, final ServerMetrics sl) { LOG.info("Registering regionserver=" + serverName); this.onlineServers.put(serverName, sl); } public ConcurrentNavigableMap<byte[], Long> getFlushedSequenceIdByRegion() { return flushedSequenceIdByRegion; } public RegionStoreSequenceIds getLastFlushedSequenceId(byte[] encodedRegionName) { RegionStoreSequenceIds.Builder builder = RegionStoreSequenceIds.newBuilder(); Long seqId = flushedSequenceIdByRegion.get(encodedRegionName); builder.setLastFlushedSequenceId(seqId != null ? seqId.longValue() : HConstants.NO_SEQNUM); Map<byte[], Long> storeFlushedSequenceId = storeFlushedSequenceIdsByRegion.get(encodedRegionName); if (storeFlushedSequenceId != null) { for (Map.Entry<byte[], Long> entry : storeFlushedSequenceId.entrySet()) { builder.addStoreSequenceId(StoreSequenceId.newBuilder() .setFamilyName(UnsafeByteOperations.unsafeWrap(entry.getKey())) .setSequenceId(entry.getValue().longValue()).build()); } } return builder.build(); } /** * @param serverName * @return ServerMetrics if serverName is known else null */ public ServerMetrics getLoad(final ServerName serverName) { return this.onlineServers.get(serverName); } /** * Compute the average load across all region servers. * Currently, this uses a very naive computation - just uses the number of * regions being served, ignoring stats about number of requests. * @return the average load */ public double getAverageLoad() { int totalLoad = 0; int numServers = 0; for (ServerMetrics sl : this.onlineServers.values()) { numServers++; totalLoad += sl.getRegionMetrics().size(); } return numServers == 0 ? 
0 : (double)totalLoad / (double)numServers; } /** @return the count of active regionservers */ public int countOfRegionServers() { // Presumes onlineServers is a concurrent map return this.onlineServers.size(); } /** * @return Read-only map of servers to serverinfo */ public Map<ServerName, ServerMetrics> getOnlineServers() { // Presumption is that iterating the returned Map is OK. synchronized (this.onlineServers) { return Collections.unmodifiableMap(this.onlineServers); } } public DeadServer getDeadServers() { return this.deadservers; } /** * Checks if any dead servers are currently in progress. * @return true if any RS are being processed as dead, false if not */ public boolean areDeadServersInProgress() throws IOException { return master.getProcedures().stream() .anyMatch(p -> !p.isFinished() && p instanceof ServerCrashProcedure); } void letRegionServersShutdown() { long previousLogTime = 0; ServerName sn = master.getServerName(); ZKWatcher zkw = master.getZooKeeper(); int onlineServersCt; while ((onlineServersCt = onlineServers.size()) > 0){ if (EnvironmentEdgeManager.currentTime() > (previousLogTime + 1000)) { Set<ServerName> remainingServers = onlineServers.keySet(); synchronized (onlineServers) { if (remainingServers.size() == 1 && remainingServers.contains(sn)) { // Master will delete itself later. return; } } StringBuilder sb = new StringBuilder(); // It's ok here to not sync on onlineServers - merely logging for (ServerName key : remainingServers) { if (sb.length() > 0) { sb.append(", "); } sb.append(key); } LOG.info("Waiting on regionserver(s) " + sb.toString()); previousLogTime = EnvironmentEdgeManager.currentTime(); } try { List<String> servers = getRegionServersInZK(zkw); if (servers == null || servers.isEmpty() || (servers.size() == 1 && servers.contains(sn.toString()))) { LOG.info("ZK shows there is only the master self online, exiting now"); // Master could have lost some ZK events, no need to wait more. break; } } catch (KeeperException ke) { LOG.warn("Failed to list regionservers", ke); // ZK is malfunctioning, don't hang here break; } synchronized (onlineServers) { try { if (onlineServersCt == onlineServers.size()) onlineServers.wait(100); } catch (InterruptedException ignored) { // continue } } } } private List<String> getRegionServersInZK(final ZKWatcher zkw) throws KeeperException { return ZKUtil.listChildrenNoWatch(zkw, zkw.getZNodePaths().rsZNode); } /** * Expire the passed server. Add it to list of dead servers and queue a shutdown processing. * @return pid if we queued a ServerCrashProcedure else {@link Procedure#NO_PROC_ID} if we did * not (could happen for many reasons including the fact that its this server that is * going down or we already have queued an SCP for this server or SCP processing is * currently disabled because we are in startup phase). */ // Redo test so we can make this protected. public synchronized long expireServer(final ServerName serverName) { return expireServer(serverName, false); } synchronized long expireServer(final ServerName serverName, boolean force) { // THIS server is going down... can't handle our own expiration. 
if (serverName.equals(master.getServerName())) { if (!(master.isAborted() || master.isStopped())) { master.stop("We lost our znode?"); } return Procedure.NO_PROC_ID; } if (this.deadservers.isDeadServer(serverName)) { LOG.warn("Expiration called on {} but already in DeadServer", serverName); return Procedure.NO_PROC_ID; } moveFromOnlineToDeadServers(serverName); // If server is in draining mode, remove corresponding znode // In some tests, the mocked HM may not have ZK Instance, hence null check if (master.getZooKeeper() != null) { String drainingZnode = ZNodePaths .joinZNode(master.getZooKeeper().getZNodePaths().drainingZNode, serverName.getServerName()); try { ZKUtil.deleteNodeFailSilent(master.getZooKeeper(), drainingZnode); } catch (KeeperException e) { LOG.warn("Error deleting the draining znode for stopping server " + serverName.getServerName(), e); } } // If cluster is going down, yes, servers are going to be expiring; don't // process as a dead server if (isClusterShutdown()) { LOG.info("Cluster shutdown set; " + serverName + " expired; onlineServers=" + this.onlineServers.size()); if (this.onlineServers.isEmpty()) { master.stop("Cluster shutdown set; onlineServer=0"); } return Procedure.NO_PROC_ID; } LOG.info("Processing expiration of " + serverName + " on " + this.master.getServerName()); long pid = master.getAssignmentManager().submitServerCrash(serverName, true, force); // Tell our listeners that a server was removed if (!this.listeners.isEmpty()) { this.listeners.stream().forEach(l -> l.serverRemoved(serverName)); } // trigger a persist of flushedSeqId if (flushedSeqIdFlusher != null) { flushedSeqIdFlusher.triggerNow(); } return pid; } /** * Called when server has expired. */ // Locking in this class needs cleanup. public synchronized void moveFromOnlineToDeadServers(final ServerName sn) { synchronized (this.onlineServers) { boolean online = this.onlineServers.containsKey(sn); if (online) { // Remove the server from the known servers lists and update load info BUT // add to deadservers first; do this so it'll show in dead servers list if // not in online servers list. this.deadservers.putIfAbsent(sn); this.onlineServers.remove(sn); onlineServers.notifyAll(); } else { // If not online, that is odd but may happen if 'Unknown Servers' -- where meta // has references to servers not online nor in dead servers list. If // 'Unknown Server', don't add to DeadServers else will be there for ever. LOG.trace("Expiration of {} but server not online", sn); } } } /* * Remove the server from the drain list. */ public synchronized boolean removeServerFromDrainList(final ServerName sn) { // Warn if the server (sn) is not online. ServerName is of the form: // <hostname> , <port> , <startcode> if (!this.isServerOnline(sn)) { LOG.warn("Server " + sn + " is not currently online. " + "Removing from draining list anyway, as requested."); } // Remove the server from the draining servers lists. return this.drainingServers.remove(sn); } /** * Add the server to the drain list. * @param sn * @return True if the server is added or the server is already on the drain list. */ public synchronized boolean addServerToDrainList(final ServerName sn) { // Warn if the server (sn) is not online. ServerName is of the form: // <hostname> , <port> , <startcode> if (!this.isServerOnline(sn)) { LOG.warn("Server " + sn + " is not currently online. " + "Ignoring request to add it to draining list."); return false; } // Add the server to the draining servers lists, if it's not already in // it. 
if (this.drainingServers.contains(sn)) { LOG.warn("Server " + sn + " is already in the draining server list." + "Ignoring request to add it again."); return true; } LOG.info("Server " + sn + " added to draining server list."); return this.drainingServers.add(sn); } /** * Contacts a region server and waits up to timeout ms * to close the region. This bypasses the active hmaster. * Pass -1 as timeout if you do not want to wait on result. */ public static void closeRegionSilentlyAndWait(AsyncClusterConnection connection, ServerName server, RegionInfo region, long timeout) throws IOException, InterruptedException { AsyncRegionServerAdmin admin = connection.getRegionServerAdmin(server); try { FutureUtils.get( admin.closeRegion(ProtobufUtil.buildCloseRegionRequest(server, region.getRegionName()))); } catch (IOException e) { LOG.warn("Exception when closing region: " + region.getRegionNameAsString(), e); } if (timeout < 0) { return; } long expiration = timeout + EnvironmentEdgeManager.currentTime(); while (EnvironmentEdgeManager.currentTime() < expiration) { try { RegionInfo rsRegion = ProtobufUtil.toRegionInfo(FutureUtils .get( admin.getRegionInfo(RequestConverter.buildGetRegionInfoRequest(region.getRegionName()))) .getRegionInfo()); if (rsRegion == null) { return; } } catch (IOException ioe) { if (ioe instanceof NotServingRegionException || (ioe instanceof RemoteWithExtrasException && ((RemoteWithExtrasException)ioe).unwrapRemoteException() instanceof NotServingRegionException)) { // no need to retry again return; } LOG.warn("Exception when retrieving regioninfo from: " + region.getRegionNameAsString(), ioe); } Thread.sleep(1000); } throw new IOException("Region " + region + " failed to close within" + " timeout " + timeout); } /** * Calculate min necessary to start. This is not an absolute. It is just * a friction that will cause us hang around a bit longer waiting on * RegionServers to check-in. */ private int getMinToStart() { if (master.isInMaintenanceMode()) { // If in maintenance mode, then master hosting meta will be the only server available return 1; } int minimumRequired = 1; int minToStart = this.master.getConfiguration().getInt(WAIT_ON_REGIONSERVERS_MINTOSTART, -1); // Ensure we are never less than minimumRequired else stuff won't work. return Math.max(minToStart, minimumRequired); } /** * Wait for the region servers to report in. * We will wait until one of this condition is met: * - the master is stopped * - the 'hbase.master.wait.on.regionservers.maxtostart' number of * region servers is reached * - the 'hbase.master.wait.on.regionservers.mintostart' is reached AND * there have been no new region server in for * 'hbase.master.wait.on.regionservers.interval' time AND * the 'hbase.master.wait.on.regionservers.timeout' is reached * * @throws InterruptedException */ public void waitForRegionServers(MonitoredTask status) throws InterruptedException { final long interval = this.master.getConfiguration(). getLong(WAIT_ON_REGIONSERVERS_INTERVAL, 1500); final long timeout = this.master.getConfiguration(). getLong(WAIT_ON_REGIONSERVERS_TIMEOUT, 4500); // Min is not an absolute; just a friction making us wait longer on server checkin. int minToStart = getMinToStart(); int maxToStart = this.master.getConfiguration(). 
getInt(WAIT_ON_REGIONSERVERS_MAXTOSTART, Integer.MAX_VALUE); if (maxToStart < minToStart) { LOG.warn(String.format("The value of '%s' (%d) is set less than '%s' (%d), ignoring.", WAIT_ON_REGIONSERVERS_MAXTOSTART, maxToStart, WAIT_ON_REGIONSERVERS_MINTOSTART, minToStart)); maxToStart = Integer.MAX_VALUE; } long now = EnvironmentEdgeManager.currentTime(); final long startTime = now; long slept = 0; long lastLogTime = 0; long lastCountChange = startTime; int count = countOfRegionServers(); int oldCount = 0; // This while test is a little hard to read. We try to comment it in below but in essence: // Wait if Master is not stopped and the number of regionservers that have checked-in is // less than the maxToStart. Both of these conditions will be true near universally. // Next, we will keep cycling if ANY of the following three conditions are true: // 1. The time since a regionserver registered is < interval (means servers are actively checking in). // 2. We are under the total timeout. // 3. The count of servers is < minimum. for (ServerListener listener: this.listeners) { listener.waiting(); } while (!this.master.isStopped() && !isClusterShutdown() && count < maxToStart && ((lastCountChange + interval) > now || timeout > slept || count < minToStart)) { // Log some info at every interval time or if there is a change if (oldCount != count || lastLogTime + interval < now) { lastLogTime = now; String msg = "Waiting on regionserver count=" + count + "; waited="+ slept + "ms, expecting min=" + minToStart + " server(s), max=" + getStrForMax(maxToStart) + " server(s), " + "timeout=" + timeout + "ms, lastChange=" + (now - lastCountChange) + "ms"; LOG.info(msg); status.setStatus(msg); } // We sleep for some time final long sleepTime = 50; Thread.sleep(sleepTime); now = EnvironmentEdgeManager.currentTime(); slept = now - startTime; oldCount = count; count = countOfRegionServers(); if (count != oldCount) { lastCountChange = now; } } // Did we exit the loop because cluster is going down? if (isClusterShutdown()) { this.master.stop("Cluster shutdown"); } LOG.info("Finished waiting on RegionServer count=" + count + "; waited=" + slept + "ms," + " expected min=" + minToStart + " server(s), max=" + getStrForMax(maxToStart) + " server(s),"+ " master is "+ (this.master.isStopped() ? "stopped.": "running")); } private String getStrForMax(final int max) { return max == Integer.MAX_VALUE? "NO_LIMIT": Integer.toString(max); } /** * @return A copy of the internal list of online servers. */ public List<ServerName> getOnlineServersList() { // TODO: optimize the load balancer call so we don't need to make a new list // TODO: FIX. THIS IS POPULAR CALL. return new ArrayList<>(this.onlineServers.keySet()); } /** * @param keys The target server name * @param idleServerPredicator Evaluates the server on the given load * @return A copy of the internal list of online servers matched by the predicator */ public List<ServerName> getOnlineServersListWithPredicator(List<ServerName> keys, Predicate<ServerMetrics> idleServerPredicator) { List<ServerName> names = new ArrayList<>(); if (keys != null && idleServerPredicator != null) { keys.forEach(name -> { ServerMetrics load = onlineServers.get(name); if (load != null) { if (idleServerPredicator.test(load)) { names.add(name); } } }); } return names; } /** * @return A copy of the internal list of draining servers. 
*/ public List<ServerName> getDrainingServersList() { return new ArrayList<>(this.drainingServers); } public boolean isServerOnline(ServerName serverName) { return serverName != null && onlineServers.containsKey(serverName); } public enum ServerLiveState { LIVE, DEAD, UNKNOWN } /** * @return whether the server is online, dead, or unknown. */ public synchronized ServerLiveState isServerKnownAndOnline(ServerName serverName) { return onlineServers.containsKey(serverName) ? ServerLiveState.LIVE : (deadservers.isDeadServer(serverName) ? ServerLiveState.DEAD : ServerLiveState.UNKNOWN); } /** * Check if a server is known to be dead. A server can be online, * or known to be dead, or unknown to this manager (i.e, not online, * not known to be dead either; it is simply not tracked by the * master any more, for example, a very old previous instance). */ public synchronized boolean isServerDead(ServerName serverName) { return serverName == null || deadservers.isDeadServer(serverName); } /** * Check if a server is unknown. A server can be online, * or known to be dead, or unknown to this manager (i.e, not online, * not known to be dead either; it is simply not tracked by the * master any more, for example, a very old previous instance). */ public boolean isServerUnknown(ServerName serverName) { return serverName == null || (!onlineServers.containsKey(serverName) && !deadservers.isDeadServer(serverName)); } public void shutdownCluster() { String statusStr = "Cluster shutdown requested of master=" + this.master.getServerName(); LOG.info(statusStr); this.clusterShutdown.set(true); if (onlineServers.isEmpty()) { // we do not synchronize here so this may cause a double stop, but not a big deal master.stop("OnlineServer=0 right after cluster shutdown set"); } } public boolean isClusterShutdown() { return this.clusterShutdown.get(); } /** * start chore in ServerManager */ public void startChore() { Configuration c = master.getConfiguration(); if (persistFlushedSequenceId) { new Thread(() -> { // after AM#loadMeta, RegionStates should be loaded, and some regions are // deleted by drop/split/merge during removeDeletedRegionFromLoadedFlushedSequenceIds, // but these deleted regions are not added back to RegionStates, // so we can safely remove deleted regions. removeDeletedRegionFromLoadedFlushedSequenceIds(); }, "RemoveDeletedRegionSyncThread").start(); int flushPeriod = c.getInt(FLUSHEDSEQUENCEID_FLUSHER_INTERVAL, FLUSHEDSEQUENCEID_FLUSHER_INTERVAL_DEFAULT); flushedSeqIdFlusher = new FlushedSequenceIdFlusher( "FlushedSequenceIdFlusher", flushPeriod); master.getChoreService().scheduleChore(flushedSeqIdFlusher); } } /** * Stop the ServerManager. */ public void stop() { if (flushedSeqIdFlusher != null) { flushedSeqIdFlusher.shutdown(); } if (persistFlushedSequenceId) { try { persistRegionLastFlushedSequenceIds(); } catch (IOException e) { LOG.warn("Failed to persist last flushed sequence id of regions" + " to file system", e); } } } /** * Creates a list of possible destinations for a region. It contains the online servers, but not * the draining or dying servers. * @param serversToExclude can be null if there is no server to exclude */ public List<ServerName> createDestinationServersList(final List<ServerName> serversToExclude) { Set<ServerName> destServers = new HashSet<>(); onlineServers.forEach((sn, sm) -> { if (sm.getLastReportTimestamp() > 0) { // This means we have already called regionServerReport at leaset once, then let's include // this server for region assignment. 
This is an optimization to avoid assigning regions to // an uninitialized server. See HBASE-25032 for more details. destServers.add(sn); } }); if (serversToExclude != null) { destServers.removeAll(serversToExclude); } // Loop through the draining server list and remove them from the server list final List<ServerName> drainingServersCopy = getDrainingServersList(); destServers.removeAll(drainingServersCopy); return new ArrayList<>(destServers); } /** * Calls {@link #createDestinationServersList} without server to exclude. */ public List<ServerName> createDestinationServersList(){ return createDestinationServersList(null); } /** * To clear any dead server with same host name and port of any online server */ void clearDeadServersWithSameHostNameAndPortOfOnlineServer() { for (ServerName serverName : getOnlineServersList()) { deadservers.cleanAllPreviousInstances(serverName); } } /** * Called by delete table and similar to notify the ServerManager that a region was removed. */ public void removeRegion(final RegionInfo regionInfo) { final byte[] encodedName = regionInfo.getEncodedNameAsBytes(); storeFlushedSequenceIdsByRegion.remove(encodedName); flushedSequenceIdByRegion.remove(encodedName); } public boolean isRegionInServerManagerStates(final RegionInfo hri) { final byte[] encodedName = hri.getEncodedNameAsBytes(); return (storeFlushedSequenceIdsByRegion.containsKey(encodedName) || flushedSequenceIdByRegion.containsKey(encodedName)); } /** * Called by delete table and similar to notify the ServerManager that a region was removed. */ public void removeRegions(final List<RegionInfo> regions) { for (RegionInfo hri: regions) { removeRegion(hri); } } /** * May return 0 when server is not online. */ public int getVersionNumber(ServerName serverName) { ServerMetrics serverMetrics = onlineServers.get(serverName); return serverMetrics != null ? serverMetrics.getVersionNumber() : 0; } /** * May return "0.0.0" when server is not online */ public String getVersion(ServerName serverName) { ServerMetrics serverMetrics = onlineServers.get(serverName); return serverMetrics != null ? serverMetrics.getVersion() : "0.0.0"; } public int getInfoPort(ServerName serverName) { ServerMetrics serverMetrics = onlineServers.get(serverName); return serverMetrics != null ? 
serverMetrics.getInfoServerPort() : 0; } /** * Persist last flushed sequence id of each region to HDFS * @throws IOException if persit to HDFS fails */ private void persistRegionLastFlushedSequenceIds() throws IOException { if (isFlushSeqIdPersistInProgress) { return; } isFlushSeqIdPersistInProgress = true; try { Configuration conf = master.getConfiguration(); Path rootDir = CommonFSUtils.getRootDir(conf); Path lastFlushedSeqIdPath = new Path(rootDir, LAST_FLUSHED_SEQ_ID_FILE); FileSystem fs = FileSystem.get(conf); if (fs.exists(lastFlushedSeqIdPath)) { LOG.info("Rewriting .lastflushedseqids file at: " + lastFlushedSeqIdPath); if (!fs.delete(lastFlushedSeqIdPath, false)) { throw new IOException("Unable to remove existing " + lastFlushedSeqIdPath); } } else { LOG.info("Writing .lastflushedseqids file at: " + lastFlushedSeqIdPath); } FSDataOutputStream out = fs.create(lastFlushedSeqIdPath); FlushedSequenceId.Builder flushedSequenceIdBuilder = FlushedSequenceId.newBuilder(); try { for (Entry<byte[], Long> entry : flushedSequenceIdByRegion.entrySet()) { FlushedRegionSequenceId.Builder flushedRegionSequenceIdBuilder = FlushedRegionSequenceId.newBuilder(); flushedRegionSequenceIdBuilder.setRegionEncodedName( ByteString.copyFrom(entry.getKey())); flushedRegionSequenceIdBuilder.setSeqId(entry.getValue()); ConcurrentNavigableMap<byte[], Long> storeSeqIds = storeFlushedSequenceIdsByRegion.get(entry.getKey()); if (storeSeqIds != null) { for (Entry<byte[], Long> store : storeSeqIds.entrySet()) { FlushedStoreSequenceId.Builder flushedStoreSequenceIdBuilder = FlushedStoreSequenceId.newBuilder(); flushedStoreSequenceIdBuilder.setFamily(ByteString.copyFrom(store.getKey())); flushedStoreSequenceIdBuilder.setSeqId(store.getValue()); flushedRegionSequenceIdBuilder.addStores(flushedStoreSequenceIdBuilder); } } flushedSequenceIdBuilder.addRegionSequenceId(flushedRegionSequenceIdBuilder); } flushedSequenceIdBuilder.build().writeDelimitedTo(out); } finally { if (out != null) { out.close(); } } } finally { isFlushSeqIdPersistInProgress = false; } } /** * Load last flushed sequence id of each region from HDFS, if persisted */ public void loadLastFlushedSequenceIds() throws IOException { if (!persistFlushedSequenceId) { return; } Configuration conf = master.getConfiguration(); Path rootDir = CommonFSUtils.getRootDir(conf); Path lastFlushedSeqIdPath = new Path(rootDir, LAST_FLUSHED_SEQ_ID_FILE); FileSystem fs = FileSystem.get(conf); if (!fs.exists(lastFlushedSeqIdPath)) { LOG.info("No .lastflushedseqids found at" + lastFlushedSeqIdPath + " will record last flushed sequence id" + " for regions by regionserver report all over again"); return; } else { LOG.info("begin to load .lastflushedseqids at " + lastFlushedSeqIdPath); } FSDataInputStream in = fs.open(lastFlushedSeqIdPath); try { FlushedSequenceId flushedSequenceId = FlushedSequenceId.parseDelimitedFrom(in); if (flushedSequenceId == null) { LOG.info(".lastflushedseqids found at {} is empty", lastFlushedSeqIdPath); return; } for (FlushedRegionSequenceId flushedRegionSequenceId : flushedSequenceId .getRegionSequenceIdList()) { byte[] encodedRegionName = flushedRegionSequenceId .getRegionEncodedName().toByteArray(); flushedSequenceIdByRegion .putIfAbsent(encodedRegionName, flushedRegionSequenceId.getSeqId()); if (flushedRegionSequenceId.getStoresList() != null && flushedRegionSequenceId.getStoresList().size() != 0) { ConcurrentNavigableMap<byte[], Long> storeFlushedSequenceId = computeIfAbsent(storeFlushedSequenceIdsByRegion, encodedRegionName, () -> new 
ConcurrentSkipListMap<>(Bytes.BYTES_COMPARATOR)); for (FlushedStoreSequenceId flushedStoreSequenceId : flushedRegionSequenceId .getStoresList()) { storeFlushedSequenceId .put(flushedStoreSequenceId.getFamily().toByteArray(), flushedStoreSequenceId.getSeqId()); } } } } finally { in.close(); } } /** * Regions may have been removed between latest persist of FlushedSequenceIds * and master abort. So after loading FlushedSequenceIds from file, and after * meta loaded, we need to remove the deleted region according to RegionStates. */ public void removeDeletedRegionFromLoadedFlushedSequenceIds() { RegionStates regionStates = master.getAssignmentManager().getRegionStates(); Iterator<byte[]> it = flushedSequenceIdByRegion.keySet().iterator(); while(it.hasNext()) { byte[] regionEncodedName = it.next(); if (regionStates.getRegionState(Bytes.toStringBinary(regionEncodedName)) == null) { it.remove(); storeFlushedSequenceIdsByRegion.remove(regionEncodedName); } } } private class FlushedSequenceIdFlusher extends ScheduledChore { public FlushedSequenceIdFlusher(String name, int p) { super(name, master, p, 60 * 1000); //delay one minute before first execute } @Override protected void chore() { try { persistRegionLastFlushedSequenceIds(); } catch (IOException e) { LOG.debug("Failed to persist last flushed sequence id of regions" + " to file system", e); } } } }
hbase-server/src/main/java/org/apache/hadoop/hbase/master/ServerManager.java
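As a side note on the configuration surface defined in this class: the keys and defaults below are taken from the constants and the constructor in ServerManager above, but the snippet itself is only an illustrative sketch (the class name and the chosen values are made up), showing how those settings could be placed on a Hadoop Configuration.

// Illustrative sketch only: wiring the ServerManager-related keys onto a Configuration.
import org.apache.hadoop.conf.Configuration;

public class ServerManagerConfigSketch {
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Persist flushed sequence ids to HDFS so log split is faster after a master restart
        // (see HBASE-20727); the default in ServerManager is true.
        conf.setBoolean("hbase.master.persist.flushedsequenceid.enabled", true);
        // Interval of the FlushedSequenceIdFlusher chore; the default above is 3 hours.
        conf.setInt("hbase.master.flushedsequenceid.flusher.interval", 3 * 60 * 60 * 1000);
        // Clock-skew limits used by checkClockSkew(): reject beyond maxclockskew,
        // warn beyond warningclockskew (defaults 30000 ms and 10000 ms).
        conf.setLong("hbase.master.maxclockskew", 30000L);
        conf.setLong("hbase.master.warningclockskew", 10000L);
        // Startup friction read by waitForRegionServers(); the values here are examples only.
        conf.setInt("hbase.master.wait.on.regionservers.mintostart", 2);
        conf.setLong("hbase.master.wait.on.regionservers.timeout", 4500L);
        conf.setLong("hbase.master.wait.on.regionservers.interval", 1500L);
        System.out.println("max clock skew ms = " + conf.getLong("hbase.master.maxclockskew", 30000L));
    }
}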
/* * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.master; import static org.apache.hadoop.hbase.util.ConcurrentMapUtils.computeIfAbsent; import java.io.IOException; import java.net.InetAddress; import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.concurrent.ConcurrentNavigableMap; import java.util.concurrent.ConcurrentSkipListMap; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.Predicate; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.ClockOutOfSyncException; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.NotServingRegionException; import org.apache.hadoop.hbase.RegionMetrics; import org.apache.hadoop.hbase.ScheduledChore; import org.apache.hadoop.hbase.ServerMetrics; import org.apache.hadoop.hbase.ServerMetricsBuilder; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.YouAreDeadException; import org.apache.hadoop.hbase.client.AsyncClusterConnection; import org.apache.hadoop.hbase.client.AsyncRegionServerAdmin; import org.apache.hadoop.hbase.client.RegionInfo; import org.apache.hadoop.hbase.ipc.RemoteWithExtrasException; import org.apache.hadoop.hbase.master.assignment.RegionStates; import org.apache.hadoop.hbase.master.procedure.ServerCrashProcedure; import org.apache.hadoop.hbase.monitoring.MonitoredTask; import org.apache.hadoop.hbase.procedure2.Procedure; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.CommonFSUtils; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.FutureUtils; import org.apache.hadoop.hbase.zookeeper.ZKUtil; import org.apache.hadoop.hbase.zookeeper.ZKWatcher; import org.apache.hadoop.hbase.zookeeper.ZNodePaths; import org.apache.yetus.audience.InterfaceAudience; import org.apache.zookeeper.KeeperException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hbase.thirdparty.com.google.protobuf.ByteString; import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter; import org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionStoreSequenceIds; import org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId; import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.FlushedRegionSequenceId; import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.FlushedSequenceId; import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.FlushedStoreSequenceId; import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest; /** * The ServerManager class manages info about region servers. * <p> * Maintains lists of online and dead servers. Processes the startups, * shutdowns, and deaths of region servers. * <p> * Servers are distinguished in two different ways. A given server has a * location, specified by hostname and port, and of which there can only be one * online at any given time. A server instance is specified by the location * (hostname and port) as well as the startcode (timestamp from when the server * was started). This is used to differentiate a restarted instance of a given * server from the original instance. * <p> * If a sever is known not to be running any more, it is called dead. The dead * server needs to be handled by a ServerShutdownHandler. If the handler is not * enabled yet, the server can't be handled right away so it is queued up. * After the handler is enabled, the server will be submitted to a handler to handle. * However, the handler may be just partially enabled. If so, * the server cannot be fully processed, and be queued up for further processing. * A server is fully processed only after the handler is fully enabled * and has completed the handling. */ @InterfaceAudience.Private public class ServerManager { public static final String WAIT_ON_REGIONSERVERS_MAXTOSTART = "hbase.master.wait.on.regionservers.maxtostart"; public static final String WAIT_ON_REGIONSERVERS_MINTOSTART = "hbase.master.wait.on.regionservers.mintostart"; public static final String WAIT_ON_REGIONSERVERS_TIMEOUT = "hbase.master.wait.on.regionservers.timeout"; public static final String WAIT_ON_REGIONSERVERS_INTERVAL = "hbase.master.wait.on.regionservers.interval"; /** * see HBASE-20727 * if set to true, flushedSequenceIdByRegion and storeFlushedSequenceIdsByRegion * will be persisted to HDFS and loaded when master restart to speed up log split */ public static final String PERSIST_FLUSHEDSEQUENCEID = "hbase.master.persist.flushedsequenceid.enabled"; public static final boolean PERSIST_FLUSHEDSEQUENCEID_DEFAULT = true; public static final String FLUSHEDSEQUENCEID_FLUSHER_INTERVAL = "hbase.master.flushedsequenceid.flusher.interval"; public static final int FLUSHEDSEQUENCEID_FLUSHER_INTERVAL_DEFAULT = 3 * 60 * 60 * 1000; // 3 hours public static final String MAX_CLOCK_SKEW_MS = "hbase.master.maxclockskew"; private static final Logger LOG = LoggerFactory.getLogger(ServerManager.class); // Set if we are to shutdown the cluster. private AtomicBoolean clusterShutdown = new AtomicBoolean(false); /** * The last flushed sequence id for a region. */ private final ConcurrentNavigableMap<byte[], Long> flushedSequenceIdByRegion = new ConcurrentSkipListMap<>(Bytes.BYTES_COMPARATOR); private boolean persistFlushedSequenceId = true; private volatile boolean isFlushSeqIdPersistInProgress = false; /** File on hdfs to store last flushed sequence id of regions */ private static final String LAST_FLUSHED_SEQ_ID_FILE = ".lastflushedseqids"; private FlushedSequenceIdFlusher flushedSeqIdFlusher; /** * The last flushed sequence id for a store in a region. 
*/ private final ConcurrentNavigableMap<byte[], ConcurrentNavigableMap<byte[], Long>> storeFlushedSequenceIdsByRegion = new ConcurrentSkipListMap<>(Bytes.BYTES_COMPARATOR); /** Map of registered servers to their current load */ private final ConcurrentNavigableMap<ServerName, ServerMetrics> onlineServers = new ConcurrentSkipListMap<>(); /** List of region servers that should not get any more new regions. */ private final ArrayList<ServerName> drainingServers = new ArrayList<>(); private final MasterServices master; private final DeadServer deadservers = new DeadServer(); private final long maxSkew; private final long warningSkew; /** Listeners that are called on server events. */ private List<ServerListener> listeners = new CopyOnWriteArrayList<>(); /** * Constructor. */ public ServerManager(final MasterServices master) { this.master = master; Configuration c = master.getConfiguration(); maxSkew = c.getLong(MAX_CLOCK_SKEW_MS, 30000); warningSkew = c.getLong("hbase.master.warningclockskew", 10000); persistFlushedSequenceId = c.getBoolean(PERSIST_FLUSHEDSEQUENCEID, PERSIST_FLUSHEDSEQUENCEID_DEFAULT); } /** * Add the listener to the notification list. * @param listener The ServerListener to register */ public void registerListener(final ServerListener listener) { this.listeners.add(listener); } /** * Remove the listener from the notification list. * @param listener The ServerListener to unregister */ public boolean unregisterListener(final ServerListener listener) { return this.listeners.remove(listener); } /** * Let the server manager know a new regionserver has come online * @param request the startup request * @param versionNumber the version number of the new regionserver * @param version the version of the new regionserver, could contain strings like "SNAPSHOT" * @param ia the InetAddress from which request is received * @return The ServerName we know this server as. * @throws IOException */ ServerName regionServerStartup(RegionServerStartupRequest request, int versionNumber, String version, InetAddress ia) throws IOException { // Test for case where we get a region startup message from a regionserver // that has been quickly restarted but whose znode expiration handler has // not yet run, or from a server whose fail we are currently processing. // Test its host+port combo is present in serverAddressToServerInfo. If it // is, reject the server and trigger its expiration. The next time it comes // in, it should have been removed from serverAddressToServerInfo and queued // for processing by ProcessServerShutdown. final String hostname = request.hasUseThisHostnameInstead() ? 
request.getUseThisHostnameInstead() : ia.getHostName(); ServerName sn = ServerName.valueOf(hostname, request.getPort(), request.getServerStartCode()); checkClockSkew(sn, request.getServerCurrentTime()); checkIsDead(sn, "STARTUP"); if (!checkAndRecordNewServer(sn, ServerMetricsBuilder.of(sn, versionNumber, version))) { LOG.warn( "THIS SHOULD NOT HAPPEN, RegionServerStartup" + " could not record the server: " + sn); } return sn; } /** * Updates last flushed sequence Ids for the regions on server sn * @param sn * @param hsl */ private void updateLastFlushedSequenceIds(ServerName sn, ServerMetrics hsl) { for (Entry<byte[], RegionMetrics> entry : hsl.getRegionMetrics().entrySet()) { byte[] encodedRegionName = Bytes.toBytes(RegionInfo.encodeRegionName(entry.getKey())); Long existingValue = flushedSequenceIdByRegion.get(encodedRegionName); long l = entry.getValue().getCompletedSequenceId(); // Don't let smaller sequence ids override greater sequence ids. if (LOG.isTraceEnabled()) { LOG.trace(Bytes.toString(encodedRegionName) + ", existingValue=" + existingValue + ", completeSequenceId=" + l); } if (existingValue == null || (l != HConstants.NO_SEQNUM && l > existingValue)) { flushedSequenceIdByRegion.put(encodedRegionName, l); } else if (l != HConstants.NO_SEQNUM && l < existingValue) { LOG.warn("RegionServer " + sn + " indicates a last flushed sequence id (" + l + ") that is less than the previous last flushed sequence id (" + existingValue + ") for region " + Bytes.toString(entry.getKey()) + " Ignoring."); } ConcurrentNavigableMap<byte[], Long> storeFlushedSequenceId = computeIfAbsent(storeFlushedSequenceIdsByRegion, encodedRegionName, () -> new ConcurrentSkipListMap<>(Bytes.BYTES_COMPARATOR)); for (Entry<byte[], Long> storeSeqId : entry.getValue().getStoreSequenceId().entrySet()) { byte[] family = storeSeqId.getKey(); existingValue = storeFlushedSequenceId.get(family); l = storeSeqId.getValue(); if (LOG.isTraceEnabled()) { LOG.trace(Bytes.toString(encodedRegionName) + ", family=" + Bytes.toString(family) + ", existingValue=" + existingValue + ", completeSequenceId=" + l); } // Don't let smaller sequence ids override greater sequence ids. if (existingValue == null || (l != HConstants.NO_SEQNUM && l > existingValue.longValue())) { storeFlushedSequenceId.put(family, l); } } } } public void regionServerReport(ServerName sn, ServerMetrics sl) throws YouAreDeadException { checkIsDead(sn, "REPORT"); if (null == this.onlineServers.replace(sn, sl)) { // Already have this host+port combo and its just different start code? // Just let the server in. Presume master joining a running cluster. // recordNewServer is what happens at the end of reportServerStartup. // The only thing we are skipping is passing back to the regionserver // the ServerName to use. Here we presume a master has already done // that so we'll press on with whatever it gave us for ServerName. if (!checkAndRecordNewServer(sn, sl)) { LOG.info("RegionServerReport ignored, could not record the server: " + sn); return; // Not recorded, so no need to move on } } updateLastFlushedSequenceIds(sn, sl); } /** * Check is a server of same host and port already exists, * if not, or the existed one got a smaller start code, record it. 
* * @param serverName the server to check and record * @param sl the server load on the server * @return true if the server is recorded, otherwise, false */ boolean checkAndRecordNewServer(final ServerName serverName, final ServerMetrics sl) { ServerName existingServer = null; synchronized (this.onlineServers) { existingServer = findServerWithSameHostnamePortWithLock(serverName); if (existingServer != null && (existingServer.getStartcode() > serverName.getStartcode())) { LOG.info("Server serverName=" + serverName + " rejected; we already have " + existingServer.toString() + " registered with same hostname and port"); return false; } recordNewServerWithLock(serverName, sl); } // Tell our listeners that a server was added if (!this.listeners.isEmpty()) { for (ServerListener listener : this.listeners) { listener.serverAdded(serverName); } } // Note that we assume that same ts means same server, and don't expire in that case. // TODO: ts can theoretically collide due to clock shifts, so this is a bit hacky. if (existingServer != null && (existingServer.getStartcode() < serverName.getStartcode())) { LOG.info("Triggering server recovery; existingServer " + existingServer + " looks stale, new server:" + serverName); expireServer(existingServer); } return true; } /** * Find out the region servers crashed between the crash of the previous master instance and the * current master instance and schedule SCP for them. * <p/> * Since the {@code RegionServerTracker} has already helped us to construct the online servers set * by scanning zookeeper, now we can compare the online servers with {@code liveServersFromWALDir} * to find out whether there are servers which are already dead. * <p/> * Must be called inside the initialization method of {@code RegionServerTracker} to avoid * concurrency issue. * @param deadServersFromPE the region servers which already have a SCP associated. * @param liveServersFromWALDir the live region servers from wal directory. */ void findDeadServersAndProcess(Set<ServerName> deadServersFromPE, Set<ServerName> liveServersFromWALDir) { deadServersFromPE.forEach(deadservers::putIfAbsent); liveServersFromWALDir.stream().filter(sn -> !onlineServers.containsKey(sn)) .forEach(this::expireServer); } /** * Checks if the clock skew between the server and the master. If the clock skew exceeds the * configured max, it will throw an exception; if it exceeds the configured warning threshold, * it will log a warning but start normally. * @param serverName Incoming servers's name * @param serverCurrentTime * @throws ClockOutOfSyncException if the skew exceeds the configured max value */ private void checkClockSkew(final ServerName serverName, final long serverCurrentTime) throws ClockOutOfSyncException { long skew = Math.abs(EnvironmentEdgeManager.currentTime() - serverCurrentTime); if (skew > maxSkew) { String message = "Server " + serverName + " has been " + "rejected; Reported time is too far out of sync with master. " + "Time difference of " + skew + "ms > max allowed of " + maxSkew + "ms"; LOG.warn(message); throw new ClockOutOfSyncException(message); } else if (skew > warningSkew){ String message = "Reported time for server " + serverName + " is out of sync with master " + "by " + skew + "ms. (Warning threshold is " + warningSkew + "ms; " + "error threshold is " + maxSkew + "ms)"; LOG.warn(message); } } /** * Called when RegionServer first reports in for duty and thereafter each * time it heartbeats to make sure it is has not been figured for dead. 
* If this server is on the dead list, reject it with a YouAreDeadException. * If it was dead but came back with a new start code, remove the old entry * from the dead list. * @param what START or REPORT */ private void checkIsDead(final ServerName serverName, final String what) throws YouAreDeadException { if (this.deadservers.isDeadServer(serverName)) { // Exact match: host name, port and start code all match with existing one of the // dead servers. So, this server must be dead. Tell it to kill itself. String message = "Server " + what + " rejected; currently processing " + serverName + " as dead server"; LOG.debug(message); throw new YouAreDeadException(message); } // Remove dead server with same hostname and port of newly checking in rs after master // initialization. See HBASE-5916 for more information. if ((this.master == null || this.master.isInitialized()) && this.deadservers.cleanPreviousInstance(serverName)) { // This server has now become alive after we marked it as dead. // We removed it's previous entry from the dead list to reflect it. LOG.debug("{} {} came back up, removed it from the dead servers list", what, serverName); } } /** * Assumes onlineServers is locked. * @return ServerName with matching hostname and port. */ private ServerName findServerWithSameHostnamePortWithLock( final ServerName serverName) { ServerName end = ServerName.valueOf(serverName.getHostname(), serverName.getPort(), Long.MAX_VALUE); ServerName r = onlineServers.lowerKey(end); if (r != null) { if (ServerName.isSameAddress(r, serverName)) { return r; } } return null; } /** * Adds the onlineServers list. onlineServers should be locked. * @param serverName The remote servers name. */ void recordNewServerWithLock(final ServerName serverName, final ServerMetrics sl) { LOG.info("Registering regionserver=" + serverName); this.onlineServers.put(serverName, sl); } public ConcurrentNavigableMap<byte[], Long> getFlushedSequenceIdByRegion() { return flushedSequenceIdByRegion; } public RegionStoreSequenceIds getLastFlushedSequenceId(byte[] encodedRegionName) { RegionStoreSequenceIds.Builder builder = RegionStoreSequenceIds.newBuilder(); Long seqId = flushedSequenceIdByRegion.get(encodedRegionName); builder.setLastFlushedSequenceId(seqId != null ? seqId.longValue() : HConstants.NO_SEQNUM); Map<byte[], Long> storeFlushedSequenceId = storeFlushedSequenceIdsByRegion.get(encodedRegionName); if (storeFlushedSequenceId != null) { for (Map.Entry<byte[], Long> entry : storeFlushedSequenceId.entrySet()) { builder.addStoreSequenceId(StoreSequenceId.newBuilder() .setFamilyName(UnsafeByteOperations.unsafeWrap(entry.getKey())) .setSequenceId(entry.getValue().longValue()).build()); } } return builder.build(); } /** * @param serverName * @return ServerMetrics if serverName is known else null */ public ServerMetrics getLoad(final ServerName serverName) { return this.onlineServers.get(serverName); } /** * Compute the average load across all region servers. * Currently, this uses a very naive computation - just uses the number of * regions being served, ignoring stats about number of requests. * @return the average load */ public double getAverageLoad() { int totalLoad = 0; int numServers = 0; for (ServerMetrics sl : this.onlineServers.values()) { numServers++; totalLoad += sl.getRegionMetrics().size(); } return numServers == 0 ? 
0 : (double)totalLoad / (double)numServers; } /** @return the count of active regionservers */ public int countOfRegionServers() { // Presumes onlineServers is a concurrent map return this.onlineServers.size(); } /** * @return Read-only map of servers to serverinfo */ public Map<ServerName, ServerMetrics> getOnlineServers() { // Presumption is that iterating the returned Map is OK. synchronized (this.onlineServers) { return Collections.unmodifiableMap(this.onlineServers); } } public DeadServer getDeadServers() { return this.deadservers; } /** * Checks if any dead servers are currently in progress. * @return true if any RS are being processed as dead, false if not */ public boolean areDeadServersInProgress() throws IOException { return master.getProcedures().stream() .anyMatch(p -> !p.isFinished() && p instanceof ServerCrashProcedure); } void letRegionServersShutdown() { long previousLogTime = 0; ServerName sn = master.getServerName(); ZKWatcher zkw = master.getZooKeeper(); int onlineServersCt; while ((onlineServersCt = onlineServers.size()) > 0){ if (EnvironmentEdgeManager.currentTime() > (previousLogTime + 1000)) { Set<ServerName> remainingServers = onlineServers.keySet(); synchronized (onlineServers) { if (remainingServers.size() == 1 && remainingServers.contains(sn)) { // Master will delete itself later. return; } } StringBuilder sb = new StringBuilder(); // It's ok here to not sync on onlineServers - merely logging for (ServerName key : remainingServers) { if (sb.length() > 0) { sb.append(", "); } sb.append(key); } LOG.info("Waiting on regionserver(s) " + sb.toString()); previousLogTime = EnvironmentEdgeManager.currentTime(); } try { List<String> servers = getRegionServersInZK(zkw); if (servers == null || servers.isEmpty() || (servers.size() == 1 && servers.contains(sn.toString()))) { LOG.info("ZK shows there is only the master self online, exiting now"); // Master could have lost some ZK events, no need to wait more. break; } } catch (KeeperException ke) { LOG.warn("Failed to list regionservers", ke); // ZK is malfunctioning, don't hang here break; } synchronized (onlineServers) { try { if (onlineServersCt == onlineServers.size()) onlineServers.wait(100); } catch (InterruptedException ignored) { // continue } } } } private List<String> getRegionServersInZK(final ZKWatcher zkw) throws KeeperException { return ZKUtil.listChildrenNoWatch(zkw, zkw.getZNodePaths().rsZNode); } /** * Expire the passed server. Add it to list of dead servers and queue a shutdown processing. * @return pid if we queued a ServerCrashProcedure else {@link Procedure#NO_PROC_ID} if we did * not (could happen for many reasons including the fact that its this server that is * going down or we already have queued an SCP for this server or SCP processing is * currently disabled because we are in startup phase). */ // Redo test so we can make this protected. public synchronized long expireServer(final ServerName serverName) { return expireServer(serverName, false); } synchronized long expireServer(final ServerName serverName, boolean force) { // THIS server is going down... can't handle our own expiration. 
if (serverName.equals(master.getServerName())) { if (!(master.isAborted() || master.isStopped())) { master.stop("We lost our znode?"); } return Procedure.NO_PROC_ID; } if (this.deadservers.isDeadServer(serverName)) { LOG.warn("Expiration called on {} but already in DeadServer", serverName); return Procedure.NO_PROC_ID; } moveFromOnlineToDeadServers(serverName); // If server is in draining mode, remove corresponding znode // In some tests, the mocked HM may not have ZK Instance, hence null check if (master.getZooKeeper() != null) { String drainingZnode = ZNodePaths .joinZNode(master.getZooKeeper().getZNodePaths().drainingZNode, serverName.getServerName()); try { ZKUtil.deleteNodeFailSilent(master.getZooKeeper(), drainingZnode); } catch (KeeperException e) { LOG.warn("Error deleting the draining znode for stopping server " + serverName.getServerName(), e); } } // If cluster is going down, yes, servers are going to be expiring; don't // process as a dead server if (isClusterShutdown()) { LOG.info("Cluster shutdown set; " + serverName + " expired; onlineServers=" + this.onlineServers.size()); if (this.onlineServers.isEmpty()) { master.stop("Cluster shutdown set; onlineServer=0"); } return Procedure.NO_PROC_ID; } LOG.info("Processing expiration of " + serverName + " on " + this.master.getServerName()); long pid = master.getAssignmentManager().submitServerCrash(serverName, true, force); // Tell our listeners that a server was removed if (!this.listeners.isEmpty()) { this.listeners.stream().forEach(l -> l.serverRemoved(serverName)); } // trigger a persist of flushedSeqId if (flushedSeqIdFlusher != null) { flushedSeqIdFlusher.triggerNow(); } return pid; } /** * Called when server has expired. */ // Locking in this class needs cleanup. public synchronized void moveFromOnlineToDeadServers(final ServerName sn) { synchronized (this.onlineServers) { boolean online = this.onlineServers.containsKey(sn); if (online) { // Remove the server from the known servers lists and update load info BUT // add to deadservers first; do this so it'll show in dead servers list if // not in online servers list. this.deadservers.putIfAbsent(sn); this.onlineServers.remove(sn); onlineServers.notifyAll(); } else { // If not online, that is odd but may happen if 'Unknown Servers' -- where meta // has references to servers not online nor in dead servers list. If // 'Unknown Server', don't add to DeadServers else will be there for ever. LOG.trace("Expiration of {} but server not online", sn); } } } /* * Remove the server from the drain list. */ public synchronized boolean removeServerFromDrainList(final ServerName sn) { // Warn if the server (sn) is not online. ServerName is of the form: // <hostname> , <port> , <startcode> if (!this.isServerOnline(sn)) { LOG.warn("Server " + sn + " is not currently online. " + "Removing from draining list anyway, as requested."); } // Remove the server from the draining servers lists. return this.drainingServers.remove(sn); } /** * Add the server to the drain list. * @param sn * @return True if the server is added or the server is already on the drain list. */ public synchronized boolean addServerToDrainList(final ServerName sn) { // Warn if the server (sn) is not online. ServerName is of the form: // <hostname> , <port> , <startcode> if (!this.isServerOnline(sn)) { LOG.warn("Server " + sn + " is not currently online. " + "Ignoring request to add it to draining list."); return false; } // Add the server to the draining servers lists, if it's not already in // it. 
if (this.drainingServers.contains(sn)) { LOG.warn("Server " + sn + " is already in the draining server list." + "Ignoring request to add it again."); return true; } LOG.info("Server " + sn + " added to draining server list."); return this.drainingServers.add(sn); } /** * Contacts a region server and waits up to timeout ms * to close the region. This bypasses the active hmaster. * Pass -1 as timeout if you do not want to wait on result. */ public static void closeRegionSilentlyAndWait(AsyncClusterConnection connection, ServerName server, RegionInfo region, long timeout) throws IOException, InterruptedException { AsyncRegionServerAdmin admin = connection.getRegionServerAdmin(server); try { FutureUtils.get( admin.closeRegion(ProtobufUtil.buildCloseRegionRequest(server, region.getRegionName()))); } catch (IOException e) { LOG.warn("Exception when closing region: " + region.getRegionNameAsString(), e); } if (timeout < 0) { return; } long expiration = timeout + EnvironmentEdgeManager.currentTime(); while (EnvironmentEdgeManager.currentTime() < expiration) { try { RegionInfo rsRegion = ProtobufUtil.toRegionInfo(FutureUtils .get( admin.getRegionInfo(RequestConverter.buildGetRegionInfoRequest(region.getRegionName()))) .getRegionInfo()); if (rsRegion == null) { return; } } catch (IOException ioe) { if (ioe instanceof NotServingRegionException || (ioe instanceof RemoteWithExtrasException && ((RemoteWithExtrasException)ioe).unwrapRemoteException() instanceof NotServingRegionException)) { // no need to retry again return; } LOG.warn("Exception when retrieving regioninfo from: " + region.getRegionNameAsString(), ioe); } Thread.sleep(1000); } throw new IOException("Region " + region + " failed to close within" + " timeout " + timeout); } /** * Calculate min necessary to start. This is not an absolute. It is just * a friction that will cause us hang around a bit longer waiting on * RegionServers to check-in. */ private int getMinToStart() { if (master.isInMaintenanceMode()) { // If in maintenance mode, then master hosting meta will be the only server available return 1; } int minimumRequired = 1; int minToStart = this.master.getConfiguration().getInt(WAIT_ON_REGIONSERVERS_MINTOSTART, -1); // Ensure we are never less than minimumRequired else stuff won't work. return Math.max(minToStart, minimumRequired); } /** * Wait for the region servers to report in. * We will wait until one of this condition is met: * - the master is stopped * - the 'hbase.master.wait.on.regionservers.maxtostart' number of * region servers is reached * - the 'hbase.master.wait.on.regionservers.mintostart' is reached AND * there have been no new region server in for * 'hbase.master.wait.on.regionservers.interval' time AND * the 'hbase.master.wait.on.regionservers.timeout' is reached * * @throws InterruptedException */ public void waitForRegionServers(MonitoredTask status) throws InterruptedException { final long interval = this.master.getConfiguration(). getLong(WAIT_ON_REGIONSERVERS_INTERVAL, 1500); final long timeout = this.master.getConfiguration(). getLong(WAIT_ON_REGIONSERVERS_TIMEOUT, 4500); // Min is not an absolute; just a friction making us wait longer on server checkin. int minToStart = getMinToStart(); int maxToStart = this.master.getConfiguration(). 
getInt(WAIT_ON_REGIONSERVERS_MAXTOSTART, Integer.MAX_VALUE); if (maxToStart < minToStart) { LOG.warn(String.format("The value of '%s' (%d) is set less than '%s' (%d), ignoring.", WAIT_ON_REGIONSERVERS_MAXTOSTART, maxToStart, WAIT_ON_REGIONSERVERS_MINTOSTART, minToStart)); maxToStart = Integer.MAX_VALUE; } long now = EnvironmentEdgeManager.currentTime(); final long startTime = now; long slept = 0; long lastLogTime = 0; long lastCountChange = startTime; int count = countOfRegionServers(); int oldCount = 0; // This while test is a little hard to read. We try to comment it in below but in essence: // Wait if Master is not stopped and the number of regionservers that have checked-in is // less than the maxToStart. Both of these conditions will be true near universally. // Next, we will keep cycling if ANY of the following three conditions are true: // 1. The time since a regionserver registered is < interval (means servers are actively checking in). // 2. We are under the total timeout. // 3. The count of servers is < minimum. for (ServerListener listener: this.listeners) { listener.waiting(); } while (!this.master.isStopped() && !isClusterShutdown() && count < maxToStart && ((lastCountChange + interval) > now || timeout > slept || count < minToStart)) { // Log some info at every interval time or if there is a change if (oldCount != count || lastLogTime + interval < now) { lastLogTime = now; String msg = "Waiting on regionserver count=" + count + "; waited="+ slept + "ms, expecting min=" + minToStart + " server(s), max="+ getStrForMax(maxToStart) + " server(s), " + "timeout=" + timeout + "ms, lastChange=" + (lastCountChange - now) + "ms"; LOG.info(msg); status.setStatus(msg); } // We sleep for some time final long sleepTime = 50; Thread.sleep(sleepTime); now = EnvironmentEdgeManager.currentTime(); slept = now - startTime; oldCount = count; count = countOfRegionServers(); if (count != oldCount) { lastCountChange = now; } } // Did we exit the loop because cluster is going down? if (isClusterShutdown()) { this.master.stop("Cluster shutdown"); } LOG.info("Finished waiting on RegionServer count=" + count + "; waited=" + slept + "ms," + " expected min=" + minToStart + " server(s), max=" + getStrForMax(maxToStart) + " server(s),"+ " master is "+ (this.master.isStopped() ? "stopped.": "running")); } private String getStrForMax(final int max) { return max == Integer.MAX_VALUE? "NO_LIMIT": Integer.toString(max); } /** * @return A copy of the internal list of online servers. */ public List<ServerName> getOnlineServersList() { // TODO: optimize the load balancer call so we don't need to make a new list // TODO: FIX. THIS IS POPULAR CALL. return new ArrayList<>(this.onlineServers.keySet()); } /** * @param keys The target server name * @param idleServerPredicator Evaluates the server on the given load * @return A copy of the internal list of online servers matched by the predicator */ public List<ServerName> getOnlineServersListWithPredicator(List<ServerName> keys, Predicate<ServerMetrics> idleServerPredicator) { List<ServerName> names = new ArrayList<>(); if (keys != null && idleServerPredicator != null) { keys.forEach(name -> { ServerMetrics load = onlineServers.get(name); if (load != null) { if (idleServerPredicator.test(load)) { names.add(name); } } }); } return names; } /** * @return A copy of the internal list of draining servers. 
*/ public List<ServerName> getDrainingServersList() { return new ArrayList<>(this.drainingServers); } public boolean isServerOnline(ServerName serverName) { return serverName != null && onlineServers.containsKey(serverName); } public enum ServerLiveState { LIVE, DEAD, UNKNOWN } /** * @return whether the server is online, dead, or unknown. */ public synchronized ServerLiveState isServerKnownAndOnline(ServerName serverName) { return onlineServers.containsKey(serverName) ? ServerLiveState.LIVE : (deadservers.isDeadServer(serverName) ? ServerLiveState.DEAD : ServerLiveState.UNKNOWN); } /** * Check if a server is known to be dead. A server can be online, * or known to be dead, or unknown to this manager (i.e, not online, * not known to be dead either; it is simply not tracked by the * master any more, for example, a very old previous instance). */ public synchronized boolean isServerDead(ServerName serverName) { return serverName == null || deadservers.isDeadServer(serverName); } /** * Check if a server is unknown. A server can be online, * or known to be dead, or unknown to this manager (i.e, not online, * not known to be dead either; it is simply not tracked by the * master any more, for example, a very old previous instance). */ public boolean isServerUnknown(ServerName serverName) { return serverName == null || (!onlineServers.containsKey(serverName) && !deadservers.isDeadServer(serverName)); } public void shutdownCluster() { String statusStr = "Cluster shutdown requested of master=" + this.master.getServerName(); LOG.info(statusStr); this.clusterShutdown.set(true); if (onlineServers.isEmpty()) { // we do not synchronize here so this may cause a double stop, but not a big deal master.stop("OnlineServer=0 right after cluster shutdown set"); } } public boolean isClusterShutdown() { return this.clusterShutdown.get(); } /** * start chore in ServerManager */ public void startChore() { Configuration c = master.getConfiguration(); if (persistFlushedSequenceId) { new Thread(() -> { // after AM#loadMeta, RegionStates should be loaded, and some regions are // deleted by drop/split/merge during removeDeletedRegionFromLoadedFlushedSequenceIds, // but these deleted regions are not added back to RegionStates, // so we can safely remove deleted regions. removeDeletedRegionFromLoadedFlushedSequenceIds(); }, "RemoveDeletedRegionSyncThread").start(); int flushPeriod = c.getInt(FLUSHEDSEQUENCEID_FLUSHER_INTERVAL, FLUSHEDSEQUENCEID_FLUSHER_INTERVAL_DEFAULT); flushedSeqIdFlusher = new FlushedSequenceIdFlusher( "FlushedSequenceIdFlusher", flushPeriod); master.getChoreService().scheduleChore(flushedSeqIdFlusher); } } /** * Stop the ServerManager. */ public void stop() { if (flushedSeqIdFlusher != null) { flushedSeqIdFlusher.shutdown(); } if (persistFlushedSequenceId) { try { persistRegionLastFlushedSequenceIds(); } catch (IOException e) { LOG.warn("Failed to persist last flushed sequence id of regions" + " to file system", e); } } } /** * Creates a list of possible destinations for a region. It contains the online servers, but not * the draining or dying servers. * @param serversToExclude can be null if there is no server to exclude */ public List<ServerName> createDestinationServersList(final List<ServerName> serversToExclude) { Set<ServerName> destServers = new HashSet<>(); onlineServers.forEach((sn, sm) -> { if (sm.getLastReportTimestamp() > 0) { // This means we have already called regionServerReport at leaset once, then let's include // this server for region assignment. 
This is an optimization to avoid assigning regions to // an uninitialized server. See HBASE-25032 for more details. destServers.add(sn); } }); if (serversToExclude != null) { destServers.removeAll(serversToExclude); } // Loop through the draining server list and remove them from the server list final List<ServerName> drainingServersCopy = getDrainingServersList(); destServers.removeAll(drainingServersCopy); return new ArrayList<>(destServers); } /** * Calls {@link #createDestinationServersList} without server to exclude. */ public List<ServerName> createDestinationServersList(){ return createDestinationServersList(null); } /** * To clear any dead server with same host name and port of any online server */ void clearDeadServersWithSameHostNameAndPortOfOnlineServer() { for (ServerName serverName : getOnlineServersList()) { deadservers.cleanAllPreviousInstances(serverName); } } /** * Called by delete table and similar to notify the ServerManager that a region was removed. */ public void removeRegion(final RegionInfo regionInfo) { final byte[] encodedName = regionInfo.getEncodedNameAsBytes(); storeFlushedSequenceIdsByRegion.remove(encodedName); flushedSequenceIdByRegion.remove(encodedName); } public boolean isRegionInServerManagerStates(final RegionInfo hri) { final byte[] encodedName = hri.getEncodedNameAsBytes(); return (storeFlushedSequenceIdsByRegion.containsKey(encodedName) || flushedSequenceIdByRegion.containsKey(encodedName)); } /** * Called by delete table and similar to notify the ServerManager that a region was removed. */ public void removeRegions(final List<RegionInfo> regions) { for (RegionInfo hri: regions) { removeRegion(hri); } } /** * May return 0 when server is not online. */ public int getVersionNumber(ServerName serverName) { ServerMetrics serverMetrics = onlineServers.get(serverName); return serverMetrics != null ? serverMetrics.getVersionNumber() : 0; } /** * May return "0.0.0" when server is not online */ public String getVersion(ServerName serverName) { ServerMetrics serverMetrics = onlineServers.get(serverName); return serverMetrics != null ? serverMetrics.getVersion() : "0.0.0"; } public int getInfoPort(ServerName serverName) { ServerMetrics serverMetrics = onlineServers.get(serverName); return serverMetrics != null ? 
serverMetrics.getInfoServerPort() : 0; } /** * Persist last flushed sequence id of each region to HDFS * @throws IOException if persit to HDFS fails */ private void persistRegionLastFlushedSequenceIds() throws IOException { if (isFlushSeqIdPersistInProgress) { return; } isFlushSeqIdPersistInProgress = true; try { Configuration conf = master.getConfiguration(); Path rootDir = CommonFSUtils.getRootDir(conf); Path lastFlushedSeqIdPath = new Path(rootDir, LAST_FLUSHED_SEQ_ID_FILE); FileSystem fs = FileSystem.get(conf); if (fs.exists(lastFlushedSeqIdPath)) { LOG.info("Rewriting .lastflushedseqids file at: " + lastFlushedSeqIdPath); if (!fs.delete(lastFlushedSeqIdPath, false)) { throw new IOException("Unable to remove existing " + lastFlushedSeqIdPath); } } else { LOG.info("Writing .lastflushedseqids file at: " + lastFlushedSeqIdPath); } FSDataOutputStream out = fs.create(lastFlushedSeqIdPath); FlushedSequenceId.Builder flushedSequenceIdBuilder = FlushedSequenceId.newBuilder(); try { for (Entry<byte[], Long> entry : flushedSequenceIdByRegion.entrySet()) { FlushedRegionSequenceId.Builder flushedRegionSequenceIdBuilder = FlushedRegionSequenceId.newBuilder(); flushedRegionSequenceIdBuilder.setRegionEncodedName( ByteString.copyFrom(entry.getKey())); flushedRegionSequenceIdBuilder.setSeqId(entry.getValue()); ConcurrentNavigableMap<byte[], Long> storeSeqIds = storeFlushedSequenceIdsByRegion.get(entry.getKey()); if (storeSeqIds != null) { for (Entry<byte[], Long> store : storeSeqIds.entrySet()) { FlushedStoreSequenceId.Builder flushedStoreSequenceIdBuilder = FlushedStoreSequenceId.newBuilder(); flushedStoreSequenceIdBuilder.setFamily(ByteString.copyFrom(store.getKey())); flushedStoreSequenceIdBuilder.setSeqId(store.getValue()); flushedRegionSequenceIdBuilder.addStores(flushedStoreSequenceIdBuilder); } } flushedSequenceIdBuilder.addRegionSequenceId(flushedRegionSequenceIdBuilder); } flushedSequenceIdBuilder.build().writeDelimitedTo(out); } finally { if (out != null) { out.close(); } } } finally { isFlushSeqIdPersistInProgress = false; } } /** * Load last flushed sequence id of each region from HDFS, if persisted */ public void loadLastFlushedSequenceIds() throws IOException { if (!persistFlushedSequenceId) { return; } Configuration conf = master.getConfiguration(); Path rootDir = CommonFSUtils.getRootDir(conf); Path lastFlushedSeqIdPath = new Path(rootDir, LAST_FLUSHED_SEQ_ID_FILE); FileSystem fs = FileSystem.get(conf); if (!fs.exists(lastFlushedSeqIdPath)) { LOG.info("No .lastflushedseqids found at" + lastFlushedSeqIdPath + " will record last flushed sequence id" + " for regions by regionserver report all over again"); return; } else { LOG.info("begin to load .lastflushedseqids at " + lastFlushedSeqIdPath); } FSDataInputStream in = fs.open(lastFlushedSeqIdPath); try { FlushedSequenceId flushedSequenceId = FlushedSequenceId.parseDelimitedFrom(in); if (flushedSequenceId == null) { LOG.info(".lastflushedseqids found at {} is empty", lastFlushedSeqIdPath); return; } for (FlushedRegionSequenceId flushedRegionSequenceId : flushedSequenceId .getRegionSequenceIdList()) { byte[] encodedRegionName = flushedRegionSequenceId .getRegionEncodedName().toByteArray(); flushedSequenceIdByRegion .putIfAbsent(encodedRegionName, flushedRegionSequenceId.getSeqId()); if (flushedRegionSequenceId.getStoresList() != null && flushedRegionSequenceId.getStoresList().size() != 0) { ConcurrentNavigableMap<byte[], Long> storeFlushedSequenceId = computeIfAbsent(storeFlushedSequenceIdsByRegion, encodedRegionName, () -> new 
ConcurrentSkipListMap<>(Bytes.BYTES_COMPARATOR)); for (FlushedStoreSequenceId flushedStoreSequenceId : flushedRegionSequenceId .getStoresList()) { storeFlushedSequenceId .put(flushedStoreSequenceId.getFamily().toByteArray(), flushedStoreSequenceId.getSeqId()); } } } } finally { in.close(); } } /** * Regions may have been removed between latest persist of FlushedSequenceIds * and master abort. So after loading FlushedSequenceIds from file, and after * meta loaded, we need to remove the deleted region according to RegionStates. */ public void removeDeletedRegionFromLoadedFlushedSequenceIds() { RegionStates regionStates = master.getAssignmentManager().getRegionStates(); Iterator<byte[]> it = flushedSequenceIdByRegion.keySet().iterator(); while(it.hasNext()) { byte[] regionEncodedName = it.next(); if (regionStates.getRegionState(Bytes.toStringBinary(regionEncodedName)) == null) { it.remove(); storeFlushedSequenceIdsByRegion.remove(regionEncodedName); } } } private class FlushedSequenceIdFlusher extends ScheduledChore { public FlushedSequenceIdFlusher(String name, int p) { super(name, master, p, 60 * 1000); //delay one minute before first execute } @Override protected void chore() { try { persistRegionLastFlushedSequenceIds(); } catch (IOException e) { LOG.debug("Failed to persist last flushed sequence id of regions" + " to file system", e); } } } }
HBASE-26219 Negative time is logged while waiting on regionservers (#3622)

Signed-off-by: Wei-Chiu Chuang <[email protected]>
Signed-off-by: Peter Somogyi <[email protected]>
Signed-off-by: Duo Zhang <[email protected]>
hbase-server/src/main/java/org/apache/hadoop/hbase/master/ServerManager.java
HBASE-26219 Negative time is logged while waiting on regionservers (#3622)
<ide><path>base-server/src/main/java/org/apache/hadoop/hbase/master/ServerManager.java <ide> lastLogTime = now; <ide> String msg = <ide> "Waiting on regionserver count=" + count + "; waited="+ <del> slept + "ms, expecting min=" + minToStart + " server(s), max="+ getStrForMax(maxToStart) + <del> " server(s), " + "timeout=" + timeout + "ms, lastChange=" + (lastCountChange - now) + "ms"; <add> slept + "ms, expecting min=" + minToStart + " server(s), max=" <add> + getStrForMax(maxToStart) + " server(s), " + "timeout=" + timeout <add> + "ms, lastChange=" + (now - lastCountChange) + "ms"; <ide> LOG.info(msg); <ide> status.setStatus(msg); <ide> }
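The diff above swaps the operands of the elapsed-time calculation: lastCountChange is a timestamp taken in the past, so lastCountChange - now is negative, which is exactly the negative "lastChange" value that was showing up in the log. The short sketch below is illustrative only (it is not HBase code; the class name and System.currentTimeMillis() stand in for the real timer) and just shows why the operand order matters when logging elapsed milliseconds.

// Illustrative only -- not the HBase source. Shows why operand order matters
// when reporting elapsed time computed from millisecond timestamps.
public class ElapsedTimeDemo {
    public static void main(String[] args) throws InterruptedException {
        long lastCountChange = System.currentTimeMillis(); // moment the server count last changed
        Thread.sleep(120);                                 // some time passes
        long now = System.currentTimeMillis();

        // Pre-fix form: a past timestamp minus the current one is negative.
        long wrong = lastCountChange - now;   // e.g. -120
        // Patched form: current time minus the past timestamp is the elapsed time.
        long right = now - lastCountChange;   // e.g. 120

        System.out.println("lastChange=" + wrong + "ms (negative, misleading)");
        System.out.println("lastChange=" + right + "ms (elapsed since last change)");
    }
}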
Java
apache-2.0
e1551a76b23cbe9c549b4b7980eb5a43d3ce65f2
0
JoelBondurant/RandomCodeSamples,JoelBondurant/RandomCodeSamples,JoelBondurant/RandomCodeSamples,JoelBondurant/RandomCodeSamples
package com.sensoranalytics.inspectionmanager.entity; import java.io.Serializable; import java.util.Date; import java.util.HashSet; import java.util.Set; import javax.persistence.*; import javax.validation.constraints.NotNull; import javax.validation.constraints.Null; import javax.validation.constraints.Size; /** * A class to represent wafer events. * * @author Joel Bondurant * @version 2011.0421 * @since 1.0 */ @Entity @Table( name = "WaferEvent", uniqueConstraints = @UniqueConstraint(columnNames = {"TYPE", "DATEANDTIME", "WAFER_ID", "PRODUCT_ID", "OPERATION_ID", "TOOLCHAMBER_ID"}) ) @NamedQueries({ @NamedQuery(name = WaferEvent.FIND_RECENT_BY_DATE, query = "select we from WaferEvent we where we.dateAndTime >= :aDate") }) public class WaferEvent implements BaseEntity, Serializable, Comparable<WaferEvent> { private static final long serialVersionUID = 1L; protected static final String FIND_RECENT_BY_DATE = "WaferEvent.findRecentByDate"; /** * Primary Id. */ @Id @Basic(optional = false) @NotNull @GeneratedValue(strategy = GenerationType.AUTO) private Long id; @Enumerated(EnumType.STRING) @NotNull private Type type; @NotNull @Temporal(TemporalType.TIMESTAMP) private Date dateAndTime; @Null @Size(min = 1, max = 50) private String productUse; @NotNull @ManyToOne(fetch = FetchType.EAGER, optional = false) @JoinColumn(name = "WAFER_ID") private Wafer wafer; @NotNull @OneToOne(fetch = FetchType.EAGER, optional = false) @JoinColumn(name = "PRODUCT_ID") private Product product; @OneToOne(fetch = FetchType.EAGER) @JoinColumn(name = "NEXTPRODUCT_ID") private Product nextProduct; @NotNull @OneToOne(fetch = FetchType.EAGER, optional = false) @JoinColumn(name = "TOOLCHAMBER_ID") private ToolChamber toolChamber; @OneToOne(fetch = FetchType.EAGER) @JoinColumn(name = "NEXTTOOLCHAMBER_ID") private ToolChamber nextToolChamber; @NotNull @OneToOne(fetch = FetchType.EAGER, optional = false) @JoinColumn(name = "OPERATION_ID") private Operation operation; @OneToOne(fetch = FetchType.EAGER) @JoinColumn(name = "NEXTOPERATION_ID") private Operation nextOperation; @OneToMany(fetch = FetchType.LAZY) private Set<Tag> samplingTags; @ManyToOne(fetch = FetchType.LAZY) @JoinColumn(name = "SAMPLINGMETHOD_ID") private SamplingMethod samplingMethod; @NotNull private Boolean getsProcessed; @Null private Integer slot; @NotNull private Long fileImportId; protected WaferEvent() { this.samplingTags = new HashSet<>(); } /** * Constructor. * @param waferEventTypeId The wafer event type identifier. * @param dateAndTime The time of the event. * @param productUse The product use code associated with the event. * @param toolChamber The tool chamber associated with the event. * @param product The product associated with the event. * @param nextProduct The next product associated with the event. * @param recipe The recipe associated with the event. * @param operation The operation associated with the event. * @param nextOperation The next operation associated with the event. * @param samplingTags The sampling tags associated with the event. * @param getsProcessed Boolean flag to determine how the event is processed. * @param fileImportId Link to the file import which created the event. 
*/ public WaferEvent(String waferEventTypeId, Wafer wafer, Date dateAndTime, Integer slot, String productUse, ToolChamber toolChamber, ToolChamber nextToolChamber, Product product, Product nextProduct, Operation operation, Operation nextOperation, Set<Tag> samplingTags, Boolean getsProcessed, Long fileImportId) { this(); this.type = Type.valueOf(waferEventTypeId); this.wafer = wafer; this.slot = slot; this.dateAndTime = dateAndTime; this.productUse = productUse; this.toolChamber = toolChamber; this.nextToolChamber = nextToolChamber; this.product = product; this.nextProduct = nextProduct; this.operation = operation; this.nextOperation = nextOperation; this.samplingTags.addAll(samplingTags); this.fileImportId = fileImportId; //this.samplingMethod = Factory.getInstance().getSamplingMethodByTags(samplingTags); this.getsProcessed = getsProcessed; if (!product.getFlow().containsOperation(operation)) { String msg = "WaferEvent::Product/Operation mismatch: ("; msg += (product==null?"":product.getPDFId()); msg += "/" + (operation==null?"":operation.getFactoryId()) + ")"; throw new IllegalArgumentException(msg); } if (nextProduct != null && nextOperation != null && !nextProduct.getFlow().containsOperation(nextOperation)) { String msg = "WaferEvent::NextProduct/NextOperation mismatch: ("; msg += (nextProduct==null?"":nextProduct.getPDFId()); msg += "/" + (nextOperation==null?"":nextOperation.getFactoryId()) + ")"; throw new IllegalArgumentException(msg); } } @Override public Long getId() { return this.id; } /** * Type getter. * @return The type of this. */ public Type getType() { return this.type; } public Boolean getsProcessed() { return this.getsProcessed; } public boolean matches(WaferEvent evt) { if (evt.operation.matches(this.operation) && evt.type == this.type) { return true; } return false; } public ToolChamber getToolChamber() { return this.toolChamber; } public ToolChamber getNextToolChamber() { return this.nextToolChamber; } public Operation getOperation() { return this.operation; } public Integer getSlot() { return this.slot; } public Operation getNextOperation() { return this.nextOperation; } public SamplingMethod getSamplingMethod() { return this.samplingMethod; } public Date getDateAndTime() { return this.dateAndTime; } public Product getProduct() { return this.product; } public Step getStep() { return this.product.getFlow().getStep(this.operation); } public Set<Tag> getSamplingTags() { return this.samplingTags; } public Wafer getWafer() { return this.wafer; } public void setProduct(Product product) { this.product = product; } public Product getNextProduct() { return this.nextProduct; } public boolean isNextInfoValid() { if (nextProduct != null && nextOperation != null) { boolean containsOperation = nextProduct.getFlow().containsOperation(nextOperation); return containsOperation; } return false; } @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append(WaferEvent.class.getName()); sb.append("(HASH:").append(Integer.toString(hashCode())).append("):"); if (this.id != null) { sb.append("(ID:").append(Long.toString(this.id)).append("):"); } sb.append("(TYPE:").append(this.type.name()).append(") "); sb.append("(LOT:").append(this.wafer.getLot().getFactoryId()).append(") "); sb.append("(WAFER:").append(this.wafer.getFactoryId()).append(") "); sb.append("(PRODUCT:").append(this.product.getPDFId()).append(") "); sb.append("(OPERATION:").append(this.operation.getFactoryId()).append(")"); if (this.toolChamber != null) { sb.append(" 
(TOOLCHAMBER:").append(this.toolChamber.getFactoryId()).append(")"); } return sb.toString(); } public String duplicateHash() { StringBuilder sb = new StringBuilder(); sb.append(WaferEvent.class.getName()); sb.append("(TYPE:").append(this.type.name()).append(")"); sb.append("(TIME:").append(Long.toString(this.dateAndTime.getTime())).append(")"); sb.append("(LOT:").append(this.wafer.getLot().getFactoryId()).append(")"); sb.append("(WAFER:").append(this.wafer.getFactoryId()).append(")"); sb.append("(PRODUCT:").append(this.product.getPDFId()).append(")"); sb.append("(OPERATION:").append(this.operation.getFactoryId()).append(")"); sb.append("(TOOL:").append(this.toolChamber.getParentTool().getFactoryId()).append(")"); sb.append("(TOOLCHAMBER:").append(this.toolChamber.getFactoryId()).append(")"); return sb.toString(); } public void setSamplingMethod(SamplingMethod waferSamplingMethod) { this.samplingMethod = waferSamplingMethod; } public String getHashString() { StringBuilder sb = new StringBuilder(); sb.append(this.type.name()).append("_"); sb.append(this.product.getPDFId()).append("_"); sb.append(this.operation.getFactoryId()).append("_"); sb.append(this.wafer.getFactoryId()); return sb.toString(); } @Override public boolean equals(Object obj) { if (!(obj instanceof WaferEvent)) { return false; } WaferEvent other = (WaferEvent) obj; return this.hashString().equals(other.hashString()); } private String hashString() { StringBuilder sb = new StringBuilder(); sb.append(this.type.name()).append("_"); sb.append(this.product.getPDFId()).append("_"); sb.append(this.operation.getFactoryId()).append("_"); sb.append(this.wafer.getFactoryId()); return sb.toString(); } @Override public int hashCode() { return hashString().hashCode(); } @Override public int compareTo(WaferEvent otherWaferEvent) { return this.wafer.getSlotNumber().compareTo(otherWaferEvent.wafer.getSlotNumber()); } /** * An enumeration of wafer event types. */ public enum Type { /** * A wafer processing run has begun. */ BEGIN_RUN, /** * A wafer processing run has ended. */ END_RUN } }
java/sapartial/WaferEvent.java
package com.sensoranalytics.inspectionmanager.entity; import java.io.Serializable; import java.util.Date; import java.util.HashSet; import java.util.Set; import javax.persistence.*; import javax.validation.constraints.NotNull; import javax.validation.constraints.Null; import javax.validation.constraints.Size; /** * A class to represent wafer events. * * @author Joel Bondurant * @version 2011.0421 * @since 1.0 */ @Entity @Table( name = "WaferEvent", uniqueConstraints = @UniqueConstraint(columnNames = {"TYPE", "DATEANDTIME", "WAFER_ID", "PRODUCT_ID", "OPERATION_ID", "TOOLCHAMBER_ID"}) ) @NamedQueries({ @NamedQuery(name = WaferEvent.FIND_RECENT_BY_DATE, query = "select we from WaferEvent we where we.dateAndTime >= :aDate") }) public class WaferEvent implements BaseEntity, Serializable, Comparable<WaferEvent> { private static final long serialVersionUID = 1L; protected static final String FIND_RECENT_BY_DATE = "WaferEvent.findRecentByDate"; /** * Primary Id. */ @Id @Basic(optional = false) @NotNull @GeneratedValue(strategy = GenerationType.AUTO) private Long id; @Enumerated(EnumType.STRING) @NotNull private Type type; @NotNull @Temporal(TemporalType.TIMESTAMP) private Date dateAndTime; @Null @Size(min = 1, max = 50) private String productUse; @NotNull @ManyToOne(fetch = FetchType.EAGER, optional = false) @JoinColumn(name = "WAFER_ID") private Wafer wafer; @NotNull @OneToOne(fetch = FetchType.EAGER, optional = false) @JoinColumn(name = "PRODUCT_ID") private Product product; @OneToOne(fetch = FetchType.EAGER) @JoinColumn(name = "NEXTPRODUCT_ID") private Product nextProduct; @NotNull @OneToOne(fetch = FetchType.EAGER, optional = false) @JoinColumn(name = "TOOLCHAMBER_ID") private ToolChamber toolChamber; @OneToOne(fetch = FetchType.EAGER) @JoinColumn(name = "NEXTTOOLCHAMBER_ID") private ToolChamber nextToolChamber; @NotNull @OneToOne(fetch = FetchType.EAGER, optional = false) @JoinColumn(name = "OPERATION_ID") private Operation operation; @OneToOne(fetch = FetchType.EAGER) @JoinColumn(name = "NEXTOPERATION_ID") private Operation nextOperation; @OneToMany(fetch = FetchType.LAZY) private Set<Tag> samplingTags; @ManyToOne(fetch = FetchType.LAZY) @JoinColumn(name = "SAMPLINGMETHOD_ID") private SamplingMethod samplingMethod; @NotNull private Boolean getsProcessed; @Null private Integer slot; @NotNull private Long fileImportId; protected WaferEvent() { this.samplingTags = new HashSet<>(); } /** * Constructor. * @param waferEventTypeId The wafer event type identifier. * @param dateAndTime The time of the event. * @param productUse The product use code associated with the event. * @param toolChamber The tool chamber associated with the event. * @param product The product associated with the event. * @param nextProduct The next product associated with the event. * @param recipe The recipe associated with the event. * @param operation The operation associated with the event. * @param nextOperation The next operation associated with the event. * @param samplingTags The sampling tags associated with the event. * @param getsProcessed Boolean flag to determine how the event is processed. * @param fileImportId Link to the file import which created the event. 
*/ public WaferEvent(String waferEventTypeId, Wafer wafer, Date dateAndTime, Integer slot, String productUse, ToolChamber toolChamber, ToolChamber nextToolChamber, Product product, Product nextProduct, Operation operation, Operation nextOperation, Set<Tag> samplingTags, Boolean getsProcessed, Long fileImportId) { this(); this.type = Type.valueOf(waferEventTypeId); this.wafer = wafer; this.slot = slot; this.dateAndTime = dateAndTime; this.productUse = productUse; this.toolChamber = toolChamber; this.nextToolChamber = nextToolChamber; this.product = product; this.nextProduct = nextProduct; this.operation = operation; this.nextOperation = nextOperation; this.samplingTags.addAll(samplingTags); this.fileImportId = fileImportId; //this.samplingMethod = Factory.getInstance().getSamplingMethodByTags(samplingTags); this.getsProcessed = getsProcessed; if (!product.getFlow().containsOperation(operation)) { String msg = "WaferEvent::Product/Operation mismatch: ("; msg += (product==null?"":product.getPDFId()); msg += "/" + (operation==null?"":operation.getFactoryId()) + ")"; throw new IllegalArgumentException(msg); } if (nextProduct != null && nextOperation != null && !nextProduct.getFlow().containsOperation(nextOperation)) { String msg = "WaferEvent::NextProduct/NextOperation mismatch: ("; msg += (nextProduct==null?"":nextProduct.getPDFId()); msg += "/" + (nextOperation==null?"":nextOperation.getFactoryId()) + ")"; throw new IllegalArgumentException(msg); } } @Override public Long getId() { return this.id; } /** * Type getter. * @return The type of this. */ public Type getType() { return this.type; } public Boolean getsProcessed() { return this.getsProcessed; } public boolean matches(WaferEvent evt) { if (evt.operation.matches(this.operation) && evt.type == this.type) { return true; } return false; } public ToolChamber getToolChamber() { return this.toolChamber; } public ToolChamber getNextToolChamber() { return this.nextToolChamber; } public Operation getOperation() { return this.operation; } public Integer getSlot() { return this.slot; } public Operation getNextOperation() { return this.nextOperation; } public SamplingMethod getSamplingMethod() { return this.samplingMethod; } public Date getDateAndTime() { return this.dateAndTime; } public Product getProduct() { return this.product; } public Step getStep() { return this.product.getFlow().getStep(this.operation); } public Set<Tag> getSamplingTags() { return this.samplingTags; } public Wafer getWafer() { return this.wafer; } public void setProduct(Product product) { this.product = product; } public Product getNextProduct() { return this.nextProduct; } public boolean isNextInfoValid() { if (nextProduct != null && nextOperation != null) { boolean containsOperation = nextProduct.getFlow().containsOperation(nextOperation); return containsOperation; } return false; } @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append(WaferEvent.class.getName()); sb.append("(HASH:").append(Integer.toString(hashCode())).append("):"); if (this.id != null) { sb.append("(ID:").append(Long.toString(this.id)).append("):"); } sb.append("(TYPE:").append(this.type.name()).append(") "); sb.append("(LOT:").append(this.wafer.getLot().getFactoryId()).append(") "); sb.append("(WAFER:").append(this.wafer.getFactoryId()).append(") "); sb.append("(PRODUCT:").append(this.product.getPDFId()).append(") "); sb.append("(OPERATION:").append(this.operation.getFactoryId()).append(")"); if (this.toolChamber != null) { sb.append(" 
(TOOLCHAMBER:").append(this.toolChamber.getFactoryId()).append(")"); } return sb.toString(); } public String duplicateHash() { StringBuilder sb = new StringBuilder(); sb.append(WaferEvent.class.getName()); sb.append("(TYPE:").append(this.type.name()).append(")"); sb.append("(TIME:").append(Long.toString(this.dateAndTime.getTime())).append(")"); sb.append("(LOT:").append(this.wafer.getLot().getFactoryId()).append(")"); sb.append("(WAFER:").append(this.wafer.getFactoryId()).append(")"); sb.append("(PRODUCT:").append(this.product.getPDFId()).append(")"); sb.append("(OPERATION:").append(this.operation.getFactoryId()).append(")"); sb.append("(TOOL:").append(this.toolChamber.getParentTool().getFactoryId()).append(")"); sb.append("(TOOLCHAMBER:").append(this.toolChamber.getFactoryId()).append(")"); return sb.toString(); } public void setSamplingMethod(SamplingMethod waferSamplingMethod) { this.samplingMethod = waferSamplingMethod; } public String getHashString() { StringBuilder sb = new StringBuilder(); sb.append(this.type.name()).append("_"); sb.append(this.product.getPDFId()).append("_"); sb.append(this.operation.getFactoryId()).append("_"); sb.append(this.wafer.getFactoryId()); return sb.toString(); } @Override public boolean equals(Object obj) { if (!(obj instanceof WaferEvent)) { return false; } WaferEvent other = (WaferEvent) obj; return this.hashString().equals(other.hashCode()); } private String hashString() { StringBuilder sb = new StringBuilder(); sb.append(this.type.name()).append("_"); sb.append(this.product.getPDFId()).append("_"); sb.append(this.operation.getFactoryId()).append("_"); sb.append(this.wafer.getFactoryId()); return sb.toString(); } @Override public int hashCode() { return hashString().hashCode(); } @Override public int compareTo(WaferEvent otherWaferEvent) { return this.wafer.getSlotNumber().compareTo(otherWaferEvent.wafer.getSlotNumber()); } /** * An enumeration of wafer event types. */ public enum Type { /** * A wafer processing run has begun. */ BEGIN_RUN, /** * A wafer processing run has ended. */ END_RUN } }
Typofix.
java/sapartial/WaferEvent.java
Typofix.
<ide><path>ava/sapartial/WaferEvent.java <ide> return false; <ide> } <ide> WaferEvent other = (WaferEvent) obj; <del> return this.hashString().equals(other.hashCode()); <add> return this.hashString().equals(other.hashString()); <ide> } <ide> <ide> private String hashString() {
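The "Typofix." commit above repairs an equals() that compared a String key against an int hash code (this.hashString().equals(other.hashCode())), which String.equals() can never accept, so no two WaferEvent instances ever compared equal. The sketch below is illustrative only (the Event class and its fields are invented for the example); it shows the intended pattern of deriving both equals() and hashCode() from the same key string, as the fixed code does with hashString().

// Illustrative sketch only -- the Event class here is invented for the example.
// equals() and hashCode() are both derived from the same key string; the pre-fix
// code compared a String key against an Integer hash, which is always false.
class Event {
    private final String type;
    private final String waferId;

    Event(String type, String waferId) {
        this.type = type;
        this.waferId = waferId;
    }

    // Single key string, playing the role of hashString() in the original class.
    private String key() {
        return type + "_" + waferId;
    }

    @Override
    public boolean equals(Object obj) {
        if (!(obj instanceof Event)) {
            return false;
        }
        Event other = (Event) obj;
        return key().equals(other.key()); // fixed form; key().equals(other.hashCode()) is never true
    }

    @Override
    public int hashCode() {
        return key().hashCode();
    }

    public static void main(String[] args) {
        Event a = new Event("BEGIN_RUN", "W042");
        Event b = new Event("BEGIN_RUN", "W042");
        System.out.println(a.equals(b));                  // true after the fix
        System.out.println(a.hashCode() == b.hashCode()); // true, consistent with equals()
    }
}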
Java
mit
fd13603be6f4f2092f4bacfab6a59fc8471fc0bb
0
mfietz/AntennaPod,mfietz/AntennaPod,johnjohndoe/AntennaPod,johnjohndoe/AntennaPod,johnjohndoe/AntennaPod,mfietz/AntennaPod,johnjohndoe/AntennaPod,mfietz/AntennaPod
package de.danoeh.antennapod.activity; import android.content.res.TypedArray; import android.graphics.Color; import android.os.Build; import android.os.Bundle; import android.support.v7.app.AppCompatActivity; import android.util.Log; import android.view.MenuItem; import android.view.View; import android.webkit.WebSettings; import android.webkit.WebView; import android.webkit.WebViewClient; import android.widget.LinearLayout; import org.apache.commons.io.IOUtils; import java.io.IOException; import java.io.InputStream; import java.nio.charset.Charset; import de.danoeh.antennapod.R; import de.danoeh.antennapod.core.preferences.UserPreferences; import rx.Single; import rx.Subscription; import rx.android.schedulers.AndroidSchedulers; import rx.schedulers.Schedulers; /** * Displays the 'about' screen */ public class AboutActivity extends AppCompatActivity { private static final String TAG = AboutActivity.class.getSimpleName(); private WebView webView; private LinearLayout webViewContainer; private Subscription subscription; @Override protected void onCreate(Bundle savedInstanceState) { setTheme(UserPreferences.getTheme()); super.onCreate(savedInstanceState); getSupportActionBar().setDisplayShowHomeEnabled(true); setContentView(R.layout.about); webViewContainer = (LinearLayout) findViewById(R.id.webViewContainer); webView = (WebView) findViewById(R.id.webViewAbout); webView.getSettings().setCacheMode(WebSettings.LOAD_NO_CACHE); if (UserPreferences.getTheme() == R.style.Theme_AntennaPod_Dark) { if (Build.VERSION.SDK_INT <= Build.VERSION_CODES.ICE_CREAM_SANDWICH_MR1) { webView.setLayerType(View.LAYER_TYPE_SOFTWARE, null); } webView.setBackgroundColor(Color.TRANSPARENT); } webView.setWebViewClient(new WebViewClient() { @Override public boolean shouldOverrideUrlLoading(WebView view, String url) { if (!url.startsWith("http")) { url = url.replace("file:///android_asset/", ""); loadAsset(url); return true; } return false; } }); loadAsset("about.html"); } private void loadAsset(String filename) { subscription = Single.create(subscriber -> { InputStream input = null; try { TypedArray res = AboutActivity.this.getTheme().obtainStyledAttributes( new int[] { android.R.attr.textColorPrimary }); int colorResource = res.getColor(0, 0); String colorString = String.format("#%06X", 0xFFFFFF & colorResource); res.recycle(); input = getAssets().open(filename); String webViewData = IOUtils.toString(input, Charset.defaultCharset()); if (!webViewData.startsWith("<!DOCTYPE html>")) { webViewData = webViewData.replace("%", "&#37;"); webViewData = "<!DOCTYPE html>" + "<html>" + "<head>" + " <meta http-equiv=\"Content-Type\" content=\"text/html;charset=UTF-8\">" + " <style type=\"text/css\">" + " @font-face {" + " font-family: 'Roboto-Light';" + " src: url('file:///android_asset/Roboto-Light.ttf');" + " }" + " * {" + " color: %s;" + " font-family: roboto-Light;" + " font-size: 8pt;" + " }" + " </style>" + "</head><body><p>" + webViewData + "</p></body></html>"; webViewData = webViewData.replace("\n", "<br/>"); } webViewData = String.format(webViewData, colorString); subscriber.onSuccess(webViewData); } catch (IOException e) { Log.e(TAG, Log.getStackTraceString(e)); subscriber.onError(e); } finally { IOUtils.closeQuietly(input); } }) .subscribeOn(Schedulers.newThread()) .observeOn(AndroidSchedulers.mainThread()) .subscribe( webViewData -> webView.loadDataWithBaseURL("file:///android_asset/", webViewData.toString(), "text/html", "utf-8", "about:blank"), error -> Log.e(TAG, Log.getStackTraceString(error)) ); } @Override public 
void onBackPressed() { if (webView.canGoBack()) { webView.goBack(); } else { super.onBackPressed(); } } @Override public boolean onOptionsItemSelected(MenuItem item) { if (item.getItemId() == android.R.id.home) { onBackPressed(); return true; } else { return super.onOptionsItemSelected(item); } } @Override protected void onDestroy() { super.onDestroy(); if(subscription != null) { subscription.unsubscribe(); } if (webViewContainer != null && webView != null) { webViewContainer.removeAllViews(); webView.destroy(); } } }
app/src/main/java/de/danoeh/antennapod/activity/AboutActivity.java
package de.danoeh.antennapod.activity; import android.content.res.TypedArray; import android.graphics.Color; import android.os.Build; import android.os.Bundle; import android.support.v7.app.AppCompatActivity; import android.util.Log; import android.view.MenuItem; import android.view.View; import android.webkit.WebSettings; import android.webkit.WebView; import android.webkit.WebViewClient; import android.widget.LinearLayout; import org.apache.commons.io.IOUtils; import java.io.IOException; import java.io.InputStream; import java.nio.charset.Charset; import de.danoeh.antennapod.R; import de.danoeh.antennapod.core.preferences.UserPreferences; import rx.Observable; import rx.Subscription; import rx.android.schedulers.AndroidSchedulers; import rx.schedulers.Schedulers; /** * Displays the 'about' screen */ public class AboutActivity extends AppCompatActivity { private static final String TAG = AboutActivity.class.getSimpleName(); private WebView webView; private LinearLayout webViewContainer; private Subscription subscription; @Override protected void onCreate(Bundle savedInstanceState) { setTheme(UserPreferences.getTheme()); super.onCreate(savedInstanceState); getSupportActionBar().setDisplayShowHomeEnabled(true); setContentView(R.layout.about); webViewContainer = (LinearLayout) findViewById(R.id.webViewContainer); webView = (WebView) findViewById(R.id.webViewAbout); webView.getSettings().setCacheMode(WebSettings.LOAD_NO_CACHE); if (UserPreferences.getTheme() == R.style.Theme_AntennaPod_Dark) { if (Build.VERSION.SDK_INT <= Build.VERSION_CODES.ICE_CREAM_SANDWICH_MR1) { webView.setLayerType(View.LAYER_TYPE_SOFTWARE, null); } webView.setBackgroundColor(Color.TRANSPARENT); } webView.setWebViewClient(new WebViewClient() { @Override public boolean shouldOverrideUrlLoading(WebView view, String url) { if (!url.startsWith("http")) { url = url.replace("file:///android_asset/", ""); loadAsset(url); return true; } return false; } }); loadAsset("about.html"); } private void loadAsset(String filename) { subscription = Observable.fromCallable(()-> { InputStream input = null; try { TypedArray res = AboutActivity.this.getTheme().obtainStyledAttributes( new int[] { android.R.attr.textColorPrimary }); int colorResource = res.getColor(0, 0); String colorString = String.format("#%06X", 0xFFFFFF & colorResource); res.recycle(); input = getAssets().open(filename); String webViewData = IOUtils.toString(input, Charset.defaultCharset()); if (!webViewData.startsWith("<!DOCTYPE html>")) { webViewData = webViewData.replace("%", "&#37;"); webViewData = "<!DOCTYPE html>" + "<html>" + "<head>" + " <meta http-equiv=\"Content-Type\" content=\"text/html;charset=UTF-8\">" + " <style type=\"text/css\">" + " @font-face {" + " font-family: 'Roboto-Light';" + " src: url('file:///android_asset/Roboto-Light.ttf');" + " }" + " * {" + " color: %s;" + " font-family: roboto-Light;" + " font-size: 8pt;" + " }" + " </style>" + "</head><body><p>" + webViewData + "</p></body></html>"; webViewData = webViewData.replace("\n", "<br/>"); } webViewData = String.format(webViewData, colorString); return webViewData; } catch (IOException e) { Log.e(TAG, Log.getStackTraceString(e)); throw e; } finally { IOUtils.closeQuietly(input); } }) .subscribeOn(Schedulers.newThread()) .observeOn(AndroidSchedulers.mainThread()) .subscribe( webviewData -> webView.loadDataWithBaseURL("file:///android_asset/", webviewData, "text/html", "utf-8", "about:blank"), error -> Log.e(TAG, Log.getStackTraceString(error)) ); } @Override public void onBackPressed() { if 
(webView.canGoBack()) { webView.goBack(); } else { super.onBackPressed(); } } @Override public boolean onOptionsItemSelected(MenuItem item) { if (item.getItemId() == android.R.id.home) { onBackPressed(); return true; } else { return super.onOptionsItemSelected(item); } } @Override protected void onDestroy() { super.onDestroy(); if(subscription != null) { subscription.unsubscribe(); } if (webViewContainer != null && webView != null) { webViewContainer.removeAllViews(); webView.destroy(); } } }
Use single instead of fromCallable
app/src/main/java/de/danoeh/antennapod/activity/AboutActivity.java
Use single instead of fromCallable
<ide><path>app/src/main/java/de/danoeh/antennapod/activity/AboutActivity.java <ide> <ide> import de.danoeh.antennapod.R; <ide> import de.danoeh.antennapod.core.preferences.UserPreferences; <del>import rx.Observable; <add>import rx.Single; <ide> import rx.Subscription; <ide> import rx.android.schedulers.AndroidSchedulers; <ide> import rx.schedulers.Schedulers; <ide> } <ide> <ide> private void loadAsset(String filename) { <del> subscription = Observable.fromCallable(()-> { <add> subscription = Single.create(subscriber -> { <ide> InputStream input = null; <ide> try { <ide> TypedArray res = AboutActivity.this.getTheme().obtainStyledAttributes( <ide> webViewData = webViewData.replace("\n", "<br/>"); <ide> } <ide> webViewData = String.format(webViewData, colorString); <del> return webViewData; <add> subscriber.onSuccess(webViewData); <ide> } catch (IOException e) { <ide> Log.e(TAG, Log.getStackTraceString(e)); <del> throw e; <add> subscriber.onError(e); <ide> } finally { <ide> IOUtils.closeQuietly(input); <ide> } <ide> .subscribeOn(Schedulers.newThread()) <ide> .observeOn(AndroidSchedulers.mainThread()) <ide> .subscribe( <del> webviewData -> <del> webView.loadDataWithBaseURL("file:///android_asset/", webviewData, "text/html", "utf-8", "about:blank"), <add> webViewData -> <add> webView.loadDataWithBaseURL("file:///android_asset/", webViewData.toString(), "text/html", "utf-8", "about:blank"), <ide> error -> Log.e(TAG, Log.getStackTraceString(error)) <ide> ); <ide> }
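The diff above replaces Observable.fromCallable with an untyped Single.create, which is why the subscriber ends up calling webViewData.toString(). As a minimal sketch of the same load-on-a-worker-thread, deliver-on-the-main-thread pattern, the snippet below uses Single.fromCallable (available in later RxJava 1.x releases) so the emitted value stays typed as String; the class name AboutLoaderSketch and the helper loadHtmlFromAssets are hypothetical stand-ins for the asset-reading block shown in the activity, not AntennaPod code.

```java
// Sketch only: same threading pattern as the diff above, using a typed Single.
import rx.Single;
import rx.Subscription;
import rx.android.schedulers.AndroidSchedulers;
import rx.schedulers.Schedulers;

public class AboutLoaderSketch {
    private Subscription subscription;

    void load(android.webkit.WebView webView, String filename) {
        subscription = Single.fromCallable(() -> loadHtmlFromAssets(filename))
                .subscribeOn(Schedulers.newThread())          // read the asset off the UI thread
                .observeOn(AndroidSchedulers.mainThread())     // deliver the HTML on the UI thread
                .subscribe(
                        html -> webView.loadDataWithBaseURL("file:///android_asset/",
                                html, "text/html", "utf-8", "about:blank"),
                        error -> android.util.Log.e("AboutLoaderSketch",
                                android.util.Log.getStackTraceString(error)));
    }

    // Hypothetical helper standing in for the asset-reading code in the record;
    // an IOException thrown here is routed to the error handler above.
    private String loadHtmlFromAssets(String filename) throws java.io.IOException {
        return "<!DOCTYPE html><html><body>" + filename + "</body></html>";
    }
}
```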
Java
agpl-3.0
8e4fdfa908844d451ff6584b8194448eb97e70ac
0
ebonnet/Silverpeas-Core,Silverpeas/Silverpeas-Core,SilverYoCha/Silverpeas-Core,ebonnet/Silverpeas-Core,SilverTeamWork/Silverpeas-Core,ebonnet/Silverpeas-Core,SilverDav/Silverpeas-Core,SilverDav/Silverpeas-Core,auroreallibe/Silverpeas-Core,mmoqui/Silverpeas-Core,SilverTeamWork/Silverpeas-Core,Silverpeas/Silverpeas-Core,auroreallibe/Silverpeas-Core,SilverYoCha/Silverpeas-Core,auroreallibe/Silverpeas-Core,Silverpeas/Silverpeas-Core,ebonnet/Silverpeas-Core,SilverYoCha/Silverpeas-Core,ebonnet/Silverpeas-Core,ebonnet/Silverpeas-Core,SilverDav/Silverpeas-Core,SilverTeamWork/Silverpeas-Core,mmoqui/Silverpeas-Core,mmoqui/Silverpeas-Core
/* * Copyright (C) 2000 - 2014 Silverpeas * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as * published by the Free Software Foundation, either version 3 of the * License, or (at your option) any later version. * * As a special exception to the terms and conditions of version 3.0 of * the GPL, you may redistribute this Program in connection with Free/Libre * Open Source Software ("FLOSS") applications as described in Silverpeas's * FLOSS exception. You should have recieved a copy of the text describing * the FLOSS exception, and it is also available here: * "http://www.silverpeas.org/docs/core/legal/floss_exception.html" * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package org.silverpeas.test; /** * This builder extends the {@link WarBuilder} in order to centralize the * definition of common archive part definitions. */ public class WarBuilder4WarCore extends WarBuilder<WarBuilder4WarCore> { /** * Constructs a war builder for the specified test class. It will load all the resources in the * same packages of the specified test class. * @param test the class of the test for which a war archive will be build. */ protected <T> WarBuilder4WarCore(final Class<T> test) { super(test); } /** * Gets an instance of a war archive builder for the specified test class with the * following common stuffs: * <ul> * <li>silverpeas-core-web-test</li> * <li>silverpeas-core</li> * <li>all the necessary to handle http request ({@link org.silverpeas.servlet.HttpRequest} for example)</li> * </ul> * @return the instance of the war archive builder. 
*/ public static <T> WarBuilder4WarCore onWarForTestClass(Class<T> test) { WarBuilder4WarCore warBuilder = new WarBuilder4WarCore(test); warBuilder.addMavenDependencies("javax.jcr:jcr"); warBuilder.addMavenDependenciesWithPersistence("org.silverpeas.core:silverpeas-core"); warBuilder.addMavenDependenciesWithPersistence("org.silverpeas.core.services:silverpeas-core-pdc"); warBuilder.addMavenDependenciesWithPersistence("org.silverpeas.core:silverpeas-core-web"); warBuilder.addMavenDependencies("org.silverpeas.core.services:silverpeas-core-comment"); warBuilder.addMavenDependencies("org.silverpeas.core.services:silverpeas-core-statistic"); warBuilder.addMavenDependencies("org.silverpeas.core.services:silverpeas-core-silverstatistics"); warBuilder.addMavenDependencies("org.silverpeas.core.services:silverpeas-core-searchengine"); warBuilder.addMavenDependencies("org.silverpeas.core.services:silverpeas-core-calendar"); warBuilder.addMavenDependencies("org.silverpeas.core.services:silverpeas-core-contact"); warBuilder.addMavenDependencies("org.silverpeas.core.services:silverpeas-core-mylinks"); warBuilder.addMavenDependencies("org.silverpeas.core.services:silverpeas-core-importexport"); warBuilder.addMavenDependencies("org.silverpeas.core.services:silverpeas-core-tagcloud"); warBuilder.addMavenDependencies("org.silverpeas.core.services:silverpeas-core-viewer"); warBuilder.addMavenDependencies("org.apache.tika:tika-core"); warBuilder.addMavenDependencies("org.apache.tika:tika-parsers"); warBuilder.addAsResource("META-INF/test-MANIFEST.MF", "META-INF/MANIFEST.MF"); return warBuilder; } }
core-war/src/integration-test/java/org/silverpeas/test/WarBuilder4WarCore.java
/* * Copyright (C) 2000 - 2014 Silverpeas * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as * published by the Free Software Foundation, either version 3 of the * License, or (at your option) any later version. * * As a special exception to the terms and conditions of version 3.0 of * the GPL, you may redistribute this Program in connection with Free/Libre * Open Source Software ("FLOSS") applications as described in Silverpeas's * FLOSS exception. You should have recieved a copy of the text describing * the FLOSS exception, and it is also available here: * "http://www.silverpeas.org/docs/core/legal/floss_exception.html" * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package org.silverpeas.test; /** * This builder extends the {@link WarBuilder} in order to centralize the * definition of common archive part definitions. */ public class WarBuilder4WarCore extends WarBuilder<WarBuilder4WarCore> { /** * Constructs a war builder for the specified test class. It will load all the resources in the * same packages of the specified test class. * @param test the class of the test for which a war archive will be build. */ protected <T> WarBuilder4WarCore(final Class<T> test) { super(test); } /** * Gets an instance of a war archive builder for the specified test class with the * following common stuffs: * <ul> * <li>silverpeas-core-web-test</li> * <li>silverpeas-core</li> * <li>all the necessary to handle http request ({@link org.silverpeas.servlet.HttpRequest} for example)</li> * </ul> * @return the instance of the war archive builder. 
*/ public static <T> WarBuilder4WarCore onWarForTestClass(Class<T> test) { WarBuilder4WarCore warBuilder = new WarBuilder4WarCore(test); warBuilder.addMavenDependencies("javax.jcr:jcr"); warBuilder.addMavenDependenciesWithPersistence("org.silverpeas.core:silverpeas-core"); warBuilder.addMavenDependenciesWithPersistence("org.silverpeas.core.services:silverpeas-core-pdc"); warBuilder.addMavenDependenciesWithPersistence("org.silverpeas.core:silverpeas-core-web"); warBuilder.addMavenDependencies("org.silverpeas.core.services:silverpeas-core-comment"); warBuilder.addMavenDependencies("org.silverpeas.core.services:silverpeas-core-statistic"); warBuilder.addMavenDependencies("org.silverpeas.core.services:silverpeas-core-silverstatistics"); warBuilder.addMavenDependencies("org.silverpeas.core.services:silverpeas-core-searchengine"); warBuilder.addMavenDependencies("org.silverpeas.core.services:silverpeas-core-formtemplate"); warBuilder.addMavenDependencies("org.silverpeas.core.services:silverpeas-core-calendar"); warBuilder.addMavenDependencies("org.silverpeas.core.services:silverpeas-core-contact"); warBuilder.addMavenDependencies("org.silverpeas.core.services:silverpeas-core-mylinks"); warBuilder.addMavenDependencies("org.silverpeas.core.services:silverpeas-core-importexport"); warBuilder.addMavenDependencies("org.silverpeas.core.services:silverpeas-core-tagcloud"); warBuilder.addMavenDependencies("org.silverpeas.core.services:silverpeas-core-viewer"); warBuilder.addMavenDependencies("org.apache.tika:tika-core"); warBuilder.addMavenDependencies("org.apache.tika:tika-parsers"); warBuilder.addAsResource("META-INF/test-MANIFEST.MF", "META-INF/MANIFEST.MF"); return warBuilder; } }
Feature #7579 - removing dependency that does not exist anymore.
core-war/src/integration-test/java/org/silverpeas/test/WarBuilder4WarCore.java
Feature #7579 - removing dependency that does not exist anymore.
<ide><path>core-war/src/integration-test/java/org/silverpeas/test/WarBuilder4WarCore.java <ide> warBuilder.addMavenDependencies("org.silverpeas.core.services:silverpeas-core-statistic"); <ide> warBuilder.addMavenDependencies("org.silverpeas.core.services:silverpeas-core-silverstatistics"); <ide> warBuilder.addMavenDependencies("org.silverpeas.core.services:silverpeas-core-searchengine"); <del> warBuilder.addMavenDependencies("org.silverpeas.core.services:silverpeas-core-formtemplate"); <ide> warBuilder.addMavenDependencies("org.silverpeas.core.services:silverpeas-core-calendar"); <ide> warBuilder.addMavenDependencies("org.silverpeas.core.services:silverpeas-core-contact"); <ide> warBuilder.addMavenDependencies("org.silverpeas.core.services:silverpeas-core-mylinks");
Java
apache-2.0
4b36d0a0171e6ee1fef67fe6a2f4d8a281d1aea4
0
stephenc/maven,Distrotech/maven,gorcz/maven,josephw/maven,wangyuesong/maven,atanasenko/maven,trajano/maven,xasx/maven,wangyuesong/maven,lbndev/maven,mcculls/maven,kidaa/maven-1,barthel/maven,vedmishr/demo1,gorcz/maven,wangyuesong0/maven,lbndev/maven,runepeter/maven-deploy-plugin-2.8.1,xasx/maven,Distrotech/maven,changbai1980/maven,Mounika-Chirukuri/maven,ChristianSchulte/maven,skitt/maven,Mounika-Chirukuri/maven,karthikjaps/maven,vedmishr/demo1,ChristianSchulte/maven,skitt/maven,dsyer/maven,njuneau/maven,barthel/maven,trajano/maven,aheritier/maven,skitt/maven,changbai1980/maven,dsyer/maven,josephw/maven,dsyer/maven,njuneau/maven,trajano/maven,mizdebsk/maven,stephenc/maven,cstamas/maven,Mounika-Chirukuri/maven,mcculls/maven,apache/maven,olamy/maven,Tibor17/maven,stephenc/maven,josephw/maven,kidaa/maven-1,apache/maven,cstamas/maven,aheritier/maven,keith-turner/maven,rogerchina/maven,olamy/maven,Tibor17/maven,wangyuesong/maven,pkozelka/maven,wangyuesong0/maven,apache/maven,olamy/maven,changbai1980/maven,atanasenko/maven,mizdebsk/maven,likaiwalkman/maven,pkozelka/maven,mcculls/maven,vedmishr/demo1,ChristianSchulte/maven,mizdebsk/maven,karthikjaps/maven,keith-turner/maven,keith-turner/maven,rogerchina/maven,runepeter/maven-deploy-plugin-2.8.1,aheritier/maven,pkozelka/maven,atanasenko/maven,xasx/maven,barthel/maven,gorcz/maven,kidaa/maven-1,cstamas/maven,njuneau/maven,likaiwalkman/maven,karthikjaps/maven,rogerchina/maven,likaiwalkman/maven,lbndev/maven,wangyuesong0/maven
package org.apache.maven.reporting; /* * Copyright 2001-2005 The Apache Software Foundation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import org.apache.maven.plugin.AbstractMojo; import org.apache.maven.plugin.MojoExecutionException; import org.apache.maven.project.MavenProject; import org.codehaus.doxia.module.xhtml.XhtmlSink; import org.codehaus.doxia.sink.Sink; import org.codehaus.doxia.site.renderer.SiteRenderer; import org.codehaus.plexus.util.IOUtil; import org.codehaus.plexus.util.StringInputStream; import org.codehaus.plexus.util.StringUtils; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.util.HashMap; import java.util.Locale; import java.util.Map; /** * The basis for a Maven report. * * @author <a href="[email protected]">Emmanuel Venisse</a> * @version $Id: MavenReport.java 163376 2005-02-23 00:06:06Z brett $ */ public abstract class AbstractMavenReport extends AbstractMojo implements MavenReport { /** * @todo share, use default excludes from plexus utils. */ protected static final String[] DEFAULT_EXCLUDES = {// Miscellaneous typical temporary files "**/*~", "**/#*#", "**/.#*", "**/%*%", "**/._*", // CVS "**/CVS", "**/CVS/**", "**/.cvsignore", // SCCS "**/SCCS", "**/SCCS/**", // Visual SourceSafe "**/vssver.scc", // Subversion "**/.svn", "**/.svn/**", // Mac "**/.DS_Store"}; private Sink sink; private Locale locale = Locale.ENGLISH; protected abstract SiteRenderer getSiteRenderer(); protected abstract String getOutputDirectory(); protected abstract MavenProject getProject(); private File reportOutputDirectory; /** * @see org.apache.maven.plugin.Mojo#execute() */ public void execute() throws MojoExecutionException { try { String outputDirectory = getOutputDirectory(); XhtmlSink sink = getSiteRenderer().createSink( new File( outputDirectory ), getOutputName() + ".html", outputDirectory, getSiteDescriptor(), "maven" ); generate( sink, Locale.ENGLISH ); getSiteRenderer().copyResources( outputDirectory, "maven" ); } catch ( Exception e ) { throw new MojoExecutionException( "An error has occurred in " + getName( locale ) + " report generation.", e ); } } /** * @see org.apache.maven.reporting.MavenReport#generate(org.codehaus.doxia.sink.Sink, java.util.Locale) */ public void generate( Sink sink, Locale locale ) throws MavenReportException { if ( sink == null ) { throw new MavenReportException( "You must specify a sink." 
); } else { this.sink = sink; } executeReport( locale ); closeReport(); } protected abstract void executeReport( Locale locale ) throws MavenReportException; protected void closeReport() { } public String getCategoryName() { return CATEGORY_PROJECT_REPORTS; } private String getReportsMenu() { StringBuffer buffer = new StringBuffer(); buffer.append( "<menu name=\"Project Documentation\">\n" ); buffer.append( " <item name=\"" + getName( locale ) + "\" href=\"/" + getOutputName() + ".html\"/>\n" ); buffer.append( "</menu>\n" ); return buffer.toString(); } private InputStream getSiteDescriptor() throws MojoExecutionException { String siteDescriptorContent = ""; try { siteDescriptorContent = IOUtil.toString( getClass().getResourceAsStream( "/default-report.xml" ) ); } catch ( IOException e ) { throw new MojoExecutionException( "The site descriptor cannot be read!", e ); } Map props = new HashMap(); props.put( "reports", getReportsMenu() ); // TODO: interpolate ${project.*} in general if ( getProject().getName() != null ) { props.put( "project.name", getProject().getName() ); } else { props.put( "project.name", "NO_PROJECT_NAME_SET" ); } if ( getProject().getUrl() != null ) { props.put( "project.url", getProject().getUrl() ); } else { props.put( "project.url", "NO_PROJECT_URL_SET" ); } siteDescriptorContent = StringUtils.interpolate( siteDescriptorContent, props ); return new StringInputStream( siteDescriptorContent ); } public File getReportOutputDirectory() { if ( reportOutputDirectory == null ) { reportOutputDirectory = new File( getOutputDirectory() ); } return reportOutputDirectory; } public void setReportOutputDirectory( File reportOutputDirectory ) { this.reportOutputDirectory = reportOutputDirectory; } public Sink getSink() { return sink; } }
maven-reporting/maven-reporting-impl/src/main/java/org/apache/maven/reporting/AbstractMavenReport.java
package org.apache.maven.reporting; /* * Copyright 2001-2005 The Apache Software Foundation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import org.apache.maven.plugin.AbstractMojo; import org.apache.maven.plugin.MojoExecutionException; import org.apache.maven.project.MavenProject; import org.codehaus.doxia.module.xhtml.XhtmlSink; import org.codehaus.doxia.sink.Sink; import org.codehaus.doxia.site.renderer.SiteRenderer; import org.codehaus.plexus.util.IOUtil; import org.codehaus.plexus.util.StringInputStream; import org.codehaus.plexus.util.StringUtils; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.util.HashMap; import java.util.Locale; import java.util.Map; /** * The basis for a Maven report. * * @author <a href="[email protected]">Emmanuel Venisse</a> * @version $Id: MavenReport.java 163376 2005-02-23 00:06:06Z brett $ */ public abstract class AbstractMavenReport extends AbstractMojo implements MavenReport { /** * @todo share, use default excludes from plexus utils. */ protected static final String[] DEFAULT_EXCLUDES = {// Miscellaneous typical temporary files "**/*~", "**/#*#", "**/.#*", "**/%*%", "**/._*", // CVS "**/CVS", "**/CVS/**", "**/.cvsignore", // SCCS "**/SCCS", "**/SCCS/**", // Visual SourceSafe "**/vssver.scc", // Subversion "**/.svn", "**/.svn/**", // Mac "**/.DS_Store"}; private Sink sink; private Locale locale = Locale.ENGLISH; protected abstract SiteRenderer getSiteRenderer(); protected abstract String getOutputDirectory(); protected abstract MavenProject getProject(); private File reportOutputDirectory; /** * @see org.apache.maven.plugin.Mojo#execute() */ public void execute() throws MojoExecutionException { try { String outputDirectory = getOutputDirectory(); XhtmlSink sink = getSiteRenderer().createSink( new File( outputDirectory ), getOutputName() + ".html", outputDirectory, getSiteDescriptor(), "maven" ); generate( sink, Locale.ENGLISH ); getSiteRenderer().copyResources( outputDirectory, "maven" ); } catch ( Exception e ) { throw new MojoExecutionException( "An error has occurred in " + getName( locale ) + " report generation.", e ); } } /** * @see org.apache.maven.reporting.MavenReport#generate(org.codehaus.doxia.sink.Sink, java.util.Locale) * @deprecated */ public void generate( Sink sink, Locale locale ) throws MavenReportException { if ( sink == null ) { throw new MavenReportException( "You must specify a sink." 
); } else { this.sink = sink; } executeReport( locale ); closeReport(); } protected abstract void executeReport( Locale locale ) throws MavenReportException; protected void closeReport() { } public String getCategoryName() { return CATEGORY_PROJECT_REPORTS; } private String getReportsMenu() { StringBuffer buffer = new StringBuffer(); buffer.append( "<menu name=\"Project Documentation\">\n" ); buffer.append( " <item name=\"" + getName( locale ) + "\" href=\"/" + getOutputName() + ".html\"/>\n" ); buffer.append( "</menu>\n" ); return buffer.toString(); } private InputStream getSiteDescriptor() throws MojoExecutionException { String siteDescriptorContent = ""; try { siteDescriptorContent = IOUtil.toString( getClass().getResourceAsStream( "/default-report.xml" ) ); } catch ( IOException e ) { throw new MojoExecutionException( "The site descriptor cannot be read!", e ); } Map props = new HashMap(); props.put( "reports", getReportsMenu() ); // TODO: interpolate ${project.*} in general if ( getProject().getName() != null ) { props.put( "project.name", getProject().getName() ); } else { props.put( "project.name", "NO_PROJECT_NAME_SET" ); } if ( getProject().getUrl() != null ) { props.put( "project.url", getProject().getUrl() ); } else { props.put( "project.url", "NO_PROJECT_URL_SET" ); } siteDescriptorContent = StringUtils.interpolate( siteDescriptorContent, props ); return new StringInputStream( siteDescriptorContent ); } public File getReportOutputDirectory() { if ( reportOutputDirectory == null ) { reportOutputDirectory = new File( getOutputDirectory() ); } return reportOutputDirectory; } public void setReportOutputDirectory( File reportOutputDirectory ) { this.reportOutputDirectory = reportOutputDirectory; } public Sink getSink() { return sink; } }
remove deprecation that was intended to happen, but the other changes got backed out git-svn-id: 2c527eb49caa05e19d6b2be874bf74fa9d7ea670@225285 13f79535-47bb-0310-9956-ffa450edef68
maven-reporting/maven-reporting-impl/src/main/java/org/apache/maven/reporting/AbstractMavenReport.java
remove deprecation that was intended to happen, but the other changes got backed out
<ide><path>maven-reporting/maven-reporting-impl/src/main/java/org/apache/maven/reporting/AbstractMavenReport.java <ide> <ide> /** <ide> * @see org.apache.maven.reporting.MavenReport#generate(org.codehaus.doxia.sink.Sink, java.util.Locale) <del> * @deprecated <ide> */ <ide> public void generate( Sink sink, Locale locale ) <ide> throws MavenReportException <ide> 
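The record above shows the old maven-reporting AbstractMavenReport base class, which drives a report through the Doxia sink and leaves a handful of abstract hooks to concrete reports. The sketch below is a hedged example of such a concrete report, assuming the usual MavenReport naming methods (getOutputName, getName, getDescription) and container-injected fields; the class HelloReport and those wiring details are illustrative, not part of the record.

```java
// Sketch only: a minimal concrete report on top of the AbstractMavenReport shown above.
import java.util.Locale;
import org.apache.maven.project.MavenProject;
import org.apache.maven.reporting.AbstractMavenReport;
import org.apache.maven.reporting.MavenReportException;
import org.codehaus.doxia.sink.Sink;
import org.codehaus.doxia.site.renderer.SiteRenderer;

public class HelloReport extends AbstractMavenReport {
    private MavenProject project;        // assumed to be injected by the plugin container
    private SiteRenderer siteRenderer;   // assumed to be injected
    private String outputDirectory;      // assumed to be injected

    protected void executeReport(Locale locale) throws MavenReportException {
        Sink sink = getSink();           // provided by the base class in the record
        sink.body();
        sink.text("Hello from a minimal report.");
        sink.body_();
        sink.flush();
        sink.close();
    }

    protected MavenProject getProject() { return project; }
    protected SiteRenderer getSiteRenderer() { return siteRenderer; }
    protected String getOutputDirectory() { return outputDirectory; }

    // Assumed to be required by the MavenReport interface implemented by the base class.
    public String getOutputName() { return "hello-report"; }
    public String getName(Locale locale) { return "Hello Report"; }
    public String getDescription(Locale locale) { return "A minimal example report."; }
}
```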
JavaScript
mit
6e4478a12b88871d78a269a4831d11defd55d6ce
0
SockDrawer/SockBot
'use strict'; const gulp = require('gulp'), gulpJsdoc2md = require('gulp-jsdoc-to-markdown'), rename = require('gulp-rename'), istanbul = require('gulp-istanbul'), istanbulHarmony = require('istanbul-harmony'), mocha = require('gulp-mocha'), eslint = require('gulp-eslint'), git = require('gulp-git'); const sockFiles = ['*.js', '!./gulpfile.js', '**/lib/**/*.js', '**/classes/**/*.js', '**/plugins/**/*.js', '!node_modules/**', '!test/**'], sockExterns = ['**/external/**/*.js'], sockDocs = ['README.md', 'docs/**/*.md'], sockTests = ['test/**/*.js']; const JobNumber = process.env.TRAVIS_JOB_NUMBER, PullRequestFlag = process.env.TRAVIS_PULL_REQUEST, PullRequest = PullRequestFlag && PullRequestFlag !== 'false', CI = process.env.CI === 'true', runDocs = !PullRequest && (!JobNumber || /[.]1$/.test(JobNumber)); const testReporter = CI ? 'spec': 'nyan'; /** * Pull git branch locally (solves detached head issue in CI) */ gulp.task('gitBranch', (done) => { let complete = false; const branch = process.env.TRAVIS_BRANCH; // Abort(successfully) early if not running in CI if (!(JobNumber && runDocs && branch)) { return done(); } git.checkout(branch, () => { // Make sure we have full log history. git.pull('origin', branch, {}, () => { if (!complete) { done(); } complete = true; }); }); }); /** * Generate API documentation for all js files, place markup in the correct folder for readthedocs.org */ gulp.task('docs', ['gitBranch', 'lintExterns'], (done) => { // Abort(successfully) early if running in CI and not job #1 if (!runDocs) { return done(); } gulp.src(sockFiles.concat(sockExterns)) .pipe(gulpJsdoc2md({})) .on('error', done) .pipe(rename((path) => { path.extname = '.md'; })) .pipe(gulp.dest('docs/api')) .on('finish', done); }); /** * Run all js files through eslint and report status. */ gulp.task('lintCore', () => { return gulp.src(sockFiles) .pipe(eslint()) .pipe(eslint.format()) .pipe(eslint.failAfterError()); }); /** * Run all js files through eslint and report status. */ gulp.task('lintExterns', (done) => { if (!runDocs) { return done(); } return gulp.src(sockExterns) .pipe(eslint()) .pipe(eslint.format()) .pipe(eslint.failAfterError()); }); /** * Run all tests through eslint and report status. 
*/ gulp.task('lintTests', () => { return gulp.src(sockTests) .pipe(eslint()) .pipe(eslint.format()) .pipe(eslint.failAfterError()); }); /** * Set git username/email to CI user */ gulp.task('gitConfig', (done) => { // Abort(successfully) early if not running in CI if (!JobNumber) { return done(); } git.exec({ args: 'config user.name "Travis-CI"' }, () => { git.exec({ args: 'config user.email "[email protected]"' }, () => { done(); }); }); }); /** * Commit generated documentation to be picked up by readthedocs.org * * Add CI tag to commit to prevent CI jobs from being created by checking in docs */ gulp.task('commitDocs', ['gitConfig'], (done) => { gulp.src(sockDocs) .pipe(git.add()) .pipe(git.commit('Automatically push updated documentation [ci skip]')) .on('error', () => 0) .on('finish', done); }); /** * Commit and push docs to github to be picked up by readthedocs.org */ gulp.task('pushDocs', ['gitConfig', 'commitDocs'], (done) => { //Abort(successfully) early if running in CI and not job #1 if (!runDocs) { return done(); } git.addRemote('github', 'https://github.com/SockDrawer/SockBot.git', (e) => { if (e) { done(); } else { git.push('github', 'HEAD', { args: ['-q'] }, () => { done(); }); } }); }); /** * Run code coverage instrumented tests */ gulp.task('test', ['lintCore', 'lintTests'], (done) => { gulp.src(sockFiles) // Instrument code files with istanbulHarmony .pipe(istanbul({ instrumenter: istanbulHarmony.Instrumenter, includeUntested: true })) // hook require function for complete code coverage .pipe(istanbul.hookRequire()) .on('finish', () => { // Run all tests gulp.src(sockTests) .pipe(mocha({ reporter: testReporter })) .on('error', done) // Write code coverage reports .pipe(istanbul.writeReports()) .on('finish', done); }); }); // Meta tasks gulp.task('buildDocs', ['docs'], () => 0); gulp.task('preBuild', ['buildDocs'], () => 0); gulp.task('postBuild', ['pushDocs'], () => 0); gulp.task('default', ['lint'], () => 0); gulp.task('lint', ['lintCore', 'lintTests', 'lintExterns'], () => 0);
gulpfile.js
'use strict'; const gulp = require('gulp'), gulpJsdoc2md = require('gulp-jsdoc-to-markdown'), rename = require('gulp-rename'), istanbul = require('gulp-istanbul'), istanbulHarmony = require('istanbul-harmony'), mocha = require('gulp-mocha'), eslint = require('gulp-eslint'), git = require('gulp-git'); const sockFiles = ['*.js', '!./gulpfile.js', '**/lib/**/*.js', '**/classes/**/*.js', '**/plugins/**/*.js', '!node_modules/**', '!test/**'], sockExterns = ['**/external/**/*.js'], sockDocs = ['README.md', 'docs/**/*.md'], sockTests = ['test/**/*.js']; const JobNumber = process.env.TRAVIS_JOB_NUMBER, PullRequestFlag = process.env.TRAVIS_PULL_REQUEST, PullRequest = PullRequestFlag && PullRequestFlag !== 'false', runDocs = !PullRequest && (!JobNumber || /[.]1$/.test(JobNumber)); /** * Pull git branch locally (solves detached head issue in CI) */ gulp.task('gitBranch', (done) => { let complete = false; const branch = process.env.TRAVIS_BRANCH; // Abort(successfully) early if not running in CI if (!(JobNumber && runDocs && branch)) { return done(); } git.checkout(branch, () => { // Make sure we have full log history. git.pull('origin', branch, {}, () => { if (!complete) { done(); } complete = true; }); }); }); /** * Generate API documentation for all js files, place markup in the correct folder for readthedocs.org */ gulp.task('docs', ['gitBranch', 'lintExterns'], (done) => { // Abort(successfully) early if running in CI and not job #1 if (!runDocs) { return done(); } gulp.src(sockFiles.concat(sockExterns)) .pipe(gulpJsdoc2md({})) .on('error', done) .pipe(rename((path) => { path.extname = '.md'; })) .pipe(gulp.dest('docs/api')) .on('finish', done); }); /** * Run all js files through eslint and report status. */ gulp.task('lintCore', () => { return gulp.src(sockFiles) .pipe(eslint()) .pipe(eslint.format()) .pipe(eslint.failAfterError()); }); /** * Run all js files through eslint and report status. */ gulp.task('lintExterns', (done) => { if (!runDocs) { return done(); } return gulp.src(sockExterns) .pipe(eslint()) .pipe(eslint.format()) .pipe(eslint.failAfterError()); }); /** * Run all tests through eslint and report status. 
*/ gulp.task('lintTests', () => { return gulp.src(sockTests) .pipe(eslint()) .pipe(eslint.format()) .pipe(eslint.failAfterError()); }); /** * Set git username/email to CI user */ gulp.task('gitConfig', (done) => { // Abort(successfully) early if not running in CI if (!JobNumber) { return done(); } git.exec({ args: 'config user.name "Travis-CI"' }, () => { git.exec({ args: 'config user.email "[email protected]"' }, () => { done(); }); }); }); /** * Commit generated documentation to be picked up by readthedocs.org * * Add CI tag to commit to prevent CI jobs from being created by checking in docs */ gulp.task('commitDocs', ['gitConfig'], (done) => { gulp.src(sockDocs) .pipe(git.add()) .pipe(git.commit('Automatically push updated documentation [ci skip]')) .on('error', () => 0) .on('finish', done); }); /** * Commit and push docs to github to be picked up by readthedocs.org */ gulp.task('pushDocs', ['gitConfig', 'commitDocs'], (done) => { //Abort(successfully) early if running in CI and not job #1 if (!runDocs) { return done(); } git.addRemote('github', 'https://github.com/SockDrawer/SockBot.git', (e) => { if (e) { done(); } else { git.push('github', 'HEAD', { args: ['-q'] }, () => { done(); }); } }); }); /** * Run code coverage instrumented tests */ gulp.task('test', ['lintCore', 'lintTests'], (done) => { gulp.src(sockFiles) // Instrument code files with istanbulHarmony .pipe(istanbul({ instrumenter: istanbulHarmony.Instrumenter, includeUntested: true })) // hook require function for complete code coverage .pipe(istanbul.hookRequire()) .on('finish', () => { // Run all tests gulp.src(sockTests) .pipe(mocha({ //reporter: 'dot' })) .on('error', done) // Write code coverage reports .pipe(istanbul.writeReports()) .on('finish', done); }); }); // Meta tasks gulp.task('buildDocs', ['docs'], () => 0); gulp.task('preBuild', ['buildDocs'], () => 0); gulp.task('postBuild', ['pushDocs'], () => 0); gulp.task('default', ['lint'], () => 0); gulp.task('lint', ['lintCore', 'lintTests', 'lintExterns'], () => 0);
change reporter based on CI status
gulpfile.js
change reporter based on CI status
<ide><path>gulpfile.js <ide> const JobNumber = process.env.TRAVIS_JOB_NUMBER, <ide> PullRequestFlag = process.env.TRAVIS_PULL_REQUEST, <ide> PullRequest = PullRequestFlag && PullRequestFlag !== 'false', <add> CI = process.env.CI === 'true', <ide> runDocs = !PullRequest && (!JobNumber || /[.]1$/.test(JobNumber)); <add> <add>const testReporter = CI ? 'spec': 'nyan'; <ide> <ide> /** <ide> * Pull git branch locally (solves detached head issue in CI) <ide> // Run all tests <ide> gulp.src(sockTests) <ide> .pipe(mocha({ <del> //reporter: 'dot' <add> reporter: testReporter <ide> })) <ide> .on('error', done) <ide> // Write code coverage reports
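The gulpfile change above picks the mocha reporter from the CI environment variable, using the machine-readable 'spec' reporter on CI and the interactive 'nyan' reporter locally. A hedged Java analogue of the same pattern (environment-variable driven configuration with a local default) is sketched below; only the 'spec'/'nyan' values come from the diff, while the class and method names are illustrative.

```java
// Sketch only: choose a reporter name from the CI environment variable,
// mirroring the gulpfile change above.
public final class ReporterConfig {
    private ReporterConfig() {
    }

    static String chooseReporter() {
        boolean ci = "true".equals(System.getenv("CI")); // CI services export CI=true
        return ci ? "spec" : "nyan";
    }

    public static void main(String[] args) {
        System.out.println("reporter = " + chooseReporter());
    }
}
```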
JavaScript
mit
b57b991672eda3d1c955d13cf4e83c58efa2ca6b
0
AyaNakazawa/business_card_bank,AyaNakazawa/business_card_bank,AyaNakazawa/business_card_bank
class BCBProcess extends CommonProcess { constructor() { super({ name: `${Project.NAME} Process` }); this.initProcess(); } initProcess() { this.initContent(); this.createDesc(); BCBProcess.initPopover(); this.initEvent(); this.initController(); this.show(); } initContent() { $('main').empty(); $('main').append(Content.getContent('desc-area')); $('main').append(Content.getContent('user-area')); $('main').append(Content.getContent('card-area')); } createDesc() { $('#desc-area').append(Content.getHeader('テストデータ')); $('#desc-area').append(Content.getItem({ name: 'test', keys: 'pass' })); $('#desc-area').append(Content.getItem({ name: 'test2', keys: 'pass2' })); $('#desc-area').append(Content.getItem({ name: 'aya', keys: 'P@ssw0rd' })); } static initPopover() { { new PopoverController({ name: 'ID Popover', selector: '#user-id-help', help: 'ID を入力してください。' }); new PopoverController({ name: 'Password Popover', selector: '#user-password-help', help: 'パスワード を入力してください。' }); new PopoverController({ name: 'Login Check Popover', selector: '#user-check-help', help: '共有デバイスでは設定に注意してください。' }); new PopoverController({ name: 'Logined ID Popover', selector: '#logined-id-help', help: 'ログインしている ID です。' }); } } initEvent() { CE = new CardEvent(); UE = new UserEvent(); { new SwitchEvent({ template: 'desc' }); new SwitchEvent({ template: 'user', view: true, lsKey: 'none' }); new SwitchEvent({ template: 'card', view: false, lsKey: 'none' }); } } initController() { } show() { $('main').slideDown(300); } }
js/myapp/bcb-process.js
class BCBProcess extends CommonProcess { constructor() { super({ name: `${Project.NAME} Process` }); BCBProcess.initProcess(); } static initProcess() { BCBProcess.initContent(); BCBProcess.createDesc(); BCBProcess.initPopover(); BCBProcess.initEvent(); BCBProcess.initController(); BCBProcess.show(); } static initContent() { $('main').empty(); $('main').append(Content.getContent('desc-area')); $('main').append(Content.getContent('user-area')); $('main').append(Content.getContent('card-area')); } static createDesc() { $('#desc-area').append(Content.getHeader('テストデータ')); $('#desc-area').append(Content.getItem({ name: 'test', keys: 'pass' })); $('#desc-area').append(Content.getItem({ name: 'test2', keys: 'pass2' })); $('#desc-area').append(Content.getItem({ name: 'aya', keys: 'P@ssw0rd' })); } static initPopover() { { new PopoverController({ name: 'ID Popover', selector: '#user-id-help', help: 'ID を入力してください。' }); new PopoverController({ name: 'Password Popover', selector: '#user-password-help', help: 'パスワード を入力してください。' }); new PopoverController({ name: 'Login Check Popover', selector: '#user-check-help', help: '共有デバイスでは設定に注意してください。' }); new PopoverController({ name: 'Logined ID Popover', selector: '#logined-id-help', help: 'ログインしている ID です。' }); } } static initEvent() { CE = new CardEvent(); UE = new UserEvent(); { new SwitchEvent({ template: 'desc' }); new SwitchEvent({ template: 'user', view: true, lsKey: 'none' }); new SwitchEvent({ template: 'card', view: false, lsKey: 'none' }); } } static initController() { } static show() { $('main').slideDown(300); } }
Remove static from some functions in BCBProcess
js/myapp/bcb-process.js
Remove static from some functions in BCBProcess
<ide><path>js/myapp/bcb-process.js <ide> name: `${Project.NAME} Process` <ide> }); <ide> <del> BCBProcess.initProcess(); <add> this.initProcess(); <ide> } <ide> <del> static initProcess() { <del> BCBProcess.initContent(); <del> BCBProcess.createDesc(); <add> initProcess() { <add> this.initContent(); <add> this.createDesc(); <ide> BCBProcess.initPopover(); <del> BCBProcess.initEvent(); <del> BCBProcess.initController(); <del> BCBProcess.show(); <add> this.initEvent(); <add> this.initController(); <add> this.show(); <ide> } <ide> <del> static initContent() { <add> initContent() { <ide> $('main').empty(); <ide> $('main').append(Content.getContent('desc-area')); <ide> $('main').append(Content.getContent('user-area')); <ide> $('main').append(Content.getContent('card-area')); <ide> } <ide> <del> static createDesc() { <add> createDesc() { <ide> $('#desc-area').append(Content.getHeader('テストデータ')); <ide> $('#desc-area').append(Content.getItem({ <ide> name: 'test', <ide> } <ide> } <ide> <del> static initEvent() { <add> initEvent() { <ide> CE = new CardEvent(); <ide> UE = new UserEvent(); <ide> { <ide> } <ide> } <ide> <del> static initController() { <add> initController() { <ide> <ide> } <ide> <del> static show() { <add> show() { <ide> $('main').slideDown(300); <ide> } <ide> }
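The diff above turns most of the BCBProcess initialisation helpers into instance methods called through this, leaving only the stateless popover setup static. The small Java sketch below illustrates that split under the same assumption: steps that belong to the object become instance methods invoked from the constructor, while a helper that touches no instance state stays static. All names here are illustrative, not taken from the project.

```java
// Sketch only: instance initialisation steps vs. a stateless static helper.
public class ProcessSketch {
    public ProcessSketch() {
        initContent();                // instance method, works on this object
        ProcessSketch.initPopover();  // static helper, needs no instance state
    }

    private void initContent() {
        System.out.println("initialising content for " + this);
    }

    private static void initPopover() {
        System.out.println("initialising popovers (no instance state needed)");
    }

    public static void main(String[] args) {
        new ProcessSketch();
    }
}
```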
Java
apache-2.0
07acf7d348fb8e9c6228c67ff6c7725f5837d394
0
mobica-lbn/opendoors17-aws,mobica-lbn/opendoors17-aws,mobica-lbn/opendoors17-aws
package com.mobica.cloud.aws.db; import com.amazonaws.services.dynamodbv2.datamodeling.*; import static com.mobica.cloud.aws.db.DbMessage.TABLE_NAME; @DynamoDBTable(tableName = TABLE_NAME) public class DbMessage { public static final String TABLE_NAME = "opendoors17-dev-test"; public static final String ID_NAME = "id"; @DynamoDBHashKey(attributeName = ID_NAME) private Long id; private String title; private String author; private String isbn; private String kind1; private String kind2; private String kind3; private String keyword1; private String keyword2; public DbMessage() { } public DbMessage(Long id) { this.id = id; } public Long getId() { return id; } public void setId(Long id) { this.id = id; } public String getTitle() { return title; } public void setTitle(String title) { this.title = title; } public String getAuthor() { return author; } public void setAuthor(String author) { this.author = author; } public String getIsbn() { return isbn; } public void setIsbn(String isbn) { this.isbn = isbn; } public String getKind1() { return kind1; } public void setKind1(String kind1) { this.kind1 = kind1; } public String getKind2() { return kind2; } public void setKind2(String kind2) { this.kind2 = kind2; } public String getKind3() { return kind3; } public void setKind3(String kind3) { this.kind3 = kind3; } public String getKeyword1() { return keyword1; } public void setKeyword1(String keyword1) { this.keyword1 = keyword1; } public String getKeyword2() { return keyword2; } public void setKeyword2(String keyword2) { this.keyword2 = keyword2; } @Override public String toString() { return "DbMessage{" + "id=" + id + ", title='" + title + '\'' + ", author='" + author + '\'' + ", isbn='" + isbn + '\'' + ", kind1='" + kind1 + '\'' + ", kind2='" + kind2 + '\'' + ", kind3='" + kind3 + '\'' + ", keyword1='" + keyword1 + '\'' + ", keyword2='" + keyword2 + '\'' + '}'; } }
aws-sqs/src/main/java/com/mobica/cloud/aws/db/DbMessage.java
package com.mobica.cloud.aws.db; import com.amazonaws.services.dynamodbv2.datamodeling.*; import static com.mobica.cloud.aws.db.DbMessage.TABLE_NAME; @DynamoDBTable(tableName = TABLE_NAME) public class DbMessage { public static final String TABLE_NAME = "opendoors17-dev-test"; public static final String ID_NAME = "id"; @DynamoDBHashKey(attributeName = ID_NAME) private Long id; private String title; private String author; private String isbn; private String kind1; private String kind2; private String kind3; private String keyword1; private String keyword2; public DbMessage() { } public DbMessage(Long id) { this.id = id; } public Long getId() { return id; } public void setId(Long id) { this.id = id; } public String getTitle() { return title; } public void setTitle(String title) { this.title = title; } public String getAuthor() { return author; } public void setAuthor(String author) { this.author = author; } public String getIsbn() { return isbn; } public void setIsbn(String isbn) { this.isbn = isbn; } public String getKind1() { return kind1; } public void setKind1(String kind1) { this.kind1 = kind1; } public String getKind2() { return kind2; } public void setKind2(String kind2) { this.kind2 = kind2; } public String getKind3() { return kind3; } public void setKind3(String kind3) { this.kind3 = kind3; } public String getKeyword1() { return keyword1; } public void setKeyword1(String keyword1) { this.keyword1 = keyword1; } public String getKeyword2() { return keyword2; } public void setKeyword2(String keyword2) { this.keyword2 = keyword2; } }
Added toString to DbMessage
aws-sqs/src/main/java/com/mobica/cloud/aws/db/DbMessage.java
Added toString to DbMessage
<ide><path>aws-sqs/src/main/java/com/mobica/cloud/aws/db/DbMessage.java <ide> public void setKeyword2(String keyword2) { <ide> this.keyword2 = keyword2; <ide> } <add> <add> @Override <add> public String toString() { <add> return "DbMessage{" + <add> "id=" + id + <add> ", title='" + title + '\'' + <add> ", author='" + author + '\'' + <add> ", isbn='" + isbn + '\'' + <add> ", kind1='" + kind1 + '\'' + <add> ", kind2='" + kind2 + '\'' + <add> ", kind3='" + kind3 + '\'' + <add> ", keyword1='" + keyword1 + '\'' + <add> ", keyword2='" + keyword2 + '\'' + <add> '}'; <add> } <ide> }
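The record above adds a toString() to a DynamoDBMapper-annotated bean. The sketch below shows typical DynamoDBMapper usage for such a bean, where the new toString() makes the loaded item easy to log; the default-client construction, AWS credentials, and an existing "opendoors17-dev-test" table are assumptions of this sketch, and the class name DbMessageRoundTrip is illustrative.

```java
// Sketch only: save and load a DbMessage item with DynamoDBMapper (AWS SDK for Java v1),
// then print it via the toString() added in the commit above.
import com.amazonaws.services.dynamodbv2.AmazonDynamoDB;
import com.amazonaws.services.dynamodbv2.AmazonDynamoDBClientBuilder;
import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBMapper;
import com.mobica.cloud.aws.db.DbMessage;

public class DbMessageRoundTrip {
    public static void main(String[] args) {
        AmazonDynamoDB client = AmazonDynamoDBClientBuilder.defaultClient(); // assumes configured credentials/region
        DynamoDBMapper mapper = new DynamoDBMapper(client);

        DbMessage message = new DbMessage(1L);
        message.setTitle("Example title");
        mapper.save(message);                            // writes to the table named in @DynamoDBTable

        DbMessage loaded = mapper.load(DbMessage.class, 1L);
        System.out.println(loaded);                      // uses the newly added toString()
    }
}
```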
Java
apache-2.0
a649c53838b2541453dfba708e849947dab1662d
0
alxdarksage/BridgePF,DwayneJengSage/BridgePF,DwayneJengSage/BridgePF,Sage-Bionetworks/BridgePF,Sage-Bionetworks/BridgePF,Sage-Bionetworks/BridgePF,DwayneJengSage/BridgePF,alxdarksage/BridgePF,alxdarksage/BridgePF
package org.sagebionetworks.bridge; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import static java.lang.Integer.parseInt; import static org.apache.commons.lang3.StringUtils.isBlank; import static org.springframework.util.StringUtils.commaDelimitedListToSet; import java.io.UnsupportedEncodingException; import java.net.URI; import java.net.URLEncoder; import java.security.SecureRandom; import java.util.ArrayList; import java.util.Collection; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.UUID; import java.util.stream.Collectors; import javax.annotation.Nonnull; import org.apache.commons.lang3.StringUtils; import org.joda.time.DateTime; import org.joda.time.LocalDate; import org.joda.time.LocalDateTime; import org.sagebionetworks.bridge.config.BridgeConfigFactory; import org.sagebionetworks.bridge.exceptions.BadRequestException; import org.sagebionetworks.bridge.exceptions.BridgeServiceException; import org.sagebionetworks.bridge.json.BridgeTypeName; import org.sagebionetworks.bridge.time.DateUtils; import org.sagebionetworks.bridge.models.Tuple; import org.sagebionetworks.bridge.models.accounts.Account; import org.sagebionetworks.bridge.models.accounts.AccountId; import org.sagebionetworks.bridge.models.accounts.StudyParticipant; import org.sagebionetworks.bridge.models.schedules.Activity; import org.sagebionetworks.bridge.models.schedules.ActivityType; import org.sagebionetworks.bridge.models.studies.PasswordPolicy; import org.sagebionetworks.bridge.models.studies.Study; import org.sagebionetworks.bridge.models.substudies.AccountSubstudy; import org.springframework.core.annotation.AnnotationUtils; import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBMapper.FailedBatch; import com.google.common.base.Function; import com.google.common.base.Joiner; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.collect.Maps; public class BridgeUtils { public static final Joiner AND_JOINER = Joiner.on(" AND "); public static final Joiner COMMA_SPACE_JOINER = Joiner.on(", "); public static final Joiner COMMA_JOINER = Joiner.on(","); public static final Joiner SEMICOLON_SPACE_JOINER = Joiner.on("; "); public static final Joiner SPACE_JOINER = Joiner.on(" "); private static final int ONE_HOUR = 60*60; private static final int ONE_DAY = 60*60*24; private static final int ONE_MINUTE = 60; private static final SecureRandom SECURE_RANDOM = new SecureRandom(); // ThreadLocals are weird. They are basically a container that allows us to hold "global variables" for each // thread. This can be used, for example, to provide the request ID to any class without having to plumb a // "request context" object into every method of every class. 
private static final ThreadLocal<RequestContext> REQUEST_CONTEXT_THREAD_LOCAL = ThreadLocal.withInitial(() -> null); public static Tuple<String> parseAutoEventValue(String automaticEventValue) { int lastIndex = automaticEventValue.lastIndexOf(":P"); if (lastIndex == -1) { // This will certainly not pass validation return new Tuple<>(null, automaticEventValue); } return new Tuple<>(automaticEventValue.substring(0, lastIndex), automaticEventValue.substring(lastIndex+1)); } public static boolean isExternalIdAccount(StudyParticipant participant) { return (StringUtils.isNotBlank(participant.getExternalId()) && StringUtils.isBlank(participant.getEmail()) && participant.getPhone() == null); } /** Gets the request context for the current thread. See also RequestInterceptor. */ public static RequestContext getRequestContext() { RequestContext context = REQUEST_CONTEXT_THREAD_LOCAL.get(); if (context == null) { return RequestContext.NULL_INSTANCE; } return context; } /** @see #getRequestContext */ public static void setRequestContext(RequestContext context) { REQUEST_CONTEXT_THREAD_LOCAL.set(context); } public static Account filterForSubstudy(Account account) { if (account != null) { RequestContext context = getRequestContext(); Set<String> callerSubstudies = context.getCallerSubstudies(); if (BridgeUtils.isEmpty(callerSubstudies)) { return account; } for (AccountSubstudy accountSubstudy : account.getAccountSubstudies()) { if (callerSubstudies.contains(accountSubstudy.getSubstudyId())) { return account; } } } return null; } /** * Convert expiration measures in seconds to an English language explanation of * the expiration time. This is not intended to cover odd cases--our expirations * are in minutes, hours, or possibly days. */ public static String secondsToPeriodString(int seconds) { if (seconds >= (ONE_DAY*2) && seconds % ONE_DAY == 0) { return Integer.toString(seconds/ONE_DAY) + " days"; } else if (seconds >= ONE_DAY && seconds % ONE_DAY == 0) { return Integer.toString(seconds/ONE_DAY) + " day"; } else if (seconds >= (ONE_HOUR*2) && seconds % ONE_HOUR == 0) { return Integer.toString(seconds/ONE_HOUR) + " hours"; } else if (seconds >= ONE_HOUR && seconds % ONE_HOUR == 0) { return Integer.toString(seconds/ONE_HOUR) + " hour"; } else if (seconds >= (ONE_MINUTE*2) && seconds % ONE_MINUTE == 0) { return Integer.toString(seconds/ONE_MINUTE) + " minutes"; } else if (seconds >= ONE_MINUTE && seconds % ONE_MINUTE == 0) { return Integer.toString(seconds/ONE_MINUTE) + " minute"; } return Integer.toString(seconds) + " seconds"; } public static AccountId parseAccountId(String studyId, String identifier) { checkNotNull(studyId); checkNotNull(identifier); if (identifier.toLowerCase().startsWith("externalid:")) { return AccountId.forExternalId(studyId, identifier.substring(11)); } else if (identifier.toLowerCase().startsWith("healthcode:")) { return AccountId.forHealthCode(studyId, identifier.substring(11)); } return AccountId.forId(studyId, identifier); } /** * Create a variable map for the <code>resolveTemplate</code> method that includes common values from * a study that used in most of our templates. The map is mutable. 
Variables include: * <ul> * <li>studyName = study.getName()</li> * <li>studyShortName = study.getShortName()</li> * <li>studyId = study.getIdentifier()</li> * <li>sponsorName = study.getSponsorName()</li> * <li>supportEmail = study.getSupportEmail()</li> * <li>technicalEmail = study.getTechnicalEmail()</li> * <li>consentEmail = study.getConsentNotificationEmail()</li> * </ul> */ public static Map<String,String> studyTemplateVariables(Study study, Function<String,String> escaper) { Map<String,String> map = Maps.newHashMap(); map.put("studyName", study.getName()); map.put("studyShortName", study.getShortName()); map.put("studyId", study.getIdentifier()); map.put("sponsorName", study.getSponsorName()); map.put("supportEmail", Iterables.getFirst(commaListToOrderedSet(study.getSupportEmail()), "")); map.put("technicalEmail", Iterables.getFirst(commaListToOrderedSet(study.getTechnicalEmail()), "")); if (study.getConsentNotificationEmail() != null) { map.put("consentEmail", Iterables.getFirst(commaListToOrderedSet(study.getConsentNotificationEmail()), "")); } map.put("host", BridgeConfigFactory.getConfig().getHostnameWithPostfix("ws")); if (escaper != null) { for (Map.Entry<String,String> entry : map.entrySet()) { map.put(entry.getKey(), escaper.apply(entry.getValue())); } } return map; } public static Map<String,String> studyTemplateVariables(Study study) { return studyTemplateVariables(study, null); } /** * A simple means of providing template variables in template strings, in the format <code>${variableName}</code>. * This value will be replaced with the value of the variable name. The variable name/value pairs are passed to the * method as a map. Variables that are not found in the map will be left in the string as is. * * @see https://sagebionetworks.jira.com/wiki/display/BRIDGE/EmailTemplate * * @param template * @param values * @return */ public static String resolveTemplate(String template, Map<String,String> values) { checkNotNull(template); checkNotNull(values); for (Map.Entry<String,String> entry : values.entrySet()) { if (entry.getValue() != null) { String var = "${"+entry.getKey()+"}"; template = template.replace(var, entry.getValue()); } } return template; } public static String generateGuid() { return UUID.randomUUID().toString(); } /** Generate a random 16-byte salt, using a {@link SecureRandom}. */ public static byte[] generateSalt() { byte[] salt = new byte[16]; SECURE_RANDOM.nextBytes(salt); return salt; } /** * Searches for a @BridgeTypeName annotation on this or any parent class in the class hierarchy, returning * that value as the type name. If none exists, defaults to the simple class name. * @param clazz * @return */ public static String getTypeName(Class<?> clazz) { BridgeTypeName att = AnnotationUtils.findAnnotation(clazz,BridgeTypeName.class); if (att != null) { return att.value(); } return clazz.getSimpleName(); } /** * All batch methods in Dynamo return a list of failures rather than * throwing an exception. 
We should have an exception specifically for * these so the caller gets a list of items back, but for now, convert * to a generic exception; * @param failures */ public static void ifFailuresThrowException(List<FailedBatch> failures) { if (!failures.isEmpty()) { List<String> messages = Lists.newArrayList(); for (FailedBatch failure : failures) { String message = failure.getException().getMessage(); messages.add(message); String ids = Joiner.on("; ").join(failure.getUnprocessedItems().keySet()); messages.add(ids); } throw new BridgeServiceException(Joiner.on(", ").join(messages)); } } public static boolean isEmpty(Collection<?> coll) { return (coll == null || coll.isEmpty()); } public static <S,T> Map<S,T> asMap(List<T> list, Function<T,S> function) { Map<S,T> map = Maps.newHashMap(); if (list != null && function != null) { for (T item : list) { map.put(function.apply(item), item); } } return map; } public static Long parseLong(String value) { try { return Long.parseLong(value); } catch(NumberFormatException e) { throw new RuntimeException("'" + value + "' is not a valid integer"); } } public static Set<String> commaListToOrderedSet(String commaList) { if (commaList != null) { // This implementation must return a LinkedHashSet. This is a set // with ordered keys, in the order they were in the string, as some // set serializations depend on the order of the keys (languages). return commaDelimitedListToSet(commaList).stream() .map(string -> string.trim()) .filter(StringUtils::isNotBlank) .collect(Collectors.toCollection(LinkedHashSet::new)); } // Cannot make this immutable without losing the concrete type we rely // upon to ensure they keys are in the order they are inserted. return new LinkedHashSet<String>(); } public static String setToCommaList(Set<String> set) { if (set != null) { // User LinkedHashSet because some supplied sets will have ordered keys // and we want to preserve that order while processing the set. Set<String> result = set.stream() .filter(StringUtils::isNotBlank) .collect(Collectors.toCollection(LinkedHashSet::new)); return (result.isEmpty()) ? null : COMMA_JOINER.join(result); } return null; } /** * Wraps a set in an immutable set, or returns an empty immutable set if null. * @param set * @return */ public @Nonnull static <T> ImmutableSet<T> nullSafeImmutableSet(Set<T> set) { return (set == null) ? ImmutableSet.of() : ImmutableSet.copyOf(set.stream() .filter(element -> element != null).collect(Collectors.toSet())); } public @Nonnull static <T> ImmutableList<T> nullSafeImmutableList(List<T> list) { return (list == null) ? ImmutableList.of() : ImmutableList.copyOf(list.stream() .filter(element -> element != null).collect(Collectors.toList())); } public @Nonnull static <S,T> ImmutableMap<S,T> nullSafeImmutableMap(Map<S,T> map) { ImmutableMap.Builder<S, T> builder = new ImmutableMap.Builder<>(); if (map != null) { for (S key : map.keySet()) { if (map.get(key) != null) { builder.put(key, map.get(key)); } } } return builder.build(); } /** * Converts a string to an error key friendly string, e.g. "iPhone OS" is converted to "iphone_os". * * @throws IllegalArgumentException * if the string cannot be converted to an error key. */ public static String textToErrorKey(String text) { if (StringUtils.isBlank(text)) { throw new IllegalArgumentException("String is not translatable to an error key: " + text); } return text.toLowerCase().replaceAll(" ", "_").replaceAll("[^a-zA-Z0-9_-]", ""); } /** * Parse the string as an integer value, or return the defaultValue if it is null. 
* If the value is provided but not a parseable integer, thrown a BadRequestException. */ public static int getIntOrDefault(String value, int defaultValue) { if (isBlank(value)) { return defaultValue; } try { return parseInt(value); } catch(NumberFormatException e) { throw new BadRequestException(value + " is not an integer"); } } /** * Parse the string as a long value, or return the defaultValue if it is null. * If the value is provided but not a parseable long, thrown a BadRequestException. */ public static Long getLongOrDefault(String value, Long defaultValue) { if (isBlank(value)) { return defaultValue; } try { return parseLong(value); } catch(RuntimeException e) { throw new BadRequestException(value + " is not a long"); } } /** * Parse the string as a DateTime value, or return the defaultValue if it is null. * If the value is provided but not a parseable DateTime, thrown a BadRequestException. */ public static DateTime getDateTimeOrDefault(String value, DateTime defaultValue) { if (isBlank(value)) { return defaultValue; } try { return DateTime.parse(value); } catch(Exception e) { throw new BadRequestException(value + " is not a DateTime value"); } } public static LocalDate getLocalDateOrDefault(String value, LocalDate defaultValue) { if (isBlank(value)) { return defaultValue; } else { try { return DateUtils.parseCalendarDate(value); } catch (RuntimeException ex) { throw new BadRequestException(value + " is not a LocalDate value"); } } } /** * Creates a new copy of the map, removing any entries that have a null value (particularly easy to do this in * JSON). */ public static <K,V> Map<K,V> withoutNullEntries(Map<K,V> map) { checkNotNull(map); return map.entrySet().stream().filter(e -> e.getValue() != null).collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); } /** Helper method which puts something to a map, or removes it from the map if the value is null. */ public static <K,V> void putOrRemove(Map<K,V> map, K key, V value) { checkNotNull(map); checkNotNull(key); if (value != null) { map.put(key, value); } else { map.remove(key); } } public static String encodeURIComponent(String component) { String encoded = null; if (component != null) { try { encoded = URLEncoder.encode(component, "UTF-8"); } catch (UnsupportedEncodingException e) { // UTF-8 is always supported, so this should never happen. 
throw new BridgeServiceException(e.getMessage()); } } return encoded; } public static String passwordPolicyDescription(PasswordPolicy policy) { StringBuilder sb = new StringBuilder(); sb.append("Password must be ").append(policy.getMinLength()).append(" or more characters"); if (policy.isLowerCaseRequired() || policy.isNumericRequired() || policy.isSymbolRequired() || policy.isUpperCaseRequired()) { sb.append(", and must contain at least "); List<String> phrases = new ArrayList<>(); if (policy.isLowerCaseRequired()) { phrases.add("one lower-case letter"); } if (policy.isUpperCaseRequired()) { phrases.add("one upper-case letter"); } if (policy.isNumericRequired()) { phrases.add("one number"); } if (policy.isSymbolRequired()) { // !\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ phrases.add("one symbolic character (non-alphanumerics like #$%&@)"); } for (int i=0; i < phrases.size(); i++) { if (i == phrases.size()-1) { sb.append(", and "); } else if (i > 0) { sb.append(", "); } sb.append(phrases.get(i)); } } sb.append("."); return sb.toString(); } public static String extractPasswordFromURI(URI uri) { boolean hasPassword = (uri.getUserInfo() != null && uri.getUserInfo().contains(":")); return (hasPassword) ? uri.getUserInfo().split(":")[1] : null; } public static String createReferentGuidIndex(ActivityType type, String guid, String localDateTime) { checkNotNull(type); checkNotNull(guid); checkNotNull(localDateTime); return String.format("%s:%s:%s", guid , type.name().toLowerCase(), localDateTime); } public static String createReferentGuidIndex(Activity activity, LocalDateTime localDateTime) { checkNotNull(activity); checkNotNull(localDateTime); ActivityType type = activity.getActivityType(); String timestamp = localDateTime.toString(); switch(type) { case COMPOUND: return createReferentGuidIndex(type, activity.getCompoundActivity().getTaskIdentifier(), timestamp); case SURVEY: return createReferentGuidIndex(type, activity.getSurvey().getGuid(), timestamp); case TASK: return createReferentGuidIndex(type, activity.getTask().getIdentifier(), timestamp); } throw new BridgeServiceException("Invalid activityType specified"); } public static String toSynapseFriendlyName(String input) { checkNotNull(input); String value = input.replaceAll("[^a-zA-Z0-9\\.\\-_\\s]", " ").replaceAll("\\s+", " ").trim(); checkArgument(StringUtils.isNotBlank(value)); return value; } }
app/org/sagebionetworks/bridge/BridgeUtils.java
package org.sagebionetworks.bridge; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import static java.lang.Integer.parseInt; import static org.apache.commons.lang3.StringUtils.isBlank; import static org.springframework.util.StringUtils.commaDelimitedListToSet; import java.io.UnsupportedEncodingException; import java.net.URI; import java.net.URLEncoder; import java.security.SecureRandom; import java.util.ArrayList; import java.util.Collection; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.UUID; import java.util.stream.Collectors; import javax.annotation.Nonnull; import org.apache.commons.lang3.StringUtils; import org.joda.time.DateTime; import org.joda.time.LocalDate; import org.joda.time.LocalDateTime; import org.sagebionetworks.bridge.config.BridgeConfigFactory; import org.sagebionetworks.bridge.exceptions.BadRequestException; import org.sagebionetworks.bridge.exceptions.BridgeServiceException; import org.sagebionetworks.bridge.hibernate.HibernateAccount; import org.sagebionetworks.bridge.json.BridgeTypeName; import org.sagebionetworks.bridge.time.DateUtils; import org.sagebionetworks.bridge.models.Tuple; import org.sagebionetworks.bridge.models.accounts.Account; import org.sagebionetworks.bridge.models.accounts.AccountId; import org.sagebionetworks.bridge.models.accounts.StudyParticipant; import org.sagebionetworks.bridge.models.schedules.Activity; import org.sagebionetworks.bridge.models.schedules.ActivityType; import org.sagebionetworks.bridge.models.studies.PasswordPolicy; import org.sagebionetworks.bridge.models.studies.Study; import org.sagebionetworks.bridge.models.substudies.AccountSubstudy; import org.springframework.core.annotation.AnnotationUtils; import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBMapper.FailedBatch; import com.google.common.base.Function; import com.google.common.base.Joiner; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.collect.Maps; public class BridgeUtils { public static final Joiner AND_JOINER = Joiner.on(" AND "); public static final Joiner COMMA_SPACE_JOINER = Joiner.on(", "); public static final Joiner COMMA_JOINER = Joiner.on(","); public static final Joiner SEMICOLON_SPACE_JOINER = Joiner.on("; "); public static final Joiner SPACE_JOINER = Joiner.on(" "); private static final int ONE_HOUR = 60*60; private static final int ONE_DAY = 60*60*24; private static final int ONE_MINUTE = 60; private static final SecureRandom SECURE_RANDOM = new SecureRandom(); // ThreadLocals are weird. They are basically a container that allows us to hold "global variables" for each // thread. This can be used, for example, to provide the request ID to any class without having to plumb a // "request context" object into every method of every class. 
private static final ThreadLocal<RequestContext> REQUEST_CONTEXT_THREAD_LOCAL = ThreadLocal.withInitial(() -> null); public static Tuple<String> parseAutoEventValue(String automaticEventValue) { int lastIndex = automaticEventValue.lastIndexOf(":P"); if (lastIndex == -1) { // This will certainly not pass validation return new Tuple<>(null, automaticEventValue); } return new Tuple<>(automaticEventValue.substring(0, lastIndex), automaticEventValue.substring(lastIndex+1)); } public static boolean isExternalIdAccount(StudyParticipant participant) { return (StringUtils.isNotBlank(participant.getExternalId()) && StringUtils.isBlank(participant.getEmail()) && participant.getPhone() == null); } /** Gets the request context for the current thread. See also RequestInterceptor. */ public static RequestContext getRequestContext() { RequestContext context = REQUEST_CONTEXT_THREAD_LOCAL.get(); if (context == null) { return RequestContext.NULL_INSTANCE; } return context; } /** @see #getRequestContext */ public static void setRequestContext(RequestContext context) { REQUEST_CONTEXT_THREAD_LOCAL.set(context); } public static Account filterForSubstudy(Account account) { if (account != null) { RequestContext context = getRequestContext(); Set<String> callerSubstudies = context.getCallerSubstudies(); if (BridgeUtils.isEmpty(callerSubstudies)) { return account; } for (AccountSubstudy accountSubstudy : account.getAccountSubstudies()) { if (callerSubstudies.contains(accountSubstudy.getSubstudyId())) { return account; } } } return null; } /** * Convert expiration measures in seconds to an English language explanation of * the expiration time. This is not intended to cover odd cases--our expirations * are in minutes, hours, or possibly days. */ public static String secondsToPeriodString(int seconds) { if (seconds >= (ONE_DAY*2) && seconds % ONE_DAY == 0) { return Integer.toString(seconds/ONE_DAY) + " days"; } else if (seconds >= ONE_DAY && seconds % ONE_DAY == 0) { return Integer.toString(seconds/ONE_DAY) + " day"; } else if (seconds >= (ONE_HOUR*2) && seconds % ONE_HOUR == 0) { return Integer.toString(seconds/ONE_HOUR) + " hours"; } else if (seconds >= ONE_HOUR && seconds % ONE_HOUR == 0) { return Integer.toString(seconds/ONE_HOUR) + " hour"; } else if (seconds >= (ONE_MINUTE*2) && seconds % ONE_MINUTE == 0) { return Integer.toString(seconds/ONE_MINUTE) + " minutes"; } else if (seconds >= ONE_MINUTE && seconds % ONE_MINUTE == 0) { return Integer.toString(seconds/ONE_MINUTE) + " minute"; } return Integer.toString(seconds) + " seconds"; } public static AccountId parseAccountId(String studyId, String identifier) { checkNotNull(studyId); checkNotNull(identifier); if (identifier.toLowerCase().startsWith("externalid:")) { return AccountId.forExternalId(studyId, identifier.substring(11)); } else if (identifier.toLowerCase().startsWith("healthcode:")) { return AccountId.forHealthCode(studyId, identifier.substring(11)); } return AccountId.forId(studyId, identifier); } /** * Create a variable map for the <code>resolveTemplate</code> method that includes common values from * a study that used in most of our templates. The map is mutable. 
Variables include: * <ul> * <li>studyName = study.getName()</li> * <li>studyShortName = study.getShortName()</li> * <li>studyId = study.getIdentifier()</li> * <li>sponsorName = study.getSponsorName()</li> * <li>supportEmail = study.getSupportEmail()</li> * <li>technicalEmail = study.getTechnicalEmail()</li> * <li>consentEmail = study.getConsentNotificationEmail()</li> * </ul> */ public static Map<String,String> studyTemplateVariables(Study study, Function<String,String> escaper) { Map<String,String> map = Maps.newHashMap(); map.put("studyName", study.getName()); map.put("studyShortName", study.getShortName()); map.put("studyId", study.getIdentifier()); map.put("sponsorName", study.getSponsorName()); map.put("supportEmail", Iterables.getFirst(commaListToOrderedSet(study.getSupportEmail()), "")); map.put("technicalEmail", Iterables.getFirst(commaListToOrderedSet(study.getTechnicalEmail()), "")); if (study.getConsentNotificationEmail() != null) { map.put("consentEmail", Iterables.getFirst(commaListToOrderedSet(study.getConsentNotificationEmail()), "")); } map.put("host", BridgeConfigFactory.getConfig().getHostnameWithPostfix("ws")); if (escaper != null) { for (Map.Entry<String,String> entry : map.entrySet()) { map.put(entry.getKey(), escaper.apply(entry.getValue())); } } return map; } public static Map<String,String> studyTemplateVariables(Study study) { return studyTemplateVariables(study, null); } /** * A simple means of providing template variables in template strings, in the format <code>${variableName}</code>. * This value will be replaced with the value of the variable name. The variable name/value pairs are passed to the * method as a map. Variables that are not found in the map will be left in the string as is. * * @see https://sagebionetworks.jira.com/wiki/display/BRIDGE/EmailTemplate * * @param template * @param values * @return */ public static String resolveTemplate(String template, Map<String,String> values) { checkNotNull(template); checkNotNull(values); for (Map.Entry<String,String> entry : values.entrySet()) { if (entry.getValue() != null) { String var = "${"+entry.getKey()+"}"; template = template.replace(var, entry.getValue()); } } return template; } public static String generateGuid() { return UUID.randomUUID().toString(); } /** Generate a random 16-byte salt, using a {@link SecureRandom}. */ public static byte[] generateSalt() { byte[] salt = new byte[16]; SECURE_RANDOM.nextBytes(salt); return salt; } /** * Searches for a @BridgeTypeName annotation on this or any parent class in the class hierarchy, returning * that value as the type name. If none exists, defaults to the simple class name. * @param clazz * @return */ public static String getTypeName(Class<?> clazz) { BridgeTypeName att = AnnotationUtils.findAnnotation(clazz,BridgeTypeName.class); if (att != null) { return att.value(); } return clazz.getSimpleName(); } /** * All batch methods in Dynamo return a list of failures rather than * throwing an exception. 
We should have an exception specifically for * these so the caller gets a list of items back, but for now, convert * to a generic exception; * @param failures */ public static void ifFailuresThrowException(List<FailedBatch> failures) { if (!failures.isEmpty()) { List<String> messages = Lists.newArrayList(); for (FailedBatch failure : failures) { String message = failure.getException().getMessage(); messages.add(message); String ids = Joiner.on("; ").join(failure.getUnprocessedItems().keySet()); messages.add(ids); } throw new BridgeServiceException(Joiner.on(", ").join(messages)); } } public static boolean isEmpty(Collection<?> coll) { return (coll == null || coll.isEmpty()); } public static <S,T> Map<S,T> asMap(List<T> list, Function<T,S> function) { Map<S,T> map = Maps.newHashMap(); if (list != null && function != null) { for (T item : list) { map.put(function.apply(item), item); } } return map; } public static Long parseLong(String value) { try { return Long.parseLong(value); } catch(NumberFormatException e) { throw new RuntimeException("'" + value + "' is not a valid integer"); } } public static Set<String> commaListToOrderedSet(String commaList) { if (commaList != null) { // This implementation must return a LinkedHashSet. This is a set // with ordered keys, in the order they were in the string, as some // set serializations depend on the order of the keys (languages). return commaDelimitedListToSet(commaList).stream() .map(string -> string.trim()) .filter(StringUtils::isNotBlank) .collect(Collectors.toCollection(LinkedHashSet::new)); } // Cannot make this immutable without losing the concrete type we rely // upon to ensure they keys are in the order they are inserted. return new LinkedHashSet<String>(); } public static String setToCommaList(Set<String> set) { if (set != null) { // User LinkedHashSet because some supplied sets will have ordered keys // and we want to preserve that order while processing the set. Set<String> result = set.stream() .filter(StringUtils::isNotBlank) .collect(Collectors.toCollection(LinkedHashSet::new)); return (result.isEmpty()) ? null : COMMA_JOINER.join(result); } return null; } /** * Wraps a set in an immutable set, or returns an empty immutable set if null. * @param set * @return */ public @Nonnull static <T> ImmutableSet<T> nullSafeImmutableSet(Set<T> set) { return (set == null) ? ImmutableSet.of() : ImmutableSet.copyOf(set.stream() .filter(element -> element != null).collect(Collectors.toSet())); } public @Nonnull static <T> ImmutableList<T> nullSafeImmutableList(List<T> list) { return (list == null) ? ImmutableList.of() : ImmutableList.copyOf(list.stream() .filter(element -> element != null).collect(Collectors.toList())); } public @Nonnull static <S,T> ImmutableMap<S,T> nullSafeImmutableMap(Map<S,T> map) { ImmutableMap.Builder<S, T> builder = new ImmutableMap.Builder<>(); if (map != null) { for (S key : map.keySet()) { if (map.get(key) != null) { builder.put(key, map.get(key)); } } } return builder.build(); } /** * Converts a string to an error key friendly string, e.g. "iPhone OS" is converted to "iphone_os". * * @throws IllegalArgumentException * if the string cannot be converted to an error key. */ public static String textToErrorKey(String text) { if (StringUtils.isBlank(text)) { throw new IllegalArgumentException("String is not translatable to an error key: " + text); } return text.toLowerCase().replaceAll(" ", "_").replaceAll("[^a-zA-Z0-9_-]", ""); } /** * Parse the string as an integer value, or return the defaultValue if it is null. 
* If the value is provided but not a parseable integer, thrown a BadRequestException. */ public static int getIntOrDefault(String value, int defaultValue) { if (isBlank(value)) { return defaultValue; } try { return parseInt(value); } catch(NumberFormatException e) { throw new BadRequestException(value + " is not an integer"); } } /** * Parse the string as a long value, or return the defaultValue if it is null. * If the value is provided but not a parseable long, thrown a BadRequestException. */ public static Long getLongOrDefault(String value, Long defaultValue) { if (isBlank(value)) { return defaultValue; } try { return parseLong(value); } catch(RuntimeException e) { throw new BadRequestException(value + " is not a long"); } } /** * Parse the string as a DateTime value, or return the defaultValue if it is null. * If the value is provided but not a parseable DateTime, thrown a BadRequestException. */ public static DateTime getDateTimeOrDefault(String value, DateTime defaultValue) { if (isBlank(value)) { return defaultValue; } try { return DateTime.parse(value); } catch(Exception e) { throw new BadRequestException(value + " is not a DateTime value"); } } public static LocalDate getLocalDateOrDefault(String value, LocalDate defaultValue) { if (isBlank(value)) { return defaultValue; } else { try { return DateUtils.parseCalendarDate(value); } catch (RuntimeException ex) { throw new BadRequestException(value + " is not a LocalDate value"); } } } /** * Creates a new copy of the map, removing any entries that have a null value (particularly easy to do this in * JSON). */ public static <K,V> Map<K,V> withoutNullEntries(Map<K,V> map) { checkNotNull(map); return map.entrySet().stream().filter(e -> e.getValue() != null).collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); } /** Helper method which puts something to a map, or removes it from the map if the value is null. */ public static <K,V> void putOrRemove(Map<K,V> map, K key, V value) { checkNotNull(map); checkNotNull(key); if (value != null) { map.put(key, value); } else { map.remove(key); } } public static String encodeURIComponent(String component) { String encoded = null; if (component != null) { try { encoded = URLEncoder.encode(component, "UTF-8"); } catch (UnsupportedEncodingException e) { // UTF-8 is always supported, so this should never happen. 
throw new BridgeServiceException(e.getMessage()); } } return encoded; } public static String passwordPolicyDescription(PasswordPolicy policy) { StringBuilder sb = new StringBuilder(); sb.append("Password must be ").append(policy.getMinLength()).append(" or more characters"); if (policy.isLowerCaseRequired() || policy.isNumericRequired() || policy.isSymbolRequired() || policy.isUpperCaseRequired()) { sb.append(", and must contain at least "); List<String> phrases = new ArrayList<>(); if (policy.isLowerCaseRequired()) { phrases.add("one lower-case letter"); } if (policy.isUpperCaseRequired()) { phrases.add("one upper-case letter"); } if (policy.isNumericRequired()) { phrases.add("one number"); } if (policy.isSymbolRequired()) { // !\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ phrases.add("one symbolic character (non-alphanumerics like #$%&@)"); } for (int i=0; i < phrases.size(); i++) { if (i == phrases.size()-1) { sb.append(", and "); } else if (i > 0) { sb.append(", "); } sb.append(phrases.get(i)); } } sb.append("."); return sb.toString(); } public static String extractPasswordFromURI(URI uri) { boolean hasPassword = (uri.getUserInfo() != null && uri.getUserInfo().contains(":")); return (hasPassword) ? uri.getUserInfo().split(":")[1] : null; } public static String createReferentGuidIndex(ActivityType type, String guid, String localDateTime) { checkNotNull(type); checkNotNull(guid); checkNotNull(localDateTime); return String.format("%s:%s:%s", guid , type.name().toLowerCase(), localDateTime); } public static String createReferentGuidIndex(Activity activity, LocalDateTime localDateTime) { checkNotNull(activity); checkNotNull(localDateTime); ActivityType type = activity.getActivityType(); String timestamp = localDateTime.toString(); switch(type) { case COMPOUND: return createReferentGuidIndex(type, activity.getCompoundActivity().getTaskIdentifier(), timestamp); case SURVEY: return createReferentGuidIndex(type, activity.getSurvey().getGuid(), timestamp); case TASK: return createReferentGuidIndex(type, activity.getTask().getIdentifier(), timestamp); } throw new BridgeServiceException("Invalid activityType specified"); } public static String toSynapseFriendlyName(String input) { checkNotNull(input); String value = input.replaceAll("[^a-zA-Z0-9\\.\\-_\\s]", " ").replaceAll("\\s+", " ").trim(); checkArgument(StringUtils.isNotBlank(value)); return value; } }
Remove unused import
app/org/sagebionetworks/bridge/BridgeUtils.java
Remove unused import
<ide><path>app/org/sagebionetworks/bridge/BridgeUtils.java
<ide> import org.sagebionetworks.bridge.config.BridgeConfigFactory;
<ide> import org.sagebionetworks.bridge.exceptions.BadRequestException;
<ide> import org.sagebionetworks.bridge.exceptions.BridgeServiceException;
<del>import org.sagebionetworks.bridge.hibernate.HibernateAccount;
<ide> import org.sagebionetworks.bridge.json.BridgeTypeName;
<ide> import org.sagebionetworks.bridge.time.DateUtils;
<ide> import org.sagebionetworks.bridge.models.Tuple;
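The BridgeUtils class above keeps its RequestContext in a ThreadLocal so that any class running on the request thread can read it without a context object being plumbed through every call. The following is a minimal, self-contained Java sketch of that pattern; the names DemoContext and DemoContextHolder are hypothetical and are not part of the Bridge codebase.

// Minimal, self-contained sketch of the per-thread request-context pattern
// described in BridgeUtils; DemoContext and DemoContextHolder are
// hypothetical names, not part of the Bridge codebase.
public final class DemoContextHolder {

    /** Simple immutable value object standing in for a request context. */
    public static final class DemoContext {
        private final String requestId;

        public DemoContext(String requestId) {
            this.requestId = requestId;
        }

        public String getRequestId() {
            return requestId;
        }
    }

    // Each thread gets its own slot; threads that never call set() read null.
    private static final ThreadLocal<DemoContext> HOLDER =
            ThreadLocal.withInitial(() -> null);

    private DemoContextHolder() {
    }

    /** Returns the current thread's context, or a placeholder if none was set. */
    public static DemoContext get() {
        DemoContext context = HOLDER.get();
        return (context != null) ? context : new DemoContext("none");
    }

    /** Typically called by a request interceptor when a request starts. */
    public static void set(DemoContext context) {
        HOLDER.set(context);
    }

    /** Clears the slot so pooled worker threads do not leak state between requests. */
    public static void clear() {
        HOLDER.remove();
    }
}

Clearing the slot at the end of each request matters because servlet containers reuse pooled worker threads, so a stale context would otherwise bleed into the next request handled by the same thread.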
JavaScript
mit
0c6d3f8d7b4605ebb611b45e906c4f40f84c772e
0
home-labs/do-ui-rails,home-labs/jtime-rails-ui,home-labs/do-ui-rails,home-labs/esphinx-rails-ui,home-labs/jtime-rails-ui,home-labs/esphinx-rails-ui
//= require esphinx/support/jquery //= require esphinx/support/ajax //= require esphinx/support/string //= require esphinx/support/array "use strict"; var jQuery, Ajax; const MODULE_CSS = "esphinx-ui"; (function ($) { $.prototype.autocomplete = function (obj, args, callback) { var self = $(this), hashSpanized, amountChar, ajax = Ajax, domContent, domContentArr, textBoxContentArr, matchFoundArr, composeMatch = function () { var composedArr = [], leftUnitName, rightUnitName, searchIndex, sliceFound, itemAsLower, domObj; matchFoundArr.eachAttrs(function (value, i) { if (hashSpanized.hasOwnProperty(i)) { domObj = hashSpanized[i].get(0); itemAsLower = value.toLowerCase(); sliceFound = domObj.textContent; searchIndex = itemAsLower.search(sliceFound); leftUnitName = itemAsLower.slice(0, searchIndex); rightUnitName = itemAsLower.slice(sliceFound .length, itemAsLower.length); if (leftUnitName.present()) { composedArr.push(capitalizeIf(leftUnitName)); } else { domObj.textContent = capitalizeIf(domObj .textContent); composedArr.push(domObj); } if (rightUnitName.present()) { composedArr.push(rightUnitName); } } else { composedArr.push(value); } }); return composedArr; }, map = function (domObj) { var count = 0, tempArr, searchIndex, i, tempSearchIndex, tempI, index; hashSpanized = {}; domContent = domObj.textContent.trim(); domContentArr = domContent.split(" "); spacingWords(domContentArr); tempArr = domContentArr.copy(); for (i in textBoxContentArr) { if (textBoxContentArr.hasOwnProperty(i)) { searchIndex = domContent.toLowerCase() .search(textBoxContentArr[i].toLowerCase() .trim()); if (searchIndex !== -1) { tempI = 0; while (true) { if (tempArr.length) { tempSearchIndex = tempArr[tempI] .toLowerCase().trim().search( textBoxContentArr[i] .trim()); if (tempSearchIndex !== -1) { count += 1; index = domContentArr .indexOf(tempArr[tempI]); hashSpanized[index] = spanize(textBoxContentArr[i]); tempArr.delete(tempI); } else { tempI += 1; } } if (tempArr.length === tempI) { break; } } } else { break; } } } if (count >= textBoxContentArr.length) { matchFoundArr = domContentArr.copy(); return hashSpanized; } return false; }, // hasRightSpace = function (str) { // var // searchIndex, // unitName, // last; // if (str instanceof HTMLElement) { // searchIndex = domContent.toLowerCase().trim() // .search(str.textContent.toLowerCase()); // last = searchIndex + str.textContent.length; // unitName = wordOfName(searchIndex).toLowerCase(); // // if (str.textContent === unitName && // // domContent[last] === " ") { // if (domContent[last] === " ") { // return true; // } // } else { // searchIndex = domContent.toLowerCase().trim() // .search(str.toLowerCase()); // last = searchIndex + str.length; // unitName = wordOfName(searchIndex).toLowerCase(); // if (domContent[last] === " " || // str.toLowerCase() === unitName) { // // if (domContent[last] === " " || // // (str.toLowerCase() === unitName && last !== domContent.length)) { // return true; // } // } // return false; // }, composeName = function (composedArr, domObj) { var copy = $(domObj.cloneNode()); composedArr.forEach(function (v) { copy.append(v); }); return copy; }, capitalizeIf = function (str) { var searchIndex; searchIndex = domContent.toLowerCase().trim().search(str); if (![searchIndex-1].present) { return str.capitalize(); } else { return str; } }, spanize = function (sliceFound) { var spanizedSliceFound; spanizedSliceFound = $("<span></span>"); spanizedSliceFound .addClass(MODULE_CSS + "-slice-found"); spanizedSliceFound.text(sliceFound); return 
spanizedSliceFound; }, resolveAnswer = function (obj, order) { var found = [], tagName, decomposed, composed; if (!order) { order = "asc"; } order = order.toLocaleLowerCase(); tagName = obj.prop("tagName").toLowerCase(); textBoxContentArr = self.val().toLowerCase().trim() .split(" "); if (tagName === "li") { obj.sortByTextContent(order).each(function (i, domObj) { decomposed = map(domObj, self); if (decomposed) { composed = composeMatch(decomposed); found.push(composeName(composed, domObj)); } }); } // caso necessário, usar recursividade para reenviar formatado // como li return $(found); }, // wordOfName = function (key) { // return domContent.copyUntil(" ", key, true); // }, spacingWords = function (arr) { var i = 0, content = domContent.toLowerCase().trim(), lastName = domContentArr[domContentArr.length -1] .toLowerCase(), lastNameIndex = content.search(lastName); while (true) { if (i < arr.length -1) { arr[i] = arr[i] + " "; i += 1; } else { if (content.search(arr[i].toLowerCase()) !== lastNameIndex) { arr[i] = arr[i] + " "; } break; } } }, resolveAmountChar = function () { if (args.afterAmountChar) { amountChar = args.afterAmountChar; } else { amountChar = 1; } }, unsetKeyUpEvent = function () { self.off("keyup"); }, offlineSearch = function () { unsetKeyUpEvent(); resolveAmountChar(); self.on("keyup", function (e) { if (self.val()) { if (self.val().length >= amountChar) { callback(resolveAnswer(obj, args.order), e); } } else { callback([], e); } }); }, remoteSearch = function () { var jqObj; unsetKeyUpEvent(); resolveAmountChar(); self.on("keyup", function (e) { if (self.val().length === amountChar) { args.remote.query(e, function (q) { ajax({ url: args.remote.url, params: q }) .processing(function (xhr) { if (args.remote.processing) { args.remote.processing(xhr, e); } $(e.target).on("keypress", function (e) { e.preventDefault(); }); }) .done(function (a) { $(e.target).off("keypress"); jqObj = $(a).find("li"); callback(resolveAnswer(jqObj, args.order), e); if (jqObj.length) { self.autocomplete(jqObj, args, callback); } }); }); } }); }; // init if (obj instanceof jQuery) { offlineSearch(); } else if (obj instanceof Object) { callback = args; args = obj; remoteSearch(); } }; })(jQuery);
lib/assets/javascripts/esphinx/ui/support/jquery/autocomplete.js
//= require esphinx/support/jquery //= require esphinx/support/ajax //= require esphinx/support/string //= require esphinx/support/array "use strict"; var jQuery, Ajax; const MODULE_CSS = "esphinx-ui"; (function ($) { $.prototype.autocomplete = function (obj, args, callback) { var self = $(this), hashSpanized, amountChar, ajax = Ajax, domContent, domContentArr, textBoxContentArr, matchFoundArr, composeMatch = function () { var composedArr = [], leftUnitName, rightUnitName, searchIndex, sliceFound, itemAsLower, domObj; matchFoundArr.eachAttrs(function (value, i) { if (hashSpanized.hasOwnProperty(i)) { domObj = hashSpanized[i].get(0); itemAsLower = value.toLowerCase(); sliceFound = domObj.textContent; searchIndex = itemAsLower.search(sliceFound); leftUnitName = itemAsLower.slice(0, searchIndex); rightUnitName = itemAsLower.slice(sliceFound .length, itemAsLower.length); if (leftUnitName.present()) { composedArr.push(capitalizeIf(leftUnitName)); } else { domObj.textContent = capitalizeIf(domObj .textContent); composedArr.push(domObj); } if (rightUnitName.present()) { composedArr.push(rightUnitName); } } else { composedArr.push(value); } }); return composedArr; }, map = function (domObj) { var count = 0, tempArr, searchIndex, i, tempSearchIndex, tempI, index; hashSpanized = {}; domContent = domObj.textContent.trim(); domContentArr = domContent.split(" "); spacingWords(domContentArr); tempArr = domContentArr.copy(); for (i in textBoxContentArr) { if (textBoxContentArr.hasOwnProperty(i)) { searchIndex = domContent.toLowerCase() .search(textBoxContentArr[i].toLowerCase() .trim()); if (searchIndex !== -1) { tempI = 0; while (true) { if (tempArr.length) { tempSearchIndex = tempArr[tempI] .toLowerCase().trim().search( textBoxContentArr[i] .trim()); if (tempSearchIndex !== -1) { count += 1; index = domContentArr .indexOf(tempArr[tempI]); hashSpanized[index] = spanize(textBoxContentArr[i]); tempArr.delete(tempI); } else { tempI += 1; } } if (tempArr.length === tempI) { break; } } } else { break; } } } if (count >= textBoxContentArr.length) { matchFoundArr = domContentArr.copy(); return hashSpanized; } return false; }, // hasRightSpace = function (str) { // var // searchIndex, // unitName, // last; // if (str instanceof HTMLElement) { // searchIndex = domContent.toLowerCase().trim() // .search(str.textContent.toLowerCase()); // last = searchIndex + str.textContent.length; // unitName = wordOfName(searchIndex).toLowerCase(); // // if (str.textContent === unitName && // // domContent[last] === " ") { // if (domContent[last] === " ") { // return true; // } // } else { // searchIndex = domContent.toLowerCase().trim() // .search(str.toLowerCase()); // last = searchIndex + str.length; // unitName = wordOfName(searchIndex).toLowerCase(); // if (domContent[last] === " " || // str.toLowerCase() === unitName) { // // if (domContent[last] === " " || // // (str.toLowerCase() === unitName && last !== domContent.length)) { // return true; // } // } // return false; // }, composeName = function (composedArr, domObj) { var copy = $(domObj.cloneNode()); composedArr.forEach(function (v) { copy.append(v); }); return copy; }, capitalizeIf = function (str) { var searchIndex; searchIndex = domContent.toLowerCase().trim().search(str); if ([searchIndex-1].present) { return str.capitalize(); } else { return str; } }, spanize = function (sliceFound) { var spanizedSliceFound; spanizedSliceFound = $("<span></span>"); spanizedSliceFound .addClass(MODULE_CSS + "-slice-found"); spanizedSliceFound.text(sliceFound); return 
spanizedSliceFound; }, resolveAnswer = function (obj, order) { var found = [], tagName, decomposed, composed; if (!order) { order = "asc"; } order = order.toLocaleLowerCase(); tagName = obj.prop("tagName").toLowerCase(); textBoxContentArr = self.val().toLowerCase().trim() .split(" "); if (tagName === "li") { obj.sortByTextContent(order).each(function (i, domObj) { decomposed = map(domObj, self); if (decomposed) { composed = composeMatch(decomposed); found.push(composeName(composed, domObj)); } }); } // caso necessário, usar recursividade para reenviar formatado // como li return $(found); }, // wordOfName = function (key) { // return domContent.copyUntil(" ", key, true); // }, spacingWords = function (arr) { var i = 0, content = domContent.toLowerCase().trim(), lastName = domContentArr[domContentArr.length -1] .toLowerCase(), lastNameIndex = content.search(lastName); while (true) { if (i < arr.length -1) { arr[i] = arr[i] + " "; i += 1; } else { if (content.search(arr[i].toLowerCase()) !== lastNameIndex) { arr[i] = arr[i] + " "; } break; } } }, resolveAmountChar = function () { if (args.afterAmountChar) { amountChar = args.afterAmountChar; } else { amountChar = 1; } }, unsetKeyUpEvent = function () { self.off("keyup"); }, offlineSearch = function () { unsetKeyUpEvent(); resolveAmountChar(); self.on("keyup", function (e) { if (self.val()) { if (self.val().length >= amountChar) { callback(resolveAnswer(obj, args.order), e); } } else { callback([], e); } }); }, remoteSearch = function () { var jqObj; unsetKeyUpEvent(); resolveAmountChar(); self.on("keyup", function (e) { if (self.val().length === amountChar) { args.remote.query(e, function (q) { ajax({ url: args.remote.url, params: q }) .processing(function (xhr) { if (args.remote.processing) { args.remote.processing(xhr, e); } $(e.target).on("keypress", function (e) { e.preventDefault(); }); }) .done(function (a) { $(e.target).off("keypress"); jqObj = $(a).find("li"); callback(resolveAnswer(jqObj, args.order), e); if (jqObj.length) { self.autocomplete(jqObj, args, callback); } }); }); } }); }; // init if (obj instanceof jQuery) { offlineSearch(); } else if (obj instanceof Object) { callback = args; args = obj; remoteSearch(); } }; })(jQuery);
fixes bug in capitalizeIf method
lib/assets/javascripts/esphinx/ui/support/jquery/autocomplete.js
fixes bug in capitalizeIf method
<ide><path>lib/assets/javascripts/esphinx/ui/support/jquery/autocomplete.js
<ide>
<ide> searchIndex = domContent.toLowerCase().trim().search(str);
<ide>
<del> if ([searchIndex-1].present) {
<add> if (![searchIndex-1].present) {
<ide> return str.capitalize();
<ide> } else {
<ide> return str;
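The diff above flips the condition in capitalizeIf so that, judging from the surrounding code, a matched slice is capitalized only when nothing precedes it in the item's text content. Below is a simplified, hypothetical Java sketch of that intent; it is not the esphinx implementation, which operates on jQuery-wrapped DOM fragments and a String#capitalize extension.

// Simplified, hypothetical sketch of the intent behind capitalizeIf after the
// fix: capitalize a matched slice only when nothing precedes it in the content.
public final class CapitalizeIfDemo {

    /** Capitalize the slice only when it starts the (normalized) content. */
    static String capitalizeIf(String content, String slice) {
        int index = content.toLowerCase().trim().indexOf(slice.toLowerCase());
        boolean startsContent = (index == 0);     // no character sits at index - 1
        if (startsContent && !slice.isEmpty()) {
            return Character.toUpperCase(slice.charAt(0)) + slice.substring(1);
        }
        return slice;
    }

    public static void main(String[] args) {
        System.out.println(capitalizeIf("maria silva", "maria")); // Maria
        System.out.println(capitalizeIf("maria silva", "silva")); // silva
    }
}

Running main capitalizes the slice that begins the content and leaves an interior match untouched, which mirrors what the negated check in the patched JavaScript appears to do.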
Java
apache-2.0
06eef2729bda9118192a342808366485d777b6e8
0
hongsudt/server,HaiJiaoXinHeng/server-1,HaiJiaoXinHeng/server-1,hongsudt/server,hongsudt/server,HaiJiaoXinHeng/server-1
package org.ohmage.request; import java.io.BufferedInputStream; import java.io.BufferedWriter; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.OutputStreamWriter; import java.io.Writer; import java.util.Collections; import java.util.Enumeration; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.zip.GZIPInputStream; import java.util.zip.GZIPOutputStream; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.servlet.http.Part; import org.apache.log4j.Logger; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import org.ohmage.annotator.Annotator; import org.ohmage.annotator.Annotator.ErrorCode; import org.ohmage.exception.ValidationException; import org.ohmage.util.StringUtils; /** * Superclass for all requests. Defines the basic requirements for a request. * * @author John Jenkins * @author Joshua Selsky */ public abstract class Request { private static final Logger LOGGER = Logger.getLogger(Request.class); /** * The key to use when responding with a JSONObject about whether the * request was a success or failure. */ public static final String JSON_KEY_RESULT = "result"; /** * The value to use for the {@link #JSON_KEY_RESULT} when the request is * successful. */ public static final String RESULT_SUCCESS = "success"; /** * The value to use for the {@link #JSON_KEY_RESULT} when the request has * failed. */ public static final String RESULT_FAILURE = "failure"; /** * The key to use when responding with a JSONObject where the request is * successful. The value associated with this key is the requested data. */ public static final String JSON_KEY_DATA = "data"; /** * The key to use when responding with a JSONOBject where the request has * failed. The value associated with this key is the error code and error * text describing why this request failed. */ public static final String JSON_KEY_ERRORS = "errors"; /** * A hard-coded JSONObject which represents a successful result. */ public static final String RESPONSE_SUCCESS_JSON_TEXT = "{\"" + JSON_KEY_RESULT + "\":\"" + RESULT_SUCCESS + "\"}"; /** * A hard-coded JSONObject with which to respond in the event that the * JSONObject that is the response cannot be built. */ public static final String RESPONSE_ERROR_JSON_TEXT = "{\"" + JSON_KEY_RESULT + "\":\"" + RESULT_FAILURE + "\"," + "\"" + JSON_KEY_ERRORS + "\":[" + "{\"" + Annotator.JSON_KEY_CODE + "\":\"0103\"," + "\"" + Annotator.JSON_KEY_TEXT + "\":\"An error occurred while building the JSON response.\"}" + "]}"; private static final String KEY_CONTENT_ENCODING = "Content-Encoding"; private static final String VALUE_GZIP = "gzip"; private static final int CHUNK_SIZE = 4096; private static final String PARAMETER_SEPARATOR = "&"; private static final String PARAMETER_VALUE_SEPARATOR = "="; private final Annotator annotator; private boolean failed; private final Map<String, String[]> parameters; /** * Default constructor. Creates a new, generic annotator for this object. * * @param httpRequest An HttpServletRequest that was used to create this * request. This may be null if no such request exists. */ protected Request(final HttpServletRequest httpRequest) { annotator = new Annotator(); failed = false; parameters = getParameters(httpRequest); } /** * @return Returns the parameters from the HTTP request. 
*/ protected Map<String, String[]> getParameters() { return parameters; } /** * Returns whether or not this request has failed. * * @return Whether or not this request has failed. */ public boolean isFailed() { return failed; } /** * Simply sets the request as failed without updating the error message. */ public void setFailed() { failed = true; } /** * Marks that the request has failed and updates its response with the * given error code and error text. * * @param errorCode A four-character error code related to the error text. * * @param errorText The text to be returned to the user. */ public void setFailed(final ErrorCode errorCode, final String errorText) { annotator.update(errorCode, errorText); failed = true; } /** * Returns a String representation of the failure message that would be * returned to a user if this request has failed. All requests have a * default failure message, so this will always return some error message; * however, if the request has not yet failed, this result is meaningless. * * @return A String representation of the current failure message for this * request. */ public String getFailureMessage() { String result; try { // Use the annotator's message to build the response. JSONObject resultJson = new JSONObject(); resultJson.put(JSON_KEY_RESULT, RESULT_FAILURE); // FIXME: We no longer have multiple error messages per failed // response, so we need to get rid of this unnecessary array. JSONArray jsonArray = new JSONArray(); jsonArray.put(annotator.toJsonObject()); resultJson.put(JSON_KEY_ERRORS, jsonArray); result = resultJson.toString(); } catch(JSONException e) { // If we can't even build the failure message, write a hand- // written message as the response. LOGGER.error("An error occurred while building the failure JSON response.", e); result = RESPONSE_ERROR_JSON_TEXT; } return result; } /** * Returns an unmodifiable version of the parameter map. * * @return An unmodifiable version of the parameter map. */ public Map<String, String[]> getParameterMap() { return Collections.unmodifiableMap(parameters); } /** * Returns an array of all of the values from a parameter in the request. * * @param parameterKey The key to use to lookup the parameter value. * * @return An array of all values given for the parameter. The array may be * empty, but will never be null. */ protected String[] getParameterValues(String parameterKey) { if(parameterKey == null) { return new String[0]; } String[] result = parameters.get(parameterKey); if(result == null) { result = new String[0]; } return result; } /** * Returns the first value for some key from the parameter list. If there * are no values for a key, null is returned. * * @param parameterKey The key to use to lookup a list of values, the first * of which will be returned. * * @return Returns the first of a list of values for some key or null if no * values exist for the key. */ protected String getParameter(String parameterKey) { String[] values = getParameterValues(parameterKey); if(values.length == 0) { return null; } else { return values[0]; } } /** * Performs the operations for which this Request is responsible and * aggregates any resulting data. This should be container agnostic. The * specific constructors should gather the required information to perform * this service and any results set by this function should be not be * specific to any type of response generated by the container. */ public abstract void service(); /** * Gathers an request-specific data that should be logged in the audit. 
*/ public abstract Map<String, String[]> getAuditInformation(); /************************************************************************** * Begin JEE Requirements *************************************************************************/ /** * Writes a response to the request. * * @param httpRequest The initial HTTP request. * * @param httpResponse The HTTP response to this request. */ public abstract void respond(HttpServletRequest httpRequest, HttpServletResponse httpResponse); /** * Writes the response that is a JSONObject. This is a helper function for * when {@link #respond(HttpServletRequest, HttpServletResponse)} is called * given that most responses are in some sort of JSON format. * * @param httpRequest The initial HTTP request that we are processing. * * @param httpResponse The response for this HTTP request. * * @param jsonResponse An already-constructed JSONObject that contains the * 'data' portion of the object. */ protected void respond(HttpServletRequest httpRequest, HttpServletResponse httpResponse, JSONObject jsonResponse) { respond(httpRequest, httpResponse, JSON_KEY_DATA, jsonResponse); } /** * Writes the response that is a JSONObject. This is a helper function for * when {@link #respond(HttpServletRequest, HttpServletResponse)} is called * given that most responses are in some sort of JSON format. This creates * a success/fail JSON response where, when the result is success, it will * also include a second key-value pair which are the parameters to this * function. * * @param httpRequest The initial HTTP request that we are processing. * * @param httpResponse The response for this HTTP request. * * @param key The key to include along with {@link #JSON_KEY_RESULT}. * * @param value The value to assign with the 'key'. */ protected void respond(HttpServletRequest httpRequest, HttpServletResponse httpResponse, String key, Object value) { // Create a writer for the HTTP response object. Writer writer = null; String responseText = ""; try { writer = new BufferedWriter(new OutputStreamWriter(getOutputStream(httpRequest, httpResponse))); // Sets the HTTP headers to disable caching. expireResponse(httpResponse); httpResponse.setContentType("text/html"); // If the response hasn't failed yet, attempt to create and write the // JSON response. if(! failed) { try { JSONObject result = new JSONObject(); result.put(JSON_KEY_RESULT, RESULT_SUCCESS); result.put(key, value); responseText = result.toString(); } catch(JSONException e) { // If anything fails, echo it in the logs and set the request // as failed. LOGGER.error("An error occurred while building the success JSON response.", e); failed = true; } } // If the request failed, either during the build or while the response // was being built, write a failure message. if(failed) { responseText = getFailureMessage(); } writer.write(responseText); } catch(IOException e) { LOGGER.error("Unable to write response message. Aborting.", e); } finally { try { if(writer != null) { writer.flush(); writer.close(); writer = null; } } catch(IOException e) { LOGGER.error("Unable to flush or close the writer.", e); } } } /** * Retrieves the parameter map from the request and returns it. * * @param httpRequest A HttpServletRequest that contains the desired * parameter map. * * @return Returns a map of keys to an array of values for all of the * parameters contained in the request. This may return an empty * map, but it will never return null. * * @throws IllegalArgumentException Thrown if the parameters cannot be * parsed. 
* * @throws IllegalStateException Thrown if there is a problem connecting to * or reading from the request. */ private Map<String, String[]> getParameters(HttpServletRequest httpRequest) { if(httpRequest == null) { return Collections.emptyMap(); } Enumeration<String> contentEncodingHeaders = httpRequest.getHeaders(KEY_CONTENT_ENCODING); while(contentEncodingHeaders.hasMoreElements()) { if(VALUE_GZIP.equals(contentEncodingHeaders.nextElement())) { return gunzipRequest(httpRequest); } } return httpRequest.getParameterMap(); } /** * Retrieves the parameter map from a request that has had its contents * GZIP'd. * * @param httpRequest A HttpServletRequest whose contents are GZIP'd as * indicated by a "Content-Encoding" header. * * @return Returns a map of keys to a list of values for all of the * parameters passed to the server. This may return an empty map, * but it will never return null. * * @throws IllegalArgumentException Thrown if the parameters cannot be * parsed. * * @throws IllegalStateException Thrown if there is a problem connecting to * or reading from the request. */ private Map<String, String[]> gunzipRequest(HttpServletRequest httpRequest) { // Retrieve the InputStream for the GZIP'd content of the request. InputStream inputStream; try { inputStream = new BufferedInputStream(new GZIPInputStream(httpRequest.getInputStream())); } catch(IOException e) { LOGGER.error("The uploaded content was not GZIP content.", e); setFailed(ErrorCode.SYSTEM_GENERAL_ERROR, "Not GZIP content."); return Collections.emptyMap(); } // Retrieve the parameter list as a string. String parameterString; try { // This will build the parameter string. StringBuilder builder = new StringBuilder(); // These will store the information for the current chunk. byte[] chunk = new byte[CHUNK_SIZE]; int readLen = 0; while((readLen = inputStream.read(chunk)) != -1) { builder.append(new String(chunk, 0, readLen)); } parameterString = builder.toString(); } catch(IOException e) { throw new IllegalStateException("There was an error while reading from the request's input stream.", e); } finally { try { if(inputStream != null) { inputStream.close(); inputStream = null; } } catch(IOException e) { throw new IllegalStateException("And error occurred while closing the input stream.", e); } } // Create the resulting object so that we will never return null. Map<String, String[]> parameterMap = new HashMap<String, String[]>(); // If the parameters string is not empty, parse it for the parameters. if(! StringUtils.isEmptyOrWhitespaceOnly(parameterString)) { Map<String, List<String>> parameters = new HashMap<String, List<String>>(); // First, split all of the parameters apart. String[] keyValuePairs = parameterString.split(PARAMETER_SEPARATOR); // For each of the pairs, split their key and value and store them. for(String keyValuePair : keyValuePairs) { // If the pair is empty or null, ignore it. if(StringUtils.isEmptyOrWhitespaceOnly(keyValuePair.trim())) { continue; } // Split the key from the value. String[] splitPair = keyValuePair.split(PARAMETER_VALUE_SEPARATOR); // If there isn't exactly one key to one value, then there is a // problem, and we need to abort. 
if(splitPair.length <= 1) { throw new IllegalArgumentException("One of the parameter's 'pairs' did not contain a '" + PARAMETER_VALUE_SEPARATOR + "': " + keyValuePair); } else if(splitPair.length > 2) { throw new IllegalArgumentException("One of the parameter's 'pairs' contained multiple '" + PARAMETER_VALUE_SEPARATOR + "'s: " + keyValuePair); } // The key is the first part of the pair. String key = StringUtils.urlDecode(splitPair[0]); // The first or next value for the key is the second part of // the pair. List<String> values = parameters.get(key); if(values == null) { values = new LinkedList<String>(); parameters.put(key, values); } values.add(StringUtils.urlDecode(splitPair[1])); } // Now that we have all of the pairs, convert it into the // appropriate map. for(String key : parameters.keySet()) { parameterMap.put(key, parameters.get(key).toArray(new String[0])); } } return parameterMap; } /** * Reads the HttpServletRequest for a key-value pair and returns the value * where the key is equal to the given key. * * @param httpRequest A "multipart/form-data" request that contains the * parameter that has a key value 'key'. * * @param key The key for the value we are after in the 'httpRequest'. * * @return Returns null if there is no such key in the request or if, * after reading the object, it has a length of 0. Otherwise, it * returns the value associated with the key as a byte array. * * @throws ServletException Thrown if the 'httpRequest' is not a * "multipart/form-data" request. * * @throws IOException Thrown if there is an error reading the value from * the request's input stream. * * @throws IllegalStateException Thrown if the entire request is larger * than the maximum allowed size for a * request or if the value of the requested * key is larger than the maximum allowed * size for a single value. */ protected byte[] getMultipartValue(HttpServletRequest httpRequest, String key) throws ValidationException { try { Part part = httpRequest.getPart(key); if(part == null) { return null; } InputStream partInputStream = part.getInputStream(); ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); byte[] chunk = new byte[4096]; int amountRead; while((amountRead = partInputStream.read(chunk)) != -1) { outputStream.write(chunk, 0, amountRead); } if(outputStream.size() == 0) { return null; } else { return outputStream.toByteArray(); } } catch(ServletException e) { LOGGER.error("This is not a multipart/form-data POST.", e); setFailed(ErrorCode.SYSTEM_GENERAL_ERROR, "This is not a multipart/form-data POST which is what we expect for the current API call."); throw new ValidationException(e); } catch(IOException e) { LOGGER.error("There was an error reading the message from the input stream.", e); setFailed(); throw new ValidationException(e); } } /** * Sets the response headers to disallow client caching. */ protected void expireResponse(HttpServletResponse response) { response.setHeader("Expires", "Fri, 5 May 1995 12:00:00 GMT"); response.setHeader("Cache-Control", "no-store, no-cache, must-revalidate"); response.setHeader("Pragma", "no-cache"); // This is done to allow client content to be served up from from // different domains than the server data e.g., when you want to run a // client in a local sandbox, but retrieve data from a remote server response.setHeader("Access-Control-Allow-Origin","*"); } /** * There is functionality in Tomcat 6 to perform this action, but it is * also nice to have it controlled programmatically. 
* * @return an OutputStream appropriate for the headers found in the * request. */ protected OutputStream getOutputStream(HttpServletRequest request, HttpServletResponse response) throws IOException { OutputStream os = null; // Determine if the response can be gzipped String encoding = request.getHeader("Accept-Encoding"); if (encoding != null && encoding.indexOf("gzip") >= 0) { if(LOGGER.isDebugEnabled()) { LOGGER.debug("Returning a GZIPOutputStream"); } response.setHeader("Content-Encoding","gzip"); response.setHeader("Vary", "Accept-Encoding"); os = new GZIPOutputStream(response.getOutputStream()); } else { if(LOGGER.isDebugEnabled()) { LOGGER.debug("Returning the default OutputStream"); } os = response.getOutputStream(); } return os; } /************************************************************************** * End JEE Requirements *************************************************************************/ }
src/org/ohmage/request/Request.java
package org.ohmage.request; import java.io.BufferedInputStream; import java.io.BufferedWriter; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.OutputStreamWriter; import java.io.Writer; import java.util.Collections; import java.util.Enumeration; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.zip.GZIPInputStream; import java.util.zip.GZIPOutputStream; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.servlet.http.Part; import org.apache.log4j.Logger; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import org.ohmage.annotator.Annotator; import org.ohmage.annotator.Annotator.ErrorCode; import org.ohmage.exception.ValidationException; import org.ohmage.util.StringUtils; /** * Superclass for all requests. Defines the basic requirements for a request. * * @author John Jenkins * @author Joshua Selsky */ public abstract class Request { private static final Logger LOGGER = Logger.getLogger(Request.class); /** * The key to use when responding with a JSONObject about whether the * request was a success or failure. */ public static final String JSON_KEY_RESULT = "result"; /** * The value to use for the {@link #JSON_KEY_RESULT} when the request is * successful. */ public static final String RESULT_SUCCESS = "success"; /** * The value to use for the {@link #JSON_KEY_RESULT} when the request has * failed. */ public static final String RESULT_FAILURE = "failure"; /** * The key to use when responding with a JSONObject where the request is * successful. The value associated with this key is the requested data. */ public static final String JSON_KEY_DATA = "data"; /** * The key to use when responding with a JSONOBject where the request has * failed. The value associated with this key is the error code and error * text describing why this request failed. */ public static final String JSON_KEY_ERRORS = "errors"; /** * A hard-coded JSONObject which represents a successful result. */ public static final String RESPONSE_SUCCESS_JSON_TEXT = "{\"" + JSON_KEY_RESULT + "\":\"" + RESULT_SUCCESS + "\"}"; /** * A hard-coded JSONObject with which to respond in the event that the * JSONObject that is the response cannot be built. */ public static final String RESPONSE_ERROR_JSON_TEXT = "{\"" + JSON_KEY_RESULT + "\":\"" + RESULT_FAILURE + "\"," + "\"" + JSON_KEY_ERRORS + "\":[" + "{\"" + Annotator.JSON_KEY_CODE + "\":\"0103\"," + "\"" + Annotator.JSON_KEY_TEXT + "\":\"An error occurred while building the JSON response.\"}" + "]}"; private static final String KEY_CONTENT_ENCODING = "Content-Encoding"; private static final String VALUE_GZIP = "gzip"; private static final int CHUNK_SIZE = 4096; private static final String PARAMETER_SEPARATOR = "&"; private static final String PARAMETER_VALUE_SEPARATOR = "="; private final Annotator annotator; private boolean failed; private final Map<String, String[]> parameters; /** * Default constructor. Creates a new, generic annotator for this object. * * @param httpRequest An HttpServletRequest that was used to create this * request. This may be null if no such request exists. */ protected Request(HttpServletRequest httpRequest) { annotator = new Annotator(); failed = false; parameters = getParameters(httpRequest); } /** * @return Returns the parameters from the HTTP request. 
*/ protected Map<String, String[]> getParameters() { return parameters; } /** * Returns whether or not this request has failed. * * @return Whether or not this request has failed. */ public boolean isFailed() { return failed; } /** * Simply sets the request as failed without updating the error message. */ public void setFailed() { failed = true; } /** * Marks that the request has failed and updates its response with the * given error code and error text. * * @param errorCode A four-character error code related to the error text. * * @param errorText The text to be returned to the user. */ public void setFailed(final ErrorCode errorCode, final String errorText) { annotator.update(errorCode, errorText); failed = true; } /** * Returns a String representation of the failure message that would be * returned to a user if this request has failed. All requests have a * default failure message, so this will always return some error message; * however, if the request has not yet failed, this result is meaningless. * * @return A String representation of the current failure message for this * request. */ public String getFailureMessage() { String result; try { // Use the annotator's message to build the response. JSONObject resultJson = new JSONObject(); resultJson.put(JSON_KEY_RESULT, RESULT_FAILURE); // FIXME: We no longer have multiple error messages per failed // response, so we need to get rid of this unnecessary array. JSONArray jsonArray = new JSONArray(); jsonArray.put(annotator.toJsonObject()); resultJson.put(JSON_KEY_ERRORS, jsonArray); result = resultJson.toString(); } catch(JSONException e) { // If we can't even build the failure message, write a hand- // written message as the response. LOGGER.error("An error occurred while building the failure JSON response.", e); result = RESPONSE_ERROR_JSON_TEXT; } return result; } /** * Returns an unmodifiable version of the parameter map. * * @return An unmodifiable version of the parameter map. */ public Map<String, String[]> getParameterMap() { return Collections.unmodifiableMap(parameters); } /** * Returns an array of all of the values from a parameter in the request. * * @param parameterKey The key to use to lookup the parameter value. * * @return An array of all values given for the parameter. The array may be * empty, but will never be null. */ protected String[] getParameterValues(String parameterKey) { if(parameterKey == null) { return new String[0]; } String[] result = parameters.get(parameterKey); if(result == null) { result = new String[0]; } return result; } /** * Returns the first value for some key from the parameter list. If there * are no values for a key, null is returned. * * @param parameterKey The key to use to lookup a list of values, the first * of which will be returned. * * @return Returns the first of a list of values for some key or null if no * values exist for the key. */ protected String getParameter(String parameterKey) { String[] values = getParameterValues(parameterKey); if(values.length == 0) { return null; } else { return values[0]; } } /** * Performs the operations for which this Request is responsible and * aggregates any resulting data. This should be container agnostic. The * specific constructors should gather the required information to perform * this service and any results set by this function should be not be * specific to any type of response generated by the container. */ public abstract void service(); /** * Gathers an request-specific data that should be logged in the audit. 
*/ public abstract Map<String, String[]> getAuditInformation(); /************************************************************************** * Begin JEE Requirements *************************************************************************/ /** * Writes a response to the request. * * @param httpRequest The initial HTTP request. * * @param httpResponse The HTTP response to this request. */ public abstract void respond(HttpServletRequest httpRequest, HttpServletResponse httpResponse); /** * Writes the response that is a JSONObject. This is a helper function for * when {@link #respond(HttpServletRequest, HttpServletResponse)} is called * given that most responses are in some sort of JSON format. * * @param httpRequest The initial HTTP request that we are processing. * * @param httpResponse The response for this HTTP request. * * @param jsonResponse An already-constructed JSONObject that contains the * 'data' portion of the object. */ protected void respond(HttpServletRequest httpRequest, HttpServletResponse httpResponse, JSONObject jsonResponse) { respond(httpRequest, httpResponse, JSON_KEY_DATA, jsonResponse); } /** * Writes the response that is a JSONObject. This is a helper function for * when {@link #respond(HttpServletRequest, HttpServletResponse)} is called * given that most responses are in some sort of JSON format. This creates * a success/fail JSON response where, when the result is success, it will * also include a second key-value pair which are the parameters to this * function. * * @param httpRequest The initial HTTP request that we are processing. * * @param httpResponse The response for this HTTP request. * * @param key The key to include along with {@link #JSON_KEY_RESULT}. * * @param value The value to assign with the 'key'. */ protected void respond(HttpServletRequest httpRequest, HttpServletResponse httpResponse, String key, Object value) { // Create a writer for the HTTP response object. Writer writer = null; String responseText = ""; try { writer = new BufferedWriter(new OutputStreamWriter(getOutputStream(httpRequest, httpResponse))); // Sets the HTTP headers to disable caching. expireResponse(httpResponse); httpResponse.setContentType("text/html"); // If the response hasn't failed yet, attempt to create and write the // JSON response. if(! failed) { try { JSONObject result = new JSONObject(); result.put(JSON_KEY_RESULT, RESULT_SUCCESS); result.put(key, value); responseText = result.toString(); } catch(JSONException e) { // If anything fails, echo it in the logs and set the request // as failed. LOGGER.error("An error occurred while building the success JSON response.", e); failed = true; } } // If the request failed, either during the build or while the response // was being built, write a failure message. if(failed) { responseText = getFailureMessage(); } writer.write(responseText); } catch(IOException e) { LOGGER.error("Unable to write response message. Aborting.", e); } finally { try { if(writer != null) { writer.flush(); writer.close(); writer = null; } } catch(IOException e) { LOGGER.error("Unable to flush or close the writer.", e); } } } /** * Retrieves the parameter map from the request and returns it. * * @param httpRequest A HttpServletRequest that contains the desired * parameter map. * * @return Returns a map of keys to an array of values for all of the * parameters contained in the request. This may return an empty * map, but it will never return null. * * @throws IllegalArgumentException Thrown if the parameters cannot be * parsed. 
* * @throws IllegalStateException Thrown if there is a problem connecting to * or reading from the request. */ private Map<String, String[]> getParameters(HttpServletRequest httpRequest) { if(httpRequest == null) { return Collections.emptyMap(); } Enumeration<String> contentEncodingHeaders = httpRequest.getHeaders(KEY_CONTENT_ENCODING); while(contentEncodingHeaders.hasMoreElements()) { if(VALUE_GZIP.equals(contentEncodingHeaders.nextElement())) { return gunzipRequest(httpRequest); } } return httpRequest.getParameterMap(); } /** * Retrieves the parameter map from a request that has had its contents * GZIP'd. * * @param httpRequest A HttpServletRequest whose contents are GZIP'd as * indicated by a "Content-Encoding" header. * * @return Returns a map of keys to a list of values for all of the * parameters passed to the server. This may return an empty map, * but it will never return null. * * @throws IllegalArgumentException Thrown if the parameters cannot be * parsed. * * @throws IllegalStateException Thrown if there is a problem connecting to * or reading from the request. */ private Map<String, String[]> gunzipRequest(HttpServletRequest httpRequest) { // Retrieve the InputStream for the GZIP'd content of the request. InputStream inputStream; try { inputStream = new BufferedInputStream(new GZIPInputStream(httpRequest.getInputStream())); } catch(IOException e) { LOGGER.error("The uploaded content was not GZIP content.", e); setFailed(ErrorCode.SYSTEM_GENERAL_ERROR, "Not a gzip file."); return Collections.emptyMap(); } // Retrieve the parameter list as a string. String parameterString; try { // This will build the parameter string. StringBuilder builder = new StringBuilder(); // These will store the information for the current chunk. byte[] chunk = new byte[CHUNK_SIZE]; int readLen = 0; while((readLen = inputStream.read(chunk)) != -1) { builder.append(new String(chunk, 0, readLen)); } parameterString = builder.toString(); } catch(IOException e) { throw new IllegalStateException("There was an error while reading from the request's input stream.", e); } finally { try { if(inputStream != null) { inputStream.close(); inputStream = null; } } catch(IOException e) { throw new IllegalStateException("And error occurred while closing the input stream.", e); } } // Create the resulting object so that, unless we fail, we will never // return null. Map<String, String[]> parameterMap = new HashMap<String, String[]>(); // If the parameters string is not empty, parse it for the parameters. if(! StringUtils.isEmptyOrWhitespaceOnly(parameterString)) { Map<String, List<String>> parameters = new HashMap<String, List<String>>(); // First, split all of the parameters apart. String[] keyValuePairs = parameterString.split(PARAMETER_SEPARATOR); // For each of the pairs, split their key and value and store them. for(String keyValuePair : keyValuePairs) { // If the pair is empty or null, ignore it. if(StringUtils.isEmptyOrWhitespaceOnly(keyValuePair.trim())) { continue; } // Split the key from the value. String[] splitPair = keyValuePair.split(PARAMETER_VALUE_SEPARATOR); // If there isn't exactly one key to one value, then there is a // problem, and we need to abort. 
if(splitPair.length <= 1) { throw new IllegalArgumentException("One of the parameter's 'pairs' did not contain a '" + PARAMETER_VALUE_SEPARATOR + "': " + keyValuePair); } else if(splitPair.length > 2) { throw new IllegalArgumentException("One of the parameter's 'pairs' contained multiple '" + PARAMETER_VALUE_SEPARATOR + "'s: " + keyValuePair); } // The key is the first part of the pair. String key = splitPair[0]; // The first or next value for the key is the second part of // the pair. List<String> values = parameters.get(key); if(values == null) { values = new LinkedList<String>(); parameters.put(key, values); } values.add(StringUtils.urlDecode(splitPair[1])); } // Now that we have all of the pairs, convert it into the // appropriate map. for(String key : parameters.keySet()) { parameterMap.put(key, parameters.get(key).toArray(new String[0])); } } return parameterMap; } /** * Reads the HttpServletRequest for a key-value pair and returns the value * where the key is equal to the given key. * * @param httpRequest A "multipart/form-data" request that contains the * parameter that has a key value 'key'. * * @param key The key for the value we are after in the 'httpRequest'. * * @return Returns null if there is no such key in the request or if, * after reading the object, it has a length of 0. Otherwise, it * returns the value associated with the key as a byte array. * * @throws ServletException Thrown if the 'httpRequest' is not a * "multipart/form-data" request. * * @throws IOException Thrown if there is an error reading the value from * the request's input stream. * * @throws IllegalStateException Thrown if the entire request is larger * than the maximum allowed size for a * request or if the value of the requested * key is larger than the maximum allowed * size for a single value. */ protected byte[] getMultipartValue(HttpServletRequest httpRequest, String key) throws ValidationException { try { Part part = httpRequest.getPart(key); if(part == null) { return null; } InputStream partInputStream = part.getInputStream(); ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); byte[] chunk = new byte[4096]; int amountRead; while((amountRead = partInputStream.read(chunk)) != -1) { outputStream.write(chunk, 0, amountRead); } if(outputStream.size() == 0) { return null; } else { return outputStream.toByteArray(); } } catch(ServletException e) { LOGGER.error("This is not a multipart/form-data POST.", e); setFailed(ErrorCode.SYSTEM_GENERAL_ERROR, "This is not a multipart/form-data POST which is what we expect for the current API call."); throw new ValidationException(e); } catch(IOException e) { LOGGER.error("There was an error reading the message from the input stream.", e); setFailed(); throw new ValidationException(e); } } /** * Sets the response headers to disallow client caching. */ protected void expireResponse(HttpServletResponse response) { response.setHeader("Expires", "Fri, 5 May 1995 12:00:00 GMT"); response.setHeader("Cache-Control", "no-store, no-cache, must-revalidate"); response.setHeader("Pragma", "no-cache"); // This is done to allow client content to be served up from from // different domains than the server data e.g., when you want to run a // client in a local sandbox, but retrieve data from a remote server response.setHeader("Access-Control-Allow-Origin","*"); } /** * There is functionality in Tomcat 6 to perform this action, but it is * also nice to have it controlled programmatically. * * @return an OutputStream appropriate for the headers found in the * request. 
*/ protected OutputStream getOutputStream(HttpServletRequest request, HttpServletResponse response) throws IOException { OutputStream os = null; // Determine if the response can be gzipped String encoding = request.getHeader("Accept-Encoding"); if (encoding != null && encoding.indexOf("gzip") >= 0) { if(LOGGER.isDebugEnabled()) { LOGGER.debug("Returning a GZIPOutputStream"); } response.setHeader("Content-Encoding","gzip"); response.setHeader("Vary", "Accept-Encoding"); os = new GZIPOutputStream(response.getOutputStream()); } else { if(LOGGER.isDebugEnabled()) { LOGGER.debug("Returning the default OutputStream"); } os = response.getOutputStream(); } return os; } /************************************************************************** * End JEE Requirements *************************************************************************/ }
Fixed a bug with GZIP'd uploads.
src/org/ohmage/request/Request.java
Fixed a bug with GZIP'd uploads.
<ide><path>rc/org/ohmage/request/Request.java <ide> * @param httpRequest An HttpServletRequest that was used to create this <ide> * request. This may be null if no such request exists. <ide> */ <del> protected Request(HttpServletRequest httpRequest) { <add> protected Request(final HttpServletRequest httpRequest) { <ide> annotator = new Annotator(); <ide> failed = false; <ide> <ide> } <ide> catch(IOException e) { <ide> LOGGER.error("The uploaded content was not GZIP content.", e); <del> setFailed(ErrorCode.SYSTEM_GENERAL_ERROR, "Not a gzip file."); <add> setFailed(ErrorCode.SYSTEM_GENERAL_ERROR, "Not GZIP content."); <ide> return Collections.emptyMap(); <ide> } <ide> <ide> } <ide> } <ide> <del> // Create the resulting object so that, unless we fail, we will never <del> // return null. <add> // Create the resulting object so that we will never return null. <ide> Map<String, String[]> parameterMap = new HashMap<String, String[]>(); <ide> <ide> // If the parameters string is not empty, parse it for the parameters. <ide> } <ide> <ide> // The key is the first part of the pair. <del> String key = splitPair[0]; <add> String key = StringUtils.urlDecode(splitPair[0]); <ide> <ide> // The first or next value for the key is the second part of <ide> // the pair.
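The record above centers on decoding GZIP'd, URL-encoded request bodies, and the diff's key change is that parameter keys are now URL-decoded as well as the values. As an illustration only — this is not the ohmage code path, and the class and method names below are invented for the sketch — a standalone decoder for a gzipped key=value&key=value body could look like this, assuming UTF-8 content:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.InputStream;
import java.net.URLDecoder;
import java.util.HashMap;
import java.util.Map;
import java.util.zip.GZIPInputStream;

public class GzipFormDecoder {

    /** Inflates a GZIP'd, URL-encoded form body and splits it into key/value pairs. */
    public static Map<String, String> decode(byte[] gzippedBody) throws Exception {
        // Inflate the whole body into memory first (fine for small uploads).
        ByteArrayOutputStream inflated = new ByteArrayOutputStream();
        try (InputStream in = new GZIPInputStream(new ByteArrayInputStream(gzippedBody))) {
            byte[] chunk = new byte[4096];
            int len;
            while ((len = in.read(chunk)) != -1) {
                inflated.write(chunk, 0, len);
            }
        }
        Map<String, String> params = new HashMap<String, String>();
        for (String pair : inflated.toString("UTF-8").split("&")) {
            if (pair.trim().isEmpty()) {
                continue;
            }
            String[] kv = pair.split("=", 2);
            // Decode the key as well as the value -- the essence of the fix in the diff above.
            String key = URLDecoder.decode(kv[0], "UTF-8");
            String value = kv.length > 1 ? URLDecoder.decode(kv[1], "UTF-8") : "";
            params.put(key, value);
        }
        return params;
    }
}

One design note: the servlet code above appends each raw chunk via new String(chunk, 0, readLen), which can split a multi-byte character across two chunks; inflating the full body before converting it to a String, as in this sketch, sidesteps that.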
Java
apache-2.0
1d767ad94601d8034f8c6556eede28847dd1adff
0
Elttbakh/Test02,SCORPIO12/Case2,jmhanna/commons-csv,afafhassan/commons-csv,lihenu/Crossover_project,pvllnspk/commons-csv,warriorno22/commons-csv,AndrewGuthua/CrossOverTest2,dakinyade/commons-csv,amee-trivedi/commons-csv,fadysamirzakarya/common-csv-2,fadysamirzakarya/common-csv-2,mirasrael/commons-csv,quettech/qa2,viliescu/PRODSUP-002,COTechTrial/case2,GauriGNaik/commons-csv,muhammadallee/commons-csv,gargchap/gargvaibhav,mohanaraosv/commons-csv,festusjejelowo/commons-csv,rayiss/commons-csv,apache/commons-csv,COTechTrial/case2,chio003/Test2,UzumakiMansi/commons-csv,gargchap/gargvaibhav,mirasrael/commons-csv,syedbilalmasaud/case2,shacore10/commons-csv,chio003/Test2,shashankasharma/commons-csv,UzumakiMansi/commons-csv,expertryk/commons-csv,quettech/csv-import,sufianqayyum131/PRODSUP-002,parmarsumit/commons-csv,jtardaguila/test2,chronoangelus/commons-csv,najamalvi/PRODSUP-002,sruputway/commons-csv_test,sruputway/commons-csv_test,viliescu/PRODSUP-002,jtardaguila/test2,quettech/csv-import,warriorno22/commons-csv,thanhnbt/commons-csv,fabriciobressan/crossover_question2,fadysamirzakarya/commons-csv,expertryk/commons-csv,dakinyade/commons-csv,fadysamirzakarya/commons-csv,shubhcollaborator/common-csvnew,arunpaulonline/test2,catconst/commons-csv,syedbilalmasaud/case2,najamalvi/PRODSUP-002,DGAlexandru/commons-csv,lihenu/Crossover_project,catconst/commons-csv,shashankasharma/commons-csv,festusjejelowo/commons-csv,jmhanna/commons-csv,catconst/commons-csv,mohanaraosv/commons-csv,Elttbakh/Test02,UzumakiMansi/commons-csv,Aweitzel86/TestCase2.1,apache/commons-csv,shashankasharma/commons-csv,thanhnbt/commons-csv,Elttbakh/Test03,pvllnspk/commons-csv,RavinaDhruve/commons-csv,iffi101/commons-csv,Aweitzel86/TestCase2.1,SCORPIO12/Case2,RavinaDhruve/commons-csv,harikrishna1947a/csv,amee-trivedi/commons-csv,fabriciobressan/crossover_question2,Elttbakh/Test03,arunnairvyaj/commons-csv-trunk,shadykandeel/commons-csv,muhammadallee/commons-csv,arunnairvyaj/commons-csv-trunk,mbreslow/commons-csv,afafhassan/commons-csv,shadykandeel/commons-csv,rayiss/commons-csv,khalilrahman/commons-csv,mbreslow/commons-csv,chronoangelus/commons-csv,arunpaulonline/test2,harikrishna1947a/csv,parmarsumit/commons-csv,shashankasharma/commons-csv,khalilrahman/commons-csv,quettech/qa2,shubhcollaborator/common-csvnew,iffi101/commons-csv,AndrewGuthua/CrossOverTest2,DGAlexandru/commons-csv,GauriGNaik/commons-csv,sufianqayyum131/PRODSUP-002,shacore10/commons-csv
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.csv; import java.io.Serializable; import java.util.Arrays; import java.util.Iterator; import java.util.Map; /** * A CSV record parsed from a CSV file. * * @version $Id$ */ public class CSVRecord implements Serializable, Iterable<String> { private static final String[] EMPTY_STRING_ARRAY = new String[0]; private static final long serialVersionUID = 1L; /** The accumulated comments (if any) */ private final String comment; /** The column name to index mapping. */ private final Map<String, Integer> mapping; /** The record number. */ private final long recordNumber; /** The values of the record */ private final String[] values; CSVRecord(final String[] values, final Map<String, Integer> mapping, final String comment, final long recordNumber) { this.recordNumber = recordNumber; this.values = values != null ? values : EMPTY_STRING_ARRAY; this.mapping = mapping; this.comment = comment; } /** * Returns a value by {@link Enum}. * * @param e * an enum * @return the String at the given enum String */ public String get(Enum<?> e) { return get(e.toString()); } /** * Returns a value by index. * * @param i * a column index (0-based) * @return the String at the given index */ public String get(final int i) { return values[i]; } /** * Returns a value by name. * * @param name * the name of the column to be retrieved. * @return the column value, or {@code null} if the column name is not found * @throws IllegalStateException * if no header mapping was provided * @throws IllegalArgumentException * if the record is inconsistent * @see #isConsistent() */ public String get(final String name) { if (mapping == null) { throw new IllegalStateException( "No header mapping was specified, the record values can't be accessed by name"); } final Integer index = mapping.get(name); try { return index != null ? values[index.intValue()] : null; } catch (final ArrayIndexOutOfBoundsException e) { throw new IllegalArgumentException( String.format( "Index for header '%s' is %d but CSVRecord only has %d values!", name, index, Integer.valueOf(values.length))); } } /** * Returns the comment for this record, if any. * * @return the comment for this record, or null if no comment for this * record is available. */ public String getComment() { return comment; } /** * Returns the number of this record in the parsed CSV file. * * @return the number of this record. */ public long getRecordNumber() { return recordNumber; } /** * Returns true if this record is consistent, false if not. Currently, the only check is matching the record size to * the header size. Some programs can export files that fails this test but still produce parsable files. 
* * @return true of this record is valid, false if not * @see CSVParserTest#org.apache.commons.csv.CSVParserTest.testMappedButNotSetAsOutlook2007ContactExport() */ public boolean isConsistent() { return mapping == null ? true : mapping.size() == values.length; } /** * Checks whether a given column is mapped, i.e. its name has been defined to the parser. * * @param name * the name of the column to be retrieved. * @return whether a given column is mapped. */ public boolean isMapped(final String name) { return mapping != null ? mapping.containsKey(name) : false; } /** * Checks whether a given columns is mapped and has a value. * * @param name * the name of the column to be retrieved. * @return whether a given columns is mapped and has a value */ public boolean isSet(final String name) { return isMapped(name) && mapping.get(name).intValue() < values.length; } /** * Returns an iterator over the values of this record. * * @return an iterator over the values of this record. */ public Iterator<String> iterator() { return Arrays.asList(values).iterator(); } /** * Returns the number of values in this record. * * @return the number of values. */ public int size() { return values.length; } @Override public String toString() { return Arrays.toString(values); } String[] values() { return values; } }
src/main/java/org/apache/commons/csv/CSVRecord.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.csv; import java.io.Serializable; import java.util.Arrays; import java.util.Iterator; import java.util.Map; /** * A CSV record parsed from a CSV file. * * @version $Id$ */ public class CSVRecord implements Serializable, Iterable<String> { private static final long serialVersionUID = 1L; private static final String[] EMPTY_STRING_ARRAY = new String[0]; /** The values of the record */ private final String[] values; /** The column name to index mapping. */ private final Map<String, Integer> mapping; /** The accumulated comments (if any) */ private final String comment; /** The record number. */ private final long recordNumber; CSVRecord(final String[] values, final Map<String, Integer> mapping, final String comment, final long recordNumber) { this.recordNumber = recordNumber; this.values = values != null ? values : EMPTY_STRING_ARRAY; this.mapping = mapping; this.comment = comment; } /** * Returns a value by {@link Enum}. * * @param e * an enum * @return the String at the given enum String */ public String get(Enum<?> e) { return get(e.toString()); } /** * Returns a value by index. * * @param i * a column index (0-based) * @return the String at the given index */ public String get(final int i) { return values[i]; } /** * Returns a value by name. * * @param name * the name of the column to be retrieved. * @return the column value, or {@code null} if the column name is not found * @throws IllegalStateException * if no header mapping was provided * @throws IllegalArgumentException * if the record is inconsistent * @see #isConsistent() */ public String get(final String name) { if (mapping == null) { throw new IllegalStateException( "No header mapping was specified, the record values can't be accessed by name"); } final Integer index = mapping.get(name); try { return index != null ? values[index.intValue()] : null; } catch (final ArrayIndexOutOfBoundsException e) { throw new IllegalArgumentException( String.format( "Index for header '%s' is %d but CSVRecord only has %d values!", name, index, Integer.valueOf(values.length))); } } /** * Returns true if this record is consistent, false if not. Currently, the only check is matching the record size to * the header size. Some programs can export files that fails this test but still produce parsable files. * * @return true of this record is valid, false if not * @see CSVParserTest#org.apache.commons.csv.CSVParserTest.testMappedButNotSetAsOutlook2007ContactExport() */ public boolean isConsistent() { return mapping == null ? true : mapping.size() == values.length; } /** * Checks whether a given column is mapped, i.e. its name has been defined to the parser. * * @param name * the name of the column to be retrieved. * @return whether a given column is mapped. 
*/ public boolean isMapped(final String name) { return mapping != null ? mapping.containsKey(name) : false; } /** * Checks whether a given columns is mapped and has a value. * * @param name * the name of the column to be retrieved. * @return whether a given columns is mapped and has a value */ public boolean isSet(final String name) { return isMapped(name) && mapping.get(name).intValue() < values.length; } /** * Returns an iterator over the values of this record. * * @return an iterator over the values of this record. */ public Iterator<String> iterator() { return Arrays.asList(values).iterator(); } String[] values() { return values; } /** * Returns the comment for this record, if any. * * @return the comment for this record, or null if no comment for this * record is available. */ public String getComment() { return comment; } /** * Returns the number of this record in the parsed CSV file. * * @return the number of this record. */ public long getRecordNumber() { return recordNumber; } /** * Returns the number of values in this record. * * @return the number of values. */ public int size() { return values.length; } @Override public String toString() { return Arrays.toString(values); } }
Sort members. git-svn-id: 77bd0fb3f0b1af3312cb764eaf99792cfc1ce6c4@1509231 13f79535-47bb-0310-9956-ffa450edef68
src/main/java/org/apache/commons/csv/CSVRecord.java
Sort members.
<ide><path>rc/main/java/org/apache/commons/csv/CSVRecord.java <ide> */ <ide> public class CSVRecord implements Serializable, Iterable<String> { <ide> <add> private static final String[] EMPTY_STRING_ARRAY = new String[0]; <add> <ide> private static final long serialVersionUID = 1L; <ide> <del> private static final String[] EMPTY_STRING_ARRAY = new String[0]; <del> <del> /** The values of the record */ <del> private final String[] values; <add> /** The accumulated comments (if any) */ <add> private final String comment; <ide> <ide> /** The column name to index mapping. */ <ide> private final Map<String, Integer> mapping; <ide> <del> /** The accumulated comments (if any) */ <del> private final String comment; <del> <ide> /** The record number. */ <ide> private final long recordNumber; <add> <add> /** The values of the record */ <add> private final String[] values; <ide> <ide> CSVRecord(final String[] values, final Map<String, Integer> mapping, <ide> final String comment, final long recordNumber) { <ide> } <ide> <ide> /** <add> * Returns the comment for this record, if any. <add> * <add> * @return the comment for this record, or null if no comment for this <add> * record is available. <add> */ <add> public String getComment() { <add> return comment; <add> } <add> <add> /** <add> * Returns the number of this record in the parsed CSV file. <add> * <add> * @return the number of this record. <add> */ <add> public long getRecordNumber() { <add> return recordNumber; <add> } <add> <add> /** <ide> * Returns true if this record is consistent, false if not. Currently, the only check is matching the record size to <ide> * the header size. Some programs can export files that fails this test but still produce parsable files. <ide> * <ide> return Arrays.asList(values).iterator(); <ide> } <ide> <del> String[] values() { <del> return values; <del> } <del> <del> /** <del> * Returns the comment for this record, if any. <del> * <del> * @return the comment for this record, or null if no comment for this <del> * record is available. <del> */ <del> public String getComment() { <del> return comment; <del> } <del> <del> /** <del> * Returns the number of this record in the parsed CSV file. <del> * <del> * @return the number of this record. <del> */ <del> public long getRecordNumber() { <del> return recordNumber; <del> } <del> <ide> /** <ide> * Returns the number of values in this record. <ide> * <ide> return Arrays.toString(values); <ide> } <ide> <add> String[] values() { <add> return values; <add> } <add> <ide> <ide> }
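The CSVRecord commit above is a pure member reordering, so behaviour is unchanged. For readers unfamiliar with the class, here is a minimal usage sketch of the accessors it exposes (get by index or header name, getRecordNumber, isConsistent, size); it assumes a released commons-csv 1.x artifact on the classpath, whose API may differ slightly from the trunk snapshot shown here:

import java.io.StringReader;

import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVRecord;

public class CsvRecordDemo {
    public static void main(String[] args) throws Exception {
        String data = "1,Alice\n2,Bob\n";
        // Supplying a header makes CSVRecord#get(String) usable; without it only get(int) works.
        for (CSVRecord record : CSVFormat.DEFAULT.withHeader("id", "name").parse(new StringReader(data))) {
            System.out.printf("record %d: id=%s name=%s (consistent=%s, %d values)%n",
                    record.getRecordNumber(), record.get("id"), record.get("name"),
                    record.isConsistent(), record.size());
        }
    }
}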
Java
apache-2.0
74004356daf1dd2c9e189d3f42a88d22c87235ed
0
spennihana/h2o-3,junwucs/h2o-3,michalkurka/h2o-3,junwucs/h2o-3,printedheart/h2o-3,jangorecki/h2o-3,YzPaul3/h2o-3,brightchen/h2o-3,spennihana/h2o-3,datachand/h2o-3,madmax983/h2o-3,datachand/h2o-3,kyoren/https-github.com-h2oai-h2o-3,datachand/h2o-3,mathemage/h2o-3,tarasane/h2o-3,mathemage/h2o-3,madmax983/h2o-3,h2oai/h2o-3,printedheart/h2o-3,pchmieli/h2o-3,kyoren/https-github.com-h2oai-h2o-3,michalkurka/h2o-3,junwucs/h2o-3,datachand/h2o-3,pchmieli/h2o-3,YzPaul3/h2o-3,brightchen/h2o-3,kyoren/https-github.com-h2oai-h2o-3,brightchen/h2o-3,michalkurka/h2o-3,datachand/h2o-3,jangorecki/h2o-3,michalkurka/h2o-3,madmax983/h2o-3,h2oai/h2o-dev,YzPaul3/h2o-3,YzPaul3/h2o-3,junwucs/h2o-3,jangorecki/h2o-3,spennihana/h2o-3,brightchen/h2o-3,pchmieli/h2o-3,h2oai/h2o-3,brightchen/h2o-3,h2oai/h2o-3,kyoren/https-github.com-h2oai-h2o-3,h2oai/h2o-3,h2oai/h2o-dev,datachand/h2o-3,spennihana/h2o-3,spennihana/h2o-3,mathemage/h2o-3,kyoren/https-github.com-h2oai-h2o-3,madmax983/h2o-3,junwucs/h2o-3,YzPaul3/h2o-3,h2oai/h2o-3,h2oai/h2o-dev,h2oai/h2o-3,jangorecki/h2o-3,jangorecki/h2o-3,printedheart/h2o-3,tarasane/h2o-3,brightchen/h2o-3,pchmieli/h2o-3,madmax983/h2o-3,madmax983/h2o-3,pchmieli/h2o-3,printedheart/h2o-3,YzPaul3/h2o-3,h2oai/h2o-dev,datachand/h2o-3,spennihana/h2o-3,michalkurka/h2o-3,michalkurka/h2o-3,tarasane/h2o-3,tarasane/h2o-3,h2oai/h2o-dev,printedheart/h2o-3,mathemage/h2o-3,tarasane/h2o-3,jangorecki/h2o-3,madmax983/h2o-3,pchmieli/h2o-3,h2oai/h2o-dev,junwucs/h2o-3,mathemage/h2o-3,mathemage/h2o-3,kyoren/https-github.com-h2oai-h2o-3,YzPaul3/h2o-3,tarasane/h2o-3,jangorecki/h2o-3,h2oai/h2o-dev,junwucs/h2o-3,h2oai/h2o-3,michalkurka/h2o-3,printedheart/h2o-3,spennihana/h2o-3,kyoren/https-github.com-h2oai-h2o-3,tarasane/h2o-3,printedheart/h2o-3,mathemage/h2o-3,pchmieli/h2o-3,h2oai/h2o-3,brightchen/h2o-3
package hex.deeplearning; import hex.DataInfo; import static java.lang.Double.isNaN; import water.*; import water.fvec.Frame; import water.util.*; import java.util.Arrays; import java.util.Random; /** * This class contains the state of the Deep Learning model * This will be shared: one per node */ public class DeepLearningModelInfo extends Iced { public TwoDimTable summaryTable; public DataInfo data_info; public DataInfo data_info() { return data_info; } // model is described by parameters and the following arrays private Storage.DenseRowMatrix[] dense_row_weights; //one 2D weight matrix per layer (stored as a 1D array each) private Storage.DenseVector[] biases; //one 1D bias array per layer private Storage.DenseVector[] avg_activations; //one 1D array per hidden layer // helpers for storing previous step deltas // Note: These two arrays *could* be made transient and then initialized freshly in makeNeurons() and in DeepLearningTask.initLocal() // But then, after each reduction, the weights would be lost and would have to restart afresh -> not *exactly* right, but close... private Storage.DenseRowMatrix[] dense_row_weights_momenta; private Storage.DenseVector[] biases_momenta; // helpers for AdaDelta private Storage.DenseRowMatrix[] dense_row_ada_dx_g; private Storage.DenseVector[] biases_ada_dx_g; private boolean[] _saw_missing_cats; // whether missing value was encountered for each categorical predictor - needed for varimp // compute model size (number of model parameters required for making predictions) // momenta are not counted here, but they are needed for model building public long size() { long siz = 0; for (Storage.DenseRowMatrix w : dense_row_weights) if (w != null) siz += w.size(); for (Storage.Vector b : biases) siz += b.size(); return siz; } /** * Check whether a missing value was found for every categorical predictor * @param cats */ void checkMissingCats(int[] cats) { if (cats == null) return; if (_saw_missing_cats == null) return; for (int i=0; i<cats.length; ++i) { assert(data_info._catMissing[i] == 1); //have a missing bucket for each categorical if (_saw_missing_cats[i]) continue; _saw_missing_cats[i] = (cats[i] == data_info._catOffsets[i+1]-1); } } // accessors to (shared) weights and biases - those will be updated racily (c.f. Hogwild!) 
boolean has_momenta() { return get_params()._momentum_start != 0 || get_params()._momentum_stable != 0; } boolean adaDelta() { return get_params()._adaptive_rate; } public final Storage.DenseRowMatrix get_weights(int i) { return dense_row_weights[i]; } public final Storage.DenseVector get_biases(int i) { return biases[i]; } public final Storage.DenseRowMatrix get_weights_momenta(int i) { return dense_row_weights_momenta[i]; } public final Storage.DenseVector get_biases_momenta(int i) { return biases_momenta[i]; } public final Storage.DenseRowMatrix get_ada_dx_g(int i) { return dense_row_ada_dx_g[i]; } public final Storage.DenseVector get_biases_ada_dx_g(int i) { return biases_ada_dx_g[i]; } //accessor to shared parameter defining avg activations public final Storage.DenseVector get_avg_activations(int i) { return avg_activations[i]; } public DeepLearningParameters parameters; public final DeepLearningParameters get_params() { return parameters; } public final void set_params(DeepLearningParameters p) { parameters = (DeepLearningParameters) p.clone(); } private double[] mean_rate; private double[] rms_rate; private double[] mean_bias; private double[] rms_bias; private double[] mean_weight; public double[] rms_weight; public double[] mean_a; private volatile boolean unstable = false; public boolean unstable() { return unstable; } public void set_unstable() { if (!unstable) computeStats(); unstable = true; } private long processed_global; public synchronized long get_processed_global() { return processed_global; } public synchronized void set_processed_global(long p) { processed_global = p; } public synchronized void add_processed_global(long p) { processed_global += p; } private long processed_local; public synchronized long get_processed_local() { return processed_local; } public synchronized void set_processed_local(long p) { processed_local = p; } public synchronized void add_processed_local(long p) { processed_local += p; } public synchronized long get_processed_total() { return processed_global + processed_local; } // package local helpers int[] units; //number of neurons per layer, extracted from parameters and from datainfo final boolean _classification; // Classification cache (nclasses>1) final Frame _train; // Prepared training frame final Frame _valid; // Prepared validation frame /** * Dummy constructor, only to be used for deserialization from autobuffer */ private DeepLearningModelInfo() { super(); // key is null _classification = false; _train = _valid = null; } /** * Main constructor * @param params Model parameters * @param dinfo Data Info * @param nClasses number of classes (1 for regression, 0 for autoencoder) * @param train User-given training data frame, prepared by AdaptTestTrain * @param valid User-specified validation data frame, prepared by AdaptTestTrain */ public DeepLearningModelInfo(final DeepLearningParameters params, final DataInfo dinfo, int nClasses, Frame train, Frame valid) { _classification = nClasses > 1; _train = train; _valid = valid; data_info = dinfo; parameters = (DeepLearningParameters) params.clone(); //make a copy, don't change model's parameters DeepLearningParameters.Sanity.modifyParms(parameters, parameters, nClasses); //sanitize the model_info's parameters final int num_input = dinfo.fullN(); final int num_output = get_params()._autoencoder ? num_input : (_classification ? train.lastVec().cardinality() : 1); if (!get_params()._autoencoder) assert(num_output == nClasses); _saw_missing_cats = dinfo._cats > 0 ? 
new boolean[data_info._cats] : null; assert (num_input > 0); assert (num_output > 0); if (has_momenta() && adaDelta()) throw new IllegalArgumentException("Cannot have non-zero momentum and adaptive rate at the same time."); final int layers = get_params()._hidden.length; // units (# neurons for each layer) units = new int[layers + 2]; if (get_params()._max_categorical_features <= Integer.MAX_VALUE - dinfo._nums) units[0] = Math.min(dinfo._nums + get_params()._max_categorical_features, num_input); else units[0] = num_input; System.arraycopy(get_params()._hidden, 0, units, 1, layers); units[layers + 1] = num_output; boolean printLevels = units[0] > 1000L; boolean warn = units[0] > 100000L; if (printLevels) { final String[][] domains = dinfo._adaptedFrame.domains(); int[] levels = new int[domains.length]; for (int i = 0; i < levels.length; ++i) { levels[i] = domains[i] != null ? domains[i].length : 0; } Arrays.sort(levels); if (warn) { Log.warn("==================================================================================================================================="); Log.warn(num_input + " input features" + (dinfo._cats > 0 ? " (after categorical one-hot encoding)" : "") + ". Can be slow and require a lot of memory."); } if (levels[levels.length - 1] > 0) { int levelcutoff = levels[levels.length - 1 - Math.min(10, levels.length - 1)]; int count = 0; for (int i = 0; i < dinfo._adaptedFrame.numCols() - (get_params()._autoencoder ? 0 : 1) && count < 10; ++i) { if (dinfo._adaptedFrame.domains()[i] != null && dinfo._adaptedFrame.domains()[i].length >= levelcutoff) { if (warn) { Log.warn("Categorical feature '" + dinfo._adaptedFrame._names[i] + "' has cardinality " + dinfo._adaptedFrame.domains()[i].length + "."); } else { Log.info("Categorical feature '" + dinfo._adaptedFrame._names[i] + "' has cardinality " + dinfo._adaptedFrame.domains()[i].length + "."); } } count++; } } if (warn) { Log.warn("Suggestions:"); Log.warn(" *) Limit the size of the first hidden layer"); if (dinfo._cats > 0) { Log.warn(" *) Limit the total number of one-hot encoded features with the parameter 'max_categorical_features'"); Log.warn(" *) Run h2o.interaction(...,pairwise=F) on high-cardinality categorical columns to limit the factor count, see http://learn.h2o.ai"); } Log.warn("==================================================================================================================================="); } } int[] mult = new int[layers + 1]; for (int i=0;i<layers;++i) { mult[i] = (get_params()._activation == DeepLearningParameters.Activation.Maxout || get_params()._activation == DeepLearningParameters.Activation.MaxoutWithDropout) ? 
2 : 1; } mult[layers]=1; //Output is never Maxout // weights (to connect layers) dense_row_weights = new Storage.DenseRowMatrix[layers + 1]; dense_row_weights[0] = new Storage.DenseRowMatrix(mult[0]*units[1], units[0]); for (int i = 1; i <= layers; ++i) dense_row_weights[i] = new Storage.DenseRowMatrix(mult[i] * units[i + 1] /*rows*/, units[i] /*cols*/); // biases (only for hidden layers and output layer) biases = new Storage.DenseVector[layers + 1]; for (int i = 0; i <= layers; ++i) biases[i] = new Storage.DenseVector(mult[i] * units[i+1]); // average activation (only for hidden layers) if (get_params()._autoencoder && get_params()._sparsity_beta > 0) { avg_activations = new Storage.DenseVector[layers]; mean_a = new double[layers]; for (int i = 0; i < layers; ++i) avg_activations[i] = new Storage.DenseVector(mult[i] * units[i + 1]); } allocateHelperArrays(); // for diagnostics mean_rate = new double[units.length-1]; rms_rate = new double[units.length-1]; mean_bias = new double[units.length-1]; rms_bias = new double[units.length-1]; mean_weight = new double[units.length-1]; rms_weight = new double[units.length-1]; } // deep clone all weights/biases DeepLearningModelInfo deep_clone() { AutoBuffer ab = new AutoBuffer(); this.write(ab); ab.flipForReading(); return (DeepLearningModelInfo) new DeepLearningModelInfo().read(ab); } /** * Allocate helper arrays for momentum/learning rate, etc. */ void allocateHelperArrays() { int[] mult = new int[units.length-1]; for (int i=0;i<units.length-1;++i) { mult[i] = (get_params()._activation == DeepLearningParameters.Activation.Maxout || get_params()._activation == DeepLearningParameters.Activation.MaxoutWithDropout) ? 2 : 1; } mult[units.length-2]=1; //Output is never Maxout if (has_momenta()) { dense_row_weights_momenta = new Storage.DenseRowMatrix[dense_row_weights.length]; if (dense_row_weights[0] != null) dense_row_weights_momenta[0] = new Storage.DenseRowMatrix(mult[0]*units[1], units[0]); for (int i = 1; i < dense_row_weights_momenta.length; ++i) dense_row_weights_momenta[i] = new Storage.DenseRowMatrix(mult[i]*units[i + 1], units[i]); biases_momenta = new Storage.DenseVector[biases.length]; for (int i = 0; i < biases_momenta.length; ++i) biases_momenta[i] = new Storage.DenseVector(mult[i]*units[i + 1]); } else if (adaDelta()) { dense_row_ada_dx_g = new Storage.DenseRowMatrix[dense_row_weights.length]; //AdaGrad dense_row_ada_dx_g[0] = new Storage.DenseRowMatrix(mult[0]*2*units[1], units[0]); for (int i = 1; i < dense_row_ada_dx_g.length; ++i) { dense_row_ada_dx_g[i] = new Storage.DenseRowMatrix(mult[i]*units[i + 1], 2 * units[i]); } biases_ada_dx_g = new Storage.DenseVector[biases.length]; for (int i = 0; i < biases_ada_dx_g.length; ++i) { biases_ada_dx_g[i] = new Storage.DenseVector(mult[i]*2* units[i + 1]); } } } /** * Create a summary table * @return */ TwoDimTable createSummaryTable() { computeStats(); Neurons[] neurons = DeepLearningTask.makeNeuronsForTesting(this); long byte_size = new AutoBuffer().put(this).buf().length; TwoDimTable table = new TwoDimTable( "Status of Neuron Layers", (!get_params()._autoencoder ? ("predicting " + _train.lastVecName() + ", ") : "") + (get_params()._autoencoder ? "auto-encoder" : _classification ? 
(units[units.length - 1] + "-class classification") : "regression") + ", " + get_params()._distribution + " distribution, " + get_params()._loss + " loss, " + String.format("%,d", size()) + " weights/biases, " + PrettyPrint.bytes(byte_size) + ", " + String.format("%,d", get_processed_global()) + " training samples, " + "mini-batch size " + String.format("%,d", get_params()._mini_batch_size), new String[neurons.length], new String[]{"Layer", "Units", "Type", "Dropout", "L1", "L2", "Mean Rate", "Rate RMS", "Momentum", "Mean Weight", "Weight RMS", "Mean Bias", "Bias RMS" }, new String[]{"int", "int", "string", "double", "double", "double", "double", "double", "double", "double", "double", "double", "double" }, new String[]{"%d", "%d", "%s", "%2.2f %%", "%5f", "%5f", "%5f", "%5f", "%5f", "%5f", "%5f", "%5f", "%5f"}, ""); for (int i = 0; i < neurons.length; ++i) { table.set(i, 0, i + 1); table.set(i, 1, neurons[i].units); table.set(i, 2, neurons[i].getClass().getSimpleName()); if (i == 0) { table.set(i, 3, neurons[i].params._input_dropout_ratio * 100); continue; } else if (i < neurons.length - 1) { if (neurons[i].params._hidden_dropout_ratios == null) { table.set(i, 3, 0); } else { table.set(i, 3, neurons[i].params._hidden_dropout_ratios[i - 1] * 100); } } table.set(i, 4, neurons[i].params._l1); table.set(i, 5, neurons[i].params._l2); table.set(i, 6, (get_params()._adaptive_rate ? mean_rate[i-1] : neurons[i].rate(get_processed_total()))); table.set(i, 7, (get_params()._adaptive_rate ? rms_rate[i-1] : 0)); table.set(i, 8, get_params()._adaptive_rate ? 0 : neurons[i].momentum(get_processed_total())); table.set(i, 9, mean_weight[i-1]); table.set(i, 10, rms_weight[i-1]); table.set(i, 11, mean_bias[i-1]); table.set(i, 12, rms_bias[i-1]); } summaryTable = table; return summaryTable; } /** * Print a summary table * @return String containing ASCII version of summary table */ @Override public String toString() { StringBuilder sb = new StringBuilder(); if (!get_params()._quiet_mode) { if (get_params()._sparsity_beta > 0) { for (int k = 0; k < get_params()._hidden.length; k++) { sb.append("Average activation in hidden layer ").append(k).append(" is ").append(mean_a[k]).append(" \n"); } } createSummaryTable(); sb.append(summaryTable.toString(1)); } return sb.toString(); } /** * Debugging printout * @return String with useful info */ public String toStringAll() { StringBuilder sb = new StringBuilder(); sb.append(toString()); for (int i = 0; i < units.length - 1; ++i) sb.append("\nweights[").append(i).append("][]=").append(Arrays.toString(get_weights(i).raw())); for (int i = 0; i < units.length - 1; ++i) { sb.append("\nbiases[").append(i).append("][]=").append(Arrays.toString(get_biases(i).raw())); } if (has_momenta()) { for (int i = 0; i < units.length - 1; ++i) sb.append("\nweights_momenta[").append(i).append("][]=").append(Arrays.toString(get_weights_momenta(i).raw())); } if (biases_momenta != null) { for (int i = 0; i < units.length - 1; ++i) { sb.append("\nbiases_momenta[").append(i).append("][]=").append(Arrays.toString(biases_momenta[i].raw())); } } sb.append("\nunits[]=").append(Arrays.toString(units)); sb.append("\nprocessed global: ").append(get_processed_global()); sb.append("\nprocessed local: ").append(get_processed_local()); sb.append("\nprocessed total: ").append(get_processed_total()); sb.append("\n"); return sb.toString(); } /** * Initialize weights/biases */ void initializeMembers() { randomizeWeights(); //TODO: determine good/optimal/best initialization scheme for biases // hidden layers 
for (int i = 0; i < get_params()._hidden.length; ++i) { if (get_params()._activation == DeepLearningParameters.Activation.Rectifier || get_params()._activation == DeepLearningParameters.Activation.RectifierWithDropout || get_params()._activation == DeepLearningParameters.Activation.Maxout || get_params()._activation == DeepLearningParameters.Activation.MaxoutWithDropout ) { // Arrays.fill(biases[i], 1.); //old behavior Arrays.fill(biases[i].raw(), i == 0 ? 0.5f : 1f); //new behavior, might be slightly better } else if (get_params()._activation == DeepLearningParameters.Activation.Tanh || get_params()._activation == DeepLearningParameters.Activation.TanhWithDropout) { Arrays.fill(biases[i].raw(), 0f); } } Arrays.fill(biases[biases.length - 1].raw(), 0f); //output layer } /** * Add another model info into this * This will add the weights/biases/learning rate helpers, and the number of processed training samples * Note: It will NOT add the elastic averaging helpers, which are always kept constant (they already are the result of a reduction) * @param other */ public void add(DeepLearningModelInfo other) { for (int i = 0; i < dense_row_weights.length; ++i) ArrayUtils.add(get_weights(i).raw(), other.get_weights(i).raw()); for (int i = 0; i < biases.length; ++i) ArrayUtils.add(biases[i].raw(), other.biases[i].raw()); if (avg_activations != null) for (int i = 0; i < avg_activations.length; ++i) ArrayUtils.add(avg_activations[i].raw(), other.biases[i].raw()); if (has_momenta()) { assert (other.has_momenta()); for (int i = 0; i < dense_row_weights_momenta.length; ++i) ArrayUtils.add(get_weights_momenta(i).raw(), other.get_weights_momenta(i).raw()); for (int i = 0; i < biases_momenta.length; ++i) ArrayUtils.add(biases_momenta[i].raw(), other.biases_momenta[i].raw()); } if (adaDelta()) { assert (other.adaDelta()); for (int i = 0; i < dense_row_ada_dx_g.length; ++i) { ArrayUtils.add(get_ada_dx_g(i).raw(), other.get_ada_dx_g(i).raw()); } } add_processed_local(other.get_processed_local()); } /** * Multiply all weights/biases by a real-valued number * @param N */ protected void mult(double N) { div(1 / N); } /** * Divide all weights/biases by a real-valued number * @param N */ protected void div(double N) { for (int i = 0; i < dense_row_weights.length; ++i) ArrayUtils.div(get_weights(i).raw(), (float)N); for (Storage.Vector bias : biases) ArrayUtils.div(bias.raw(), N); if (avg_activations != null) for (Storage.Vector avgac : avg_activations) ArrayUtils.div(avgac.raw(), N); if (has_momenta()) { for (int i = 0; i < dense_row_weights_momenta.length; ++i) ArrayUtils.div(get_weights_momenta(i).raw(), (float)N); for (Storage.Vector bias_momenta : biases_momenta) ArrayUtils.div(bias_momenta.raw(), N); } if (adaDelta()) { for (int i = 0; i < dense_row_ada_dx_g.length; ++i) { ArrayUtils.div(get_ada_dx_g(i).raw(), (float)N); } } } double uniformDist(Random rand, double min, double max) { return min + rand.nextFloat() * (max - min); } /** * Initialization of neural net weights * cf. http://machinelearning.wustl.edu/mlpapers/paper_files/AISTATS2010_GlorotB10.pdf */ private void randomizeWeights() { for (int w = 0; w < dense_row_weights.length; ++w) { final Random rng = water.util.RandomUtils.getRNG(get_params()._seed + 0xBAD5EED + w + 1); //to match NeuralNet behavior final double range = Math.sqrt(6. 
/ (units[w] + units[w + 1])); for (int i = 0; i < get_weights(w).rows(); i++) { for (int j = 0; j < get_weights(w).cols(); j++) { if (get_params()._initial_weight_distribution == DeepLearningParameters.InitialWeightDistribution.UniformAdaptive) { // cf. http://machinelearning.wustl.edu/mlpapers/paper_files/AISTATS2010_GlorotB10.pdf if (w == dense_row_weights.length - 1 && _classification) get_weights(w).set(i, j, (float) (4. * uniformDist(rng, -range, range))); //Softmax might need an extra factor 4, since it's like a sigmoid else get_weights(w).set(i, j, (float) uniformDist(rng, -range, range)); } else if (get_params()._initial_weight_distribution == DeepLearningParameters.InitialWeightDistribution.Uniform) { get_weights(w).set(i, j, (float) uniformDist(rng, -get_params()._initial_weight_scale, get_params()._initial_weight_scale)); } else if (get_params()._initial_weight_distribution == DeepLearningParameters.InitialWeightDistribution.Normal) { get_weights(w).set(i, j, (float) (rng.nextGaussian() * get_params()._initial_weight_scale)); } } } } } // TODO: Add "subset randomize" function // int count = Math.min(15, _previous.units); // double min = -.1f, max = +.1f; // //double min = -1f, max = +1f; // for( int o = 0; o < units; o++ ) { // for( int n = 0; n < count; n++ ) { // int i = rand.nextInt(_previous.units); // int w = o * _previous.units + i; // _w[w] = uniformDist(rand, min, max); // } // } /** * Compute Variable Importance, based on * GEDEON: DATA MINING OF INPUTS: ANALYSING MAGNITUDE AND FUNCTIONAL MEASURES * * @return variable importances for input features */ public float[] computeVariableImportances() { float[] vi = new float[units[0]]; Arrays.fill(vi, 0f); float[][] Qik = new float[units[0]][units[2]]; //importance of input i on output k float[] sum_wj = new float[units[1]]; //sum of incoming weights into first hidden layer float[] sum_wk = new float[units[2]]; //sum of incoming weights into output layer (or second hidden layer) for (float[] Qi : Qik) Arrays.fill(Qi, 0f); Arrays.fill(sum_wj, 0f); Arrays.fill(sum_wk, 0f); // compute sum of absolute incoming weights for (int j = 0; j < units[1]; j++) { for (int i = 0; i < units[0]; i++) { float wij = get_weights(0).get(j, i); sum_wj[j] += Math.abs(wij); } } for (int k = 0; k < units[2]; k++) { for (int j = 0; j < units[1]; j++) { float wjk = get_weights(1).get(k, j); sum_wk[k] += Math.abs(wjk); } } // compute importance of input i on output k as product of connecting weights going through j for (int i = 0; i < units[0]; i++) { for (int k = 0; k < units[2]; k++) { for (int j = 0; j < units[1]; j++) { float wij = get_weights(0).get(j, i); float wjk = get_weights(1).get(k, j); //Qik[i][k] += Math.abs(wij)/sum_wj[j] * wjk; //Wong,Gedeon,Taggart '95 Qik[i][k] += Math.abs(wij) / sum_wj[j] * Math.abs(wjk) / sum_wk[k]; //Gedeon '97 } } } // normalize Qik over all outputs k for (int k = 0; k < units[2]; k++) { float sumQk = 0; for (int i = 0; i < units[0]; i++) sumQk += Qik[i][k]; for (int i = 0; i < units[0]; i++) Qik[i][k] /= sumQk; } // importance for feature i is the sum over k of i->k importances for (int i = 0; i < units[0]; i++) vi[i] = ArrayUtils.sum(Qik[i]); //normalize importances such that max(vi) = 1 ArrayUtils.div(vi, ArrayUtils.maxValue(vi)); // zero out missing categorical variables if they were never seen if (_saw_missing_cats != null) { for (int i = 0; i < _saw_missing_cats.length; ++i) { assert (data_info._catMissing[i] == 1); //have a missing bucket for each categorical if (!_saw_missing_cats[i]) 
vi[data_info._catOffsets[i + 1] - 1] = 0; } } return vi; } /** * Compute statistics about this model on all nodes */ public void computeStats() { float[][] rate = get_params()._adaptive_rate ? new float[units.length - 1][] : null; if (get_params()._autoencoder && get_params()._sparsity_beta > 0) { for (int k = 0; k < get_params()._hidden.length; k++) { mean_a[k] = 0; for (int j = 0; j < avg_activations[k].size(); j++) mean_a[k] += avg_activations[k].get(j); mean_a[k] /= avg_activations[k].size(); } } for (int y = 0; y < units.length-1; y++) { mean_rate[y] = rms_rate[y] = 0; mean_bias[y] = rms_bias[y] = 0; mean_weight[y] = rms_weight[y] = 0; for (int u = 0; u < biases[y].size(); u++) { mean_bias[y] += biases[y].get(u); } if (rate != null) rate[y] = new float[get_weights(y).raw().length]; for (int u = 0; u < get_weights(y).raw().length; u++) { mean_weight[y] += get_weights(y).raw()[u]; if (rate != null) { // final float RMS_dx = (float)Math.sqrt(ada[y][2*u]+(float)get_params().epsilon); // final float invRMS_g = (float)(1/Math.sqrt(ada[y][2*u+1]+(float)get_params().epsilon)); final float RMS_dx = MathUtils.approxSqrt(get_ada_dx_g(y).raw()[2 * u] + (float) get_params()._epsilon); final float invRMS_g = MathUtils.approxInvSqrt(get_ada_dx_g(y).raw()[2 * u + 1] + (float) get_params()._epsilon); rate[y][u] = RMS_dx * invRMS_g; //not exactly right, RMS_dx should be from the previous time step -> but close enough for diagnostics. mean_rate[y] += rate[y][u]; } } mean_bias[y] /= biases[y].size(); mean_weight[y] /= get_weights(y).size(); if (rate != null) mean_rate[y] /= rate[y].length; for (int u = 0; u < biases[y].size(); u++) { final double db = biases[y].get(u) - mean_bias[y]; rms_bias[y] += db * db; } for (int u = 0; u < get_weights(y).size(); u++) { final double dw = get_weights(y).raw()[u] - mean_weight[y]; rms_weight[y] += dw * dw; if (rate != null) { final double drate = rate[y][u] - mean_rate[y]; rms_rate[y] += drate * drate; } } rms_bias[y] = MathUtils.approxSqrt(rms_bias[y] / biases[y].size()); rms_weight[y] = MathUtils.approxSqrt(rms_weight[y] / get_weights(y).size()); if (rate != null) rms_rate[y] = MathUtils.approxSqrt(rms_rate[y]/ rate[y].length); // rms_bias[y] = (float)Math.sqrt(rms_bias[y]/biases[y].length); // rms_weight[y] = (float)Math.sqrt(rms_weight[y]/weights[y].length); // if (rate != null) rms_rate[y] = (float)Math.sqrt(rms_rate[y]/rate[y].length); // Abort the run if weights or biases are unreasonably large (Note that all input values are normalized upfront) // This can happen with Rectifier units when L1/L2/max_w2 are all set to 0, especially when using more than 1 hidden layer. 
final double thresh = 1e10; final double bthresh = 1000; unstable |= Math.abs(mean_bias[y]) > bthresh || isNaN(mean_bias[y]) || rms_bias[y] > bthresh || isNaN(rms_bias[y]) || Math.abs(mean_weight[y]) > thresh || isNaN(mean_weight[y]) || rms_weight[y] > thresh || isNaN(rms_weight[y]); } } /** * Unique identifier for this model's state, based on raw numbers */ protected long checksum_impl() { computeStats(); Random rng = new Random(0xDECAFBBB); double cs = Double.longBitsToDouble(get_params()._seed); cs += size() * get_processed_total(); for (double d : mean_bias) cs += (rng.nextDouble() * (d+123.23)); for (double d : rms_bias) cs += (rng.nextDouble() * (d+123.23)); for (double d : mean_weight) cs += (rng.nextDouble() * (d+123.23)); for (double d : rms_weight) cs += (rng.nextDouble() * (d+123.23)); for (double d : mean_rate) cs += (rng.nextDouble() * (d+123.23)); for (double d : rms_rate) cs += (rng.nextDouble() * (d+123.23)); return Double.doubleToRawLongBits(cs); } /** * TimeAveraging as part of Elastic Averaging Algorithm * Cf. equation 6 of arXiv:1412.6651v5 * @param nodeAverageModel current average of per-node models * @return Time-average of node-averages (consensus model, "the" model) */ public static DeepLearningModelInfo timeAverage(DeepLearningModelInfo nodeAverageModel) { float pa = (float) nodeAverageModel.get_params()._elastic_averaging_moving_rate; assert(pa > 0 && pa <= 1); DeepLearningModelInfo elasticAverage = DKV.getGet(nodeAverageModel.elasticAverageModelInfoKey()); //get latest version from DKV if (elasticAverage == null || pa == 1) { elasticAverage = nodeAverageModel.deep_clone(); } else { nodeAverageModel.mult(pa); elasticAverage.mult(1 - pa); elasticAverage.add(nodeAverageModel); //ignore processed local value set here elasticAverage.set_processed_global(nodeAverageModel.get_processed_global()); } elasticAverage.set_processed_local(0); DKV.put(elasticAverage.elasticAverageModelInfoKey(), elasticAverage); // nodeAverageModel.computeStats(); // elasticAverage.computeStats(); // Log.info("Local Model :\n" + nodeAverageModel.toString()); // Log.info("Elastic Average:\n" + elasticAverage.toString()); return elasticAverage; } public Key localModelInfoKey(H2ONode node) { return Key.make(get_params()._model_id + ".node" + node.index(), (byte) 1 /*replica factor*/, (byte) 31 /*hidden user-key*/, true, node); } public Key elasticAverageModelInfoKey() { return Key.make(get_params()._model_id + ".elasticaverage", (byte) 1 /*replica factor*/, (byte) 31 /*hidden user-key*/, true, H2O.CLOUD._memary[0]); } static public class GradientCheck { GradientCheck(int l, int r, int c) { layer=l; row=r; col=c; gradient=0;} int layer; int row; int col; double gradient; void apply(int l, int r, int c, double g) { if (r==row && c==col && l==layer) { assert(gradient == 0); //there can only be one match gradient = g; } } } static public GradientCheck gradientCheck = null; }
h2o-algos/src/main/java/hex/deeplearning/DeepLearningModelInfo.java
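DeepLearningModelInfo.randomizeWeights above draws initial weights from Uniform(-r, r) with r = sqrt(6 / (fan_in + fan_out)), the Glorot/Bengio "uniform adaptive" scheme it cites. A self-contained sketch of just that initialization follows — it does not use H2O's Storage classes, and the class and method names are invented for illustration:

import java.util.Random;

public class GlorotUniform {

    /** Fills a fanOut x fanIn weight matrix with Uniform(-r, r), r = sqrt(6 / (fanIn + fanOut)). */
    public static float[][] init(int fanIn, int fanOut, long seed) {
        double range = Math.sqrt(6.0 / (fanIn + fanOut));
        Random rng = new Random(seed);
        float[][] weights = new float[fanOut][fanIn];
        for (int row = 0; row < fanOut; row++) {
            for (int col = 0; col < fanIn; col++) {
                // Equivalent to uniformDist(rng, -range, range) in the model code above.
                weights[row][col] = (float) (-range + rng.nextDouble() * (2 * range));
            }
        }
        return weights;
    }
}

The model code additionally multiplies the output-layer weights by 4 for classification, on the grounds (per its own comment) that softmax behaves like a sigmoid and may need the extra factor.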
package hex.deeplearning; import hex.DataInfo; import static java.lang.Double.isNaN; import water.*; import water.fvec.Frame; import water.util.*; import java.util.Arrays; import java.util.Random; /** * This class contains the state of the Deep Learning model * This will be shared: one per node */ public class DeepLearningModelInfo extends Iced { public TwoDimTable summaryTable; public DataInfo data_info; public DataInfo data_info() { return data_info; } // model is described by parameters and the following arrays private Storage.DenseRowMatrix[] dense_row_weights; //one 2D weight matrix per layer (stored as a 1D array each) private Storage.DenseVector[] biases; //one 1D bias array per layer private Storage.DenseVector[] avg_activations; //one 1D array per hidden layer // helpers for storing previous step deltas // Note: These two arrays *could* be made transient and then initialized freshly in makeNeurons() and in DeepLearningTask.initLocal() // But then, after each reduction, the weights would be lost and would have to restart afresh -> not *exactly* right, but close... private Storage.DenseRowMatrix[] dense_row_weights_momenta; private Storage.DenseVector[] biases_momenta; // helpers for AdaDelta private Storage.DenseRowMatrix[] dense_row_ada_dx_g; private Storage.DenseVector[] biases_ada_dx_g; private boolean[] _saw_missing_cats; // whether missing value was encountered for each categorical predictor - needed for varimp // compute model size (number of model parameters required for making predictions) // momenta are not counted here, but they are needed for model building public long size() { long siz = 0; for (Storage.DenseRowMatrix w : dense_row_weights) if (w != null) siz += w.size(); for (Storage.Vector b : biases) siz += b.size(); return siz; } /** * Check whether a missing value was found for every categorical predictor * @param cats */ void checkMissingCats(int[] cats) { if (cats == null) return; if (_saw_missing_cats == null) return; for (int i=0; i<cats.length; ++i) { assert(data_info._catMissing[i] == 1); //have a missing bucket for each categorical if (_saw_missing_cats[i]) continue; _saw_missing_cats[i] = (cats[i] == data_info._catOffsets[i+1]-1); } } // accessors to (shared) weights and biases - those will be updated racily (c.f. Hogwild!) 
boolean has_momenta() { return get_params()._momentum_start != 0 || get_params()._momentum_stable != 0; } boolean adaDelta() { return get_params()._adaptive_rate; } public final Storage.DenseRowMatrix get_weights(int i) { return dense_row_weights[i]; } public final Storage.DenseVector get_biases(int i) { return biases[i]; } public final Storage.DenseRowMatrix get_weights_momenta(int i) { return dense_row_weights_momenta[i]; } public final Storage.DenseVector get_biases_momenta(int i) { return biases_momenta[i]; } public final Storage.DenseRowMatrix get_ada_dx_g(int i) { return dense_row_ada_dx_g[i]; } public final Storage.DenseVector get_biases_ada_dx_g(int i) { return biases_ada_dx_g[i]; } //accessor to shared parameter defining avg activations public final Storage.DenseVector get_avg_activations(int i) { return avg_activations[i]; } public DeepLearningParameters parameters; public final DeepLearningParameters get_params() { return parameters; } public final void set_params(DeepLearningParameters p) { parameters = (DeepLearningParameters) p.clone(); } private double[] mean_rate; private double[] rms_rate; private double[] mean_bias; private double[] rms_bias; private double[] mean_weight; public double[] rms_weight; public double[] mean_a; private volatile boolean unstable = false; public boolean unstable() { return unstable; } public void set_unstable() { if (!unstable) computeStats(); unstable = true; } private long processed_global; public synchronized long get_processed_global() { return processed_global; } public synchronized void set_processed_global(long p) { processed_global = p; } public synchronized void add_processed_global(long p) { processed_global += p; } private long processed_local; public synchronized long get_processed_local() { return processed_local; } public synchronized void set_processed_local(long p) { processed_local = p; } public synchronized void add_processed_local(long p) { processed_local += p; } public synchronized long get_processed_total() { return processed_global + processed_local; } // package local helpers int[] units; //number of neurons per layer, extracted from parameters and from datainfo final boolean _classification; // Classification cache (nclasses>1) final Frame _train; // Prepared training frame final Frame _valid; // Prepared validation frame /** * Dummy constructor, only to be used for deserialization from autobuffer */ private DeepLearningModelInfo() { super(); // key is null _classification = false; _train = _valid = null; } /** * Main constructor * @param params Model parameters * @param dinfo Data Info * @param nClasses number of classes (1 for regression, 0 for autoencoder) * @param train User-given training data frame, prepared by AdaptTestTrain * @param valid User-specified validation data frame, prepared by AdaptTestTrain */ public DeepLearningModelInfo(final DeepLearningParameters params, final DataInfo dinfo, int nClasses, Frame train, Frame valid) { _classification = nClasses > 1; _train = train; _valid = valid; data_info = dinfo; parameters = (DeepLearningParameters) params.clone(); //make a copy, don't change model's parameters DeepLearningParameters.Sanity.modifyParms(parameters, parameters, nClasses); //sanitize the model_info's parameters final int num_input = dinfo.fullN(); final int num_output = get_params()._autoencoder ? num_input : (_classification ? train.lastVec().cardinality() : 1); if (!get_params()._autoencoder) assert(num_output == nClasses); _saw_missing_cats = dinfo._cats > 0 ? 
new boolean[data_info._cats] : null; assert (num_input > 0); assert (num_output > 0); if (has_momenta() && adaDelta()) throw new IllegalArgumentException("Cannot have non-zero momentum and adaptive rate at the same time."); final int layers = get_params()._hidden.length; // units (# neurons for each layer) units = new int[layers + 2]; if (get_params()._max_categorical_features <= Integer.MAX_VALUE - dinfo._nums) units[0] = Math.min(dinfo._nums + get_params()._max_categorical_features, num_input); else units[0] = num_input; System.arraycopy(get_params()._hidden, 0, units, 1, layers); units[layers + 1] = num_output; boolean printLevels = units[0] > 1000L; boolean warn = units[0] > 100000L; if (printLevels) { final String[][] domains = dinfo._adaptedFrame.domains(); int[] levels = new int[domains.length]; for (int i = 0; i < levels.length; ++i) { levels[i] = domains[i] != null ? domains[i].length : 0; } Arrays.sort(levels); if (warn) { Log.warn("==================================================================================================================================="); Log.warn(num_input + " input features" + (dinfo._cats > 0 ? " (after categorical one-hot encoding)" : "") + ". Can be slow and require a lot of memory."); } if (levels[levels.length - 1] > 0) { int levelcutoff = levels[levels.length - 1 - Math.min(10, levels.length - 1)]; int count = 0; for (int i = 0; i < dinfo._adaptedFrame.numCols() - (get_params()._autoencoder ? 0 : 1) && count < 10; ++i) { if (dinfo._adaptedFrame.domains()[i] != null && dinfo._adaptedFrame.domains()[i].length >= levelcutoff) { if (warn) { Log.warn("Categorical feature '" + dinfo._adaptedFrame._names[i] + "' has cardinality " + dinfo._adaptedFrame.domains()[i].length + "."); } else { Log.info("Categorical feature '" + dinfo._adaptedFrame._names[i] + "' has cardinality " + dinfo._adaptedFrame.domains()[i].length + "."); } } count++; } } if (warn) { Log.warn("Suggestions:"); Log.warn(" *) Limit the size of the first hidden layer"); if (dinfo._cats > 0) { Log.warn(" *) Limit the total number of one-hot encoded features with the parameter 'max_categorical_features'"); Log.warn(" *) Run h2o.interaction(...,pairwise=F) on high-cardinality categorical columns to limit the factor count, see http://learn.h2o.ai"); } Log.warn("==================================================================================================================================="); } } int[] mult = new int[layers + 1]; for (int i=0;i<layers;++i) { mult[i] = (get_params()._activation == DeepLearningParameters.Activation.Maxout || get_params()._activation == DeepLearningParameters.Activation.MaxoutWithDropout) ? 
2 : 1; } mult[layers]=1; //Output is never Maxout // weights (to connect layers) dense_row_weights = new Storage.DenseRowMatrix[layers + 1]; dense_row_weights[0] = new Storage.DenseRowMatrix(mult[0]*units[1], units[0]); for (int i = 1; i <= layers; ++i) dense_row_weights[i] = new Storage.DenseRowMatrix(mult[i] * units[i + 1] /*rows*/, units[i] /*cols*/); // biases (only for hidden layers and output layer) biases = new Storage.DenseVector[layers + 1]; for (int i = 0; i <= layers; ++i) biases[i] = new Storage.DenseVector(mult[i] * units[i+1]); // average activation (only for hidden layers) if (get_params()._autoencoder && get_params()._sparsity_beta > 0) { avg_activations = new Storage.DenseVector[layers]; mean_a = new double[layers]; for (int i = 0; i < layers; ++i) avg_activations[i] = new Storage.DenseVector(mult[i] * units[i + 1]); } allocateHelperArrays(); // for diagnostics mean_rate = new double[units.length-1]; rms_rate = new double[units.length-1]; mean_bias = new double[units.length-1]; rms_bias = new double[units.length-1]; mean_weight = new double[units.length-1]; rms_weight = new double[units.length-1]; } // deep clone all weights/biases DeepLearningModelInfo deep_clone() { AutoBuffer ab = new AutoBuffer(); this.write(ab); ab.flipForReading(); return (DeepLearningModelInfo) new DeepLearningModelInfo().read(ab); } /** * Allocate helper arrays for momentum/learning rate, etc. */ void allocateHelperArrays() { int[] mult = new int[units.length-1]; for (int i=0;i<units.length-1;++i) { mult[i] = (get_params()._activation == DeepLearningParameters.Activation.Maxout || get_params()._activation == DeepLearningParameters.Activation.MaxoutWithDropout) ? 2 : 1; } mult[units.length-2]=1; //Output is never Maxout if (has_momenta()) { dense_row_weights_momenta = new Storage.DenseRowMatrix[dense_row_weights.length]; if (dense_row_weights[0] != null) dense_row_weights_momenta[0] = new Storage.DenseRowMatrix(mult[0]*units[1], units[0]); for (int i = 1; i < dense_row_weights_momenta.length; ++i) dense_row_weights_momenta[i] = new Storage.DenseRowMatrix(mult[i]*units[i + 1], units[i]); biases_momenta = new Storage.DenseVector[biases.length]; for (int i = 0; i < biases_momenta.length; ++i) biases_momenta[i] = new Storage.DenseVector(mult[i]*units[i + 1]); } else if (adaDelta()) { dense_row_ada_dx_g = new Storage.DenseRowMatrix[dense_row_weights.length]; //AdaGrad dense_row_ada_dx_g[0] = new Storage.DenseRowMatrix(mult[0]*2*units[1], units[0]); for (int i = 1; i < dense_row_ada_dx_g.length; ++i) { dense_row_ada_dx_g[i] = new Storage.DenseRowMatrix(mult[i]*units[i + 1], 2 * units[i]); } biases_ada_dx_g = new Storage.DenseVector[biases.length]; for (int i = 0; i < biases_ada_dx_g.length; ++i) { biases_ada_dx_g[i] = new Storage.DenseVector(mult[i]*2* units[i + 1]); } } } /** * Create a summary table * @return */ TwoDimTable createSummaryTable() { computeStats(); Neurons[] neurons = DeepLearningTask.makeNeuronsForTesting(this); long byte_size = new AutoBuffer().put(this).buf().length; TwoDimTable table = new TwoDimTable( "Status of Neuron Layers", (!get_params()._autoencoder ? ("predicting " + _train.lastVecName() + ", ") : "") + (get_params()._autoencoder ? "auto-encoder" : _classification ? 
(units[units.length - 1] + "-class classification") : "regression") + ", " + get_params()._distribution + " distribution, " + get_params()._loss + " loss, " + String.format("%,d", size()) + " weights/biases, " + PrettyPrint.bytes(byte_size) + ", " + String.format("%,d", get_processed_global()) + " training samples, " + "mini-batch size " + String.format("%,d", get_params()._mini_batch_size), new String[neurons.length], new String[]{"Layer", "Units", "Type", "Dropout", "L1", "L2", "Mean Rate", "Rate RMS", "Momentum", "Mean Weight", "Weight RMS", "Mean Bias", "Bias RMS" }, new String[]{"int", "int", "string", "double", "double", "double", "double", "double", "double", "double", "double", "double", "double" }, new String[]{"%d", "%d", "%s", "%2.2f %%", "%5f", "%5f", "%5f", "%5f", "%5f", "%5f", "%5f", "%5f", "%5f"}, ""); for (int i = 0; i < neurons.length; ++i) { table.set(i, 0, i + 1); table.set(i, 1, neurons[i].units); table.set(i, 2, neurons[i].getClass().getSimpleName()); if (i == 0) { table.set(i, 3, neurons[i].params._input_dropout_ratio * 100); continue; } else if (i < neurons.length - 1) { if (neurons[i].params._hidden_dropout_ratios == null) { table.set(i, 3, 0); } else { table.set(i, 3, neurons[i].params._hidden_dropout_ratios[i - 1] * 100); } } table.set(i, 4, neurons[i].params._l1); table.set(i, 5, neurons[i].params._l2); table.set(i, 6, (get_params()._adaptive_rate ? mean_rate[i-1] : neurons[i].rate(get_processed_total()))); table.set(i, 7, (get_params()._adaptive_rate ? rms_rate[i-1] : 0)); table.set(i, 8, get_params()._adaptive_rate ? 0 : neurons[i].momentum(get_processed_total())); table.set(i, 9, mean_weight[i-1]); table.set(i, 10, rms_weight[i-1]); table.set(i, 11, mean_bias[i-1]); table.set(i, 12, rms_bias[i-1]); } summaryTable = table; return summaryTable; } /** * Print a summary table * @return String containing ASCII version of summary table */ @Override public String toString() { StringBuilder sb = new StringBuilder(); if (!get_params()._quiet_mode) { if (get_params()._sparsity_beta > 0) { for (int k = 0; k < get_params()._hidden.length; k++) { sb.append("Average activation in hidden layer ").append(k).append(" is ").append(mean_a[k]).append(" \n"); } } createSummaryTable(); sb.append(summaryTable.toString(1)); } return sb.toString(); } /** * Debugging printout * @return String with useful info */ public String toStringAll() { StringBuilder sb = new StringBuilder(); sb.append(toString()); for (int i = 0; i < units.length - 1; ++i) sb.append("\nweights[").append(i).append("][]=").append(Arrays.toString(get_weights(i).raw())); for (int i = 0; i < units.length - 1; ++i) { sb.append("\nbiases[").append(i).append("][]=").append(Arrays.toString(get_biases(i).raw())); } if (has_momenta()) { for (int i = 0; i < units.length - 1; ++i) sb.append("\nweights_momenta[").append(i).append("][]=").append(Arrays.toString(get_weights_momenta(i).raw())); } if (biases_momenta != null) { for (int i = 0; i < units.length - 1; ++i) { sb.append("\nbiases_momenta[").append(i).append("][]=").append(Arrays.toString(biases_momenta[i].raw())); } } sb.append("\nunits[]=").append(Arrays.toString(units)); sb.append("\nprocessed global: ").append(get_processed_global()); sb.append("\nprocessed local: ").append(get_processed_local()); sb.append("\nprocessed total: ").append(get_processed_total()); sb.append("\n"); return sb.toString(); } /** * Initialize weights/biases */ void initializeMembers() { randomizeWeights(); //TODO: determine good/optimal/best initialization scheme for biases // hidden layers 
for (int i = 0; i < get_params()._hidden.length; ++i) { if (get_params()._activation == DeepLearningParameters.Activation.Rectifier || get_params()._activation == DeepLearningParameters.Activation.RectifierWithDropout || get_params()._activation == DeepLearningParameters.Activation.Maxout || get_params()._activation == DeepLearningParameters.Activation.MaxoutWithDropout ) { // Arrays.fill(biases[i], 1.); //old behavior Arrays.fill(biases[i].raw(), i == 0 ? 0.5f : 1f); //new behavior, might be slightly better } else if (get_params()._activation == DeepLearningParameters.Activation.Tanh || get_params()._activation == DeepLearningParameters.Activation.TanhWithDropout) { Arrays.fill(biases[i].raw(), 0f); } } Arrays.fill(biases[biases.length - 1].raw(), 0f); //output layer } /** * Add another model info into this * This will add the weights/biases/learning rate helpers, and the number of processed training samples * Note: It will NOT add the elastic averaging helpers, which are always kept constant (they already are the result of a reduction) * @param other */ public void add(DeepLearningModelInfo other) { for (int i = 0; i < dense_row_weights.length; ++i) ArrayUtils.add(get_weights(i).raw(), other.get_weights(i).raw()); for (int i = 0; i < biases.length; ++i) ArrayUtils.add(biases[i].raw(), other.biases[i].raw()); if (avg_activations != null) for (int i = 0; i < avg_activations.length; ++i) ArrayUtils.add(avg_activations[i].raw(), other.biases[i].raw()); if (has_momenta()) { assert (other.has_momenta()); for (int i = 0; i < dense_row_weights_momenta.length; ++i) ArrayUtils.add(get_weights_momenta(i).raw(), other.get_weights_momenta(i).raw()); for (int i = 0; i < biases_momenta.length; ++i) ArrayUtils.add(biases_momenta[i].raw(), other.biases_momenta[i].raw()); } if (adaDelta()) { assert (other.adaDelta()); for (int i = 0; i < dense_row_ada_dx_g.length; ++i) { ArrayUtils.add(get_ada_dx_g(i).raw(), other.get_ada_dx_g(i).raw()); } } add_processed_local(other.get_processed_local()); } /** * Multiply all weights/biases by a real-valued number * @param N */ protected void mult(double N) { div(1 / N); } /** * Divide all weights/biases by a real-valued number * @param N */ protected void div(double N) { for (int i = 0; i < dense_row_weights.length; ++i) ArrayUtils.div(get_weights(i).raw(), (float)N); for (Storage.Vector bias : biases) ArrayUtils.div(bias.raw(), N); if (avg_activations != null) for (Storage.Vector avgac : avg_activations) ArrayUtils.div(avgac.raw(), N); if (has_momenta()) { for (int i = 0; i < dense_row_weights_momenta.length; ++i) ArrayUtils.div(get_weights_momenta(i).raw(), (float)N); for (Storage.Vector bias_momenta : biases_momenta) ArrayUtils.div(bias_momenta.raw(), N); } if (adaDelta()) { for (int i = 0; i < dense_row_ada_dx_g.length; ++i) { ArrayUtils.div(get_ada_dx_g(i).raw(), (float)N); } } } double uniformDist(Random rand, double min, double max) { return min + rand.nextFloat() * (max - min); } /** * Initialization of neural net weights * cf. http://machinelearning.wustl.edu/mlpapers/paper_files/AISTATS2010_GlorotB10.pdf */ private void randomizeWeights() { for (int w = 0; w < dense_row_weights.length; ++w) { final Random rng = water.util.RandomUtils.getRNG(get_params()._seed + 0xBAD5EED + w + 1); //to match NeuralNet behavior final double range = Math.sqrt(6. 
/ (units[w] + units[w + 1])); for (int i = 0; i < get_weights(w).rows(); i++) { for (int j = 0; j < get_weights(w).cols(); j++) { if (get_params()._initial_weight_distribution == DeepLearningParameters.InitialWeightDistribution.UniformAdaptive) { // cf. http://machinelearning.wustl.edu/mlpapers/paper_files/AISTATS2010_GlorotB10.pdf if (w == dense_row_weights.length - 1 && _classification) get_weights(w).set(i, j, (float) (4. * uniformDist(rng, -range, range))); //Softmax might need an extra factor 4, since it's like a sigmoid else get_weights(w).set(i, j, (float) uniformDist(rng, -range, range)); } else if (get_params()._initial_weight_distribution == DeepLearningParameters.InitialWeightDistribution.Uniform) { get_weights(w).set(i, j, (float) uniformDist(rng, -get_params()._initial_weight_scale, get_params()._initial_weight_scale)); } else if (get_params()._initial_weight_distribution == DeepLearningParameters.InitialWeightDistribution.Normal) { get_weights(w).set(i, j, (float) (rng.nextGaussian() * get_params()._initial_weight_scale)); } } } } } // TODO: Add "subset randomize" function // int count = Math.min(15, _previous.units); // double min = -.1f, max = +.1f; // //double min = -1f, max = +1f; // for( int o = 0; o < units; o++ ) { // for( int n = 0; n < count; n++ ) { // int i = rand.nextInt(_previous.units); // int w = o * _previous.units + i; // _w[w] = uniformDist(rand, min, max); // } // } /** * Compute Variable Importance, based on * GEDEON: DATA MINING OF INPUTS: ANALYSING MAGNITUDE AND FUNCTIONAL MEASURES * * @return variable importances for input features */ public float[] computeVariableImportances() { float[] vi = new float[units[0]]; Arrays.fill(vi, 0f); float[][] Qik = new float[units[0]][units[2]]; //importance of input i on output k float[] sum_wj = new float[units[1]]; //sum of incoming weights into first hidden layer float[] sum_wk = new float[units[2]]; //sum of incoming weights into output layer (or second hidden layer) for (float[] Qi : Qik) Arrays.fill(Qi, 0f); Arrays.fill(sum_wj, 0f); Arrays.fill(sum_wk, 0f); // compute sum of absolute incoming weights for (int j = 0; j < units[1]; j++) { for (int i = 0; i < units[0]; i++) { float wij = get_weights(0).get(j, i); sum_wj[j] += Math.abs(wij); } } for (int k = 0; k < units[2]; k++) { for (int j = 0; j < units[1]; j++) { float wjk = get_weights(1).get(k, j); sum_wk[k] += Math.abs(wjk); } } // compute importance of input i on output k as product of connecting weights going through j for (int i = 0; i < units[0]; i++) { for (int k = 0; k < units[2]; k++) { for (int j = 0; j < units[1]; j++) { float wij = get_weights(0).get(j, i); float wjk = get_weights(1).get(k, j); //Qik[i][k] += Math.abs(wij)/sum_wj[j] * wjk; //Wong,Gedeon,Taggart '95 Qik[i][k] += Math.abs(wij) / sum_wj[j] * Math.abs(wjk) / sum_wk[k]; //Gedeon '97 } } } // normalize Qik over all outputs k for (int k = 0; k < units[2]; k++) { float sumQk = 0; for (int i = 0; i < units[0]; i++) sumQk += Qik[i][k]; for (int i = 0; i < units[0]; i++) Qik[i][k] /= sumQk; } // importance for feature i is the sum over k of i->k importances for (int i = 0; i < units[0]; i++) vi[i] = ArrayUtils.sum(Qik[i]); //normalize importances such that max(vi) = 1 ArrayUtils.div(vi, ArrayUtils.maxValue(vi)); // zero out missing categorical variables if they were never seen if (_saw_missing_cats != null) { for (int i = 0; i < _saw_missing_cats.length; ++i) { assert (data_info._catMissing[i] == 1); //have a missing bucket for each categorical if (!_saw_missing_cats[i]) 
vi[data_info._catOffsets[i + 1] - 1] = 0; } } return vi; } /** * Compute statistics about this model on all nodes */ public void computeStats() { float[][] rate = get_params()._adaptive_rate ? new float[units.length - 1][] : null; if (get_params()._autoencoder && get_params()._sparsity_beta > 0) { for (int k = 0; k < get_params()._hidden.length; k++) { mean_a[k] = 0; for (int j = 0; j < avg_activations[k].size(); j++) mean_a[k] += avg_activations[k].get(j); mean_a[k] /= avg_activations[k].size(); } } for (int y = 0; y < units.length-1; y++) { mean_rate[y] = rms_rate[y] = 0; mean_bias[y] = rms_bias[y] = 0; mean_weight[y] = rms_weight[y] = 0; for (int u = 0; u < biases[y].size(); u++) { mean_bias[y] += biases[y].get(u); } if (rate != null) rate[y] = new float[get_weights(y).raw().length]; for (int u = 0; u < get_weights(y).raw().length; u++) { mean_weight[y] += get_weights(y).raw()[u]; if (rate != null) { // final float RMS_dx = (float)Math.sqrt(ada[y][2*u]+(float)get_params().epsilon); // final float invRMS_g = (float)(1/Math.sqrt(ada[y][2*u+1]+(float)get_params().epsilon)); final float RMS_dx = MathUtils.approxSqrt(get_ada_dx_g(y).raw()[2 * u] + (float) get_params()._epsilon); final float invRMS_g = MathUtils.approxInvSqrt(get_ada_dx_g(y).raw()[2 * u + 1] + (float) get_params()._epsilon); rate[y][u] = RMS_dx * invRMS_g; //not exactly right, RMS_dx should be from the previous time step -> but close enough for diagnostics. mean_rate[y] += rate[y][u]; } } mean_bias[y] /= biases[y].size(); mean_weight[y] /= get_weights(y).size(); if (rate != null) mean_rate[y] /= rate[y].length; for (int u = 0; u < biases[y].size(); u++) { final double db = biases[y].get(u) - mean_bias[y]; rms_bias[y] += db * db; } for (int u = 0; u < get_weights(y).size(); u++) { final double dw = get_weights(y).raw()[u] - mean_weight[y]; rms_weight[y] += dw * dw; if (rate != null) { final double drate = rate[y][u] - mean_rate[y]; rms_rate[y] += drate * drate; } } rms_bias[y] = MathUtils.approxSqrt(rms_bias[y] / biases[y].size()); rms_weight[y] = MathUtils.approxSqrt(rms_weight[y] / get_weights(y).size()); if (rate != null) rms_rate[y] = MathUtils.approxSqrt(rms_rate[y]/ rate[y].length); // rms_bias[y] = (float)Math.sqrt(rms_bias[y]/biases[y].length); // rms_weight[y] = (float)Math.sqrt(rms_weight[y]/weights[y].length); // if (rate != null) rms_rate[y] = (float)Math.sqrt(rms_rate[y]/rate[y].length); // Abort the run if weights or biases are unreasonably large (Note that all input values are normalized upfront) // This can happen with Rectifier units when L1/L2/max_w2 are all set to 0, especially when using more than 1 hidden layer. 
final double thresh = 1e10; unstable |= mean_bias[y] > thresh || isNaN(mean_bias[y]) || rms_bias[y] > thresh || isNaN(rms_bias[y]) || mean_weight[y] > thresh || isNaN(mean_weight[y]) || rms_weight[y] > thresh || isNaN(rms_weight[y]); } } /** * Unique identifier for this model's state, based on raw numbers */ protected long checksum_impl() { computeStats(); Random rng = new Random(0xDECAFBBB); double cs = Double.longBitsToDouble(get_params()._seed); cs += size() * get_processed_total(); for (double d : mean_bias) cs += (rng.nextDouble() * (d+123.23)); for (double d : rms_bias) cs += (rng.nextDouble() * (d+123.23)); for (double d : mean_weight) cs += (rng.nextDouble() * (d+123.23)); for (double d : rms_weight) cs += (rng.nextDouble() * (d+123.23)); for (double d : mean_rate) cs += (rng.nextDouble() * (d+123.23)); for (double d : rms_rate) cs += (rng.nextDouble() * (d+123.23)); return Double.doubleToRawLongBits(cs); } /** * TimeAveraging as part of Elastic Averaging Algorithm * Cf. equation 6 of arXiv:1412.6651v5 * @param nodeAverageModel current average of per-node models * @return Time-average of node-averages (consensus model, "the" model) */ public static DeepLearningModelInfo timeAverage(DeepLearningModelInfo nodeAverageModel) { float pa = (float) nodeAverageModel.get_params()._elastic_averaging_moving_rate; assert(pa > 0 && pa <= 1); DeepLearningModelInfo elasticAverage = DKV.getGet(nodeAverageModel.elasticAverageModelInfoKey()); //get latest version from DKV if (elasticAverage == null || pa == 1) { elasticAverage = nodeAverageModel.deep_clone(); } else { nodeAverageModel.mult(pa); elasticAverage.mult(1 - pa); elasticAverage.add(nodeAverageModel); //ignore processed local value set here elasticAverage.set_processed_global(nodeAverageModel.get_processed_global()); } elasticAverage.set_processed_local(0); DKV.put(elasticAverage.elasticAverageModelInfoKey(), elasticAverage); // nodeAverageModel.computeStats(); // elasticAverage.computeStats(); // Log.info("Local Model :\n" + nodeAverageModel.toString()); // Log.info("Elastic Average:\n" + elasticAverage.toString()); return elasticAverage; } public Key localModelInfoKey(H2ONode node) { return Key.make(get_params()._model_id + ".node" + node.index(), (byte) 1 /*replica factor*/, (byte) 31 /*hidden user-key*/, true, node); } public Key elasticAverageModelInfoKey() { return Key.make(get_params()._model_id + ".elasticaverage", (byte) 1 /*replica factor*/, (byte) 31 /*hidden user-key*/, true, H2O.CLOUD._memary[0]); } static public class GradientCheck { GradientCheck(int l, int r, int c) { layer=l; row=r; col=c; gradient=0;} int layer; int row; int col; double gradient; void apply(int l, int r, int c, double g) { if (r==row && c==col && l==layer) { assert(gradient == 0); //there can only be one match gradient = g; } } } static public GradientCheck gradientCheck = null; }
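One detail worth spelling out from the class above: timeAverage() is an exponential moving average over the model parameters, newConsensus = pa * nodeAverage + (1 - pa) * oldConsensus, where pa comes from _elastic_averaging_moving_rate (see the elastic averaging paper cited in its javadoc). The sketch below replays that update on plain arrays. It is an illustration only, not H2O code; ElasticAverageSketch and movingAverage are invented names.

// Illustrative-only sketch of the elastic-averaging update used in timeAverage():
// newConsensus = pa * nodeAverage + (1 - pa) * oldConsensus, applied element-wise.
import java.util.Arrays;

public final class ElasticAverageSketch {

    static double[] movingAverage(double[] consensus, double[] nodeAverage, double pa) {
        if (consensus == null || pa == 1.0) {
            // first round (or pa == 1): adopt the node average wholesale
            return nodeAverage.clone();
        }
        double[] updated = new double[consensus.length];
        for (int i = 0; i < consensus.length; i++) {
            updated[i] = pa * nodeAverage[i] + (1 - pa) * consensus[i];
        }
        return updated;
    }

    public static void main(String[] args) {
        double[] consensus = {1.0, 2.0};
        double[] nodeAvg = {3.0, 0.0};
        // With pa = 0.9 the consensus moves 90% of the way toward the node average.
        System.out.println(Arrays.toString(movingAverage(consensus, nodeAvg, 0.9))); // ~[2.8, 0.2]
    }
}

With pa = 1 the consensus is simply replaced by the node average, matching the pa == 1 branch in timeAverage().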
PUBEV-2010: Fix the instability detection.
h2o-algos/src/main/java/hex/deeplearning/DeepLearningModelInfo.java
PUBEV-2010: Fix the instability detection.
<ide><path>h2o-algos/src/main/java/hex/deeplearning/DeepLearningModelInfo.java <ide> // Abort the run if weights or biases are unreasonably large (Note that all input values are normalized upfront) <ide> // This can happen with Rectifier units when L1/L2/max_w2 are all set to 0, especially when using more than 1 hidden layer. <ide> final double thresh = 1e10; <del> unstable |= mean_bias[y] > thresh || isNaN(mean_bias[y]) <del> || rms_bias[y] > thresh || isNaN(rms_bias[y]) <del> || mean_weight[y] > thresh || isNaN(mean_weight[y]) <add> final double bthresh = 1000; <add> unstable |= Math.abs(mean_bias[y]) > bthresh || isNaN(mean_bias[y]) <add> || rms_bias[y] > bthresh || isNaN(rms_bias[y]) <add> || Math.abs(mean_weight[y]) > thresh || isNaN(mean_weight[y]) <ide> || rms_weight[y] > thresh || isNaN(rms_weight[y]); <ide> } <ide> }
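The diff above is the whole of the fix: biases get a much tighter divergence threshold (1000 instead of 1e10), and the mean statistics are compared by absolute value so that large negative drift also flags the model as unstable. A self-contained sketch of the resulting check follows. It is an illustration rather than H2O's API; InstabilityCheck and LayerStats are invented names, and only the thresholds and NaN handling are taken from the diff.

// Hypothetical sketch of the per-layer instability check introduced in the diff above.
// InstabilityCheck and LayerStats are illustrative names, not part of H2O.
public final class InstabilityCheck {

    static final class LayerStats {
        double meanBias, rmsBias, meanWeight, rmsWeight;
        LayerStats(double mb, double rb, double mw, double rw) {
            meanBias = mb; rmsBias = rb; meanWeight = mw; rmsWeight = rw;
        }
    }

    // Weights may legitimately grow large, so they keep the loose 1e10 threshold;
    // biases should stay small, hence the much tighter 1000 threshold from the fix.
    // Means can be negative, so they are compared by absolute value; RMS values are
    // non-negative and compared directly.
    static boolean isUnstable(LayerStats s) {
        final double weightThresh = 1e10;
        final double biasThresh = 1000;
        return Math.abs(s.meanBias) > biasThresh || Double.isNaN(s.meanBias)
                || s.rmsBias > biasThresh || Double.isNaN(s.rmsBias)
                || Math.abs(s.meanWeight) > weightThresh || Double.isNaN(s.meanWeight)
                || s.rmsWeight > weightThresh || Double.isNaN(s.rmsWeight);
    }

    public static void main(String[] args) {
        System.out.println(isUnstable(new LayerStats(0.1, 0.2, 0.05, 0.3)));   // healthy layer
        System.out.println(isUnstable(new LayerStats(-5000, 0.2, 0.05, 0.3))); // bias mean diverged
        System.out.println(isUnstable(new LayerStats(Double.NaN, 0, 0, 0)));   // NaN always trips the check
    }
}

Compiled and run as-is it prints false, true, true for the three layers.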
Java
apache-2.0
3bc3aff9c7c9db6d25efc96295fb5c6c517f19ad
0
deltaDNA/android-sdk,deltaDNA/android-sdk,deltaDNA/android-sdk
/* * Copyright (c) 2016 deltaDNA Ltd. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.deltadna.android.sdk; import android.os.AsyncTask; import androidx.annotation.Nullable; import android.os.Handler; import android.os.Looper; import android.util.Log; import com.deltadna.android.sdk.helpers.ClientInfo; import com.deltadna.android.sdk.listeners.EngageListener; import com.deltadna.android.sdk.listeners.RequestListener; import com.deltadna.android.sdk.net.CancelableRequest; import com.deltadna.android.sdk.net.NetworkManager; import com.deltadna.android.sdk.net.Response; import com.deltadna.android.sdk.util.CloseableIterator; import org.json.JSONException; import org.json.JSONObject; import java.util.Locale; import java.util.concurrent.*; import java.util.concurrent.atomic.AtomicReference; /** * Class which handles collect and engage events, ensuring that collect * events are saved to the store, and uploaded as appropriate, and that * engage requests go through the archive for caching purposes. */ final class EventHandler { private static final String TAG = BuildConfig.LOG_TAG + ' ' + EventHandler.class.getSimpleName(); private final ScheduledExecutorService executor = new ScheduledThreadPoolExecutor(1, r -> new Thread( r, EventHandler.class.getSimpleName())); private final Handler mainThreadTaskHandler = new Handler(Looper.getMainLooper()); private final EventStore events; private final EngageStore engagements; private final NetworkManager network; @Nullable private ScheduledFuture<?> uploadTask; @Nullable private Future<?> upload; EventHandler( EventStore events, EngageStore engagements, NetworkManager network) { this.events = events; this.engagements = engagements; this.network = network; } /** * Starts automatic background event uploads. * * @param startDelay start delay in seconds * @param repeatRate repeat rate in seconds */ synchronized void start(int startDelay, int repeatRate) { cancelUploadTask(); Log.d(TAG, "Starting scheduled event uploads"); uploadTask = executor.scheduleWithFixedDelay( new Upload(), startDelay, repeatRate, TimeUnit.SECONDS); } /** * Stops automatic background event uploads. * * @param dispatch if {@code true} events will be dispatched * after stopping */ synchronized void stop(boolean dispatch) { Log.d(TAG, "Stopping scheduled event uploads"); cancelUploadTask(); if (dispatch) { dispatch(); } } /** * Dispatches any events immediately. */ synchronized void dispatch() { if (uploadTask != null) { Log.w(TAG, "Event uploads are currently scheduled"); } if (upload == null || upload.isDone()) { Log.d(TAG, "Submitting immediate events upload"); upload = executor.submit(new Upload()); } } /** * Handles a collect {@code event} by placing into the queue, * to be sent at a later time. */ void handleEvent(JSONObject event) { events.add(event.toString()); } /** * Handles an engage {@code event}. 
*/ public class HandleEngagementTask<E extends Engagement> extends AsyncTask<Void , Void, Void> { final E engagement; final EngageListener<E> listener; String userId; String sessionId; final int engageApiVersion; String sdkVersion; String platform; public HandleEngagementTask( final E engagement, final EngageListener<E> listener, String userId, String sessionId, final int engageApiVersion, String sdkVersion, String platform) { this.engagement = engagement; this.listener = listener; this.userId = userId; this.sessionId = sessionId; this.engageApiVersion = engageApiVersion; this.sdkVersion = sdkVersion; this.platform = platform; } @Override protected Void doInBackground(Void... voids) { final JSONObject event; try { event = new JSONObject() .put("userID", userId) .put("decisionPoint", engagement.name) .put("flavour", engagement.flavour) .put("sessionID", sessionId) .put("version", engageApiVersion) .put("sdkVersion", sdkVersion) .put("platform", platform) .put("manufacturer", ClientInfo.manufacturer()) .put("operatingSystemVersion", ClientInfo.operatingSystemVersion()) .put("timezoneOffset", ClientInfo.timezoneOffset()) .put("locale", ClientInfo.locale()); if (!engagement.params.isEmpty()) { event.put("parameters", engagement.params.json); } } catch (JSONException e) { // should never happen due to params enforcement throw new IllegalArgumentException(e); } network.engage(event, new RequestListener<JSONObject>() { @Override public void onCompleted(Response<JSONObject> result) { engagement.setResponse(result); if (engagement.isSuccessful()) { engagements.put(engagement); } else if (engagement.isCacheCandidate() ){ Log.w(TAG, String.format( Locale.US, "Not caching %s due to failure, checking cache", engagement)); final JSONObject cached = engagements.get(engagement); if (cached != null) { try { engagement.setResponse(new Response<>( engagement.getStatusCode(), true, null, cached.put("isCachedResponse", true), engagement.getError())); Log.d( TAG, "Using cached response " + engagement.getJson()); } catch (JSONException ignored) {} } } else { Log.w(TAG, String.format( Locale.US, "Not caching %s due to failure, and not checking cache due to client error response", engagement)); } listener.onCompleted(engagement); } @Override public void onError(Throwable t) { // This needs to be run off the main thread, as it involves blocking database // operations that can cause ANRs. 
executor.execute(() -> { final JSONObject cached = engagements.get(engagement); if (cached != null) { try { engagement.setResponse(new Response<>( 200, true, null, cached.put("isCachedResponse", true), null)); Log.d(TAG, "Using cached response " + engagement.getJson()); mainThreadTaskHandler.post(() -> listener.onCompleted(engagement)); } catch (JSONException e) { mainThreadTaskHandler.post(() -> listener.onError(e)); } } else { mainThreadTaskHandler.post(() -> listener.onError(t)); } }); } }, "config".equalsIgnoreCase(engagement.name) && "internal".equalsIgnoreCase(engagement.flavour)); return null; } } <E extends Engagement> void handleEngagement( final E engagement, final EngageListener<E> listener, String userId, String sessionId, final int engageApiVersion, String sdkVersion, String platform) { new HandleEngagementTask<E>(engagement, listener, userId, sessionId, engageApiVersion, sdkVersion, platform).execute(); } private void cancelUploadTask() { if (uploadTask != null) { if (uploadTask.cancel(false)) { Log.d(TAG, "Cancelled scheduled upload task"); } else { Log.w(TAG, "Failed to cancel scheduled upload task"); } uploadTask = null; } } private final class Upload implements Runnable { @Override public void run() { Log.v(TAG, "Starting event upload"); final CloseableIterator<EventStoreItem> items = events.items(); final AtomicReference<CloseableIterator.Mode> clearEvents = new AtomicReference<>(CloseableIterator.Mode.ALL); try { if (!items.hasNext()) { Log.d(TAG, "No stored events to upload"); clearEvents.set(CloseableIterator.Mode.NONE); } final StringBuilder builder = new StringBuilder("{\"eventList\":["); int count = 0; while (items.hasNext()) { final EventStoreItem event = items.next(); if (event.available()) { final String content = event.get(); if (content != null) { builder.append(content); builder.append(','); count++; } else { Log.w(TAG, "Failed retrieving event, skipping"); } } else { Log.w(TAG, "Stored event not available, pausing"); clearEvents.set(CloseableIterator.Mode.UP_TO_CURRENT); break; } } if (builder.charAt(builder.length() - 1) == ',') { builder.deleteCharAt(builder.length() - 1); } builder.append("]}"); JSONObject payload = null; try { payload = new JSONObject(builder.toString()); } catch (JSONException e) { Log.w(TAG, e); clearEvents.set(CloseableIterator.Mode.NONE); } Log.d(TAG, "Uploading " + count + " events"); final CountDownLatch latch = new CountDownLatch(1); final CancelableRequest request = network.collect( payload, new RequestListener<Void>() { @Override public void onCompleted(Response<Void> result) { if (result.isSuccessful()) { Log.d(TAG, "Successfully uploaded events"); } else { Log.w(TAG, "Failed to upload events due to " + result); if (result.code == 400) { Log.w(TAG, "Wiping event store due to unrecoverable data"); clearEvents.set(CloseableIterator.Mode.ALL); } } latch.countDown(); } @Override public void onError(Throwable t) { Log.w(TAG, "Failed to upload events, will retry later", t); clearEvents.set(CloseableIterator.Mode.NONE); latch.countDown(); } }); try { latch.await(); } catch (InterruptedException e) { Log.w(TAG, "Cancelling event upload", e); clearEvents.set(CloseableIterator.Mode.NONE); request.cancel(); } } finally { Log.v(TAG, "Finished event upload"); items.close(clearEvents.get()); } } } }
library/src/main/java/com/deltadna/android/sdk/EventHandler.java
/* * Copyright (c) 2016 deltaDNA Ltd. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.deltadna.android.sdk; import android.os.AsyncTask; import androidx.annotation.Nullable; import android.os.Handler; import android.os.Looper; import android.util.Log; import com.deltadna.android.sdk.helpers.ClientInfo; import com.deltadna.android.sdk.listeners.EngageListener; import com.deltadna.android.sdk.listeners.RequestListener; import com.deltadna.android.sdk.net.CancelableRequest; import com.deltadna.android.sdk.net.NetworkManager; import com.deltadna.android.sdk.net.Response; import com.deltadna.android.sdk.util.CloseableIterator; import org.json.JSONException; import org.json.JSONObject; import java.util.Locale; import java.util.concurrent.*; import java.util.concurrent.atomic.AtomicReference; /** * Class which handles collect and engage events, ensuring that collect * events are saved to the store, and uploaded as appropriate, and that * engage requests go through the archive for caching purposes. */ final class EventHandler { private static final String TAG = BuildConfig.LOG_TAG + ' ' + EventHandler.class.getSimpleName(); private final ScheduledExecutorService executor = new ScheduledThreadPoolExecutor(1, r -> new Thread( r, EventHandler.class.getSimpleName())); private final Handler handler = new Handler(Looper.getMainLooper()); private final EventStore events; private final EngageStore engagements; private final NetworkManager network; @Nullable private ScheduledFuture<?> uploadTask; @Nullable private Future<?> upload; EventHandler( EventStore events, EngageStore engagements, NetworkManager network) { this.events = events; this.engagements = engagements; this.network = network; } /** * Starts automatic background event uploads. * * @param startDelay start delay in seconds * @param repeatRate repeat rate in seconds */ synchronized void start(int startDelay, int repeatRate) { cancelUploadTask(); Log.d(TAG, "Starting scheduled event uploads"); uploadTask = executor.scheduleWithFixedDelay( new Upload(), startDelay, repeatRate, TimeUnit.SECONDS); } /** * Stops automatic background event uploads. * * @param dispatch if {@code true} events will be dispatched * after stopping */ synchronized void stop(boolean dispatch) { Log.d(TAG, "Stopping scheduled event uploads"); cancelUploadTask(); if (dispatch) { dispatch(); } } /** * Dispatches any events immediately. */ synchronized void dispatch() { if (uploadTask != null) { Log.w(TAG, "Event uploads are currently scheduled"); } if (upload == null || upload.isDone()) { Log.d(TAG, "Submitting immediate events upload"); upload = executor.submit(new Upload()); } } /** * Handles a collect {@code event} by placing into the queue, * to be sent at a later time. */ void handleEvent(JSONObject event) { events.add(event.toString()); } /** * Handles an engage {@code event}. 
*/ public class HandleEngagementTask<E extends Engagement> extends AsyncTask<Void , Void, Void> { final E engagement; final EngageListener<E> listener; String userId; String sessionId; final int engageApiVersion; String sdkVersion; String platform; public HandleEngagementTask( final E engagement, final EngageListener<E> listener, String userId, String sessionId, final int engageApiVersion, String sdkVersion, String platform) { this.engagement = engagement; this.listener = listener; this.userId = userId; this.sessionId = sessionId; this.engageApiVersion = engageApiVersion; this.sdkVersion = sdkVersion; this.platform = platform; } @Override protected Void doInBackground(Void... voids) { final JSONObject event; try { event = new JSONObject() .put("userID", userId) .put("decisionPoint", engagement.name) .put("flavour", engagement.flavour) .put("sessionID", sessionId) .put("version", engageApiVersion) .put("sdkVersion", sdkVersion) .put("platform", platform) .put("manufacturer", ClientInfo.manufacturer()) .put("operatingSystemVersion", ClientInfo.operatingSystemVersion()) .put("timezoneOffset", ClientInfo.timezoneOffset()) .put("locale", ClientInfo.locale()); if (!engagement.params.isEmpty()) { event.put("parameters", engagement.params.json); } } catch (JSONException e) { // should never happen due to params enforcement throw new IllegalArgumentException(e); } network.engage(event, new RequestListener<JSONObject>() { @Override public void onCompleted(Response<JSONObject> result) { engagement.setResponse(result); if (engagement.isSuccessful()) { engagements.put(engagement); } else if (engagement.isCacheCandidate() ){ Log.w(TAG, String.format( Locale.US, "Not caching %s due to failure, checking cache", engagement)); final JSONObject cached = engagements.get(engagement); if (cached != null) { try { engagement.setResponse(new Response<>( engagement.getStatusCode(), true, null, cached.put("isCachedResponse", true), engagement.getError())); Log.d( TAG, "Using cached response " + engagement.getJson()); } catch (JSONException ignored) {} } } else { Log.w(TAG, String.format( Locale.US, "Not caching %s due to failure, and not checking cache due to client error response", engagement)); } listener.onCompleted(engagement); } @Override public void onError(Throwable t) { executor.execute(new Runnable() { @Override public void run() { final JSONObject cached = engagements.get(engagement); if (cached != null) { try { engagement.setResponse(new Response<>( 200, true, null, cached.put("isCachedResponse", true), null)); Log.d(TAG, "Using cached response " + engagement.getJson()); handler.post(new Runnable() { @Override public void run() { listener.onCompleted(engagement); } }); } catch (JSONException e) { handler.post(new Runnable() { @Override public void run() { listener.onError(e); } }); } } else { handler.post(new Runnable() { @Override public void run() { listener.onError(t); } }); } } }); } }, "config".equalsIgnoreCase(engagement.name) && "internal".equalsIgnoreCase(engagement.flavour)); return null; } } <E extends Engagement> void handleEngagement( final E engagement, final EngageListener<E> listener, String userId, String sessionId, final int engageApiVersion, String sdkVersion, String platform) { new HandleEngagementTask<E>(engagement, listener, userId, sessionId, engageApiVersion, sdkVersion, platform).execute(); } private void cancelUploadTask() { if (uploadTask != null) { if (uploadTask.cancel(false)) { Log.d(TAG, "Cancelled scheduled upload task"); } else { Log.w(TAG, "Failed to cancel scheduled 
upload task"); } uploadTask = null; } } private final class Upload implements Runnable { @Override public void run() { Log.v(TAG, "Starting event upload"); final CloseableIterator<EventStoreItem> items = events.items(); final AtomicReference<CloseableIterator.Mode> clearEvents = new AtomicReference<>(CloseableIterator.Mode.ALL); try { if (!items.hasNext()) { Log.d(TAG, "No stored events to upload"); clearEvents.set(CloseableIterator.Mode.NONE); } final StringBuilder builder = new StringBuilder("{\"eventList\":["); int count = 0; while (items.hasNext()) { final EventStoreItem event = items.next(); if (event.available()) { final String content = event.get(); if (content != null) { builder.append(content); builder.append(','); count++; } else { Log.w(TAG, "Failed retrieving event, skipping"); } } else { Log.w(TAG, "Stored event not available, pausing"); clearEvents.set(CloseableIterator.Mode.UP_TO_CURRENT); break; } } if (builder.charAt(builder.length() - 1) == ',') { builder.deleteCharAt(builder.length() - 1); } builder.append("]}"); JSONObject payload = null; try { payload = new JSONObject(builder.toString()); } catch (JSONException e) { Log.w(TAG, e); clearEvents.set(CloseableIterator.Mode.NONE); } Log.d(TAG, "Uploading " + count + " events"); final CountDownLatch latch = new CountDownLatch(1); final CancelableRequest request = network.collect( payload, new RequestListener<Void>() { @Override public void onCompleted(Response<Void> result) { if (result.isSuccessful()) { Log.d(TAG, "Successfully uploaded events"); } else { Log.w(TAG, "Failed to upload events due to " + result); if (result.code == 400) { Log.w(TAG, "Wiping event store due to unrecoverable data"); clearEvents.set(CloseableIterator.Mode.ALL); } } latch.countDown(); } @Override public void onError(Throwable t) { Log.w(TAG, "Failed to upload events, will retry later", t); clearEvents.set(CloseableIterator.Mode.NONE); latch.countDown(); } }); try { latch.await(); } catch (InterruptedException e) { Log.w(TAG, "Cancelling event upload", e); clearEvents.set(CloseableIterator.Mode.NONE); request.cancel(); } } finally { Log.v(TAG, "Finished event upload"); items.close(clearEvents.get()); } } } }
DDSDK-5: Tidy up the fix to be more readable
library/src/main/java/com/deltadna/android/sdk/EventHandler.java
DDSDK-5: Tidy up the fix to be more readable
<ide><path>library/src/main/java/com/deltadna/android/sdk/EventHandler.java <ide> r, <ide> EventHandler.class.getSimpleName())); <ide> <del> private final Handler handler = new Handler(Looper.getMainLooper()); <add> private final Handler mainThreadTaskHandler = new Handler(Looper.getMainLooper()); <ide> <ide> private final EventStore events; <ide> private final EngageStore engagements; <ide> <ide> @Override <ide> public void onError(Throwable t) { <del> executor.execute(new Runnable() { <del> @Override <del> public void run() { <del> final JSONObject cached = engagements.get(engagement); <del> if (cached != null) { <del> try { <del> engagement.setResponse(new Response<>( <del> 200, <del> true, <del> null, <del> cached.put("isCachedResponse", true), <del> null)); <del> <del> Log.d(TAG, "Using cached response " + engagement.getJson()); <del> <del> handler.post(new Runnable() { <del> @Override <del> public void run() { <del> listener.onCompleted(engagement); <del> } <del> }); <del> } catch (JSONException e) { <del> handler.post(new Runnable() { <del> @Override <del> public void run() { <del> listener.onError(e); <del> } <del> }); <del> } <del> } else { <del> handler.post(new Runnable() { <del> @Override <del> public void run() { <del> listener.onError(t); <del> } <del> }); <add> // This needs to be run off the main thread, as it involves blocking database <add> // operations that can cause ANRs. <add> executor.execute(() -> { <add> final JSONObject cached = engagements.get(engagement); <add> if (cached != null) { <add> try { <add> engagement.setResponse(new Response<>( <add> 200, <add> true, <add> null, <add> cached.put("isCachedResponse", true), <add> null)); <add> <add> Log.d(TAG, "Using cached response " + engagement.getJson()); <add> <add> mainThreadTaskHandler.post(() -> listener.onCompleted(engagement)); <add> } catch (JSONException e) { <add> mainThreadTaskHandler.post(() -> listener.onError(e)); <ide> } <add> } else { <add> mainThreadTaskHandler.post(() -> listener.onError(t)); <ide> } <ide> }); <ide> }
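The pattern behind this commit is a standard Android hand-off: run the blocking engage-store lookup on a background executor, then post only the listener callback back to the main thread through a Handler, so the main thread never blocks on database I/O. Below is a minimal sketch of that hand-off, compilable against the Android SDK; Callback and loadFromDiskCache are invented stand-ins for the SDK's listener and cache lookup, not deltaDNA APIs.

// Hypothetical sketch of the threading pattern applied in the commit above:
// blocking work runs on a background executor, results are posted back to the
// main thread via a Handler. Callback and loadFromDiskCache are illustrative
// stand-ins, not part of the deltaDNA SDK.
import android.os.Handler;
import android.os.Looper;
import org.json.JSONObject;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

final class BackgroundLookup {

    interface Callback {
        void onCompleted(JSONObject result);
        void onError(Throwable t);
    }

    private final ExecutorService executor = Executors.newSingleThreadExecutor();
    private final Handler mainThread = new Handler(Looper.getMainLooper());

    void lookup(String key, Callback callback, Throwable originalError) {
        // Never touch the database on the main thread: that is what caused the ANRs.
        executor.execute(() -> {
            JSONObject cached = loadFromDiskCache(key); // blocking I/O, safe on this thread
            if (cached != null) {
                mainThread.post(() -> callback.onCompleted(cached));
            } else {
                mainThread.post(() -> callback.onError(originalError));
            }
        });
    }

    private JSONObject loadFromDiskCache(String key) {
        return null; // placeholder for a real (blocking) database read
    }
}

Posting only the final callback keeps the listener semantics identical to before while moving the ANR-prone work off the UI thread.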
Java
apache-2.0
6330fc64267670d037507cc89d650c261dfed427
0
bootique/bootique,nhl/bootique,ebondareva/bootique,bootique/bootique
package io.bootique.command; import io.bootique.cli.Cli; public class CommandOutcome { // UNIX success exit code private static final int SUCCESS_EXIT_CODE = 0; private static final CommandOutcome SUCCESS = new CommandOutcome(SUCCESS_EXIT_CODE, false, null, null); private final String message; private final int exitCode; private final Throwable exception; private final boolean forkedToBackground; private CommandOutcome(int exitCode, boolean forkedToBackground, String message, Throwable exception) { this.forkedToBackground = forkedToBackground; this.message = message; this.exitCode = exitCode; this.exception = exception; } /** * Returns a successful outcome with an indicator that that a process was left running on the background. * * @return a successful {@link CommandOutcome}. * @since 0.25 */ public static CommandOutcome succeededAndForkedToBackground() { return new CommandOutcome(SUCCESS_EXIT_CODE, true, null, null); } public static CommandOutcome succeeded() { return SUCCESS; } public static CommandOutcome failed(int exitCode, Throwable cause) { return failed(exitCode, null, cause); } public static CommandOutcome failed(int exitCode, String message) { return failed(exitCode, message, null); } public static CommandOutcome failed(int exitCode, String message, Throwable th) { if (exitCode == SUCCESS_EXIT_CODE) { throw new IllegalArgumentException("Success code '0' used for failure outcome."); } return new CommandOutcome(exitCode, false, message, th); } public String getMessage() { return message; } public int getExitCode() { return exitCode; } public Throwable getException() { return exception; } public boolean isSuccess() { return exitCode == SUCCESS_EXIT_CODE; } /** * Returns whether one or more tasks started by this command were still executing on threads other than the * command run thread as of {@link Command#run(Cli)} completion. * * @return whether one or more tasks started by this command were still executing on threads other than the * command run thread as of {@link Command#run(Cli)} completion. * @since 0.25 */ public boolean forkedToBackground() { return forkedToBackground; } /** * Exits the current OS process with the outcome exit code. If {@link #forkedToBackground} is true, this method * would not exit immediately and would block the thread until the app dies. */ public void exit() { // don't force exit if there are remaining tasks... if (forkedToBackground) { try { Thread.currentThread().join(); } catch (InterruptedException ie) { // interruption of a running Jetty daemon is a normal event, so unless we get shutdown errors, return success } } System.exit(exitCode); } @Override public String toString() { String message = this.message; if (message == null && exception != null) { message = exception.getMessage(); } StringBuilder buffer = new StringBuilder().append("[").append(exitCode); if (message != null) { buffer.append(": ").append(message); } return buffer.append("]").toString(); } }
bootique/src/main/java/io/bootique/command/CommandOutcome.java
package io.bootique.command; import io.bootique.cli.Cli; public class CommandOutcome { // UNIX success exit code private static final int SUCCESS_EXIT_CODE = 0; private static final CommandOutcome SUCCESS = new CommandOutcome(SUCCESS_EXIT_CODE, false, null, null); private final String message; private final int exitCode; private final Throwable exception; private final boolean forkedToBackground; private CommandOutcome(int exitCode, boolean forkedToBackground, String message, Throwable exception) { this.forkedToBackground = forkedToBackground; this.message = message; this.exitCode = exitCode; this.exception = exception; } /** * Returns a successful outcome with an indicator that that a process was left running on the background. * * @return a successful {@link CommandOutcome}. * @since 0.25 */ public static CommandOutcome succeededAndForkedToBackground() { return new CommandOutcome(SUCCESS_EXIT_CODE, true, null, null); } public static CommandOutcome succeeded() { return SUCCESS; } public static CommandOutcome failed(int exitCode, Throwable cause) { return failed(exitCode, null, cause); } public static CommandOutcome failed(int exitCode, String message) { return failed(exitCode, message, null); } public static CommandOutcome failed(int exitCode, String message, Throwable th) { if (exitCode == SUCCESS_EXIT_CODE) { throw new IllegalArgumentException("Success code '0' used for failure outcome."); } return new CommandOutcome(exitCode, false, message, th); } public String getMessage() { return message; } public int getExitCode() { return exitCode; } public Throwable getException() { return exception; } public boolean isSuccess() { return exitCode == SUCCESS_EXIT_CODE; } /** * Returns whether one or more tasks started by this command were still executing on threads other than the * command run thread as of {@link Command#run(Cli)} completion. * * @return whether one or more tasks started by this command were still executing on threads other than the * command run thread as of {@link Command#run(Cli)} completion. * @since 0.25 */ public boolean forkedToBackground() { return forkedToBackground; } /** * Exits the current OS process with the outcome exit code, unless {@link #forkedToBackground} is true. */ public void exit() { // don't force exit if there are remaining tasks... // TODO: a new name for the 'exit' method to reflect this behavior if (!forkedToBackground) { System.exit(exitCode); } } @Override public String toString() { String message = this.message; if (message == null && exception != null) { message = exception.getMessage(); } StringBuilder buffer = new StringBuilder().append("[").append(exitCode); if (message != null) { buffer.append(": ").append(message); } return buffer.append("]").toString(); } }
Commands starting background processes should unblock invoker threads #197 * if the app is forked, we should not exit in the exit() method. Instead should wait till the app dies
bootique/src/main/java/io/bootique/command/CommandOutcome.java
Commands starting background processes should unblock invoker threads #197
<ide><path>bootique/src/main/java/io/bootique/command/CommandOutcome.java <ide> } <ide> <ide> /** <del> * Exits the current OS process with the outcome exit code, unless {@link #forkedToBackground} is true. <add> * Exits the current OS process with the outcome exit code. If {@link #forkedToBackground} is true, this method <add> * would not exit immediately and would block the thread until the app dies. <ide> */ <ide> public void exit() { <ide> <ide> // don't force exit if there are remaining tasks... <del> // TODO: a new name for the 'exit' method to reflect this behavior <del> if (!forkedToBackground) { <del> System.exit(exitCode); <add> if (forkedToBackground) { <add> try { <add> Thread.currentThread().join(); <add> } catch (InterruptedException ie) { <add> // interruption of a running Jetty daemon is a normal event, so unless we get shutdown errors, return success <add> } <ide> } <add> <add> System.exit(exitCode); <ide> } <ide> <ide> @Override
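The fix relies on Thread.currentThread().join() never returning normally, since a thread cannot observe its own termination, so the calling thread simply parks until it is interrupted (or the JVM exits some other way) and only then falls through to System.exit(). The small demo below shows that idiom in isolation. It is not Bootique code; the one-second sleep and the class name are invented for the example.

// Demonstration (not Bootique code) of parking the invoking thread with
// Thread.currentThread().join(): the join never completes on its own, so the
// main thread stays blocked until it is interrupted from elsewhere.
public final class ParkUntilInterrupted {

    public static void main(String[] args) throws Exception {
        Thread main = Thread.currentThread();

        // Stand-in for a background daemon deciding to shut the app down later.
        new Thread(() -> {
            try {
                Thread.sleep(1000);
            } catch (InterruptedException ignored) {
            }
            main.interrupt(); // wakes the blocked join below
        }).start();

        try {
            main.join(); // blocks: a thread never observes its own termination
        } catch (InterruptedException ie) {
            // treated as a normal shutdown signal, mirroring the fix above
        }

        System.exit(0);
    }
}

Interruption is treated as a normal shutdown signal here, matching the comment added in the diff above.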
Java
apache-2.0
dd584463501a0af7b7c66ac8c9af8097188093e3
0
illicitonion/buck,robbertvanginkel/buck,kageiit/buck,OkBuilds/buck,k21/buck,robbertvanginkel/buck,dsyang/buck,shs96c/buck,romanoid/buck,davido/buck,ilya-klyuchnikov/buck,robbertvanginkel/buck,illicitonion/buck,romanoid/buck,sdwilsh/buck,ilya-klyuchnikov/buck,k21/buck,justinmuller/buck,robbertvanginkel/buck,vschs007/buck,ilya-klyuchnikov/buck,davido/buck,justinmuller/buck,dsyang/buck,sdwilsh/buck,OkBuilds/buck,SeleniumHQ/buck,darkforestzero/buck,illicitonion/buck,marcinkwiatkowski/buck,davido/buck,davido/buck,marcinkwiatkowski/buck,zhan-xiong/buck,grumpyjames/buck,illicitonion/buck,raviagarwal7/buck,rmaz/buck,vschs007/buck,dsyang/buck,JoelMarcey/buck,LegNeato/buck,justinmuller/buck,grumpyjames/buck,robbertvanginkel/buck,zpao/buck,illicitonion/buck,janicduplessis/buck,janicduplessis/buck,SeleniumHQ/buck,shs96c/buck,brettwooldridge/buck,daedric/buck,JoelMarcey/buck,darkforestzero/buck,nguyentruongtho/buck,SeleniumHQ/buck,shybovycha/buck,vschs007/buck,brettwooldridge/buck,shybovycha/buck,grumpyjames/buck,janicduplessis/buck,raviagarwal7/buck,dsyang/buck,JoelMarcey/buck,rmaz/buck,Addepar/buck,sdwilsh/buck,zpao/buck,ilya-klyuchnikov/buck,ilya-klyuchnikov/buck,romanoid/buck,davido/buck,dsyang/buck,janicduplessis/buck,sdwilsh/buck,rmaz/buck,raviagarwal7/buck,Addepar/buck,darkforestzero/buck,sdwilsh/buck,JoelMarcey/buck,JoelMarcey/buck,SeleniumHQ/buck,kageiit/buck,ilya-klyuchnikov/buck,daedric/buck,raviagarwal7/buck,dsyang/buck,brettwooldridge/buck,shybovycha/buck,zhan-xiong/buck,shs96c/buck,romanoid/buck,daedric/buck,facebook/buck,zhan-xiong/buck,Addepar/buck,zhan-xiong/buck,sdwilsh/buck,shybovycha/buck,JoelMarcey/buck,Addepar/buck,shs96c/buck,rmaz/buck,vschs007/buck,kageiit/buck,marcinkwiatkowski/buck,LegNeato/buck,shs96c/buck,zhan-xiong/buck,Addepar/buck,facebook/buck,k21/buck,daedric/buck,vschs007/buck,zhan-xiong/buck,sdwilsh/buck,justinmuller/buck,darkforestzero/buck,marcinkwiatkowski/buck,darkforestzero/buck,zhan-xiong/buck,grumpyjames/buck,rmaz/buck,zhan-xiong/buck,illicitonion/buck,zhan-xiong/buck,Addepar/buck,shs96c/buck,facebook/buck,davido/buck,clonetwin26/buck,SeleniumHQ/buck,justinmuller/buck,darkforestzero/buck,zpao/buck,grumpyjames/buck,OkBuilds/buck,kageiit/buck,SeleniumHQ/buck,davido/buck,raviagarwal7/buck,zhan-xiong/buck,sdwilsh/buck,grumpyjames/buck,LegNeato/buck,janicduplessis/buck,justinmuller/buck,OkBuilds/buck,robbertvanginkel/buck,marcinkwiatkowski/buck,grumpyjames/buck,brettwooldridge/buck,clonetwin26/buck,justinmuller/buck,brettwooldridge/buck,justinmuller/buck,k21/buck,sdwilsh/buck,dsyang/buck,marcinkwiatkowski/buck,sdwilsh/buck,LegNeato/buck,dsyang/buck,vschs007/buck,dsyang/buck,daedric/buck,justinmuller/buck,vschs007/buck,k21/buck,robbertvanginkel/buck,romanoid/buck,OkBuilds/buck,OkBuilds/buck,zhan-xiong/buck,robbertvanginkel/buck,janicduplessis/buck,rmaz/buck,shybovycha/buck,illicitonion/buck,janicduplessis/buck,rmaz/buck,kageiit/buck,daedric/buck,romanoid/buck,illicitonion/buck,raviagarwal7/buck,brettwooldridge/buck,ilya-klyuchnikov/buck,Addepar/buck,davido/buck,ilya-klyuchnikov/buck,SeleniumHQ/buck,dsyang/buck,shybovycha/buck,clonetwin26/buck,darkforestzero/buck,zpao/buck,shybovycha/buck,janicduplessis/buck,shs96c/buck,nguyentruongtho/buck,facebook/buck,janicduplessis/buck,facebook/buck,marcinkwiatkowski/buck,darkforestzero/buck,davido/buck,clonetwin26/buck,romanoid/buck,dsyang/buck,JoelMarcey/buck,JoelMarcey/buck,raviagarwal7/buck,marcinkwiatkowski/buck,darkforestzero/buck,robbertvanginkel/buck,vschs007/buck,rmaz/buck,ilya-klyuchnikov/buck,clonetwin26/buck,k21/buck,s
hs96c/buck,marcinkwiatkowski/buck,Addepar/buck,illicitonion/buck,marcinkwiatkowski/buck,darkforestzero/buck,janicduplessis/buck,zhan-xiong/buck,kageiit/buck,clonetwin26/buck,clonetwin26/buck,sdwilsh/buck,rmaz/buck,JoelMarcey/buck,marcinkwiatkowski/buck,robbertvanginkel/buck,rmaz/buck,shybovycha/buck,LegNeato/buck,OkBuilds/buck,LegNeato/buck,k21/buck,robbertvanginkel/buck,clonetwin26/buck,darkforestzero/buck,JoelMarcey/buck,nguyentruongtho/buck,raviagarwal7/buck,LegNeato/buck,grumpyjames/buck,marcinkwiatkowski/buck,darkforestzero/buck,daedric/buck,robbertvanginkel/buck,SeleniumHQ/buck,illicitonion/buck,illicitonion/buck,ilya-klyuchnikov/buck,shybovycha/buck,k21/buck,vschs007/buck,ilya-klyuchnikov/buck,k21/buck,justinmuller/buck,Addepar/buck,Addepar/buck,SeleniumHQ/buck,romanoid/buck,k21/buck,OkBuilds/buck,grumpyjames/buck,daedric/buck,davido/buck,LegNeato/buck,LegNeato/buck,romanoid/buck,clonetwin26/buck,OkBuilds/buck,clonetwin26/buck,illicitonion/buck,Addepar/buck,darkforestzero/buck,brettwooldridge/buck,SeleniumHQ/buck,clonetwin26/buck,raviagarwal7/buck,LegNeato/buck,romanoid/buck,romanoid/buck,clonetwin26/buck,brettwooldridge/buck,LegNeato/buck,shs96c/buck,vschs007/buck,davido/buck,SeleniumHQ/buck,kageiit/buck,romanoid/buck,raviagarwal7/buck,justinmuller/buck,daedric/buck,raviagarwal7/buck,brettwooldridge/buck,shybovycha/buck,illicitonion/buck,justinmuller/buck,shybovycha/buck,shs96c/buck,daedric/buck,daedric/buck,davido/buck,rmaz/buck,zpao/buck,ilya-klyuchnikov/buck,vschs007/buck,LegNeato/buck,facebook/buck,k21/buck,shs96c/buck,zpao/buck,vschs007/buck,brettwooldridge/buck,davido/buck,nguyentruongtho/buck,k21/buck,OkBuilds/buck,daedric/buck,brettwooldridge/buck,k21/buck,sdwilsh/buck,clonetwin26/buck,OkBuilds/buck,JoelMarcey/buck,vschs007/buck,raviagarwal7/buck,JoelMarcey/buck,OkBuilds/buck,shs96c/buck,brettwooldridge/buck,romanoid/buck,grumpyjames/buck,dsyang/buck,nguyentruongtho/buck,raviagarwal7/buck,rmaz/buck,justinmuller/buck,zpao/buck,OkBuilds/buck,shybovycha/buck,grumpyjames/buck,ilya-klyuchnikov/buck,SeleniumHQ/buck,daedric/buck,shybovycha/buck,shs96c/buck,Addepar/buck,zhan-xiong/buck,marcinkwiatkowski/buck,SeleniumHQ/buck,sdwilsh/buck,JoelMarcey/buck,grumpyjames/buck,nguyentruongtho/buck,LegNeato/buck,robbertvanginkel/buck,janicduplessis/buck,rmaz/buck,Addepar/buck,nguyentruongtho/buck,facebook/buck,brettwooldridge/buck,dsyang/buck,janicduplessis/buck
/* * Copyright 2013-present Facebook, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.facebook.buck.cxx; import com.facebook.buck.graph.AbstractBreadthFirstTraversal; import com.facebook.buck.io.MorePaths; import com.facebook.buck.io.ProjectFilesystem; import com.facebook.buck.json.JsonConcatenate; import com.facebook.buck.log.Logger; import com.facebook.buck.model.BuildTarget; import com.facebook.buck.model.BuildTargets; import com.facebook.buck.model.Flavor; import com.facebook.buck.model.FlavorDomain; import com.facebook.buck.model.ImmutableFlavor; import com.facebook.buck.parser.NoSuchBuildTargetException; import com.facebook.buck.rules.BuildRule; import com.facebook.buck.rules.BuildRuleParams; import com.facebook.buck.rules.BuildRuleResolver; import com.facebook.buck.rules.BuildTargetSourcePath; import com.facebook.buck.rules.CommandTool; import com.facebook.buck.rules.RuleKeyObjectSink; import com.facebook.buck.rules.SourcePath; import com.facebook.buck.rules.SourcePathResolver; import com.facebook.buck.rules.SourceWithFlags; import com.facebook.buck.rules.SymlinkTree; import com.facebook.buck.rules.Tool; import com.facebook.buck.rules.args.Arg; import com.facebook.buck.rules.args.FileListableLinkerInputArg; import com.facebook.buck.rules.args.MacroArg; import com.facebook.buck.rules.args.RuleKeyAppendableFunction; import com.facebook.buck.rules.args.SourcePathArg; import com.facebook.buck.rules.args.StringArg; import com.facebook.buck.rules.coercer.FrameworkPath; import com.facebook.buck.rules.coercer.PatternMatchedCollection; import com.facebook.buck.rules.coercer.SourceList; import com.facebook.buck.rules.macros.LocationMacroExpander; import com.facebook.buck.rules.macros.MacroExpander; import com.facebook.buck.rules.macros.MacroHandler; import com.facebook.buck.util.HumanReadableException; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Function; import com.google.common.base.Functions; import com.google.common.base.Joiner; import com.google.common.base.Optional; import com.google.common.base.Preconditions; import com.google.common.base.Predicate; import com.google.common.base.Predicates; import com.google.common.base.Suppliers; import com.google.common.collect.FluentIterable; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMultimap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.ImmutableSortedMap; import com.google.common.collect.ImmutableSortedSet; import com.google.common.io.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.util.Map; import java.util.regex.Matcher; import java.util.regex.Pattern; public class CxxDescriptionEnhancer { private static final Logger LOG = Logger.get(CxxDescriptionEnhancer.class); public static final Flavor HEADER_SYMLINK_TREE_FLAVOR = ImmutableFlavor.of("private-headers"); public static final Flavor EXPORTED_HEADER_SYMLINK_TREE_FLAVOR = 
ImmutableFlavor.of("headers"); public static final Flavor STATIC_FLAVOR = ImmutableFlavor.of("static"); public static final Flavor STATIC_PIC_FLAVOR = ImmutableFlavor.of("static-pic"); public static final Flavor SHARED_FLAVOR = ImmutableFlavor.of("shared"); public static final Flavor MACH_O_BUNDLE_FLAVOR = ImmutableFlavor.of("mach-o-bundle"); public static final Flavor SHARED_LIBRARY_SYMLINK_TREE_FLAVOR = ImmutableFlavor.of("shared-library-symlink-tree"); public static final Flavor CXX_LINK_BINARY_FLAVOR = ImmutableFlavor.of("binary"); protected static final MacroHandler MACRO_HANDLER = new MacroHandler( ImmutableMap.<String, MacroExpander>of( "location", new LocationMacroExpander())); private static final Pattern SONAME_EXT_MACRO_PATTERN = Pattern.compile("\\$\\(ext(?: ([.0-9]+))?\\)"); private CxxDescriptionEnhancer() {} public static HeaderSymlinkTree createHeaderSymlinkTree( BuildRuleParams params, BuildRuleResolver resolver, SourcePathResolver pathResolver, CxxPlatform cxxPlatform, ImmutableMap<Path, SourcePath> headers, HeaderVisibility headerVisibility) { BuildTarget headerSymlinkTreeTarget = CxxDescriptionEnhancer.createHeaderSymlinkTreeTarget( params.getBuildTarget(), cxxPlatform.getFlavor(), headerVisibility); Path headerSymlinkTreeRoot = CxxDescriptionEnhancer.getHeaderSymlinkTreePath( params.getProjectFilesystem(), params.getBuildTarget(), cxxPlatform.getFlavor(), headerVisibility); Optional<Path> headerMapLocation = Optional.absent(); if (cxxPlatform.getCpp().resolve(resolver).supportsHeaderMaps() && cxxPlatform.getCxxpp().resolve(resolver).supportsHeaderMaps()) { headerMapLocation = Optional.of( getHeaderMapPath( params.getProjectFilesystem(), params.getBuildTarget(), cxxPlatform.getFlavor(), headerVisibility)); } return CxxPreprocessables.createHeaderSymlinkTreeBuildRule( pathResolver, headerSymlinkTreeTarget, params, headerSymlinkTreeRoot, headerMapLocation, headers); } public static HeaderSymlinkTree requireHeaderSymlinkTree( BuildRuleParams params, BuildRuleResolver ruleResolver, SourcePathResolver pathResolver, CxxPlatform cxxPlatform, ImmutableMap<Path, SourcePath> headers, HeaderVisibility headerVisibility) { BuildTarget headerSymlinkTreeTarget = CxxDescriptionEnhancer.createHeaderSymlinkTreeTarget( params.getBuildTarget(), cxxPlatform.getFlavor(), headerVisibility); // Check the cache... Optional<BuildRule> rule = ruleResolver.getRuleOptional(headerSymlinkTreeTarget); if (rule.isPresent()) { Preconditions.checkState(rule.get() instanceof HeaderSymlinkTree); return (HeaderSymlinkTree) rule.get(); } HeaderSymlinkTree symlinkTree = createHeaderSymlinkTree( params, ruleResolver, pathResolver, cxxPlatform, headers, headerVisibility); ruleResolver.addToIndex(symlinkTree); return symlinkTree; } /** * @return the {@link BuildTarget} to use for the {@link BuildRule} generating the * symlink tree of headers. */ public static BuildTarget createHeaderSymlinkTreeTarget( BuildTarget target, Flavor platform, HeaderVisibility headerVisibility) { return BuildTarget .builder(target) .addFlavors(platform) .addFlavors(getHeaderSymlinkTreeFlavor(headerVisibility)) .build(); } /** * @return the absolute {@link Path} to use for the symlink tree of headers. 
*/ public static Path getHeaderSymlinkTreePath( ProjectFilesystem filesystem, BuildTarget target, Flavor platform, HeaderVisibility headerVisibility) { return target.getCellPath().resolve( BuildTargets.getGenPath( filesystem, createHeaderSymlinkTreeTarget(target, platform, headerVisibility), "%s")); } public static Flavor getHeaderSymlinkTreeFlavor(HeaderVisibility headerVisibility) { switch (headerVisibility) { case PUBLIC: return EXPORTED_HEADER_SYMLINK_TREE_FLAVOR; case PRIVATE: return HEADER_SYMLINK_TREE_FLAVOR; default: throw new RuntimeException("Unexpected value of enum ExportMode"); } } /** * @return the {@link Path} to use for the header map for the given symlink tree. */ public static Path getHeaderMapPath( ProjectFilesystem filesystem, BuildTarget target, Flavor platform, HeaderVisibility headerVisibility) { return BuildTargets.getGenPath( filesystem, createHeaderSymlinkTreeTarget(target, platform, headerVisibility), "%s.hmap"); } /** * @return a map of header locations to input {@link SourcePath} objects formed by parsing the * input {@link SourcePath} objects for the "headers" parameter. */ public static ImmutableMap<Path, SourcePath> parseHeaders( BuildTarget buildTarget, SourcePathResolver resolver, Optional<CxxPlatform> cxxPlatform, CxxConstructorArg args) { ImmutableMap.Builder<String, SourcePath> headers = ImmutableMap.builder(); putAllHeaders(args.headers.get(), headers, resolver, "headers", buildTarget); if (cxxPlatform.isPresent()) { for (SourceList sourceList : args.platformHeaders.get().getMatchingValues( cxxPlatform.get().getFlavor().toString())) { putAllHeaders( sourceList, headers, resolver, "platform_headers", buildTarget); } } return CxxPreprocessables.resolveHeaderMap( args.headerNamespace.transform(MorePaths.TO_PATH) .or(buildTarget.getBasePath()), headers.build()); } /** * @return a map of header locations to input {@link SourcePath} objects formed by parsing the * input {@link SourcePath} objects for the "exportedHeaders" parameter. */ public static ImmutableMap<Path, SourcePath> parseExportedHeaders( BuildTarget buildTarget, SourcePathResolver resolver, Optional<CxxPlatform> cxxPlatform, CxxLibraryDescription.Arg args) { ImmutableMap.Builder<String, SourcePath> headers = ImmutableMap.builder(); putAllHeaders( args.exportedHeaders.get(), headers, resolver, "exported_headers", buildTarget); if (cxxPlatform.isPresent()) { for (SourceList sourceList : args.exportedPlatformHeaders.get().getMatchingValues( cxxPlatform.get().getFlavor().toString())) { putAllHeaders( sourceList, headers, resolver, "exported_platform_headers", buildTarget); } } return CxxPreprocessables.resolveHeaderMap( args.headerNamespace.transform(MorePaths.TO_PATH) .or(buildTarget.getBasePath()), headers.build()); } /** * Resolves the headers in `sourceList` and puts them into `sources` for the specificed * `buildTarget`. */ public static void putAllHeaders( SourceList sourceList, ImmutableMap.Builder<String, SourcePath> sources, SourcePathResolver sourcePathResolver, String parameterName, BuildTarget buildTarget) { switch (sourceList.getType()) { case NAMED: sources.putAll(sourceList.getNamedSources().get()); break; case UNNAMED: sources.putAll( sourcePathResolver.getSourcePathNames( buildTarget, parameterName, sourceList.getUnnamedSources().get())); break; } } /** * @return a list {@link CxxSource} objects formed by parsing the input {@link SourcePath} * objects for the "srcs" parameter. 
*/ public static ImmutableMap<String, CxxSource> parseCxxSources( BuildTarget buildTarget, SourcePathResolver resolver, CxxPlatform cxxPlatform, CxxConstructorArg args) { return parseCxxSources( buildTarget, resolver, cxxPlatform, args.srcs.get(), args.platformSrcs.get()); } public static ImmutableMap<String, CxxSource> parseCxxSources( BuildTarget buildTarget, SourcePathResolver resolver, CxxPlatform cxxPlatform, ImmutableSortedSet<SourceWithFlags> srcs, PatternMatchedCollection<ImmutableSortedSet<SourceWithFlags>> platformSrcs) { ImmutableMap.Builder<String, SourceWithFlags> sources = ImmutableMap.builder(); putAllSources(srcs, sources, resolver, buildTarget); for (ImmutableSortedSet<SourceWithFlags> sourcesWithFlags : platformSrcs.getMatchingValues(cxxPlatform.getFlavor().toString())) { putAllSources(sourcesWithFlags, sources, resolver, buildTarget); } return resolveCxxSources(sources.build()); } private static void putAllSources( ImmutableSortedSet<SourceWithFlags> sourcesWithFlags, ImmutableMap.Builder<String, SourceWithFlags> sources, SourcePathResolver pathResolver, BuildTarget buildTarget) { sources.putAll( pathResolver.getSourcePathNames( buildTarget, "srcs", sourcesWithFlags, SourceWithFlags.TO_SOURCE_PATH)); } public static ImmutableList<CxxPreprocessorInput> collectCxxPreprocessorInput( BuildRuleParams params, CxxPlatform cxxPlatform, ImmutableMultimap<CxxSource.Type, String> preprocessorFlags, ImmutableList<HeaderSymlinkTree> headerSymlinkTrees, ImmutableSet<FrameworkPath> frameworks, Iterable<CxxPreprocessorInput> cxxPreprocessorInputFromDeps) throws NoSuchBuildTargetException { // Add the private includes of any rules which this rule depends on, and which list this rule as // a test. BuildTarget targetWithoutFlavor = BuildTarget.of( params.getBuildTarget().getUnflavoredBuildTarget()); ImmutableList.Builder<CxxPreprocessorInput> cxxPreprocessorInputFromTestedRulesBuilder = ImmutableList.builder(); for (BuildRule rule : params.getDeps()) { if (rule instanceof NativeTestable) { NativeTestable testable = (NativeTestable) rule; if (testable.isTestedBy(targetWithoutFlavor)) { LOG.debug( "Adding private includes of tested rule %s to testing rule %s", rule.getBuildTarget(), params.getBuildTarget()); cxxPreprocessorInputFromTestedRulesBuilder.add( testable.getCxxPreprocessorInput( cxxPlatform, HeaderVisibility.PRIVATE)); // Add any dependent headers cxxPreprocessorInputFromTestedRulesBuilder.addAll( CxxPreprocessables.getTransitiveCxxPreprocessorInput( cxxPlatform, ImmutableList.of(rule))); } } } ImmutableList<CxxPreprocessorInput> cxxPreprocessorInputFromTestedRules = cxxPreprocessorInputFromTestedRulesBuilder.build(); LOG.verbose( "Rules tested by target %s added private includes %s", params.getBuildTarget(), cxxPreprocessorInputFromTestedRules); ImmutableList.Builder<CxxHeaders> allIncludes = ImmutableList.builder(); for (HeaderSymlinkTree headerSymlinkTree : headerSymlinkTrees) { allIncludes.add( CxxSymlinkTreeHeaders.from(headerSymlinkTree, CxxPreprocessables.IncludeType.LOCAL)); } CxxPreprocessorInput localPreprocessorInput = CxxPreprocessorInput.builder() .putAllPreprocessorFlags(preprocessorFlags) .addAllIncludes(allIncludes.build()) .addAllFrameworks(frameworks) .build(); return ImmutableList.<CxxPreprocessorInput>builder() .add(localPreprocessorInput) .addAll(cxxPreprocessorInputFromDeps) .addAll(cxxPreprocessorInputFromTestedRules) .build(); } public static BuildTarget createStaticLibraryBuildTarget( BuildTarget target, Flavor platform, CxxSourceRuleFactory.PicType pic) { 
return BuildTarget.builder(target) .addFlavors(platform) .addFlavors(pic == CxxSourceRuleFactory.PicType.PDC ? STATIC_FLAVOR : STATIC_PIC_FLAVOR) .build(); } public static BuildTarget createSharedLibraryBuildTarget( BuildTarget target, Flavor platform, Linker.LinkType linkType) { Flavor linkFlavor; switch (linkType) { case SHARED: linkFlavor = SHARED_FLAVOR; break; case MACH_O_BUNDLE: linkFlavor = MACH_O_BUNDLE_FLAVOR; break; case EXECUTABLE: default: throw new IllegalStateException( "Only SHARED and MACH_O_BUNDLE types expected, got: " + linkType); } return BuildTarget.builder(target).addFlavors(platform).addFlavors(linkFlavor).build(); } public static Path getStaticLibraryPath( ProjectFilesystem filesystem, BuildTarget target, Flavor platform, CxxSourceRuleFactory.PicType pic) { String name = String.format("lib%s.a", target.getShortName()); return BuildTargets .getGenPath(filesystem, createStaticLibraryBuildTarget(target, platform, pic), "%s") .resolve(name); } public static String getSharedLibrarySoname( Optional<String> declaredSoname, BuildTarget target, CxxPlatform platform) { if (!declaredSoname.isPresent()) { return getDefaultSharedLibrarySoname(target, platform); } return getNonDefaultSharedLibrarySoname( declaredSoname.get(), platform.getSharedLibraryExtension(), platform.getSharedLibraryVersionedExtensionFormat()); } @VisibleForTesting static String getNonDefaultSharedLibrarySoname( String declared, String sharedLibraryExtension, String sharedLibraryVersionedExtensionFormat) { Matcher match = SONAME_EXT_MACRO_PATTERN.matcher(declared); if (!match.find()) { return declared; } String version = match.group(1); if (version == null) { return match.replaceFirst(sharedLibraryExtension); } return match.replaceFirst( String.format( sharedLibraryVersionedExtensionFormat, version)); } public static String getDefaultSharedLibrarySoname(BuildTarget target, CxxPlatform platform) { String libName = Joiner.on('_').join( ImmutableList.builder() .addAll( FluentIterable.from(target.getBasePath()) .transform(Functions.toStringFunction()) .filter(Predicates.not(Predicates.equalTo("")))) .add( target .withoutFlavors(ImmutableSet.of(platform.getFlavor())) .getShortNameAndFlavorPostfix()) .build()); String extension = platform.getSharedLibraryExtension(); return String.format("lib%s.%s", libName, extension); } public static Path getSharedLibraryPath( ProjectFilesystem filesystem, BuildTarget sharedLibraryTarget, String soname) { return BuildTargets.getGenPath(filesystem, sharedLibraryTarget, "%s/" + soname); } @VisibleForTesting protected static Path getLinkOutputPath(BuildTarget target, ProjectFilesystem filesystem) { return BuildTargets.getGenPath(filesystem, target, "%s"); } @VisibleForTesting protected static BuildTarget createCxxLinkTarget(BuildTarget target) { return BuildTarget.builder(target).addFlavors(CXX_LINK_BINARY_FLAVOR).build(); } /** * @return a function that transforms the {@link FrameworkPath} to search paths with any embedded * macros expanded. 
*/ static RuleKeyAppendableFunction<FrameworkPath, Path> frameworkPathToSearchPath( final CxxPlatform cxxPlatform, final SourcePathResolver resolver) { return new RuleKeyAppendableFunction<FrameworkPath, Path>() { private RuleKeyAppendableFunction<String, String> translateMacrosFn = CxxFlags.getTranslateMacrosFn(cxxPlatform); @Override public void appendToRuleKey(RuleKeyObjectSink sink) { sink.setReflectively("translateMacrosFn", translateMacrosFn); } @Override public Path apply(FrameworkPath input) { Function<FrameworkPath, Path> convertToPath = FrameworkPath.getUnexpandedSearchPathFunction( resolver.getAbsolutePathFunction(), Functions.<Path>identity()); String pathAsString = convertToPath.apply(input).toString(); return Paths.get(translateMacrosFn.apply(pathAsString)); } }; } public static CxxLinkAndCompileRules createBuildRulesForCxxBinaryDescriptionArg( BuildRuleParams params, BuildRuleResolver resolver, CxxBuckConfig cxxBuckConfig, CxxPlatform cxxPlatform, CxxBinaryDescription.Arg args, Optional<StripStyle> stripStyle) throws NoSuchBuildTargetException { SourcePathResolver sourcePathResolver = new SourcePathResolver(resolver); ImmutableMap<String, CxxSource> srcs = parseCxxSources( params.getBuildTarget(), sourcePathResolver, cxxPlatform, args); ImmutableMap<Path, SourcePath> headers = parseHeaders( params.getBuildTarget(), new SourcePathResolver(resolver), Optional.of(cxxPlatform), args); return createBuildRulesForCxxBinary( params, resolver, cxxBuckConfig, cxxPlatform, srcs, headers, stripStyle, args.linkStyle.or(Linker.LinkableDepType.STATIC), args.preprocessorFlags, args.platformPreprocessorFlags, args.langPreprocessorFlags, args.frameworks, args.libraries, args.compilerFlags, args.langCompilerFlags, args.platformCompilerFlags, args.prefixHeader, args.linkerFlags, args.platformLinkerFlags, args.cxxRuntimeType); } public static CxxLinkAndCompileRules createBuildRulesForCxxBinary( BuildRuleParams params, BuildRuleResolver resolver, CxxBuckConfig cxxBuckConfig, CxxPlatform cxxPlatform, ImmutableMap<String, CxxSource> srcs, ImmutableMap<Path, SourcePath> headers, Optional<StripStyle> stripStyle, Linker.LinkableDepType linkStyle, Optional<ImmutableList<String>> preprocessorFlags, Optional<PatternMatchedCollection<ImmutableList<String>>> platformPreprocessorFlags, Optional<ImmutableMap<CxxSource.Type, ImmutableList<String>>> langPreprocessorFlags, Optional<ImmutableSortedSet<FrameworkPath>> frameworks, Optional<ImmutableSortedSet<FrameworkPath>> libraries, Optional<ImmutableList<String>> compilerFlags, Optional<ImmutableMap<CxxSource.Type, ImmutableList<String>>> langCompilerFlags, Optional<PatternMatchedCollection<ImmutableList<String>>> platformCompilerFlags, Optional<SourcePath> prefixHeader, Optional<ImmutableList<String>> linkerFlags, Optional<PatternMatchedCollection<ImmutableList<String>>> platformLinkerFlags, Optional<Linker.CxxRuntimeType> cxxRuntimeType) throws NoSuchBuildTargetException { SourcePathResolver sourcePathResolver = new SourcePathResolver(resolver); Path linkOutput = getLinkOutputPath(params.getBuildTarget(), params.getProjectFilesystem()); ImmutableList.Builder<Arg> argsBuilder = ImmutableList.builder(); CommandTool.Builder executableBuilder = new CommandTool.Builder(); // Setup the header symlink tree and combine all the preprocessor input from this rule // and all dependencies. 
HeaderSymlinkTree headerSymlinkTree = requireHeaderSymlinkTree( params, resolver, sourcePathResolver, cxxPlatform, headers, HeaderVisibility.PRIVATE); ImmutableList<CxxPreprocessorInput> cxxPreprocessorInput = collectCxxPreprocessorInput( params, cxxPlatform, CxxFlags.getLanguageFlags( preprocessorFlags, platformPreprocessorFlags, langPreprocessorFlags, cxxPlatform), ImmutableList.of(headerSymlinkTree), frameworks.get(), CxxPreprocessables.getTransitiveCxxPreprocessorInput( cxxPlatform, FluentIterable.from(params.getDeps()) .filter(Predicates.instanceOf(CxxPreprocessorDep.class)))); // Generate and add all the build rules to preprocess and compile the source to the // resolver and get the `SourcePath`s representing the generated object files. ImmutableMap<CxxPreprocessAndCompile, SourcePath> objects = CxxSourceRuleFactory.requirePreprocessAndCompileRules( params, resolver, sourcePathResolver, cxxBuckConfig, cxxPlatform, cxxPreprocessorInput, CxxFlags.getLanguageFlags( compilerFlags, platformCompilerFlags, langCompilerFlags, cxxPlatform), prefixHeader, cxxBuckConfig.getPreprocessMode(), srcs, linkStyle == Linker.LinkableDepType.STATIC ? CxxSourceRuleFactory.PicType.PDC : CxxSourceRuleFactory.PicType.PIC); // Build up the linker flags, which support macro expansion. ImmutableList<String> resolvedLinkerFlags = CxxFlags.getFlags( linkerFlags, platformLinkerFlags, cxxPlatform); argsBuilder.addAll( FluentIterable.from(resolvedLinkerFlags) .transform( MacroArg.toMacroArgFunction( MACRO_HANDLER, params.getBuildTarget(), params.getCellRoots(), resolver))); // Special handling for dynamically linked binaries. if (linkStyle == Linker.LinkableDepType.SHARED) { // Create a symlink tree with for all shared libraries needed by this binary. SymlinkTree sharedLibraries = requireSharedLibrarySymlinkTree( params, resolver, sourcePathResolver, cxxPlatform, params.getDeps(), Predicates.instanceOf(NativeLinkable.class)); // Embed a origin-relative library path into the binary so it can find the shared libraries. // The shared libraries root is absolute. Also need an absolute path to the linkOutput Path absLinkOut = params.getBuildTarget().getCellPath().resolve(linkOutput); argsBuilder.addAll( StringArg.from( Linkers.iXlinker( "-rpath", String.format( "%s/%s", cxxPlatform.getLd().resolve(resolver).origin(), absLinkOut.getParent().relativize(sharedLibraries.getRoot()).toString())))); // Add all the shared libraries and the symlink tree as inputs to the tool that represents // this binary, so that users can attach the proper deps. executableBuilder.addDep(sharedLibraries); executableBuilder.addInputs(sharedLibraries.getLinks().values()); } // Add object files into the args. 
ImmutableList<SourcePathArg> objectArgs = FluentIterable .from(SourcePathArg.from(sourcePathResolver, objects.values())) .transform(new Function<Arg, SourcePathArg>() { @Override public SourcePathArg apply(Arg input) { Preconditions.checkArgument(input instanceof SourcePathArg); return (SourcePathArg) input; } }) .toList(); argsBuilder.addAll(FileListableLinkerInputArg.from(objectArgs)); BuildTarget linkRuleTarget = createCxxLinkTarget(params.getBuildTarget()); CxxLink cxxLink = createCxxLinkRule( params, resolver, cxxBuckConfig, cxxPlatform, linkStyle, frameworks, libraries, cxxRuntimeType, sourcePathResolver, linkOutput, argsBuilder, linkRuleTarget); BuildRule binaryRuleForExecutable; Optional<CxxStrip> cxxStrip = Optional.absent(); if (stripStyle.isPresent()) { CxxStrip stripRule = createCxxStripRule( params, resolver, cxxPlatform.getStrip(), stripStyle.get(), sourcePathResolver, cxxLink); cxxStrip = Optional.of(stripRule); binaryRuleForExecutable = stripRule; } else { binaryRuleForExecutable = cxxLink; } // Add the output of the link as the lone argument needed to invoke this binary as a tool. executableBuilder.addArg( new SourcePathArg( sourcePathResolver, new BuildTargetSourcePath(binaryRuleForExecutable.getBuildTarget()))); return new CxxLinkAndCompileRules( cxxLink, cxxStrip, ImmutableSortedSet.copyOf(objects.keySet()), executableBuilder.build()); } private static CxxLink createCxxLinkRule( BuildRuleParams params, BuildRuleResolver resolver, CxxBuckConfig cxxBuckConfig, CxxPlatform cxxPlatform, Linker.LinkableDepType linkStyle, Optional<ImmutableSortedSet<FrameworkPath>> frameworks, Optional<ImmutableSortedSet<FrameworkPath>> libraries, Optional<Linker.CxxRuntimeType> cxxRuntimeType, SourcePathResolver sourcePathResolver, Path linkOutput, ImmutableList.Builder<Arg> argsBuilder, BuildTarget linkRuleTarget) throws NoSuchBuildTargetException { CxxLink cxxLink; Optional<BuildRule> existingCxxLinkRule = resolver.getRuleOptional(linkRuleTarget); if (existingCxxLinkRule.isPresent()) { Preconditions.checkArgument(existingCxxLinkRule.get() instanceof CxxLink); cxxLink = (CxxLink) existingCxxLinkRule.get(); } else { // Generate the final link rule. We use the top-level target as the link rule's // target, so that it corresponds to the actual binary we build. 
cxxLink = CxxLinkableEnhancer.createCxxLinkableBuildRule( cxxBuckConfig, cxxPlatform, params, resolver, sourcePathResolver, linkRuleTarget, Linker.LinkType.EXECUTABLE, Optional.<String>absent(), linkOutput, linkStyle, FluentIterable.from(params.getDeps()) .filter(NativeLinkable.class), cxxRuntimeType, Optional.<SourcePath>absent(), ImmutableSet.<BuildTarget>of(), NativeLinkableInput.builder() .setArgs(argsBuilder.build()) .setFrameworks(frameworks.or(ImmutableSortedSet.<FrameworkPath>of())) .setLibraries(libraries.or(ImmutableSortedSet.<FrameworkPath>of())) .build()); resolver.addToIndex(cxxLink); } return cxxLink; } public static CxxStrip createCxxStripRule( BuildRuleParams params, BuildRuleResolver resolver, Tool stripTool, StripStyle stripStyle, SourcePathResolver sourcePathResolver, BuildRule unstrippedBinaryRule) { BuildRuleParams stripRuleParams = params .copyWithChanges( params.getBuildTarget().withAppendedFlavors( CxxStrip.RULE_FLAVOR, stripStyle.getFlavor()), Suppliers.ofInstance(ImmutableSortedSet.of(unstrippedBinaryRule)), Suppliers.ofInstance(ImmutableSortedSet.<BuildRule>of())); Optional<BuildRule> exisitingRule = resolver.getRuleOptional(stripRuleParams.getBuildTarget()); if (exisitingRule.isPresent()) { Preconditions.checkArgument(exisitingRule.get() instanceof CxxStrip); return (CxxStrip) exisitingRule.get(); } else { CxxStrip cxxStrip = new CxxStrip( stripRuleParams, sourcePathResolver, stripStyle, new BuildTargetSourcePath(unstrippedBinaryRule.getBuildTarget()), stripTool, CxxDescriptionEnhancer.getLinkOutputPath( stripRuleParams.getBuildTarget(), params.getProjectFilesystem())); resolver.addToIndex(cxxStrip); return cxxStrip; } } public static ImmutableSortedSet<HeaderSymlinkTree> requireTransitiveCompilationDatabaseHeaderSymlinkTreeDeps( BuildRuleParams params, BuildRuleResolver ruleResolver, SourcePathResolver pathResolver, final CxxPlatform cxxPlatform, CxxConstructorArg arg) { BuildRuleParams paramsWithoutFlavor = params.withoutFlavor( CxxCompilationDatabase.COMPILATION_DATABASE); final ImmutableSortedSet.Builder<HeaderSymlinkTree> resultBuilder = ImmutableSortedSet.naturalOrder(); resultBuilder.add( CxxDescriptionEnhancer.requireHeaderSymlinkTree( paramsWithoutFlavor, ruleResolver, pathResolver, cxxPlatform, CxxDescriptionEnhancer.parseHeaders( params.getBuildTarget(), pathResolver, Optional.of(cxxPlatform), arg), HeaderVisibility.PRIVATE)); if (arg instanceof CxxLibraryDescription.Arg) { CxxLibraryDescription.Arg libArg = (CxxLibraryDescription.Arg) arg; resultBuilder.add( CxxDescriptionEnhancer.requireHeaderSymlinkTree( paramsWithoutFlavor, ruleResolver, pathResolver, cxxPlatform, CxxDescriptionEnhancer.parseExportedHeaders( params.getBuildTarget(), pathResolver, Optional.of(cxxPlatform), libArg), HeaderVisibility.PUBLIC)); } // Walk the transitive deps and add any exported headers present as // runtime dependencies. // // TODO(bhamiltoncx): Use BuildRuleResolver.requireMetadata() so we can // cache the result of this walk. 
AbstractBreadthFirstTraversal<BuildRule> visitor = new AbstractBreadthFirstTraversal<BuildRule>(params.getDeps()) { @Override public ImmutableSet<BuildRule> visit(BuildRule dep) { if (dep instanceof CxxPreprocessorDep) { CxxPreprocessorDep cxxPreprocessorDep = (CxxPreprocessorDep) dep; Optional<HeaderSymlinkTree> exportedHeaderSymlinkTree = cxxPreprocessorDep.getExportedHeaderSymlinkTree(cxxPlatform); if (exportedHeaderSymlinkTree.isPresent()) { resultBuilder.add(exportedHeaderSymlinkTree.get()); } } return dep.getDeps(); } }; visitor.start(); return resultBuilder.build(); } /** * Create all build rules needed to generate the compilation database. * * @return the {@link CxxCompilationDatabase} rule representing the actual compilation database. */ public static CxxCompilationDatabase createCompilationDatabase( BuildRuleParams params, BuildRuleResolver ruleResolver, SourcePathResolver pathResolver, CxxBuckConfig cxxBuckConfig, CxxPlatform cxxPlatform, CxxConstructorArg arg) throws NoSuchBuildTargetException { // Invoking requireObjects has the side-effect of invoking // CxxSourceRuleFactory.requirePreprocessAndCompileRules(), which has the side-effect of // creating CxxPreprocessAndCompile rules and adding them to the ruleResolver. BuildRuleParams paramsWithoutFlavor = params.withoutFlavor( CxxCompilationDatabase.COMPILATION_DATABASE); ImmutableMap<CxxPreprocessAndCompile, SourcePath> objects = requireObjects( paramsWithoutFlavor, ruleResolver, pathResolver, cxxBuckConfig, cxxPlatform, CxxSourceRuleFactory.PicType.PIC, arg); return CxxCompilationDatabase.createCompilationDatabase( params, pathResolver, cxxBuckConfig.getPreprocessMode(), objects.keySet(), requireTransitiveCompilationDatabaseHeaderSymlinkTreeDeps( params, ruleResolver, pathResolver, cxxPlatform, arg)); } public static BuildRule createUberCompilationDatabase( BuildRuleParams params, BuildRuleResolver ruleResolver) throws NoSuchBuildTargetException { Optional<CxxCompilationDatabaseDependencies> compilationDatabases = ruleResolver.requireMetadata( params .withoutFlavor(CxxCompilationDatabase.UBER_COMPILATION_DATABASE) .withFlavor(CxxCompilationDatabase.COMPILATION_DATABASE) .getBuildTarget(), CxxCompilationDatabaseDependencies.class); Preconditions.checkState(compilationDatabases.isPresent()); SourcePathResolver pathResolver = new SourcePathResolver(ruleResolver); return new JsonConcatenate( params.copyWithDeps( Suppliers.ofInstance( ImmutableSortedSet.copyOf( pathResolver.filterBuildRuleInputs( compilationDatabases.get().getSourcePaths()))), Suppliers.ofInstance(ImmutableSortedSet.<BuildRule>of())), pathResolver, ImmutableSortedSet.copyOf( pathResolver.getAllAbsolutePaths(compilationDatabases.get().getSourcePaths())), "compilation-database-concatenate", "Concatenate compilation databases", "uber-compilation-database", "compile_commands.json"); } public static Optional<CxxCompilationDatabaseDependencies> createCompilationDatabaseDependencies( BuildTarget buildTarget, FlavorDomain<CxxPlatform> platforms, BuildRuleResolver resolver, CxxConstructorArg args) throws NoSuchBuildTargetException { Preconditions.checkState( buildTarget.getFlavors().contains(CxxCompilationDatabase.COMPILATION_DATABASE)); Optional<Flavor> cxxPlatformFlavor = platforms.getFlavor(buildTarget); Preconditions.checkState( cxxPlatformFlavor.isPresent(), "Could not find cxx platform in:\n%s", Joiner.on(", ").join(buildTarget.getFlavors())); ImmutableSet.Builder<SourcePath> sourcePaths = ImmutableSet.builder(); for (BuildTarget dep : args.deps.get()) { 
Optional<CxxCompilationDatabaseDependencies> compilationDatabases = resolver.requireMetadata( BuildTarget.builder(dep) .addFlavors(CxxCompilationDatabase.COMPILATION_DATABASE) .addFlavors(cxxPlatformFlavor.get()) .build(), CxxCompilationDatabaseDependencies.class); if (compilationDatabases.isPresent()) { sourcePaths.addAll(compilationDatabases.get().getSourcePaths()); } } // Not all parts of Buck use require yet, so require the rule here so it's available in the // resolver for the parts that don't. resolver.requireRule(buildTarget); sourcePaths.add(new BuildTargetSourcePath(buildTarget)); return Optional.of(CxxCompilationDatabaseDependencies.of(sourcePaths.build())); } public static ImmutableMap<CxxPreprocessAndCompile, SourcePath> requireObjects( BuildRuleParams params, BuildRuleResolver ruleResolver, SourcePathResolver sourcePathResolver, CxxBuckConfig cxxBuckConfig, CxxPlatform cxxPlatform, CxxSourceRuleFactory.PicType pic, CxxConstructorArg args) throws NoSuchBuildTargetException { ImmutableMultimap<CxxSource.Type, String> exportedPreprocessorFlags; ImmutableMap<Path, SourcePath> exportedHeaders; if (args instanceof CxxLibraryDescription.Arg) { CxxLibraryDescription.Arg hasExportedArgs = (CxxLibraryDescription.Arg) args; exportedPreprocessorFlags = CxxFlags.getLanguageFlags( hasExportedArgs.exportedPreprocessorFlags, hasExportedArgs.exportedPlatformPreprocessorFlags, hasExportedArgs.exportedLangPreprocessorFlags, cxxPlatform); exportedHeaders = CxxDescriptionEnhancer.parseExportedHeaders( params.getBuildTarget(), sourcePathResolver, Optional.of(cxxPlatform), hasExportedArgs); } else { exportedPreprocessorFlags = ImmutableMultimap.of(); exportedHeaders = ImmutableMap.of(); } HeaderSymlinkTree headerSymlinkTree = CxxDescriptionEnhancer.requireHeaderSymlinkTree( params, ruleResolver, sourcePathResolver, cxxPlatform, CxxDescriptionEnhancer.parseHeaders( params.getBuildTarget(), sourcePathResolver, Optional.of(cxxPlatform), args), HeaderVisibility.PRIVATE); ImmutableList<CxxPreprocessorInput> cxxPreprocessorInputFromDependencies = CxxDescriptionEnhancer.collectCxxPreprocessorInput( params, cxxPlatform, CxxFlags.getLanguageFlags( args.preprocessorFlags, args.platformPreprocessorFlags, args.langPreprocessorFlags, cxxPlatform), ImmutableList.of(headerSymlinkTree), ImmutableSet.<FrameworkPath>of(), CxxLibraryDescription.getTransitiveCxxPreprocessorInput( params, ruleResolver, sourcePathResolver, cxxPlatform, exportedPreprocessorFlags, exportedHeaders, args.frameworks.or(ImmutableSortedSet.<FrameworkPath>of()))); // Create rule to build the object files. return CxxSourceRuleFactory.requirePreprocessAndCompileRules( params, ruleResolver, sourcePathResolver, cxxBuckConfig, cxxPlatform, cxxPreprocessorInputFromDependencies, CxxFlags.getLanguageFlags( args.compilerFlags, args.platformCompilerFlags, args.langCompilerFlags, cxxPlatform), args.prefixHeader, cxxBuckConfig.getPreprocessMode(), CxxDescriptionEnhancer.parseCxxSources( params.getBuildTarget(), sourcePathResolver, cxxPlatform, args), pic); } /** * @return the {@link BuildTarget} to use for the {@link BuildRule} generating the * symlink tree of shared libraries. */ public static BuildTarget createSharedLibrarySymlinkTreeTarget( BuildTarget target, Flavor platform) { return BuildTarget .builder(target) .addFlavors(SHARED_LIBRARY_SYMLINK_TREE_FLAVOR) .addFlavors(platform) .build(); } /** * @return the {@link Path} to use for the symlink tree of headers. 
*/ public static Path getSharedLibrarySymlinkTreePath( ProjectFilesystem filesystem, BuildTarget target, Flavor platform) { return target.getCellPath().resolve(BuildTargets.getGenPath( filesystem, createSharedLibrarySymlinkTreeTarget(target, platform), "%s")); } /** * Build a {@link HeaderSymlinkTree} of all the shared libraries found via the top-level rule's * transitive dependencies. */ public static SymlinkTree createSharedLibrarySymlinkTree( BuildRuleParams params, SourcePathResolver pathResolver, CxxPlatform cxxPlatform, Iterable<? extends BuildRule> deps, Predicate<Object> traverse) throws NoSuchBuildTargetException { BuildTarget symlinkTreeTarget = createSharedLibrarySymlinkTreeTarget( params.getBuildTarget(), cxxPlatform.getFlavor()); Path symlinkTreeRoot = getSharedLibrarySymlinkTreePath( params.getProjectFilesystem(), params.getBuildTarget(), cxxPlatform.getFlavor()); ImmutableSortedMap<String, SourcePath> libraries = NativeLinkables.getTransitiveSharedLibraries( cxxPlatform, deps, traverse); ImmutableMap.Builder<Path, SourcePath> links = ImmutableMap.builder(); for (Map.Entry<String, SourcePath> ent : libraries.entrySet()) { links.put(Paths.get(ent.getKey()), ent.getValue()); } return new SymlinkTree( params.copyWithChanges( symlinkTreeTarget, Suppliers.ofInstance(ImmutableSortedSet.<BuildRule>of()), Suppliers.ofInstance(ImmutableSortedSet.<BuildRule>of())), pathResolver, symlinkTreeRoot, links.build()); } public static SymlinkTree requireSharedLibrarySymlinkTree( BuildRuleParams params, BuildRuleResolver resolver, SourcePathResolver pathResolver, CxxPlatform cxxPlatform, Iterable<? extends BuildRule> deps, Predicate<Object> traverse) throws NoSuchBuildTargetException { BuildTarget target = createSharedLibrarySymlinkTreeTarget(params.getBuildTarget(), cxxPlatform.getFlavor()); SymlinkTree tree = resolver.getRuleOptionalWithType(target, SymlinkTree.class).orNull(); if (tree == null) { tree = resolver.addToIndex( createSharedLibrarySymlinkTree( params, pathResolver, cxxPlatform, deps, traverse)); } return tree; } public static Flavor flavorForLinkableDepType(Linker.LinkableDepType linkableDepType) { switch (linkableDepType) { case STATIC: return STATIC_FLAVOR; case STATIC_PIC: return STATIC_PIC_FLAVOR; case SHARED: return SHARED_FLAVOR; } throw new RuntimeException( String.format("Unsupported LinkableDepType: '%s'", linkableDepType)); } /** * Resolve the map of names to SourcePaths to a map of names to CxxSource objects. */ private static ImmutableMap<String, CxxSource> resolveCxxSources( ImmutableMap<String, SourceWithFlags> sources) { ImmutableMap.Builder<String, CxxSource> cxxSources = ImmutableMap.builder(); // For each entry in the input C/C++ source, build a CxxSource object to wrap // it's name, input path, and output object file path. for (ImmutableMap.Entry<String, SourceWithFlags> ent : sources.entrySet()) { String extension = Files.getFileExtension(ent.getKey()); Optional<CxxSource.Type> type = CxxSource.Type.fromExtension(extension); if (!type.isPresent()) { throw new HumanReadableException( "invalid extension \"%s\": %s", extension, ent.getKey()); } cxxSources.put( ent.getKey(), CxxSource.of( type.get(), ent.getValue().getSourcePath(), ent.getValue().getFlags())); } return cxxSources.build(); } }
src/com/facebook/buck/cxx/CxxDescriptionEnhancer.java
/* * Copyright 2013-present Facebook, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.facebook.buck.cxx; import com.facebook.buck.graph.AbstractBreadthFirstTraversal; import com.facebook.buck.io.MorePaths; import com.facebook.buck.io.ProjectFilesystem; import com.facebook.buck.json.JsonConcatenate; import com.facebook.buck.log.Logger; import com.facebook.buck.model.BuildTarget; import com.facebook.buck.model.BuildTargets; import com.facebook.buck.model.Flavor; import com.facebook.buck.model.FlavorDomain; import com.facebook.buck.model.ImmutableFlavor; import com.facebook.buck.parser.NoSuchBuildTargetException; import com.facebook.buck.rules.BuildRule; import com.facebook.buck.rules.BuildRuleParams; import com.facebook.buck.rules.BuildRuleResolver; import com.facebook.buck.rules.BuildTargetSourcePath; import com.facebook.buck.rules.CommandTool; import com.facebook.buck.rules.RuleKeyObjectSink; import com.facebook.buck.rules.SourcePath; import com.facebook.buck.rules.SourcePathResolver; import com.facebook.buck.rules.SourceWithFlags; import com.facebook.buck.rules.SymlinkTree; import com.facebook.buck.rules.Tool; import com.facebook.buck.rules.args.Arg; import com.facebook.buck.rules.args.FileListableLinkerInputArg; import com.facebook.buck.rules.args.MacroArg; import com.facebook.buck.rules.args.RuleKeyAppendableFunction; import com.facebook.buck.rules.args.SourcePathArg; import com.facebook.buck.rules.args.StringArg; import com.facebook.buck.rules.coercer.FrameworkPath; import com.facebook.buck.rules.coercer.PatternMatchedCollection; import com.facebook.buck.rules.coercer.SourceList; import com.facebook.buck.rules.macros.LocationMacroExpander; import com.facebook.buck.rules.macros.MacroExpander; import com.facebook.buck.rules.macros.MacroHandler; import com.facebook.buck.util.HumanReadableException; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Function; import com.google.common.base.Functions; import com.google.common.base.Joiner; import com.google.common.base.Optional; import com.google.common.base.Preconditions; import com.google.common.base.Predicate; import com.google.common.base.Predicates; import com.google.common.base.Suppliers; import com.google.common.collect.FluentIterable; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMultimap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.ImmutableSortedMap; import com.google.common.collect.ImmutableSortedSet; import com.google.common.io.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.util.Map; import java.util.regex.Matcher; import java.util.regex.Pattern; public class CxxDescriptionEnhancer { private static final Logger LOG = Logger.get(CxxDescriptionEnhancer.class); public static final Flavor HEADER_SYMLINK_TREE_FLAVOR = ImmutableFlavor.of("private-headers"); public static final Flavor EXPORTED_HEADER_SYMLINK_TREE_FLAVOR = 
ImmutableFlavor.of("headers"); public static final Flavor STATIC_FLAVOR = ImmutableFlavor.of("static"); public static final Flavor STATIC_PIC_FLAVOR = ImmutableFlavor.of("static-pic"); public static final Flavor SHARED_FLAVOR = ImmutableFlavor.of("shared"); public static final Flavor MACH_O_BUNDLE_FLAVOR = ImmutableFlavor.of("mach-o-bundle"); public static final Flavor SHARED_LIBRARY_SYMLINK_TREE_FLAVOR = ImmutableFlavor.of("shared-library-symlink-tree"); public static final Flavor CXX_LINK_BINARY_FLAVOR = ImmutableFlavor.of("binary"); protected static final MacroHandler MACRO_HANDLER = new MacroHandler( ImmutableMap.<String, MacroExpander>of( "location", new LocationMacroExpander())); private static final Pattern SONAME_EXT_MACRO_PATTERN = Pattern.compile("\\$\\(ext(?: ([.0-9]+))?\\)"); private CxxDescriptionEnhancer() {} public static HeaderSymlinkTree createHeaderSymlinkTree( BuildRuleParams params, BuildRuleResolver resolver, SourcePathResolver pathResolver, CxxPlatform cxxPlatform, ImmutableMap<Path, SourcePath> headers, HeaderVisibility headerVisibility) { BuildTarget headerSymlinkTreeTarget = CxxDescriptionEnhancer.createHeaderSymlinkTreeTarget( params.getBuildTarget(), cxxPlatform.getFlavor(), headerVisibility); Path headerSymlinkTreeRoot = CxxDescriptionEnhancer.getHeaderSymlinkTreePath( params.getProjectFilesystem(), params.getBuildTarget(), cxxPlatform.getFlavor(), headerVisibility); Optional<Path> headerMapLocation = Optional.absent(); if (cxxPlatform.getCpp().resolve(resolver).supportsHeaderMaps() && cxxPlatform.getCxxpp().resolve(resolver).supportsHeaderMaps()) { headerMapLocation = Optional.of( getHeaderMapPath( params.getProjectFilesystem(), params.getBuildTarget(), cxxPlatform.getFlavor(), headerVisibility)); } return CxxPreprocessables.createHeaderSymlinkTreeBuildRule( pathResolver, headerSymlinkTreeTarget, params, headerSymlinkTreeRoot, headerMapLocation, headers); } public static HeaderSymlinkTree requireHeaderSymlinkTree( BuildRuleParams params, BuildRuleResolver ruleResolver, SourcePathResolver pathResolver, CxxPlatform cxxPlatform, ImmutableMap<Path, SourcePath> headers, HeaderVisibility headerVisibility) { BuildTarget headerSymlinkTreeTarget = CxxDescriptionEnhancer.createHeaderSymlinkTreeTarget( params.getBuildTarget(), cxxPlatform.getFlavor(), headerVisibility); // Check the cache... Optional<BuildRule> rule = ruleResolver.getRuleOptional(headerSymlinkTreeTarget); if (rule.isPresent()) { Preconditions.checkState(rule.get() instanceof HeaderSymlinkTree); return (HeaderSymlinkTree) rule.get(); } HeaderSymlinkTree symlinkTree = createHeaderSymlinkTree( params, ruleResolver, pathResolver, cxxPlatform, headers, headerVisibility); ruleResolver.addToIndex(symlinkTree); return symlinkTree; } /** * @return the {@link BuildTarget} to use for the {@link BuildRule} generating the * symlink tree of headers. */ public static BuildTarget createHeaderSymlinkTreeTarget( BuildTarget target, Flavor platform, HeaderVisibility headerVisibility) { return BuildTarget .builder(target) .addFlavors(platform) .addFlavors(getHeaderSymlinkTreeFlavor(headerVisibility)) .build(); } /** * @return the absolute {@link Path} to use for the symlink tree of headers. 
*/ public static Path getHeaderSymlinkTreePath( ProjectFilesystem filesystem, BuildTarget target, Flavor platform, HeaderVisibility headerVisibility) { return target.getCellPath().resolve( BuildTargets.getGenPath( filesystem, createHeaderSymlinkTreeTarget(target, platform, headerVisibility), "%s")); } public static Flavor getHeaderSymlinkTreeFlavor(HeaderVisibility headerVisibility) { switch (headerVisibility) { case PUBLIC: return EXPORTED_HEADER_SYMLINK_TREE_FLAVOR; case PRIVATE: return HEADER_SYMLINK_TREE_FLAVOR; default: throw new RuntimeException("Unexpected value of enum ExportMode"); } } /** * @return the {@link Path} to use for the header map for the given symlink tree. */ public static Path getHeaderMapPath( ProjectFilesystem filesystem, BuildTarget target, Flavor platform, HeaderVisibility headerVisibility) { return BuildTargets.getGenPath( filesystem, createHeaderSymlinkTreeTarget(target, platform, headerVisibility), "%s.hmap"); } /** * @return a map of header locations to input {@link SourcePath} objects formed by parsing the * input {@link SourcePath} objects for the "headers" parameter. */ public static ImmutableMap<Path, SourcePath> parseHeaders( BuildTarget buildTarget, SourcePathResolver resolver, Optional<CxxPlatform> cxxPlatform, CxxConstructorArg args) { ImmutableMap.Builder<String, SourcePath> headers = ImmutableMap.builder(); putAllHeaders(args.headers.get(), headers, resolver, "headers", buildTarget); if (cxxPlatform.isPresent()) { for (SourceList sourceList : args.platformHeaders.get().getMatchingValues( cxxPlatform.get().getFlavor().toString())) { putAllHeaders( sourceList, headers, resolver, "platform_headers", buildTarget); } } return CxxPreprocessables.resolveHeaderMap( args.headerNamespace.transform(MorePaths.TO_PATH) .or(buildTarget.getBasePath()), headers.build()); } /** * @return a map of header locations to input {@link SourcePath} objects formed by parsing the * input {@link SourcePath} objects for the "exportedHeaders" parameter. */ public static ImmutableMap<Path, SourcePath> parseExportedHeaders( BuildTarget buildTarget, SourcePathResolver resolver, Optional<CxxPlatform> cxxPlatform, CxxLibraryDescription.Arg args) { ImmutableMap.Builder<String, SourcePath> headers = ImmutableMap.builder(); putAllHeaders( args.exportedHeaders.get(), headers, resolver, "exported_headers", buildTarget); if (cxxPlatform.isPresent()) { for (SourceList sourceList : args.exportedPlatformHeaders.get().getMatchingValues( cxxPlatform.get().getFlavor().toString())) { putAllHeaders( sourceList, headers, resolver, "exported_platform_headers", buildTarget); } } return CxxPreprocessables.resolveHeaderMap( args.headerNamespace.transform(MorePaths.TO_PATH) .or(buildTarget.getBasePath()), headers.build()); } /** * Resolves the headers in `sourceList` and puts them into `sources` for the specificed * `buildTarget`. */ public static void putAllHeaders( SourceList sourceList, ImmutableMap.Builder<String, SourcePath> sources, SourcePathResolver sourcePathResolver, String parameterName, BuildTarget buildTarget) { switch (sourceList.getType()) { case NAMED: sources.putAll(sourceList.getNamedSources().get()); break; case UNNAMED: sources.putAll( sourcePathResolver.getSourcePathNames( buildTarget, parameterName, sourceList.getUnnamedSources().get())); break; } } /** * @return a list {@link CxxSource} objects formed by parsing the input {@link SourcePath} * objects for the "srcs" parameter. 
*/ public static ImmutableMap<String, CxxSource> parseCxxSources( BuildTarget buildTarget, SourcePathResolver resolver, CxxPlatform cxxPlatform, CxxConstructorArg args) { return parseCxxSources( buildTarget, resolver, cxxPlatform, args.srcs.get(), args.platformSrcs.get()); } public static ImmutableMap<String, CxxSource> parseCxxSources( BuildTarget buildTarget, SourcePathResolver resolver, CxxPlatform cxxPlatform, ImmutableSortedSet<SourceWithFlags> srcs, PatternMatchedCollection<ImmutableSortedSet<SourceWithFlags>> platformSrcs) { ImmutableMap.Builder<String, SourceWithFlags> sources = ImmutableMap.builder(); putAllSources(srcs, sources, resolver, buildTarget); for (ImmutableSortedSet<SourceWithFlags> sourcesWithFlags : platformSrcs.getMatchingValues(cxxPlatform.getFlavor().toString())) { putAllSources(sourcesWithFlags, sources, resolver, buildTarget); } return resolveCxxSources(sources.build()); } private static void putAllSources( ImmutableSortedSet<SourceWithFlags> sourcesWithFlags, ImmutableMap.Builder<String, SourceWithFlags> sources, SourcePathResolver pathResolver, BuildTarget buildTarget) { sources.putAll( pathResolver.getSourcePathNames( buildTarget, "srcs", sourcesWithFlags, SourceWithFlags.TO_SOURCE_PATH)); } public static ImmutableList<CxxPreprocessorInput> collectCxxPreprocessorInput( BuildRuleParams params, CxxPlatform cxxPlatform, ImmutableMultimap<CxxSource.Type, String> preprocessorFlags, ImmutableList<HeaderSymlinkTree> headerSymlinkTrees, ImmutableSet<FrameworkPath> frameworks, Iterable<CxxPreprocessorInput> cxxPreprocessorInputFromDeps) throws NoSuchBuildTargetException { // Add the private includes of any rules which this rule depends on, and which list this rule as // a test. BuildTarget targetWithoutFlavor = BuildTarget.of( params.getBuildTarget().getUnflavoredBuildTarget()); ImmutableList.Builder<CxxPreprocessorInput> cxxPreprocessorInputFromTestedRulesBuilder = ImmutableList.builder(); for (BuildRule rule : params.getDeps()) { if (rule instanceof NativeTestable) { NativeTestable testable = (NativeTestable) rule; if (testable.isTestedBy(targetWithoutFlavor)) { LOG.debug( "Adding private includes of tested rule %s to testing rule %s", rule.getBuildTarget(), params.getBuildTarget()); cxxPreprocessorInputFromTestedRulesBuilder.add( testable.getCxxPreprocessorInput( cxxPlatform, HeaderVisibility.PRIVATE)); // Add any dependent headers cxxPreprocessorInputFromTestedRulesBuilder.addAll( CxxPreprocessables.getTransitiveCxxPreprocessorInput( cxxPlatform, ImmutableList.of(rule))); } } } ImmutableList<CxxPreprocessorInput> cxxPreprocessorInputFromTestedRules = cxxPreprocessorInputFromTestedRulesBuilder.build(); LOG.verbose( "Rules tested by target %s added private includes %s", params.getBuildTarget(), cxxPreprocessorInputFromTestedRules); ImmutableList.Builder<CxxHeaders> allIncludes = ImmutableList.builder(); for (HeaderSymlinkTree headerSymlinkTree : headerSymlinkTrees) { allIncludes.add( CxxSymlinkTreeHeaders.from(headerSymlinkTree, CxxPreprocessables.IncludeType.LOCAL)); } CxxPreprocessorInput localPreprocessorInput = CxxPreprocessorInput.builder() .putAllPreprocessorFlags(preprocessorFlags) .addAllIncludes(allIncludes.build()) .addAllFrameworks(frameworks) .build(); return ImmutableList.<CxxPreprocessorInput>builder() .add(localPreprocessorInput) .addAll(cxxPreprocessorInputFromDeps) .addAll(cxxPreprocessorInputFromTestedRules) .build(); } public static BuildTarget createStaticLibraryBuildTarget( BuildTarget target, Flavor platform, CxxSourceRuleFactory.PicType pic) { 
return BuildTarget.builder(target) .addFlavors(platform) .addFlavors(pic == CxxSourceRuleFactory.PicType.PDC ? STATIC_FLAVOR : STATIC_PIC_FLAVOR) .build(); } public static BuildTarget createSharedLibraryBuildTarget( BuildTarget target, Flavor platform, Linker.LinkType linkType) { Flavor linkFlavor; switch (linkType) { case SHARED: linkFlavor = SHARED_FLAVOR; break; case MACH_O_BUNDLE: linkFlavor = MACH_O_BUNDLE_FLAVOR; break; case EXECUTABLE: default: throw new IllegalStateException( "Only SHARED and MACH_O_BUNDLE types expected, got: " + linkType); } return BuildTarget.builder(target).addFlavors(platform).addFlavors(linkFlavor).build(); } public static Path getStaticLibraryPath( ProjectFilesystem filesystem, BuildTarget target, Flavor platform, CxxSourceRuleFactory.PicType pic) { String name = String.format("lib%s.a", target.getShortName()); return BuildTargets .getGenPath(filesystem, createStaticLibraryBuildTarget(target, platform, pic), "%s") .resolve(name); } public static String getSharedLibrarySoname( Optional<String> declaredSoname, BuildTarget target, CxxPlatform platform) { if (!declaredSoname.isPresent()) { return getDefaultSharedLibrarySoname(target, platform); } return getNonDefaultSharedLibrarySoname( declaredSoname.get(), platform.getSharedLibraryExtension(), platform.getSharedLibraryVersionedExtensionFormat()); } @VisibleForTesting static String getNonDefaultSharedLibrarySoname( String declared, String sharedLibraryExtension, String sharedLibraryVersionedExtensionFormat) { Matcher match = SONAME_EXT_MACRO_PATTERN.matcher(declared); if (!match.find()) { return declared; } String version = match.group(1); if (version == null) { return match.replaceFirst(sharedLibraryExtension); } return match.replaceFirst( String.format( sharedLibraryVersionedExtensionFormat, version)); } public static String getDefaultSharedLibrarySoname(BuildTarget target, CxxPlatform platform) { String libName = Joiner.on('_').join( ImmutableList.builder() .addAll( FluentIterable.from(target.getBasePath()) .transform(Functions.toStringFunction()) .filter(Predicates.not(Predicates.equalTo("")))) .add( target .withoutFlavors(ImmutableSet.of(platform.getFlavor())) .getShortNameAndFlavorPostfix()) .build()); String extension = platform.getSharedLibraryExtension(); return String.format("lib%s.%s", libName, extension); } public static Path getSharedLibraryPath( ProjectFilesystem filesystem, BuildTarget sharedLibraryTarget, String soname) { return BuildTargets.getGenPath(filesystem, sharedLibraryTarget, "%s/" + soname); } @VisibleForTesting protected static Path getLinkOutputPath(BuildTarget target, ProjectFilesystem filesystem) { return BuildTargets.getGenPath(filesystem, target, "%s"); } @VisibleForTesting protected static BuildTarget createCxxLinkTarget(BuildTarget target) { return BuildTarget.builder(target).addFlavors(CXX_LINK_BINARY_FLAVOR).build(); } /** * @return a function that transforms the {@link FrameworkPath} to search paths with any embedded * macros expanded. 
*/ static RuleKeyAppendableFunction<FrameworkPath, Path> frameworkPathToSearchPath( final CxxPlatform cxxPlatform, final SourcePathResolver resolver) { return new RuleKeyAppendableFunction<FrameworkPath, Path>() { private RuleKeyAppendableFunction<String, String> translateMacrosFn = CxxFlags.getTranslateMacrosFn(cxxPlatform); @Override public void appendToRuleKey(RuleKeyObjectSink sink) { sink.setReflectively("translateMacrosFn", translateMacrosFn); } @Override public Path apply(FrameworkPath input) { Function<FrameworkPath, Path> convertToPath = FrameworkPath.getUnexpandedSearchPathFunction( resolver.getAbsolutePathFunction(), Functions.<Path>identity()); String pathAsString = convertToPath.apply(input).toString(); return Paths.get(translateMacrosFn.apply(pathAsString)); } }; } public static CxxLinkAndCompileRules createBuildRulesForCxxBinaryDescriptionArg( BuildRuleParams params, BuildRuleResolver resolver, CxxBuckConfig cxxBuckConfig, CxxPlatform cxxPlatform, CxxBinaryDescription.Arg args, Optional<StripStyle> stripStyle) throws NoSuchBuildTargetException { SourcePathResolver sourcePathResolver = new SourcePathResolver(resolver); ImmutableMap<String, CxxSource> srcs = parseCxxSources( params.getBuildTarget(), sourcePathResolver, cxxPlatform, args); ImmutableMap<Path, SourcePath> headers = parseHeaders( params.getBuildTarget(), new SourcePathResolver(resolver), Optional.of(cxxPlatform), args); return createBuildRulesForCxxBinary( params, resolver, cxxBuckConfig, cxxPlatform, srcs, headers, stripStyle, args.linkStyle.or(Linker.LinkableDepType.STATIC), args.preprocessorFlags, args.platformPreprocessorFlags, args.langPreprocessorFlags, args.frameworks, args.libraries, args.compilerFlags, args.langCompilerFlags, args.platformCompilerFlags, args.prefixHeader, args.linkerFlags, args.platformLinkerFlags, args.cxxRuntimeType); } public static CxxLinkAndCompileRules createBuildRulesForCxxBinary( BuildRuleParams params, BuildRuleResolver resolver, CxxBuckConfig cxxBuckConfig, CxxPlatform cxxPlatform, ImmutableMap<String, CxxSource> srcs, ImmutableMap<Path, SourcePath> headers, Optional<StripStyle> stripStyle, Linker.LinkableDepType linkStyle, Optional<ImmutableList<String>> preprocessorFlags, Optional<PatternMatchedCollection<ImmutableList<String>>> platformPreprocessorFlags, Optional<ImmutableMap<CxxSource.Type, ImmutableList<String>>> langPreprocessorFlags, Optional<ImmutableSortedSet<FrameworkPath>> frameworks, Optional<ImmutableSortedSet<FrameworkPath>> libraries, Optional<ImmutableList<String>> compilerFlags, Optional<ImmutableMap<CxxSource.Type, ImmutableList<String>>> langCompilerFlags, Optional<PatternMatchedCollection<ImmutableList<String>>> platformCompilerFlags, Optional<SourcePath> prefixHeader, Optional<ImmutableList<String>> linkerFlags, Optional<PatternMatchedCollection<ImmutableList<String>>> platformLinkerFlags, Optional<Linker.CxxRuntimeType> cxxRuntimeType) throws NoSuchBuildTargetException { SourcePathResolver sourcePathResolver = new SourcePathResolver(resolver); Path linkOutput = getLinkOutputPath(params.getBuildTarget(), params.getProjectFilesystem()); ImmutableList.Builder<Arg> argsBuilder = ImmutableList.builder(); CommandTool.Builder executableBuilder = new CommandTool.Builder(); // Setup the header symlink tree and combine all the preprocessor input from this rule // and all dependencies. 
HeaderSymlinkTree headerSymlinkTree = requireHeaderSymlinkTree( params, resolver, sourcePathResolver, cxxPlatform, headers, HeaderVisibility.PRIVATE); ImmutableList<CxxPreprocessorInput> cxxPreprocessorInput = collectCxxPreprocessorInput( params, cxxPlatform, CxxFlags.getLanguageFlags( preprocessorFlags, platformPreprocessorFlags, langPreprocessorFlags, cxxPlatform), ImmutableList.of(headerSymlinkTree), frameworks.get(), CxxPreprocessables.getTransitiveCxxPreprocessorInput( cxxPlatform, FluentIterable.from(params.getDeps()) .filter(Predicates.instanceOf(CxxPreprocessorDep.class)))); // Generate and add all the build rules to preprocess and compile the source to the // resolver and get the `SourcePath`s representing the generated object files. ImmutableMap<CxxPreprocessAndCompile, SourcePath> objects = CxxSourceRuleFactory.requirePreprocessAndCompileRules( params, resolver, sourcePathResolver, cxxBuckConfig, cxxPlatform, cxxPreprocessorInput, CxxFlags.getLanguageFlags( compilerFlags, platformCompilerFlags, langCompilerFlags, cxxPlatform), prefixHeader, cxxBuckConfig.getPreprocessMode(), srcs, linkStyle == Linker.LinkableDepType.STATIC ? CxxSourceRuleFactory.PicType.PDC : CxxSourceRuleFactory.PicType.PIC); // Build up the linker flags, which support macro expansion. ImmutableList<String> resolvedLinkerFlags = CxxFlags.getFlags( linkerFlags, platformLinkerFlags, cxxPlatform); argsBuilder.addAll( FluentIterable.from(resolvedLinkerFlags) .transform( MacroArg.toMacroArgFunction( MACRO_HANDLER, params.getBuildTarget(), params.getCellRoots(), resolver))); // Special handling for dynamically linked binaries. if (linkStyle == Linker.LinkableDepType.SHARED) { // Create a symlink tree with for all shared libraries needed by this binary. SymlinkTree sharedLibraries = resolver.addToIndex( createSharedLibrarySymlinkTree( params, sourcePathResolver, cxxPlatform, params.getDeps(), Predicates.instanceOf(NativeLinkable.class))); // Embed a origin-relative library path into the binary so it can find the shared libraries. // The shared libraries root is absolute. Also need an absolute path to the linkOutput Path absLinkOut = params.getBuildTarget().getCellPath().resolve(linkOutput); argsBuilder.addAll( StringArg.from( Linkers.iXlinker( "-rpath", String.format( "%s/%s", cxxPlatform.getLd().resolve(resolver).origin(), absLinkOut.getParent().relativize(sharedLibraries.getRoot()).toString())))); // Add all the shared libraries and the symlink tree as inputs to the tool that represents // this binary, so that users can attach the proper deps. executableBuilder.addDep(sharedLibraries); executableBuilder.addInputs(sharedLibraries.getLinks().values()); } // Add object files into the args. 
ImmutableList<SourcePathArg> objectArgs = FluentIterable .from(SourcePathArg.from(sourcePathResolver, objects.values())) .transform(new Function<Arg, SourcePathArg>() { @Override public SourcePathArg apply(Arg input) { Preconditions.checkArgument(input instanceof SourcePathArg); return (SourcePathArg) input; } }) .toList(); argsBuilder.addAll(FileListableLinkerInputArg.from(objectArgs)); BuildTarget linkRuleTarget = createCxxLinkTarget(params.getBuildTarget()); CxxLink cxxLink = createCxxLinkRule( params, resolver, cxxBuckConfig, cxxPlatform, linkStyle, frameworks, libraries, cxxRuntimeType, sourcePathResolver, linkOutput, argsBuilder, linkRuleTarget); BuildRule binaryRuleForExecutable; Optional<CxxStrip> cxxStrip = Optional.absent(); if (stripStyle.isPresent()) { CxxStrip stripRule = createCxxStripRule( params, resolver, cxxPlatform.getStrip(), stripStyle.get(), sourcePathResolver, cxxLink); cxxStrip = Optional.of(stripRule); binaryRuleForExecutable = stripRule; } else { binaryRuleForExecutable = cxxLink; } // Add the output of the link as the lone argument needed to invoke this binary as a tool. executableBuilder.addArg( new SourcePathArg( sourcePathResolver, new BuildTargetSourcePath(binaryRuleForExecutable.getBuildTarget()))); return new CxxLinkAndCompileRules( cxxLink, cxxStrip, ImmutableSortedSet.copyOf(objects.keySet()), executableBuilder.build()); } private static CxxLink createCxxLinkRule( BuildRuleParams params, BuildRuleResolver resolver, CxxBuckConfig cxxBuckConfig, CxxPlatform cxxPlatform, Linker.LinkableDepType linkStyle, Optional<ImmutableSortedSet<FrameworkPath>> frameworks, Optional<ImmutableSortedSet<FrameworkPath>> libraries, Optional<Linker.CxxRuntimeType> cxxRuntimeType, SourcePathResolver sourcePathResolver, Path linkOutput, ImmutableList.Builder<Arg> argsBuilder, BuildTarget linkRuleTarget) throws NoSuchBuildTargetException { CxxLink cxxLink; Optional<BuildRule> existingCxxLinkRule = resolver.getRuleOptional(linkRuleTarget); if (existingCxxLinkRule.isPresent()) { Preconditions.checkArgument(existingCxxLinkRule.get() instanceof CxxLink); cxxLink = (CxxLink) existingCxxLinkRule.get(); } else { // Generate the final link rule. We use the top-level target as the link rule's // target, so that it corresponds to the actual binary we build. 
cxxLink = CxxLinkableEnhancer.createCxxLinkableBuildRule( cxxBuckConfig, cxxPlatform, params, resolver, sourcePathResolver, linkRuleTarget, Linker.LinkType.EXECUTABLE, Optional.<String>absent(), linkOutput, linkStyle, FluentIterable.from(params.getDeps()) .filter(NativeLinkable.class), cxxRuntimeType, Optional.<SourcePath>absent(), ImmutableSet.<BuildTarget>of(), NativeLinkableInput.builder() .setArgs(argsBuilder.build()) .setFrameworks(frameworks.or(ImmutableSortedSet.<FrameworkPath>of())) .setLibraries(libraries.or(ImmutableSortedSet.<FrameworkPath>of())) .build()); resolver.addToIndex(cxxLink); } return cxxLink; } public static CxxStrip createCxxStripRule( BuildRuleParams params, BuildRuleResolver resolver, Tool stripTool, StripStyle stripStyle, SourcePathResolver sourcePathResolver, BuildRule unstrippedBinaryRule) { BuildRuleParams stripRuleParams = params .copyWithChanges( params.getBuildTarget().withAppendedFlavors( CxxStrip.RULE_FLAVOR, stripStyle.getFlavor()), Suppliers.ofInstance(ImmutableSortedSet.of(unstrippedBinaryRule)), Suppliers.ofInstance(ImmutableSortedSet.<BuildRule>of())); Optional<BuildRule> exisitingRule = resolver.getRuleOptional(stripRuleParams.getBuildTarget()); if (exisitingRule.isPresent()) { Preconditions.checkArgument(exisitingRule.get() instanceof CxxStrip); return (CxxStrip) exisitingRule.get(); } else { CxxStrip cxxStrip = new CxxStrip( stripRuleParams, sourcePathResolver, stripStyle, new BuildTargetSourcePath(unstrippedBinaryRule.getBuildTarget()), stripTool, CxxDescriptionEnhancer.getLinkOutputPath( stripRuleParams.getBuildTarget(), params.getProjectFilesystem())); resolver.addToIndex(cxxStrip); return cxxStrip; } } public static ImmutableSortedSet<HeaderSymlinkTree> requireTransitiveCompilationDatabaseHeaderSymlinkTreeDeps( BuildRuleParams params, BuildRuleResolver ruleResolver, SourcePathResolver pathResolver, final CxxPlatform cxxPlatform, CxxConstructorArg arg) { BuildRuleParams paramsWithoutFlavor = params.withoutFlavor( CxxCompilationDatabase.COMPILATION_DATABASE); final ImmutableSortedSet.Builder<HeaderSymlinkTree> resultBuilder = ImmutableSortedSet.naturalOrder(); resultBuilder.add( CxxDescriptionEnhancer.requireHeaderSymlinkTree( paramsWithoutFlavor, ruleResolver, pathResolver, cxxPlatform, CxxDescriptionEnhancer.parseHeaders( params.getBuildTarget(), pathResolver, Optional.of(cxxPlatform), arg), HeaderVisibility.PRIVATE)); if (arg instanceof CxxLibraryDescription.Arg) { CxxLibraryDescription.Arg libArg = (CxxLibraryDescription.Arg) arg; resultBuilder.add( CxxDescriptionEnhancer.requireHeaderSymlinkTree( paramsWithoutFlavor, ruleResolver, pathResolver, cxxPlatform, CxxDescriptionEnhancer.parseExportedHeaders( params.getBuildTarget(), pathResolver, Optional.of(cxxPlatform), libArg), HeaderVisibility.PUBLIC)); } // Walk the transitive deps and add any exported headers present as // runtime dependencies. // // TODO(bhamiltoncx): Use BuildRuleResolver.requireMetadata() so we can // cache the result of this walk. 
AbstractBreadthFirstTraversal<BuildRule> visitor = new AbstractBreadthFirstTraversal<BuildRule>(params.getDeps()) { @Override public ImmutableSet<BuildRule> visit(BuildRule dep) { if (dep instanceof CxxPreprocessorDep) { CxxPreprocessorDep cxxPreprocessorDep = (CxxPreprocessorDep) dep; Optional<HeaderSymlinkTree> exportedHeaderSymlinkTree = cxxPreprocessorDep.getExportedHeaderSymlinkTree(cxxPlatform); if (exportedHeaderSymlinkTree.isPresent()) { resultBuilder.add(exportedHeaderSymlinkTree.get()); } } return dep.getDeps(); } }; visitor.start(); return resultBuilder.build(); } /** * Create all build rules needed to generate the compilation database. * * @return the {@link CxxCompilationDatabase} rule representing the actual compilation database. */ public static CxxCompilationDatabase createCompilationDatabase( BuildRuleParams params, BuildRuleResolver ruleResolver, SourcePathResolver pathResolver, CxxBuckConfig cxxBuckConfig, CxxPlatform cxxPlatform, CxxConstructorArg arg) throws NoSuchBuildTargetException { // Invoking requireObjects has the side-effect of invoking // CxxSourceRuleFactory.requirePreprocessAndCompileRules(), which has the side-effect of // creating CxxPreprocessAndCompile rules and adding them to the ruleResolver. BuildRuleParams paramsWithoutFlavor = params.withoutFlavor( CxxCompilationDatabase.COMPILATION_DATABASE); ImmutableMap<CxxPreprocessAndCompile, SourcePath> objects = requireObjects( paramsWithoutFlavor, ruleResolver, pathResolver, cxxBuckConfig, cxxPlatform, CxxSourceRuleFactory.PicType.PIC, arg); return CxxCompilationDatabase.createCompilationDatabase( params, pathResolver, cxxBuckConfig.getPreprocessMode(), objects.keySet(), requireTransitiveCompilationDatabaseHeaderSymlinkTreeDeps( params, ruleResolver, pathResolver, cxxPlatform, arg)); } public static BuildRule createUberCompilationDatabase( BuildRuleParams params, BuildRuleResolver ruleResolver) throws NoSuchBuildTargetException { Optional<CxxCompilationDatabaseDependencies> compilationDatabases = ruleResolver.requireMetadata( params .withoutFlavor(CxxCompilationDatabase.UBER_COMPILATION_DATABASE) .withFlavor(CxxCompilationDatabase.COMPILATION_DATABASE) .getBuildTarget(), CxxCompilationDatabaseDependencies.class); Preconditions.checkState(compilationDatabases.isPresent()); SourcePathResolver pathResolver = new SourcePathResolver(ruleResolver); return new JsonConcatenate( params.copyWithDeps( Suppliers.ofInstance( ImmutableSortedSet.copyOf( pathResolver.filterBuildRuleInputs( compilationDatabases.get().getSourcePaths()))), Suppliers.ofInstance(ImmutableSortedSet.<BuildRule>of())), pathResolver, ImmutableSortedSet.copyOf( pathResolver.getAllAbsolutePaths(compilationDatabases.get().getSourcePaths())), "compilation-database-concatenate", "Concatenate compilation databases", "uber-compilation-database", "compile_commands.json"); } public static Optional<CxxCompilationDatabaseDependencies> createCompilationDatabaseDependencies( BuildTarget buildTarget, FlavorDomain<CxxPlatform> platforms, BuildRuleResolver resolver, CxxConstructorArg args) throws NoSuchBuildTargetException { Preconditions.checkState( buildTarget.getFlavors().contains(CxxCompilationDatabase.COMPILATION_DATABASE)); Optional<Flavor> cxxPlatformFlavor = platforms.getFlavor(buildTarget); Preconditions.checkState( cxxPlatformFlavor.isPresent(), "Could not find cxx platform in:\n%s", Joiner.on(", ").join(buildTarget.getFlavors())); ImmutableSet.Builder<SourcePath> sourcePaths = ImmutableSet.builder(); for (BuildTarget dep : args.deps.get()) { 
Optional<CxxCompilationDatabaseDependencies> compilationDatabases = resolver.requireMetadata( BuildTarget.builder(dep) .addFlavors(CxxCompilationDatabase.COMPILATION_DATABASE) .addFlavors(cxxPlatformFlavor.get()) .build(), CxxCompilationDatabaseDependencies.class); if (compilationDatabases.isPresent()) { sourcePaths.addAll(compilationDatabases.get().getSourcePaths()); } } // Not all parts of Buck use require yet, so require the rule here so it's available in the // resolver for the parts that don't. resolver.requireRule(buildTarget); sourcePaths.add(new BuildTargetSourcePath(buildTarget)); return Optional.of(CxxCompilationDatabaseDependencies.of(sourcePaths.build())); } public static ImmutableMap<CxxPreprocessAndCompile, SourcePath> requireObjects( BuildRuleParams params, BuildRuleResolver ruleResolver, SourcePathResolver sourcePathResolver, CxxBuckConfig cxxBuckConfig, CxxPlatform cxxPlatform, CxxSourceRuleFactory.PicType pic, CxxConstructorArg args) throws NoSuchBuildTargetException { ImmutableMultimap<CxxSource.Type, String> exportedPreprocessorFlags; ImmutableMap<Path, SourcePath> exportedHeaders; if (args instanceof CxxLibraryDescription.Arg) { CxxLibraryDescription.Arg hasExportedArgs = (CxxLibraryDescription.Arg) args; exportedPreprocessorFlags = CxxFlags.getLanguageFlags( hasExportedArgs.exportedPreprocessorFlags, hasExportedArgs.exportedPlatformPreprocessorFlags, hasExportedArgs.exportedLangPreprocessorFlags, cxxPlatform); exportedHeaders = CxxDescriptionEnhancer.parseExportedHeaders( params.getBuildTarget(), sourcePathResolver, Optional.of(cxxPlatform), hasExportedArgs); } else { exportedPreprocessorFlags = ImmutableMultimap.of(); exportedHeaders = ImmutableMap.of(); } HeaderSymlinkTree headerSymlinkTree = CxxDescriptionEnhancer.requireHeaderSymlinkTree( params, ruleResolver, sourcePathResolver, cxxPlatform, CxxDescriptionEnhancer.parseHeaders( params.getBuildTarget(), sourcePathResolver, Optional.of(cxxPlatform), args), HeaderVisibility.PRIVATE); ImmutableList<CxxPreprocessorInput> cxxPreprocessorInputFromDependencies = CxxDescriptionEnhancer.collectCxxPreprocessorInput( params, cxxPlatform, CxxFlags.getLanguageFlags( args.preprocessorFlags, args.platformPreprocessorFlags, args.langPreprocessorFlags, cxxPlatform), ImmutableList.of(headerSymlinkTree), ImmutableSet.<FrameworkPath>of(), CxxLibraryDescription.getTransitiveCxxPreprocessorInput( params, ruleResolver, sourcePathResolver, cxxPlatform, exportedPreprocessorFlags, exportedHeaders, args.frameworks.or(ImmutableSortedSet.<FrameworkPath>of()))); // Create rule to build the object files. return CxxSourceRuleFactory.requirePreprocessAndCompileRules( params, ruleResolver, sourcePathResolver, cxxBuckConfig, cxxPlatform, cxxPreprocessorInputFromDependencies, CxxFlags.getLanguageFlags( args.compilerFlags, args.platformCompilerFlags, args.langCompilerFlags, cxxPlatform), args.prefixHeader, cxxBuckConfig.getPreprocessMode(), CxxDescriptionEnhancer.parseCxxSources( params.getBuildTarget(), sourcePathResolver, cxxPlatform, args), pic); } /** * @return the {@link BuildTarget} to use for the {@link BuildRule} generating the * symlink tree of shared libraries. */ public static BuildTarget createSharedLibrarySymlinkTreeTarget( BuildTarget target, Flavor platform) { return BuildTarget .builder(target) .addFlavors(SHARED_LIBRARY_SYMLINK_TREE_FLAVOR) .addFlavors(platform) .build(); } /** * @return the {@link Path} to use for the symlink tree of headers. 
*/ public static Path getSharedLibrarySymlinkTreePath( ProjectFilesystem filesystem, BuildTarget target, Flavor platform) { return target.getCellPath().resolve(BuildTargets.getGenPath( filesystem, createSharedLibrarySymlinkTreeTarget(target, platform), "%s")); } /** * Build a {@link HeaderSymlinkTree} of all the shared libraries found via the top-level rule's * transitive dependencies. */ public static SymlinkTree createSharedLibrarySymlinkTree( BuildRuleParams params, SourcePathResolver pathResolver, CxxPlatform cxxPlatform, Iterable<? extends BuildRule> deps, Predicate<Object> traverse) throws NoSuchBuildTargetException { BuildTarget symlinkTreeTarget = createSharedLibrarySymlinkTreeTarget( params.getBuildTarget(), cxxPlatform.getFlavor()); Path symlinkTreeRoot = getSharedLibrarySymlinkTreePath( params.getProjectFilesystem(), params.getBuildTarget(), cxxPlatform.getFlavor()); ImmutableSortedMap<String, SourcePath> libraries = NativeLinkables.getTransitiveSharedLibraries( cxxPlatform, deps, traverse); ImmutableMap.Builder<Path, SourcePath> links = ImmutableMap.builder(); for (Map.Entry<String, SourcePath> ent : libraries.entrySet()) { links.put(Paths.get(ent.getKey()), ent.getValue()); } return new SymlinkTree( params.copyWithChanges( symlinkTreeTarget, Suppliers.ofInstance(ImmutableSortedSet.<BuildRule>of()), Suppliers.ofInstance(ImmutableSortedSet.<BuildRule>of())), pathResolver, symlinkTreeRoot, links.build()); } public static Flavor flavorForLinkableDepType(Linker.LinkableDepType linkableDepType) { switch (linkableDepType) { case STATIC: return STATIC_FLAVOR; case STATIC_PIC: return STATIC_PIC_FLAVOR; case SHARED: return SHARED_FLAVOR; } throw new RuntimeException( String.format("Unsupported LinkableDepType: '%s'", linkableDepType)); } /** * Resolve the map of names to SourcePaths to a map of names to CxxSource objects. */ private static ImmutableMap<String, CxxSource> resolveCxxSources( ImmutableMap<String, SourceWithFlags> sources) { ImmutableMap.Builder<String, CxxSource> cxxSources = ImmutableMap.builder(); // For each entry in the input C/C++ source, build a CxxSource object to wrap // it's name, input path, and output object file path. for (ImmutableMap.Entry<String, SourceWithFlags> ent : sources.entrySet()) { String extension = Files.getFileExtension(ent.getKey()); Optional<CxxSource.Type> type = CxxSource.Type.fromExtension(extension); if (!type.isPresent()) { throw new HumanReadableException( "invalid extension \"%s\": %s", extension, ent.getKey()); } cxxSources.put( ent.getKey(), CxxSource.of( type.get(), ent.getValue().getSourcePath(), ent.getValue().getFlags())); } return cxxSources.build(); } }
cxx: only create shared lib symlink trees for binaries if needed Summary: All other rules in `CxxDescriptionEnhancer.createBuildRulesForCxxBinary` are only created if they don't already exist, so do the same for the shared library symlink tree. Test Plan: CI Reviewed By: Coneko fbshipit-source-id: 649c20e
src/com/facebook/buck/cxx/CxxDescriptionEnhancer.java
cxx: only create shared lib symlink trees for binaries if needed
<ide><path>rc/com/facebook/buck/cxx/CxxDescriptionEnhancer.java <ide> <ide> // Create a symlink tree with for all shared libraries needed by this binary. <ide> SymlinkTree sharedLibraries = <del> resolver.addToIndex( <del> createSharedLibrarySymlinkTree( <del> params, <del> sourcePathResolver, <del> cxxPlatform, <del> params.getDeps(), <del> Predicates.instanceOf(NativeLinkable.class))); <add> requireSharedLibrarySymlinkTree( <add> params, <add> resolver, <add> sourcePathResolver, <add> cxxPlatform, <add> params.getDeps(), <add> Predicates.instanceOf(NativeLinkable.class)); <ide> <ide> // Embed a origin-relative library path into the binary so it can find the shared libraries. <ide> // The shared libraries root is absolute. Also need an absolute path to the linkOutput <ide> links.build()); <ide> } <ide> <add> public static SymlinkTree requireSharedLibrarySymlinkTree( <add> BuildRuleParams params, <add> BuildRuleResolver resolver, <add> SourcePathResolver pathResolver, <add> CxxPlatform cxxPlatform, <add> Iterable<? extends BuildRule> deps, <add> Predicate<Object> traverse) <add> throws NoSuchBuildTargetException { <add> BuildTarget target = <add> createSharedLibrarySymlinkTreeTarget(params.getBuildTarget(), cxxPlatform.getFlavor()); <add> SymlinkTree tree = resolver.getRuleOptionalWithType(target, SymlinkTree.class).orNull(); <add> if (tree == null) { <add> tree = <add> resolver.addToIndex( <add> createSharedLibrarySymlinkTree( <add> params, <add> pathResolver, <add> cxxPlatform, <add> deps, <add> traverse)); <add> } <add> return tree; <add> } <add> <ide> public static Flavor flavorForLinkableDepType(Linker.LinkableDepType linkableDepType) { <ide> switch (linkableDepType) { <ide> case STATIC:
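The commit above applies the same get-or-create ("require") idiom used by the other rules in CxxDescriptionEnhancer.createBuildRulesForCxxBinary: ask the resolver for an already-registered rule first, and only build and index a new one when that lookup comes back empty. The following is a minimal, self-contained sketch of that idiom, not Buck code; RuleRegistry, the plain-string targets, and the Supplier-based factory are hypothetical stand-ins for BuildRuleResolver, BuildTarget, and createSharedLibrarySymlinkTree.

import java.util.HashMap;
import java.util.Map;
import java.util.function.Supplier;

// Hypothetical stand-in for Buck's BuildRuleResolver, used only to illustrate the
// lookup-before-create ("require") idiom introduced by requireSharedLibrarySymlinkTree.
final class RuleRegistry {

    private final Map<String, Object> index = new HashMap<>();

    // Return the rule already registered under this target, or build and register it once.
    <T> T require(String target, Class<T> type, Supplier<T> factory) {
        Object existing = index.get(target);                 // like resolver.getRuleOptionalWithType(...)
        if (existing != null) {
            if (!type.isInstance(existing)) {                // like Preconditions.checkArgument(... instanceof ...)
                throw new IllegalStateException(target + " is not a " + type.getSimpleName());
            }
            return type.cast(existing);
        }
        T created = factory.get();                           // like createSharedLibrarySymlinkTree(...)
        index.put(target, created);                          // like resolver.addToIndex(...)
        return created;
    }

    public static void main(String[] args) {
        RuleRegistry resolver = new RuleRegistry();
        String first = resolver.require("//app:bin#shared-library-symlink-tree", String.class,
                () -> "symlink tree built once");
        String second = resolver.require("//app:bin#shared-library-symlink-tree", String.class,
                () -> "never built");
        System.out.println(first == second);                 // true: the second call reuses the first rule
    }
}

Because the second call reuses the indexed rule, two code paths that both need the shared-library symlink tree no longer attempt to register the same target twice, which is the duplicate-rule situation the diff avoids.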
Java
mit
bd8f1642f44627d67f688dde61b90ae053d26844
0
codistmonk/IMJ
package imj3.draft.processing; import static imj3.draft.machinelearning.ClassDataSource.classes; import static imj3.draft.machinelearning.Max.max; import static imj3.draft.machinelearning.Mean.mean; import static imj3.draft.processing.Image2DRawSource.raw; import static java.lang.Math.rint; import imj2.tools.BitwiseQuantizationTest.DoubleArrayComparator; import imj3.core.Channels; import imj3.draft.machinelearning.BufferedDataSource; import imj3.draft.machinelearning.Classification; import imj3.draft.machinelearning.ClassifiedDataSource; import imj3.draft.machinelearning.Classifier; import imj3.draft.machinelearning.ClassifierClass; import imj3.draft.machinelearning.DataSource; import imj3.draft.machinelearning.KMeansClustering; import imj3.draft.machinelearning.LinearTransform; import imj3.draft.machinelearning.Measure; import imj3.draft.machinelearning.MedianCutClustering; import imj3.draft.machinelearning.NearestNeighborClassifier; import imj3.draft.machinelearning.NearestNeighborClustering; import imj3.draft.machinelearning.StreamingClustering; import imj3.draft.machinelearning.NearestNeighborClassifier.Prototype; import imj3.tools.AwtImage2D; import java.io.File; import java.io.Serializable; import java.util.Map; import java.util.Random; import java.util.TreeMap; import net.sourceforge.aprog.swing.SwingTools; import net.sourceforge.aprog.tools.CommandLineArgumentsParser; import net.sourceforge.aprog.tools.IllegalInstantiationException; import net.sourceforge.aprog.tools.TicToc; import net.sourceforge.aprog.tools.Tools; /** * @author codistmonk (creation 2015-02-06) */ public final class Analyze { private Analyze() { throw new IllegalInstantiationException(); } /** * @param commandLineArguments * <br>Unused */ public static final void main(final String[] commandLineArguments) { final CommandLineArgumentsParser arguments = new CommandLineArgumentsParser(commandLineArguments); final File file = new File(arguments.get("file", "")); final AwtImage2D image = new AwtImage2D(file.getPath()); SwingTools.show(image.getSource(), file.getName(), false); if (false) { final DataSource<Image2DSource.Metadata, ?> raw = raw(image, 2, 1, 2); SwingTools.show(image(classes(mean(raw, 3))).getSource(), "Mean", false); SwingTools.show(image(classes(max(raw))).getSource(), "Max", false); SwingTools.show(image(classes(mean(classes(classify(raw, new StreamingClustering(Measure.Predefined.L1_ES, 8).cluster(raw))), 3))).getSource(), "Indirect (streaming)", false); SwingTools.show(image(classes(mean(classes(classify(raw, new KMeansClustering(Measure.Predefined.L1_ES, 8, 6).cluster(raw))), 3))).getSource(), "Indirect (k-means)", false); } if (true) { final DataSource<? extends Patch2DSource.Metadata, ?> source = new BufferedDataSource<>(raw(image, 1, 1, 1)); Tools.debugPrint(new Histogram().add(source).getCounts().size()); final DataSource<? extends Patch2DSource.Metadata, ?> trainingSet = source; // final NearestNeighborClustering clustering = new KMeansClustering(Measure.Predefined.L2_ES, 256, 8); final NearestNeighborClustering clustering = new MedianCutClustering(Measure.Predefined.L2_ES, 256); // final NearestNeighborClustering clustering = new StreamingClustering(Measure.Predefined.L1_ES, 3); final NearestNeighborClassifier quantizer = clustering.cluster(trainingSet); final DataSource<? 
extends Patch2DSource.Metadata, Prototype> quantized = classify(source, quantizer); final LinearTransform rgbRenderer = new LinearTransform(Measure.Predefined.L2_ES, newRGBRenderingMatrix(source.getMetadata().getPatchPixelCount())); final DataSource<? extends Patch2DSource.Metadata, ?> rendered = classify(classes(quantized), rgbRenderer); // SwingTools.show(image(classes(mean(classes(quantized), 3))).getSource(), clustering.getClass().getSimpleName() + " -> rendered", false); SwingTools.show(image(classes(rendered)).getSource(), clustering.getClass().getSimpleName() + " -> rendered", false); } } public static final <M extends DataSource.Metadata, In extends ClassifierClass, Out extends ClassifierClass> ClassifiedDataSource<M, In, Out> classify(final DataSource<M, In> quantized, final Classifier<Out> rgbRenderer) { return new ClassifiedDataSource<>(quantized, rgbRenderer); } public static final double[][] newRGBRenderingMatrix(final int inputPatchPixelCount) { final double[][] result = new double[3][]; final int n = 3 * inputPatchPixelCount; for (int i = 0; i < 3; ++i) { final double[] row = new double[n]; for (int j = i; j < n; j += 3) { row[j] = 1.0 / inputPatchPixelCount; } result[i] = row; } return result; } public static final AwtImage2D image(final DataSource<? extends Patch2DSource.Metadata, ?> source) { return image(source, new AwtImage2D(Long.toHexString(new Random().nextLong()), source.getMetadata().sizeX(), source.getMetadata().sizeY())); } public static final AwtImage2D image(final DataSource<? extends Patch2DSource.Metadata, ?> source, final AwtImage2D result) { final TicToc timer = new TicToc(); final int dimension = source.getInputDimension(); if (dimension != 1 && dimension != 3) { Tools.debugError(dimension); throw new IllegalArgumentException(); } final int width = source.getMetadata().sizeX(); int pixel = 0; for (final Classification<?> c : source) { final int x = pixel % width; final int y = pixel / width; final double[] input = c.getInput(); final int rgb = dimension == 1 ? gray(input) : argb(input); result.setPixelValue(x, y, rgb); ++pixel; } Tools.debugPrint("Awt image created in", timer.toc(), "ms"); return result; } public static final int argb(final double[] rgb) { return Channels.Predefined.a8r8g8b8(0xFF, int8(rgb[0]), int8(rgb[1]), int8(rgb[2])); } public static final int gray(final double[] rgb) { final int gray = int8(rgb[0]); return Channels.Predefined.a8r8g8b8(0xFF, gray, gray, gray); } public static final int int8(final double value0255) { return ((int) value0255) & 0xFF; } /** * @author codistmonk (creation 2015-02-12) */ public static final class Histogram implements Serializable { private final Map<double[], double[]> counts = new TreeMap<>(new DoubleArrayComparator()); private long totalCount; public final Map<double[], double[]> getCounts() { return this.counts; } public final long getTotalCount() { return this.totalCount; } public final Histogram reset() { this.getCounts().clear(); this.totalCount = 0L; return this; } public final Histogram add(final double... 
input) { ++this.getCounts().computeIfAbsent(input, i -> new double[1])[0]; ++this.totalCount; return this; } public final Histogram add(final DataSource<?, ?> inputs) { for (final Classification<?> classification : inputs) { this.add(classification.getInput().clone()); } return this; } public final Histogram normalize() { final long n = this.getTotalCount(); if (0L < n) { for (final double[] count : this.getCounts().values()) { count[0] /= n; } } return this; } public final Histogram denormalize() { final long n = this.getTotalCount(); if (0L < n) { for (final double[] count : this.getCounts().values()) { count[0] = rint(count[0] * n); } } return this; } public final double[] pack(final int binCount) { final double[] result = new double[binCount]; for (final Map.Entry<double[], double[]> entry : this.getCounts().entrySet()) { final double[] key = entry.getKey(); if (key.length != 1) { throw new IllegalArgumentException(); } result[(int) key[0]] = entry.getValue()[0]; } return result; } private static final long serialVersionUID = -4974336898629198663L; } }
IMJ/src/imj3/draft/processing/Analyze.java
package imj3.draft.processing; import static imj3.draft.machinelearning.Max.max; import static imj3.draft.machinelearning.Mean.mean; import static imj3.draft.processing.Image2DRawSource.raw; import imj3.core.Channels; import imj3.draft.machinelearning.BufferedDataSource; import imj3.draft.machinelearning.ClassDataSource; import imj3.draft.machinelearning.Classification; import imj3.draft.machinelearning.ClassifiedDataSource; import imj3.draft.machinelearning.Classifier; import imj3.draft.machinelearning.ClassifierClass; import imj3.draft.machinelearning.DataSource; import imj3.draft.machinelearning.KMeansClustering; import imj3.draft.machinelearning.LinearTransform; import imj3.draft.machinelearning.Measure; import imj3.draft.machinelearning.MedianCutClustering; import imj3.draft.machinelearning.NearestNeighborClassifier; import imj3.draft.machinelearning.NearestNeighborClustering; import imj3.draft.machinelearning.StreamingClustering; import imj3.draft.machinelearning.NearestNeighborClassifier.Prototype; import imj3.tools.AwtImage2D; import java.io.File; import java.util.Random; import net.sourceforge.aprog.swing.SwingTools; import net.sourceforge.aprog.tools.CommandLineArgumentsParser; import net.sourceforge.aprog.tools.IllegalInstantiationException; import net.sourceforge.aprog.tools.TicToc; import net.sourceforge.aprog.tools.Tools; /** * @author codistmonk (creation 2015-02-06) */ public final class Analyze { private Analyze() { throw new IllegalInstantiationException(); } /** * @param commandLineArguments * <br>Unused */ public static final void main(final String[] commandLineArguments) { final CommandLineArgumentsParser arguments = new CommandLineArgumentsParser(commandLineArguments); final File file = new File(arguments.get("file", "")); final AwtImage2D image = new AwtImage2D(file.getPath()); SwingTools.show(image.getSource(), file.getName(), false); if (false) { final DataSource<Image2DSource.Metadata, ?> raw = raw(image, 2, 1, 2); SwingTools.show(image(classes(mean(raw, 3))).getSource(), "Mean", false); SwingTools.show(image(classes(max(raw))).getSource(), "Max", false); SwingTools.show(image(classes(mean(classes(classify(raw, new StreamingClustering(Measure.Predefined.L1_ES, 8).cluster(raw))), 3))).getSource(), "Indirect (streaming)", false); SwingTools.show(image(classes(mean(classes(classify(raw, new KMeansClustering(Measure.Predefined.L1_ES, 8, 6).cluster(raw))), 3))).getSource(), "Indirect (k-means)", false); } if (true) { final DataSource<? extends Patch2DSource.Metadata, ?> source = new BufferedDataSource<>(raw(image, 1, 1, 1)); final DataSource<? extends Patch2DSource.Metadata, ?> trainingSet = source; // final NearestNeighborClustering clustering = new KMeansClustering(Measure.Predefined.L2_ES, 256, 8); final NearestNeighborClustering clustering = new MedianCutClustering(Measure.Predefined.L2_ES, 256); // final NearestNeighborClustering clustering = new StreamingClustering(Measure.Predefined.L1_ES, 3); final NearestNeighborClassifier quantizer = clustering.cluster(trainingSet); final DataSource<? extends Patch2DSource.Metadata, Prototype> quantized = classify(source, quantizer); final LinearTransform rgbRenderer = new LinearTransform(Measure.Predefined.L2_ES, newRGBRenderingMatrix(source.getMetadata().getPatchPixelCount())); final DataSource<? 
extends Patch2DSource.Metadata, ?> rendered = classify(classes(quantized), rgbRenderer); // SwingTools.show(image(classes(mean(classes(quantized), 3))).getSource(), clustering.getClass().getSimpleName() + " -> rendered", false); SwingTools.show(image(classes(rendered)).getSource(), clustering.getClass().getSimpleName() + " -> rendered", false); } } public static final <M extends DataSource.Metadata, In extends ClassifierClass, Out extends ClassifierClass> ClassifiedDataSource<M, In, Out> classify(final DataSource<M, In> quantized, final Classifier<Out> rgbRenderer) { return new ClassifiedDataSource<>(quantized, rgbRenderer); } public static final <M extends DataSource.Metadata> ClassDataSource<M> classes(final DataSource<M, ?> inputs) { return new ClassDataSource<>(inputs); } public static final double[][] newRGBRenderingMatrix(final int inputPatchPixelCount) { final double[][] result = new double[3][]; final int n = 3 * inputPatchPixelCount; for (int i = 0; i < 3; ++i) { final double[] row = new double[n]; for (int j = i; j < n; j += 3) { row[j] = 1.0 / inputPatchPixelCount; } result[i] = row; } return result; } public static final AwtImage2D image(final DataSource<? extends Patch2DSource.Metadata, ?> source) { return image(source, new AwtImage2D(Long.toHexString(new Random().nextLong()), source.getMetadata().sizeX(), source.getMetadata().sizeY())); } public static final AwtImage2D image(final DataSource<? extends Patch2DSource.Metadata, ?> source, final AwtImage2D result) { final TicToc timer = new TicToc(); final int dimension = source.getInputDimension(); if (dimension != 1 && dimension != 3) { Tools.debugError(dimension); throw new IllegalArgumentException(); } final int width = source.getMetadata().sizeX(); int pixel = 0; for (final Classification<?> c : source) { final int x = pixel % width; final int y = pixel / width; final double[] input = c.getInput(); final int rgb = dimension == 1 ? gray(input) : argb(input); result.setPixelValue(x, y, rgb); ++pixel; } Tools.debugPrint("Awt image created in", timer.toc(), "ms"); return result; } public static final int argb(final double[] rgb) { return Channels.Predefined.a8r8g8b8(0xFF, int8(rgb[0]), int8(rgb[1]), int8(rgb[2])); } public static final int gray(final double[] rgb) { final int gray = int8(rgb[0]); return Channels.Predefined.a8r8g8b8(0xFF, gray, gray, gray); } public static final int int8(final double value0255) { return ((int) value0255) & 0xFF; } }
[IMJ][imj3][segmentation2] Updated Analyze.
IMJ/src/imj3/draft/processing/Analyze.java
[IMJ][imj3][segmentation2] Updated Analyze.
<ide><path>MJ/src/imj3/draft/processing/Analyze.java <ide> package imj3.draft.processing; <ide> <add>import static imj3.draft.machinelearning.ClassDataSource.classes; <ide> import static imj3.draft.machinelearning.Max.max; <ide> import static imj3.draft.machinelearning.Mean.mean; <ide> import static imj3.draft.processing.Image2DRawSource.raw; <add>import static java.lang.Math.rint; <add> <add>import imj2.tools.BitwiseQuantizationTest.DoubleArrayComparator; <add> <ide> import imj3.core.Channels; <ide> import imj3.draft.machinelearning.BufferedDataSource; <del>import imj3.draft.machinelearning.ClassDataSource; <ide> import imj3.draft.machinelearning.Classification; <ide> import imj3.draft.machinelearning.ClassifiedDataSource; <ide> import imj3.draft.machinelearning.Classifier; <ide> import imj3.tools.AwtImage2D; <ide> <ide> import java.io.File; <add>import java.io.Serializable; <add>import java.util.Map; <ide> import java.util.Random; <add>import java.util.TreeMap; <ide> <ide> import net.sourceforge.aprog.swing.SwingTools; <ide> import net.sourceforge.aprog.tools.CommandLineArgumentsParser; <ide> <ide> if (true) { <ide> final DataSource<? extends Patch2DSource.Metadata, ?> source = new BufferedDataSource<>(raw(image, 1, 1, 1)); <add> <add> Tools.debugPrint(new Histogram().add(source).getCounts().size()); <add> <ide> final DataSource<? extends Patch2DSource.Metadata, ?> trainingSet = source; <ide> <ide> // final NearestNeighborClustering clustering = new KMeansClustering(Measure.Predefined.L2_ES, 256, 8); <ide> return new ClassifiedDataSource<>(quantized, rgbRenderer); <ide> } <ide> <del> public static final <M extends DataSource.Metadata> ClassDataSource<M> classes(final DataSource<M, ?> inputs) { <del> return new ClassDataSource<>(inputs); <del> } <del> <ide> public static final double[][] newRGBRenderingMatrix(final int inputPatchPixelCount) { <ide> final double[][] result = new double[3][]; <ide> final int n = 3 * inputPatchPixelCount; <ide> return ((int) value0255) & 0xFF; <ide> } <ide> <add> /** <add> * @author codistmonk (creation 2015-02-12) <add> */ <add> public static final class Histogram implements Serializable { <add> <add> private final Map<double[], double[]> counts = new TreeMap<>(new DoubleArrayComparator()); <add> <add> private long totalCount; <add> <add> public final Map<double[], double[]> getCounts() { <add> return this.counts; <add> } <add> <add> public final long getTotalCount() { <add> return this.totalCount; <add> } <add> <add> public final Histogram reset() { <add> this.getCounts().clear(); <add> this.totalCount = 0L; <add> <add> return this; <add> } <add> <add> public final Histogram add(final double... 
input) { <add> ++this.getCounts().computeIfAbsent(input, i -> new double[1])[0]; <add> ++this.totalCount; <add> <add> return this; <add> } <add> <add> public final Histogram add(final DataSource<?, ?> inputs) { <add> for (final Classification<?> classification : inputs) { <add> this.add(classification.getInput().clone()); <add> } <add> <add> return this; <add> } <add> <add> public final Histogram normalize() { <add> final long n = this.getTotalCount(); <add> <add> if (0L < n) { <add> for (final double[] count : this.getCounts().values()) { <add> count[0] /= n; <add> } <add> } <add> <add> return this; <add> } <add> <add> public final Histogram denormalize() { <add> final long n = this.getTotalCount(); <add> <add> if (0L < n) { <add> for (final double[] count : this.getCounts().values()) { <add> count[0] = rint(count[0] * n); <add> } <add> } <add> <add> return this; <add> } <add> <add> public final double[] pack(final int binCount) { <add> final double[] result = new double[binCount]; <add> <add> for (final Map.Entry<double[], double[]> entry : this.getCounts().entrySet()) { <add> final double[] key = entry.getKey(); <add> <add> if (key.length != 1) { <add> throw new IllegalArgumentException(); <add> } <add> <add> result[(int) key[0]] = entry.getValue()[0]; <add> } <add> <add> return result; <add> } <add> <add> private static final long serialVersionUID = -4974336898629198663L; <add> <add> } <add> <ide> }
Java
mit
8dd1446d3b1077be6c331b00b376aa402b86c34e
0
msrb/jenkins,fbelzunc/jenkins,wangyikai/jenkins,kohsuke/hudson,shahharsh/jenkins,Krasnyanskiy/jenkins,mcanthony/jenkins,thomassuckow/jenkins,hashar/jenkins,SenolOzer/jenkins,mrobinet/jenkins,daniel-beck/jenkins,lvotypko/jenkins2,akshayabd/jenkins,SebastienGllmt/jenkins,gitaccountforprashant/gittest,jzjzjzj/jenkins,christ66/jenkins,mrooney/jenkins,lvotypko/jenkins,MarkEWaite/jenkins,msrb/jenkins,everyonce/jenkins,liupugong/jenkins,hemantojhaa/jenkins,stephenc/jenkins,huybrechts/hudson,varmenise/jenkins,gusreiber/jenkins,aduprat/jenkins,fbelzunc/jenkins,github-api-test-org/jenkins,tangkun75/jenkins,amuniz/jenkins,jcarrothers-sap/jenkins,jenkinsci/jenkins,duzifang/my-jenkins,CodeShane/jenkins,iqstack/jenkins,hudson/hudson-2.x,lordofthejars/jenkins,tangkun75/jenkins,ajshastri/jenkins,CodeShane/jenkins,singh88/jenkins,Jochen-A-Fuerbacher/jenkins,MarkEWaite/jenkins,ChrisA89/jenkins,tastatur/jenkins,alvarolobato/jenkins,intelchen/jenkins,synopsys-arc-oss/jenkins,pselle/jenkins,hemantojhaa/jenkins,ajshastri/jenkins,bkmeneguello/jenkins,AustinKwang/jenkins,ikedam/jenkins,andresrc/jenkins,maikeffi/hudson,noikiy/jenkins,kohsuke/hudson,lindzh/jenkins,Wilfred/jenkins,singh88/jenkins,lilyJi/jenkins,gitaccountforprashant/gittest,SebastienGllmt/jenkins,lvotypko/jenkins2,arunsingh/jenkins,MichaelPranovich/jenkins_sc,vvv444/jenkins,escoem/jenkins,MichaelPranovich/jenkins_sc,olivergondza/jenkins,aheritier/jenkins,tangkun75/jenkins,vvv444/jenkins,wuwen5/jenkins,h4ck3rm1k3/jenkins,MadsNielsen/jtemp,varmenise/jenkins,ns163/jenkins,wuwen5/jenkins,alvarolobato/jenkins,akshayabd/jenkins,fbelzunc/jenkins,292388900/jenkins,aheritier/jenkins,samatdav/jenkins,Jochen-A-Fuerbacher/jenkins,protazy/jenkins,aquarellian/jenkins,Jochen-A-Fuerbacher/jenkins,msrb/jenkins,pselle/jenkins,daniel-beck/jenkins,ChrisA89/jenkins,rlugojr/jenkins,albers/jenkins,morficus/jenkins,pjanouse/jenkins,wangyikai/jenkins,DanielWeber/jenkins,vivek/hudson,jpederzolli/jenkins-1,oleg-nenashev/jenkins,sathiya-mit/jenkins,AustinKwang/jenkins,brunocvcunha/jenkins,hplatou/jenkins,deadmoose/jenkins,gorcz/jenkins,christ66/jenkins,liupugong/jenkins,6WIND/jenkins,mdonohue/jenkins,ajshastri/jenkins,singh88/jenkins,stefanbrausch/hudson-main,Vlatombe/jenkins,brunocvcunha/jenkins,thomassuckow/jenkins,lvotypko/jenkins,wuwen5/jenkins,rsandell/jenkins,mattclark/jenkins,jenkinsci/jenkins,azweb76/jenkins,amuniz/jenkins,Ykus/jenkins,nandan4/Jenkins,albers/jenkins,mattclark/jenkins,bkmeneguello/jenkins,maikeffi/hudson,my7seven/jenkins,abayer/jenkins,AustinKwang/jenkins,aheritier/jenkins,everyonce/jenkins,FTG-003/jenkins,jenkinsci/jenkins,ajshastri/jenkins,dariver/jenkins,hemantojhaa/jenkins,wuwen5/jenkins,jzjzjzj/jenkins,sathiya-mit/jenkins,gorcz/jenkins,guoxu0514/jenkins,mrobinet/jenkins,thomassuckow/jenkins,huybrechts/hudson,Krasnyanskiy/jenkins,Jimilian/jenkins,aldaris/jenkins,6WIND/jenkins,gitaccountforprashant/gittest,hemantojhaa/jenkins,KostyaSha/jenkins,pselle/jenkins,h4ck3rm1k3/jenkins,aduprat/jenkins,oleg-nenashev/jenkins,evernat/jenkins,intelchen/jenkins,msrb/jenkins,vivek/hudson,ErikVerheul/jenkins,github-api-test-org/jenkins,daspilker/jenkins,ChrisA89/jenkins,deadmoose/jenkins,gorcz/jenkins,github-api-test-org/jenkins,hashar/jenkins,shahharsh/jenkins,jhoblitt/jenkins,rlugojr/jenkins,bpzhang/jenkins,iterate/coding-dojo,stephenc/jenkins,ydubreuil/jenkins,paulwellnerbou/jenkins,soenter/jenkins,iterate/coding-dojo,NehemiahMi/jenkins,jpederzolli/jenkins-1,guoxu0514/jenkins,aquarellian/jenkins,arunsingh/jenkins,my7seven/jenkins,khmarbaise/jenkins,mpeltonen/jenki
ns,shahharsh/jenkins,oleg-nenashev/jenkins,liorhson/jenkins,iqstack/jenkins,paulwellnerbou/jenkins,pantheon-systems/jenkins,iterate/coding-dojo,svanoort/jenkins,olivergondza/jenkins,ndeloof/jenkins,sathiya-mit/jenkins,jzjzjzj/jenkins,amruthsoft9/Jenkis,amruthsoft9/Jenkis,lilyJi/jenkins,oleg-nenashev/jenkins,gitaccountforprashant/gittest,rlugojr/jenkins,mrobinet/jenkins,albers/jenkins,protazy/jenkins,FarmGeek4Life/jenkins,petermarcoen/jenkins,tangkun75/jenkins,aheritier/jenkins,iterate/coding-dojo,damianszczepanik/jenkins,daspilker/jenkins,sathiya-mit/jenkins,lindzh/jenkins,wuwen5/jenkins,protazy/jenkins,paulwellnerbou/jenkins,stefanbrausch/hudson-main,dariver/jenkins,jhoblitt/jenkins,shahharsh/jenkins,SebastienGllmt/jenkins,jcsirot/jenkins,patbos/jenkins,NehemiahMi/jenkins,akshayabd/jenkins,guoxu0514/jenkins,jpederzolli/jenkins-1,vjuranek/jenkins,vlajos/jenkins,csimons/jenkins,godfath3r/jenkins,synopsys-arc-oss/jenkins,jpbriend/jenkins,morficus/jenkins,andresrc/jenkins,vivek/hudson,intelchen/jenkins,arcivanov/jenkins,iqstack/jenkins,evernat/jenkins,ydubreuil/jenkins,vijayto/jenkins,jtnord/jenkins,alvarolobato/jenkins,verbitan/jenkins,liupugong/jenkins,rsandell/jenkins,scoheb/jenkins,svanoort/jenkins,jglick/jenkins,ydubreuil/jenkins,batmat/jenkins,Vlatombe/jenkins,liorhson/jenkins,rsandell/jenkins,soenter/jenkins,SenolOzer/jenkins,AustinKwang/jenkins,ChrisA89/jenkins,jzjzjzj/jenkins,verbitan/jenkins,yonglehou/jenkins,vjuranek/jenkins,luoqii/jenkins,wangyikai/jenkins,jpbriend/jenkins,deadmoose/jenkins,lilyJi/jenkins,MarkEWaite/jenkins,tastatur/jenkins,KostyaSha/jenkins,thomassuckow/jenkins,olivergondza/jenkins,batmat/jenkins,dbroady1/jenkins,yonglehou/jenkins,azweb76/jenkins,rlugojr/jenkins,maikeffi/hudson,bpzhang/jenkins,chbiel/jenkins,azweb76/jenkins,dbroady1/jenkins,arcivanov/jenkins,tfennelly/jenkins,viqueen/jenkins,gorcz/jenkins,abayer/jenkins,deadmoose/jenkins,Ykus/jenkins,tfennelly/jenkins,kzantow/jenkins,fbelzunc/jenkins,azweb76/jenkins,viqueen/jenkins,arunsingh/jenkins,ns163/jenkins,huybrechts/hudson,elkingtonmcb/jenkins,kzantow/jenkins,morficus/jenkins,samatdav/jenkins,CodeShane/jenkins,jk47/jenkins,jpbriend/jenkins,mdonohue/jenkins,luoqii/jenkins,pjanouse/jenkins,kohsuke/hudson,vvv444/jenkins,arcivanov/jenkins,rsandell/jenkins,arcivanov/jenkins,wangyikai/jenkins,synopsys-arc-oss/jenkins,mdonohue/jenkins,tfennelly/jenkins,jhoblitt/jenkins,hplatou/jenkins,thomassuckow/jenkins,abayer/jenkins,maikeffi/hudson,paulmillar/jenkins,guoxu0514/jenkins,khmarbaise/jenkins,dbroady1/jenkins,lindzh/jenkins,mrooney/jenkins,morficus/jenkins,rlugojr/jenkins,ErikVerheul/jenkins,scoheb/jenkins,SebastienGllmt/jenkins,rashmikanta-1984/jenkins,jpbriend/jenkins,tfennelly/jenkins,jenkinsci/jenkins,evernat/jenkins,elkingtonmcb/jenkins,tangkun75/jenkins,lordofthejars/jenkins,Krasnyanskiy/jenkins,vlajos/jenkins,ndeloof/jenkins,Ykus/jenkins,MichaelPranovich/jenkins_sc,wangyikai/jenkins,stefanbrausch/hudson-main,ErikVerheul/jenkins,rsandell/jenkins,soenter/jenkins,my7seven/jenkins,hashar/jenkins,nandan4/Jenkins,aquarellian/jenkins,NehemiahMi/jenkins,daniel-beck/jenkins,daspilker/jenkins,thomassuckow/jenkins,amuniz/jenkins,stephenc/jenkins,andresrc/jenkins,deadmoose/jenkins,rashmikanta-1984/jenkins,singh88/jenkins,paulwellnerbou/jenkins,rlugojr/jenkins,stefanbrausch/hudson-main,rashmikanta-1984/jenkins,iqstack/jenkins,elkingtonmcb/jenkins,rsandell/jenkins,scoheb/jenkins,lvotypko/jenkins2,liupugong/jenkins,lordofthejars/jenkins,mdonohue/jenkins,mrooney/jenkins,everyonce/jenkins,dariver/jenkins,daspilker/jenkins,h4
ck3rm1k3/jenkins,liupugong/jenkins,paulwellnerbou/jenkins,Vlatombe/jenkins,hashar/jenkins,petermarcoen/jenkins,mcanthony/jenkins,lordofthejars/jenkins,aldaris/jenkins,keyurpatankar/hudson,stephenc/jenkins,albers/jenkins,scoheb/jenkins,pjanouse/jenkins,ChrisA89/jenkins,dennisjlee/jenkins,jk47/jenkins,stefanbrausch/hudson-main,azweb76/jenkins,hudson/hudson-2.x,vijayto/jenkins,h4ck3rm1k3/jenkins,csimons/jenkins,oleg-nenashev/jenkins,lindzh/jenkins,amuniz/jenkins,vlajos/jenkins,noikiy/jenkins,ns163/jenkins,292388900/jenkins,maikeffi/hudson,aheritier/jenkins,github-api-test-org/jenkins,vvv444/jenkins,mrooney/jenkins,ikedam/jenkins,lordofthejars/jenkins,github-api-test-org/jenkins,hemantojhaa/jenkins,samatdav/jenkins,FarmGeek4Life/jenkins,6WIND/jenkins,rlugojr/jenkins,KostyaSha/jenkins,6WIND/jenkins,Wilfred/jenkins,Wilfred/jenkins,MarkEWaite/jenkins,arunsingh/jenkins,tastatur/jenkins,dariver/jenkins,keyurpatankar/hudson,escoem/jenkins,Wilfred/jenkins,Ykus/jenkins,DoctorQ/jenkins,lvotypko/jenkins,albers/jenkins,ndeloof/jenkins,samatdav/jenkins,brunocvcunha/jenkins,aduprat/jenkins,DoctorQ/jenkins,huybrechts/hudson,goldchang/jenkins,csimons/jenkins,protazy/jenkins,mcanthony/jenkins,MadsNielsen/jtemp,hashar/jenkins,arunsingh/jenkins,olivergondza/jenkins,dennisjlee/jenkins,292388900/jenkins,ndeloof/jenkins,ns163/jenkins,292388900/jenkins,seanlin816/jenkins,rashmikanta-1984/jenkins,FTG-003/jenkins,noikiy/jenkins,bpzhang/jenkins,jtnord/jenkins,amruthsoft9/Jenkis,csimons/jenkins,seanlin816/jenkins,ChrisA89/jenkins,jcarrothers-sap/jenkins,jcarrothers-sap/jenkins,1and1/jenkins,Wilfred/jenkins,soenter/jenkins,bkmeneguello/jenkins,Krasnyanskiy/jenkins,vivek/hudson,goldchang/jenkins,v1v/jenkins,Vlatombe/jenkins,Krasnyanskiy/jenkins,dariver/jenkins,amruthsoft9/Jenkis,iqstack/jenkins,mdonohue/jenkins,paulmillar/jenkins,verbitan/jenkins,duzifang/my-jenkins,mcanthony/jenkins,abayer/jenkins,jglick/jenkins,soenter/jenkins,paulwellnerbou/jenkins,iqstack/jenkins,hashar/jenkins,gorcz/jenkins,iterate/coding-dojo,damianszczepanik/jenkins,fbelzunc/jenkins,christ66/jenkins,alvarolobato/jenkins,vjuranek/jenkins,intelchen/jenkins,v1v/jenkins,github-api-test-org/jenkins,v1v/jenkins,goldchang/jenkins,pselle/jenkins,shahharsh/jenkins,rashmikanta-1984/jenkins,jpbriend/jenkins,jpederzolli/jenkins-1,petermarcoen/jenkins,jcsirot/jenkins,MichaelPranovich/jenkins_sc,chbiel/jenkins,jk47/jenkins,Krasnyanskiy/jenkins,huybrechts/hudson,nandan4/Jenkins,MarkEWaite/jenkins,DoctorQ/jenkins,KostyaSha/jenkins,gorcz/jenkins,patbos/jenkins,mpeltonen/jenkins,rashmikanta-1984/jenkins,azweb76/jenkins,jtnord/jenkins,arcivanov/jenkins,MadsNielsen/jtemp,h4ck3rm1k3/jenkins,lvotypko/jenkins2,damianszczepanik/jenkins,jglick/jenkins,lvotypko/jenkins,verbitan/jenkins,arcivanov/jenkins,ajshastri/jenkins,duzifang/my-jenkins,vlajos/jenkins,lvotypko/jenkins3,alvarolobato/jenkins,mpeltonen/jenkins,mrobinet/jenkins,jk47/jenkins,jcsirot/jenkins,elkingtonmcb/jenkins,goldchang/jenkins,AustinKwang/jenkins,paulmillar/jenkins,my7seven/jenkins,MichaelPranovich/jenkins_sc,samatdav/jenkins,gusreiber/jenkins,recena/jenkins,ndeloof/jenkins,akshayabd/jenkins,mdonohue/jenkins,guoxu0514/jenkins,kohsuke/hudson,my7seven/jenkins,patbos/jenkins,lvotypko/jenkins3,lvotypko/jenkins3,DanielWeber/jenkins,paulmillar/jenkins,abayer/jenkins,chbiel/jenkins,csimons/jenkins,svanoort/jenkins,ns163/jenkins,fbelzunc/jenkins,stephenc/jenkins,svanoort/jenkins,keyurpatankar/hudson,huybrechts/hudson,noikiy/jenkins,escoem/jenkins,guoxu0514/jenkins,Jochen-A-Fuerbacher/jenkins,aldaris/jenkins,recena
/jenkins,duzifang/my-jenkins,pantheon-systems/jenkins,ChrisA89/jenkins,SenolOzer/jenkins,DanielWeber/jenkins,FarmGeek4Life/jenkins,kzantow/jenkins,vvv444/jenkins,andresrc/jenkins,hudson/hudson-2.x,pantheon-systems/jenkins,v1v/jenkins,jzjzjzj/jenkins,dbroady1/jenkins,msrb/jenkins,daniel-beck/jenkins,mcanthony/jenkins,rashmikanta-1984/jenkins,scoheb/jenkins,Jochen-A-Fuerbacher/jenkins,everyonce/jenkins,vjuranek/jenkins,elkingtonmcb/jenkins,akshayabd/jenkins,hplatou/jenkins,viqueen/jenkins,aquarellian/jenkins,patbos/jenkins,hudson/hudson-2.x,seanlin816/jenkins,keyurpatankar/hudson,christ66/jenkins,olivergondza/jenkins,stephenc/jenkins,292388900/jenkins,verbitan/jenkins,vivek/hudson,DoctorQ/jenkins,tastatur/jenkins,wangyikai/jenkins,Krasnyanskiy/jenkins,duzifang/my-jenkins,nandan4/Jenkins,bkmeneguello/jenkins,godfath3r/jenkins,vlajos/jenkins,SenolOzer/jenkins,MadsNielsen/jtemp,evernat/jenkins,damianszczepanik/jenkins,Vlatombe/jenkins,daspilker/jenkins,FarmGeek4Life/jenkins,6WIND/jenkins,pantheon-systems/jenkins,ydubreuil/jenkins,vijayto/jenkins,godfath3r/jenkins,evernat/jenkins,lindzh/jenkins,bkmeneguello/jenkins,shahharsh/jenkins,hemantojhaa/jenkins,ajshastri/jenkins,varmenise/jenkins,vjuranek/jenkins,fbelzunc/jenkins,damianszczepanik/jenkins,292388900/jenkins,mcanthony/jenkins,FarmGeek4Life/jenkins,jtnord/jenkins,gitaccountforprashant/gittest,dariver/jenkins,khmarbaise/jenkins,daniel-beck/jenkins,iterate/coding-dojo,yonglehou/jenkins,CodeShane/jenkins,liupugong/jenkins,NehemiahMi/jenkins,dennisjlee/jenkins,ns163/jenkins,recena/jenkins,vlajos/jenkins,stefanbrausch/hudson-main,kzantow/jenkins,seanlin816/jenkins,svanoort/jenkins,oleg-nenashev/jenkins,batmat/jenkins,vijayto/jenkins,huybrechts/hudson,h4ck3rm1k3/jenkins,damianszczepanik/jenkins,amuniz/jenkins,CodeShane/jenkins,escoem/jenkins,gitaccountforprashant/gittest,khmarbaise/jenkins,vjuranek/jenkins,liorhson/jenkins,evernat/jenkins,tastatur/jenkins,arunsingh/jenkins,6WIND/jenkins,FTG-003/jenkins,jhoblitt/jenkins,gorcz/jenkins,daniel-beck/jenkins,Jimilian/jenkins,chbiel/jenkins,aldaris/jenkins,jenkinsci/jenkins,mrobinet/jenkins,mcanthony/jenkins,damianszczepanik/jenkins,yonglehou/jenkins,maikeffi/hudson,pselle/jenkins,varmenise/jenkins,luoqii/jenkins,mpeltonen/jenkins,my7seven/jenkins,jhoblitt/jenkins,gitaccountforprashant/gittest,liorhson/jenkins,FarmGeek4Life/jenkins,DoctorQ/jenkins,paulmillar/jenkins,godfath3r/jenkins,rsandell/jenkins,everyonce/jenkins,azweb76/jenkins,DoctorQ/jenkins,AustinKwang/jenkins,ydubreuil/jenkins,dennisjlee/jenkins,chbiel/jenkins,tfennelly/jenkins,bpzhang/jenkins,lvotypko/jenkins,godfath3r/jenkins,dennisjlee/jenkins,batmat/jenkins,intelchen/jenkins,godfath3r/jenkins,CodeShane/jenkins,ikedam/jenkins,amuniz/jenkins,DanielWeber/jenkins,MarkEWaite/jenkins,petermarcoen/jenkins,patbos/jenkins,albers/jenkins,aduprat/jenkins,v1v/jenkins,v1v/jenkins,recena/jenkins,ns163/jenkins,gusreiber/jenkins,lvotypko/jenkins2,Jimilian/jenkins,morficus/jenkins,varmenise/jenkins,iqstack/jenkins,seanlin816/jenkins,batmat/jenkins,singh88/jenkins,nandan4/Jenkins,maikeffi/hudson,mrooney/jenkins,dennisjlee/jenkins,jzjzjzj/jenkins,daspilker/jenkins,aduprat/jenkins,Ykus/jenkins,noikiy/jenkins,daspilker/jenkins,escoem/jenkins,abayer/jenkins,abayer/jenkins,lilyJi/jenkins,mpeltonen/jenkins,bkmeneguello/jenkins,mpeltonen/jenkins,andresrc/jenkins,patbos/jenkins,petermarcoen/jenkins,SenolOzer/jenkins,tangkun75/jenkins,vijayto/jenkins,aheritier/jenkins,csimons/jenkins,verbitan/jenkins,khmarbaise/jenkins,daniel-beck/jenkins,jglick/jenkins,hplatou/jenkin
s,stefanbrausch/hudson-main,liorhson/jenkins,lilyJi/jenkins,pantheon-systems/jenkins,Jochen-A-Fuerbacher/jenkins,synopsys-arc-oss/jenkins,mattclark/jenkins,petermarcoen/jenkins,NehemiahMi/jenkins,1and1/jenkins,DanielWeber/jenkins,aldaris/jenkins,bpzhang/jenkins,varmenise/jenkins,MichaelPranovich/jenkins_sc,ikedam/jenkins,lvotypko/jenkins2,pjanouse/jenkins,lvotypko/jenkins3,SebastienGllmt/jenkins,shahharsh/jenkins,liorhson/jenkins,lindzh/jenkins,khmarbaise/jenkins,rsandell/jenkins,albers/jenkins,lvotypko/jenkins2,aduprat/jenkins,ErikVerheul/jenkins,Jochen-A-Fuerbacher/jenkins,synopsys-arc-oss/jenkins,brunocvcunha/jenkins,sathiya-mit/jenkins,nandan4/Jenkins,DanielWeber/jenkins,jcarrothers-sap/jenkins,protazy/jenkins,jhoblitt/jenkins,aheritier/jenkins,kzantow/jenkins,jtnord/jenkins,godfath3r/jenkins,dennisjlee/jenkins,jhoblitt/jenkins,github-api-test-org/jenkins,KostyaSha/jenkins,liorhson/jenkins,gusreiber/jenkins,guoxu0514/jenkins,ydubreuil/jenkins,jenkinsci/jenkins,hplatou/jenkins,seanlin816/jenkins,synopsys-arc-oss/jenkins,SenolOzer/jenkins,Wilfred/jenkins,hudson/hudson-2.x,KostyaSha/jenkins,jcsirot/jenkins,yonglehou/jenkins,pjanouse/jenkins,amuniz/jenkins,varmenise/jenkins,ikedam/jenkins,tangkun75/jenkins,soenter/jenkins,jtnord/jenkins,arcivanov/jenkins,dbroady1/jenkins,morficus/jenkins,evernat/jenkins,FTG-003/jenkins,dariver/jenkins,liupugong/jenkins,mrooney/jenkins,damianszczepanik/jenkins,verbitan/jenkins,vivek/hudson,dbroady1/jenkins,elkingtonmcb/jenkins,tfennelly/jenkins,viqueen/jenkins,chbiel/jenkins,tastatur/jenkins,scoheb/jenkins,FarmGeek4Life/jenkins,jenkinsci/jenkins,hudson/hudson-2.x,hplatou/jenkins,Ykus/jenkins,NehemiahMi/jenkins,sathiya-mit/jenkins,292388900/jenkins,khmarbaise/jenkins,lvotypko/jenkins,ErikVerheul/jenkins,bpzhang/jenkins,jcarrothers-sap/jenkins,olivergondza/jenkins,andresrc/jenkins,lvotypko/jenkins3,daniel-beck/jenkins,ndeloof/jenkins,jcsirot/jenkins,recena/jenkins,pjanouse/jenkins,jzjzjzj/jenkins,vijayto/jenkins,github-api-test-org/jenkins,kzantow/jenkins,Vlatombe/jenkins,protazy/jenkins,deadmoose/jenkins,aldaris/jenkins,elkingtonmcb/jenkins,lordofthejars/jenkins,soenter/jenkins,FTG-003/jenkins,vijayto/jenkins,mpeltonen/jenkins,wuwen5/jenkins,alvarolobato/jenkins,jglick/jenkins,maikeffi/hudson,gusreiber/jenkins,iterate/coding-dojo,christ66/jenkins,bkmeneguello/jenkins,brunocvcunha/jenkins,viqueen/jenkins,jpederzolli/jenkins-1,pjanouse/jenkins,1and1/jenkins,jzjzjzj/jenkins,keyurpatankar/hudson,goldchang/jenkins,NehemiahMi/jenkins,MadsNielsen/jtemp,jk47/jenkins,deadmoose/jenkins,Jimilian/jenkins,luoqii/jenkins,ErikVerheul/jenkins,my7seven/jenkins,samatdav/jenkins,keyurpatankar/hudson,everyonce/jenkins,mrooney/jenkins,noikiy/jenkins,ndeloof/jenkins,hashar/jenkins,oleg-nenashev/jenkins,duzifang/my-jenkins,MarkEWaite/jenkins,SebastienGllmt/jenkins,yonglehou/jenkins,jk47/jenkins,aquarellian/jenkins,nandan4/Jenkins,1and1/jenkins,mrobinet/jenkins,ydubreuil/jenkins,arunsingh/jenkins,vvv444/jenkins,thomassuckow/jenkins,jenkinsci/jenkins,1and1/jenkins,mrobinet/jenkins,olivergondza/jenkins,morficus/jenkins,luoqii/jenkins,Jimilian/jenkins,viqueen/jenkins,intelchen/jenkins,jglick/jenkins,Ykus/jenkins,vvv444/jenkins,jpederzolli/jenkins-1,msrb/jenkins,batmat/jenkins,andresrc/jenkins,msrb/jenkins,aquarellian/jenkins,kohsuke/hudson,jpbriend/jenkins,escoem/jenkins,paulwellnerbou/jenkins,paulmillar/jenkins,mattclark/jenkins,lvotypko/jenkins3,goldchang/jenkins,hplatou/jenkins,mdonohue/jenkins,mattclark/jenkins,amruthsoft9/Jenkis,jcarrothers-sap/jenkins,sathiya-mit/jenkins,escoem/j
enkins,bpzhang/jenkins,keyurpatankar/hudson,MadsNielsen/jtemp,kzantow/jenkins,pselle/jenkins,KostyaSha/jenkins,SebastienGllmt/jenkins,christ66/jenkins,noikiy/jenkins,DoctorQ/jenkins,lvotypko/jenkins,v1v/jenkins,KostyaSha/jenkins,goldchang/jenkins,singh88/jenkins,mattclark/jenkins,pselle/jenkins,svanoort/jenkins,FTG-003/jenkins,brunocvcunha/jenkins,Jimilian/jenkins,paulmillar/jenkins,gusreiber/jenkins,wangyikai/jenkins,jpederzolli/jenkins-1,MarkEWaite/jenkins,kohsuke/hudson,6WIND/jenkins,CodeShane/jenkins,vivek/hudson,chbiel/jenkins,jpbriend/jenkins,Wilfred/jenkins,tfennelly/jenkins,lvotypko/jenkins3,lilyJi/jenkins,kohsuke/hudson,luoqii/jenkins,intelchen/jenkins,goldchang/jenkins,gorcz/jenkins,brunocvcunha/jenkins,samatdav/jenkins,lindzh/jenkins,gusreiber/jenkins,stephenc/jenkins,luoqii/jenkins,recena/jenkins,christ66/jenkins,lilyJi/jenkins,jtnord/jenkins,amruthsoft9/Jenkis,lordofthejars/jenkins,vjuranek/jenkins,pantheon-systems/jenkins,vivek/hudson,h4ck3rm1k3/jenkins,jcarrothers-sap/jenkins,dbroady1/jenkins,batmat/jenkins,vlajos/jenkins,ikedam/jenkins,akshayabd/jenkins,1and1/jenkins,ErikVerheul/jenkins,tastatur/jenkins,aldaris/jenkins,akshayabd/jenkins,wuwen5/jenkins,MadsNielsen/jtemp,Vlatombe/jenkins,recena/jenkins,jcsirot/jenkins,mattclark/jenkins,yonglehou/jenkins,duzifang/my-jenkins,AustinKwang/jenkins,csimons/jenkins,ikedam/jenkins,aquarellian/jenkins,jcsirot/jenkins,protazy/jenkins,viqueen/jenkins,petermarcoen/jenkins,svanoort/jenkins,ikedam/jenkins,scoheb/jenkins,keyurpatankar/hudson,hemantojhaa/jenkins,ajshastri/jenkins,amruthsoft9/Jenkis,singh88/jenkins,pantheon-systems/jenkins,jglick/jenkins,kohsuke/hudson,FTG-003/jenkins,alvarolobato/jenkins,jk47/jenkins,DoctorQ/jenkins,SenolOzer/jenkins,MichaelPranovich/jenkins_sc,aduprat/jenkins,DanielWeber/jenkins,jcarrothers-sap/jenkins,seanlin816/jenkins,synopsys-arc-oss/jenkins,shahharsh/jenkins,1and1/jenkins,Jimilian/jenkins,everyonce/jenkins,patbos/jenkins
package hudson.scm; import hudson.EnvVars; import hudson.FilePath; import hudson.Launcher; import hudson.Proc; import hudson.model.BuildListener; import hudson.model.TaskListener; import hudson.util.ArgumentListBuilder; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.io.OutputStream; import java.util.HashMap; import java.util.Map; /** * Common implementation between {@link CVSSCM} and {@link SubversionSCM}. * * @author Kohsuke Kawaguchi */ abstract class AbstractCVSFamilySCM implements SCM { /** * Invokes the command with the specified command line option and wait for its completion. * * @param dir * if launching locally this is a local path, otherwise a remote path. * @param out * Receives output from the executed program. */ protected final boolean run(Launcher launcher, ArgumentListBuilder cmd, TaskListener listener, FilePath dir, OutputStream out) throws IOException { Map env = createEnvVarMap(true); int r = launcher.launch(cmd.toCommandArray(),env,out,dir).join(); if(r!=0) listener.fatalError(getDescriptor().getDisplayName()+" failed. exit code="+r); return r==0; } protected final boolean run(Launcher launcher, ArgumentListBuilder cmd, TaskListener listener, FilePath dir) throws IOException { return run(launcher,cmd,listener,dir,listener.getLogger()); } /** * * @param overrideOnly * true to indicate that the returned map shall only contain * properties that need to be overridden. This is for use with {@link Launcher}. * false to indicate that the map should contain complete map. * This is to invoke {@link Proc} directly. */ protected final Map createEnvVarMap(boolean overrideOnly) { Map env = new HashMap(); if(!overrideOnly) env.putAll(EnvVars.masterEnvVars); buildEnvVars(env); return env; } protected final boolean createEmptyChangeLog(File changelogFile, BuildListener listener, String rootTag) { try { FileWriter w = new FileWriter(changelogFile); w.write("<"+rootTag +"/>"); w.close(); return true; } catch (IOException e) { e.printStackTrace(listener.error(e.getMessage())); return false; } } protected final String nullify(String s) { if(s==null) return null; if(s.trim().length()==0) return null; return s; } }
core/src/main/java/hudson/scm/AbstractCVSFamilySCM.java
package hudson.scm; import hudson.EnvVars; import hudson.FilePath; import hudson.Launcher; import hudson.Proc; import hudson.model.BuildListener; import hudson.model.TaskListener; import hudson.util.ArgumentListBuilder; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.io.OutputStream; import java.util.HashMap; import java.util.Map; /** * Common implementation between {@link CVSSCM} and {@link SubversionSCM}. * * @author Kohsuke Kawaguchi */ abstract class AbstractCVSFamilySCM implements SCM { /** * Invokes the command with the specified command line option and wait for its completion. * * @param dir * if launching locally this is a local path, otherwise a remote path. * @param out * Receives output from the executed program. */ protected final boolean run(Launcher launcher, ArgumentListBuilder cmd, TaskListener listener, FilePath dir, OutputStream out) throws IOException { Map env = createEnvVarMap(true); int r = launcher.launch(cmd.toCommandArray(),env,out,dir).join(); if(r!=0) listener.fatalError(getDescriptor().getDisplayName()+" failed"); return r==0; } protected final boolean run(Launcher launcher, ArgumentListBuilder cmd, TaskListener listener, FilePath dir) throws IOException { return run(launcher,cmd,listener,dir,listener.getLogger()); } /** * * @param overrideOnly * true to indicate that the returned map shall only contain * properties that need to be overridden. This is for use with {@link Launcher}. * false to indicate that the map should contain complete map. * This is to invoke {@link Proc} directly. */ protected final Map createEnvVarMap(boolean overrideOnly) { Map env = new HashMap(); if(!overrideOnly) env.putAll(EnvVars.masterEnvVars); buildEnvVars(env); return env; } protected final boolean createEmptyChangeLog(File changelogFile, BuildListener listener, String rootTag) { try { FileWriter w = new FileWriter(changelogFile); w.write("<"+rootTag +"/>"); w.close(); return true; } catch (IOException e) { e.printStackTrace(listener.error(e.getMessage())); return false; } } protected final String nullify(String s) { if(s==null) return null; if(s.trim().length()==0) return null; return s; } }
modified to report an exit code upon a failure.

git-svn-id: 28f34f9aa52bc55a5ddd5be9e183c5cccadc6ee4@1163 71c3de6d-444a-0410-be80-ed276b4c234a
core/src/main/java/hudson/scm/AbstractCVSFamilySCM.java
modified to report an exit code upon a failure.
<ide><path>core/src/main/java/hudson/scm/AbstractCVSFamilySCM.java
<ide>
<ide> int r = launcher.launch(cmd.toCommandArray(),env,out,dir).join();
<ide> if(r!=0)
<del> listener.fatalError(getDescriptor().getDisplayName()+" failed");
<add> listener.fatalError(getDescriptor().getDisplayName()+" failed. exit code="+r);
<ide>
<ide> return r==0;
<ide> }
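The diff above replaces a bare "failed" message with one that carries the process exit status, which is often the only clue a build log gives about why an external SCM command died. As a hedged, minimal sketch of the same idea outside Hudson's Launcher API (plain JDK ProcessBuilder; the class name, command, and message format are illustrative assumptions, not part of the original commit):

import java.io.IOException;

// Minimal sketch: run an external command and surface the exit code on failure.
public class RunWithExitCode {

    static boolean run(String... cmd) throws IOException, InterruptedException {
        Process p = new ProcessBuilder(cmd).inheritIO().start();
        int r = p.waitFor();
        if (r != 0) {
            // Reporting the numeric code (not just "failed") makes log triage much easier.
            System.err.println(String.join(" ", cmd) + " failed. exit code=" + r);
        }
        return r == 0;
    }

    public static void main(String[] args) throws Exception {
        run("cvs", "--version"); // illustrative command; any CLI behaves the same way
    }
}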
JavaScript
mit
7d81ab896f58ae6c24802ad9f80abbbf11b4457f
0
machty/ember-concurrency,machty/ember-concurrency,machty/ember-concurrency,machty/ember-concurrency
import Ember from 'ember';
import { task, timeout } from 'ember-concurrency';

// BEGIN-SNIPPET detail-route
export default Ember.Route.extend({
  notify: Ember.inject.service('notify'),

  setupController(controller, model) {
    this._super(...arguments);
    this.get('pollServerForChanges').perform(model.id);
  },

  pollServerForChanges: task(function * (id) {
    let notify = this.get('notify');
    yield timeout(500);

    try {
      notify.info(`Thing ${id}: Starting to poll for changes`);
      while (true) {
        yield timeout(5000);
        notify.info(`Thing ${id}: Polling now...`);
      }
    } finally {
      notify.warning(`Thing ${id}: No longer polling for changes`);
    }
  }).cancelOn('deactivate').restartable(),
});
// END-SNIPPET
tests/dummy/app/docs/examples/route-tasks/detail/route.js
import Ember from 'ember';
import { task, timeout } from 'ember-concurrency';

// BEGIN-SNIPPET detail-route
export default Ember.Route.extend({
  notify: Ember.inject.service('notify'),

  setupController(controller, model) {
    this.get('pollServerForChanges').perform(model.id);
  },

  pollServerForChanges: task(function * (id) {
    let notify = this.get('notify');
    yield timeout(500);

    try {
      notify.info(`Thing ${id}: Starting to poll for changes`);
      while (true) {
        yield timeout(5000);
        notify.info(`Thing ${id}: Polling now...`);
      }
    } finally {
      notify.warning(`Thing ${id}: No longer polling for changes`);
    }
  }).cancelOn('deactivate').restartable(),
});
// END-SNIPPET
Route tasks: + this._super to setupController (#93)
tests/dummy/app/docs/examples/route-tasks/detail/route.js
Route tasks: + this._super to setupController (#93)
<ide><path>tests/dummy/app/docs/examples/route-tasks/detail/route.js
<ide> notify: Ember.inject.service('notify'),
<ide>
<ide> setupController(controller, model) {
<add> this._super(...arguments);
<ide> this.get('pollServerForChanges').perform(model.id);
<ide> },
<ide>
Java
mit
3b760e45fb001f32ca24cd1fcdf22f1099b18f97
0
techdev-solutions/trackr-backend,hongyang070/trackr-backend,agilemobiledev/trackr-backend,ashwinrayaprolu1984/trackr-backend,techdev-solutions/trackr-backend,hongyang070/trackr-backend,agilemobiledev/trackr-backend,ashwinrayaprolu1984/trackr-backend
package de.techdev.trackr.domain;

import lombok.Data;

import javax.persistence.*;
import javax.validation.constraints.NotNull;
import java.sql.Time;
import java.util.Date;

/**
 * @author Moritz Schulze
 */
@Entity
@Data
public class WorkTime {

    @Id
    @GeneratedValue(strategy = GenerationType.AUTO)
    private Long id;

    @Version
    private Integer version;

    @ManyToOne
    @NotNull
    @JoinColumn(name = "employee")
    private Employee employee;

    @ManyToOne
    @NotNull
    @JoinColumn(name = "project")
    private Project project;

    @NotNull
    @Temporal(TemporalType.DATE)
    private Date date;

    private Time start;

    private Time end;

    private String comment;
}
src/main/java/de/techdev/trackr/domain/WorkTime.java
package de.techdev.trackr.domain;

import lombok.Data;

import javax.persistence.*;
import javax.validation.constraints.NotNull;
import java.sql.Time;
import java.util.Date;

/**
 * @author Moritz Schulze
 */
@Entity
@Data
public class WorkTime {

    @Id
    @GeneratedValue(strategy = GenerationType.AUTO)
    private Long id;

    @Version
    private Integer version;

    @ManyToOne
    @NotNull
    @JoinColumn(name = "employee")
    private Employee employee;

    @ManyToOne
    @NotNull
    @JoinColumn(name = "project")
    private Project project;

    @NotNull
    private Date date;

    private Time start;

    private Time end;

    private String comment;
}
TRACKR-1 - Tracking of worked hours

Annotated date of WorkTime.java with @Temporal
src/main/java/de/techdev/trackr/domain/WorkTime.java
TRACKR-1 - Tracking of worked hours Annotated date of WorkTime.java with @Temporal
<ide><path>src/main/java/de/techdev/trackr/domain/WorkTime.java
<ide> private Project project;
<ide>
<ide> @NotNull
<add> @Temporal(TemporalType.DATE)
<ide> private Date date;
<ide>
<ide> private Time start;
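The change above annotates the java.util.Date field with @Temporal(TemporalType.DATE), telling JPA to map it to a plain SQL DATE column rather than a full timestamp. A minimal, self-contained sketch of that mapping, assuming a hypothetical WorkDay entity that is not part of the trackr codebase:

import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
import java.util.Date;

// Illustrative JPA entity: @Temporal is required for java.util.Date fields and
// picks the SQL type (DATE here; TIME and TIMESTAMP are the alternatives).
@Entity
public class WorkDay {

    @Id
    @GeneratedValue
    private Long id;

    // Persists only the calendar date; the time-of-day portion is dropped.
    @Temporal(TemporalType.DATE)
    private Date date;

    public Date getDate() { return date; }
    public void setDate(Date date) { this.date = date; }
}

Most providers would otherwise fall back to a timestamp-style mapping for java.util.Date, which is exactly what the commit is correcting for a date-only field.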
Java
apache-2.0
6f97c42f2fb81367ac3077286beacd4568ec8050
0
SpiralsSeminaire/commons-imaging,mohanaraosv/commons-imaging,yuuhayashi/commons-imaging,SpiralsSeminaire/commons-imaging,mohanaraosv/commons-imaging,yuuhayashi/commons-imaging,apache/commons-imaging,apache/commons-imaging
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.imaging.formats.gif; import java.awt.Dimension; import java.awt.image.BufferedImage; import java.io.ByteArrayInputStream; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.PrintWriter; import java.io.UnsupportedEncodingException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.commons.imaging.FormatCompliance; import org.apache.commons.imaging.ImageFormat; import org.apache.commons.imaging.ImageInfo; import org.apache.commons.imaging.ImageParser; import org.apache.commons.imaging.ImageReadException; import org.apache.commons.imaging.ImageWriteException; import org.apache.commons.imaging.common.BinaryOutputStream; import org.apache.commons.imaging.common.ByteOrder; import org.apache.commons.imaging.common.IImageMetadata; import org.apache.commons.imaging.common.ImageBuilder; import org.apache.commons.imaging.common.bytesource.ByteSource; import org.apache.commons.imaging.common.mylzw.MyLzwCompressor; import org.apache.commons.imaging.common.mylzw.MyLzwDecompressor; import org.apache.commons.imaging.palette.Palette; import org.apache.commons.imaging.palette.PaletteFactory; import org.apache.commons.imaging.util.Debug; import org.apache.commons.imaging.util.ParamMap; public class GifImageParser extends ImageParser { public GifImageParser() { super.setByteOrder(ByteOrder.LITTLE_ENDIAN); } @Override public String getName() { return "Gif-Custom"; } @Override public String getDefaultExtension() { return DEFAULT_EXTENSION; } private static final String DEFAULT_EXTENSION = ".gif"; private static final String ACCEPTED_EXTENSIONS[] = { DEFAULT_EXTENSION, }; @Override protected String[] getAcceptedExtensions() { return ACCEPTED_EXTENSIONS; } @Override protected ImageFormat[] getAcceptedTypes() { return new ImageFormat[] { ImageFormat.GIF, // }; } private static final byte GIF_HEADER_SIGNATURE[] = { 71, 73, 70 }; private GifHeaderInfo readHeader(final InputStream is, final FormatCompliance formatCompliance) throws ImageReadException, IOException { final byte identifier1 = readByte("identifier1", is, "Not a Valid GIF File"); final byte identifier2 = readByte("identifier2", is, "Not a Valid GIF File"); final byte identifier3 = readByte("identifier3", is, "Not a Valid GIF File"); final byte version1 = readByte("version1", is, "Not a Valid GIF File"); final byte version2 = readByte("version2", is, "Not a Valid GIF File"); final byte version3 = readByte("version3", is, "Not a Valid GIF File"); if (formatCompliance != null) { formatCompliance.compare_bytes("Signature", GIF_HEADER_SIGNATURE, new byte[] { identifier1, identifier2, identifier3, }); 
formatCompliance.compare("version", 56, version1); formatCompliance .compare("version", new int[] { 55, 57, }, version2); formatCompliance.compare("version", 97, version3); } if (getDebug()) { printCharQuad("identifier: ", ((identifier1 << 16) | (identifier2 << 8) | (identifier3 << 0))); printCharQuad("version: ", ((version1 << 16) | (version2 << 8) | (version3 << 0))); } final int logicalScreenWidth = read2Bytes("Logical Screen Width", is, "Not a Valid GIF File"); final int logicalScreenHeight = read2Bytes("Logical Screen Height", is, "Not a Valid GIF File"); if (formatCompliance != null) { formatCompliance.checkBounds("Width", 1, Integer.MAX_VALUE, logicalScreenWidth); formatCompliance.checkBounds("Height", 1, Integer.MAX_VALUE, logicalScreenHeight); } final byte packedFields = readByte("Packed Fields", is, "Not a Valid GIF File"); final byte backgroundColorIndex = readByte("Background Color Index", is, "Not a Valid GIF File"); final byte pixelAspectRatio = readByte("Pixel Aspect Ratio", is, "Not a Valid GIF File"); if (getDebug()) { printByteBits("PackedFields bits", packedFields); } final boolean globalColorTableFlag = ((packedFields & 128) > 0); if (getDebug()) { System.out.println("GlobalColorTableFlag: " + globalColorTableFlag); } final byte colorResolution = (byte) ((packedFields >> 4) & 7); if (getDebug()) { System.out.println("ColorResolution: " + colorResolution); } final boolean sortFlag = ((packedFields & 8) > 0); if (getDebug()) { System.out.println("SortFlag: " + sortFlag); } final byte sizeofGlobalColorTable = (byte) (packedFields & 7); if (getDebug()) { System.out.println("SizeofGlobalColorTable: " + sizeofGlobalColorTable); } if (formatCompliance != null) { if (globalColorTableFlag && backgroundColorIndex != -1) { formatCompliance.checkBounds("Background Color Index", 0, convertColorTableSize(sizeofGlobalColorTable), backgroundColorIndex); } } return new GifHeaderInfo(identifier1, identifier2, identifier3, version1, version2, version3, logicalScreenWidth, logicalScreenHeight, packedFields, backgroundColorIndex, pixelAspectRatio, globalColorTableFlag, colorResolution, sortFlag, sizeofGlobalColorTable); } private GraphicControlExtension readGraphicControlExtension(final int code, final InputStream is) throws IOException { readByte("block_size", is, "GIF: corrupt GraphicControlExt"); final int packed = readByte("packed fields", is, "GIF: corrupt GraphicControlExt"); final int dispose = (packed & 0x1c) >> 2; // disposal method final boolean transparency = (packed & 1) != 0; final int delay = read2Bytes("delay in milliseconds", is, "GIF: corrupt GraphicControlExt"); final int transparentColorIndex = 0xff & readByte("transparent color index", is, "GIF: corrupt GraphicControlExt"); readByte("block terminator", is, "GIF: corrupt GraphicControlExt"); return new GraphicControlExtension(code, packed, dispose, transparency, delay, transparentColorIndex); } private byte[] readSubBlock(final InputStream is) throws IOException { final int block_size = 0xff & readByte("block_size", is, "GIF: corrupt block"); final byte bytes[] = readBytes("block", is, block_size, "GIF: corrupt block"); return bytes; } protected GenericGifBlock readGenericGIFBlock(final InputStream is, final int code) throws IOException { return readGenericGIFBlock(is, code, null); } protected GenericGifBlock readGenericGIFBlock(final InputStream is, final int code, final byte first[]) throws IOException { final List<byte[]> subblocks = new ArrayList<byte[]>(); if (first != null) { subblocks.add(first); } while (true) { 
final byte bytes[] = readSubBlock(is); if (bytes.length < 1) { break; } subblocks.add(bytes); } return new GenericGifBlock(code, subblocks); } private final static int EXTENSION_CODE = 0x21; private final static int IMAGE_SEPARATOR = 0x2C; private final static int GRAPHIC_CONTROL_EXTENSION = (EXTENSION_CODE << 8) | 0xf9; private final static int COMMENT_EXTENSION = 0xfe; private final static int PLAIN_TEXT_EXTENSION = 0x01; private final static int XMP_EXTENSION = 0xff; private final static int TERMINATOR_BYTE = 0x3b; private final static int APPLICATION_EXTENSION_LABEL = 0xff; private final static int XMP_COMPLETE_CODE = (EXTENSION_CODE << 8) | XMP_EXTENSION; private List<GifBlock> readBlocks(final GifHeaderInfo ghi, final InputStream is, final boolean stopBeforeImageData, final FormatCompliance formatCompliance) throws ImageReadException, IOException { final List<GifBlock> result = new ArrayList<GifBlock>(); while (true) { final int code = is.read(); switch (code) { case -1: throw new ImageReadException("GIF: unexpected end of data"); case IMAGE_SEPARATOR: final ImageDescriptor id = readImageDescriptor(ghi, code, is, stopBeforeImageData, formatCompliance); result.add(id); // if(stopBeforeImageData) // return result; break; case EXTENSION_CODE: // extension { final int extensionCode = is.read(); final int completeCode = ((0xff & code) << 8) | (0xff & extensionCode); switch (extensionCode) { case 0xf9: final GraphicControlExtension gce = readGraphicControlExtension( completeCode, is); result.add(gce); break; case COMMENT_EXTENSION: case PLAIN_TEXT_EXTENSION: { final GenericGifBlock block = readGenericGIFBlock(is, completeCode); result.add(block); break; } case APPLICATION_EXTENSION_LABEL: // 255 (hex 0xFF) Application // Extension Label { final byte label[] = readSubBlock(is); if (formatCompliance != null) { formatCompliance.addComment( "Unknown Application Extension (" + new String(label, "US-ASCII") + ")", completeCode); } // if (label == new String("ICCRGBG1")) { // GIF's can have embedded ICC Profiles - who knew? 
} if ((label != null) && (label.length > 0)) { final GenericGifBlock block = readGenericGIFBlock(is, completeCode, label); result.add(block); } break; } default: { if (formatCompliance != null) { formatCompliance.addComment("Unknown block", completeCode); } final GenericGifBlock block = readGenericGIFBlock(is, completeCode); result.add(block); break; } } } break; case TERMINATOR_BYTE: return result; case 0x00: // bad byte, but keep going and see what happens break; default: throw new ImageReadException("GIF: unknown code: " + code); } } } private ImageDescriptor readImageDescriptor(final GifHeaderInfo ghi, final int blockCode, final InputStream is, final boolean stopBeforeImageData, final FormatCompliance formatCompliance) throws ImageReadException, IOException { final int ImageLeftPosition = read2Bytes("Image Left Position", is, "Not a Valid GIF File"); final int ImageTopPosition = read2Bytes("Image Top Position", is, "Not a Valid GIF File"); final int imageWidth = read2Bytes("Image Width", is, "Not a Valid GIF File"); final int imageHeight = read2Bytes("Image Height", is, "Not a Valid GIF File"); final byte PackedFields = readByte("Packed Fields", is, "Not a Valid GIF File"); if (formatCompliance != null) { formatCompliance.checkBounds("Width", 1, ghi.logicalScreenWidth, imageWidth); formatCompliance.checkBounds("Height", 1, ghi.logicalScreenHeight, imageHeight); formatCompliance.checkBounds("Left Position", 0, ghi.logicalScreenWidth - imageWidth, ImageLeftPosition); formatCompliance.checkBounds("Top Position", 0, ghi.logicalScreenHeight - imageHeight, ImageTopPosition); } if (getDebug()) { printByteBits("PackedFields bits", PackedFields); } final boolean LocalColorTableFlag = (((PackedFields >> 7) & 1) > 0); if (getDebug()) { System.out.println("LocalColorTableFlag: " + LocalColorTableFlag); } final boolean InterlaceFlag = (((PackedFields >> 6) & 1) > 0); if (getDebug()) { System.out.println("Interlace Flag: " + InterlaceFlag); } final boolean SortFlag = (((PackedFields >> 5) & 1) > 0); if (getDebug()) { System.out.println("Sort Flag: " + SortFlag); } final byte SizeofLocalColorTable = (byte) (PackedFields & 7); if (getDebug()) { System.out.println("SizeofLocalColorTable: " + SizeofLocalColorTable); } byte LocalColorTable[] = null; if (LocalColorTableFlag) { LocalColorTable = readColorTable(is, SizeofLocalColorTable, formatCompliance); } byte imageData[] = null; if (!stopBeforeImageData) { final int LZWMinimumCodeSize = is.read(); final GenericGifBlock block = readGenericGIFBlock(is, -1); final byte bytes[] = block.appendSubBlocks(); final InputStream bais = new ByteArrayInputStream(bytes); final int size = imageWidth * imageHeight; final MyLzwDecompressor myLzwDecompressor = new MyLzwDecompressor( LZWMinimumCodeSize, ByteOrder.LITTLE_ENDIAN); imageData = myLzwDecompressor.decompress(bais, size); } else { final int LZWMinimumCodeSize = is.read(); if (getDebug()) { System.out.println("LZWMinimumCodeSize: " + LZWMinimumCodeSize); } readGenericGIFBlock(is, -1); } final ImageDescriptor result = new ImageDescriptor(blockCode, ImageLeftPosition, ImageTopPosition, imageWidth, imageHeight, PackedFields, LocalColorTableFlag, InterlaceFlag, SortFlag, SizeofLocalColorTable, LocalColorTable, imageData); return result; } private int simple_pow(final int base, final int power) { int result = 1; for (int i = 0; i < power; i++) { result *= base; } return result; } private int convertColorTableSize(final int ct_size) { return 3 * simple_pow(2, ct_size + 1); } private byte[] readColorTable(final 
InputStream is, final int ct_size, final FormatCompliance formatCompliance) throws IOException { final int actual_size = convertColorTableSize(ct_size); final byte bytes[] = readBytes("block", is, actual_size, "GIF: corrupt Color Table"); return bytes; } private GifBlock findBlock(final List<GifBlock> v, final int code) { for (int i = 0; i < v.size(); i++) { final GifBlock gifBlock = v.get(i); if (gifBlock.blockCode == code) { return gifBlock; } } return null; } private ImageContents readFile(final ByteSource byteSource, final boolean stopBeforeImageData) throws ImageReadException, IOException { return readFile(byteSource, stopBeforeImageData, FormatCompliance.getDefault()); } private ImageContents readFile(final ByteSource byteSource, final boolean stopBeforeImageData, final FormatCompliance formatCompliance) throws ImageReadException, IOException { InputStream is = null; try { is = byteSource.getInputStream(); final GifHeaderInfo ghi = readHeader(is, formatCompliance); byte globalColorTable[] = null; if (ghi.globalColorTableFlag) { globalColorTable = readColorTable(is, ghi.sizeOfGlobalColorTable, formatCompliance); } final List<GifBlock> blocks = readBlocks(ghi, is, stopBeforeImageData, formatCompliance); final ImageContents result = new ImageContents(ghi, globalColorTable, blocks); return result; } finally { try { if (is != null) { is.close(); } } catch (final Exception e) { Debug.debug(e); } } } @Override public byte[] getICCProfileBytes(final ByteSource byteSource, final Map<String,Object> params) throws ImageReadException, IOException { return null; } @Override public Dimension getImageSize(final ByteSource byteSource, final Map<String,Object> params) throws ImageReadException, IOException { final ImageContents blocks = readFile(byteSource, false); if (blocks == null) { throw new ImageReadException("GIF: Couldn't read blocks"); } final GifHeaderInfo bhi = blocks.gifHeaderInfo; if (bhi == null) { throw new ImageReadException("GIF: Couldn't read Header"); } final ImageDescriptor id = (ImageDescriptor) findBlock(blocks.blocks, IMAGE_SEPARATOR); if (id == null) { throw new ImageReadException("GIF: Couldn't read ImageDescriptor"); } // Prefer the size information in the ImageDescriptor; it is more // reliable // than the size information in the header. 
return new Dimension(id.imageWidth, id.imageHeight); } public byte[] embedICCProfile(final byte image[], final byte profile[]) { return null; } @Override public boolean embedICCProfile(final File src, final File dst, final byte profile[]) { return false; } @Override public IImageMetadata getMetadata(final ByteSource byteSource, final Map<String,Object> params) throws ImageReadException, IOException { return null; } private List<String> getComments(final List<GifBlock> v) throws IOException { final List<String> result = new ArrayList<String>(); final int code = 0x21fe; for (int i = 0; i < v.size(); i++) { final GifBlock block = v.get(i); if (block.blockCode == code) { final byte bytes[] = ((GenericGifBlock) block).appendSubBlocks(); result.add(new String(bytes, "US-ASCII")); } } return result; } @Override public ImageInfo getImageInfo(final ByteSource byteSource, final Map<String,Object> params) throws ImageReadException, IOException { final ImageContents blocks = readFile(byteSource, false); if (blocks == null) { throw new ImageReadException("GIF: Couldn't read blocks"); } final GifHeaderInfo bhi = blocks.gifHeaderInfo; if (bhi == null) { throw new ImageReadException("GIF: Couldn't read Header"); } final ImageDescriptor id = (ImageDescriptor) findBlock(blocks.blocks, IMAGE_SEPARATOR); if (id == null) { throw new ImageReadException("GIF: Couldn't read ImageDescriptor"); } final GraphicControlExtension gce = (GraphicControlExtension) findBlock( blocks.blocks, GRAPHIC_CONTROL_EXTENSION); // Prefer the size information in the ImageDescriptor; it is more // reliable than the size information in the header. final int height = id.imageHeight; final int width = id.imageWidth; final List<String> comments = getComments(blocks.blocks); final int bitsPerPixel = (bhi.colorResolution + 1); final ImageFormat format = ImageFormat.GIF; final String formatName = "GIF Graphics Interchange Format"; final String mimeType = "image/gif"; // we ought to count images, but don't yet. 
final int numberOfImages = -1; final boolean isProgressive = id.interlaceFlag; final int physicalWidthDpi = 72; final float physicalWidthInch = (float) ((double) width / (double) physicalWidthDpi); final int physicalHeightDpi = 72; final float physicalHeightInch = (float) ((double) height / (double) physicalHeightDpi); final String formatDetails = "Gif " + ((char) blocks.gifHeaderInfo.version1) + ((char) blocks.gifHeaderInfo.version2) + ((char) blocks.gifHeaderInfo.version3); boolean isTransparent = false; if (gce != null && gce.transparency) { isTransparent = true; } final boolean usesPalette = true; final int colorType = ImageInfo.COLOR_TYPE_RGB; final String compressionAlgorithm = ImageInfo.COMPRESSION_ALGORITHM_LZW; final ImageInfo result = new ImageInfo(formatDetails, bitsPerPixel, comments, format, formatName, height, mimeType, numberOfImages, physicalHeightDpi, physicalHeightInch, physicalWidthDpi, physicalWidthInch, width, isProgressive, isTransparent, usesPalette, colorType, compressionAlgorithm); return result; } @Override public boolean dumpImageFile(final PrintWriter pw, final ByteSource byteSource) throws ImageReadException, IOException { pw.println("gif.dumpImageFile"); final ImageInfo imageData = getImageInfo(byteSource); if (imageData == null) { return false; } imageData.toString(pw, ""); final ImageContents blocks = readFile(byteSource, false); pw.println("gif.blocks: " + blocks.blocks.size()); for (int i = 0; i < blocks.blocks.size(); i++) { final GifBlock gifBlock = blocks.blocks.get(i); this.debugNumber(pw, "\t" + i + " (" + gifBlock.getClass().getName() + ")", gifBlock.blockCode, 4); } pw.println(""); return true; } private int[] getColorTable(final byte bytes[]) throws ImageReadException { if ((bytes.length % 3) != 0) { throw new ImageReadException("Bad Color Table Length: " + bytes.length); } final int length = bytes.length / 3; final int result[] = new int[length]; for (int i = 0; i < length; i++) { final int red = 0xff & bytes[(i * 3) + 0]; final int green = 0xff & bytes[(i * 3) + 1]; final int blue = 0xff & bytes[(i * 3) + 2]; final int alpha = 0xff; final int rgb = (alpha << 24) | (red << 16) | (green << 8) | (blue << 0); result[i] = rgb; } return result; } @Override public FormatCompliance getFormatCompliance(final ByteSource byteSource) throws ImageReadException, IOException { final FormatCompliance result = new FormatCompliance( byteSource.getDescription()); readFile(byteSource, false, result); return result; } @Override public BufferedImage getBufferedImage(final ByteSource byteSource, final Map<String,Object> params) throws ImageReadException, IOException { final ImageContents imageContents = readFile(byteSource, false); if (imageContents == null) { throw new ImageReadException("GIF: Couldn't read blocks"); } final GifHeaderInfo ghi = imageContents.gifHeaderInfo; if (ghi == null) { throw new ImageReadException("GIF: Couldn't read Header"); } final ImageDescriptor id = (ImageDescriptor) findBlock(imageContents.blocks, IMAGE_SEPARATOR); if (id == null) { throw new ImageReadException("GIF: Couldn't read Image Descriptor"); } final GraphicControlExtension gce = (GraphicControlExtension) findBlock( imageContents.blocks, GRAPHIC_CONTROL_EXTENSION); // Prefer the size information in the ImageDescriptor; it is more // reliable // than the size information in the header. 
final int width = id.imageWidth; final int height = id.imageHeight; boolean hasAlpha = false; if (gce != null && gce.transparency) { hasAlpha = true; } final ImageBuilder imageBuilder = new ImageBuilder(width, height, hasAlpha); int colorTable[]; if (id.localColorTable != null) { colorTable = getColorTable(id.localColorTable); } else if (imageContents.globalColorTable != null) { colorTable = getColorTable(imageContents.globalColorTable); } else { throw new ImageReadException("Gif: No Color Table"); } int transparentIndex = -1; if (hasAlpha) { transparentIndex = gce.transparentColorIndex; } int counter = 0; final int rowsInPass1 = (height + 7) / 8; final int rowsInPass2 = (height + 3) / 8; final int rowsInPass3 = (height + 1) / 4; final int rowsInPass4 = (height) / 2; for (int row = 0; row < height; row++) { int y; if (id.interlaceFlag) { int the_row = row; if (the_row < rowsInPass1) { y = the_row * 8; } else { the_row -= rowsInPass1; if (the_row < (rowsInPass2)) { y = 4 + (the_row * 8); } else { the_row -= rowsInPass2; if (the_row < (rowsInPass3)) { y = 2 + (the_row * 4); } else { the_row -= rowsInPass3; if (the_row < (rowsInPass4)) { y = 1 + (the_row * 2); } else { throw new ImageReadException( "Gif: Strange Row"); } } } } } else { y = row; } for (int x = 0; x < width; x++) { final int index = 0xff & id.imageData[counter++]; int rgb = colorTable[index]; if (transparentIndex == index) { rgb = 0x00; } imageBuilder.setRGB(x, y, rgb); } } return imageBuilder.getBufferedImage(); } private void writeAsSubBlocks(final OutputStream os, final byte bytes[]) throws IOException { int index = 0; while (index < bytes.length) { final int block_size = Math.min(bytes.length - index, 255); os.write(block_size); os.write(bytes, index, block_size); index += block_size; } os.write(0); // last block } private static final int LOCAL_COLOR_TABLE_FLAG_MASK = 1 << 7; private static final int INTERLACE_FLAG_MASK = 1 << 6; private static final int SORT_FLAG_MASK = 1 << 5; @Override public void writeImage(final BufferedImage src, final OutputStream os, Map<String,Object> params) throws ImageWriteException, IOException { // make copy of params; we'll clear keys as we consume them. params = new HashMap<String,Object>(params); final boolean verbose = ParamMap.getParamBoolean(params, PARAM_KEY_VERBOSE, false); // clear format key. if (params.containsKey(PARAM_KEY_FORMAT)) { params.remove(PARAM_KEY_FORMAT); } if (params.containsKey(PARAM_KEY_VERBOSE)) { params.remove(PARAM_KEY_VERBOSE); } String xmpXml = null; if (params.containsKey(PARAM_KEY_XMP_XML)) { xmpXml = (String) params.get(PARAM_KEY_XMP_XML); params.remove(PARAM_KEY_XMP_XML); } if (params.size() > 0) { final Object firstKey = params.keySet().iterator().next(); throw new ImageWriteException("Unknown parameter: " + firstKey); } final int width = src.getWidth(); final int height = src.getHeight(); final boolean hasAlpha = new PaletteFactory().hasTransparency(src); final int max_colors = hasAlpha ? 
255 : 256; Palette palette2 = new PaletteFactory().makeExactRgbPaletteSimple(src, max_colors); // int palette[] = new PaletteFactory().makePaletteSimple(src, 256); // Map palette_map = paletteToMap(palette); if (palette2 == null) { palette2 = new PaletteFactory().makeQuantizedRgbPalette(src, max_colors); if (verbose) { System.out.println("quantizing"); } } else if (verbose) { System.out.println("exact palette"); } if (palette2 == null) { throw new ImageWriteException( "Gif: can't write images with more than 256 colors"); } final int palette_size = palette2.length() + (hasAlpha ? 1 : 0); final BinaryOutputStream bos = new BinaryOutputStream(os, ByteOrder.LITTLE_ENDIAN); // write Header os.write(0x47); // G magic numbers os.write(0x49); // I os.write(0x46); // F os.write(0x38); // 8 version magic numbers os.write(0x39); // 9 os.write(0x61); // a // Logical Screen Descriptor. bos.write2Bytes(width); bos.write2Bytes(height); final int colorTableScaleLessOne = (palette_size > 128) ? 7 : (palette_size > 64) ? 6 : (palette_size > 32) ? 5 : (palette_size > 16) ? 4 : (palette_size > 8) ? 3 : (palette_size > 4) ? 2 : (palette_size > 2) ? 1 : 0; final int colorTableSizeInFormat = 1 << (colorTableScaleLessOne + 1); { final byte colorResolution = (byte) colorTableScaleLessOne; // TODO: final boolean globalColorTableFlag = false; final boolean sortFlag = false; final int globalColorTableFlagMask = 1 << 7; final int sortFlagMask = 8; final int sizeOfGlobalColorTable = 0; final int packedFields = ((globalColorTableFlag ? globalColorTableFlagMask : 0) | (sortFlag ? sortFlagMask : 0) | ((7 & colorResolution) << 4) | (7 & sizeOfGlobalColorTable)); bos.write(packedFields); // one byte } { final byte BackgroundColorIndex = 0; bos.write(BackgroundColorIndex); } { final byte PixelAspectRatio = 0; bos.write(PixelAspectRatio); } { // write Global Color Table. } { // ALWAYS write GraphicControlExtension bos.write(EXTENSION_CODE); bos.write((byte) 0xf9); // bos.write(0xff & (kGraphicControlExtension >> 8)); // bos.write(0xff & (kGraphicControlExtension >> 0)); bos.write((byte) 4); // block size; final int packedFields = hasAlpha ? 1 : 0; // transparency flag bos.write((byte) packedFields); bos.write((byte) 0); // Delay Time bos.write((byte) 0); // Delay Time bos.write((byte) (hasAlpha ? palette2.length() : 0)); // Transparent // Color // Index bos.write((byte) 0); // terminator } if (null != xmpXml) { bos.write(EXTENSION_CODE); bos.write(APPLICATION_EXTENSION_LABEL); bos.write(XMP_APPLICATION_ID_AND_AUTH_CODE.length); // 0x0B bos.write(XMP_APPLICATION_ID_AND_AUTH_CODE); final byte xmpXmlBytes[] = xmpXml.getBytes("utf-8"); bos.write(xmpXmlBytes); // write "magic trailer" for (int magic = 0; magic <= 0xff; magic++) { bos.write(0xff - magic); } bos.write((byte) 0); // terminator } { // Image Descriptor. bos.write(IMAGE_SEPARATOR); bos.write2Bytes(0); // Image Left Position bos.write2Bytes(0); // Image Top Position bos.write2Bytes(width); // Image Width bos.write2Bytes(height); // Image Height { final boolean LocalColorTableFlag = true; // boolean LocalColorTableFlag = false; final boolean InterlaceFlag = false; final boolean SortFlag = false; final int SizeOfLocalColorTable = colorTableScaleLessOne; // int SizeOfLocalColorTable = 0; final int PackedFields = ((LocalColorTableFlag ? LOCAL_COLOR_TABLE_FLAG_MASK : 0) | (InterlaceFlag ? INTERLACE_FLAG_MASK : 0) | (SortFlag ? SORT_FLAG_MASK : 0) | (7 & SizeOfLocalColorTable)); bos.write(PackedFields); // one byte } } { // write Local Color Table. 
for (int i = 0; i < colorTableSizeInFormat; i++) { if (i < palette2.length()) { final int rgb = palette2.getEntry(i); final int red = 0xff & (rgb >> 16); final int green = 0xff & (rgb >> 8); final int blue = 0xff & (rgb >> 0); bos.write(red); bos.write(green); bos.write(blue); } else { bos.write(0); bos.write(0); bos.write(0); } } } { // get Image Data. // int image_data_total = 0; int LZWMinimumCodeSize = colorTableScaleLessOne + 1; // LZWMinimumCodeSize = Math.max(8, LZWMinimumCodeSize); if (LZWMinimumCodeSize < 2) { LZWMinimumCodeSize = 2; } // TODO: // make // better // choice // here. bos.write(LZWMinimumCodeSize); final MyLzwCompressor compressor = new MyLzwCompressor( LZWMinimumCodeSize, ByteOrder.LITTLE_ENDIAN, false); // GIF // Mode); final byte imagedata[] = new byte[width * height]; for (int y = 0; y < height; y++) { for (int x = 0; x < width; x++) { final int argb = src.getRGB(x, y); final int rgb = 0xffffff & argb; int index; if (hasAlpha) { final int alpha = 0xff & (argb >> 24); final int alphaThreshold = 255; if (alpha < alphaThreshold) { index = palette2.length(); // is transparent } else { index = palette2.getPaletteIndex(rgb); } } else { index = palette2.getPaletteIndex(rgb); } imagedata[y * width + x] = (byte) index; } } final byte compressed[] = compressor.compress(imagedata); writeAsSubBlocks(bos, compressed); // image_data_total += compressed.length; } // palette2.dump(); bos.write(TERMINATOR_BYTE); bos.close(); os.close(); } private static final byte XMP_APPLICATION_ID_AND_AUTH_CODE[] = { 0x58, // X 0x4D, // M 0x50, // P 0x20, // 0x44, // D 0x61, // a 0x74, // t 0x61, // a 0x58, // X 0x4D, // M 0x50, // P }; /** * Extracts embedded XML metadata as XML string. * <p> * * @param byteSource * File containing image data. * @param params * Map of optional parameters, defined in ImagingConstants. * @return Xmp Xml as String, if present. Otherwise, returns null. */ @Override public String getXmpXml(final ByteSource byteSource, final Map<String,Object> params) throws ImageReadException, IOException { InputStream is = null; try { is = byteSource.getInputStream(); final FormatCompliance formatCompliance = null; final GifHeaderInfo ghi = readHeader(is, formatCompliance); if (ghi.globalColorTableFlag) { readColorTable(is, ghi.sizeOfGlobalColorTable, formatCompliance); } final List<GifBlock> blocks = readBlocks(ghi, is, true, formatCompliance); final List<String> result = new ArrayList<String>(); for (int i = 0; i < blocks.size(); i++) { final GifBlock block = blocks.get(i); if (block.blockCode != XMP_COMPLETE_CODE) { continue; } final GenericGifBlock genericBlock = (GenericGifBlock) block; final byte blockBytes[] = genericBlock.appendSubBlocks(true); if (blockBytes.length < XMP_APPLICATION_ID_AND_AUTH_CODE.length) { continue; } if (!compareBytes(blockBytes, 0, XMP_APPLICATION_ID_AND_AUTH_CODE, 0, XMP_APPLICATION_ID_AND_AUTH_CODE.length)) { continue; } final byte GIF_MAGIC_TRAILER[] = new byte[256]; for (int magic = 0; magic <= 0xff; magic++) { GIF_MAGIC_TRAILER[magic] = (byte) (0xff - magic); } if (blockBytes.length < XMP_APPLICATION_ID_AND_AUTH_CODE.length + GIF_MAGIC_TRAILER.length) { continue; } if (!compareBytes(blockBytes, blockBytes.length - GIF_MAGIC_TRAILER.length, GIF_MAGIC_TRAILER, 0, GIF_MAGIC_TRAILER.length)) { throw new ImageReadException( "XMP block in GIF missing magic trailer."); } try { // XMP is UTF-8 encoded xml. 
final String xml = new String( blockBytes, XMP_APPLICATION_ID_AND_AUTH_CODE.length, blockBytes.length - (XMP_APPLICATION_ID_AND_AUTH_CODE.length + GIF_MAGIC_TRAILER.length), "utf-8"); result.add(xml); } catch (final UnsupportedEncodingException e) { throw new ImageReadException("Invalid XMP Block in GIF."); } } if (result.size() < 1) { return null; } if (result.size() > 1) { throw new ImageReadException("More than one XMP Block in GIF."); } return result.get(0); } finally { try { if (is != null) { is.close(); } } catch (final Exception e) { Debug.debug(e); } } } }
src/main/java/org/apache/commons/imaging/formats/gif/GifImageParser.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.imaging.formats.gif; import java.awt.Dimension; import java.awt.image.BufferedImage; import java.io.ByteArrayInputStream; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.PrintWriter; import java.io.UnsupportedEncodingException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.commons.imaging.FormatCompliance; import org.apache.commons.imaging.ImageFormat; import org.apache.commons.imaging.ImageInfo; import org.apache.commons.imaging.ImageParser; import org.apache.commons.imaging.ImageReadException; import org.apache.commons.imaging.ImageWriteException; import org.apache.commons.imaging.common.BinaryOutputStream; import org.apache.commons.imaging.common.ByteOrder; import org.apache.commons.imaging.common.IImageMetadata; import org.apache.commons.imaging.common.ImageBuilder; import org.apache.commons.imaging.common.bytesource.ByteSource; import org.apache.commons.imaging.common.mylzw.MyLzwCompressor; import org.apache.commons.imaging.common.mylzw.MyLzwDecompressor; import org.apache.commons.imaging.palette.Palette; import org.apache.commons.imaging.palette.PaletteFactory; import org.apache.commons.imaging.util.Debug; import org.apache.commons.imaging.util.ParamMap; public class GifImageParser extends ImageParser { public GifImageParser() { super.setByteOrder(ByteOrder.LITTLE_ENDIAN); } @Override public String getName() { return "Gif-Custom"; } @Override public String getDefaultExtension() { return DEFAULT_EXTENSION; } private static final String DEFAULT_EXTENSION = ".gif"; private static final String ACCEPTED_EXTENSIONS[] = { DEFAULT_EXTENSION, }; @Override protected String[] getAcceptedExtensions() { return ACCEPTED_EXTENSIONS; } @Override protected ImageFormat[] getAcceptedTypes() { return new ImageFormat[] { ImageFormat.GIF, // }; } private static final byte GIF_HEADER_SIGNATURE[] = { 71, 73, 70 }; private GifHeaderInfo readHeader(final InputStream is, final FormatCompliance formatCompliance) throws ImageReadException, IOException { final byte identifier1 = readByte("identifier1", is, "Not a Valid GIF File"); final byte identifier2 = readByte("identifier2", is, "Not a Valid GIF File"); final byte identifier3 = readByte("identifier3", is, "Not a Valid GIF File"); final byte version1 = readByte("version1", is, "Not a Valid GIF File"); final byte version2 = readByte("version2", is, "Not a Valid GIF File"); final byte version3 = readByte("version3", is, "Not a Valid GIF File"); if (formatCompliance != null) { formatCompliance.compare_bytes("Signature", GIF_HEADER_SIGNATURE, new byte[] { identifier1, identifier2, identifier3, }); 
formatCompliance.compare("version", 56, version1); formatCompliance .compare("version", new int[] { 55, 57, }, version2); formatCompliance.compare("version", 97, version3); } if (getDebug()) { printCharQuad("identifier: ", ((identifier1 << 16) | (identifier2 << 8) | (identifier3 << 0))); printCharQuad("version: ", ((version1 << 16) | (version2 << 8) | (version3 << 0))); } final int logicalScreenWidth = read2Bytes("Logical Screen Width", is, "Not a Valid GIF File"); final int logicalScreenHeight = read2Bytes("Logical Screen Height", is, "Not a Valid GIF File"); if (formatCompliance != null) { formatCompliance.checkBounds("Width", 1, Integer.MAX_VALUE, logicalScreenWidth); formatCompliance.checkBounds("Height", 1, Integer.MAX_VALUE, logicalScreenHeight); } final byte packedFields = readByte("Packed Fields", is, "Not a Valid GIF File"); final byte backgroundColorIndex = readByte("Background Color Index", is, "Not a Valid GIF File"); final byte pixelAspectRatio = readByte("Pixel Aspect Ratio", is, "Not a Valid GIF File"); if (getDebug()) { printByteBits("PackedFields bits", packedFields); } final boolean globalColorTableFlag = ((packedFields & 128) > 0); if (getDebug()) { System.out.println("GlobalColorTableFlag: " + globalColorTableFlag); } final byte colorResolution = (byte) ((packedFields >> 4) & 7); if (getDebug()) { System.out.println("ColorResolution: " + colorResolution); } final boolean sortFlag = ((packedFields & 8) > 0); if (getDebug()) { System.out.println("SortFlag: " + sortFlag); } final byte sizeofGlobalColorTable = (byte) (packedFields & 7); if (getDebug()) { System.out.println("SizeofGlobalColorTable: " + sizeofGlobalColorTable); } if (formatCompliance != null) { if (globalColorTableFlag && backgroundColorIndex != -1) { formatCompliance.checkBounds("Background Color Index", 0, convertColorTableSize(sizeofGlobalColorTable), backgroundColorIndex); } } return new GifHeaderInfo(identifier1, identifier2, identifier3, version1, version2, version3, logicalScreenWidth, logicalScreenHeight, packedFields, backgroundColorIndex, pixelAspectRatio, globalColorTableFlag, colorResolution, sortFlag, sizeofGlobalColorTable); } private GraphicControlExtension readGraphicControlExtension(final int code, final InputStream is) throws ImageReadException, IOException { readByte("block_size", is, "GIF: corrupt GraphicControlExt"); final int packed = readByte("packed fields", is, "GIF: corrupt GraphicControlExt"); final int dispose = (packed & 0x1c) >> 2; // disposal method final boolean transparency = (packed & 1) != 0; final int delay = read2Bytes("delay in milliseconds", is, "GIF: corrupt GraphicControlExt"); final int transparentColorIndex = 0xff & readByte("transparent color index", is, "GIF: corrupt GraphicControlExt"); readByte("block terminator", is, "GIF: corrupt GraphicControlExt"); return new GraphicControlExtension(code, packed, dispose, transparency, delay, transparentColorIndex); } private byte[] readSubBlock(final InputStream is) throws IOException { final int block_size = 0xff & readByte("block_size", is, "GIF: corrupt block"); final byte bytes[] = readBytes("block", is, block_size, "GIF: corrupt block"); return bytes; } protected GenericGifBlock readGenericGIFBlock(final InputStream is, final int code) throws IOException { return readGenericGIFBlock(is, code, null); } protected GenericGifBlock readGenericGIFBlock(final InputStream is, final int code, final byte first[]) throws IOException { final List<byte[]> subblocks = new ArrayList<byte[]>(); if (first != null) { 
subblocks.add(first); } while (true) { final byte bytes[] = readSubBlock(is); if (bytes.length < 1) { break; } subblocks.add(bytes); } return new GenericGifBlock(code, subblocks); } private final static int EXTENSION_CODE = 0x21; private final static int IMAGE_SEPARATOR = 0x2C; private final static int GRAPHIC_CONTROL_EXTENSION = (EXTENSION_CODE << 8) | 0xf9; private final static int COMMENT_EXTENSION = 0xfe; private final static int PLAIN_TEXT_EXTENSION = 0x01; private final static int XMP_EXTENSION = 0xff; private final static int TERMINATOR_BYTE = 0x3b; private final static int APPLICATION_EXTENSION_LABEL = 0xff; private final static int XMP_COMPLETE_CODE = (EXTENSION_CODE << 8) | XMP_EXTENSION; private List<GifBlock> readBlocks(final GifHeaderInfo ghi, final InputStream is, final boolean stopBeforeImageData, final FormatCompliance formatCompliance) throws ImageReadException, IOException { final List<GifBlock> result = new ArrayList<GifBlock>(); while (true) { final int code = is.read(); switch (code) { case -1: throw new ImageReadException("GIF: unexpected end of data"); case IMAGE_SEPARATOR: final ImageDescriptor id = readImageDescriptor(ghi, code, is, stopBeforeImageData, formatCompliance); result.add(id); // if(stopBeforeImageData) // return result; break; case EXTENSION_CODE: // extension { final int extensionCode = is.read(); final int completeCode = ((0xff & code) << 8) | (0xff & extensionCode); switch (extensionCode) { case 0xf9: final GraphicControlExtension gce = readGraphicControlExtension( completeCode, is); result.add(gce); break; case COMMENT_EXTENSION: case PLAIN_TEXT_EXTENSION: { final GenericGifBlock block = readGenericGIFBlock(is, completeCode); result.add(block); break; } case APPLICATION_EXTENSION_LABEL: // 255 (hex 0xFF) Application // Extension Label { final byte label[] = readSubBlock(is); if (formatCompliance != null) { formatCompliance.addComment( "Unknown Application Extension (" + new String(label, "US-ASCII") + ")", completeCode); } // if (label == new String("ICCRGBG1")) { // GIF's can have embedded ICC Profiles - who knew? 
} if ((label != null) && (label.length > 0)) { final GenericGifBlock block = readGenericGIFBlock(is, completeCode, label); result.add(block); } break; } default: { if (formatCompliance != null) { formatCompliance.addComment("Unknown block", completeCode); } final GenericGifBlock block = readGenericGIFBlock(is, completeCode); result.add(block); break; } } } break; case TERMINATOR_BYTE: return result; case 0x00: // bad byte, but keep going and see what happens break; default: throw new ImageReadException("GIF: unknown code: " + code); } } } private ImageDescriptor readImageDescriptor(final GifHeaderInfo ghi, final int blockCode, final InputStream is, final boolean stopBeforeImageData, final FormatCompliance formatCompliance) throws ImageReadException, IOException { final int ImageLeftPosition = read2Bytes("Image Left Position", is, "Not a Valid GIF File"); final int ImageTopPosition = read2Bytes("Image Top Position", is, "Not a Valid GIF File"); final int imageWidth = read2Bytes("Image Width", is, "Not a Valid GIF File"); final int imageHeight = read2Bytes("Image Height", is, "Not a Valid GIF File"); final byte PackedFields = readByte("Packed Fields", is, "Not a Valid GIF File"); if (formatCompliance != null) { formatCompliance.checkBounds("Width", 1, ghi.logicalScreenWidth, imageWidth); formatCompliance.checkBounds("Height", 1, ghi.logicalScreenHeight, imageHeight); formatCompliance.checkBounds("Left Position", 0, ghi.logicalScreenWidth - imageWidth, ImageLeftPosition); formatCompliance.checkBounds("Top Position", 0, ghi.logicalScreenHeight - imageHeight, ImageTopPosition); } if (getDebug()) { printByteBits("PackedFields bits", PackedFields); } final boolean LocalColorTableFlag = (((PackedFields >> 7) & 1) > 0); if (getDebug()) { System.out.println("LocalColorTableFlag: " + LocalColorTableFlag); } final boolean InterlaceFlag = (((PackedFields >> 6) & 1) > 0); if (getDebug()) { System.out.println("Interlace Flag: " + InterlaceFlag); } final boolean SortFlag = (((PackedFields >> 5) & 1) > 0); if (getDebug()) { System.out.println("Sort Flag: " + SortFlag); } final byte SizeofLocalColorTable = (byte) (PackedFields & 7); if (getDebug()) { System.out.println("SizeofLocalColorTable: " + SizeofLocalColorTable); } byte LocalColorTable[] = null; if (LocalColorTableFlag) { LocalColorTable = readColorTable(is, SizeofLocalColorTable, formatCompliance); } byte imageData[] = null; if (!stopBeforeImageData) { final int LZWMinimumCodeSize = is.read(); final GenericGifBlock block = readGenericGIFBlock(is, -1); final byte bytes[] = block.appendSubBlocks(); final InputStream bais = new ByteArrayInputStream(bytes); final int size = imageWidth * imageHeight; final MyLzwDecompressor myLzwDecompressor = new MyLzwDecompressor( LZWMinimumCodeSize, ByteOrder.LITTLE_ENDIAN); imageData = myLzwDecompressor.decompress(bais, size); } else { final int LZWMinimumCodeSize = is.read(); if (getDebug()) { System.out.println("LZWMinimumCodeSize: " + LZWMinimumCodeSize); } readGenericGIFBlock(is, -1); } final ImageDescriptor result = new ImageDescriptor(blockCode, ImageLeftPosition, ImageTopPosition, imageWidth, imageHeight, PackedFields, LocalColorTableFlag, InterlaceFlag, SortFlag, SizeofLocalColorTable, LocalColorTable, imageData); return result; } private int simple_pow(final int base, final int power) { int result = 1; for (int i = 0; i < power; i++) { result *= base; } return result; } private int convertColorTableSize(final int ct_size) { return 3 * simple_pow(2, ct_size + 1); } private byte[] readColorTable(final 
InputStream is, final int ct_size, final FormatCompliance formatCompliance) throws IOException { final int actual_size = convertColorTableSize(ct_size); final byte bytes[] = readBytes("block", is, actual_size, "GIF: corrupt Color Table"); return bytes; } private GifBlock findBlock(final List<GifBlock> v, final int code) { for (int i = 0; i < v.size(); i++) { final GifBlock gifBlock = v.get(i); if (gifBlock.blockCode == code) { return gifBlock; } } return null; } private ImageContents readFile(final ByteSource byteSource, final boolean stopBeforeImageData) throws ImageReadException, IOException { return readFile(byteSource, stopBeforeImageData, FormatCompliance.getDefault()); } private ImageContents readFile(final ByteSource byteSource, final boolean stopBeforeImageData, final FormatCompliance formatCompliance) throws ImageReadException, IOException { InputStream is = null; try { is = byteSource.getInputStream(); final GifHeaderInfo ghi = readHeader(is, formatCompliance); byte globalColorTable[] = null; if (ghi.globalColorTableFlag) { globalColorTable = readColorTable(is, ghi.sizeOfGlobalColorTable, formatCompliance); } final List<GifBlock> blocks = readBlocks(ghi, is, stopBeforeImageData, formatCompliance); final ImageContents result = new ImageContents(ghi, globalColorTable, blocks); return result; } finally { try { if (is != null) { is.close(); } } catch (final Exception e) { Debug.debug(e); } } } @Override public byte[] getICCProfileBytes(final ByteSource byteSource, final Map<String,Object> params) throws ImageReadException, IOException { return null; } @Override public Dimension getImageSize(final ByteSource byteSource, final Map<String,Object> params) throws ImageReadException, IOException { final ImageContents blocks = readFile(byteSource, false); if (blocks == null) { throw new ImageReadException("GIF: Couldn't read blocks"); } final GifHeaderInfo bhi = blocks.gifHeaderInfo; if (bhi == null) { throw new ImageReadException("GIF: Couldn't read Header"); } final ImageDescriptor id = (ImageDescriptor) findBlock(blocks.blocks, IMAGE_SEPARATOR); if (id == null) { throw new ImageReadException("GIF: Couldn't read ImageDescriptor"); } // Prefer the size information in the ImageDescriptor; it is more // reliable // than the size information in the header. 
return new Dimension(id.imageWidth, id.imageHeight); } public byte[] embedICCProfile(final byte image[], final byte profile[]) { return null; } @Override public boolean embedICCProfile(final File src, final File dst, final byte profile[]) { return false; } @Override public IImageMetadata getMetadata(final ByteSource byteSource, final Map<String,Object> params) throws ImageReadException, IOException { return null; } private List<String> getComments(final List<GifBlock> v) throws IOException { final List<String> result = new ArrayList<String>(); final int code = 0x21fe; for (int i = 0; i < v.size(); i++) { final GifBlock block = v.get(i); if (block.blockCode == code) { final byte bytes[] = ((GenericGifBlock) block).appendSubBlocks(); result.add(new String(bytes, "US-ASCII")); } } return result; } @Override public ImageInfo getImageInfo(final ByteSource byteSource, final Map<String,Object> params) throws ImageReadException, IOException { final ImageContents blocks = readFile(byteSource, false); if (blocks == null) { throw new ImageReadException("GIF: Couldn't read blocks"); } final GifHeaderInfo bhi = blocks.gifHeaderInfo; if (bhi == null) { throw new ImageReadException("GIF: Couldn't read Header"); } final ImageDescriptor id = (ImageDescriptor) findBlock(blocks.blocks, IMAGE_SEPARATOR); if (id == null) { throw new ImageReadException("GIF: Couldn't read ImageDescriptor"); } final GraphicControlExtension gce = (GraphicControlExtension) findBlock( blocks.blocks, GRAPHIC_CONTROL_EXTENSION); // Prefer the size information in the ImageDescriptor; it is more // reliable than the size information in the header. final int height = id.imageHeight; final int width = id.imageWidth; final List<String> comments = getComments(blocks.blocks); final int bitsPerPixel = (bhi.colorResolution + 1); final ImageFormat format = ImageFormat.GIF; final String formatName = "GIF Graphics Interchange Format"; final String mimeType = "image/gif"; // we ought to count images, but don't yet. 
final int numberOfImages = -1; final boolean isProgressive = id.interlaceFlag; final int physicalWidthDpi = 72; final float physicalWidthInch = (float) ((double) width / (double) physicalWidthDpi); final int physicalHeightDpi = 72; final float physicalHeightInch = (float) ((double) height / (double) physicalHeightDpi); final String formatDetails = "Gif " + ((char) blocks.gifHeaderInfo.version1) + ((char) blocks.gifHeaderInfo.version2) + ((char) blocks.gifHeaderInfo.version3); boolean isTransparent = false; if (gce != null && gce.transparency) { isTransparent = true; } final boolean usesPalette = true; final int colorType = ImageInfo.COLOR_TYPE_RGB; final String compressionAlgorithm = ImageInfo.COMPRESSION_ALGORITHM_LZW; final ImageInfo result = new ImageInfo(formatDetails, bitsPerPixel, comments, format, formatName, height, mimeType, numberOfImages, physicalHeightDpi, physicalHeightInch, physicalWidthDpi, physicalWidthInch, width, isProgressive, isTransparent, usesPalette, colorType, compressionAlgorithm); return result; } @Override public boolean dumpImageFile(final PrintWriter pw, final ByteSource byteSource) throws ImageReadException, IOException { pw.println("gif.dumpImageFile"); final ImageInfo imageData = getImageInfo(byteSource); if (imageData == null) { return false; } imageData.toString(pw, ""); final ImageContents blocks = readFile(byteSource, false); pw.println("gif.blocks: " + blocks.blocks.size()); for (int i = 0; i < blocks.blocks.size(); i++) { final GifBlock gifBlock = blocks.blocks.get(i); this.debugNumber(pw, "\t" + i + " (" + gifBlock.getClass().getName() + ")", gifBlock.blockCode, 4); } pw.println(""); return true; } private int[] getColorTable(final byte bytes[]) throws ImageReadException { if ((bytes.length % 3) != 0) { throw new ImageReadException("Bad Color Table Length: " + bytes.length); } final int length = bytes.length / 3; final int result[] = new int[length]; for (int i = 0; i < length; i++) { final int red = 0xff & bytes[(i * 3) + 0]; final int green = 0xff & bytes[(i * 3) + 1]; final int blue = 0xff & bytes[(i * 3) + 2]; final int alpha = 0xff; final int rgb = (alpha << 24) | (red << 16) | (green << 8) | (blue << 0); result[i] = rgb; } return result; } @Override public FormatCompliance getFormatCompliance(final ByteSource byteSource) throws ImageReadException, IOException { final FormatCompliance result = new FormatCompliance( byteSource.getDescription()); readFile(byteSource, false, result); return result; } @Override public BufferedImage getBufferedImage(final ByteSource byteSource, final Map<String,Object> params) throws ImageReadException, IOException { final ImageContents imageContents = readFile(byteSource, false); if (imageContents == null) { throw new ImageReadException("GIF: Couldn't read blocks"); } final GifHeaderInfo ghi = imageContents.gifHeaderInfo; if (ghi == null) { throw new ImageReadException("GIF: Couldn't read Header"); } final ImageDescriptor id = (ImageDescriptor) findBlock(imageContents.blocks, IMAGE_SEPARATOR); if (id == null) { throw new ImageReadException("GIF: Couldn't read Image Descriptor"); } final GraphicControlExtension gce = (GraphicControlExtension) findBlock( imageContents.blocks, GRAPHIC_CONTROL_EXTENSION); // Prefer the size information in the ImageDescriptor; it is more // reliable // than the size information in the header. 
final int width = id.imageWidth; final int height = id.imageHeight; boolean hasAlpha = false; if (gce != null && gce.transparency) { hasAlpha = true; } final ImageBuilder imageBuilder = new ImageBuilder(width, height, hasAlpha); int colorTable[]; if (id.localColorTable != null) { colorTable = getColorTable(id.localColorTable); } else if (imageContents.globalColorTable != null) { colorTable = getColorTable(imageContents.globalColorTable); } else { throw new ImageReadException("Gif: No Color Table"); } int transparentIndex = -1; if (hasAlpha) { transparentIndex = gce.transparentColorIndex; } int counter = 0; final int rowsInPass1 = (height + 7) / 8; final int rowsInPass2 = (height + 3) / 8; final int rowsInPass3 = (height + 1) / 4; final int rowsInPass4 = (height) / 2; for (int row = 0; row < height; row++) { int y; if (id.interlaceFlag) { int the_row = row; if (the_row < rowsInPass1) { y = the_row * 8; } else { the_row -= rowsInPass1; if (the_row < (rowsInPass2)) { y = 4 + (the_row * 8); } else { the_row -= rowsInPass2; if (the_row < (rowsInPass3)) { y = 2 + (the_row * 4); } else { the_row -= rowsInPass3; if (the_row < (rowsInPass4)) { y = 1 + (the_row * 2); } else { throw new ImageReadException( "Gif: Strange Row"); } } } } } else { y = row; } for (int x = 0; x < width; x++) { final int index = 0xff & id.imageData[counter++]; int rgb = colorTable[index]; if (transparentIndex == index) { rgb = 0x00; } imageBuilder.setRGB(x, y, rgb); } } return imageBuilder.getBufferedImage(); } private void writeAsSubBlocks(final OutputStream os, final byte bytes[]) throws IOException { int index = 0; while (index < bytes.length) { final int block_size = Math.min(bytes.length - index, 255); os.write(block_size); os.write(bytes, index, block_size); index += block_size; } os.write(0); // last block } private static final int LOCAL_COLOR_TABLE_FLAG_MASK = 1 << 7; private static final int INTERLACE_FLAG_MASK = 1 << 6; private static final int SORT_FLAG_MASK = 1 << 5; @Override public void writeImage(final BufferedImage src, final OutputStream os, Map<String,Object> params) throws ImageWriteException, IOException { // make copy of params; we'll clear keys as we consume them. params = new HashMap<String,Object>(params); final boolean verbose = ParamMap.getParamBoolean(params, PARAM_KEY_VERBOSE, false); // clear format key. if (params.containsKey(PARAM_KEY_FORMAT)) { params.remove(PARAM_KEY_FORMAT); } if (params.containsKey(PARAM_KEY_VERBOSE)) { params.remove(PARAM_KEY_VERBOSE); } String xmpXml = null; if (params.containsKey(PARAM_KEY_XMP_XML)) { xmpXml = (String) params.get(PARAM_KEY_XMP_XML); params.remove(PARAM_KEY_XMP_XML); } if (params.size() > 0) { final Object firstKey = params.keySet().iterator().next(); throw new ImageWriteException("Unknown parameter: " + firstKey); } final int width = src.getWidth(); final int height = src.getHeight(); final boolean hasAlpha = new PaletteFactory().hasTransparency(src); final int max_colors = hasAlpha ? 
255 : 256; Palette palette2 = new PaletteFactory().makeExactRgbPaletteSimple(src, max_colors); // int palette[] = new PaletteFactory().makePaletteSimple(src, 256); // Map palette_map = paletteToMap(palette); if (palette2 == null) { palette2 = new PaletteFactory().makeQuantizedRgbPalette(src, max_colors); if (verbose) { System.out.println("quantizing"); } } else if (verbose) { System.out.println("exact palette"); } if (palette2 == null) { throw new ImageWriteException( "Gif: can't write images with more than 256 colors"); } final int palette_size = palette2.length() + (hasAlpha ? 1 : 0); final BinaryOutputStream bos = new BinaryOutputStream(os, ByteOrder.LITTLE_ENDIAN); // write Header os.write(0x47); // G magic numbers os.write(0x49); // I os.write(0x46); // F os.write(0x38); // 8 version magic numbers os.write(0x39); // 9 os.write(0x61); // a // Logical Screen Descriptor. bos.write2Bytes(width); bos.write2Bytes(height); final int colorTableScaleLessOne = (palette_size > 128) ? 7 : (palette_size > 64) ? 6 : (palette_size > 32) ? 5 : (palette_size > 16) ? 4 : (palette_size > 8) ? 3 : (palette_size > 4) ? 2 : (palette_size > 2) ? 1 : 0; final int colorTableSizeInFormat = 1 << (colorTableScaleLessOne + 1); { final byte colorResolution = (byte) colorTableScaleLessOne; // TODO: final boolean globalColorTableFlag = false; final boolean sortFlag = false; final int globalColorTableFlagMask = 1 << 7; final int sortFlagMask = 8; final int sizeOfGlobalColorTable = 0; final int packedFields = ((globalColorTableFlag ? globalColorTableFlagMask : 0) | (sortFlag ? sortFlagMask : 0) | ((7 & colorResolution) << 4) | (7 & sizeOfGlobalColorTable)); bos.write(packedFields); // one byte } { final byte BackgroundColorIndex = 0; bos.write(BackgroundColorIndex); } { final byte PixelAspectRatio = 0; bos.write(PixelAspectRatio); } { // write Global Color Table. } { // ALWAYS write GraphicControlExtension bos.write(EXTENSION_CODE); bos.write((byte) 0xf9); // bos.write(0xff & (kGraphicControlExtension >> 8)); // bos.write(0xff & (kGraphicControlExtension >> 0)); bos.write((byte) 4); // block size; final int packedFields = hasAlpha ? 1 : 0; // transparency flag bos.write((byte) packedFields); bos.write((byte) 0); // Delay Time bos.write((byte) 0); // Delay Time bos.write((byte) (hasAlpha ? palette2.length() : 0)); // Transparent // Color // Index bos.write((byte) 0); // terminator } if (null != xmpXml) { bos.write(EXTENSION_CODE); bos.write(APPLICATION_EXTENSION_LABEL); bos.write(XMP_APPLICATION_ID_AND_AUTH_CODE.length); // 0x0B bos.write(XMP_APPLICATION_ID_AND_AUTH_CODE); final byte xmpXmlBytes[] = xmpXml.getBytes("utf-8"); bos.write(xmpXmlBytes); // write "magic trailer" for (int magic = 0; magic <= 0xff; magic++) { bos.write(0xff - magic); } bos.write((byte) 0); // terminator } { // Image Descriptor. bos.write(IMAGE_SEPARATOR); bos.write2Bytes(0); // Image Left Position bos.write2Bytes(0); // Image Top Position bos.write2Bytes(width); // Image Width bos.write2Bytes(height); // Image Height { final boolean LocalColorTableFlag = true; // boolean LocalColorTableFlag = false; final boolean InterlaceFlag = false; final boolean SortFlag = false; final int SizeOfLocalColorTable = colorTableScaleLessOne; // int SizeOfLocalColorTable = 0; final int PackedFields = ((LocalColorTableFlag ? LOCAL_COLOR_TABLE_FLAG_MASK : 0) | (InterlaceFlag ? INTERLACE_FLAG_MASK : 0) | (SortFlag ? SORT_FLAG_MASK : 0) | (7 & SizeOfLocalColorTable)); bos.write(PackedFields); // one byte } } { // write Local Color Table. 
for (int i = 0; i < colorTableSizeInFormat; i++) { if (i < palette2.length()) { final int rgb = palette2.getEntry(i); final int red = 0xff & (rgb >> 16); final int green = 0xff & (rgb >> 8); final int blue = 0xff & (rgb >> 0); bos.write(red); bos.write(green); bos.write(blue); } else { bos.write(0); bos.write(0); bos.write(0); } } } { // get Image Data. // int image_data_total = 0; int LZWMinimumCodeSize = colorTableScaleLessOne + 1; // LZWMinimumCodeSize = Math.max(8, LZWMinimumCodeSize); if (LZWMinimumCodeSize < 2) { LZWMinimumCodeSize = 2; } // TODO: // make // better // choice // here. bos.write(LZWMinimumCodeSize); final MyLzwCompressor compressor = new MyLzwCompressor( LZWMinimumCodeSize, ByteOrder.LITTLE_ENDIAN, false); // GIF // Mode); final byte imagedata[] = new byte[width * height]; for (int y = 0; y < height; y++) { for (int x = 0; x < width; x++) { final int argb = src.getRGB(x, y); final int rgb = 0xffffff & argb; int index; if (hasAlpha) { final int alpha = 0xff & (argb >> 24); final int alphaThreshold = 255; if (alpha < alphaThreshold) { index = palette2.length(); // is transparent } else { index = palette2.getPaletteIndex(rgb); } } else { index = palette2.getPaletteIndex(rgb); } imagedata[y * width + x] = (byte) index; } } final byte compressed[] = compressor.compress(imagedata); writeAsSubBlocks(bos, compressed); // image_data_total += compressed.length; } // palette2.dump(); bos.write(TERMINATOR_BYTE); bos.close(); os.close(); } private static final byte XMP_APPLICATION_ID_AND_AUTH_CODE[] = { 0x58, // X 0x4D, // M 0x50, // P 0x20, // 0x44, // D 0x61, // a 0x74, // t 0x61, // a 0x58, // X 0x4D, // M 0x50, // P }; /** * Extracts embedded XML metadata as XML string. * <p> * * @param byteSource * File containing image data. * @param params * Map of optional parameters, defined in ImagingConstants. * @return Xmp Xml as String, if present. Otherwise, returns null. */ @Override public String getXmpXml(final ByteSource byteSource, final Map<String,Object> params) throws ImageReadException, IOException { InputStream is = null; try { is = byteSource.getInputStream(); final FormatCompliance formatCompliance = null; final GifHeaderInfo ghi = readHeader(is, formatCompliance); if (ghi.globalColorTableFlag) { readColorTable(is, ghi.sizeOfGlobalColorTable, formatCompliance); } final List<GifBlock> blocks = readBlocks(ghi, is, true, formatCompliance); final List<String> result = new ArrayList<String>(); for (int i = 0; i < blocks.size(); i++) { final GifBlock block = blocks.get(i); if (block.blockCode != XMP_COMPLETE_CODE) { continue; } final GenericGifBlock genericBlock = (GenericGifBlock) block; final byte blockBytes[] = genericBlock.appendSubBlocks(true); if (blockBytes.length < XMP_APPLICATION_ID_AND_AUTH_CODE.length) { continue; } if (!compareBytes(blockBytes, 0, XMP_APPLICATION_ID_AND_AUTH_CODE, 0, XMP_APPLICATION_ID_AND_AUTH_CODE.length)) { continue; } final byte GIF_MAGIC_TRAILER[] = new byte[256]; for (int magic = 0; magic <= 0xff; magic++) { GIF_MAGIC_TRAILER[magic] = (byte) (0xff - magic); } if (blockBytes.length < XMP_APPLICATION_ID_AND_AUTH_CODE.length + GIF_MAGIC_TRAILER.length) { continue; } if (!compareBytes(blockBytes, blockBytes.length - GIF_MAGIC_TRAILER.length, GIF_MAGIC_TRAILER, 0, GIF_MAGIC_TRAILER.length)) { throw new ImageReadException( "XMP block in GIF missing magic trailer."); } try { // XMP is UTF-8 encoded xml. 
final String xml = new String( blockBytes, XMP_APPLICATION_ID_AND_AUTH_CODE.length, blockBytes.length - (XMP_APPLICATION_ID_AND_AUTH_CODE.length + GIF_MAGIC_TRAILER.length), "utf-8"); result.add(xml); } catch (final UnsupportedEncodingException e) { throw new ImageReadException("Invalid XMP Block in GIF."); } } if (result.size() < 1) { return null; } if (result.size() > 1) { throw new ImageReadException("More than one XMP Block in GIF."); } return result.get(0); } finally { try { if (is != null) { is.close(); } } catch (final Exception e) { Debug.debug(e); } } } }
The declared exception ImageReadException is not actually thrown by the method readGraphicControlExtension(int, InputStream) from type GifImageParser. git-svn-id: ef215b97ec449bc9c69e2ae1448853f14b3d8f41@1535223 13f79535-47bb-0310-9956-ffa450edef68
src/main/java/org/apache/commons/imaging/formats/gif/GifImageParser.java
The declared exception ImageReadException is not actually thrown by the method readGraphicControlExtension(int, InputStream) from type GifImageParser.
<ide><path>rc/main/java/org/apache/commons/imaging/formats/gif/GifImageParser.java <ide> } <ide> <ide> private GraphicControlExtension readGraphicControlExtension(final int code, <del> final InputStream is) throws ImageReadException, IOException { <add> final InputStream is) throws IOException { <ide> readByte("block_size", is, "GIF: corrupt GraphicControlExt"); <ide> final int packed = readByte("packed fields", is, <ide> "GIF: corrupt GraphicControlExt");
JavaScript
apache-2.0
eae2edafdd26ddbcd4a0b788906888724afb0830
0
ExpressGateway/express-gateway
const session = require('supertest-session'); const should = require('should'); const app = require('./bootstrap'); const services = require('../../lib/services'); const db = require('../../lib/db'); const checkTokenResponse = require('./checkTokenResponse'); const credentialService = services.credential; const userService = services.user; const applicationService = services.application; const tokenService = services.token; describe('Functional Test Client Password grant', function () { let fromDbUser1, fromDbApp, refreshToken; const user1 = { username: 'irfanbaqui', firstname: 'irfan', lastname: 'baqui', email: '[email protected]' }; const user2 = { username: 'somejoe', firstname: 'joe', lastname: 'smith', email: '[email protected]' }; before(() => db.flushdb() .then(() => Promise.all([userService.insert(user1), userService.insert(user2)])) .then(([_fromDbUser1, _fromDbUser2]) => { should.exist(_fromDbUser1); should.exist(_fromDbUser2); fromDbUser1 = _fromDbUser1; const app1 = { name: 'irfan_app', redirectUri: 'https://some.host.com/some/route' }; return applicationService.insert(app1, fromDbUser1.id); }) .then(_fromDbApp => { should.exist(_fromDbApp); fromDbApp = _fromDbApp; return credentialService.insertScopes(['someScope']); }) .then(() => Promise.all([ credentialService.insertCredential(fromDbUser1.id, 'basic-auth', { password: 'user-secret' }), credentialService.insertCredential(fromDbApp.id, 'oauth2', { secret: 'app-secret', scopes: ['someScope'] }) ])) .then(([userRes, appRes]) => { should.exist(userRes); should.exist(appRes); }) ); it('should grant access token when no scopes are specified', function (done) { const request = session(app); const credentials = Buffer.from(fromDbApp.id.concat(':app-secret')).toString('base64'); request .post('/oauth2/token') .set('Authorization', `basic ${credentials}`) .set('content-type', 'application/x-www-form-urlencoded') .type('form') .send({ grant_type: 'password', username: 'irfanbaqui', password: 'user-secret' }) .expect(200) .end(function (err, res) { if (err) return done(err); checkTokenResponse(res.body); done(); }); }); it('should grant access token with authorized scopes', function (done) { const request = session(app); const credentials = Buffer.from(fromDbApp.id.concat(':app-secret')).toString('base64'); request .post('/oauth2/token') .set('Authorization', 'basic ' + credentials) .set('content-type', 'application/x-www-form-urlencoded') .type('form') .send({ grant_type: 'password', username: 'irfanbaqui', password: 'user-secret', scope: 'someScope' }) .expect(200) .end(function (err, res) { if (err) return done(err); checkTokenResponse(res.body, ['refresh_token']); refreshToken = res.body.refresh_token; tokenService.get(res.body.access_token) .then(fromDbToken => { should.exist(fromDbToken); fromDbToken.scopes.should.eql(['someScope']); [fromDbToken.id, fromDbToken.tokenDecrypted].should.eql(res.body.access_token.split('|')); done(); }); }); }); it('should grant access token in exchange of refresh token', function (done) { const request = session(app); request .post('/oauth2/token') .set('Content-Type', 'application/json') .send({ grant_type: 'refresh_token', client_id: fromDbApp.id, client_secret: 'app-secret', refresh_token: refreshToken }) .expect(200) .end((err, res) => { if (done) return done(err); checkTokenResponse(res.body); tokenService.get(res.body.access_token) .then(token => { should.exist(token); token.scopes.should.eql(['someScope']); [token.id, token.tokenDecrypted].should.eql(res.body.access_token.split('|')); done(); 
}); }); }); it('should not grant access token with unauthorized scopes', function (done) { const request = session(app); const credentials = Buffer.from(fromDbApp.id.concat(':app-secret')).toString('base64'); request .post('/oauth2/token') .set('Authorization', 'basic ' + credentials) .set('content-type', 'application/x-www-form-urlencoded') .type('form') .send({ grant_type: 'password', username: 'irfanbaqui', password: 'user-secret', scope: 'someScope unauthorizedScope' }) .expect(401) .end(done); }); });
test/oauth/password.test.js
const session = require('supertest-session'); const should = require('should'); const app = require('./bootstrap'); const services = require('../../lib/services'); const credentialService = services.credential; const userService = services.user; const applicationService = services.application; const tokenService = services.token; const db = require('../../lib/db'); describe('Functional Test Client Password grant', function () { let fromDbUser1, fromDbApp, refreshToken; const user1 = { username: 'irfanbaqui', firstname: 'irfan', lastname: 'baqui', email: '[email protected]' }; const user2 = { username: 'somejoe', firstname: 'joe', lastname: 'smith', email: '[email protected]' }; before(() => db.flushdb() .then(() => Promise.all([userService.insert(user1), userService.insert(user2)])) .then(([_fromDbUser1, _fromDbUser2]) => { should.exist(_fromDbUser1); should.exist(_fromDbUser2); fromDbUser1 = _fromDbUser1; const app1 = { name: 'irfan_app', redirectUri: 'https://some.host.com/some/route' }; return applicationService.insert(app1, fromDbUser1.id); }) .then(_fromDbApp => { should.exist(_fromDbApp); fromDbApp = _fromDbApp; return credentialService.insertScopes(['someScope']); }) .then(() => Promise.all([ credentialService.insertCredential(fromDbUser1.id, 'basic-auth', { password: 'user-secret' }), credentialService.insertCredential(fromDbApp.id, 'oauth2', { secret: 'app-secret', scopes: ['someScope'] }) ])) .then(([userRes, appRes]) => { should.exist(userRes); should.exist(appRes); }) ); it('should grant access token when no scopes are specified', function (done) { const request = session(app); const credentials = Buffer.from(fromDbApp.id.concat(':app-secret')).toString('base64'); request .post('/oauth2/token') .set('Authorization', `basic ${credentials}`) .set('content-type', 'application/x-www-form-urlencoded') .type('form') .send({ grant_type: 'password', username: 'irfanbaqui', password: 'user-secret' }) .expect(200) .end(function (err, res) { should.not.exist(err); const token = res.body; should.exist(token); should.exist(token.access_token); token.token_type.should.equal('Bearer'); done(); }); }); it('should grant access token with authorized scopes', function (done) { const request = session(app); const credentials = Buffer.from(fromDbApp.id.concat(':app-secret')).toString('base64'); request .post('/oauth2/token') .set('Authorization', 'basic ' + credentials) .set('content-type', 'application/x-www-form-urlencoded') .type('form') .send({ grant_type: 'password', username: 'irfanbaqui', password: 'user-secret', scope: 'someScope' }) .expect(200) .end(function (err, res) { should.not.exist(err); const token = res.body; should.exist(token); should.exist(token.access_token); should.exist(token.refresh_token); token.token_type.should.equal('Bearer'); refreshToken = token.refresh_token; tokenService.get(token.access_token) .then(fromDbToken => { should.exist(fromDbToken); fromDbToken.scopes.should.eql(['someScope']); [fromDbToken.id, fromDbToken.tokenDecrypted].should.eql(token.access_token.split('|')); done(); }); }); }); it('should grant access token in exchange of refresh token', function (done) { const request = session(app); request .post('/oauth2/token') .set('Content-Type', 'application/json') .send({ grant_type: 'refresh_token', client_id: fromDbApp.id, client_secret: 'app-secret', refresh_token: refreshToken }) .expect(200) .end((err, res) => { should.not.exist(err); should.exist(res.body.access_token); res.body.access_token.length.should.be.greaterThan(15); 
should.exist(res.body.token_type); res.body.token_type.should.eql('Bearer'); tokenService.get(res.body.access_token) .then(token => { should.exist(token); token.scopes.should.eql(['someScope']); [token.id, token.tokenDecrypted].should.eql(res.body.access_token.split('|')); done(); }); }); }); it('should not grant access token with unauthorized scopes', function (done) { const request = session(app); const credentials = Buffer.from(fromDbApp.id.concat(':app-secret')).toString('base64'); request .post('/oauth2/token') .set('Authorization', 'basic ' + credentials) .set('content-type', 'application/x-www-form-urlencoded') .type('form') .send({ grant_type: 'password', username: 'irfanbaqui', password: 'user-secret', scope: 'someScope unauthorizedScope' }) .expect(401) .end(function (err) { should.not.exist(err); done(); }); }); });
DRY the password test using the generic token check function
test/oauth/password.test.js
DRY the password test using the generic token check function
<ide><path>est/oauth/password.test.js <ide> const session = require('supertest-session'); <ide> const should = require('should'); <add> <ide> const app = require('./bootstrap'); <add>const services = require('../../lib/services'); <add>const db = require('../../lib/db'); <add>const checkTokenResponse = require('./checkTokenResponse'); <ide> <del>const services = require('../../lib/services'); <ide> const credentialService = services.credential; <ide> const userService = services.user; <ide> const applicationService = services.application; <ide> const tokenService = services.token; <del>const db = require('../../lib/db'); <ide> <ide> describe('Functional Test Client Password grant', function () { <ide> let fromDbUser1, fromDbApp, refreshToken; <ide> }) <ide> .expect(200) <ide> .end(function (err, res) { <del> should.not.exist(err); <del> const token = res.body; <del> should.exist(token); <del> should.exist(token.access_token); <del> token.token_type.should.equal('Bearer'); <add> if (err) return done(err); <add> checkTokenResponse(res.body); <ide> done(); <ide> }); <ide> }); <ide> }) <ide> .expect(200) <ide> .end(function (err, res) { <del> should.not.exist(err); <del> const token = res.body; <del> should.exist(token); <del> should.exist(token.access_token); <del> should.exist(token.refresh_token); <del> token.token_type.should.equal('Bearer'); <del> refreshToken = token.refresh_token; <add> if (err) return done(err); <add> checkTokenResponse(res.body, ['refresh_token']); <add> refreshToken = res.body.refresh_token; <ide> <del> tokenService.get(token.access_token) <add> tokenService.get(res.body.access_token) <ide> .then(fromDbToken => { <ide> should.exist(fromDbToken); <ide> fromDbToken.scopes.should.eql(['someScope']); <del> [fromDbToken.id, fromDbToken.tokenDecrypted].should.eql(token.access_token.split('|')); <add> [fromDbToken.id, fromDbToken.tokenDecrypted].should.eql(res.body.access_token.split('|')); <ide> done(); <ide> }); <ide> }); <ide> }) <ide> .expect(200) <ide> .end((err, res) => { <del> should.not.exist(err); <del> should.exist(res.body.access_token); <del> res.body.access_token.length.should.be.greaterThan(15); <del> should.exist(res.body.token_type); <del> res.body.token_type.should.eql('Bearer'); <add> if (done) return done(err); <add> checkTokenResponse(res.body); <ide> tokenService.get(res.body.access_token) <ide> .then(token => { <ide> should.exist(token); <ide> scope: 'someScope unauthorizedScope' <ide> }) <ide> .expect(401) <del> .end(function (err) { <del> should.not.exist(err); <del> done(); <del> }); <add> .end(done); <ide> }); <ide> });
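The "generic token check function" this commit factors the assertions into is required as ./checkTokenResponse, but its source is not included in this record. Purely as a hedged sketch — the file name is inferred from the require path, and the signature and optional second argument are guesses based on the two call sites checkTokenResponse(res.body) and checkTokenResponse(res.body, ['refresh_token']) — it might look roughly like this:

// Hypothetical sketch of test/oauth/checkTokenResponse.js; not part of this record.
// Re-asserts what the old inline checks covered: the body exists, carries an
// access_token, uses the Bearer token_type, and includes any extra fields the
// caller expects (e.g. 'refresh_token').
const should = require('should');

module.exports = function checkTokenResponse (token, additionalProps = []) {
  should.exist(token);
  should.exist(token.access_token);
  token.token_type.should.equal('Bearer');
  additionalProps.forEach(prop => should.exist(token[prop]));
};

Keeping the second argument optional matches the way the new test contents call the helper both with and without the ['refresh_token'] list.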
Java
mit
28129c3e80eb75996b658a4afdda6f7391740d1b
0
Um-Mitternacht/Witchworks
package com.witchworks.common.brew; import com.witchworks.common.block.ModBlocks; import net.minecraft.block.Block; import net.minecraft.block.state.IBlockState; import net.minecraft.init.Blocks; import net.minecraft.util.EnumFacing; import net.minecraft.util.math.BlockPos; import net.minecraft.world.IBlockAccess; import net.minecraft.world.World; import javax.annotation.Nullable; import java.util.HashMap; import java.util.Map; /** * This class was created by Arekkuusu on 11/06/2017. * It's distributed as part of Witchworks under * the MIT license. */ public class IceWorldBrew extends BlockHitBrew { private final Map<Block, IBlockState> stateMap = new HashMap<>(); public IceWorldBrew() { stateMap.put(Blocks.GRASS_PATH, Blocks.PACKED_ICE.getDefaultState()); stateMap.put(Blocks.GRAVEL, Blocks.PACKED_ICE.getDefaultState()); stateMap.put(Blocks.COBBLESTONE, Blocks.PACKED_ICE.getDefaultState()); stateMap.put(Blocks.LOG, Blocks.PACKED_ICE.getDefaultState()); stateMap.put(Blocks.DIRT, Blocks.SNOW.getDefaultState()); stateMap.put(Blocks.GRASS, Blocks.SNOW.getDefaultState()); stateMap.put(Blocks.MYCELIUM, Blocks.SNOW.getDefaultState()); stateMap.put(Blocks.SANDSTONE, ModBlocks.fake_ice.getDefaultState()); stateMap.put(Blocks.NETHER_BRICK, ModBlocks.fake_ice.getDefaultState()); stateMap.put(Blocks.RED_NETHER_BRICK, ModBlocks.fake_ice.getDefaultState()); stateMap.put(Blocks.END_BRICKS, ModBlocks.fake_ice.getDefaultState()); } @Override public int getColor() { return 0xB0E0E6; } @Override public String getName() { return "ice_world"; } @Override public void safeImpact(BlockPos pos, @Nullable EnumFacing side, World world, int amplifier) { int box = 1 + (int) ((float) amplifier / 2F); BlockPos posI = pos.add(box, box, box); BlockPos posF = pos.add(-box, -box, -box); Iterable<BlockPos> spots = BlockPos.getAllInBox(posI, posF); for (BlockPos spot : spots) { Block block = world.getBlockState(spot).getBlock(); IBlockState state = world.getBlockState(spot); boolean place = amplifier > 2 || world.rand.nextBoolean(); if (place && stateMap.containsKey(block)) { world.setBlockState(spot, stateMap.get(block), 11); } else if (state.getBlock() == Blocks.LEAVES) { world.setBlockState(spot, ModBlocks.fake_ice.getDefaultState(), 3); } else if (state.getBlock() == Blocks.LEAVES2) { world.setBlockState(spot, ModBlocks.fake_ice.getDefaultState(), 3); } else if (state.getBlock() == Blocks.PLANKS) { world.setBlockState(spot, ModBlocks.fake_ice.getDefaultState(), 3); } else if (state.getBlock() == Blocks.STONE) { world.setBlockState(spot, ModBlocks.fake_ice.getDefaultState(), 3); } else if (state.getBlock() == Blocks.BRICK_BLOCK) { world.setBlockState(spot, ModBlocks.fake_ice.getDefaultState(), 3); } else if (state.getBlock() == Blocks.STONEBRICK) { world.setBlockState(spot, ModBlocks.fake_ice.getDefaultState(), 3); } else if (state.getBlock() == Blocks.OAK_STAIRS) { world.setBlockState(spot, ModBlocks.fake_ice_stairs.getDefaultState(), 3); } else if (state.getBlock() == Blocks.SPRUCE_STAIRS) { world.setBlockState(spot, ModBlocks.fake_ice_stairs.getDefaultState(), 3); } else if (state.getBlock() == Blocks.BIRCH_STAIRS) { world.setBlockState(spot, ModBlocks.fake_ice_stairs.getDefaultState(), 3); } else if (state.getBlock() == Blocks.DARK_OAK_STAIRS) { world.setBlockState(spot, ModBlocks.fake_ice_stairs.getDefaultState(), 3); } else if (state.getBlock() == Blocks.JUNGLE_STAIRS) { world.setBlockState(spot, ModBlocks.fake_ice_stairs.getDefaultState(), 3); } else if (state.getBlock() == Blocks.ACACIA_STAIRS) { 
world.setBlockState(spot, ModBlocks.fake_ice_stairs.getDefaultState(), 3); } else if (state.getBlock() == Blocks.RED_SANDSTONE_STAIRS) { world.setBlockState(spot, ModBlocks.fake_ice_stairs.getDefaultState(), 3); } else if (state.getBlock() == Blocks.PURPUR_STAIRS) { world.setBlockState(spot, ModBlocks.fake_ice_stairs.getDefaultState(), 3); } else if (state.getBlock() == Blocks.SANDSTONE_STAIRS) { world.setBlockState(spot, ModBlocks.fake_ice_stairs.getDefaultState(), 3); } else if (state.getBlock() == Blocks.STONE_STAIRS) { world.setBlockState(spot, ModBlocks.fake_ice_stairs.getDefaultState(), 3); } else if (state.getBlock() == Blocks.STONE_BRICK_STAIRS) { world.setBlockState(spot, ModBlocks.fake_ice_stairs.getDefaultState(), 3); } else if (state.getBlock() == Blocks.OAK_FENCE) { world.setBlockState(spot, ModBlocks.fake_ice_fence.getDefaultState(), 3); } else if (state.getBlock() == Blocks.ACACIA_FENCE) { world.setBlockState(spot, ModBlocks.fake_ice_fence.getDefaultState(), 3); } else if (state.getBlock() == Blocks.BIRCH_FENCE) { world.setBlockState(spot, ModBlocks.fake_ice_fence.getDefaultState(), 3); } else if (state.getBlock() == Blocks.DARK_OAK_FENCE) { world.setBlockState(spot, ModBlocks.fake_ice_fence.getDefaultState(), 3); } else if (state.getBlock() == Blocks.JUNGLE_FENCE) { world.setBlockState(spot, ModBlocks.fake_ice_fence.getDefaultState(), 3); } else if (state.getBlock() == Blocks.SPRUCE_FENCE) { world.setBlockState(spot, ModBlocks.fake_ice_fence.getDefaultState(), 3); } } } }
src/main/java/com/witchworks/common/brew/IceWorldBrew.java
package com.witchworks.common.brew; import com.witchworks.common.block.ModBlocks; import net.minecraft.block.Block; import net.minecraft.block.state.IBlockState; import net.minecraft.init.Blocks; import net.minecraft.util.EnumFacing; import net.minecraft.util.math.BlockPos; import net.minecraft.world.IBlockAccess; import net.minecraft.world.World; import javax.annotation.Nullable; import java.util.HashMap; import java.util.Map; /** * This class was created by Arekkuusu on 11/06/2017. * It's distributed as part of Witchworks under * the MIT license. */ public class IceWorldBrew extends BlockHitBrew { private final Map<Block, IBlockState> stateMap = new HashMap<>(); public IceWorldBrew() { stateMap.put(Blocks.GRASS_PATH, Blocks.PACKED_ICE.getDefaultState()); stateMap.put(Blocks.GRAVEL, Blocks.PACKED_ICE.getDefaultState()); stateMap.put(Blocks.COBBLESTONE, Blocks.PACKED_ICE.getDefaultState()); stateMap.put(Blocks.LOG, Blocks.PACKED_ICE.getDefaultState()); stateMap.put(Blocks.DIRT, Blocks.SNOW.getDefaultState()); stateMap.put(Blocks.GRASS, Blocks.SNOW.getDefaultState()); stateMap.put(Blocks.MYCELIUM, Blocks.SNOW.getDefaultState()); stateMap.put(Blocks.SANDSTONE, ModBlocks.fake_ice.getDefaultState()); stateMap.put(Blocks.NETHER_BRICK, ModBlocks.fake_ice.getDefaultState()); stateMap.put(Blocks.RED_NETHER_BRICK, ModBlocks.fake_ice.getDefaultState()); stateMap.put(Blocks.END_BRICKS, ModBlocks.fake_ice.getDefaultState()); } @Override public int getColor() { return 0xB0E0E6; } @Override public String getName() { return "ice_world"; } @Override public void safeImpact(BlockPos pos, @Nullable EnumFacing side, World world, int amplifier) { int box = 1 + (int) ((float) amplifier / 2F); BlockPos posI = pos.add(box, box, box); BlockPos posF = pos.add(-box, -box, -box); Iterable<BlockPos> spots = BlockPos.getAllInBox(posI, posF); for (BlockPos spot : spots) { Block block = world.getBlockState(spot).getBlock(); IBlockState state = world.getBlockState(spot); boolean place = amplifier > 2 || world.rand.nextBoolean(); if (place && stateMap.containsKey(block)) { world.setBlockState(spot, stateMap.get(block), 11); } else if (state.getBlock() == Blocks.LEAVES) { world.setBlockState(spot, ModBlocks.fake_ice.getDefaultState(), 3); } else if (state.getBlock() == Blocks.LEAVES2) { world.setBlockState(spot, ModBlocks.fake_ice.getDefaultState(), 3); } else if (state.getBlock() == Blocks.PLANKS) { world.setBlockState(spot, ModBlocks.fake_ice.getDefaultState(), 3); } else if (state.getBlock() == Blocks.STONE) { world.setBlockState(spot, ModBlocks.fake_ice.getDefaultState(), 3); } else if (state.getBlock() == Blocks.BRICK_BLOCK) { world.setBlockState(spot, ModBlocks.fake_ice.getDefaultState(), 3); } else if (state.getBlock() == Blocks.STONEBRICK) { world.setBlockState(spot, ModBlocks.fake_ice.getDefaultState(), 3); } else if (state.getBlock() == Blocks.OAK_STAIRS) { world.setBlockState(spot, ModBlocks.fake_ice_stairs.getDefaultState(), 3); } else if (state.getBlock() == Blocks.SPRUCE_STAIRS) { world.setBlockState(spot, ModBlocks.fake_ice_stairs.getDefaultState(), 3); } else if (state.getBlock() == Blocks.BIRCH_STAIRS) { world.setBlockState(spot, ModBlocks.fake_ice_stairs.getDefaultState(), 3); } else if (state.getBlock() == Blocks.DARK_OAK_STAIRS) { world.setBlockState(spot, ModBlocks.fake_ice_stairs.getDefaultState(), 3); } else if (state.getBlock() == Blocks.JUNGLE_STAIRS) { world.setBlockState(spot, ModBlocks.fake_ice_stairs.getDefaultState(), 3); } else if (state.getBlock() == Blocks.ACACIA_STAIRS) { 
world.setBlockState(spot, ModBlocks.fake_ice_stairs.getDefaultState(), 3); } else if (state.getBlock() == Blocks.RED_SANDSTONE_STAIRS) { world.setBlockState(spot, ModBlocks.fake_ice_stairs.getDefaultState(), 3); } else if (state.getBlock() == Blocks.PURPUR_STAIRS) { world.setBlockState(spot, ModBlocks.fake_ice_stairs.getDefaultState(), 3); } else if (state.getBlock() == Blocks.SANDSTONE_STAIRS) { world.setBlockState(spot, ModBlocks.fake_ice_stairs.getDefaultState(), 3); } else if (state.getBlock() == Blocks.STONE_STAIRS) { world.setBlockState(spot, ModBlocks.fake_ice_stairs.getDefaultState(), 3); } else if (state.getBlock() == Blocks.STONE_BRICK_STAIRS) { world.setBlockState(spot, ModBlocks.fake_ice_stairs.getDefaultState(), 3); } } } }
Even more tedium towards this fix
src/main/java/com/witchworks/common/brew/IceWorldBrew.java
Even more tedium towards this fix
<ide><path>rc/main/java/com/witchworks/common/brew/IceWorldBrew.java <ide> world.setBlockState(spot, ModBlocks.fake_ice_stairs.getDefaultState(), 3); <ide> } else if (state.getBlock() == Blocks.STONE_BRICK_STAIRS) { <ide> world.setBlockState(spot, ModBlocks.fake_ice_stairs.getDefaultState(), 3); <add> } else if (state.getBlock() == Blocks.OAK_FENCE) { <add> world.setBlockState(spot, ModBlocks.fake_ice_fence.getDefaultState(), 3); <add> } else if (state.getBlock() == Blocks.ACACIA_FENCE) { <add> world.setBlockState(spot, ModBlocks.fake_ice_fence.getDefaultState(), 3); <add> } else if (state.getBlock() == Blocks.BIRCH_FENCE) { <add> world.setBlockState(spot, ModBlocks.fake_ice_fence.getDefaultState(), 3); <add> } else if (state.getBlock() == Blocks.DARK_OAK_FENCE) { <add> world.setBlockState(spot, ModBlocks.fake_ice_fence.getDefaultState(), 3); <add> } else if (state.getBlock() == Blocks.JUNGLE_FENCE) { <add> world.setBlockState(spot, ModBlocks.fake_ice_fence.getDefaultState(), 3); <add> } else if (state.getBlock() == Blocks.SPRUCE_FENCE) { <add> world.setBlockState(spot, ModBlocks.fake_ice_fence.getDefaultState(), 3); <ide> } <ide> } <ide> }
Java
mit
0bddc55844e1b87619527f48e4654b5ad7cecd0a
0
kkkon/kk-android-reinstall-apk
package jp.ne.sakura.kkkon.android.reinstallapk; import android.app.Activity; import android.content.Context; import android.content.Intent; import android.content.pm.ApplicationInfo; import android.content.pm.PackageInfo; import android.content.pm.PackageManager; import android.graphics.drawable.Drawable; import android.net.Uri; import android.os.Bundle; import android.util.Log; import android.view.Gravity; import android.view.View; import android.view.ViewGroup; import android.widget.AdapterView; import android.widget.BaseAdapter; import android.widget.ImageView; import android.widget.LinearLayout; import android.widget.ListView; import android.widget.TextView; import android.widget.Toast; import java.io.File; import java.util.ArrayList; import java.util.List; import jp.ne.sakura.kkkon.android.exceptionhandler.SettingsCompat; public class MainActivity extends Activity implements ListView.OnItemClickListener { private static final String TAG = "kk-ReInstall-Apk"; private List<MyListData> mDataList = new ArrayList<MyListData>(128); private ListView mListView; private TextView mUnknownSourceTextView; private static final int INTENT_REQUEST_CODE_INSTALL = 0; public class MyListData { private Drawable image; private String text; private String packageName; private long firstInstallTime; private long lastUpdateTime; private String apkPath; public void setImage( final Drawable image ) { this.image = image; } public Drawable getImage() { return this.image; } public void setPackageName( final String packageName ) { this.packageName = packageName; } public String getPackageName() { return this.packageName; } public void setText( final String text ) { this.text = text; } public String getText() { return this.text; } public long getFirstInstallTime() { return firstInstallTime; } public void setFirstInstallTime(long firstInstallTime) { this.firstInstallTime = firstInstallTime; } public long getLastUpdateTime() { return lastUpdateTime; } public void setLastUpdateTime(long lastUpdateTime) { this.lastUpdateTime = lastUpdateTime; } public String getApkPath() { return apkPath; } public void setApkPath(String apkPath) { this.apkPath = apkPath; } } /** Called when the activity is first created. 
*/ @Override public void onCreate(Bundle savedInstanceState) { Log.d( TAG, "onCreate:"); super.onCreate(savedInstanceState); LinearLayout layout = new LinearLayout( this ); layout.setOrientation( LinearLayout.VERTICAL ); mUnknownSourceTextView = new TextView( this ); { final StringBuilder sb = new StringBuilder(); { final String label = this.getString( R.string.system_setting ); if ( null != label ) { sb.append( label ); } } sb.append( ": " ); { final String label = this.getString( R.string.unknown_sources ); if ( null != label ) { sb.append( label ); } } sb.append( " " ); SettingsCompat.initialize( this.getApplicationContext() ); final boolean isAllow = SettingsCompat.isAllowedNonMarketApps(); if ( isAllow ) { final String label = this.getString( R.string.unknown_sources_ok ); if ( null != label ) { sb.append( label ); } else { sb.append( "OK" ); } } else { final String label = this.getString( R.string.unknown_sources_ng ); if ( null != label ) { sb.append( label ); } else { sb.append( "NG" ); } } mUnknownSourceTextView.setGravity( Gravity.RIGHT ); mUnknownSourceTextView.setText( sb.toString() ); } layout.addView( mUnknownSourceTextView ); ImageView imageView = new ImageView( this ); layout.addView( imageView); mListView = new ListView( this ); TextView emptyTextView = new TextView( this ); emptyTextView.setText( "No items found" ); mListView.setEmptyView( emptyTextView ); makeApplicationList(); MyAdapter adapter = new MyAdapter( this ); mListView.setAdapter( adapter ); mListView.setOnItemClickListener( this ); layout.addView( mListView ); setContentView( layout ); } @Override protected void onResume() { super.onResume(); } @Override protected void onPause() { super.onPause(); } private void makeApplicationList() { PackageManager pm = getPackageManager(); if ( null == pm ) { return; } final List<ApplicationInfo> listApplicationInfo = pm.getInstalledApplications( 0 ); if ( null == listApplicationInfo ) { return; } for ( final ApplicationInfo appInfo : listApplicationInfo ) { if ( null == appInfo ) { continue; } if ( null != appInfo.sourceDir ) { if ( appInfo.sourceDir.startsWith( "/system/" ) ) { continue; } if ( null != appInfo.packageName ) { if ( appInfo.packageName.startsWith( "com.example." ) ) { continue; } if ( appInfo.packageName.startsWith( "com.android." ) ) { continue; } if ( appInfo.packageName.startsWith( "com.google.android." 
) ) { continue; } } Log.d( TAG, "package=" + appInfo.packageName ); Log.d( TAG, "name=" + appInfo.name ); Log.d( TAG, "sourcedir=" + appInfo.sourceDir ); Log.d( TAG, "label=" + appInfo.loadLabel( pm ) ); MyListData item = new MyListData(); item.setApkPath( appInfo.sourceDir ); { final CharSequence label = appInfo.loadLabel( pm ); if ( null == label ) { item.setText( appInfo.packageName ); } else { item.setText( label.toString() ); } } item.setPackageName( appInfo.packageName ); final Drawable drawable = appInfo.loadIcon(pm); if ( null != drawable ) { Log.d( TAG, "icon: w=" + drawable.getIntrinsicWidth() + ",h=" + drawable.getIntrinsicHeight() ); } item.setImage( drawable ); { try { final PackageInfo packageInfo = pm.getPackageInfo( appInfo.packageName, 0 ); if ( null != packageInfo ) { final long firstInstallTime = packageInfo.firstInstallTime; // API9 final long lastUpdateTime = packageInfo.firstInstallTime; // API9 Log.d( TAG, "firstInstallTime=" + firstInstallTime ); Log.d( TAG, "lastUpdateTime=" + lastUpdateTime ); item.setFirstInstallTime( firstInstallTime ); item.setLastUpdateTime( lastUpdateTime ); } } catch ( PackageManager.NameNotFoundException e ) { Log.e( TAG, "got Exception=" + e.toString(), e ); } } Log.d( TAG, "" ); mDataList.add( item ); } } } public class MyAdapter extends BaseAdapter { private Context mContext; public MyAdapter( Context context ) { this.mContext = context; } @Override public int getCount() { if ( null != mDataList ) { return mDataList.size(); } return 0; } @Override public Object getItem(int i) { if ( null != mDataList ) { return mDataList.get(i); } return null; } @Override public long getItemId(int i) { return i; } @Override public View getView(int i, View view, ViewGroup vg) { View v = view; if ( null == v ) { LinearLayout layout = new LinearLayout( mContext ); layout.setOrientation( LinearLayout.HORIZONTAL ); ImageView imageView = new ImageView( mContext ); imageView.setId( 0 ); imageView.setScaleType( ImageView.ScaleType.FIT_XY ); imageView.setLayoutParams( new ViewGroup.LayoutParams( 144, 144 ) ); imageView.setAdjustViewBounds( true ); TextView textView = new TextView( mContext ); textView.setId( 1 ); textView.setLayoutParams( new ViewGroup.LayoutParams( ViewGroup.LayoutParams.FILL_PARENT, ViewGroup.LayoutParams.FILL_PARENT ) ); textView.setGravity( Gravity.CENTER_VERTICAL ); textView.setPadding( 20, 20, 20, 20 ); layout.addView( imageView ); layout.addView( textView ); v = layout; } MyListData itemData = (MyListData)this.getItem(i); if ( null != itemData ) { ImageView imageView = (ImageView) v.findViewById(0); TextView textView = (TextView) v.findViewById(1); if ( null != imageView ) { imageView.setBackgroundDrawable( itemData.getImage() ); } if ( null != textView ) { textView.setText( itemData.getText() ); } } return v; } } public void onItemClick(AdapterView<?> av, View view, int position, long id) { if ( this.mListView == av ) { if ( null == this.mDataList ) { Log.d( TAG, "mDataList is null " ); return; } final MyListData itemData = this.mDataList.get( position ); if ( null == itemData ) { Log.d( TAG, "itemData is null index=" + position ); return; } { final String apkPath = itemData.getApkPath(); boolean installCalled = false; if ( null != apkPath ) { final File fileApk = new File(apkPath); if ( fileApk.exists() ) { installCalled = true; Intent promptInstall = new Intent(Intent.ACTION_VIEW); promptInstall.setDataAndType(Uri.fromFile( fileApk ), "application/vnd.android.package-archive"); //promptInstall.addFlags( Intent.FLAG_ACTIVITY_NEW_TASK ); 
this.startActivityForResult( promptInstall, INTENT_REQUEST_CODE_INSTALL ); } else { Log.d( TAG, "fileApk not exists. path=" + fileApk.getAbsolutePath() ); } } else { Log.d( TAG, "apkPath is null" ); } if ( !installCalled ) { Toast toast = Toast.makeText( this, R.string.apk_not_found, Toast.LENGTH_LONG ); toast.show(); } } } } @Override protected void onActivityResult(int requestCode, int resultCode, Intent data) { super.onActivityResult(requestCode, resultCode, data); Log.d( TAG, "requestCode=" + requestCode + ",resultCode=" + resultCode + ",intent=" + data ); if ( INTENT_REQUEST_CODE_INSTALL == requestCode ) { boolean needRefresh = false; // TODO queue update, current all refresh if ( null != mDataList ) { mDataList.clear(); } this.makeApplicationList(); needRefresh = true; if ( needRefresh ) { if ( null != mListView ) { mListView.requestLayout(); } } } } }
src/jp/ne/sakura/kkkon/android/reinstallapk/MainActivity.java
package jp.ne.sakura.kkkon.android.reinstallapk; import android.app.Activity; import android.content.Context; import android.content.Intent; import android.content.pm.ApplicationInfo; import android.content.pm.PackageInfo; import android.content.pm.PackageManager; import android.graphics.drawable.Drawable; import android.net.Uri; import android.os.Bundle; import android.util.Log; import android.view.Gravity; import android.view.View; import android.view.ViewGroup; import android.widget.AdapterView; import android.widget.BaseAdapter; import android.widget.ImageView; import android.widget.LinearLayout; import android.widget.ListView; import android.widget.TextView; import android.widget.Toast; import java.io.File; import java.util.ArrayList; import java.util.List; import jp.ne.sakura.kkkon.android.exceptionhandler.SettingsCompat; public class MainActivity extends Activity implements ListView.OnItemClickListener { private static final String TAG = "kk-ReInstall-Apk"; private List<MyListData> mDataList = new ArrayList<MyListData>(128); private ListView mListView; private TextView mUnknownSourceTextView; public class MyListData { private Drawable image; private String text; private String packageName; private long firstInstallTime; private long lastUpdateTime; private String apkPath; public void setImage( final Drawable image ) { this.image = image; } public Drawable getImage() { return this.image; } public void setPackageName( final String packageName ) { this.packageName = packageName; } public String getPackageName() { return this.packageName; } public void setText( final String text ) { this.text = text; } public String getText() { return this.text; } public long getFirstInstallTime() { return firstInstallTime; } public void setFirstInstallTime(long firstInstallTime) { this.firstInstallTime = firstInstallTime; } public long getLastUpdateTime() { return lastUpdateTime; } public void setLastUpdateTime(long lastUpdateTime) { this.lastUpdateTime = lastUpdateTime; } public String getApkPath() { return apkPath; } public void setApkPath(String apkPath) { this.apkPath = apkPath; } } /** Called when the activity is first created. 
*/ @Override public void onCreate(Bundle savedInstanceState) { Log.d( TAG, "onCreate:"); super.onCreate(savedInstanceState); LinearLayout layout = new LinearLayout( this ); layout.setOrientation( LinearLayout.VERTICAL ); mUnknownSourceTextView = new TextView( this ); { final StringBuilder sb = new StringBuilder(); { final String label = this.getString( R.string.system_setting ); if ( null != label ) { sb.append( label ); } } sb.append( ": " ); { final String label = this.getString( R.string.unknown_sources ); if ( null != label ) { sb.append( label ); } } sb.append( " " ); SettingsCompat.initialize( this.getApplicationContext() ); final boolean isAllow = SettingsCompat.isAllowedNonMarketApps(); if ( isAllow ) { final String label = this.getString( R.string.unknown_sources_ok ); if ( null != label ) { sb.append( label ); } else { sb.append( "OK" ); } } else { final String label = this.getString( R.string.unknown_sources_ng ); if ( null != label ) { sb.append( label ); } else { sb.append( "NG" ); } } mUnknownSourceTextView.setGravity( Gravity.RIGHT ); mUnknownSourceTextView.setText( sb.toString() ); } layout.addView( mUnknownSourceTextView ); ImageView imageView = new ImageView( this ); layout.addView( imageView); mListView = new ListView( this ); TextView emptyTextView = new TextView( this ); emptyTextView.setText( "No items found" ); mListView.setEmptyView( emptyTextView ); makeApplicationList(); MyAdapter adapter = new MyAdapter( this ); mListView.setAdapter( adapter ); mListView.setOnItemClickListener( this ); layout.addView( mListView ); setContentView( layout ); } @Override protected void onResume() { super.onResume(); } @Override protected void onPause() { super.onPause(); } private void makeApplicationList() { PackageManager pm = getPackageManager(); if ( null == pm ) { return; } final List<ApplicationInfo> listApplicationInfo = pm.getInstalledApplications( 0 ); if ( null == listApplicationInfo ) { return; } for ( final ApplicationInfo appInfo : listApplicationInfo ) { if ( null == appInfo ) { continue; } if ( null != appInfo.sourceDir ) { if ( appInfo.sourceDir.startsWith( "/system/" ) ) { continue; } if ( null != appInfo.packageName ) { if ( appInfo.packageName.startsWith( "com.example." ) ) { continue; } if ( appInfo.packageName.startsWith( "com.android." ) ) { continue; } if ( appInfo.packageName.startsWith( "com.google.android." 
) ) { continue; } } Log.d( TAG, "package=" + appInfo.packageName ); Log.d( TAG, "name=" + appInfo.name ); Log.d( TAG, "sourcedir=" + appInfo.sourceDir ); Log.d( TAG, "label=" + appInfo.loadLabel( pm ) ); MyListData item = new MyListData(); item.setApkPath( appInfo.sourceDir ); { final CharSequence label = appInfo.loadLabel( pm ); if ( null == label ) { item.setText( appInfo.packageName ); } else { item.setText( label.toString() ); } } item.setPackageName( appInfo.packageName ); final Drawable drawable = appInfo.loadIcon(pm); if ( null != drawable ) { Log.d( TAG, "icon: w=" + drawable.getIntrinsicWidth() + ",h=" + drawable.getIntrinsicHeight() ); } item.setImage( drawable ); { try { final PackageInfo packageInfo = pm.getPackageInfo( appInfo.packageName, 0 ); if ( null != packageInfo ) { final long firstInstallTime = packageInfo.firstInstallTime; // API9 final long lastUpdateTime = packageInfo.firstInstallTime; // API9 Log.d( TAG, "firstInstallTime=" + firstInstallTime ); Log.d( TAG, "lastUpdateTime=" + lastUpdateTime ); item.setFirstInstallTime( firstInstallTime ); item.setLastUpdateTime( lastUpdateTime ); } } catch ( PackageManager.NameNotFoundException e ) { Log.e( TAG, "got Exception=" + e.toString(), e ); } } Log.d( TAG, "" ); mDataList.add( item ); } } } public class MyAdapter extends BaseAdapter { private Context mContext; public MyAdapter( Context context ) { this.mContext = context; } @Override public int getCount() { if ( null != mDataList ) { return mDataList.size(); } return 0; } @Override public Object getItem(int i) { if ( null != mDataList ) { return mDataList.get(i); } return null; } @Override public long getItemId(int i) { return i; } @Override public View getView(int i, View view, ViewGroup vg) { View v = view; if ( null == v ) { LinearLayout layout = new LinearLayout( mContext ); layout.setOrientation( LinearLayout.HORIZONTAL ); ImageView imageView = new ImageView( mContext ); imageView.setId( 0 ); imageView.setScaleType( ImageView.ScaleType.FIT_XY ); imageView.setLayoutParams( new ViewGroup.LayoutParams( 144, 144 ) ); imageView.setAdjustViewBounds( true ); TextView textView = new TextView( mContext ); textView.setId( 1 ); textView.setLayoutParams( new ViewGroup.LayoutParams( ViewGroup.LayoutParams.FILL_PARENT, ViewGroup.LayoutParams.FILL_PARENT ) ); textView.setGravity( Gravity.CENTER_VERTICAL ); textView.setPadding( 20, 20, 20, 20 ); layout.addView( imageView ); layout.addView( textView ); v = layout; } MyListData itemData = (MyListData)this.getItem(i); if ( null != itemData ) { ImageView imageView = (ImageView) v.findViewById(0); TextView textView = (TextView) v.findViewById(1); if ( null != imageView ) { imageView.setBackgroundDrawable( itemData.getImage() ); } if ( null != textView ) { textView.setText( itemData.getText() ); } } return v; } } public void onItemClick(AdapterView<?> av, View view, int position, long id) { if ( this.mListView == av ) { if ( null == this.mDataList ) { Log.d( TAG, "mDataList is null " ); return; } final MyListData itemData = this.mDataList.get( position ); if ( null == itemData ) { Log.d( TAG, "itemData is null index=" + position ); return; } { final String apkPath = itemData.getApkPath(); boolean installCalled = false; if ( null != apkPath ) { final File fileApk = new File(apkPath); if ( fileApk.exists() ) { installCalled = true; Intent promptInstall = new Intent(Intent.ACTION_VIEW); promptInstall.setDataAndType(Uri.fromFile( fileApk ), "application/vnd.android.package-archive"); //promptInstall.addFlags( Intent.FLAG_ACTIVITY_NEW_TASK ); 
this.startActivity( promptInstall ); } else { Log.d( TAG, "fileApk not exists. path=" + fileApk.getAbsolutePath() ); } } else { Log.d( TAG, "apkPath is null" ); } if ( !installCalled ) { Toast toast = Toast.makeText( this, R.string.apk_not_found, Toast.LENGTH_LONG ); toast.show(); } else { boolean needRefresh = false; { final String packageName = itemData.getPackageName(); { final PackageManager pm = this.getPackageManager(); if ( null != pm ) { try { final ApplicationInfo appInfo = pm.getApplicationInfo( packageName, 0 ); if ( null != appInfo ) { itemData.setApkPath( appInfo.sourceDir ); this.mDataList.set( position, itemData ); needRefresh = true; } } catch ( PackageManager.NameNotFoundException e ) { Log.d( TAG, "got Exception: " + e.toString(), e); } } } } } } } } }
change startActivity to startActivityForResult, refresh apk path
src/jp/ne/sakura/kkkon/android/reinstallapk/MainActivity.java
change startActivity to startActivityForResult, refresh apk path
<ide><path>rc/jp/ne/sakura/kkkon/android/reinstallapk/MainActivity.java <ide> private ListView mListView; <ide> private TextView mUnknownSourceTextView; <ide> <add> private static final int INTENT_REQUEST_CODE_INSTALL = 0; <add> <ide> public class MyListData <ide> { <ide> private Drawable image; <ide> Intent promptInstall = new Intent(Intent.ACTION_VIEW); <ide> promptInstall.setDataAndType(Uri.fromFile( fileApk ), "application/vnd.android.package-archive"); <ide> //promptInstall.addFlags( Intent.FLAG_ACTIVITY_NEW_TASK ); <del> this.startActivity( promptInstall ); <add> this.startActivityForResult( promptInstall, INTENT_REQUEST_CODE_INSTALL ); <ide> } <ide> else <ide> { <ide> Toast toast = Toast.makeText( this, R.string.apk_not_found, Toast.LENGTH_LONG ); <ide> toast.show(); <ide> } <del> else <del> { <del> boolean needRefresh = false; <del> <del> { <del> final String packageName = itemData.getPackageName(); <del> { <del> final PackageManager pm = this.getPackageManager(); <del> if ( null != pm ) <del> { <del> try <del> { <del> final ApplicationInfo appInfo = pm.getApplicationInfo( packageName, 0 ); <del> if ( null != appInfo ) <del> { <del> itemData.setApkPath( appInfo.sourceDir ); <del> this.mDataList.set( position, itemData ); <del> needRefresh = true; <del> } <del> } <del> catch ( PackageManager.NameNotFoundException e ) <del> { <del> Log.d( TAG, "got Exception: " + e.toString(), e); <del> } <del> } <del> } <del> } <del> } <del> } <del> } <del> } <add> } <add> } <add> } <add> <add> @Override <add> protected void onActivityResult(int requestCode, int resultCode, Intent data) <add> { <add> super.onActivityResult(requestCode, resultCode, data); <add> Log.d( TAG, "requestCode=" + requestCode + ",resultCode=" + resultCode + ",intent=" + data ); <add> <add> if ( INTENT_REQUEST_CODE_INSTALL == requestCode ) <add> { <add> boolean needRefresh = false; <add> <add> // TODO queue update, current all refresh <add> if ( null != mDataList ) <add> { <add> mDataList.clear(); <add> } <add> this.makeApplicationList(); <add> needRefresh = true; <add> <add> if ( needRefresh ) <add> { <add> if ( null != mListView ) <add> { <add> mListView.requestLayout(); <add> } <add> } <add> } <add> } <add> <ide> <ide> }
Java
apache-2.0
d5a0477765273a73f7f2aa294bc2a78dfd090170
0
velo/incubator-tinkerpop,robertdale/tinkerpop,artem-aliev/tinkerpop,PommeVerte/incubator-tinkerpop,artem-aliev/tinkerpop,samiunn/incubator-tinkerpop,mike-tr-adamson/incubator-tinkerpop,n-tran/incubator-tinkerpop,gdelafosse/incubator-tinkerpop,vtslab/incubator-tinkerpop,robertdale/tinkerpop,mpollmeier/tinkerpop3,jorgebay/tinkerpop,BrynCooke/incubator-tinkerpop,apache/tinkerpop,RussellSpitzer/incubator-tinkerpop,BrynCooke/incubator-tinkerpop,newkek/incubator-tinkerpop,RussellSpitzer/incubator-tinkerpop,artem-aliev/tinkerpop,vtslab/incubator-tinkerpop,mike-tr-adamson/incubator-tinkerpop,RedSeal-co/incubator-tinkerpop,RedSeal-co/incubator-tinkerpop,velo/incubator-tinkerpop,dalaro/incubator-tinkerpop,samiunn/incubator-tinkerpop,apache/tinkerpop,pluradj/incubator-tinkerpop,apache/tinkerpop,jorgebay/tinkerpop,robertdale/tinkerpop,artem-aliev/tinkerpop,Lab41/tinkerpop3,apache/incubator-tinkerpop,BrynCooke/incubator-tinkerpop,edgarRd/incubator-tinkerpop,gdelafosse/incubator-tinkerpop,vtslab/incubator-tinkerpop,apache/incubator-tinkerpop,edgarRd/incubator-tinkerpop,dalaro/incubator-tinkerpop,krlohnes/tinkerpop,rmagen/incubator-tinkerpop,rmagen/incubator-tinkerpop,krlohnes/tinkerpop,velo/incubator-tinkerpop,dalaro/incubator-tinkerpop,newkek/incubator-tinkerpop,apache/incubator-tinkerpop,newkek/incubator-tinkerpop,krlohnes/tinkerpop,artem-aliev/tinkerpop,n-tran/incubator-tinkerpop,edgarRd/incubator-tinkerpop,krlohnes/tinkerpop,gdelafosse/incubator-tinkerpop,Lab41/tinkerpop3,apache/tinkerpop,RedSeal-co/incubator-tinkerpop,pluradj/incubator-tinkerpop,PommeVerte/incubator-tinkerpop,krlohnes/tinkerpop,apache/tinkerpop,robertdale/tinkerpop,pluradj/incubator-tinkerpop,rmagen/incubator-tinkerpop,robertdale/tinkerpop,samiunn/incubator-tinkerpop,jorgebay/tinkerpop,apache/tinkerpop,n-tran/incubator-tinkerpop,mpollmeier/tinkerpop3,jorgebay/tinkerpop,mike-tr-adamson/incubator-tinkerpop,apache/tinkerpop,PommeVerte/incubator-tinkerpop,RussellSpitzer/incubator-tinkerpop
package com.tinkerpop.gremlin.algorithm.generator; import com.tinkerpop.gremlin.AbstractGremlinTest; import com.tinkerpop.gremlin.structure.Graph; import com.tinkerpop.gremlin.structure.Vertex; import org.apache.commons.configuration.Configuration; import org.javatuples.Triplet; import org.junit.Test; import org.junit.experimental.runners.Enclosed; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import java.util.Arrays; import static org.junit.Assert.*; /** * @author Stephen Mallette (http://stephen.genoprime.com) */ @RunWith(Enclosed.class) public class CommunityGeneratorTest { @RunWith(Parameterized.class) public static class DifferentDistributionsTest extends AbstractGremlinTest { @Parameterized.Parameters(name = "{index}: {0}.test({1},{2})") public static Iterable<Object[]> data() { return Arrays.asList(new Object[][]{ {new NormalDistribution(2), new PowerLawDistribution(2.4), 0.1}, {new NormalDistribution(2), new PowerLawDistribution(2.4), 0.5}, {new NormalDistribution(2), new NormalDistribution(4), 0.5}, {new NormalDistribution(2), new NormalDistribution(4), 0.1}, {new PowerLawDistribution(2.3), new PowerLawDistribution(2.4), 0.2}, {new PowerLawDistribution(2.3), new NormalDistribution(4), 0.2} }); } @Parameterized.Parameter(value = 0) public Distribution communityDistribution; @Parameterized.Parameter(value = 1) public Distribution degreeDistribution; @Parameterized.Parameter(value = 2) public double crossPcent; private static final int numberOfVertices = 100; @Test public void shouldGenerateRandomGraph() throws Exception { final CommunityGenerator generator = new CommunityGenerator("knows"); communityGeneratorTest(g, generator); final Configuration configuration = graphProvider.newGraphConfiguration("g1"); final Graph g1 = graphProvider.openTestGraph(configuration); prepareGraph(g1); final CommunityGenerator generator1 = new CommunityGenerator("knows"); communityGeneratorTest(g1, generator1); // don't assert counts of edges...those may be the same, just ensure that not every vertex has the // same number of edges between graphs. that should make it harder for the test to fail. assertFalse(g.V().toList().stream() .map(v -> Triplet.with(v.getValue("oid"), v.inE().count(), v.outE().count())) .allMatch(p -> { final Vertex v = (Vertex) g1.V().has("oid", p.getValue0()).next(); return p.getValue1() == v.inE().count() && p.getValue2() == v.outE().count(); })); graphProvider.clear(g1, configuration); } @Test public void shouldGenerateSameGraph() throws Exception { final CommunityGenerator generator = new CommunityGenerator("knows", null, null, () -> 123456789l); communityGeneratorTest(g, generator); final Configuration configuration = graphProvider.newGraphConfiguration("g1"); final Graph g1 = graphProvider.openTestGraph(configuration); prepareGraph(g1); final CommunityGenerator generator1 = new CommunityGenerator("knows", null, null, () -> 123456789l); communityGeneratorTest(g1, generator1); assertEquals(g.E().count(), g1.E().count()); // ensure that every vertex has the same number of edges between graphs. 
assertTrue(g.V().toList().stream() .map(v -> Triplet.with(v.getValue("oid"), v.inE().count(), v.outE().count())) .allMatch(p -> { final Vertex v = (Vertex) g1.V().has("oid", p.getValue0()).next(); return p.getValue1() == v.inE().count() && p.getValue2() == v.outE().count(); })); graphProvider.clear(g1, configuration); } @Override protected void prepareGraph(final Graph g) throws Exception { final int numNodes = numberOfVertices; for (int i = 0; i < numNodes; i++) g.addVertex("oid", i); } private void communityGeneratorTest(final Graph graph, final CommunityGenerator generator) throws Exception { boolean generated = false; double localCrossPcent = crossPcent; while (!generated) { try { generator.setCommunityDistribution(communityDistribution); generator.setDegreeDistribution(degreeDistribution); generator.setCrossCommunityPercentage(localCrossPcent); final int numEdges = generator.generate(graph, numberOfVertices / 10, numberOfVertices * 10); assertEquals(numEdges, graph.E().count()); generated = true; } catch (IllegalArgumentException iae) { generated = false; localCrossPcent = localCrossPcent - 0.05d; g.V().remove(); prepareGraph(graph); System.out.println(String.format("Ran CommunityGeneratorTest with different CrossCommunityPercentage, expected %s but used %s", crossPcent, localCrossPcent)); } } } } public static class AnnotatorTest extends AbstractGremlinTest { @Test public void shouldAnnotateEdges() { final CommunityGenerator generator = new CommunityGenerator("knows", e -> e.setProperty("data", "test")); final Distribution dist = new NormalDistribution(2); generator.setCommunityDistribution(dist); generator.setDegreeDistribution(dist); generator.setCrossCommunityPercentage(0.0); generator.generate(g, 100, 1000); tryCommit(g, g -> assertTrue(g.E().toList().stream().allMatch(e -> e.getValue("data").equals("test")))); } @Test public void shouldAnnotateVertices() { final CommunityGenerator generator = new CommunityGenerator("knows", e -> e.setProperty("data", "test")); final Distribution dist = new NormalDistribution(2); generator.setCommunityDistribution(dist); generator.setDegreeDistribution(dist); generator.setCrossCommunityPercentage(0.0); generator.generate(g, 100, 1000); tryCommit(g, g -> assertTrue(g.E().toList().stream().allMatch(e -> e.getValue("data").equals("test")))); } @Test public void shouldAnnotateVerticesEdges() { final CommunityGenerator generator = new CommunityGenerator("knows", e -> e.setProperty("data", "test"), (v, m) -> { m.forEach(v::setProperty); v.setProperty("test", "data"); }); final Distribution dist = new NormalDistribution(2); generator.setCommunityDistribution(dist); generator.setDegreeDistribution(dist); generator.setCrossCommunityPercentage(0.0); generator.generate(g, 100, 1000); tryCommit(g, g -> { assertTrue(g.E().toList().stream().allMatch(e -> e.getValue("data").equals("test"))); assertTrue(g.V().toList().stream().allMatch( v -> v.getValue("test").equals("data") && v.getProperty("communityIndex").isPresent() )); }); } } }
gremlin/gremlin-test/src/main/java/com/tinkerpop/gremlin/algorithm/generator/CommunityGeneratorTest.java
package com.tinkerpop.gremlin.algorithm.generator; import com.tinkerpop.gremlin.AbstractGremlinTest; import com.tinkerpop.gremlin.structure.Graph; import com.tinkerpop.gremlin.structure.Vertex; import org.apache.commons.configuration.Configuration; import org.javatuples.Triplet; import org.junit.Test; import org.junit.experimental.runners.Enclosed; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import java.util.Arrays; import static org.junit.Assert.*; /** * @author Stephen Mallette (http://stephen.genoprime.com) */ @RunWith(Enclosed.class) public class CommunityGeneratorTest { @RunWith(Parameterized.class) public static class DifferentDistributionsTest extends AbstractGremlinTest { @Parameterized.Parameters(name = "{index}: {0}.test({1},{2})") public static Iterable<Object[]> data() { return Arrays.asList(new Object[][]{ {new NormalDistribution(2), new PowerLawDistribution(2.4), 0.1}, {new NormalDistribution(2), new PowerLawDistribution(2.4), 0.5}, {new NormalDistribution(2), new NormalDistribution(4), 0.5}, {new NormalDistribution(2), new NormalDistribution(4), 0.1}, {new PowerLawDistribution(2.3), new PowerLawDistribution(2.4), 0.2}, {new PowerLawDistribution(2.3), new NormalDistribution(4), 0.2} }); } @Parameterized.Parameter(value = 0) public Distribution communityDistribution; @Parameterized.Parameter(value = 1) public Distribution degreeDistribution; @Parameterized.Parameter(value = 2) public double crossPcent; private static final int numberOfVertices = 100; @Test public void shouldGenerateRandomGraph() throws Exception { final CommunityGenerator generator = new CommunityGenerator("knows"); communityGeneratorTest(g, generator); final Configuration configuration = graphProvider.newGraphConfiguration("g1"); final Graph g1 = graphProvider.openTestGraph(configuration); prepareGraph(g1); final CommunityGenerator generator1 = new CommunityGenerator("knows"); communityGeneratorTest(g1, generator1); // don't assert counts of edges...those may be the same, just ensure that not every vertex has the // same number of edges between graphs. that should make it harder for the test to fail. assertFalse(g.V().toList().stream() .map(v -> Triplet.with(v.getValue("oid"), v.inE().count(), v.outE().count())) .allMatch(p -> { final Vertex v = (Vertex) g1.V().has("oid", p.getValue0()).next(); return p.getValue1() == v.inE().count() && p.getValue2() == v.outE().count(); })); graphProvider.clear(g1, configuration); } @Test public void shouldGenerateSameGraph() throws Exception { final CommunityGenerator generator = new CommunityGenerator("knows", null, null, () -> 123456789l); communityGeneratorTest(g, generator); final Configuration configuration = graphProvider.newGraphConfiguration("g1"); final Graph g1 = graphProvider.openTestGraph(configuration); prepareGraph(g1); final CommunityGenerator generator1 = new CommunityGenerator("knows", null, null, () -> 123456789l); communityGeneratorTest(g1, generator1); assertEquals(g.E().count(), g1.E().count()); // ensure that every vertex has the same number of edges between graphs. 
assertTrue(g.V().toList().stream() .map(v -> Triplet.with(v.getValue("oid"), v.inE().count(), v.outE().count())) .allMatch(p -> { final Vertex v = (Vertex) g1.V().has("oid", p.getValue0()).next(); return p.getValue1() == v.inE().count() && p.getValue2() == v.outE().count(); })); graphProvider.clear(g1, configuration); } @Override protected void prepareGraph(final Graph g) throws Exception { final int numNodes = numberOfVertices; for (int i = 0; i < numNodes; i++) g.addVertex("oid", i); } private void communityGeneratorTest(final Graph graph, final CommunityGenerator generator) throws Exception { boolean generated = false; double localCrossPcent = crossPcent; while (!generated) { try { generator.setCommunityDistribution(communityDistribution); generator.setDegreeDistribution(degreeDistribution); generator.setCrossCommunityPercentage(localCrossPcent); final int numEdges = generator.generate(graph, numberOfVertices / 10, numberOfVertices * 10); assertEquals(numEdges, graph.E().count()); generated = true; } catch (IllegalArgumentException iae) { generated = false; localCrossPcent = localCrossPcent - 0.05d; g.V().forEach(Vertex::remove); prepareGraph(graph); System.out.println(String.format("Ran CommunityGeneratorTest with different CrossCommunityPercentage, expected %s but used %s", crossPcent, localCrossPcent)); } } } } public static class AnnotatorTest extends AbstractGremlinTest { @Test public void shouldAnnotateEdges() { final CommunityGenerator generator = new CommunityGenerator("knows", e -> e.setProperty("data", "test")); final Distribution dist = new NormalDistribution(2); generator.setCommunityDistribution(dist); generator.setDegreeDistribution(dist); generator.setCrossCommunityPercentage(0.0); generator.generate(g, 100, 1000); tryCommit(g, g -> assertTrue(g.E().toList().stream().allMatch(e -> e.getValue("data").equals("test")))); } @Test public void shouldAnnotateVertices() { final CommunityGenerator generator = new CommunityGenerator("knows", e -> e.setProperty("data", "test")); final Distribution dist = new NormalDistribution(2); generator.setCommunityDistribution(dist); generator.setDegreeDistribution(dist); generator.setCrossCommunityPercentage(0.0); generator.generate(g, 100, 1000); tryCommit(g, g -> assertTrue(g.E().toList().stream().allMatch(e -> e.getValue("data").equals("test")))); } @Test public void shouldAnnotateVerticesEdges() { final CommunityGenerator generator = new CommunityGenerator("knows", e -> e.setProperty("data", "test"), (v, m) -> { m.forEach(v::setProperty); v.setProperty("test", "data"); }); final Distribution dist = new NormalDistribution(2); generator.setCommunityDistribution(dist); generator.setDegreeDistribution(dist); generator.setCrossCommunityPercentage(0.0); generator.generate(g, 100, 1000); tryCommit(g, g -> { assertTrue(g.E().toList().stream().allMatch(e -> e.getValue("data").equals("test"))); assertTrue(g.V().toList().stream().allMatch( v -> v.getValue("test").equals("data") && v.getProperty("communityIndex").isPresent() )); }); } } }
Harden tests again around community generator.
gremlin/gremlin-test/src/main/java/com/tinkerpop/gremlin/algorithm/generator/CommunityGeneratorTest.java
Harden tests again around community generator.
<ide><path>remlin/gremlin-test/src/main/java/com/tinkerpop/gremlin/algorithm/generator/CommunityGeneratorTest.java <ide> } catch (IllegalArgumentException iae) { <ide> generated = false; <ide> localCrossPcent = localCrossPcent - 0.05d; <del> g.V().forEach(Vertex::remove); <add> g.V().remove(); <ide> prepareGraph(graph); <ide> System.out.println(String.format("Ran CommunityGeneratorTest with different CrossCommunityPercentage, expected %s but used %s", crossPcent, localCrossPcent)); <ide> }
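A minimal JavaScript sketch of the retry pattern the test record above relies on: when graph generation throws an IllegalArgumentException, the test clears the graph and retries with a smaller cross-community percentage. generate and reset are hypothetical stand-ins for the generator call and the graph clean-up, and the pcent > 0 bound is added here so the sketch terminates; the Java test itself simply loops until generation succeeds.

function generateWithFallback(generate, reset, crossPcent, step) {
  var pcent = crossPcent;
  while (pcent > 0) {
    try {
      return generate(pcent);   // succeed with the current percentage
    } catch (err) {
      pcent -= step;            // try a smaller percentage next
      reset();                  // clear any partially generated state first
      console.log('retrying with crossPcent=' + pcent);
    }
  }
  throw new Error('graph generation failed for every percentage tried');
}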
JavaScript
mit
8120d362c678db1d802bc05a38cb3e31faa06d22
0
uber/tchannel-node,uber/tchannel-node
// Copyright (c) 2015 Uber Technologies, Inc. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. 'use strict'; var assert = require('assert'); var inherits = require('util').inherits; var EventEmitter = require('./lib/event_emitter'); var stat = require('./lib/stat.js'); var net = require('net'); var CountedReadySignal = require('ready-signal/counted'); var TChannelConnection = require('./connection'); var errors = require('./errors'); var Request = require('./request'); var PreferOutgoing = require('./peer_score_strategies.js').PreferOutgoing; var NoPreference = require('./peer_score_strategies.js').NoPreference; var PreferIncoming = require('./peer_score_strategies.js').PreferIncoming; var DEFAULT_REPORT_INTERVAL = 1000; function TChannelPeer(channel, hostPort, options) { assert(hostPort !== '0.0.0.0:0', 'Cannot create ephemeral peer'); if (!(this instanceof TChannelPeer)) { return new TChannelPeer(channel, hostPort, options); } var self = this; options = options || {}; EventEmitter.call(self); self.stateChangedEvent = self.defineEvent('stateChanged'); self.allocConnectionEvent = self.defineEvent('allocConnection'); self.removeConnectionEvent = self.defineEvent('removeConnection'); self.channel = channel; self.logger = self.channel.logger; self.timers = self.channel.timers; self.random = self.channel.random; self.hostPort = hostPort; self.connections = []; self.pendingIdentified = 0; self.heapElements = []; self.scoreStrategy = null; self.boundOnIdentified = onIdentified; self.boundOnConnectionError = onConnectionError; self.boundOnConnectionClose = onConnectionClose; self.draining = false; self.drainTimer = null; self.drainReason = ''; self.drainDirection = ''; self.reportInterval = options.reportInterval || DEFAULT_REPORT_INTERVAL; if (self.reportInterval > 0 && self.channel.emitConnectionMetrics) { self.reportTimer = self.timers.setTimeout( onReport, self.reportInterval ); } var direction = options.preferConnectionDirection || 'any'; self.setPreferConnectionDirection(direction); function onIdentified(_, conn) { self.onIdentified(conn); } function onConnectionError(err, conn) { self.onConnectionError(err, conn); } function onConnectionClose(_, conn) { self.onConnectionClose(conn); } function onReport() { if (!self.hostPort) { return; } var count = self.countConnections('out'); if (self.channel.emitConnectionMetrics) { self.channel.emitFastStat(self.channel.buildStat( 'tchannel.connections.active', 'gauge', count, new stat.ConnectionsActiveTags( self.channel.hostPort, self.hostPort 
) )); } self.reportTimer = self.timers.setTimeout( onReport, self.reportInterval ); } } inherits(TChannelPeer, EventEmitter); TChannelPeer.prototype.extendLogInfo = function extendLogInfo(info) { var self = this; info.hostPort = self.hostPort; info.peerDraining = self.draining; return info; }; TChannelPeer.prototype.drain = function drain(options, callback) { var self = this; var chan = self.channel.topChannel || self.channel; assert(options, 'options is required'); assert(options.reason, 'a reason is required'); assert(!chan.draining, 'cannot drain a peer while channel is draining'); assert(!self.draining, 'cannot double drain a peer'); self.draining = true; self.drainReason = options.reason; self.drainDirection = options.direction || 'both'; if (options.timeout) { var drainTimer = chan.timers.setTimeout(drainTimedOut, options.timeout); self.drainTimer = drainTimer; } var start = chan.timers.now(); var finished = false; var drained = CountedReadySignal(1); process.nextTick(drained.signal); drained(drainDone); for (var i = 0; i < self.connections.length; i++) { var conn = self.connections[i]; if (self.drainDirection === 'both' || self.drainDirection === conn.direction) { drained.counter++; conn.drain(self.drainReason, drained.signal); } } self.logger.info('draining peer', self.extendLogInfo({ reason: self.drainReason, direction: self.drainDirection, count: drained.counter })); function drainDone() { finish(null); } function drainTimedOut() { if (finished) { return; } var now = chan.timers.now(); finish(errors.PeerDrainTimedOutError({ direction: self.drainDirection, elapsed: now - start, timeout: options.timeout })); } function finish(err) { if (drainTimer) { chan.timers.clearTimeout(drainTimer); if (self.drainTimer === drainTimer) { self.drainTimer = null; } } if (!finished) { finished = true; callback(err); } } }; TChannelPeer.prototype.clearDrain = function clearDrain() { var self = this; var chan = self.channel.topChannel || self.channel; self.draining = false; self.drainReason = ''; self.drainDirection = ''; if (self.drainTimer) { chan.timers.clearTimeout(self.drainTimer); self.drainTimer = null; } }; TChannelPeer.prototype.setPreferConnectionDirection = function setPreferConnectionDirection(direction) { var self = this; if (self.preferConnectionDirection === direction) { return; } self.preferConnectionDirection = direction; if (self.preferConnectionDirection === 'out') { self.setScoreStrategy(PreferOutgoing); } else if (self.preferConnectionDirection === 'in') { self.setScoreStrategy(PreferIncoming); } else { self.setScoreStrategy(NoPreference); } self.invalidateScore('setPreferConnectionDirection'); }; TChannelPeer.prototype.setScoreStrategy = function setScoreStrategy(ScoreStrategy) { var self = this; self.scoreStrategy = new ScoreStrategy(self); }; TChannelPeer.prototype.invalidateScore = function invalidateScore(reason) { var self = this; if (!self.heapElements.length) { return; } var info = self.channel.peerScoredEvent ? 
{ peer: self, reason: reason || 'unknown', score: 0, oldScores: [], scores: [] } : null; var score = self.scoreStrategy.getScore(); for (var i = 0; i < self.heapElements.length; i++) { var el = self.heapElements[i]; if (info) { info.oldScores.push(el.score); info.scores.push(score); } el.rescore(score); } if (info) { self.channel.peerScoredEvent.emit(self, info); } }; TChannelPeer.prototype.isConnected = function isConnected(direction, identified) { var self = this; if (identified === undefined) identified = true; for (var i = 0; i < self.connections.length; i++) { var conn = self.connections[i]; if (direction && conn.direction !== direction) { continue; } else if (conn.closing) { continue; } else if (conn.remoteName !== null || !identified) { return true; } } return false; }; TChannelPeer.prototype.closeDrainedConnections = function closeDrainedConnections(callback) { var self = this; var counter = 1; var conns = self.connections.slice(0); for (var i = 0; i < conns.length; i++) { var conn = conns[i]; if (conn.draining) { counter++; conn.close(onClose); } } onClose(); function onClose() { if (--counter <= 0) { if (counter < 0) { self.logger.error('closed more peer sockets than expected', { counter: counter }); } callback(null); } } }; TChannelPeer.prototype.close = function close(callback) { var self = this; if (self.reportTimer) { self.timers.clearTimeout(self.reportTimer); self.reportTimer = null; } var conns = self.connections.slice(0); var counter = conns.length; if (counter) { for (var i = 0; i < conns.length; i++) { conns[i].close(onClose); } } else { callback(null); } function onClose() { if (--counter <= 0) { if (counter < 0) { self.logger.error('closed more peer sockets than expected', { counter: counter }); } callback(null); } } }; TChannelPeer.prototype.getInConnection = function getInConnection(preferIdentified) { var self = this; var candidate = null; for (var i = 0; i < self.connections.length; i++) { var conn = self.connections[i]; if (conn.closing) continue; if (!preferIdentified) return conn; // user doesn't care, take first incoming if (conn.remoteName) return conn; // user wanted an identified channel, and we found one if (!candidate) candidate = conn; // we'll fallback to returning this if we can't find an identified one } return candidate; }; TChannelPeer.prototype.getIdentifiedInConnection = function getIdentifiedInConnection() { var self = this; return self.getInConnection(true); }; TChannelPeer.prototype.getOutConnection = function getOutConnection(preferIdentified) { var self = this; var candidate = null; for (var i = self.connections.length - 1; i >= 0; i--) { var conn = self.connections[i]; if (conn.closing) continue; if (!preferIdentified) return conn; // user doesn't care, take last outgoing if (conn.remoteName) return conn; // user wanted an identified channel, and we found one if (!candidate) candidate = conn; // we'll fallback to returning this if we can't find an identified one } return candidate; }; TChannelPeer.prototype.getIdentifiedOutConnection = function getIdentifiedOutConnection() { var self = this; return self.getOutConnection(true); }; TChannelPeer.prototype.countConnections = function countConnections(direction) { var self = this; if (!direction) { return self.connections.length; } var count = 0; for (var i = 0; i < self.connections.length; i++) { var conn = self.connections[i]; if (conn.direction === direction) { count++; } } return count; }; // ensures that a connection exists TChannelPeer.prototype.connect = function connect(outOnly) { var 
self = this; var conn = null; if (self.preferConnectionDirection === 'in' && !outOnly) { conn = self.getIdentifiedInConnection(); } else { conn = self.getIdentifiedOutConnection(); } if (!conn || (outOnly && conn.direction !== 'out')) { var socket = self.makeOutSocket(); conn = self.makeOutConnection(socket); self.addConnection(conn); } return conn; }; // ensures that an outbound connection exists TChannelPeer.prototype.connectTo = function connectTo() { var self = this; self.connect(true); }; TChannelPeer.prototype.waitForIdentified = function waitForIdentified(conn, callback) { var self = this; if (typeof conn === 'function' && !callback) { callback = conn; conn = self.connect(); } if (conn.closing) { callback(conn.closeError); } else if (conn.remoteName) { callback(null); } else { self._waitForIdentified(conn, callback); } }; TChannelPeer.prototype._waitForIdentified = function _waitForIdentified(conn, callback) { var self = this; self.pendingIdentified++; conn.errorEvent.on(onConnectionError); conn.closeEvent.on(onConnectionClose); conn.identifiedEvent.on(onIdentified); self.invalidateScore('waitForIdentified'); function onConnectionError(err) { finish(err); } function onConnectionClose(err) { finish(err); } function onIdentified() { finish(null); } function finish(err) { self.pendingIdentified = 0; conn.errorEvent.removeListener(onConnectionError); conn.closeEvent.removeListener(onConnectionClose); conn.identifiedEvent.removeListener(onIdentified); self.invalidateScore('waitForIdentified > finish'); callback(err); } }; TChannelPeer.prototype.request = function peerRequest(options) { var self = this; options.timeout = options.timeout || Request.defaultTimeout; return self.connect().request(options); }; TChannelPeer.prototype.addConnection = function addConnection(conn) { var self = this; // TODO: first approx alert for self.connections.length > 2 // TODO: second approx support pruning if (conn.direction === 'out') { self.connections.push(conn); } else { self.connections.unshift(conn); } conn.errorEvent.on(self.boundOnConnectionError); conn.closeEvent.on(self.boundOnConnectionClose); self._maybeInvalidateScore('addConnection'); if (!conn.remoteName) { // TODO: could optimize if handler had a way of saying "would a new // identified connection change your Tier?" 
conn.identifiedEvent.on(self.boundOnIdentified); } if (!conn.draining) { if (conn.channel.draining) { conn.drain(conn.channel.drainReason, null); } else if (self.draining && ( self.drainDirection === 'both' || self.drainDirection === conn.direction)) { conn.drain(self.drainReason, null); } } return conn; }; TChannelPeer.prototype.onIdentified = function onIdentified(conn) { var self = this; conn.identifiedEvent.removeListener(self.boundOnIdentified); self._maybeInvalidateScore('addConnection > onIdentified'); }; TChannelPeer.prototype.onConnectionError = function onConnectionError(err, conn) { var self = this; conn.closeEvent.removeListener(self.boundOnConnectionClose); conn.errorEvent.removeListener(self.boundOnConnectionError); conn.identifiedEvent.removeListener(self.boundOnIdentified); self.removeConnectionFrom(err, conn); }; TChannelPeer.prototype.onConnectionClose = function onConnectionClose(conn) { var self = this; conn.closeEvent.removeListener(self.boundOnConnectionClose); conn.errorEvent.removeListener(self.boundOnConnectionError); conn.identifiedEvent.removeListener(self.boundOnIdentified); self.removeConnectionFrom(null, conn); }; TChannelPeer.prototype.removeConnectionFrom = function removeConnectionFrom(err, conn) { var self = this; if (err) { var loggerInfo = { error: err, direction: conn.direction, remoteName: conn.remoteName, socketRemoteAddr: conn.socketRemoteAddr }; var codeName = errors.classify(err); if (codeName === 'Timeout') { self.logger.warn('Got a connection error', loggerInfo); } else { self.logger.error('Got an unexpected connection error', loggerInfo); } } self.removeConnection(conn); }; TChannelPeer.prototype.removeConnection = function removeConnection(conn) { var self = this; var ret = null; var index = self.connections ? self.connections.indexOf(conn) : -1; if (index !== -1) { ret = self.connections.splice(index, 1)[0]; } self._maybeInvalidateScore('removeConnection'); self.removeConnectionEvent.emit(self, conn); return ret; }; TChannelPeer.prototype.makeOutSocket = function makeOutSocket() { var self = this; var parts = self.hostPort.split(':'); assert(parts.length === 2, 'invalid destination'); var host = parts[0]; var port = parts[1]; assert(host !== '0.0.0.0', 'cannot connect to ephemeral peer'); assert(port !== '0', 'cannot connect to dynamic port'); var socket = net.createConnection({host: host, port: port}); return socket; }; TChannelPeer.prototype.makeOutConnection = function makeOutConnection(socket) { var self = this; var chan = self.channel.topChannel || self.channel; var conn = new TChannelConnection(chan, socket, 'out', self.hostPort); self.allocConnectionEvent.emit(self, conn); return conn; }; TChannelPeer.prototype.pendingWeightedRandom = function pendingWeightedRandom() { // Returns a score in the range from 0 to 1, where it is preferable to use // a peer with a higher score over one with a lower score. // This range is divided among an infinite set of subranges corresponding // to peers with the same number of pending requests. // So, the range (1/2, 1) is reserved for peers with 0 pending connections. // The range (1/4, 1/2) is reserved for peers with 1 pending connections. // The range (1/8, 1/4) is reserved for peers with 2 pending connections. // Ad nauseam. // Within each equivalence class, each peer receives a uniform random // value. // // The previous score was a weighted random variable: // random() ** (1 + pending) // This had the attribute that a less loaded peer was merely more likely to // be chosen over a more loaded peer. 
// We observed with the introduction of a heap, that a less favored peer // would have its score less frequently re-evaluated. // An emergent behavior was that scores would, over time, be squeezed // toward zero and the least favored peer would remain the least favored // for ever increasing durations. // // This remains true with this algorithm, within each equivalence class. var self = this; var pending = self.pendingIdentified + self.countPending(); var max = Math.pow(0.5, pending); var min = max / 2; var diff = max - min; return min + diff * self.random(); }; TChannelPeer.prototype.countPending = function countPending() { var self = this; var pending = 0; for (var index = 0; index < self.connections.length; index++) { var connPending = self.connections[index].ops.getPending(); pending += connPending.out; pending += connPending.errors; } return pending; }; // TODO: on connection #getScore impacting event // - on identified // Called on connection change event TChannelPeer.prototype._maybeInvalidateScore = function _maybeInvalidateScore(reason) { var self = this; if (self.scoreStrategy.getTier() !== self.scoreStrategy.lastTier) { self.invalidateScore(reason); } }; TChannelPeer.prototype.getScore = function getScore() { var self = this; return self.scoreStrategy.getScore(); }; module.exports = TChannelPeer;
peer.js
// Copyright (c) 2015 Uber Technologies, Inc. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. 'use strict'; var assert = require('assert'); var inherits = require('util').inherits; var EventEmitter = require('./lib/event_emitter'); var stat = require('./lib/stat.js'); var net = require('net'); var CountedReadySignal = require('ready-signal/counted'); var TChannelConnection = require('./connection'); var errors = require('./errors'); var Request = require('./request'); var PreferOutgoing = require('./peer_score_strategies.js').PreferOutgoing; var NoPreference = require('./peer_score_strategies.js').NoPreference; var PreferIncoming = require('./peer_score_strategies.js').PreferIncoming; var DEFAULT_REPORT_INTERVAL = 1000; function TChannelPeer(channel, hostPort, options) { assert(hostPort !== '0.0.0.0:0', 'Cannot create ephemeral peer'); if (!(this instanceof TChannelPeer)) { return new TChannelPeer(channel, hostPort, options); } var self = this; options = options || {}; EventEmitter.call(self); self.stateChangedEvent = self.defineEvent('stateChanged'); self.allocConnectionEvent = self.defineEvent('allocConnection'); self.removeConnectionEvent = self.defineEvent('removeConnection'); self.channel = channel; self.logger = self.channel.logger; self.timers = self.channel.timers; self.random = self.channel.random; self.hostPort = hostPort; self.connections = []; self.pendingIdentified = 0; self.heapElements = []; self.scoreStrategy = null; self.boundOnIdentified = onIdentified; self.boundOnConnectionError = onConnectionError; self.boundOnConnectionClose = onConnectionClose; self.draining = false; self.drainTimer = null; self.drainReason = ''; self.drainDirection = ''; self.reportInterval = options.reportInterval || DEFAULT_REPORT_INTERVAL; if (self.reportInterval > 0 && self.channel.emitConnectionMetrics) { self.reportTimer = self.timers.setTimeout( onReport, self.reportInterval ); } var direction = options.preferConnectionDirection || 'any'; self.setPreferConnectionDirection(direction); function onIdentified(_, conn) { self.onIdentified(conn); } function onConnectionError(err, conn) { self.onConnectionError(err, conn); } function onConnectionClose(_, conn) { self.onConnectionClose(conn); } function onReport() { if (!self.hostPort) { return; } var count = self.countConnections('out'); if (self.channel.emitConnectionMetrics) { self.channel.emitFastStat(self.channel.buildStat( 'tchannel.connections.active', 'gauge', count, new stat.ConnectionsActiveTags( self.channel.hostPort, self.hostPort 
) )); } self.reportTimer = self.timers.setTimeout( onReport, self.reportInterval ); } } inherits(TChannelPeer, EventEmitter); TChannelPeer.prototype.extendLogInfo = function extendLogInfo(info) { var self = this; info.hostPort = self.hostPort; info.peerDraining = self.draining; return info; }; TChannelPeer.prototype.drain = function drain(options, callback) { var self = this; var chan = self.channel.topChannel || self.channel; assert(options, 'options is required'); assert(options.reason, 'a reason is required'); assert(!chan.draining, 'cannot drain a peer while channel is draining'); assert(!self.draining, 'cannot double drain a peer'); self.draining = true; self.drainReason = options.reason; self.drainDirection = options.direction || 'both'; if (options.timeout) { var drainTimer = chan.timers.setTimeout(drainTimedOut, options.timeout); self.drainTimer = drainTimer; } var start = chan.timers.now(); var finished = false; var drained = CountedReadySignal(1); process.nextTick(drained.signal); drained(drainDone); for (var i = 0; i < self.connections.length; i++) { var conn = self.connections[i]; if (self.drainDirection === 'both' || self.drainDirection === conn.direction) { drained.counter++; conn.drain(self.drainReason, drained.signal); } } self.logger.info('draining peer', self.extendLogInfo({ reason: self.drainReason, direction: self.drainDirection, count: drained.counter })); function drainDone() { finish(null); } function drainTimedOut() { if (finished) { return; } var now = chan.timers.now(); finish(errors.PeerDrainTimedOutError({ direction: self.drainDirection, elapsed: now - start, timeout: options.timeout })); } function finish(err) { if (drainTimer) { chan.timers.clearTimeout(drainTimer); if (self.drainTimer === drainTimer) { self.drainTimer = null; } } if (!finished) { finished = true; callback(err); } } }; TChannelPeer.prototype.clearDrain = function clearDrain() { var self = this; var chan = self.channel.topChannel || self.channel; self.draining = false; self.drainReason = ''; self.drainDirection = ''; if (self.drainTimer) { chan.timers.clearTimeout(self.drainTimer); self.drainTimer = null; } }; TChannelPeer.prototype.setPreferConnectionDirection = function setPreferConnectionDirection(direction) { var self = this; if (self.preferConnectionDirection === direction) { return; } self.preferConnectionDirection = direction; if (self.preferConnectionDirection === 'out') { self.setScoreStrategy(PreferOutgoing); } else if (self.preferConnectionDirection === 'in') { self.setScoreStrategy(PreferIncoming); } else { self.setScoreStrategy(NoPreference); } self.invalidateScore('setPreferConnectionDirection'); }; TChannelPeer.prototype.setScoreStrategy = function setScoreStrategy(ScoreStrategy) { var self = this; self.scoreStrategy = new ScoreStrategy(self); }; TChannelPeer.prototype.invalidateScore = function invalidateScore(reason) { var self = this; if (!self.heapElements.length) { return; } var info = self.channel.peerScoredEvent ? 
{ peer: self, reason: reason || 'unknown', score: 0, oldScores: [], scores: [] } : null; var score = self.scoreStrategy.getScore(); for (var i = 0; i < self.heapElements.length; i++) { var el = self.heapElements[i]; if (info) { info.oldScores.push(el.score); info.scores.push(score); } el.rescore(score); } if (info) { self.channel.peerScoredEvent.emit(self, info); } }; TChannelPeer.prototype.isConnected = function isConnected(direction, identified) { var self = this; if (identified === undefined) identified = true; for (var i = 0; i < self.connections.length; i++) { var conn = self.connections[i]; if (direction && conn.direction !== direction) { continue; } else if (conn.closing) { continue; } else if (conn.remoteName !== null || !identified) { return true; } } return false; }; TChannelPeer.prototype.closeDrainedConnections = function closeDrainedConnections(callback) { var self = this; var counter = 1; var conns = self.connections.slice(0); for (var i = 0; i < conns.length; i++) { var conn = conns[i]; if (conn.draining) { counter++; conn.close(onClose); } } onClose(); function onClose() { if (--counter <= 0) { if (counter < 0) { self.logger.error('closed more peer sockets than expected', { counter: counter }); } callback(null); } } }; TChannelPeer.prototype.close = function close(callback) { var self = this; if (self.reportTimer) { self.timers.clearTimeout(self.reportTimer); self.reportTimer = null; } var conns = self.connections.slice(0); var counter = conns.length; if (counter) { for (var i = 0; i < conns.length; i++) { conns[i].close(onClose); } } else { callback(null); } function onClose() { if (--counter <= 0) { if (counter < 0) { self.logger.error('closed more peer sockets than expected', { counter: counter }); } callback(null); } } }; TChannelPeer.prototype.getInConnection = function getInConnection(preferIdentified) { var self = this; var candidate = null; for (var i = 0; i < self.connections.length; i++) { var conn = self.connections[i]; if (conn.closing) continue; if (!preferIdentified) return conn; // user doesn't care, take first incoming if (conn.remoteName) return conn; // user wanted an identified channel, and we found one if (!candidate) candidate = conn; // we'll fallback to returning this if we can't find an identified one } return candidate; }; TChannelPeer.prototype.getIdentifiedInConnection = function getIdentifiedInConnection() { var self = this; return self.getInConnection(true); }; TChannelPeer.prototype.getOutConnection = function getOutConnection(preferIdentified) { var self = this; var candidate = null; for (var i = self.connections.length - 1; i >= 0; i--) { var conn = self.connections[i]; if (conn.closing) continue; if (!preferIdentified) return conn; // user doesn't care, take last outgoing if (conn.remoteName) return conn; // user wanted an identified channel, and we found one if (!candidate) candidate = conn; // we'll fallback to returning this if we can't find an identified one } return candidate; }; TChannelPeer.prototype.getIdentifiedOutConnection = function getIdentifiedOutConnection() { var self = this; return self.getOutConnection(true); }; TChannelPeer.prototype.countConnections = function countConnections(direction) { var self = this; if (!direction) { return self.connections.length; } var count = 0; for (var i = 0; i < self.connections.length; i++) { var conn = self.connections[i]; if (conn.direction === direction) { count++; } } return count; }; // ensures that a connection exists TChannelPeer.prototype.connect = function connect(outOnly) { var 
self = this; var conn = null; if (self.preferConnectionDirection === 'in' && !outOnly) { conn = self.getIdentifiedInConnection(); } else { conn = self.getIdentifiedOutConnection(); } if (!conn || (outOnly && conn.direction !== 'out')) { var socket = self.makeOutSocket(); conn = self.makeOutConnection(socket); self.addConnection(conn); } return conn; }; // ensures that an outbound connection exists TChannelPeer.prototype.connectTo = function connectTo() { var self = this; self.connect(true); }; TChannelPeer.prototype.waitForIdentified = function waitForIdentified(conn, callback) { var self = this; if (typeof conn === 'function' && !callback) { callback = conn; conn = self.connect(); } if (conn.closing) { callback(conn.closeError); } else if (conn.remoteName) { callback(null); } else { self._waitForIdentified(conn, callback); } }; TChannelPeer.prototype._waitForIdentified = function _waitForIdentified(conn, callback) { var self = this; self.pendingIdentified++; conn.errorEvent.on(onConnectionError); conn.closeEvent.on(onConnectionClose); conn.identifiedEvent.on(onIdentified); self.invalidateScore('waitForIdentified'); function onConnectionError(err) { finish(err); } function onConnectionClose(err) { finish(err); } function onIdentified() { finish(null); } function finish(err) { self.pendingIdentified = 0; conn.errorEvent.removeListener(onConnectionError); conn.closeEvent.removeListener(onConnectionClose); conn.identifiedEvent.removeListener(onIdentified); self.invalidateScore('waitForIdentified > finish'); callback(err); } }; TChannelPeer.prototype.request = function peerRequest(options) { var self = this; options.timeout = options.timeout || Request.defaultTimeout; return self.connect().request(options); }; TChannelPeer.prototype.addConnection = function addConnection(conn) { var self = this; // TODO: first approx alert for self.connections.length > 2 // TODO: second approx support pruning if (conn.direction === 'out') { self.connections.push(conn); } else { self.connections.unshift(conn); } conn.errorEvent.on(self.boundOnConnectionError); conn.closeEvent.on(self.boundOnConnectionClose); self._maybeInvalidateScore('addConnection'); if (!conn.remoteName) { // TODO: could optimize if handler had a way of saying "would a new // identified connection change your Tier?" 
conn.identifiedEvent.on(self.boundOnIdentified); } if (!conn.draining) { if (conn.channel.draining) { conn.drain(conn.channel.drainReason, null); } else if (self.draining && ( self.drainDirection === 'both' || self.drainDirection === conn.direction)) { conn.drain(self.drainReason, null); } } return conn; }; TChannelPeer.prototype.onIdentified = function onIdentified(conn) { var self = this; conn.identifiedEvent.removeListener(self.boundOnIdentified); self._maybeInvalidateScore('addConnection > onIdentified'); }; TChannelPeer.prototype.onConnectionError = function onConnectionError(err, conn) { var self = this; conn.closeEvent.removeListener(self.boundOnConnectionClose); conn.errorEvent.removeListener(self.boundOnConnectionError); conn.identifiedEvent.removeListener(self.boundOnIdentified); self.removeConnectionFrom(err, conn); }; TChannelPeer.prototype.onConnectionClose = function onConnectionClose(conn) { var self = this; conn.closeEvent.removeListener(self.boundOnConnectionClose); conn.errorEvent.removeListener(self.boundOnConnectionError); conn.identifiedEvent.removeListener(self.boundOnIdentified); self.removeConnectionFrom(null, conn); }; TChannelPeer.prototype.removeConnectionFrom = function removeConnectionFrom(err, conn) { var self = this; if (err) { var loggerInfo = { error: err, direction: conn.direction, remoteName: conn.remoteName, socketRemoteAddr: conn.socketRemoteAddr }; var codeName = errors.classify(err); if (codeName === 'Timeout') { self.logger.warn('Got a connection error', loggerInfo); } else { self.logger.error('Got an unexpected connection error', loggerInfo); } } self.removeConnection(conn); }; TChannelPeer.prototype.removeConnection = function removeConnection(conn) { var self = this; var ret = null; var index = self.connections ? self.connections.indexOf(conn) : -1; if (index !== -1) { ret = self.connections.splice(index, 1)[0]; } self._maybeInvalidateScore('removeConnection'); self.removeConnectionEvent.emit(self, conn); return ret; }; TChannelPeer.prototype.makeOutSocket = function makeOutSocket() { var self = this; var parts = self.hostPort.split(':'); assert(parts.length === 2, 'invalid destination'); var host = parts[0]; var port = parts[1]; assert(host !== '0.0.0.0', 'cannot connect to ephemeral peer'); assert(port !== '0', 'cannot connect to dynamic port'); var socket = net.createConnection({host: host, port: port}); return socket; }; TChannelPeer.prototype.makeOutConnection = function makeOutConnection(socket) { var self = this; var chan = self.channel.topChannel || self.channel; var conn = new TChannelConnection(chan, socket, 'out', self.hostPort); self.allocConnectionEvent.emit(self, conn); return conn; }; TChannelPeer.prototype.pendingWeightedRandom = function pendingWeightedRandom() { // Returns a score in the range from 0 to 1, where it is preferable to use // a peer with a higher score over one with a lower score. // This range is divided among an infinite set of subranges corresponding // to peers with the same number of pending requests. // So, the range (1/2, 1) is reserved for peers with 0 pending connections. // The range (1/4, 1/2) is reserved for peers with 1 pending connections. // The range (1/8, 1/4) is reserved for peers with 2 pending connections. // Ad nauseam. // Within each equivalence class, each peer receives a uniform random // value. // // The previous score was a weighted random variable: // random() ** (1 + pending) // This had the attribute that a less loaded peer was merely more likely to // be chosen over a more loaded peer. 
// We observed with the introduction of a heap, that a less favored peer // would have its score less frequently re-evaluated. // An emergent behavior was that scores would, over time, be squeezed // toward zero and the least favored peer would remain the least favored // for ever increasing durations. // // This remains true with this algorithm, within each equivalence class. var self = this; var pending = self.pendingIdentified + self.countPending(); var max = Math.pow(0.5, pending); var min = max / 2; var diff = max - min; return min + diff * self.random(); }; TChannelPeer.prototype.countPending = function countPending() { var self = this; var pending = 0; for (var index = 0; index < self.connections.length; index++) { var connPending = self.connections[index].ops.getPending(); pending += connPending.out; pending += connPending.errors; } return pending; }; // TODO: on connection #getScore impacting event // - on identified // Called on connection change event TChannelPeer.prototype._maybeInvalidateScore = function _maybeInvalidateScore(reason) { var self = this; if (self.scoreStrategy.getTier() !== self.scoreStrategy.lastTier) { self.invalidateScore(reason); } }; TChannelPeer.prototype.getScore = function getScore() { var self = this; return self.scoreStrategy.getScore(); }; module.exports = TChannelPeer;
TChannelPeer: trivial cleanup
peer.js
TChannelPeer: trivial cleanup
<ide><path>eer.js <ide> } <ide> }; <ide> <del>TChannelPeer.prototype.close = function close(callback) { <add>TChannelPeer.prototype.close = <add>function close(callback) { <ide> var self = this; <ide> <ide> if (self.reportTimer) { <ide> }; <ide> <ide> // ensures that a connection exists <del>TChannelPeer.prototype.connect = function connect(outOnly) { <add>TChannelPeer.prototype.connect = <add>function connect(outOnly) { <ide> var self = this; <ide> var conn = null; <ide> if (self.preferConnectionDirection === 'in' && !outOnly) {
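For clarity, here is the scoring scheme from pendingWeightedRandom in the peer.js record above, pulled out as a standalone JavaScript sketch with worked values; pending and random stand in for the peer's pending-request count and its random source.

function pendingWeightedRandom(pending, random) {
  var max = Math.pow(0.5, pending); // 0 pending -> 1, 1 -> 0.5, 2 -> 0.25, ...
  var min = max / 2;                // lower bound of this equivalence class
  return min + (max - min) * random();
}

// Worked ranges: 0 pending scores in (0.5, 1), 1 pending in (0.25, 0.5),
// 2 pending in (0.125, 0.25), so a less loaded peer always outranks a more loaded one.
pendingWeightedRandom(0, Math.random); // e.g. 0.87
pendingWeightedRandom(2, Math.random); // e.g. 0.19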
Java
apache-2.0
6dfb563bbef429681c6597bae17e8ce1bace16da
0
java110/MicroCommunity,java110/MicroCommunity,java110/MicroCommunity,java110/MicroCommunity
package com.java110.utils.constant; /** * 个人物品常量类 * Created by wuxw on 2017/5/20. */ public class ServiceCodeUserStorehouseConstant { /** * 添加 个人物品 */ public static final String ADD_USERSTOREHOUSE = "resourceStore.saveUserStorehouse"; /** * 修改 个人物品 */ public static final String UPDATE_USERSTOREHOUSE = "resourceStore.updateUserStorehouse"; /** * 删除 个人物品 */ public static final String DELETE_USERSTOREHOUSE = "resourceStore.deleteUserStorehouse"; /** * 查询 个人物品 */ public static final String LIST_USERSTOREHOUSES = "resourceStore.listUserStorehouses"; }
java110-utils/src/main/java/com/java110/utils/constant/ServiceCodeUserStorehouseConstant.java
package com.java110.utils.constant; /** * 个人物品常量类 * Created by wuxw on 2017/5/20. */ public class ServiceCodeUserStorehouseConstant { /** * 添加 个人物品 */ public static final String ADD_USERSTOREHOUSE = "userStorehouse.saveUserStorehouse"; /** * 修改 个人物品 */ public static final String UPDATE_USERSTOREHOUSE = "userStorehouse.updateUserStorehouse"; /** * 删除 个人物品 */ public static final String DELETE_USERSTOREHOUSE = "userStorehouse.deleteUserStorehouse"; /** * 查询 个人物品 */ public static final String LIST_USERSTOREHOUSES = "userStorehouse.listUserStorehouses"; }
Optimize the code for querying my items
java110-utils/src/main/java/com/java110/utils/constant/ServiceCodeUserStorehouseConstant.java
Optimize the code for querying my items
<ide><path>ava110-utils/src/main/java/com/java110/utils/constant/ServiceCodeUserStorehouseConstant.java <ide> /** <ide> * 添加 个人物品 <ide> */ <del> public static final String ADD_USERSTOREHOUSE = "userStorehouse.saveUserStorehouse"; <add> public static final String ADD_USERSTOREHOUSE = "resourceStore.saveUserStorehouse"; <ide> <ide> <ide> /** <ide> * 修改 个人物品 <ide> */ <del> public static final String UPDATE_USERSTOREHOUSE = "userStorehouse.updateUserStorehouse"; <add> public static final String UPDATE_USERSTOREHOUSE = "resourceStore.updateUserStorehouse"; <ide> /** <ide> * 删除 个人物品 <ide> */ <del> public static final String DELETE_USERSTOREHOUSE = "userStorehouse.deleteUserStorehouse"; <add> public static final String DELETE_USERSTOREHOUSE = "resourceStore.deleteUserStorehouse"; <ide> <ide> <ide> /** <ide> * 查询 个人物品 <ide> */ <del> public static final String LIST_USERSTOREHOUSES = "userStorehouse.listUserStorehouses"; <add> public static final String LIST_USERSTOREHOUSES = "resourceStore.listUserStorehouses"; <ide> <ide> <ide> }
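A hypothetical JavaScript sketch of why the prefix change in the record above ("userStorehouse.*" to "resourceStore.*") can matter: service-code strings like these are often dispatched on their prefix. The handlers map and dispatch function below are illustrative assumptions, not part of the java110 codebase.

var handlers = {
  resourceStore: function (action, payload) { return 'resourceStore handles ' + action; },
  userStorehouse: function (action, payload) { return 'legacy handler for ' + action; }
};

function dispatch(serviceCode, payload) {
  var parts = serviceCode.split('.');   // e.g. "resourceStore.listUserStorehouses"
  var handler = handlers[parts[0]];     // route on the prefix
  if (!handler) throw new Error('no handler for ' + serviceCode);
  return handler(parts[1], payload);
}

dispatch('resourceStore.listUserStorehouses', {});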
JavaScript
apache-2.0
ccc30754d5b616df3c05e82d06c26a3d95f5163e
0
porkepic/squiggle,porkepic/squiggle
import Ember from "ember"; export default Ember.Mixin.create({ exportToPng: function(){ var img = this.get("image"), canvas = this.$("canvas")[0], context = canvas.getContext("2d"), outerSvg = this.$(".squiggle-paper").clone(), svg, svgImg = new Image(), url, promise, that = this; outerSvg.find("textarea").remove(); outerSvg.find("image").remove(); svg = outerSvg.find("svg"); promise = new Ember.RSVP.Promise(function(resolve, reject){ try { if(img){ img = that.$("img")[0]; canvas.width = img.naturalWidth; canvas.height = img.naturalHeight; svg.attr("width", canvas.width); svg.attr("height", canvas.height); context.drawImage(img, 0, 0); }else{ canvas.width = that.$().width(); canvas.height = that.$().height(); } url = "data:image/svg+xml," + outerSvg.html(); svgImg.onload = function(){ context.drawImage(svgImg, 0, 0); resolve(canvas.toDataURL()); }; svgImg.onError = reject; svgImg.src = url; } catch(e){ reject(e); } }); return promise; } });
addon/mixins/export-to-png.js
import Ember from "ember"; export default Ember.Mixin.create({ exportToPng: function(){ var img = this.get("image"), canvas = this.$("canvas")[0], context = canvas.getContext("2d"), svg = this.$("svg").clone(), svgImg = new Image(), url, promise, that = this; promise = new Ember.RSVP.Promise(function(resolve, reject){ try { if(img){ img = that.$("img")[0]; canvas.width = img.naturalWidth; canvas.height = img.naturalHeight; svg.attr("width", canvas.width); svg.attr("height", canvas.height); context.drawImage(img, 0, 0); }else{ canvas.width = that.$().width(); canvas.height = that.$().height(); } url = "data:image/svg+xml," + svg[0].outerHTML; svgImg.onload = function () { context.drawImage(svgImg, 0, 0); resolve(canvas.toDataURL()); } svgImg.onError = reject; svgImg.src = url; } catch(e){ reject(e); } }); return promise; } });
Export to png: svg image does not work in safari and fails silently (l) safari
addon/mixins/export-to-png.js
Export to png: svg image does not work in safari and fails silently (l) safari
<ide><path>ddon/mixins/export-to-png.js <ide> var img = this.get("image"), <ide> canvas = this.$("canvas")[0], <ide> context = canvas.getContext("2d"), <del> svg = this.$("svg").clone(), <add> outerSvg = this.$(".squiggle-paper").clone(), <add> svg, <ide> svgImg = new Image(), <ide> url, promise, <ide> that = this; <add> <add> outerSvg.find("textarea").remove(); <add> outerSvg.find("image").remove(); <add> svg = outerSvg.find("svg"); <ide> <ide> promise = new Ember.RSVP.Promise(function(resolve, reject){ <ide> try { <ide> canvas.width = that.$().width(); <ide> canvas.height = that.$().height(); <ide> } <del> url = "data:image/svg+xml," + svg[0].outerHTML; <ide> <del> svgImg.onload = function () { <add> url = "data:image/svg+xml," + outerSvg.html(); <add> <add> svgImg.onload = function(){ <ide> context.drawImage(svgImg, 0, 0); <ide> resolve(canvas.toDataURL()); <del> } <add> }; <ide> svgImg.onError = reject; <ide> svgImg.src = url; <add> <ide> } catch(e){ <ide> reject(e); <ide> }
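The export-to-png record above boils down to one browser technique: serialize the SVG, load it into an Image via a data URL, draw it onto a canvas, then export the canvas. Below is a minimal sketch using only standard DOM/canvas APIs; svgElement, onDone and onError are hypothetical inputs, and the URI encoding is added here for safety rather than taken from the mixin.

function svgToPngDataUrl(svgElement, onDone, onError) {
  var box = svgElement.getBoundingClientRect();
  var canvas = document.createElement('canvas');
  canvas.width = box.width;
  canvas.height = box.height;
  var context = canvas.getContext('2d');

  var markup = new XMLSerializer().serializeToString(svgElement);
  var img = new Image();
  img.onload = function () {
    context.drawImage(img, 0, 0);            // rasterize the SVG
    onDone(canvas.toDataURL('image/png'));   // export as a PNG data URL
  };
  img.onerror = onError;                     // standard Image objects use lowercase onerror
  img.src = 'data:image/svg+xml,' + encodeURIComponent(markup);
}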
Java
mpl-2.0
578cb4a059af8076226c9675a2286ac1430cfdbd
0
etomica/etomica,etomica/etomica,ajschult/etomica,ajschult/etomica,ajschult/etomica,etomica/etomica
/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ package etomica.graph.operations; import java.util.Arrays; import java.util.HashSet; import java.util.Set; import etomica.graph.iterators.IteratorWrapper; import etomica.graph.iterators.filters.IdenticalGraphFilter; import etomica.graph.model.Graph; import etomica.graph.model.impl.CoefficientImpl; import etomica.graph.property.Property; /** * Returns all graphs that are isomorphs of the original graph(s). Optionally, * condense graphs that are identical. * * @author Andrew Schultz */ public class AllIsomorphs implements Unary { public Set<Graph> apply(Set<Graph> argument, Parameters params) { assert (params instanceof AllIsomorphsParameters); Set<Graph> result = new HashSet<Graph>(); for (Graph g : argument) { result.addAll(apply(g, (AllIsomorphsParameters)params)); } return result; } public Set<Graph> apply(Graph g, AllIsomorphsParameters params) { Set<Graph> result = new HashSet<Graph>(); if (params.propertyFilter == null || params.propertyFilter.check(g)) result.add(g.copy()); if (g.nodeCount() < 2) { return result; } byte nodeCount = g.nodeCount(); byte[] labels = new byte[nodeCount]; for (byte i=0; i<labels.length; i++) { labels[i] = i; } Relabel relabel = new Relabel(); RelabelParameters rp = new RelabelParameters(labels); // swaps is the number of times we have swapped node i since the last time we swapped node i-1 // we'll need to swap nodeCount-1-i times byte[] swaps = new byte[nodeCount-1]; while (true) { boolean success = false; for (byte iNode = (byte)(nodeCount-2); iNode>-1; iNode--) { if (swaps[iNode] < nodeCount-iNode-1) { byte tmp = labels[iNode]; byte minMax = nodeCount; byte swapNode = -1; // we want to swap with the node at a higher index with the next highest value... // we'll have to find it for (byte jNode=(byte)(iNode+1); jNode<nodeCount; jNode++) { if (labels[jNode] > tmp && labels[jNode] < minMax) { minMax = labels[jNode]; swapNode = jNode; } } if (swapNode > -1) { labels[iNode] = labels[swapNode]; labels[swapNode] = tmp; swaps[iNode]++; for (byte jNode = (byte)(iNode+1); jNode<(byte)(nodeCount-1); jNode++) { swaps[jNode] = 0; } if (iNode+1 < nodeCount-1) { Arrays.sort(labels, iNode+1, nodeCount); } success = true; break; } } } if (!success) { break; } Graph newGraph = relabel.apply(g, rp); if (params.propertyFilter == null || params.propertyFilter.check(newGraph)) { result.add(newGraph); } } int nPermutations = result.size(); CoefficientImpl multiplier = new CoefficientImpl(1, nPermutations); for (Graph gr : result) { gr.coefficient().multiply(multiplier); } if (params.onlyUnique) { IdenticalGraphFilter identicalFilter = new IdenticalGraphFilter(new IteratorWrapper(result.iterator())); Set<Graph> filteredResult = new HashSet<Graph>(); while (identicalFilter.hasNext()) { filteredResult.add(identicalFilter.next()); } result = filteredResult; } return result; } public static class AllIsomorphsParameters implements Parameters { public boolean onlyUnique; public Property propertyFilter; public AllIsomorphsParameters(boolean onlyUnique) { this(onlyUnique, null); } public AllIsomorphsParameters(boolean onlyUnique, Property propertyFilter) { this.onlyUnique = onlyUnique; this.propertyFilter = propertyFilter; } } }
etomica.graph/etomica/graph/operations/AllIsomorphs.java
/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ package etomica.graph.operations; import java.util.Arrays; import java.util.HashSet; import java.util.Set; import etomica.graph.iterators.IteratorWrapper; import etomica.graph.iterators.filters.IdenticalGraphFilter; import etomica.graph.model.Graph; import etomica.graph.model.impl.CoefficientImpl; import etomica.graph.property.Property; /** * Returns all graphs that are isomorphs of the original graph(s). Optionally, * condense graphs that are identical. * * @author Andrew Schultz */ public class AllIsomorphs implements Unary { public Set<Graph> apply(Set<Graph> argument, Parameters params) { assert (params instanceof AllIsomorphsParameters); Set<Graph> result = new HashSet<Graph>(); for (Graph g : argument) { result.addAll(apply(g, (AllIsomorphsParameters)params)); } return result; } public Set<Graph> apply(Graph g, AllIsomorphsParameters params) { Set<Graph> result = new HashSet<Graph>(); result.add(g.copy()); if (g.nodeCount() < 2) { return result; } byte nodeCount = g.nodeCount(); byte[] labels = new byte[nodeCount]; for (byte i=0; i<labels.length; i++) { labels[i] = i; } Relabel relabel = new Relabel(); RelabelParameters rp = new RelabelParameters(labels); // swaps is the number of times we have swapped node i since the last time we swapped node i-1 // we'll need to swap nodeCount-1-i times byte[] swaps = new byte[nodeCount-1]; while (true) { boolean success = false; for (byte iNode = (byte)(nodeCount-2); iNode>-1; iNode--) { if (swaps[iNode] < nodeCount-iNode-1) { byte tmp = labels[iNode]; byte minMax = nodeCount; byte swapNode = -1; // we want to swap with the node at a higher index with the next highest value... // we'll have to find it for (byte jNode=(byte)(iNode+1); jNode<nodeCount; jNode++) { if (labels[jNode] > tmp && labels[jNode] < minMax) { minMax = labels[jNode]; swapNode = jNode; } } if (swapNode > -1) { labels[iNode] = labels[swapNode]; labels[swapNode] = tmp; swaps[iNode]++; for (byte jNode = (byte)(iNode+1); jNode<(byte)(nodeCount-1); jNode++) { swaps[jNode] = 0; } if (iNode+1 < nodeCount-1) { Arrays.sort(labels, iNode+1, nodeCount); } success = true; break; } } } if (!success) { break; } Graph newGraph = relabel.apply(g, rp); if (params.propertyFilter == null || params.propertyFilter.check(newGraph)) { result.add(newGraph); } } int nPermutations = result.size(); CoefficientImpl multiplier = new CoefficientImpl(1, nPermutations); for (Graph gr : result) { gr.coefficient().multiply(multiplier); } if (params.onlyUnique) { IdenticalGraphFilter identicalFilter = new IdenticalGraphFilter(new IteratorWrapper(result.iterator())); Set<Graph> filteredResult = new HashSet<Graph>(); while (identicalFilter.hasNext()) { filteredResult.add(identicalFilter.next()); } result = filteredResult; } return result; } public static class AllIsomorphsParameters implements Parameters { public boolean onlyUnique; public Property propertyFilter; public AllIsomorphsParameters(boolean onlyUnique) { this(onlyUnique, null); } public AllIsomorphsParameters(boolean onlyUnique, Property propertyFilter) { this.onlyUnique = onlyUnique; this.propertyFilter = propertyFilter; } } }
impose propertyFilter on first graph
etomica.graph/etomica/graph/operations/AllIsomorphs.java
impose propertyFilter on first graph
<ide><path>tomica.graph/etomica/graph/operations/AllIsomorphs.java <ide> <ide> public Set<Graph> apply(Graph g, AllIsomorphsParameters params) { <ide> Set<Graph> result = new HashSet<Graph>(); <del> result.add(g.copy()); <add> if (params.propertyFilter == null || params.propertyFilter.check(g)) result.add(g.copy()); <ide> if (g.nodeCount() < 2) { <ide> return result; <ide> }
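The behavioural point of the diff above is that the original (identity) labelling must pass the same property filter as every other relabelling. Here is an illustrative sketch in JavaScript (the project itself is Java); propertyFilter is a hypothetical predicate over a label ordering.

function allIsomorphs(labels, propertyFilter) {
  var results = [];
  function permute(current, remaining) {
    if (remaining.length === 0) {
      if (!propertyFilter || propertyFilter(current)) results.push(current);
      return;
    }
    for (var i = 0; i < remaining.length; i++) {
      permute(current.concat(remaining[i]),
              remaining.slice(0, i).concat(remaining.slice(i + 1)));
    }
  }
  // The first permutation emitted is the identity labelling, and it goes
  // through the same filter as all the others.
  permute([], labels);
  return results;
}

allIsomorphs([0, 1, 2], null).length; // 6 permutations of three labels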
Java
bsd-3-clause
dd514cc93c6bc2722b99835d9f97cf0b37f06836
0
Clunker5/tregmine-2.0,EmilHernvall/tregmine,EmilHernvall/tregmine,Clunker5/tregmine-2.0,EmilHernvall/tregmine
package info.tregmine.listeners; import java.util.List; import java.util.ArrayList; import java.util.Map; import java.util.HashMap; import java.text.SimpleDateFormat; import org.bukkit.ChatColor; import org.bukkit.GameMode; import org.bukkit.Location; import org.bukkit.block.BlockState; import org.bukkit.block.DoubleChest; import org.bukkit.entity.Player; import org.bukkit.event.EventHandler; import org.bukkit.event.Listener; import org.bukkit.event.inventory.InventoryClickEvent; import org.bukkit.event.inventory.InventoryCloseEvent; import org.bukkit.event.inventory.InventoryCreativeEvent; import org.bukkit.event.inventory.InventoryDragEvent; import org.bukkit.event.inventory.InventoryInteractEvent; import org.bukkit.event.inventory.InventoryMoveItemEvent; import org.bukkit.event.inventory.InventoryOpenEvent; import org.bukkit.event.inventory.InventoryPickupItemEvent; import org.bukkit.inventory.Inventory; import org.bukkit.inventory.InventoryHolder; import org.bukkit.inventory.ItemStack; import org.bukkit.inventory.meta.ItemMeta; import info.tregmine.Tregmine; import info.tregmine.api.TregminePlayer; import info.tregmine.api.InventoryAccess; import info.tregmine.api.lore.Created; import info.tregmine.database.DAOException; import info.tregmine.database.IContext; import info.tregmine.database.IInventoryDAO; import static info.tregmine.database.IInventoryDAO.InventoryType; import static info.tregmine.database.IInventoryDAO.ChangeType; public class InventoryListener implements Listener { private Tregmine plugin; private Map<InventoryHolder, ItemStack[]> openInventories; public InventoryListener(Tregmine instance) { this.plugin = instance; this.openInventories = new HashMap<>(); } @EventHandler public void onInventoryOpen(InventoryOpenEvent event) { if (!(event.getPlayer() instanceof Player)) { return; } TregminePlayer player = plugin.getPlayer((Player)event.getPlayer()); Inventory inv = event.getInventory(); InventoryHolder holder = inv.getHolder(); Location loc = null; if (holder instanceof BlockState) { BlockState block = (BlockState)holder; loc = block.getLocation(); } else if (holder instanceof DoubleChest) { DoubleChest block = (DoubleChest)holder; loc = block.getLocation(); } else { return; } ItemStack[] contents = inv.getContents(); ItemStack[] copy = new ItemStack[contents.length]; for (int i = 0; i < contents.length; i++) { if (contents[i] != null) { copy[i] = contents[i].clone(); } } openInventories.put(holder, contents); try (IContext ctx = plugin.createContext()) { IInventoryDAO invDAO = ctx.getInventoryDAO(); // Find inventory id, or create a new row if none exists int id = invDAO.getInventoryId(loc); if (id == -1) { id = invDAO.insertInventory(player, loc, InventoryType.BLOCK); } else { List<InventoryAccess> accessLog = invDAO.getAccessLog(id, 5); player.sendMessage(ChatColor.YELLOW + "Last accessed by:"); SimpleDateFormat dfm = new SimpleDateFormat("dd/MM/yy hh:mm:ss a"); for (InventoryAccess access : accessLog) { TregminePlayer p = plugin.getPlayerOffline(access.getPlayerId()); player.sendMessage(p.getChatName() + ChatColor.YELLOW + " on " + dfm.format(access.getTimestamp()) + "."); } } // Insert into access log invDAO.insertAccessLog(player, id); } catch (DAOException e) { throw new RuntimeException(e); } } @EventHandler public void onInventoryClose(InventoryCloseEvent event) { if (!(event.getPlayer() instanceof Player)) { return; } TregminePlayer player = plugin.getPlayer((Player)event.getPlayer()); Inventory inv = event.getInventory(); InventoryHolder holder = 
inv.getHolder(); Location loc = null; if (holder instanceof BlockState) { BlockState block = (BlockState)holder; loc = block.getLocation(); } else if (holder instanceof DoubleChest) { DoubleChest block = (DoubleChest)holder; loc = block.getLocation(); } else { return; } if (!openInventories.containsKey(holder)) { Tregmine.LOGGER.info("Holder not found."); return; } Tregmine.LOGGER.info(player.getRealName() + " closed inventory: " + "x=" + loc.getBlockX() + " " + "y=" + loc.getBlockY() + " " + "z=" + loc.getBlockZ()); ItemStack[] oldContents = openInventories.get(holder); ItemStack[] currentContents = inv.getContents(); assert oldContents.length == currentContents.length; try (IContext ctx = plugin.createContext()) { IInventoryDAO invDAO = ctx.getInventoryDAO(); // Find inventory id, or create a new row if none exists int id = invDAO.getInventoryId(loc); if (id == -1) { Tregmine.LOGGER.warning("Inventory id " + id + " not found!"); return; } // Store all changes for (int i = 0; i < oldContents.length; i++) { ItemStack a = oldContents[i]; ItemStack b = currentContents[i]; if (a == null && b == null) { continue; } if (a == null || b == null || !a.equals(b)) { Tregmine.LOGGER.info("Slot " + i + " changed. Was " + a + " and is " + b); // Removed if (a != null) { invDAO.insertChangeLog(player, id, i, a, ChangeType.REMOVE); } // Added if (b != null) { invDAO.insertChangeLog(player, id, i, b, ChangeType.ADD); } } } // Store contents invDAO.insertStacks(id, currentContents); } catch (DAOException e) { throw new RuntimeException(e); } openInventories.remove(holder); /*Player player = (Player) event.getPlayer(); if (player.getGameMode() == GameMode.CREATIVE) { for (ItemStack item : player.getInventory().getContents()) { if (item != null) { ItemMeta meta = item.getItemMeta(); List<String> lore = new ArrayList<String>(); lore.add(Created.CREATIVE.toColorString()); TregminePlayer p = this.plugin.getPlayer(player); lore.add(ChatColor.WHITE + "by: " + p.getChatName()); lore.add(ChatColor.WHITE + "Value: " + ChatColor.MAGIC + "0000" + ChatColor.RESET + ChatColor.WHITE + " Treg"); meta.setLore(lore); item.setItemMeta(meta); } } }*/ } @EventHandler public void onInventoryCreative(InventoryCreativeEvent event) { Tregmine.LOGGER.info("InventoryCreative"); Tregmine.LOGGER.info(event.getInventory().getHolder().toString()); } }
src/info/tregmine/listeners/InventoryListener.java
package info.tregmine.listeners; import java.util.List; import java.util.ArrayList; import java.util.Map; import java.util.HashMap; import java.text.SimpleDateFormat; import org.bukkit.ChatColor; import org.bukkit.GameMode; import org.bukkit.Location; import org.bukkit.block.BlockState; import org.bukkit.entity.Player; import org.bukkit.event.EventHandler; import org.bukkit.event.Listener; import org.bukkit.event.inventory.InventoryClickEvent; import org.bukkit.event.inventory.InventoryCloseEvent; import org.bukkit.event.inventory.InventoryCreativeEvent; import org.bukkit.event.inventory.InventoryDragEvent; import org.bukkit.event.inventory.InventoryInteractEvent; import org.bukkit.event.inventory.InventoryMoveItemEvent; import org.bukkit.event.inventory.InventoryOpenEvent; import org.bukkit.event.inventory.InventoryPickupItemEvent; import org.bukkit.inventory.Inventory; import org.bukkit.inventory.InventoryHolder; import org.bukkit.inventory.ItemStack; import org.bukkit.inventory.meta.ItemMeta; import info.tregmine.Tregmine; import info.tregmine.api.TregminePlayer; import info.tregmine.api.InventoryAccess; import info.tregmine.api.lore.Created; import info.tregmine.database.DAOException; import info.tregmine.database.IContext; import info.tregmine.database.IInventoryDAO; import static info.tregmine.database.IInventoryDAO.InventoryType; import static info.tregmine.database.IInventoryDAO.ChangeType; public class InventoryListener implements Listener { private Tregmine plugin; private Map<InventoryHolder, ItemStack[]> openInventories; public InventoryListener(Tregmine instance) { this.plugin = instance; this.openInventories = new HashMap<>(); } @EventHandler public void onInventoryOpen(InventoryOpenEvent event) { if (!(event.getPlayer() instanceof Player)) { return; } TregminePlayer player = plugin.getPlayer((Player)event.getPlayer()); Inventory inv = event.getInventory(); InventoryHolder holder = inv.getHolder(); if (!(holder instanceof BlockState)) { return; } BlockState block = (BlockState)holder; Location loc = block.getLocation(); ItemStack[] contents = inv.getContents(); ItemStack[] copy = new ItemStack[contents.length]; for (int i = 0; i < contents.length; i++) { if (contents[i] != null) { copy[i] = contents[i].clone(); } } openInventories.put(holder, contents); try (IContext ctx = plugin.createContext()) { IInventoryDAO invDAO = ctx.getInventoryDAO(); // Find inventory id, or create a new row if none exists int id = invDAO.getInventoryId(loc); if (id == -1) { id = invDAO.insertInventory(player, loc, InventoryType.BLOCK); } else { List<InventoryAccess> accessLog = invDAO.getAccessLog(id, 5); player.sendMessage(ChatColor.YELLOW + "Last accessed by:"); SimpleDateFormat dfm = new SimpleDateFormat("dd/MM/yy hh:mm:ss a"); for (InventoryAccess access : accessLog) { TregminePlayer p = plugin.getPlayerOffline(access.getPlayerId()); player.sendMessage(p.getChatName() + ChatColor.YELLOW + " on " + dfm.format(access.getTimestamp()) + "."); } } // Insert into access log invDAO.insertAccessLog(player, id); } catch (DAOException e) { throw new RuntimeException(e); } } @EventHandler public void onInventoryClose(InventoryCloseEvent event) { if (!(event.getPlayer() instanceof Player)) { return; } TregminePlayer player = plugin.getPlayer((Player)event.getPlayer()); Inventory inv = event.getInventory(); InventoryHolder holder = inv.getHolder(); if (!(holder instanceof BlockState)) { return; } if (!openInventories.containsKey(holder)) { Tregmine.LOGGER.info("Holder not found."); return; } BlockState 
block = (BlockState)holder; Location loc = block.getLocation(); Tregmine.LOGGER.info(player.getRealName() + " closed inventory: " + "x=" + loc.getBlockX() + " " + "y=" + loc.getBlockY() + " " + "z=" + loc.getBlockZ()); ItemStack[] oldContents = openInventories.get(holder); ItemStack[] currentContents = inv.getContents(); assert oldContents.length == currentContents.length; try (IContext ctx = plugin.createContext()) { IInventoryDAO invDAO = ctx.getInventoryDAO(); // Find inventory id, or create a new row if none exists int id = invDAO.getInventoryId(loc); if (id == -1) { Tregmine.LOGGER.warning("Inventory id " + id + " not found!"); return; } // Store all changes for (int i = 0; i < oldContents.length; i++) { ItemStack a = oldContents[i]; ItemStack b = currentContents[i]; if (a == null && b == null) { continue; } if (a == null || b == null || !a.equals(b)) { Tregmine.LOGGER.info("Slot " + i + " changed. Was " + a + " and is " + b); // Removed if (a != null) { invDAO.insertChangeLog(player, id, i, a, ChangeType.REMOVE); } // Added if (b != null) { invDAO.insertChangeLog(player, id, i, b, ChangeType.ADD); } } } // Store contents invDAO.insertStacks(id, currentContents); } catch (DAOException e) { throw new RuntimeException(e); } openInventories.remove(holder); /*Player player = (Player) event.getPlayer(); if (player.getGameMode() == GameMode.CREATIVE) { for (ItemStack item : player.getInventory().getContents()) { if (item != null) { ItemMeta meta = item.getItemMeta(); List<String> lore = new ArrayList<String>(); lore.add(Created.CREATIVE.toColorString()); TregminePlayer p = this.plugin.getPlayer(player); lore.add(ChatColor.WHITE + "by: " + p.getChatName()); lore.add(ChatColor.WHITE + "Value: " + ChatColor.MAGIC + "0000" + ChatColor.RESET + ChatColor.WHITE + " Treg"); meta.setLore(lore); item.setItemMeta(meta); } } }*/ } @EventHandler public void onInventoryCreative(InventoryCreativeEvent event) { Tregmine.LOGGER.info("InventoryCreative"); Tregmine.LOGGER.info(event.getInventory().getHolder().toString()); } }
Support for double chests in log
src/info/tregmine/listeners/InventoryListener.java
Support for double chests in log
<ide><path>rc/info/tregmine/listeners/InventoryListener.java
<ide> import org.bukkit.GameMode;
<ide> import org.bukkit.Location;
<ide> import org.bukkit.block.BlockState;
<add>import org.bukkit.block.DoubleChest;
<ide> import org.bukkit.entity.Player;
<ide> import org.bukkit.event.EventHandler;
<ide> import org.bukkit.event.Listener;
<ide>
<ide> Inventory inv = event.getInventory();
<ide> InventoryHolder holder = inv.getHolder();
<del> if (!(holder instanceof BlockState)) {
<del> return;
<del> }
<del>
<del> BlockState block = (BlockState)holder;
<del> Location loc = block.getLocation();
<add> Location loc = null;
<add> if (holder instanceof BlockState) {
<add> BlockState block = (BlockState)holder;
<add> loc = block.getLocation();
<add> }
<add> else if (holder instanceof DoubleChest) {
<add> DoubleChest block = (DoubleChest)holder;
<add> loc = block.getLocation();
<add> }
<add> else {
<add> return;
<add> }
<ide>
<ide> ItemStack[] contents = inv.getContents();
<ide> ItemStack[] copy = new ItemStack[contents.length];
<ide>
<ide> Inventory inv = event.getInventory();
<ide> InventoryHolder holder = inv.getHolder();
<del> if (!(holder instanceof BlockState)) {
<add> Location loc = null;
<add> if (holder instanceof BlockState) {
<add> BlockState block = (BlockState)holder;
<add> loc = block.getLocation();
<add> }
<add> else if (holder instanceof DoubleChest) {
<add> DoubleChest block = (DoubleChest)holder;
<add> loc = block.getLocation();
<add> }
<add> else {
<ide> return;
<ide> }
<ide>
<ide> Tregmine.LOGGER.info("Holder not found.");
<ide> return;
<ide> }
<del>
<del> BlockState block = (BlockState)holder;
<del> Location loc = block.getLocation();
<ide>
<ide> Tregmine.LOGGER.info(player.getRealName() + " closed inventory: " +
<ide> "x=" + loc.getBlockX() + " " +
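The change above replaces the single "holder instanceof BlockState" guard with a holder-to-location lookup that also understands double chests, and applies the same pattern in both onInventoryOpen and onInventoryClose. Below is a minimal sketch of that lookup, written against the Bukkit types the listener already imports; the helper class and method names are illustrative and not part of the plugin.

import org.bukkit.Location;
import org.bukkit.block.BlockState;
import org.bukkit.block.DoubleChest;
import org.bukkit.inventory.InventoryHolder;

// Illustrative helper, not part of the Tregmine code base: resolves the block
// location of a container inventory, covering both single-block holders and
// double chests (which are InventoryHolders but not BlockStates).
public final class HolderLocations {
    private HolderLocations() {}

    public static Location resolveLocation(InventoryHolder holder) {
        if (holder instanceof BlockState) {
            return ((BlockState) holder).getLocation();
        }
        if (holder instanceof DoubleChest) {
            return ((DoubleChest) holder).getLocation();
        }
        return null; // players, minecarts and other holders are not logged
    }
}

A caller that gets null back simply skips logging, which matches the early return the listener keeps for non-block holders.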
Java
apache-2.0
6584f0bdd8858b1fb5bdb388778dbb8284977480
0
gagatust/nd4j,huitseeker/nd4j,deeplearning4j/nd4j,smarthi/nd4j,ambraspace/nd4j
package org.nd4j.linalg.dataset.api.preprocessor.serializer; import lombok.NonNull; import lombok.Value; import org.nd4j.linalg.dataset.api.preprocessor.Normalizer; import java.io.*; import java.util.ArrayList; import java.util.List; /** * Utility for serializing and unserializing {@link Normalizer} instances. * * @author Ede Meijer */ public class NormalizerSerializer { private static final String HEADER = "NORMALIZER"; private static NormalizerSerializer defaultSerializer; private List<NormalizerSerializerStrategy> strategies = new ArrayList<>(); /** * Serialize a normalizer to the given file * * @param normalizer the normalizer * @param file the destination file * @throws IOException */ public void write(@NonNull Normalizer normalizer, @NonNull File file) throws IOException { try (OutputStream out = new BufferedOutputStream(new FileOutputStream(file))) { write(normalizer, out); } } /** * Serialize a normalizer to the given file path * * @param normalizer the normalizer * @param path the destination file path * @throws IOException */ public void write(@NonNull Normalizer normalizer, @NonNull String path) throws IOException { try (OutputStream out = new BufferedOutputStream(new FileOutputStream(path))) { write(normalizer, out); } } /** * Serialize a normalizer to an output stream * * @param normalizer the normalizer * @param stream the output stream to write to * @throws IOException */ public void write(@NonNull Normalizer normalizer, @NonNull OutputStream stream) throws IOException { NormalizerSerializerStrategy strategy = getStrategy(normalizer); writeHeader(stream, Header.fromStrategy(strategy)); //noinspection unchecked strategy.write(normalizer, stream); } /** * Restore a normalizer from the given path * * @param path path of the file containing a serialized normalizer * @return the restored normalizer * @throws IOException */ public <T extends Normalizer> T restore(@NonNull String path) throws Exception { try (InputStream in = new BufferedInputStream(new FileInputStream(path))) { return restore(in); } } /** * Restore a normalizer from the given file * * @param file the file containing a serialized normalizer * @return the restored normalizer * @throws IOException */ public <T extends Normalizer> T restore(@NonNull File file) throws Exception { try (InputStream in = new BufferedInputStream(new FileInputStream(file))) { return restore(in); } } /** * Restore a normalizer from an input stream * * @param stream a stream of serialized normalizer data * @return the restored normalizer * @throws IOException */ public <T extends Normalizer> T restore(@NonNull InputStream stream) throws Exception { Header header = parseHeader(stream); //noinspection unchecked return (T) getStrategy(header).restore(stream); } /** * Get the default serializer configured with strategies for the built-in normalizer implementations * * @return the default serializer */ public static NormalizerSerializer getDefault() { if (defaultSerializer == null) { defaultSerializer = new NormalizerSerializer() .addStrategy(new StandardizeSerializerStrategy()) .addStrategy(new MinMaxSerializerStrategy()) .addStrategy(new MultiStandardizeSerializerStrategy()) .addStrategy(new MultiMinMaxSerializerStrategy()) .addStrategy(new MultiHybridSerializerStrategy()); } return defaultSerializer; } /** * Add a normalizer serializer strategy * * @param strategy the new strategy * @return self */ public NormalizerSerializer addStrategy(@NonNull NormalizerSerializerStrategy strategy) { strategies.add(strategy); return this; } /** * Get a 
serializer strategy the given normalizer * * @param normalizer the normalizer to find a compatible serializer strategy for * @return the compatible strategy */ private NormalizerSerializerStrategy getStrategy(Normalizer normalizer) { for (NormalizerSerializerStrategy strategy : strategies) { if (strategySupportsNormalizer(strategy, normalizer.getType(), normalizer.getClass())) { return strategy; } } throw new RuntimeException(String.format( "No serializer strategy found for normalizer of class %s. If this is a custom normalizer, you probably" + "forgot to register a corresponding custom serializer strategy with this serializer.", normalizer.getClass() )); } /** * Get a serializer strategy the given serialized file header * * @param header the header to find the associated serializer strategy for * @return the compatible strategy */ private NormalizerSerializerStrategy getStrategy(Header header) throws Exception { if (header.normalizerType.equals(NormalizerType.CUSTOM)) { return header.customStrategyClass.newInstance(); } for (NormalizerSerializerStrategy strategy : strategies) { if (strategySupportsNormalizer(strategy, header.normalizerType, null)) { return strategy; } } throw new RuntimeException("No serializer strategy found for given header " + header); } /** * Check if a serializer strategy supports a normalizer. If the normalizer is a custom type, it checks if the * supported normalizer class matches. * * @param strategy * @param normalizerType * @param normalizerClass * @return whether the strategy supports the normalizer */ private boolean strategySupportsNormalizer(NormalizerSerializerStrategy strategy, NormalizerType normalizerType, Class<? extends Normalizer> normalizerClass) { if (!strategy.getSupportedType().equals(normalizerType)) { return false; } if (strategy.getSupportedType().equals(NormalizerType.CUSTOM)) { // Strategy should be instance of CustomSerializerStrategy if (!(strategy instanceof CustomSerializerStrategy)) { throw new IllegalArgumentException( "Strategies supporting CUSTOM type must be instance of CustomSerializerStrategy, got" + strategy.getClass() ); } return ((CustomSerializerStrategy) strategy).getSupportedClass().equals(normalizerClass); } return true; } /** * Parse the data header * * @param stream the input stream * @return the parsed header with information about the contents * @throws IOException * @throws IllegalArgumentException if the data format is invalid */ private Header parseHeader(InputStream stream) throws IOException, ClassNotFoundException { DataInputStream dis = new DataInputStream(stream); // Check if the stream starts with the expected header String header = dis.readUTF(); if (!header.equals(HEADER)) { throw new IllegalArgumentException( "Could not restore normalizer: invalid header. If this normalizer was saved with a type-specific " + "strategy like StandardizeSerializerStrategy, use that class to restore it as well." ); } // The next byte is an integer indicating the version int version = dis.readInt(); // Right now, we only support version 1 if (version != 1) { throw new IllegalArgumentException("Could not restore normalizer: invalid version (" + version + ")"); } // The next value is a string indicating the normalizer type NormalizerType type = NormalizerType.valueOf(dis.readUTF()); if (type.equals(NormalizerType.CUSTOM)) { // For custom serializers, the next value is a string with the class name String strategyClassName = dis.readUTF(); //noinspection unchecked return new Header(type, (Class<? 
extends NormalizerSerializerStrategy>) Class.forName(strategyClassName)); } else { return new Header(type, null); } } /** * Write the data header * * @param stream the output stream * @param header the header to write * @throws IOException */ private void writeHeader(OutputStream stream, Header header) throws IOException { DataOutputStream dos = new DataOutputStream(stream); dos.writeUTF(HEADER); // Write the current version dos.writeInt(1); // Write the normalizer type dos.writeUTF(header.normalizerType.toString()); // If the header contains a custom class name, write that too if (header.customStrategyClass != null) { dos.writeUTF(header.customStrategyClass.getName()); } } /** * Represents the header of a serialized normalizer file */ @Value private static class Header { NormalizerType normalizerType; Class<? extends NormalizerSerializerStrategy> customStrategyClass; public static Header fromStrategy(NormalizerSerializerStrategy strategy) { if (strategy instanceof CustomSerializerStrategy) { return new Header( strategy.getSupportedType(), strategy.getClass() ); } else { return new Header(strategy.getSupportedType(), null); } } } }
nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/dataset/api/preprocessor/serializer/NormalizerSerializer.java
package org.nd4j.linalg.dataset.api.preprocessor.serializer; import lombok.NonNull; import lombok.Value; import org.nd4j.linalg.dataset.api.preprocessor.Normalizer; import java.io.*; import java.util.ArrayList; import java.util.List; /** * Utility for serializing and unserializing {@link Normalizer} instances. * * @author Ede Meijer */ public class NormalizerSerializer { private static final String HEADER = "NORMALIZER"; private static NormalizerSerializer defaultSerializer; private List<NormalizerSerializerStrategy> strategies = new ArrayList<>(); /** * Serialize a normalizer to the given file * * @param normalizer the normalizer * @param file the destination file * @throws IOException */ public void write(@NonNull Normalizer normalizer, @NonNull File file) throws IOException { try (OutputStream out = new BufferedOutputStream(new FileOutputStream(file))) { write(normalizer, out); } } /** * Serialize a normalizer to the given file path * * @param normalizer the normalizer * @param path the destination file path * @throws IOException */ public void write(@NonNull Normalizer normalizer, @NonNull String path) throws IOException { try (OutputStream out = new BufferedOutputStream(new FileOutputStream(path))) { write(normalizer, out); } } /** * Serialize a normalizer to an output stream * * @param normalizer the normalizer * @param stream the output stream to write to * @throws IOException */ public void write(@NonNull Normalizer normalizer, @NonNull OutputStream stream) throws IOException { NormalizerSerializerStrategy strategy = getStrategy(normalizer); writeHeader(stream, Header.fromStrategy(strategy)); //noinspection unchecked strategy.write(normalizer, stream); } /** * Restore a normalizer from the given path * * @param path path of the file containing a serialized normalizer * @return the restored normalizer * @throws IOException */ public <T extends Normalizer> T restore(@NonNull String path) throws Exception { try (InputStream in = new BufferedInputStream(new FileInputStream(path))) { return restore(in); } } /** * Restore a normalizer from the given file * * @param file the file containing a serialized normalizer * @return the restored normalizer * @throws IOException */ public <T extends Normalizer> T restore(@NonNull File file) throws Exception { try (InputStream in = new BufferedInputStream(new FileInputStream(file))) { return restore(in); } } /** * Restore a normalizer from an input stream * * @param stream a stream of serialized normalizer data * @return the restored normalizer * @throws IOException */ public <T extends Normalizer> T restore(@NonNull InputStream stream) throws Exception { Header header = parseHeader(stream); //noinspection unchecked return (T) getStrategy(header).restore(stream); } /** * Get the default serializer configured with strategies for the built-in normalizer implementations * * @return the default serializer */ public static NormalizerSerializer getDefault() { if (defaultSerializer == null) { defaultSerializer = new NormalizerSerializer() .addStrategy(new StandardizeSerializerStrategy()) .addStrategy(new MinMaxSerializerStrategy()) .addStrategy(new MultiStandardizeSerializerStrategy()) .addStrategy(new MultiMinMaxSerializerStrategy()) .addStrategy(new MultiHybridSerializerStrategy()); } return defaultSerializer; } /** * Add a normalizer serializer strategy * * @param strategy the new strategy * @return self */ public NormalizerSerializer addStrategy(@NonNull NormalizerSerializerStrategy strategy) { strategies.add(strategy); return this; } /** * Get a 
serializer strategy the given normalizer * * @param normalizer the normalizer to find a compatible serializer strategy for * @return the compatible strategy */ private NormalizerSerializerStrategy getStrategy(Normalizer normalizer) { for (NormalizerSerializerStrategy strategy : strategies) { if (strategySupportsNormalizer(strategy, normalizer.getType(), normalizer.getClass())) { return strategy; } } throw new RuntimeException("No serializer strategy found for normalizer of class " + normalizer.getClass()); } /** * Get a serializer strategy the given serialized file header * * @param header the header to find the associated serializer strategy for * @return the compatible strategy */ private NormalizerSerializerStrategy getStrategy(Header header) throws Exception { if (header.normalizerType.equals(NormalizerType.CUSTOM)) { return header.customStrategyClass.newInstance(); } for (NormalizerSerializerStrategy strategy : strategies) { if (strategySupportsNormalizer(strategy, header.normalizerType, null)) { return strategy; } } throw new RuntimeException("No serializer strategy found for given header " + header); } /** * Check if a serializer strategy supports a normalizer. If the normalizer is a custom type, it checks if the * supported normalizer class matches. * * @param strategy * @param normalizerType * @param normalizerClass * @return whether the strategy supports the normalizer */ private boolean strategySupportsNormalizer(NormalizerSerializerStrategy strategy, NormalizerType normalizerType, Class<? extends Normalizer> normalizerClass) { if (!strategy.getSupportedType().equals(normalizerType)) { return false; } if (strategy.getSupportedType().equals(NormalizerType.CUSTOM)) { // Strategy should be instance of CustomSerializerStrategy if (!(strategy instanceof CustomSerializerStrategy)) { throw new IllegalArgumentException( "Strategies supporting CUSTOM type must be instance of CustomSerializerStrategy, got" + strategy.getClass() ); } return ((CustomSerializerStrategy) strategy).getSupportedClass().equals(normalizerClass); } return true; } /** * Parse the data header * * @param stream the input stream * @return the parsed header with information about the contents * @throws IOException * @throws IllegalArgumentException if the data format is invalid */ private Header parseHeader(InputStream stream) throws IOException, ClassNotFoundException { DataInputStream dis = new DataInputStream(stream); // Check if the stream starts with the expected header String header = dis.readUTF(); if (!header.equals(HEADER)) { throw new IllegalArgumentException( "Could not restore normalizer: invalid header. If this normalizer was saved with a type-specific " + "strategy like StandardizeSerializerStrategy, use that class to restore it as well." ); } // The next byte is an integer indicating the version int version = dis.readInt(); // Right now, we only support version 1 if (version != 1) { throw new IllegalArgumentException("Could not restore normalizer: invalid version (" + version + ")"); } // The next value is a string indicating the normalizer type NormalizerType type = NormalizerType.valueOf(dis.readUTF()); if (type.equals(NormalizerType.CUSTOM)) { // For custom serializers, the next value is a string with the class name String strategyClassName = dis.readUTF(); //noinspection unchecked return new Header(type, (Class<? 
extends NormalizerSerializerStrategy>) Class.forName(strategyClassName)); } else { return new Header(type, null); } } /** * Write the data header * * @param stream the output stream * @param header the header to write * @throws IOException */ private void writeHeader(OutputStream stream, Header header) throws IOException { DataOutputStream dos = new DataOutputStream(stream); dos.writeUTF(HEADER); // Write the current version dos.writeInt(1); // Write the normalizer type dos.writeUTF(header.normalizerType.toString()); // If the header contains a custom class name, write that too if (header.customStrategyClass != null) { dos.writeUTF(header.customStrategyClass.getName()); } } /** * Represents the header of a serialized normalizer file */ @Value private static class Header { NormalizerType normalizerType; Class<? extends NormalizerSerializerStrategy> customStrategyClass; public static Header fromStrategy(NormalizerSerializerStrategy strategy) { if (strategy instanceof CustomSerializerStrategy) { return new Header( strategy.getSupportedType(), strategy.getClass() ); } else { return new Header(strategy.getSupportedType(), null); } } } }
Add hint to register custom normalizer serializer strategy to exception message
nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/dataset/api/preprocessor/serializer/NormalizerSerializer.java
Add hint to register custom normalizer serializer strategy to exception message
<ide><path>d4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/dataset/api/preprocessor/serializer/NormalizerSerializer.java
<ide> return strategy;
<ide> }
<ide> }
<del> throw new RuntimeException("No serializer strategy found for normalizer of class " + normalizer.getClass());
<add> throw new RuntimeException(String.format(
<add> "No serializer strategy found for normalizer of class %s. If this is a custom normalizer, you probably" +
<add> "forgot to register a corresponding custom serializer strategy with this serializer.",
<add> normalizer.getClass()
<add> ));
<ide> }
<ide>
<ide> /**
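The reworded exception points users at registering a custom strategy before serializing. A short sketch of that registration path, using only calls that appear in this file; MyNormalizer and MyNormalizerSerializerStrategy are hypothetical stand-ins for a user-defined normalizer and the CustomSerializerStrategy written for it.

import java.io.File;
import org.nd4j.linalg.dataset.api.preprocessor.serializer.NormalizerSerializer;

// Hypothetical usage: MyNormalizer and MyNormalizerSerializerStrategy are
// placeholders for user-defined classes, not part of ND4J.
class CustomNormalizerRoundTrip {
    static void roundTrip(MyNormalizer normalizer) throws Exception {
        // Register the custom strategy first; otherwise getStrategy() raises the
        // RuntimeException whose message this commit extends.
        NormalizerSerializer serializer = NormalizerSerializer.getDefault()
                .addStrategy(new MyNormalizerSerializerStrategy());

        File target = new File("my-normalizer.bin");
        serializer.write(normalizer, target);                // header + strategy-specific payload
        MyNormalizer restored = serializer.restore(target);  // dispatches on the stored type/class
    }
}

Because getDefault() caches a single serializer instance, the custom strategy only needs to be registered once per JVM.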
JavaScript
mit
02e0755f30e638b4d439eb32d55f8e8e6146f687
0
psnovichkov/narrative,kbase/narrative,jmchandonia/narrative,mlhenderson/narrative,briehl/narrative,msneddon/narrative,pranjan77/narrative
/*global define,Jupyter*/ /*jslint white: true*/ /** * @author Michael Sneddon <[email protected]> * @public */ define(['jquery', 'underscore', 'narrativeConfig', 'Util/String', 'Util/Display', 'kbase-client-api', 'jquery-nearest', 'kbwidget', 'kbaseAuthenticatedWidget', 'kbaseNarrativeDownloadPanel'], function ($, _, Config, StringUtil, DisplayUtil) { 'use strict'; $.KBWidget({ name: 'kbaseNarrativeDataList', parent: 'kbaseAuthenticatedWidget', version: '1.0.0', options: { ws_name: null, // must be the WS name, not the WS Numeric ID ws_url: Config.url('workspace'), landing_page_url: "/functional-site/#/", // !! always include trailing slash lp_url: Config.url('landing_pages'), profile_page_url: Config.url('profile_page'), user_name_fetch_url: "https://kbase.us/services/genome_comparison/users?usernames=", loadingImage: Config.get('loading_gif'), methodStoreURL: Config.url('narrative_method_store'), ws_chunk_size: 10000, // this is the limit of the number of objects to retrieve from the ws on each pass ws_max_objs_to_fetch: 75000, // this is the total limit of the number of objects before we stop trying to get more // note that if there are more objects than this, then sorts/search filters may // not show accurate results objs_to_render_to_start: 40, // initial number of rows to display objs_to_render_on_scroll: 5, // number of rows to add when the user scrolls to the bottom, should be <=5, much more and // the addition of new rows becomes jerky max_objs_to_prevent_filter_as_you_type_in_search: 50000, //if there are more than this # of objs, user must click search //instead of updating as you type max_objs_to_prevent_initial_sort: 10000, // initial sort makes loading slower, so we can turn it off if // there are more than this number of objects max_name_length: 33, refresh_interval: 30000, parentControlPanel: null }, // private variables mainListPanelHeight: '340px', refreshTimer: null, ws_name: null, ws: null, ws_last_update_timestamp: null, ws_obj_count: null, n_objs_rendered: 0, real_name_lookup: {}, $searchInput: null, $filterTypeSelect: null, availableTypes: {}, $searchDiv: null, $sortByDiv: null, $filterTypeDiv: null, $addDataButton: null, $controllerDiv: null, $mainListDiv: null, mainListId: null, $loadingDiv: null, methClient: null, obj_list: [], obj_data: {}, // old style - type_name : info my_user_id: null, /** * @method init * Builds the DOM structure for the widget. * Includes the tables and panel. * If any data was passed in (options.data), that gets shoved into the datatable. * @param {Object} - the options set. * @returns {Object} this shiny new widget. 
* @private */ init: function (options) { this._super(options); var self = this; this.$controllerDiv = $('<div>'); this.$elem.append(this.$controllerDiv); this.renderController(); this.$loadingDiv = $('<div>').addClass('kb-data-loading') .append('<img src="' + this.options.loadingImage + '">') .hide(); this.$elem.append(this.$loadingDiv); this.mainListId = StringUtil.uuid(); this.$mainListDiv = $('<div id=' + this.mainListId + '>') .css({'overflow-x': 'hidden', 'overflow-y': 'auto', 'height': this.mainListPanelHeight}) .on('scroll', function () { if ($(this).scrollTop() + $(this).innerHeight() >= this.scrollHeight) { self.renderMore(); } }); this.$addDataButton = $('<span>').addClass('kb-data-list-add-data-button fa fa-plus fa-2x') .css({'position': 'absolute', bottom: '15px', right: '25px', 'z-index': '5'}) .click(function () { self.trigger('hideGalleryPanelOverlay.Narrative'); self.trigger('toggleSidePanelOverlay.Narrative', self.options.parentControlPanel.$overlayPanel); }); var $mainListDivContainer = $('<div>').css({'position': 'relative'}) .append(this.$mainListDiv) .append(this.$addDataButton.hide()); this.$elem.append($mainListDivContainer); if (this._attributes.auth) { this.ws = new Workspace(this.options.ws_url, this._attributes.auth); } // listener for refresh $(document).on('updateDataList.Narrative', function () { self.refresh(); }) if (this.options.ws_name) { this.ws_name = this.options.ws_name; } this.methClient = new NarrativeMethodStore(this.options.methodStoreURL); return this; }, setWorkspace: function (ws_name) { this.ws_name = ws_name; // this.ws_name = "janakacore"; // for testing a bigish workspace //this.ws_name = "KBasePublicGenomesV4"; // for testing a very big workspace this.refresh(); }, refresh: function (showError) { var self = this; console.log('DataList: refresh -- ' + self.ws_name); // Set the refresh timer on the first refresh. 
From here, it'll refresh itself // every this.options.refresh_interval (30000) ms if (self.refreshTimer === null) { self.refreshTimer = setInterval(function () { self.refresh(); }, this.options.refresh_interval); // check if there is new data every X ms } if (self.ws_name && self.ws) { self.ws.get_workspace_info({ workspace: this.ws_name }, function (workspace_info) { //[0] ws_id id, [1] ws_name workspace, [2] username owner, [3] timestamp moddate, //[4] int object, [5] permission user_permission, [6] permission globalread, //[7] lock_status lockstat, [8] usermeta metadata //console.log('I have: '+self.ws_last_update_timestamp+ " remote has: "+workspace_info[3]); // Update RO or RW mode self.trigger("updateReadOnlyMode.Narrative", [self.ws, self.ws_name]); if (self.ws_last_update_timestamp) { if (self.ws_last_update_timestamp !== workspace_info[3]) { self.ws_last_update_timestamp = workspace_info[3]; self.ws_obj_count = workspace_info[4]; self.reloadWsData(); } else { //console.log('updating times'); self.refreshTimeStrings(); } } else { self.ws_last_update_timestamp = workspace_info[3]; self.ws_obj_count = workspace_info[4]; self.reloadWsData(); } }, function (error) { console.error('DataList: when checking for updates:', error); if (showError) { self.$mainListDiv.show(); self.$mainListDiv.empty(); self.$mainListDiv.append($('<div>').css({'color': '#F44336', 'margin': '10px'}) .append('Error: Unable to connect to KBase data.')); } self.hideLoading(); }); } else { console.error('DataList: missing variable(s)'); console.error('ws_name: ' + self.ws_name); console.error('ws: ' + self.ws); self.hideLoading(); /*Not really an error yet because we don't know what order things are being called var where = "kbaseNarrativeDataList.refresh"; if (!self.ws) { console.error(where, "Workspace not connected"); } else { console.error(where, "Workspace name is empty"); }*/ } }, refreshSpecificObject: function () { }, refreshTimeStrings: function () { var self = this; var newTime; var oldTime; if (self.objectList) { for (var i = 0; i < self.objectList.length; i++) { if (self.objectList[i].$div) { newTime = self.getTimeStampStr(self.objectList[i].info[3]); oldTime = self.objectList[i].$div.find('.kb-data-list-date').text(); if (newTime !== oldTime) { self.objectList[i].$div.find('.kb-data-list-date').text(newTime); } } } } }, reloadWsData: function () { var self = this; console.log('DataList: reloadWsData'); if (self.ws_name && self.ws) { // empty the existing object list first self.objectList = []; self.obj_data = {}; self.availableTypes = {}; self.getNextDataChunk(0); } }, getNextDataChunk: function (skip) { var self = this; console.log('DataList: getNextDataChunk - ' + skip); self.ws.list_objects({ workspaces: [self.ws_name], includeMetadata: 1, skip: skip, limit: self.options.ws_chunk_size }, function (infoList) { console.log('DataList: getNextDataChunk return'); console.log(infoList); // object_info: // [0] : obj_id objid // [1] : obj_name name // [2] : type_string type // [3] : timestamp save_date // [4] : int version // [5] : username saved_by // [6] : ws_id wsid // [7] : ws_name workspace // [8] : string chsum // [9] : int size // [10] : usermeta meta for (var i = 0; i < infoList.length; i++) { // skip narrative objects if (infoList[i][2].indexOf('KBaseNarrative') == 0) { continue; } self.objectList.push( { key: StringUtil.uuid(), // always generate the DnD key $div: null, //self.renderObjectRowDiv(infoList[i]), // we defer rendering the div until it is shown info: infoList[i], attached: false } ); 
var typeKey = infoList[i][2].split("-")[0]; if (!(typeKey in self.obj_data)) { self.obj_data[typeKey] = []; } self.obj_data[typeKey].push(infoList[i]); var typeName = typeKey.split('.')[1]; if (!(typeName in self.availableTypes)) { self.availableTypes[typeName] = { type: typeName, count: 0 }; } self.availableTypes[typeName].count++; } // if we have more than 2k objects, make them hit enter to search... self.$searchInput.off("input change blur"); self.$searchInput.on("change blur", function () { self.search(); }); if (self.objectList.length <= self.options.max_objs_to_prevent_filter_as_you_type_in_search) { self.$searchInput.on("input", function () { self.search(); }); self.$searchInput.on("keyup", function (e) { if (e.keyCode == 27) self.$searchDiv.hide(); }); } self.trigger('dataUpdated.Narrative'); //LOGIC: we keep trying to get more until we reach the ws_obj_count or untill the max // fetch count option, UNLESS the last call returned nothing, in which case we stop. //IMPORTANT NOTE: IN RARE CASES THIS DOES NOT GAURANTEE THAT WE GET ALL OBJECTS FROM //THIS WS!! IF THERE IS A CHUNK THAT RETURNED NOTHING, THERE STILL MAY BE MORE //OBJECTS DUE TO A BUG IN THE WORKSPACE THAT INCLUDES OLD VERSIONS AND DELETED VERSIONS //BEFORE FILTERING OUT THE NUMBER - A BETTER TEMP FIX WOULD BE TO LIMIT THE NUMBER OF //RECURSIONS TO 2 or 3 MAYBE... //BUT WHATEVER YOU DO PLEASE REMEMBER TO USE CAPITAL LETTERS EXTENSIVELY //OTHERWISE PEOPLE MIGHT NOT NOTICE WHAT YOU ARE SAYING AND THAT WOULD //BE EXTREMELY ANNOYING!!!! SERIOUSLY!!! if (self.objectList.length < self.ws_obj_count && self.objectList.length < self.options.ws_max_objs_to_fetch && infoList.length > 0) { self.getNextDataChunk(skip + self.options.ws_chunk_size); } else { if (self.objectList.length <= self.options.max_objs_to_prevent_initial_sort) { self.objectList.sort(function (a, b) { if (a.info[3] > b.info[3]) return -1; // sort by date if (a.info[3] < b.info[3]) return 1; // sort by date return 0; }); self.$elem.find('#nar-data-list-default-sort-label').addClass('active'); self.$elem.find('#nar-data-list-default-sort-option').attr('checked'); } } self.populateAvailableTypes(); self.renderList(); self.hideLoading(); }, function (error) { console.error(error); KBError("kbaseNarrativeDataList.getNextDataChunk", error.error.message); self.$mainListDiv.show(); self.$mainListDiv.empty(); self.$mainListDiv.append($('<div>').css({'color': '#F44336', 'margin': '10px'}) .append('Error: ' + error.error.message)); self.hideLoading(); }); }, getObjData: function (type, ignoreVersion) { if (type) { var dataSet = {}; if (typeof type === 'string') { type = [type]; } for (var i = 0; i < type.length; i++) { if (this.obj_data[type[i]]) { dataSet[type[i]] = this.obj_data[type[i]]; } } return dataSet; } return this.obj_data; }, $currentSelectedRow: null, selectedObject: null, setSelected: function ($selectedRow, object_info) { var self = this; if (self.$currentSelectedRow) { self.$currentSelectedRow.removeClass('kb-data-list-obj-row-selected'); } if (object_info[0] === self.selectedObject) { self.$currentSelectedRow = null; self.selectedObject = null; self.trigger('removeFilterMethods.Narrative'); } // } else { // $selectedRow.addClass('kb-data-list-obj-row-selected'); // self.$currentSelectedRow = $selectedRow; // self.selectedObject = object_info[0]; // self.trigger('filterMethods.Narrative', 'type:' + object_info[2].split('-')[0].split('.')[1]); // } }, addDataControls: function (object_info, $alertContainer) { var self = this; var $btnToolbar = $('<span>') 
.addClass('btn-group'); var btnClasses = "btn btn-xs btn-default"; var css = {'color': '#888'}; /*.append($('<div>').css({'text-align':'center','margin':'5pt'}) .append('<a href="'+landingPageLink+'" target="_blank">'+ 'explore data</a>&nbsp&nbsp|&nbsp&nbsp') .append('<a href="'+this.options.landing_page_url+'objgraphview/'+object_info[7] +'/'+object_info[1] +'" target="_blank">'+ 'view provenance</a><br>'))*/ var $filterMethodInput = $('<span>') .tooltip({ title: 'Show Methods with this as input', container: '#' + this.mainListId, delay: { show: Config.get('tooltip').showDelay, hide: Config.get('tooltip').hideDelay } }) .addClass(btnClasses) .append($('<span>').addClass('fa fa-sign-in').css(css)) .click(function (e) { this.trigger('filterMethods.Narrative', 'in_type:' + object_info[2].split('-')[0].split('.')[1]); }.bind(this)); var $filterMethodOutput = $('<span>') .tooltip({ title: 'Show Methods with this as output', container: '#' + this.mainListId, delay: { show: Config.get('tooltip').showDelay, hide: Config.get('tooltip').hideDelay } }) .addClass(btnClasses) .append($('<span>').addClass('fa fa-sign-out').css(css)) .click(function (e) { this.trigger('filterMethods.Narrative', 'out_type:' + object_info[2].split('-')[0].split('.')[1]); }.bind(this)); var $openLandingPage = $('<span>') .tooltip({ title: 'Explore data', container: '#' + this.mainListId, delay: { show: Config.get('tooltip').showDelay, hide: Config.get('tooltip').hideDelay } }) .addClass(btnClasses) .append($('<span>').addClass('fa fa-binoculars').css(css)) .click(function (e) { e.stopPropagation(); $alertContainer.empty(); var typeTokens = object_info[2].split('-')[0].split('.'); var landingPageLink = self.options.lp_url + object_info[6] + '/' + object_info[1]; window.open(landingPageLink); }); var $openHistory = $('<span>') .addClass(btnClasses).css(css) .tooltip({ title: 'View history to revert changes', container: 'body', delay: { show: Config.get('tooltip').showDelay, hide: Config.get('tooltip').hideDelay } }) .append($('<span>').addClass('fa fa-history').css(css)) .click(function (e) { e.stopPropagation(); $alertContainer.empty(); if (self.ws_name && self.ws) { self.ws.get_object_history({ref: object_info[6] + "/" + object_info[0]}, function (history) { $alertContainer.append($('<div>') .append($('<button>').addClass('kb-data-list-cancel-btn') .append('Hide History') .click(function () { $alertContainer.empty(); }))); history.reverse(); var $tbl = $('<table>').css({'width': '100%'}); for (var k = 0; k < history.length; k++) { var $revertBtn = $('<button>').append('v' + history[k][4]).addClass('kb-data-list-btn'); if (k == 0) { $revertBtn.tooltip({ title: 'Current Version', container: 'body', placement: 'bottom', delay: { show: Config.get('tooltip').showDelay, hide: Config.get('tooltip').hideDelay } }); } else { var revertRef = {wsid: history[k][6], objid: history[k][0], ver: history[k][4]}; (function (revertRefLocal) { $revertBtn.tooltip({ title: 'Revert to this version?', container: 'body', placement: 'bottom', delay: { show: Config.get('tooltip').showDelay, hide: Config.get('tooltip').hideDelay } }) .click(function () { self.ws.revert_object(revertRefLocal, function (reverted_obj_info) { self.refresh(); }, function (error) { console.error(error); $alertContainer.empty(); $alertContainer.append($('<span>').css({'color': '#F44336'}).append("Error! 
" + error.error.message)); }); }); })(revertRef); } $tbl.append($('<tr>') .append($('<td>').append($revertBtn)) .append($('<td>').append('Saved by ' + history[k][5] + '<br>' + self.getTimeStampStr(history[k][3]))) .append($('<td>').append($('<span>').css({margin: '4px'}).addClass('fa fa-info pull-right')) .tooltip({ title: history[k][2] + '<br>' + history[k][8] + '<br>' + history[k][9] + ' bytes', container: 'body', html: true, placement: 'bottom', delay: { show: Config.get('tooltip').showDelay, hide: Config.get('tooltip').hideDelay } }) )); } $alertContainer.append($tbl); }, function (error) { console.error(error); $alertContainer.empty(); $alertContainer.append($('<span>').css({'color': '#F44336'}).append("Error! " + error.error.message)); }); } }); var $openProvenance = $('<span>') .addClass(btnClasses).css(css) .tooltip({ title: 'View data provenance and relationships', container: 'body', delay: { show: Config.get('tooltip').showDelay, hide: Config.get('tooltip').hideDelay } }) .append($('<span>').addClass('fa fa-sitemap fa-rotate-90').css(css)) .click(function (e) { e.stopPropagation(); $alertContainer.empty(); window.open(self.options.landing_page_url + 'objgraphview/' + object_info[7] + '/' + object_info[1]); }); var $download = $('<span>') .addClass(btnClasses).css(css) .tooltip({ title: 'Export / Download data', container: 'body', delay: { show: Config.get('tooltip').showDelay, hide: Config.get('tooltip').hideDelay } }) .append($('<span>').addClass('fa fa-download').css(css)) .click(function (e) { e.stopPropagation(); $alertContainer.empty(); var type = object_info[2].split('-')[0]; var wsId = object_info[7]; var objId = object_info[1]; var downloadPanel = $('<div>'); $alertContainer.append(downloadPanel); downloadPanel.kbaseNarrativeDownloadPanel({token: self._attributes.auth.token, type: type, wsId: wsId, objId: objId}); }); var $rename = $('<span>') .addClass(btnClasses).css(css) .tooltip({ title: 'Rename data', container: 'body', delay: { show: Config.get('tooltip').showDelay, hide: Config.get('tooltip').hideDelay } }) .append($('<span>').addClass('fa fa-font').css(css)) .click(function (e) { e.stopPropagation(); $alertContainer.empty(); var $newNameInput = $('<input type="text">') .addClass('form-control') .val(object_info[1]) .on('focus', function () { if (Jupyter && Jupyter.narrative) { Jupyter.narrative.disableKeyboardManager(); } }) .on('blur', function () { if (Jupyter && Jupyter.narrative) { Jupyter.narrative.enableKeyboardManager(); } }); $alertContainer.append($('<div>') .append($('<div>').append("Warning: Apps using the old name may break.")) .append($('<div>').append($newNameInput)) .append($('<button>').addClass('kb-data-list-btn') .append('Rename') .click(function () { if (self.ws_name && self.ws) { self.ws.rename_object({ obj: {ref: object_info[6] + "/" + object_info[0]}, new_name: $newNameInput.val() }, function (renamed_info) { self.refresh(); }, function (error) { console.error(error); $alertContainer.empty(); $alertContainer.append($('<span>').css({'color': '#F44336'}).append("Error! 
" + error.error.message)); }); } })) .append($('<button>').addClass('kb-data-list-cancel-btn') .append('Cancel') .click(function () { $alertContainer.empty(); }))); }); var $delete = $('<span>') .addClass(btnClasses).css(css) .tooltip({ title: 'Delete data', container: 'body', delay: { show: Config.get('tooltip').showDelay, hide: Config.get('tooltip').hideDelay } }) .append($('<span>').addClass('fa fa-trash-o').css(css)) .click(function (e) { e.stopPropagation(); $alertContainer.empty(); $alertContainer.append($('<div>') .append($('<span>').append('Are you sure?')) .append($('<button>').addClass('kb-data-list-btn') .append('Delete') .click(function () { if (self.ws_name && self.ws) { self.ws.rename_object({ obj: {ref: object_info[6] + "/" + object_info[0]}, new_name: object_info[1].split('-deleted-')[0] + "-deleted-" + (new Date()).getTime() }, function (renamed_info) { self.ws.delete_objects([{ref: object_info[6] + "/" + object_info[0]}], function () { self.refresh(); }, function (error) { console.error(error); $alertContainer.empty(); $alertContainer.append($('<span>').css({'color': '#F44336'}).append("Error! " + error.error.message)); }); }, function (error) { console.error(error); $alertContainer.empty(); $alertContainer.append($('<span>').css({'color': '#F44336'}).append("Error! " + error.error.message)); }); } })) .append($('<button>').addClass('kb-data-list-cancel-btn') .append('Cancel') .click(function () { $alertContainer.empty(); }))); }); if (!Jupyter.narrative.readonly) { $btnToolbar.append($filterMethodInput) .append($filterMethodOutput); } $btnToolbar.append($openLandingPage); if (!Jupyter.narrative.readonly) $btnToolbar.append($openHistory); $btnToolbar.append($openProvenance); if (!Jupyter.narrative.readonly) { $btnToolbar.append($download) .append($rename) .append($delete); } return $btnToolbar; }, renderObjectRowDiv: function (object_info, object_key) { var self = this; // object_info: // [0] : obj_id objid // [1] : obj_name name // [2] : type_string type // [3] : timestamp save_date // [4] : int version // [5] : username saved_by // [6] : ws_id wsid // [7] : ws_name workspace // [8] : string chsum // [9] : int size // [10] : usermeta meta var type_tokens = object_info[2].split('.') var type_module = type_tokens[0]; var type = type_tokens[1].split('-')[0]; var unversioned_full_type = type_module + '.' 
+ type; var $logo = $('<div>'); // set icon $(document).trigger("setDataIcon.Narrative", {elt: $logo, type: type}); // add behavior $logo.click(function (e) { e.stopPropagation(); self.insertViewer(object_key); }); var shortName = object_info[1]; var isShortened = false; if (shortName.length > this.options.max_name_length) { shortName = shortName.substring(0, this.options.max_name_length - 3) + '...'; isShortened = true; } var $name = $('<span>').addClass("kb-data-list-name").append('<a>' + shortName + '</a>') .css({'cursor': 'pointer'}) .click(function (e) { e.stopPropagation(); self.insertViewer(object_key); }); if (isShortened) { $name.tooltip({ title: object_info[1], placement: 'bottom', delay: { show: Config.get('tooltip').showDelay, hide: Config.get('tooltip').hideDelay } }); } var $version = $('<span>').addClass("kb-data-list-version").append('v' + object_info[4]); var $type = $('<div>').addClass("kb-data-list-type").append(type); var $date = $('<span>').addClass("kb-data-list-date").append(this.getTimeStampStr(object_info[3])); var $byUser = $('<span>').addClass("kb-data-list-edit-by"); if (object_info[5] !== self.my_user_id) { $byUser.append(' by ' + object_info[5]) .click(function (e) { e.stopPropagation(); window.open(self.options.landing_page_url + 'people/' + object_info[5]); }); } var metadata = object_info[10]; var metadataText = ''; for (var key in metadata) { if (metadata.hasOwnProperty(key)) { metadataText += '<tr><th>' + key + '</th><td>' + metadata[key] + '</td></tr>'; } } if (type === 'Genome') { if (metadata.hasOwnProperty('Name')) { $type.text(type + ': ' + metadata['Name']); } } var $savedByUserSpan = $('<td>').addClass('kb-data-list-username-td'); DisplayUtil.displayRealName(object_info[5], $savedByUserSpan); var $alertDiv = $('<div>').css({'text-align': 'center', 'margin': '10px 0px'}); var typeLink = '<a href="' + this.options.landing_page_url + 'spec/module/' + type_module + '" target="_blank">' + type_module + "</a>.<wbr>" + '<a href="' + this.options.landing_page_url + 'spec/type/' + object_info[2] + '" target="_blank">' + (type_tokens[1].replace('-', '&#8209;')) + '.' + type_tokens[2] + '</a>'; var $moreRow = $('<div>').addClass("kb-data-list-more-div").hide() .append($('<div>').css({'text-align': 'center', 'margin': '5pt'}) .append(self.addDataControls(object_info, $alertDiv)).append($alertDiv)) .append( $('<table style="width:100%;">') .append("<tr><th>Permament Id</th><td>" + object_info[6] + "/" + object_info[0] + "/" + object_info[4] + '</td></tr>') .append("<tr><th>Full Type</th><td>" + typeLink + '</td></tr>') .append($('<tr>').append('<th>Saved by</th>').append($savedByUserSpan)) .append(metadataText)); var $toggleAdvancedViewBtn = $('<span>').addClass("kb-data-list-more")//.addClass('btn btn-default btn-xs kb-data-list-more-btn') .hide() .html($('<button class="btn btn-xs btn-default pull-right" aria-hidden="true">').append('<span class="fa fa-ellipsis-h" style="color:#888" />')); var toggleAdvanced = function () { if (self.selectedObject === object_info[0] && $moreRow.is(':visible')) { // assume selection handling occurs before this is called // so if we are now selected and the moreRow is visible, leave it... 
return; } if ($moreRow.is(':visible')) { $moreRow.slideUp('fast'); //$toggleAdvancedViewBtn.show(); } else { self.getRichData(object_info, $moreRow); $moreRow.slideDown('fast'); //$toggleAdvancedViewBtn.hide(); } }; var $mainDiv = $('<div>').addClass('kb-data-list-info').css({padding: '0px', margin: '0px'}) .append($name).append($version).append('<br>') .append($('<table>').css({width: '100%'}) .append($('<tr>') .append($('<td>').css({width: '80%'}) .append($type).append($date).append($byUser)) .append($('<td>') .append($toggleAdvancedViewBtn)))) .click( function () { self.setSelected($(this).closest('.kb-data-list-obj-row'), object_info); toggleAdvanced(); }); var $topTable = $('<table>').attr('kb-oid', object_key) .css({'width': '100%', 'background': '#fff'}) // set background to white looks better on DnD .append($('<tr>') .append($('<td>') .css({'width': '15%'}) .append($logo)) .append($('<td>') .append($mainDiv))); var $row = $('<div>').addClass('kb-data-list-obj-row') .append($('<div>').addClass('kb-data-list-obj-row-main') .append($topTable)) .append($moreRow) // show/hide ellipses on hover, show extra info on click .mouseenter(function () { $toggleAdvancedViewBtn.show(); }) .mouseleave(function () { $toggleAdvancedViewBtn.hide(); }); // Drag and drop this.addDragAndDrop($topTable); var $rowWithHr = $('<div>') .append($('<hr>') .addClass('kb-data-list-row-hr') .css({'margin-left': '65px'})) .append($row); return $rowWithHr; }, // ============= DnD ================== addDropZone: function (container, targetCell, isBelow) { var targetDiv = document.createElement('div'), self = this; targetDiv.classList.add('kb-data-list-drag-target'); targetDiv.innerHTML = '<i>drop data object here</i>'; targetDiv.addEventListener('dragover', function (e) { e.target.classList.add('-drag-active'); e.preventDefault(); }); targetDiv.addEventListener('dragenter', function (e) { e.target.classList.add('-drag-hover'); e.preventDefault(); }); targetDiv.addEventListener('dragleave', function (e) { e.target.classList.remove('-drag-hover'); e.target.classList.remove('-drag-active'); e.preventDefault(); }); targetDiv.addEventListener('drop', function (e) { var data = JSON.parse(e.dataTransfer.getData('info')), key = data.key, obj = _.findWhere(self.objectList, {key: key}), info = self.createInfoObject(obj.info), cell, cellIndex, placement; if (e.target.getAttribute('cellIs') === 'below') { cell = $(e.target.nextSibling).data().cell; placement = 'above'; } else { cell = $(e.target.previousSibling).data().cell; placement = 'below'; } cellIndex = Jupyter.notebook.find_cell_index(cell); $(document).trigger('createViewerCell.Narrative', { nearCellIdx: cellIndex, widget: 'kbaseNarrativeDataCell', info: info, placement: placement }); }); if (isBelow) { targetDiv.setAttribute('cellIs', 'above'); container.appendChild(targetDiv); } else { targetDiv.setAttribute('cellIs', 'below'); container.insertBefore(targetDiv, targetCell); } }, addDragAndDrop: function ($row) { var node = $row.parent().get(0), key = $row.attr('kb-oid'), obj = _.findWhere(this.objectList, {key: key}), info = this.createInfoObject(obj.info), data = { widget: 'kbaseNarrativeDataCell', info: info, key: key }, dataString = JSON.stringify(data), self = this; node.setAttribute('draggable', true); node.addEventListener('dragstart', function (e) { e.dataTransfer.dropEffect = 'copy'; e.dataTransfer.setData('info', dataString); // e.target.style.border = "3px red solid"; var targetCells = document.querySelectorAll('#notebook-container .cell'); var container = 
document.querySelector('#notebook-container'); for (var i = 0; i < targetCells.length; i += 1) { self.addDropZone(container, targetCells.item(i)); if (i === targetCells.length - 1) { self.addDropZone(container, targetCells.item(i), true); } } }); node.addEventListener('dragend', function (e) { console.log('drag ended...'); var container = document.querySelector('#notebook-container'), targetCells = document.querySelectorAll('#notebook-container .kb-data-list-drag-target'); for (var i = 0; i < targetCells.length; i += 1) { var targetCell = targetCells.item(i); container.removeChild(targetCell); } }); // Add tooltip to indicate this functionality $row.attr({ 'data-toggle': 'tooltip', 'title': 'Drag onto narrative &rarr;' }); $row.tooltip({ delay: { show: Config.get('tooltip').showDelay, hide: Config.get('tooltip').hideDelay }, placement: 'top auto', html: true, viewport: { selector: '#kb-side-panel .kb-narr-side-panel:nth-child(1) .kb-narr-panel-body', padding: 2 } }); return this; }, /** * Helper function to create named object attrs from * list of fields returned from Workspace service. */ createInfoObject: function (info) { return _.object(['id', 'name', 'type', 'save_date', 'version', 'saved_by', 'ws_id', 'ws_name', 'chsum', 'size', 'meta'], info); }, // ============= end DnD ================ insertViewer: function (key) { var self = this; var cell = Jupyter.notebook.get_selected_cell(); var near_idx = 0; if (cell) { near_idx = Jupyter.notebook.find_cell_index(cell); $(cell.element).off('dblclick'); $(cell.element).off('keydown'); } //console.log(cell, near_idx); //var cell_id = StringUtil.uuid(); //cell.rendered = false; //cell.set_text('<div id="' + cell_id + '">&nbsp;</div>'); //cell.render(); var obj = _.findWhere(self.objectList, {key: key}); var info = self.createInfoObject(obj.info); // Insert the narrative data cell into the div we just rendered //$('#' + cell_id).kbaseNarrativeDataCell({cell: cell, info: info}); self.trigger('createViewerCell.Narrative', { 'nearCellIdx': near_idx, 'widget': 'kbaseNarrativeDataCell', 'info': info }); }, renderMore: function () { var self = this; if (self.objectList) { if (!self.searchFilterOn) { // if search filter is off, then we just are showing everything var start = self.n_objs_rendered; for (var i = start; i < self.objectList.length; i++) { // only show them as we scroll to them if (self.n_objs_rendered >= start + self.options.objs_to_render_on_scroll) { break; } self.attachRow(i); } //console.log('showing '+ self.n_objs_rendered + ' of ' + self.objectList.length); } else { // search filter is on, so we have to base this on what is currently filtered var start = self.n_filteredObjsRendered; for (var i = start; i < self.currentMatch.length; i++) { // only show them as we scroll to them if (self.n_filteredObjsRendered >= start + self.options.objs_to_render_on_scroll) { break; } self.attachRowElement(self.currentMatch[i]); self.n_filteredObjsRendered++; } //console.log('showing '+ self.n_filteredObjsRendered + ' of ' + self.currentMatch.length + ' objs matching search filter'); } } }, attachRow: function (index) { var obj = this.objectList[index]; if (obj.attached) { return; } if (obj.$div) { this.$mainListDiv.append(obj.$div); } else { obj.$div = this.renderObjectRowDiv(obj.info, obj.key); this.$mainListDiv.append(obj.$div); } obj.attached = true; this.n_objs_rendered++; }, attachRowElement: function (row) { if (row.attached) { return; } // return if we are already attached if (row.$div) { this.$mainListDiv.append(row.$div); } else { row.$div 
= this.renderObjectRowDiv(row.info, row.key); this.$mainListDiv.append(row.$div); } row.attached = true; this.n_objs_rendered++; }, detachAllRows: function () { for (var i = 0; i < this.objectList.length; i++) { this.detachRow(i); } this.$mainListDiv.children().detach(); this.n_objs_rendered = 0; this.renderedAll = false; }, detachRow: function (index) { if (this.objectList[index].attached) { if (this.objectList[index].$div) { this.objectList[index].$div.detach(); } this.objectList[index].attached = false; this.n_objs_rendered--; } }, renderList: function () { var self = this; self.showLoading(); self.detachAllRows(); if (self.objectList.length > 0) { for (var i = 0; i < self.objectList.length; i++) { // only show up to the given number if (i >= self.options.objs_to_render_to_start) { self.n_objs_rendered = i; break; } // If object does not have a key, define one. // This will be used for 'id' of rendered element. // But do *not* replace an existing key. if (self.objectList[i].key == undefined) { self.objectList[i].key = StringUtil.uuid(); } self.attachRow(i); } this.$addDataButton.toggle(!(Jupyter.narrative && Jupyter.narrative.readonly === true)); } else { var $noDataDiv = $('<div>') .css({'text-align': 'center', 'margin': '20pt'}) .append('This Narrative has no data yet.<br><br>'); if (Jupyter && Jupyter.narrative && !Jupyter.narrative.readonly) { $noDataDiv.append($("<button>") .append('Add Data') .addClass('kb-data-list-add-data-text-button') .css({'margin': '20px'}) .click(function () { self.trigger('hideGalleryPanelOverlay.Narrative'); self.trigger('toggleSidePanelOverlay.Narrative', self.options.parentControlPanel.$overlayPanel); })); } self.$mainListDiv.append($noDataDiv); } self.hideLoading(); }, renderController: function () { var self = this; var $byDate = $('<label id="nar-data-list-default-sort-label" class="btn btn-default">').addClass('btn btn-default') .append($('<input type="radio" name="options" id="nar-data-list-default-sort-option" autocomplete="off">')) .append("date") .on('click', function () { self.sortData(function (a, b) { if (a.info[3] > b.info[3]) return -1; // sort by date if (a.info[3] < b.info[3]) return 1; // sort by date return 0; }); }); var $byName = $('<label class="btn btn-default">') .append($('<input type="radio" name="options" id="option2" autocomplete="off">')) .append("name") .on('click', function () { self.sortData(function (a, b) { if (a.info[1].toUpperCase() < b.info[1].toUpperCase()) return -1; // sort by name if (a.info[1].toUpperCase() > b.info[1].toUpperCase()) return 1; return 0; }); }); var $byType = $('<label class="btn btn-default">') .append($('<input type="radio" name="options" id="option3" autocomplete="off">')) .append("type") .on('click', function () { self.sortData(function (a, b) { if (a.info[2].toUpperCase() > b.info[2].toUpperCase()) return -1; // sort by type if (a.info[2].toUpperCase() < b.info[2].toUpperCase()) return 1; return 0; }); }); var $upOrDown = $('<button class="btn btn-default btn-sm" type="button">').css({'margin-left': '5px'}) .append('<span class="glyphicon glyphicon-sort" style="color:#777" aria-hidden="true" />') .on('click', function () { self.reverseData(); }); var $sortByGroup = $('<div data-toggle="buttons">') .addClass("btn-group btn-group-sm") .css({"margin": "2px"}) .append($byDate) .append($byName) .append($byType); // var $addDataBtn = $('<button>') // .addClass("btn btn-warning kb-data-list-get-data-button") // .append('<span class="fa fa-plus" style="color:#fff" aria-hidden="true" /> Add Data') // 
.on('click',function() { // self.trigger('toggleSidePanelOverlay.Narrative'); // }); var $openSearch = $('<span>') .addClass('btn btn-xs btn-default') .tooltip({ title: 'Search data in narrative', container: 'body', delay: { show: Config.get('tooltip').showDelay, hide: Config.get('tooltip').hideDelay } }) .append('<span class="fa fa-search"></span>') .on('click', function () { if (!self.$searchDiv.is(':visible')) { self.$searchDiv.show(); self.$sortByDiv.hide(); self.$filterTypeDiv.hide(); self.$searchInput.focus(); } else { self.$searchDiv.hide(); } }); var $openSort = $('<span>') .addClass('btn btn-xs btn-default') .tooltip({ title: 'Sort data list', container: 'body', delay: { show: Config.get('tooltip').showDelay, hide: Config.get('tooltip').hideDelay } }) .append('<span class="fa fa-sort-amount-asc"></span>') .on('click', function () { if (!self.$sortByDiv.is(':visible')) { self.$sortByDiv.show(); self.$searchDiv.hide(); self.$filterTypeDiv.hide(); } else { self.$sortByDiv.hide(); } }); var $openFilter = $('<span>') .addClass('btn btn-xs btn-default') .tooltip({ title: 'Filter data by type', container: 'body', delay: { show: Config.get('tooltip').showDelay, hide: Config.get('tooltip').hideDelay } }) .append('<span class="fa fa-filter"></span>') .on('click', function () { if (!self.$filterTypeDiv.is(':visible')) { self.$filterTypeDiv.show(); self.$sortByDiv.hide(); self.$searchDiv.hide(); } else { self.$filterTypeDiv.hide(); } }); var $refreshBtn = $('<span>') .addClass('btn btn-xs btn-default') .tooltip({ title: 'Refresh data list', container: 'body', delay: { show: Config.get('tooltip').showDelay, hide: Config.get('tooltip').hideDelay } }) .append('<span class="glyphicon glyphicon-refresh"></span>') .on('click', function () { self.refresh(); }); self.$searchInput = $('<input type="text">') .addClass('form-control') .on('focus', function () { if (Jupyter && Jupyter.narrative) { Jupyter.narrative.disableKeyboardManager(); } }) .on('blur', function () { if (Jupyter && Jupyter.narrative) { Jupyter.narraive.enableKeyboardManager(); } }); self.$searchDiv = $('<div>').addClass("input-group").css({'margin-bottom': '10px'}) .append(self.$searchInput) .append($("<span>").addClass("input-group-addon") .append($("<span>") .addClass("glyphicon glyphicon-search") .css({'cursor': 'pointer'}) .on('click', function () { self.search(); }))); self.$sortByDiv = $('<div>').css({'margin': '3px', 'margin-left': '5px', 'margin-bottom': '10px'}) .append("<small>sort by: </small>") .append($sortByGroup) .append($upOrDown); self.$filterTypeSelect = $('<select>').addClass("form-control") .append($('<option value="">')) .change(function () { var optionSelected = $(this).find("option:selected"); var typeSelected = optionSelected.val(); // whenever we change the type filter, we need to clear the current match // so that the complete filter can rerun self.currentMatch = self.objectList; self.filterByType(typeSelected); }); self.$filterTypeDiv = $('<div>').css({'margin': '3px', 'margin-left': '5px', 'margin-bottom': '10px'}) .append(self.$filterTypeSelect); var $header = $('<div>'); if (self.options.parentControlPanel) { self.options.parentControlPanel.addButtonToControlPanel($openSearch); self.options.parentControlPanel.addButtonToControlPanel($openSort); self.options.parentControlPanel.addButtonToControlPanel($openFilter); self.options.parentControlPanel.addButtonToControlPanel($refreshBtn); } else { $header.addClass('row').css({'margin': '5px'}) .append($('<div>').addClass('col-xs-12').css({'margin': '0px', 
'padding': '0px', 'text-align': 'right'}) .append($openSearch) .append($openSort) .append($openFilter)) } self.$sortByDiv.hide(); self.$searchDiv.hide(); self.$filterTypeDiv.hide(); var $filterDiv = $('<div>') .append(self.$sortByDiv) .append(self.$searchDiv) .append(self.$filterTypeDiv); self.$controllerDiv.append($header).append($filterDiv); }, populateAvailableTypes: function () { var self = this; if (self.availableTypes && self.$filterTypeSelect) { var types = []; for (var type in self.availableTypes) { if (self.availableTypes.hasOwnProperty(type)) { types.push(type); } } types.sort(); self.$filterTypeSelect.empty(); var runningCount = 0; for (var i = 0; i < types.length; i++) { runningCount += self.availableTypes[types[i]].count; var countStr = ''; if (self.availableTypes[types[i]].count == 1) { countStr = " (".concat(self.availableTypes[types[i]].count).concat(" object)"); } else { countStr = " (".concat(self.availableTypes[types[i]].count).concat(" objects)"); } self.$filterTypeSelect.append( $('<option value="' + self.availableTypes[types[i]].type + '">') .append(self.availableTypes[types[i]].type + countStr)); } if (runningCount == 1) { self.$filterTypeSelect.prepend($('<option value="">').append("Show All Types (" + runningCount + " object)")); } else { self.$filterTypeSelect.prepend($('<option value="">').append("Show All Types (" + runningCount + " objects)")); } self.$filterTypeSelect.val(""); } }, reverseData: function () { var self = this; if (!self.objectList) { return; } self.objectList.reverse(); self.renderList(); self.search(); self.hideLoading(); }, sortData: function (sortfunction) { var self = this; if (!self.objectList) { return; } //should add spinning wait bar .... self.showLoading(); self.objectList.sort(sortfunction); self.renderList(); self.search(); // always refilter on the search term search if there is something there self.hideLoading(); // go back to the top on sort self.$mainListDiv.animate({ scrollTop: 0 }, 300); // fast = 200, slow = 600 }, currentMatch: [], currentTerm: '', searchFilterOn: false, n_filteredObjsRendered: null, search: function (term, type) { var self = this; if (!self.objectList) { return; } if (!term && self.$searchInput) { term = self.$searchInput.val(); } // if type wasn't selected, then we try to get something that was set if (!type) { if (self.$filterTypeSelect) { type = self.$filterTypeSelect.find("option:selected").val(); } } term = term.trim(); if (term.length > 0 || type) { self.searchFilterOn = true; // todo: should show searching indicator (could take several seconds if there is a lot of data) // optimization => we filter existing matches instead of researching everything if the new // term starts with the last term searched for var newMatch = []; if (!self.currentTerm) { // reset if currentTerm is null or empty self.currentMatch = self.objectList; } else { if (term.indexOf(self.currentTerm) !== 0) { self.currentMatch = self.objectList; } } // clean the term for regex use term = term.replace(/\|/g, '\\|').replace(/\\\\\|/g, '|'); // bars are common in kb ids, so escape them unless we have \\| term = term.replace(/\./g, '\\.').replace(/\\\\\./g, '.'); // dots are common in names, so we escape them, but // if a user writes '\\.' we assume they want the regex '.' 
var regex = new RegExp(term, 'i'); var n_matches = 0; self.n_filteredObjsRendered = 0; for (var k = 0; k < self.currentMatch.length; k++) { // [0] : obj_id objid // [1] : obj_name name // [2] : type_string type // [3] : timestamp save_date // [4] : int version // [5] : username saved_by // [6] : ws_id wsid // [7] : ws_name workspace // [8] : string chsum // [9] : int size // [10] : usermeta meta var match = false; var info = self.currentMatch[k].info; if (regex.test(info[1])) { match = true; } // match on name else if (regex.test(info[2].split('.')[1].split('-'))) { match = true; } // match on type name else if (regex.test(info[5])) { match = true; } // match on saved_by user if (!match && info[10]) { // match on metadata values for (var metaKey in info[10]) { if (info[10].hasOwnProperty(metaKey)) { if (regex.test(info[10][metaKey])) { match = true; break; } else if (regex.test(metaKey + "::" + info[10][metaKey])) { match = true; break; } } } } if (type) { // if type is defined, then our sort must also filter by the type if (type !== info[2].split('-')[0].split('.')[1]) { match = false; // no match if we are not the selected type! } } if (match) { // matches must always switch to show if they are rendered if (self.currentMatch[k].$div) { self.currentMatch[k].$div.show(); } // todo: add check so we only show up to the number we render... switching to this will require that // we revise the renderMore logic... if (n_matches < self.options.objs_to_render_to_start) { self.attachRowElement(self.currentMatch[k]); self.n_filteredObjsRendered++; } newMatch.push(self.currentMatch[k]); n_matches++; } else { if (self.currentMatch[k].$div) { self.currentMatch[k].$div.hide(); } } } self.currentMatch = newMatch; // update the current match } else { self.searchFilterOn = false; // no new search, so show all and render the list for (var k = 0; k < self.objectList.length; k++) { if (self.objectList[k].$div) { self.objectList[k].$div.show(); } } self.renderList(); } self.currentTerm = term; }, filterByType: function (type) { var self = this; self.search(null, type); }, getRichData: function (object_info, $moreRow) { var self = this; var $usernameTd = $moreRow.find(".kb-data-list-username-td"); DisplayUtil.displayRealName(object_info[5], $usernameTd); }, showLoading: function () { this.$loadingDiv.show(); this.$mainListDiv.hide(); }, hideLoading: function () { this.$loadingDiv.hide(); this.$mainListDiv.show(); }, /** * @method loggedInCallback * This is associated with the login widget (through the kbaseAuthenticatedWidget parent) and * is triggered when a login event occurs. * It associates the new auth token with this widget and refreshes the data panel. * @private */ loggedInCallback: function (event, auth) { console.log('DataList: loggedInCallback'); this.ws = new Workspace(this.options.ws_url, auth); this.my_user_id = auth.user_id; this.isLoggedIn = true; this.refresh(); return this; }, /** * @method loggedOutCallback * Like the loggedInCallback, this is triggered during a logout event (through the login widget). 
* It throws away the auth token and workspace client, and refreshes the widget * @private */ loggedOutCallback: function (event, auth) { this.ws = null; this.isLoggedIn = false; this.my_user_id = null; if (this.ws_name) this.refresh(); return this; }, // edited from: http://stackoverflow.com/questions/3177836/how-to-format-time-since-xxx-e-g-4-minutes-ago-similar-to-stack-exchange-site getTimeStampStr: function (objInfoTimeStamp) { var date = new Date(objInfoTimeStamp); var seconds = Math.floor((new Date() - date) / 1000); // f-ing safari, need to add extra ':' delimiter to parse the timestamp if (isNaN(seconds)) { var tokens = objInfoTimeStamp.split('+'); // this is just the date without the GMT offset var newTimestamp = tokens[0] + '+' + tokens[0].substr(0, 2) + ":" + tokens[1].substr(2, 2); date = new Date(newTimestamp); seconds = Math.floor((new Date() - date) / 1000); if (isNaN(seconds)) { // just in case that didn't work either, then parse without the timezone offset, but // then just show the day and forget the fancy stuff... date = new Date(tokens[0]); return this.monthLookup[date.getMonth()] + " " + date.getDate() + ", " + date.getFullYear(); } } var interval = Math.floor(seconds / 31536000); if (interval > 1) { return this.monthLookup[date.getMonth()] + " " + date.getDate() + ", " + date.getFullYear(); } interval = Math.floor(seconds / 2592000); if (interval > 1) { if (interval < 4) { return interval + " months ago"; } else { return this.monthLookup[date.getMonth()] + " " + date.getDate() + ", " + date.getFullYear(); } } interval = Math.floor(seconds / 86400); if (interval > 1) { return interval + " days ago"; } interval = Math.floor(seconds / 3600); if (interval > 1) { return interval + " hours ago"; } interval = Math.floor(seconds / 60); if (interval > 1) { return interval + " minutes ago"; } return Math.floor(seconds) + " seconds ago"; }, monthLookup: ["Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"], }) });
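The widget above documents the positional Workspace object_info tuple ([0] obj_id through [10] usermeta) in several comment blocks and converts it to named fields via createInfoObject, a thin wrapper around underscore's _.object. What follows is an illustrative, self-contained sketch of that mapping only; the standalone helper name infoTupleToObject and the sample tuple values are hypothetical and not part of the widget.

// Illustrative sketch of the object_info -> named-fields mapping performed
// by createInfoObject above. Field order matches the widget's comments;
// infoTupleToObject and the sample values below are hypothetical.
var OBJECT_INFO_FIELDS = ['id', 'name', 'type', 'save_date', 'version',
                          'saved_by', 'ws_id', 'ws_name', 'chsum', 'size', 'meta'];

// Plain-JS equivalent of _.object(OBJECT_INFO_FIELDS, info).
function infoTupleToObject(info) {
    var named = {};
    OBJECT_INFO_FIELDS.forEach(function (key, i) {
        named[key] = info[i];
    });
    return named;
}

// Hypothetical tuple in the order [0] objid ... [10] usermeta.
var sampleInfo = [42, 'MyGenome', 'KBaseGenomes.Genome-8.0',
                  '2015-06-01T12:00:00+0000', 3, 'someuser',
                  1234, 'someuser:narrative_1', 'abc123', 1048576,
                  {Name: 'My Genome'}];

console.log(infoTupleToObject(sampleInfo).type); // "KBaseGenomes.Genome-8.0"

Note that the search and filter code above still indexes the raw tuple directly (info[1] for name, info[2] for type, info[5] for saved_by); only the drag-and-drop payload, the drop handler, and the viewer-cell trigger go through the named form produced by createInfoObject.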
kbase-extension/static/kbase/js/widgets/narrative_core/kbaseNarrativeDataList.js
/*global define,IPython*/ /*jslint white: true*/ /** * @author Michael Sneddon <[email protected]> * @public */ define(['jquery', 'underscore', 'narrativeConfig', 'Util/String', 'Util/Display', 'kbase-client-api', 'jquery-nearest', 'kbwidget', 'kbaseAuthenticatedWidget', 'kbaseNarrativeDownloadPanel'], function ($, _, Config, StringUtil, DisplayUtil) { 'use strict'; $.KBWidget({ name: 'kbaseNarrativeDataList', parent: 'kbaseAuthenticatedWidget', version: '1.0.0', options: { ws_name: null, // must be the WS name, not the WS Numeric ID ws_url: Config.url('workspace'), landing_page_url: "/functional-site/#/", // !! always include trailing slash lp_url: Config.url('landing_pages'), profile_page_url: Config.url('profile_page'), user_name_fetch_url: "https://kbase.us/services/genome_comparison/users?usernames=", loadingImage: Config.get('loading_gif'), methodStoreURL: Config.url('narrative_method_store'), ws_chunk_size: 10000, // this is the limit of the number of objects to retrieve from the ws on each pass ws_max_objs_to_fetch: 75000, // this is the total limit of the number of objects before we stop trying to get more // note that if there are more objects than this, then sorts/search filters may // not show accurate results objs_to_render_to_start: 40, // initial number of rows to display objs_to_render_on_scroll: 5, // number of rows to add when the user scrolls to the bottom, should be <=5, much more and // the addition of new rows becomes jerky max_objs_to_prevent_filter_as_you_type_in_search: 50000, //if there are more than this # of objs, user must click search //instead of updating as you type max_objs_to_prevent_initial_sort: 10000, // initial sort makes loading slower, so we can turn it off if // there are more than this number of objects max_name_length: 33, refresh_interval: 30000, parentControlPanel: null }, // private variables mainListPanelHeight: '340px', refreshTimer: null, ws_name: null, ws: null, ws_last_update_timestamp: null, ws_obj_count: null, n_objs_rendered: 0, real_name_lookup: {}, $searchInput: null, $filterTypeSelect: null, availableTypes: {}, $searchDiv: null, $sortByDiv: null, $filterTypeDiv: null, $addDataButton: null, $controllerDiv: null, $mainListDiv: null, mainListId: null, $loadingDiv: null, methClient: null, obj_list: [], obj_data: {}, // old style - type_name : info my_user_id: null, /** * @method init * Builds the DOM structure for the widget. * Includes the tables and panel. * If any data was passed in (options.data), that gets shoved into the datatable. * @param {Object} - the options set. * @returns {Object} this shiny new widget. 
* @private */ init: function (options) { this._super(options); var self = this; this.$controllerDiv = $('<div>'); this.$elem.append(this.$controllerDiv); this.renderController(); this.$loadingDiv = $('<div>').addClass('kb-data-loading') .append('<img src="' + this.options.loadingImage + '">') .hide(); this.$elem.append(this.$loadingDiv); this.mainListId = StringUtil.uuid(); this.$mainListDiv = $('<div id=' + this.mainListId + '>') .css({'overflow-x': 'hidden', 'overflow-y': 'auto', 'height': this.mainListPanelHeight}) .on('scroll', function () { if ($(this).scrollTop() + $(this).innerHeight() >= this.scrollHeight) { self.renderMore(); } }); this.$addDataButton = $('<span>').addClass('kb-data-list-add-data-button fa fa-plus fa-2x') .css({'position': 'absolute', bottom: '15px', right: '25px', 'z-index': '5'}) .click(function () { self.trigger('hideGalleryPanelOverlay.Narrative'); self.trigger('toggleSidePanelOverlay.Narrative', self.options.parentControlPanel.$overlayPanel); }); var $mainListDivContainer = $('<div>').css({'position': 'relative'}) .append(this.$mainListDiv) .append(this.$addDataButton.hide()); this.$elem.append($mainListDivContainer); if (this._attributes.auth) { this.ws = new Workspace(this.options.ws_url, this._attributes.auth); } // listener for refresh $(document).on('updateDataList.Narrative', function () { self.refresh(); }) if (this.options.ws_name) { this.ws_name = this.options.ws_name; } this.methClient = new NarrativeMethodStore(this.options.methodStoreURL); return this; }, setWorkspace: function (ws_name) { this.ws_name = ws_name; // this.ws_name = "janakacore"; // for testing a bigish workspace //this.ws_name = "KBasePublicGenomesV4"; // for testing a very big workspace this.refresh(); }, refresh: function (showError) { var self = this; console.log('DataList: refresh -- ' + self.ws_name); // Set the refresh timer on the first refresh. 
From here, it'll refresh itself // every this.options.refresh_interval (30000) ms if (self.refreshTimer === null) { self.refreshTimer = setInterval(function () { self.refresh(); }, this.options.refresh_interval); // check if there is new data every X ms } if (self.ws_name && self.ws) { self.ws.get_workspace_info({ workspace: this.ws_name }, function (workspace_info) { //[0] ws_id id, [1] ws_name workspace, [2] username owner, [3] timestamp moddate, //[4] int object, [5] permission user_permission, [6] permission globalread, //[7] lock_status lockstat, [8] usermeta metadata //console.log('I have: '+self.ws_last_update_timestamp+ " remote has: "+workspace_info[3]); // Update RO or RW mode self.trigger("updateReadOnlyMode.Narrative", [self.ws, self.ws_name]); if (self.ws_last_update_timestamp) { if (self.ws_last_update_timestamp !== workspace_info[3]) { self.ws_last_update_timestamp = workspace_info[3]; self.ws_obj_count = workspace_info[4]; self.reloadWsData(); } else { //console.log('updating times'); self.refreshTimeStrings(); } } else { self.ws_last_update_timestamp = workspace_info[3]; self.ws_obj_count = workspace_info[4]; self.reloadWsData(); } }, function (error) { console.error('DataList: when checking for updates:', error); if (showError) { self.$mainListDiv.show(); self.$mainListDiv.empty(); self.$mainListDiv.append($('<div>').css({'color': '#F44336', 'margin': '10px'}) .append('Error: Unable to connect to KBase data.')); } self.hideLoading(); }); } else { console.error('DataList: missing variable(s)'); console.error('ws_name: ' + self.ws_name); console.error('ws: ' + self.ws); self.hideLoading(); /*Not really an error yet because we don't know what order things are being called var where = "kbaseNarrativeDataList.refresh"; if (!self.ws) { console.error(where, "Workspace not connected"); } else { console.error(where, "Workspace name is empty"); }*/ } }, refreshSpecificObject: function () { }, refreshTimeStrings: function () { var self = this; var newTime; var oldTime; if (self.objectList) { for (var i = 0; i < self.objectList.length; i++) { if (self.objectList[i].$div) { newTime = self.getTimeStampStr(self.objectList[i].info[3]); oldTime = self.objectList[i].$div.find('.kb-data-list-date').text(); if (newTime !== oldTime) { self.objectList[i].$div.find('.kb-data-list-date').text(newTime); } } } } }, reloadWsData: function () { var self = this; console.log('DataList: reloadWsData'); if (self.ws_name && self.ws) { // empty the existing object list first self.objectList = []; self.obj_data = {}; self.availableTypes = {}; self.getNextDataChunk(0); } }, getNextDataChunk: function (skip) { var self = this; console.log('DataList: getNextDataChunk - ' + skip); self.ws.list_objects({ workspaces: [self.ws_name], includeMetadata: 1, skip: skip, limit: self.options.ws_chunk_size }, function (infoList) { console.log('DataList: getNextDataChunk return'); console.log(infoList); // object_info: // [0] : obj_id objid // [1] : obj_name name // [2] : type_string type // [3] : timestamp save_date // [4] : int version // [5] : username saved_by // [6] : ws_id wsid // [7] : ws_name workspace // [8] : string chsum // [9] : int size // [10] : usermeta meta for (var i = 0; i < infoList.length; i++) { // skip narrative objects if (infoList[i][2].indexOf('KBaseNarrative') == 0) { continue; } self.objectList.push( { key: StringUtil.uuid(), // always generate the DnD key $div: null, //self.renderObjectRowDiv(infoList[i]), // we defer rendering the div until it is shown info: infoList[i], attached: false } ); 
var typeKey = infoList[i][2].split("-")[0]; if (!(typeKey in self.obj_data)) { self.obj_data[typeKey] = []; } self.obj_data[typeKey].push(infoList[i]); var typeName = typeKey.split('.')[1]; if (!(typeName in self.availableTypes)) { self.availableTypes[typeName] = { type: typeName, count: 0 }; } self.availableTypes[typeName].count++; } // if we have more than 2k objects, make them hit enter to search... self.$searchInput.off("input change blur"); self.$searchInput.on("change blur", function () { self.search(); }); if (self.objectList.length <= self.options.max_objs_to_prevent_filter_as_you_type_in_search) { self.$searchInput.on("input", function () { self.search(); }); self.$searchInput.on("keyup", function (e) { if (e.keyCode == 27) self.$searchDiv.hide(); }); } self.trigger('dataUpdated.Narrative'); //LOGIC: we keep trying to get more until we reach the ws_obj_count or untill the max // fetch count option, UNLESS the last call returned nothing, in which case we stop. //IMPORTANT NOTE: IN RARE CASES THIS DOES NOT GAURANTEE THAT WE GET ALL OBJECTS FROM //THIS WS!! IF THERE IS A CHUNK THAT RETURNED NOTHING, THERE STILL MAY BE MORE //OBJECTS DUE TO A BUG IN THE WORKSPACE THAT INCLUDES OLD VERSIONS AND DELETED VERSIONS //BEFORE FILTERING OUT THE NUMBER - A BETTER TEMP FIX WOULD BE TO LIMIT THE NUMBER OF //RECURSIONS TO 2 or 3 MAYBE... //BUT WHATEVER YOU DO PLEASE REMEMBER TO USE CAPITAL LETTERS EXTENSIVELY //OTHERWISE PEOPLE MIGHT NOT NOTICE WHAT YOU ARE SAYING AND THAT WOULD //BE EXTREMELY ANNOYING!!!! SERIOUSLY!!! if (self.objectList.length < self.ws_obj_count && self.objectList.length < self.options.ws_max_objs_to_fetch && infoList.length > 0) { self.getNextDataChunk(skip + self.options.ws_chunk_size); } else { if (self.objectList.length <= self.options.max_objs_to_prevent_initial_sort) { self.objectList.sort(function (a, b) { if (a.info[3] > b.info[3]) return -1; // sort by date if (a.info[3] < b.info[3]) return 1; // sort by date return 0; }); self.$elem.find('#nar-data-list-default-sort-label').addClass('active'); self.$elem.find('#nar-data-list-default-sort-option').attr('checked'); } } self.populateAvailableTypes(); self.renderList(); self.hideLoading(); }, function (error) { console.error(error); KBError("kbaseNarrativeDataList.getNextDataChunk", error.error.message); self.$mainListDiv.show(); self.$mainListDiv.empty(); self.$mainListDiv.append($('<div>').css({'color': '#F44336', 'margin': '10px'}) .append('Error: ' + error.error.message)); self.hideLoading(); }); }, getObjData: function (type, ignoreVersion) { if (type) { var dataSet = {}; if (typeof type === 'string') { type = [type]; } for (var i = 0; i < type.length; i++) { if (this.obj_data[type[i]]) { dataSet[type[i]] = this.obj_data[type[i]]; } } return dataSet; } return this.obj_data; }, $currentSelectedRow: null, selectedObject: null, setSelected: function ($selectedRow, object_info) { var self = this; if (self.$currentSelectedRow) { self.$currentSelectedRow.removeClass('kb-data-list-obj-row-selected'); } if (object_info[0] === self.selectedObject) { self.$currentSelectedRow = null; self.selectedObject = null; self.trigger('removeFilterMethods.Narrative'); } // } else { // $selectedRow.addClass('kb-data-list-obj-row-selected'); // self.$currentSelectedRow = $selectedRow; // self.selectedObject = object_info[0]; // self.trigger('filterMethods.Narrative', 'type:' + object_info[2].split('-')[0].split('.')[1]); // } }, addDataControls: function (object_info, $alertContainer) { var self = this; var $btnToolbar = $('<span>') 
.addClass('btn-group'); var btnClasses = "btn btn-xs btn-default"; var css = {'color': '#888'}; /*.append($('<div>').css({'text-align':'center','margin':'5pt'}) .append('<a href="'+landingPageLink+'" target="_blank">'+ 'explore data</a>&nbsp&nbsp|&nbsp&nbsp') .append('<a href="'+this.options.landing_page_url+'objgraphview/'+object_info[7] +'/'+object_info[1] +'" target="_blank">'+ 'view provenance</a><br>'))*/ var $filterMethodInput = $('<span>') .tooltip({ title: 'Show Methods with this as input', container: '#' + this.mainListId, delay: { show: Config.get('tooltip').showDelay, hide: Config.get('tooltip').hideDelay } }) .addClass(btnClasses) .append($('<span>').addClass('fa fa-sign-in').css(css)) .click(function (e) { this.trigger('filterMethods.Narrative', 'in_type:' + object_info[2].split('-')[0].split('.')[1]); }.bind(this)); var $filterMethodOutput = $('<span>') .tooltip({ title: 'Show Methods with this as output', container: '#' + this.mainListId, delay: { show: Config.get('tooltip').showDelay, hide: Config.get('tooltip').hideDelay } }) .addClass(btnClasses) .append($('<span>').addClass('fa fa-sign-out').css(css)) .click(function (e) { this.trigger('filterMethods.Narrative', 'out_type:' + object_info[2].split('-')[0].split('.')[1]); }.bind(this)); var $openLandingPage = $('<span>') .tooltip({ title: 'Explore data', container: '#' + this.mainListId, delay: { show: Config.get('tooltip').showDelay, hide: Config.get('tooltip').hideDelay } }) .addClass(btnClasses) .append($('<span>').addClass('fa fa-binoculars').css(css)) .click(function (e) { e.stopPropagation(); $alertContainer.empty(); var typeTokens = object_info[2].split('-')[0].split('.'); var landingPageLink = self.options.lp_url + object_info[6] + '/' + object_info[1]; window.open(landingPageLink); }); var $openHistory = $('<span>') .addClass(btnClasses).css(css) .tooltip({ title: 'View history to revert changes', container: 'body', delay: { show: Config.get('tooltip').showDelay, hide: Config.get('tooltip').hideDelay } }) .append($('<span>').addClass('fa fa-history').css(css)) .click(function (e) { e.stopPropagation(); $alertContainer.empty(); if (self.ws_name && self.ws) { self.ws.get_object_history({ref: object_info[6] + "/" + object_info[0]}, function (history) { $alertContainer.append($('<div>') .append($('<button>').addClass('kb-data-list-cancel-btn') .append('Hide History') .click(function () { $alertContainer.empty(); }))); history.reverse(); var $tbl = $('<table>').css({'width': '100%'}); for (var k = 0; k < history.length; k++) { var $revertBtn = $('<button>').append('v' + history[k][4]).addClass('kb-data-list-btn'); if (k == 0) { $revertBtn.tooltip({ title: 'Current Version', container: 'body', placement: 'bottom', delay: { show: Config.get('tooltip').showDelay, hide: Config.get('tooltip').hideDelay } }); } else { var revertRef = {wsid: history[k][6], objid: history[k][0], ver: history[k][4]}; (function (revertRefLocal) { $revertBtn.tooltip({ title: 'Revert to this version?', container: 'body', placement: 'bottom', delay: { show: Config.get('tooltip').showDelay, hide: Config.get('tooltip').hideDelay } }) .click(function () { self.ws.revert_object(revertRefLocal, function (reverted_obj_info) { self.refresh(); }, function (error) { console.error(error); $alertContainer.empty(); $alertContainer.append($('<span>').css({'color': '#F44336'}).append("Error! 
" + error.error.message)); }); }); })(revertRef); } $tbl.append($('<tr>') .append($('<td>').append($revertBtn)) .append($('<td>').append('Saved by ' + history[k][5] + '<br>' + self.getTimeStampStr(history[k][3]))) .append($('<td>').append($('<span>').css({margin: '4px'}).addClass('fa fa-info pull-right')) .tooltip({ title: history[k][2] + '<br>' + history[k][8] + '<br>' + history[k][9] + ' bytes', container: 'body', html: true, placement: 'bottom', delay: { show: Config.get('tooltip').showDelay, hide: Config.get('tooltip').hideDelay } }) )); } $alertContainer.append($tbl); }, function (error) { console.error(error); $alertContainer.empty(); $alertContainer.append($('<span>').css({'color': '#F44336'}).append("Error! " + error.error.message)); }); } }); var $openProvenance = $('<span>') .addClass(btnClasses).css(css) .tooltip({ title: 'View data provenance and relationships', container: 'body', delay: { show: Config.get('tooltip').showDelay, hide: Config.get('tooltip').hideDelay } }) .append($('<span>').addClass('fa fa-sitemap fa-rotate-90').css(css)) .click(function (e) { e.stopPropagation(); $alertContainer.empty(); window.open(self.options.landing_page_url + 'objgraphview/' + object_info[7] + '/' + object_info[1]); }); var $download = $('<span>') .addClass(btnClasses).css(css) .tooltip({ title: 'Export / Download data', container: 'body', delay: { show: Config.get('tooltip').showDelay, hide: Config.get('tooltip').hideDelay } }) .append($('<span>').addClass('fa fa-download').css(css)) .click(function (e) { e.stopPropagation(); $alertContainer.empty(); var type = object_info[2].split('-')[0]; var wsId = object_info[7]; var objId = object_info[1]; var downloadPanel = $('<div>'); $alertContainer.append(downloadPanel); downloadPanel.kbaseNarrativeDownloadPanel({token: self._attributes.auth.token, type: type, wsId: wsId, objId: objId}); }); var $rename = $('<span>') .addClass(btnClasses).css(css) .tooltip({ title: 'Rename data', container: 'body', delay: { show: Config.get('tooltip').showDelay, hide: Config.get('tooltip').hideDelay } }) .append($('<span>').addClass('fa fa-font').css(css)) .click(function (e) { e.stopPropagation(); $alertContainer.empty(); var $newNameInput = $('<input type="text">') .addClass('form-control') .val(object_info[1]) .on('focus', function () { if (IPython && IPython.narrative) { IPython.narrative.disableKeyboardManager(); } }) .on('blur', function () { if (IPython && IPython.narrative) { IPython.narrative.enableKeyboardManager(); } }); $alertContainer.append($('<div>') .append($('<div>').append("Warning: Apps using the old name may break.")) .append($('<div>').append($newNameInput)) .append($('<button>').addClass('kb-data-list-btn') .append('Rename') .click(function () { if (self.ws_name && self.ws) { self.ws.rename_object({ obj: {ref: object_info[6] + "/" + object_info[0]}, new_name: $newNameInput.val() }, function (renamed_info) { self.refresh(); }, function (error) { console.error(error); $alertContainer.empty(); $alertContainer.append($('<span>').css({'color': '#F44336'}).append("Error! 
" + error.error.message)); }); } })) .append($('<button>').addClass('kb-data-list-cancel-btn') .append('Cancel') .click(function () { $alertContainer.empty(); }))); }); var $delete = $('<span>') .addClass(btnClasses).css(css) .tooltip({ title: 'Delete data', container: 'body', delay: { show: Config.get('tooltip').showDelay, hide: Config.get('tooltip').hideDelay } }) .append($('<span>').addClass('fa fa-trash-o').css(css)) .click(function (e) { e.stopPropagation(); $alertContainer.empty(); $alertContainer.append($('<div>') .append($('<span>').append('Are you sure?')) .append($('<button>').addClass('kb-data-list-btn') .append('Delete') .click(function () { if (self.ws_name && self.ws) { self.ws.rename_object({ obj: {ref: object_info[6] + "/" + object_info[0]}, new_name: object_info[1].split('-deleted-')[0] + "-deleted-" + (new Date()).getTime() }, function (renamed_info) { self.ws.delete_objects([{ref: object_info[6] + "/" + object_info[0]}], function () { self.refresh(); }, function (error) { console.error(error); $alertContainer.empty(); $alertContainer.append($('<span>').css({'color': '#F44336'}).append("Error! " + error.error.message)); }); }, function (error) { console.error(error); $alertContainer.empty(); $alertContainer.append($('<span>').css({'color': '#F44336'}).append("Error! " + error.error.message)); }); } })) .append($('<button>').addClass('kb-data-list-cancel-btn') .append('Cancel') .click(function () { $alertContainer.empty(); }))); }); if (!IPython.narrative.readonly) { $btnToolbar.append($filterMethodInput) .append($filterMethodOutput); } $btnToolbar.append($openLandingPage); if (!IPython.narrative.readonly) $btnToolbar.append($openHistory); $btnToolbar.append($openProvenance); if (!IPython.narrative.readonly) { $btnToolbar.append($download) .append($rename) .append($delete); } return $btnToolbar; }, renderObjectRowDiv: function (object_info, object_key) { var self = this; // object_info: // [0] : obj_id objid // [1] : obj_name name // [2] : type_string type // [3] : timestamp save_date // [4] : int version // [5] : username saved_by // [6] : ws_id wsid // [7] : ws_name workspace // [8] : string chsum // [9] : int size // [10] : usermeta meta var type_tokens = object_info[2].split('.') var type_module = type_tokens[0]; var type = type_tokens[1].split('-')[0]; var unversioned_full_type = type_module + '.' 
+ type; var $logo = $('<div>'); // set icon $(document).trigger("setDataIcon.Narrative", {elt: $logo, type: type}); // add behavior $logo.click(function (e) { e.stopPropagation(); self.insertViewer(object_key); }); var shortName = object_info[1]; var isShortened = false; if (shortName.length > this.options.max_name_length) { shortName = shortName.substring(0, this.options.max_name_length - 3) + '...'; isShortened = true; } var $name = $('<span>').addClass("kb-data-list-name").append('<a>' + shortName + '</a>') .css({'cursor': 'pointer'}) .click(function (e) { e.stopPropagation(); self.insertViewer(object_key); }); if (isShortened) { $name.tooltip({ title: object_info[1], placement: 'bottom', delay: { show: Config.get('tooltip').showDelay, hide: Config.get('tooltip').hideDelay } }); } var $version = $('<span>').addClass("kb-data-list-version").append('v' + object_info[4]); var $type = $('<div>').addClass("kb-data-list-type").append(type); var $date = $('<span>').addClass("kb-data-list-date").append(this.getTimeStampStr(object_info[3])); var $byUser = $('<span>').addClass("kb-data-list-edit-by"); if (object_info[5] !== self.my_user_id) { $byUser.append(' by ' + object_info[5]) .click(function (e) { e.stopPropagation(); window.open(self.options.landing_page_url + 'people/' + object_info[5]); }); } var metadata = object_info[10]; var metadataText = ''; for (var key in metadata) { if (metadata.hasOwnProperty(key)) { metadataText += '<tr><th>' + key + '</th><td>' + metadata[key] + '</td></tr>'; } } if (type === 'Genome') { if (metadata.hasOwnProperty('Name')) { $type.text(type + ': ' + metadata['Name']); } } var $savedByUserSpan = $('<td>').addClass('kb-data-list-username-td'); DisplayUtil.displayRealName(object_info[5], $savedByUserSpan); var $alertDiv = $('<div>').css({'text-align': 'center', 'margin': '10px 0px'}); var typeLink = '<a href="' + this.options.landing_page_url + 'spec/module/' + type_module + '" target="_blank">' + type_module + "</a>.<wbr>" + '<a href="' + this.options.landing_page_url + 'spec/type/' + object_info[2] + '" target="_blank">' + (type_tokens[1].replace('-', '&#8209;')) + '.' + type_tokens[2] + '</a>'; var $moreRow = $('<div>').addClass("kb-data-list-more-div").hide() .append($('<div>').css({'text-align': 'center', 'margin': '5pt'}) .append(self.addDataControls(object_info, $alertDiv)).append($alertDiv)) .append( $('<table style="width:100%;">') .append("<tr><th>Permament Id</th><td>" + object_info[6] + "/" + object_info[0] + "/" + object_info[4] + '</td></tr>') .append("<tr><th>Full Type</th><td>" + typeLink + '</td></tr>') .append($('<tr>').append('<th>Saved by</th>').append($savedByUserSpan)) .append(metadataText)); var $toggleAdvancedViewBtn = $('<span>').addClass("kb-data-list-more")//.addClass('btn btn-default btn-xs kb-data-list-more-btn') .hide() .html($('<button class="btn btn-xs btn-default pull-right" aria-hidden="true">').append('<span class="fa fa-ellipsis-h" style="color:#888" />')); var toggleAdvanced = function () { if (self.selectedObject === object_info[0] && $moreRow.is(':visible')) { // assume selection handling occurs before this is called // so if we are now selected and the moreRow is visible, leave it... 
return; } if ($moreRow.is(':visible')) { $moreRow.slideUp('fast'); //$toggleAdvancedViewBtn.show(); } else { self.getRichData(object_info, $moreRow); $moreRow.slideDown('fast'); //$toggleAdvancedViewBtn.hide(); } }; var $mainDiv = $('<div>').addClass('kb-data-list-info').css({padding: '0px', margin: '0px'}) .append($name).append($version).append('<br>') .append($('<table>').css({width: '100%'}) .append($('<tr>') .append($('<td>').css({width: '80%'}) .append($type).append($date).append($byUser)) .append($('<td>') .append($toggleAdvancedViewBtn)))) .click( function () { self.setSelected($(this).closest('.kb-data-list-obj-row'), object_info); toggleAdvanced(); }); var $topTable = $('<table>').attr('kb-oid', object_key) .css({'width': '100%', 'background': '#fff'}) // set background to white looks better on DnD .append($('<tr>') .append($('<td>') .css({'width': '15%'}) .append($logo)) .append($('<td>') .append($mainDiv))); var $row = $('<div>').addClass('kb-data-list-obj-row') .append($('<div>').addClass('kb-data-list-obj-row-main') .append($topTable)) .append($moreRow) // show/hide ellipses on hover, show extra info on click .mouseenter(function () { $toggleAdvancedViewBtn.show(); }) .mouseleave(function () { $toggleAdvancedViewBtn.hide(); }); // Drag and drop this.addDragAndDrop($topTable); var $rowWithHr = $('<div>') .append($('<hr>') .addClass('kb-data-list-row-hr') .css({'margin-left': '65px'})) .append($row); return $rowWithHr; }, // ============= DnD ================== addDropZone: function (container, targetCell, isBelow) { var targetDiv = document.createElement('div'), self = this; targetDiv.classList.add('kb-data-list-drag-target'); targetDiv.innerHTML = '<i>drop data object here</i>'; targetDiv.addEventListener('dragover', function (e) { e.target.classList.add('-drag-active'); e.preventDefault(); }); targetDiv.addEventListener('dragenter', function (e) { e.target.classList.add('-drag-hover'); e.preventDefault(); }); targetDiv.addEventListener('dragleave', function (e) { e.target.classList.remove('-drag-hover'); e.target.classList.remove('-drag-active'); e.preventDefault(); }); targetDiv.addEventListener('drop', function (e) { var data = JSON.parse(e.dataTransfer.getData('info')), key = data.key, obj = _.findWhere(self.objectList, {key: key}), info = self.createInfoObject(obj.info), cell, cellIndex, placement; if (e.target.getAttribute('cellIs') === 'below') { cell = $(e.target.nextSibling).data().cell; placement = 'above'; } else { cell = $(e.target.previousSibling).data().cell; placement = 'below'; } cellIndex = IPython.notebook.find_cell_index(cell); $(document).trigger('createViewerCell.Narrative', { nearCellIdx: cellIndex, widget: 'kbaseNarrativeDataCell', info: info, placement: placement }); }); if (isBelow) { targetDiv.setAttribute('cellIs', 'above'); container.appendChild(targetDiv); } else { targetDiv.setAttribute('cellIs', 'below'); container.insertBefore(targetDiv, targetCell); } }, addDragAndDrop: function ($row) { var node = $row.parent().get(0), key = $row.attr('kb-oid'), obj = _.findWhere(this.objectList, {key: key}), info = this.createInfoObject(obj.info), data = { widget: 'kbaseNarrativeDataCell', info: info, key: key }, dataString = JSON.stringify(data), self = this; node.setAttribute('draggable', true); node.addEventListener('dragstart', function (e) { e.dataTransfer.dropEffect = 'copy'; e.dataTransfer.setData('info', dataString); // e.target.style.border = "3px red solid"; var targetCells = document.querySelectorAll('#notebook-container .cell'); var container = 
document.querySelector('#notebook-container'); for (var i = 0; i < targetCells.length; i += 1) { self.addDropZone(container, targetCells.item(i)); if (i === targetCells.length - 1) { self.addDropZone(container, targetCells.item(i), true); } } }); node.addEventListener('dragend', function (e) { console.log('drag ended...'); var container = document.querySelector('#notebook-container'), targetCells = document.querySelectorAll('#notebook-container .kb-data-list-drag-target'); for (var i = 0; i < targetCells.length; i += 1) { var targetCell = targetCells.item(i); container.removeChild(targetCell); } }); // Add tooltip to indicate this functionality $row.attr({ 'data-toggle': 'tooltip', 'title': 'Drag onto narrative &rarr;' }); $row.tooltip({ delay: { show: Config.get('tooltip').showDelay, hide: Config.get('tooltip').hideDelay }, placement: 'top auto', html: true, viewport: { selector: '#kb-side-panel .kb-narr-side-panel:nth-child(1) .kb-narr-panel-body', padding: 2 } }); return this; }, /** * Helper function to create named object attrs from * list of fields returned from Workspace service. */ createInfoObject: function (info) { return _.object(['id', 'name', 'type', 'save_date', 'version', 'saved_by', 'ws_id', 'ws_name', 'chsum', 'size', 'meta'], info); }, // ============= end DnD ================ insertViewer: function (key) { var self = this; var cell = IPython.notebook.get_selected_cell(); var near_idx = 0; if (cell) { near_idx = IPython.notebook.find_cell_index(cell); $(cell.element).off('dblclick'); $(cell.element).off('keydown'); } //console.log(cell, near_idx); //var cell_id = StringUtil.uuid(); //cell.rendered = false; //cell.set_text('<div id="' + cell_id + '">&nbsp;</div>'); //cell.render(); var obj = _.findWhere(self.objectList, {key: key}); var info = self.createInfoObject(obj.info); // Insert the narrative data cell into the div we just rendered //$('#' + cell_id).kbaseNarrativeDataCell({cell: cell, info: info}); self.trigger('createViewerCell.Narrative', { 'nearCellIdx': near_idx, 'widget': 'kbaseNarrativeDataCell', 'info': info }); }, renderMore: function () { var self = this; if (self.objectList) { if (!self.searchFilterOn) { // if search filter is off, then we just are showing everything var start = self.n_objs_rendered; for (var i = start; i < self.objectList.length; i++) { // only show them as we scroll to them if (self.n_objs_rendered >= start + self.options.objs_to_render_on_scroll) { break; } self.attachRow(i); } //console.log('showing '+ self.n_objs_rendered + ' of ' + self.objectList.length); } else { // search filter is on, so we have to base this on what is currently filtered var start = self.n_filteredObjsRendered; for (var i = start; i < self.currentMatch.length; i++) { // only show them as we scroll to them if (self.n_filteredObjsRendered >= start + self.options.objs_to_render_on_scroll) { break; } self.attachRowElement(self.currentMatch[i]); self.n_filteredObjsRendered++; } //console.log('showing '+ self.n_filteredObjsRendered + ' of ' + self.currentMatch.length + ' objs matching search filter'); } } }, attachRow: function (index) { var obj = this.objectList[index]; if (obj.attached) { return; } if (obj.$div) { this.$mainListDiv.append(obj.$div); } else { obj.$div = this.renderObjectRowDiv(obj.info, obj.key); this.$mainListDiv.append(obj.$div); } obj.attached = true; this.n_objs_rendered++; }, attachRowElement: function (row) { if (row.attached) { return; } // return if we are already attached if (row.$div) { this.$mainListDiv.append(row.$div); } else { row.$div 
= this.renderObjectRowDiv(row.info, row.key); this.$mainListDiv.append(row.$div); } row.attached = true; this.n_objs_rendered++; }, detachAllRows: function () { for (var i = 0; i < this.objectList.length; i++) { this.detachRow(i); } this.$mainListDiv.children().detach(); this.n_objs_rendered = 0; this.renderedAll = false; }, detachRow: function (index) { if (this.objectList[index].attached) { if (this.objectList[index].$div) { this.objectList[index].$div.detach(); } this.objectList[index].attached = false; this.n_objs_rendered--; } }, renderList: function () { var self = this; self.showLoading(); self.detachAllRows(); if (self.objectList.length > 0) { for (var i = 0; i < self.objectList.length; i++) { // only show up to the given number if (i >= self.options.objs_to_render_to_start) { self.n_objs_rendered = i; break; } // If object does not have a key, define one. // This will be used for 'id' of rendered element. // But do *not* replace an existing key. if (self.objectList[i].key == undefined) { self.objectList[i].key = StringUtil.uuid(); } self.attachRow(i); } this.$addDataButton.toggle(!(IPython.narrative && IPython.narrative.readonly === true)); } else { // todo: show an upload button or some other message if there are no elements var $noDataDiv = $('<div>') .css({'text-align': 'center', 'margin': '20pt'}) .append('This Narrative has no data yet.<br><br>'); if (IPython && IPython.narrative && !IPython.narrative.readonly) { $noDataDiv.append($("<button>") .append('Add Data') .addClass('kb-data-list-add-data-text-button') .css({'margin': '20px'}) .click(function () { self.trigger('hideGalleryPanelOverlay.Narrative'); self.trigger('toggleSidePanelOverlay.Narrative', self.options.parentControlPanel.$overlayPanel); })); } self.$mainListDiv.append($noDataDiv); } self.hideLoading(); }, renderController: function () { var self = this; var $byDate = $('<label id="nar-data-list-default-sort-label" class="btn btn-default">').addClass('btn btn-default') .append($('<input type="radio" name="options" id="nar-data-list-default-sort-option" autocomplete="off">')) .append("date") .on('click', function () { self.sortData(function (a, b) { if (a.info[3] > b.info[3]) return -1; // sort by date if (a.info[3] < b.info[3]) return 1; // sort by date return 0; }); }); var $byName = $('<label class="btn btn-default">') .append($('<input type="radio" name="options" id="option2" autocomplete="off">')) .append("name") .on('click', function () { self.sortData(function (a, b) { if (a.info[1].toUpperCase() < b.info[1].toUpperCase()) return -1; // sort by name if (a.info[1].toUpperCase() > b.info[1].toUpperCase()) return 1; return 0; }); }); var $byType = $('<label class="btn btn-default">') .append($('<input type="radio" name="options" id="option3" autocomplete="off">')) .append("type") .on('click', function () { self.sortData(function (a, b) { if (a.info[2].toUpperCase() > b.info[2].toUpperCase()) return -1; // sort by type if (a.info[2].toUpperCase() < b.info[2].toUpperCase()) return 1; return 0; }); }); var $upOrDown = $('<button class="btn btn-default btn-sm" type="button">').css({'margin-left': '5px'}) .append('<span class="glyphicon glyphicon-sort" style="color:#777" aria-hidden="true" />') .on('click', function () { self.reverseData(); }); var $sortByGroup = $('<div data-toggle="buttons">') .addClass("btn-group btn-group-sm") .css({"margin": "2px"}) .append($byDate) .append($byName) .append($byType); // var $addDataBtn = $('<button>') // .addClass("btn btn-warning kb-data-list-get-data-button") // 
.append('<span class="fa fa-plus" style="color:#fff" aria-hidden="true" /> Add Data') // .on('click',function() { // self.trigger('toggleSidePanelOverlay.Narrative'); // }); var $openSearch = $('<span>') .addClass('btn btn-xs btn-default') .tooltip({ title: 'Search data in narrative', container: 'body', delay: { show: Config.get('tooltip').showDelay, hide: Config.get('tooltip').hideDelay } }) .append('<span class="fa fa-search"></span>') .on('click', function () { if (!self.$searchDiv.is(':visible')) { self.$searchDiv.show(); self.$sortByDiv.hide(); self.$filterTypeDiv.hide(); self.$searchInput.focus(); } else { self.$searchDiv.hide(); } }); var $openSort = $('<span>') .addClass('btn btn-xs btn-default') .tooltip({ title: 'Sort data list', container: 'body', delay: { show: Config.get('tooltip').showDelay, hide: Config.get('tooltip').hideDelay } }) .append('<span class="fa fa-sort-amount-asc"></span>') .on('click', function () { if (!self.$sortByDiv.is(':visible')) { self.$sortByDiv.show(); self.$searchDiv.hide(); self.$filterTypeDiv.hide(); } else { self.$sortByDiv.hide(); } }); var $openFilter = $('<span>') .addClass('btn btn-xs btn-default') .tooltip({ title: 'Filter data by type', container: 'body', delay: { show: Config.get('tooltip').showDelay, hide: Config.get('tooltip').hideDelay } }) .append('<span class="fa fa-filter"></span>') .on('click', function () { if (!self.$filterTypeDiv.is(':visible')) { self.$filterTypeDiv.show(); self.$sortByDiv.hide(); self.$searchDiv.hide(); } else { self.$filterTypeDiv.hide(); } }); var $refreshBtn = $('<span>') .addClass('btn btn-xs btn-default') .tooltip({ title: 'Refresh data list', container: 'body', delay: { show: Config.get('tooltip').showDelay, hide: Config.get('tooltip').hideDelay } }) .append('<span class="glyphicon glyphicon-refresh"></span>') .on('click', function () { self.refresh(); }); self.$searchInput = $('<input type="text">') .addClass('form-control') .on('focus', function () { if (IPython && IPython.narrative) { IPython.narrative.disableKeyboardManager(); } }) .on('blur', function () { if (IPython && IPython.narrative) { IPython.narraive.enableKeyboardManager(); } }); self.$searchDiv = $('<div>').addClass("input-group").css({'margin-bottom': '10px'}) .append(self.$searchInput) .append($("<span>").addClass("input-group-addon") .append($("<span>") .addClass("glyphicon glyphicon-search") .css({'cursor': 'pointer'}) .on('click', function () { self.search(); }))); self.$sortByDiv = $('<div>').css({'margin': '3px', 'margin-left': '5px', 'margin-bottom': '10px'}) .append("<small>sort by: </small>") .append($sortByGroup) .append($upOrDown); self.$filterTypeSelect = $('<select>').addClass("form-control") .append($('<option value="">')) .change(function () { var optionSelected = $(this).find("option:selected"); var typeSelected = optionSelected.val(); // whenever we change the type filter, we need to clear the current match // so that the complete filter can rerun self.currentMatch = self.objectList; self.filterByType(typeSelected); }); self.$filterTypeDiv = $('<div>').css({'margin': '3px', 'margin-left': '5px', 'margin-bottom': '10px'}) .append(self.$filterTypeSelect); var $header = $('<div>'); if (self.options.parentControlPanel) { self.options.parentControlPanel.addButtonToControlPanel($openSearch); self.options.parentControlPanel.addButtonToControlPanel($openSort); self.options.parentControlPanel.addButtonToControlPanel($openFilter); self.options.parentControlPanel.addButtonToControlPanel($refreshBtn); } else { 
$header.addClass('row').css({'margin': '5px'}) .append($('<div>').addClass('col-xs-12').css({'margin': '0px', 'padding': '0px', 'text-align': 'right'}) .append($openSearch) .append($openSort) .append($openFilter)) } self.$sortByDiv.hide(); self.$searchDiv.hide(); self.$filterTypeDiv.hide(); var $filterDiv = $('<div>') .append(self.$sortByDiv) .append(self.$searchDiv) .append(self.$filterTypeDiv); self.$controllerDiv.append($header).append($filterDiv); }, populateAvailableTypes: function () { var self = this; if (self.availableTypes && self.$filterTypeSelect) { var types = []; for (var type in self.availableTypes) { if (self.availableTypes.hasOwnProperty(type)) { types.push(type); } } types.sort(); self.$filterTypeSelect.empty(); var runningCount = 0; for (var i = 0; i < types.length; i++) { runningCount += self.availableTypes[types[i]].count; var countStr = ''; if (self.availableTypes[types[i]].count == 1) { countStr = " (".concat(self.availableTypes[types[i]].count).concat(" object)"); } else { countStr = " (".concat(self.availableTypes[types[i]].count).concat(" objects)"); } self.$filterTypeSelect.append( $('<option value="' + self.availableTypes[types[i]].type + '">') .append(self.availableTypes[types[i]].type + countStr)); } if (runningCount == 1) { self.$filterTypeSelect.prepend($('<option value="">').append("Show All Types (" + runningCount + " object)")); } else { self.$filterTypeSelect.prepend($('<option value="">').append("Show All Types (" + runningCount + " objects)")); } self.$filterTypeSelect.val(""); } }, reverseData: function () { var self = this; if (!self.objectList) { return; } self.objectList.reverse(); self.renderList(); self.search(); self.hideLoading(); }, sortData: function (sortfunction) { var self = this; if (!self.objectList) { return; } //should add spinning wait bar .... self.showLoading(); self.objectList.sort(sortfunction); self.renderList(); self.search(); // always refilter on the search term search if there is something there self.hideLoading(); // go back to the top on sort self.$mainListDiv.animate({ scrollTop: 0 }, 300); // fast = 200, slow = 600 }, currentMatch: [], currentTerm: '', searchFilterOn: false, n_filteredObjsRendered: null, search: function (term, type) { var self = this; if (!self.objectList) { return; } if (!term && self.$searchInput) { term = self.$searchInput.val(); } // if type wasn't selected, then we try to get something that was set if (!type) { if (self.$filterTypeSelect) { type = self.$filterTypeSelect.find("option:selected").val(); } } term = term.trim(); if (term.length > 0 || type) { self.searchFilterOn = true; // todo: should show searching indicator (could take several seconds if there is a lot of data) // optimization => we filter existing matches instead of researching everything if the new // term starts with the last term searched for var newMatch = []; if (!self.currentTerm) { // reset if currentTerm is null or empty self.currentMatch = self.objectList; } else { if (term.indexOf(self.currentTerm) !== 0) { self.currentMatch = self.objectList; } } // clean the term for regex use term = term.replace(/\|/g, '\\|').replace(/\\\\\|/g, '|'); // bars are common in kb ids, so escape them unless we have \\| term = term.replace(/\./g, '\\.').replace(/\\\\\./g, '.'); // dots are common in names, so we escape them, but // if a user writes '\\.' we assume they want the regex '.' 
var regex = new RegExp(term, 'i'); var n_matches = 0; self.n_filteredObjsRendered = 0; for (var k = 0; k < self.currentMatch.length; k++) { // [0] : obj_id objid // [1] : obj_name name // [2] : type_string type // [3] : timestamp save_date // [4] : int version // [5] : username saved_by // [6] : ws_id wsid // [7] : ws_name workspace // [8] : string chsum // [9] : int size // [10] : usermeta meta var match = false; var info = self.currentMatch[k].info; if (regex.test(info[1])) { match = true; } // match on name else if (regex.test(info[2].split('.')[1].split('-'))) { match = true; } // match on type name else if (regex.test(info[5])) { match = true; } // match on saved_by user if (!match && info[10]) { // match on metadata values for (var metaKey in info[10]) { if (info[10].hasOwnProperty(metaKey)) { if (regex.test(info[10][metaKey])) { match = true; break; } else if (regex.test(metaKey + "::" + info[10][metaKey])) { match = true; break; } } } } if (type) { // if type is defined, then our sort must also filter by the type if (type !== info[2].split('-')[0].split('.')[1]) { match = false; // no match if we are not the selected type! } } if (match) { // matches must always switch to show if they are rendered if (self.currentMatch[k].$div) { self.currentMatch[k].$div.show(); } // todo: add check so we only show up to the number we render... switching to this will require that // we revise the renderMore logic... if (n_matches < self.options.objs_to_render_to_start) { self.attachRowElement(self.currentMatch[k]); self.n_filteredObjsRendered++; } newMatch.push(self.currentMatch[k]); n_matches++; } else { if (self.currentMatch[k].$div) { self.currentMatch[k].$div.hide(); } } } self.currentMatch = newMatch; // update the current match } else { self.searchFilterOn = false; // no new search, so show all and render the list for (var k = 0; k < self.objectList.length; k++) { if (self.objectList[k].$div) { self.objectList[k].$div.show(); } } self.renderList(); } self.currentTerm = term; }, filterByType: function (type) { var self = this; self.search(null, type); }, getRichData: function (object_info, $moreRow) { var self = this; var $usernameTd = $moreRow.find(".kb-data-list-username-td"); DisplayUtil.displayRealName(object_info[5], $usernameTd); }, showLoading: function () { this.$loadingDiv.show(); this.$mainListDiv.hide(); }, hideLoading: function () { this.$loadingDiv.hide(); this.$mainListDiv.show(); }, /** * @method loggedInCallback * This is associated with the login widget (through the kbaseAuthenticatedWidget parent) and * is triggered when a login event occurs. * It associates the new auth token with this widget and refreshes the data panel. * @private */ loggedInCallback: function (event, auth) { console.log('DataList: loggedInCallback'); this.ws = new Workspace(this.options.ws_url, auth); this.my_user_id = auth.user_id; this.isLoggedIn = true; this.refresh(); return this; }, /** * @method loggedOutCallback * Like the loggedInCallback, this is triggered during a logout event (through the login widget). 
* It throws away the auth token and workspace client, and refreshes the widget * @private */ loggedOutCallback: function (event, auth) { this.ws = null; this.isLoggedIn = false; this.my_user_id = null; if (this.ws_name) this.refresh(); return this; }, // edited from: http://stackoverflow.com/questions/3177836/how-to-format-time-since-xxx-e-g-4-minutes-ago-similar-to-stack-exchange-site getTimeStampStr: function (objInfoTimeStamp) { var date = new Date(objInfoTimeStamp); var seconds = Math.floor((new Date() - date) / 1000); // f-ing safari, need to add extra ':' delimiter to parse the timestamp if (isNaN(seconds)) { var tokens = objInfoTimeStamp.split('+'); // this is just the date without the GMT offset var newTimestamp = tokens[0] + '+' + tokens[0].substr(0, 2) + ":" + tokens[1].substr(2, 2); date = new Date(newTimestamp); seconds = Math.floor((new Date() - date) / 1000); if (isNaN(seconds)) { // just in case that didn't work either, then parse without the timezone offset, but // then just show the day and forget the fancy stuff... date = new Date(tokens[0]); return this.monthLookup[date.getMonth()] + " " + date.getDate() + ", " + date.getFullYear(); } } var interval = Math.floor(seconds / 31536000); if (interval > 1) { return this.monthLookup[date.getMonth()] + " " + date.getDate() + ", " + date.getFullYear(); } interval = Math.floor(seconds / 2592000); if (interval > 1) { if (interval < 4) { return interval + " months ago"; } else { return this.monthLookup[date.getMonth()] + " " + date.getDate() + ", " + date.getFullYear(); } } interval = Math.floor(seconds / 86400); if (interval > 1) { return interval + " days ago"; } interval = Math.floor(seconds / 3600); if (interval > 1) { return interval + " hours ago"; } interval = Math.floor(seconds / 60); if (interval > 1) { return interval + " minutes ago"; } return Math.floor(seconds) + " seconds ago"; }, monthLookup: ["Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"], }) });
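The widget's getTimeStampStr method above buckets an object's save date into a human-readable "time ago" string. Below is a minimal standalone sketch of that bucketing idea, assuming nothing beyond a parseable timestamp; it deliberately omits the widget's Safari timezone workaround and its month-based cutoff details, and the helper name timeAgo is illustrative, not part of the widget.

// Illustrative sketch only: rough "time ago" formatting in the spirit of getTimeStampStr.
var MONTH_NAMES = ["Jan", "Feb", "Mar", "Apr", "May", "Jun",
                   "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"];

function timeAgo(timestamp) {
    var date = new Date(timestamp);
    var seconds = Math.floor((Date.now() - date.getTime()) / 1000);
    if (isNaN(seconds)) {
        // Unparseable input: echo it back rather than guessing.
        return String(timestamp);
    }
    if (seconds >= 30 * 24 * 60 * 60) {
        // Older than roughly a month: fall back to an absolute date.
        return MONTH_NAMES[date.getMonth()] + " " + date.getDate() + ", " + date.getFullYear();
    }
    if (seconds >= 24 * 60 * 60) { return Math.floor(seconds / (24 * 60 * 60)) + " days ago"; }
    if (seconds >= 60 * 60) { return Math.floor(seconds / (60 * 60)) + " hours ago"; }
    if (seconds >= 60) { return Math.floor(seconds / 60) + " minutes ago"; }
    return seconds + " seconds ago";
}

// e.g. timeAgo(Date.now() - 5 * 60 * 1000) -> "5 minutes ago"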
Converted IPython->Jupyter refs in data list.
kbase-extension/static/kbase/js/widgets/narrative_core/kbaseNarrativeDataList.js
Converted IPython->Jupyter refs in data list.
<ide><path>base-extension/static/kbase/js/widgets/narrative_core/kbaseNarrativeDataList.js <del>/*global define,IPython*/ <add>/*global define,Jupyter*/ <ide> /*jslint white: true*/ <ide> /** <ide> * @author Michael Sneddon <[email protected]> <ide> .addClass('form-control') <ide> .val(object_info[1]) <ide> .on('focus', function () { <del> if (IPython && IPython.narrative) { <del> IPython.narrative.disableKeyboardManager(); <add> if (Jupyter && Jupyter.narrative) { <add> Jupyter.narrative.disableKeyboardManager(); <ide> } <ide> }) <ide> .on('blur', function () { <del> if (IPython && IPython.narrative) { <del> IPython.narrative.enableKeyboardManager(); <add> if (Jupyter && Jupyter.narrative) { <add> Jupyter.narrative.enableKeyboardManager(); <ide> } <ide> }); <ide> $alertContainer.append($('<div>') <ide> }))); <ide> }); <ide> <del> if (!IPython.narrative.readonly) { <add> if (!Jupyter.narrative.readonly) { <ide> $btnToolbar.append($filterMethodInput) <ide> .append($filterMethodOutput); <ide> } <ide> $btnToolbar.append($openLandingPage); <del> if (!IPython.narrative.readonly) <add> if (!Jupyter.narrative.readonly) <ide> $btnToolbar.append($openHistory); <ide> $btnToolbar.append($openProvenance); <del> if (!IPython.narrative.readonly) { <add> if (!Jupyter.narrative.readonly) { <ide> $btnToolbar.append($download) <ide> .append($rename) <ide> .append($delete); <ide> cell = $(e.target.previousSibling).data().cell; <ide> placement = 'below'; <ide> } <del> cellIndex = IPython.notebook.find_cell_index(cell); <add> cellIndex = Jupyter.notebook.find_cell_index(cell); <ide> <ide> $(document).trigger('createViewerCell.Narrative', { <ide> nearCellIdx: cellIndex, <ide> <ide> insertViewer: function (key) { <ide> var self = this; <del> var cell = IPython.notebook.get_selected_cell(); <add> var cell = Jupyter.notebook.get_selected_cell(); <ide> var near_idx = 0; <ide> if (cell) { <del> near_idx = IPython.notebook.find_cell_index(cell); <add> near_idx = Jupyter.notebook.find_cell_index(cell); <ide> $(cell.element).off('dblclick'); <ide> $(cell.element).off('keydown'); <ide> } <ide> } <ide> self.attachRow(i); <ide> } <del> this.$addDataButton.toggle(!(IPython.narrative && IPython.narrative.readonly === true)); <add> this.$addDataButton.toggle(!(Jupyter.narrative && Jupyter.narrative.readonly === true)); <ide> } else { <del> // todo: show an upload button or some other message if there are no elements <ide> var $noDataDiv = $('<div>') <ide> .css({'text-align': 'center', 'margin': '20pt'}) <ide> .append('This Narrative has no data yet.<br><br>'); <del> if (IPython && IPython.narrative && !IPython.narrative.readonly) { <add> if (Jupyter && Jupyter.narrative && !Jupyter.narrative.readonly) { <ide> $noDataDiv.append($("<button>") <ide> .append('Add Data') <ide> .addClass('kb-data-list-add-data-text-button') <ide> self.$searchInput = $('<input type="text">') <ide> .addClass('form-control') <ide> .on('focus', function () { <del> if (IPython && IPython.narrative) { <del> IPython.narrative.disableKeyboardManager(); <add> if (Jupyter && Jupyter.narrative) { <add> Jupyter.narrative.disableKeyboardManager(); <ide> } <ide> }) <ide> .on('blur', function () { <del> if (IPython && IPython.narrative) { <del> IPython.narraive.enableKeyboardManager(); <add> if (Jupyter && Jupyter.narrative) { <add> Jupyter.narraive.enableKeyboardManager(); <ide> } <ide> }); <ide> self.$searchDiv = $('<div>').addClass("input-group").css({'margin-bottom': '10px'})
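The diff above is a mechanical rename of the IPython notebook global to Jupyter (note that one of the added lines carries a pre-existing "Jupyter.narraive" typo, which would throw at runtime if that blur handler ever fires). A defensive alternative — not what this commit does; the shim name nbGlobal and the helper setKeyboardManagerEnabled are made up for illustration — is to resolve whichever global exists once and guard every call:

// Hypothetical compatibility shim; this is NOT the approach taken in the commit above,
// it only illustrates an alternative to a global search-and-replace.
var nbGlobal = (typeof Jupyter !== 'undefined' && Jupyter) ||
               (typeof IPython !== 'undefined' && IPython) ||
               null;

function setKeyboardManagerEnabled(enabled) {
    // Only touch the notebook keyboard manager when the narrative API is actually present.
    if (!nbGlobal || !nbGlobal.narrative) { return; }
    if (enabled) {
        nbGlobal.narrative.enableKeyboardManager();
    } else {
        nbGlobal.narrative.disableKeyboardManager();
    }
}

// Usage mirroring the focus/blur handlers in the widget:
// $input.on('focus', function () { setKeyboardManagerEnabled(false); })
//       .on('blur',  function () { setKeyboardManagerEnabled(true); });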
Java
epl-1.0
6d81c53327c80419d1d8c94716d5151d10dc8de0
0
theanuradha/debrief,theanuradha/debrief,theanuradha/debrief,debrief/debrief,theanuradha/debrief,debrief/debrief,debrief/debrief,debrief/debrief,debrief/debrief,theanuradha/debrief,theanuradha/debrief,debrief/debrief,theanuradha/debrief
/* * Debrief - the Open Source Maritime Analysis Application * http://debrief.info * * (C) 2000-2014, PlanetMayo Ltd * * This library is free software; you can redistribute it and/or * modify it under the terms of the Eclipse Public License v1.0 * (http://www.eclipse.org/legal/epl-v10.html) * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. */ package org.mwc.cmap.core.preferences; import org.eclipse.jface.preference.*; import org.eclipse.ui.*; import org.mwc.cmap.core.CorePlugin; import Debrief.ReaderWriter.Replay.ImportReplay; import Debrief.Tools.Tote.Calculations.relBearingCalc; import MWC.GUI.Properties.UnitsPropertyEditor; /** * This class represents a preference page that is contributed to the * Preferences dialog. By subclassing <samp>FieldEditorPreferencePage</samp>, * we can use the field support built into JFace that allows us to create a page * that is small and knows how to save, restore and apply itself. * <p> * This page is used to modify preferences only. They are stored in the * preference store that belongs to the main plug-in class. That way, * preferences can be accessed directly via the preference store. */ public class CMAPPrefsPage extends FieldEditorPreferencePage implements IWorkbenchPreferencePage { public static final String PREFS_PAGE_ID = "org.mwc.cmap.core.preferences.CMAPPrefsPage"; public CMAPPrefsPage() { super(GRID); setPreferenceStore(CorePlugin.getDefault().getPreferenceStore()); setDescription("Settings applicable to Maritime Analysis"); } /** the tags and labels to use in the range units editor * */ private static String[][] _distanceUnitTags; /** the tags and labels to use in the relative bearing format selector * */ private static String[][] _relBearingTags; /** the options for what to do when importing a track * */ private static String[][] _trackModeTags; /** * Creates the field editors. Field editors are abstractions of the common GUI * blocks needed to manipulate various types of preferences. Each field editor * knows how to save and restore itself. 
*/ public synchronized void createFieldEditors() { // initialise the units tags, if we have to if (_distanceUnitTags == null) { // get the unit types final UnitsPropertyEditor units = new UnitsPropertyEditor(); final String[] tags = units.getTags(); _distanceUnitTags = new String[tags.length][2]; for (int i = 0; i < tags.length; i++) { _distanceUnitTags[i][0] = tags[i]; _distanceUnitTags[i][1] = tags[i]; } } // initialise the units tags, if we have to if (_relBearingTags == null) { _relBearingTags = new String[2][2]; _relBearingTags[0][0] = "UK format (R180..G180)"; _relBearingTags[0][1] = relBearingCalc.UK_REL_BEARING_FORMAT; _relBearingTags[1][0] = "US format (0..360)"; _relBearingTags[1][1] = relBearingCalc.US_REL_BEARING_FORMAT; } // initialise the import choice tags, if we have to if (_trackModeTags == null) { _trackModeTags = new String[3][2]; _trackModeTags[0][0] = "DR Track"; _trackModeTags[0][1] = ImportReplay.IMPORT_AS_DR; _trackModeTags[1][0] = "OTG Track"; _trackModeTags[1][1] = ImportReplay.IMPORT_AS_OTG; _trackModeTags[2][0] = "Ask user"; _trackModeTags[2][1] = ImportReplay.ASK_THE_AUDIENCE; } addField(new RadioGroupFieldEditor(PreferenceConstants.IMPORT_MODE, "Default &track import mode: (use 'Ask user' to allow specification of import frequency)", 1, _trackModeTags, getFieldEditorParent())); addField(new RadioGroupFieldEditor(PreferenceConstants.RNG_UNITS, "Default &range units:", 1, _distanceUnitTags, getFieldEditorParent())); addField(new RadioGroupFieldEditor(PreferenceConstants.REL_BEARING_FORMAT, "Relative &bearing format:", 1, _relBearingTags, getFieldEditorParent())); } /* * (non-Javadoc) * * @see org.eclipse.ui.IWorkbenchPreferencePage#init(org.eclipse.ui.IWorkbench) */ public void init(final IWorkbench workbench) { } /** * Constant definitions for plug-in preferences */ public static class PreferenceConstants { public static final String RNG_UNITS = MWC.GUI.Properties.UnitsPropertyEditor.UNITS_PROPERTY; public static final String REL_BEARING_FORMAT = relBearingCalc.REL_BEARING_FORMAT; public static final String IMPORT_MODE = ImportReplay.TRACK_IMPORT_MODE; } }
org.mwc.cmap.core/src/org/mwc/cmap/core/preferences/CMAPPrefsPage.java
/* * Debrief - the Open Source Maritime Analysis Application * http://debrief.info * * (C) 2000-2014, PlanetMayo Ltd * * This library is free software; you can redistribute it and/or * modify it under the terms of the Eclipse Public License v1.0 * (http://www.eclipse.org/legal/epl-v10.html) * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. */ package org.mwc.cmap.core.preferences; import org.eclipse.jface.preference.*; import org.eclipse.ui.*; import org.mwc.cmap.core.CorePlugin; import Debrief.ReaderWriter.Replay.ImportReplay; import Debrief.Tools.Tote.Calculations.relBearingCalc; import MWC.GUI.Properties.UnitsPropertyEditor; /** * This class represents a preference page that is contributed to the * Preferences dialog. By subclassing <samp>FieldEditorPreferencePage</samp>, * we can use the field support built into JFace that allows us to create a page * that is small and knows how to save, restore and apply itself. * <p> * This page is used to modify preferences only. They are stored in the * preference store that belongs to the main plug-in class. That way, * preferences can be accessed directly via the preference store. */ public class CMAPPrefsPage extends FieldEditorPreferencePage implements IWorkbenchPreferencePage { public static final String PREFS_PAGE_ID = "org.mwc.cmap.core.preferences.CMAPPrefsPage"; public CMAPPrefsPage() { super(GRID); setPreferenceStore(CorePlugin.getDefault().getPreferenceStore()); setDescription("Settings applicable to Maritime Analysis"); } /** the tags and labels to use in the range units editor * */ private static String[][] _distanceUnitTags; /** the tags and labels to use in the relative bearing format selector * */ private static String[][] _relBearingTags; /** the options for what to do when importing a track * */ private static String[][] _trackModeTags; /** * Creates the field editors. Field editors are abstractions of the common GUI * blocks needed to manipulate various types of preferences. Each field editor * knows how to save and restore itself. 
*/ public synchronized void createFieldEditors() { // initialise the units tags, if we have to if (_distanceUnitTags == null) { // get the unit types final UnitsPropertyEditor units = new UnitsPropertyEditor(); final String[] tags = units.getTags(); _distanceUnitTags = new String[tags.length][2]; for (int i = 0; i < tags.length; i++) { _distanceUnitTags[i][0] = tags[i]; _distanceUnitTags[i][1] = tags[i]; } } // initialise the units tags, if we have to if (_relBearingTags == null) { _relBearingTags = new String[2][2]; _relBearingTags[0][0] = "UK format (R180..G180)"; _relBearingTags[0][1] = relBearingCalc.UK_REL_BEARING_FORMAT; _relBearingTags[1][0] = "US format (0..360)"; _relBearingTags[1][1] = relBearingCalc.US_REL_BEARING_FORMAT; } // initialise the import choice tags, if we have to if (_trackModeTags == null) { _trackModeTags = new String[3][2]; _trackModeTags[0][0] = "DR Track"; _trackModeTags[0][1] = ImportReplay.IMPORT_AS_DR; _trackModeTags[1][0] = "OTG Track"; _trackModeTags[1][1] = ImportReplay.IMPORT_AS_OTG; _trackModeTags[2][0] = "Ask user"; _trackModeTags[2][1] = ImportReplay.ASK_THE_AUDIENCE; } addField(new RadioGroupFieldEditor(PreferenceConstants.IMPORT_MODE, "Default &track import mode:", 1, _trackModeTags, getFieldEditorParent())); addField(new RadioGroupFieldEditor(PreferenceConstants.RNG_UNITS, "Default &range units:", 1, _distanceUnitTags, getFieldEditorParent())); addField(new RadioGroupFieldEditor(PreferenceConstants.REL_BEARING_FORMAT, "Relative &bearing format:", 1, _relBearingTags, getFieldEditorParent())); } /* * (non-Javadoc) * * @see org.eclipse.ui.IWorkbenchPreferencePage#init(org.eclipse.ui.IWorkbench) */ public void init(final IWorkbench workbench) { } /** * Constant definitions for plug-in preferences */ public static class PreferenceConstants { public static final String RNG_UNITS = MWC.GUI.Properties.UnitsPropertyEditor.UNITS_PROPERTY; public static final String REL_BEARING_FORMAT = relBearingCalc.REL_BEARING_FORMAT; public static final String IMPORT_MODE = ImportReplay.TRACK_IMPORT_MODE; } }
user support
org.mwc.cmap.core/src/org/mwc/cmap/core/preferences/CMAPPrefsPage.java
user support
<ide><path>rg.mwc.cmap.core/src/org/mwc/cmap/core/preferences/CMAPPrefsPage.java <ide> <ide> <ide> addField(new RadioGroupFieldEditor(PreferenceConstants.IMPORT_MODE, <del> "Default &track import mode:", 1, _trackModeTags, getFieldEditorParent())); <add> "Default &track import mode: (use 'Ask user' to allow specification of import frequency)", 1, _trackModeTags, getFieldEditorParent())); <ide> <ide> addField(new RadioGroupFieldEditor(PreferenceConstants.RNG_UNITS, <ide> "Default &range units:", 1, _distanceUnitTags, getFieldEditorParent()));
Java
mit
bdb9040439a965b90b7c7c6aefbed1e9579a6690
0
PrinceOfAmber/CyclicMagic,PrinceOfAmber/Cyclic
/******************************************************************************* * The MIT License (MIT) * * Copyright (C) 2014-2018 Sam Bassett (aka Lothrazar) * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. ******************************************************************************/ package com.lothrazar.cyclicmagic.block.cablewireless.content; import java.io.IOException; import com.lothrazar.cyclicmagic.ModCyclic; import com.lothrazar.cyclicmagic.block.cablewireless.energy.TileCableEnergyWireless; import com.lothrazar.cyclicmagic.data.BlockPosDim; import com.lothrazar.cyclicmagic.gui.core.GuiBaseContainer; import com.lothrazar.cyclicmagic.gui.core.GuiButtonTooltip; import com.lothrazar.cyclicmagic.item.location.ItemLocation; import com.lothrazar.cyclicmagic.util.Const; import com.lothrazar.cyclicmagic.util.Const.ScreenSize; import com.lothrazar.cyclicmagic.util.UtilChat; import net.minecraft.block.Block; import net.minecraft.client.gui.Gui; import net.minecraft.client.gui.GuiButton; import net.minecraft.entity.player.EntityPlayer; import net.minecraft.entity.player.InventoryPlayer; import net.minecraft.util.math.BlockPos; public class GuiCableContentWireless extends GuiBaseContainer { public GuiCableContentWireless(InventoryPlayer inventoryPlayer, TileCableContentWireless te) { super(new ContainerCableContentWireless(inventoryPlayer, te), te); this.setScreenSize(ScreenSize.LARGE); this.fieldRedstoneBtn = TileCableContentWireless.Fields.REDSTONE.ordinal(); int xCenter = this.getScreenSize().width() / 2; } @Override public void initGui() { super.initGui(); int x; int y = 106; int size = Const.SQ; GuiButtonTooltip btnSize; for (int i = 1; i < TileCableContentWireless.SLOT_COUNT; i++) { btnSize = new GuiButtonTooltip(i, this.guiLeft + (i - 1) * (size) + 8, this.guiTop + y, size, size, "?"); btnSize.setTooltip("wireless.target"); // btnSize.height = 14; this.addButton(btnSize); } } @Override protected void actionPerformed(GuiButton button) throws IOException { if (button.id != redstoneBtn.id) { //TODO: DIMENSION EntityPlayer player = ModCyclic.proxy.getClientPlayer(); BlockPosDim dim = ItemLocation.getPosition(tile.getStackInSlot(button.id)); if (dim == null) { UtilChat.addChatMessage(player, "wireless.empty"); } else if (dim.getDimension() != player.dimension) { UtilChat.addChatMessage(player, "wireless.dimension"); } else { BlockPos target = dim.toBlockPos(); if (tile.getWorld().isAreaLoaded(target, target.up())) { //get target Block block = tile.getWorld().getBlockState(target).getBlock(); 
UtilChat.addChatMessage(player, block.getLocalizedName()); } else { UtilChat.addChatMessage(player, "wireless.unloaded"); } } } } @Override protected void drawGuiContainerBackgroundLayer(float partialTicks, int mouseX, int mouseY) { super.drawGuiContainerBackgroundLayer(partialTicks, mouseX, mouseY); int u = 0, v = 0, x, y; this.mc.getTextureManager().bindTexture(Const.Res.SLOT_LARGE); // item transfer slot x = this.guiLeft + 142; y = this.guiTop + 38; int s = Const.SQ + 4; Gui.drawModalRectWithCustomSizedTexture( x, y, u, v, s, s, s, s); //now draw target location card slots this.mc.getTextureManager().bindTexture(Const.Res.SLOT_GPS); x = this.guiLeft + 8; y = this.guiTop + 86; for (int i = 0; i < TileCableEnergyWireless.SLOT_COUNT; i++) { Gui.drawModalRectWithCustomSizedTexture(// this is for item transfer x, y, u, v, Const.SQ, Const.SQ, Const.SQ, Const.SQ); x += Const.SQ; } } }
src/main/java/com/lothrazar/cyclicmagic/block/cablewireless/content/GuiCableContentWireless.java
/******************************************************************************* * The MIT License (MIT) * * Copyright (C) 2014-2018 Sam Bassett (aka Lothrazar) * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. ******************************************************************************/ package com.lothrazar.cyclicmagic.block.cablewireless.content; import java.io.IOException; import com.lothrazar.cyclicmagic.ModCyclic; import com.lothrazar.cyclicmagic.block.cablewireless.energy.TileCableEnergyWireless; import com.lothrazar.cyclicmagic.data.BlockPosDim; import com.lothrazar.cyclicmagic.gui.core.GuiBaseContainer; import com.lothrazar.cyclicmagic.gui.core.GuiButtonTooltip; import com.lothrazar.cyclicmagic.item.location.ItemLocation; import com.lothrazar.cyclicmagic.util.Const; import com.lothrazar.cyclicmagic.util.Const.ScreenSize; import com.lothrazar.cyclicmagic.util.UtilChat; import net.minecraft.block.Block; import net.minecraft.client.gui.Gui; import net.minecraft.client.gui.GuiButton; import net.minecraft.entity.player.EntityPlayer; import net.minecraft.entity.player.InventoryPlayer; import net.minecraft.util.math.BlockPos; public class GuiCableContentWireless extends GuiBaseContainer { public GuiCableContentWireless(InventoryPlayer inventoryPlayer, TileCableContentWireless te) { super(new ContainerCableContentWireless(inventoryPlayer, te), te); this.setScreenSize(ScreenSize.LARGE); this.fieldRedstoneBtn = TileCableContentWireless.Fields.REDSTONE.ordinal(); int xCenter = this.getScreenSize().width() / 2; } @Override public void initGui() { super.initGui(); int x; int y = 106; int size = Const.SQ; GuiButtonTooltip btnSize; for (int i = 0; i < TileCableContentWireless.SLOT_COUNT - 1; i++) { btnSize = new GuiButtonTooltip(i, this.guiLeft + i * (size) + 8, this.guiTop + y, size, size, "?"); btnSize.setTooltip("wireless.target"); // btnSize.height = 14; this.addButton(btnSize); } } @Override protected void actionPerformed(GuiButton button) throws IOException { if (button.id != redstoneBtn.id) { //TODO: DIMENSION EntityPlayer player = ModCyclic.proxy.getClientPlayer(); BlockPosDim dim = ItemLocation.getPosition(tile.getStackInSlot(button.id)); if (dim == null) { UtilChat.addChatMessage(player, "wireless.empty"); } else if (dim.getDimension() != player.dimension) { UtilChat.addChatMessage(player, "wireless.dimension"); } else { BlockPos target = dim.toBlockPos(); if (tile.getWorld().isAreaLoaded(target, target.up())) { //get target Block block = tile.getWorld().getBlockState(target).getBlock(); 
UtilChat.addChatMessage(player, block.getLocalizedName()); } else { UtilChat.addChatMessage(player, "wireless.unloaded"); } } } } @Override protected void drawGuiContainerBackgroundLayer(float partialTicks, int mouseX, int mouseY) { super.drawGuiContainerBackgroundLayer(partialTicks, mouseX, mouseY); int u = 0, v = 0, x, y; this.mc.getTextureManager().bindTexture(Const.Res.SLOT_LARGE); // item transfer slot x = this.guiLeft + 142; y = this.guiTop + 38; int s = Const.SQ + 4; Gui.drawModalRectWithCustomSizedTexture( x, y, u, v, s, s, s, s); //now draw target location card slots this.mc.getTextureManager().bindTexture(Const.Res.SLOT_GPS); x = this.guiLeft + 8; y = this.guiTop + 86; for (int i = 0; i < TileCableEnergyWireless.SLOT_COUNT; i++) { Gui.drawModalRectWithCustomSizedTexture(// this is for item transfer x, y, u, v, Const.SQ, Const.SQ, Const.SQ, Const.SQ); x += Const.SQ; } } }
1.18.0 darkphan off by one node issue
src/main/java/com/lothrazar/cyclicmagic/block/cablewireless/content/GuiCableContentWireless.java
1.18.0 darkphan off by one node issue
<ide><path>rc/main/java/com/lothrazar/cyclicmagic/block/cablewireless/content/GuiCableContentWireless.java <ide> int y = 106; <ide> int size = Const.SQ; <ide> GuiButtonTooltip btnSize; <del> for (int i = 0; i < TileCableContentWireless.SLOT_COUNT - 1; i++) { <add> for (int i = 1; i < TileCableContentWireless.SLOT_COUNT; i++) { <ide> btnSize = new GuiButtonTooltip(i, <del> this.guiLeft + i * (size) + 8, <add> this.guiLeft + (i - 1) * (size) + 8, <ide> this.guiTop + y, size, size, "?"); <ide> btnSize.setTooltip("wireless.target"); <ide> // btnSize.height = 14;
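The fix above shifts the location-card buttons so their IDs run from 1 to SLOT_COUNT - 1, presumably keeping button.id in line with the slot index that actionPerformed later reads via tile.getStackInSlot(button.id), while the on-screen x offset still starts at the left edge by using (i - 1). A tiny sketch of that indexing pattern, written in JavaScript only for consistency with the rest of this document (the original is Java); layoutSlotButtons and makeButton are illustrative names:

// Illustrative only: ids are 1..slotCount-1 so each button's id matches the slot it
// targets, while the layout stays zero-based via (i - 1).
function layoutSlotButtons(slotCount, slotSize, leftEdge, makeButton) {
    var buttons = [];
    for (var i = 1; i < slotCount; i++) {
        buttons.push(makeButton({
            id: i,                                // id doubles as the slot index read on click
            x: leftEdge + (i - 1) * slotSize + 8  // first button still sits at the left edge
        }));
    }
    return buttons;
}

// e.g. layoutSlotButtons(5, 18, 0, function (b) { return b; })
//      -> ids [1, 2, 3, 4] at x offsets [8, 26, 44, 62]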
JavaScript
mit
2170216880ba4ddc0a7238d627e55e05020dd2bf
0
luislobo/waterline,balderdashy/waterline
/** * Module dependencies */ var util = require('util'); var _ = require('@sailshq/lodash'); var async = require('async'); var forgeStageThreeQuery = require('./forge-stage-three-query'); var InMemoryJoin = require('./in-memory-join'); var transformPopulatedChildRecords = require('./transform-populated-child-records'); /** * helpFind() * * Given a stage 2 "find" or "findOne" query, build and execute a sequence * of generated stage 3 queries (ska "find" operations)-- and then run them. * If disparate data sources need to be used, then perform in-memory joins * as needed. Afterwards, transform the normalized result set into an array * of records, and (potentially) populate them. * * > Fun facts: * > • This file is sometimes informally known as the "operations runner". * > • If particlebanana and mikermcneil were trees and you chopped us down, * > the months in 2013-2016 we spent figuring out the original implementation * > of the code in this file & the integrator would be a charred, necrotic * > ring that imparts frostbite when touched. * > • This is used for `.find()` and `.findOne()` queries. * > • It's a key piece of the puzzle when it comes to populating records in a * > cross-datastore/adapter (xD/A) fashion. * * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - * * @param {Ref} WLModel * The live Waterline model. * * @param {Dictionary} s2q * Stage two query. * * @param {Function} done * @param {Error?} err [if an error occured] * @param {Array} records * * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */ module.exports = function helpFind(WLModel, s2q, done) { if (!WLModel) { return done(new Error('Consistency violation: Live Waterline model should be provided as the 1st argument')); } if (!s2q) { return done(new Error('Consistency violation: Stage two query (S2Q) should be provided as the 2nd argument')); } if (!_.isFunction(done)) { return done(new Error('Consistency violation: `done` (3rd argument) should be a function')); } // Construct an FQRunner isntance. var fqRunner = new FQRunner(WLModel, s2q); // Get a hold of the initial stage 3 query. var initialS3Q = fqRunner.queryObj; // ╦═╗╦ ╦╔╗╔ ┌─┐┌─┐┌─┐┬─┐┌─┐┌┬┐┬┌─┐┌┐┌┌─┐ // ╠╦╝║ ║║║║ │ │├─┘├┤ ├┬┘├─┤ │ ││ ││││└─┐ // ╩╚═╚═╝╝╚╝ └─┘┴ └─┘┴└─┴ ┴ ┴ ┴└─┘┘└┘└─┘ fqRunner.run(function _afterRunningFindOperations(err, values) { if (err) { return done(err); } // If the values don't have a cache there is nothing to return if (!values.cache) { // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // TODO: check up on this-- pretty sure we need to send back an array here..? // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - return done(); } // Now round up the resuls (function _roundUpResults(proceed){ try { // If no joins are used, grab the only item from the cache and pass that on. if (!initialS3Q.joins || !initialS3Q.joins.length) { values = values.cache[WLModel.identity]; return proceed(undefined, values); }//-• // Otherwise, if the values are already combined, return the results. if (values.combined) { return proceed(undefined, values.cache[WLModel.identity]); }//-• // Otherwise, perform an in-memory join (run the integrator) on the values // returned from the operations, and then use that as our joined results. 
var joinedResults; try { var pkColumnName = WLModel.schema[WLModel.primaryKey].columnName; joinedResults = InMemoryJoin(initialS3Q, values.cache, pkColumnName); } catch (e) { return proceed(e); } return proceed(undefined, joinedResults); } catch (e) { return proceed(e); } })(function _afterRoundingUpResults(err, results){ if (err) { return done(err); } try { // ╔═╗╦═╗╔═╗╔═╗╔═╗╔═╗╔═╗ ┌─┐┌─┐┌┬┐┌┐ ┬┌┐┌┌─┐┌┬┐ ┬─┐┌─┐┌─┐┬ ┬┬ ┌┬┐┌─┐ // ╠═╝╠╦╝║ ║║ ║╣ ╚═╗╚═╗ │ │ ││││├┴┐││││├┤ ││ ├┬┘├┤ └─┐│ ││ │ └─┐ // ╩ ╩╚═╚═╝╚═╝╚═╝╚═╝╚═╝ └─┘└─┘┴ ┴└─┘┴┘└┘└─┘─┴┘ ┴└─└─┘└─┘└─┘┴─┘┴ └─┘ if (!results) { // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // TODO: figure out what's up here. Is this ever the expected behavior? // If not, we should send back an error instead. If so, we should send // back empty array ([]), right? // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - return done(); }//-• // Normalize results to an array if (!_.isArray(results)) { // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // TODO: why is this necessary? Move check below up to here if possible. // (the check below with the "Consistency violation"-style error, I mean) // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // if (!_.isArray(results)) { // return done(new Error('`results` from `find` method in adapter should always be an array. But it was not! If you are seeing this error, either there is a bug in this database adapter, or some heretofore unseen issue in Waterline.')); // } // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - results = [ results ]; }//>- // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Transform column names into attribute names for each of the result records // before attempting any in-memory join logic on them. var transformedRecords = _.map(results, function(result) { return WLModel._transformer.unserialize(result); }); // Transform column names into attribute names for all nested, populated records too. var joins = initialS3Q.joins ? initialS3Q.joins : []; if (!_.isArray(joins)) { return done(new Error('Consistency violation: `joins` must be an array at this point. But isntead, somehow it is this: '+util.inspect(joins, {depth:5})+'')); } var data; try { data = transformPopulatedChildRecords(joins, transformedRecords, WLModel); } catch (e) { return done(new Error('Unexpected error transforming populated child records. '+e.stack)); } // If `data` is invalid (not an array) return early to avoid getting into trouble. if (!data || !_.isArray(data)) { return done(new Error('Consistency violation: Result from operations runner should be an array, but instead got: '+util.inspect(data, {depth: 5})+'')); }//-• return done(undefined, data); } catch (e) { return done(e); } });//</ after rounding up results from appropriate source > });//</ .run() > }; /** * ``` * new FQRunner(...); * ``` * * Construct an "FQRunner" instance for use in fetching data * for `find` and `findOne`. * * This is used for accessing (A) a contextualized "run" method and (B) a stage 3 query. * These are, in turn, used to fetch data for `find` and `findOne` queries. * * > The primary responsibility of this class is taking a stage 2 query and determining * > how to fufill it using stage 3 queries. This could involve breaking it up to run * > on multiple datatstores, or simply passing it through after mapping attribute names * > to their column name equivalents. 
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * > FUTURE: This implementation will likely be simplified/superceded in future versions * > of Waterline. (For example, the "run" method could simply be exposed as a first-class * > citizen and required + called directly in `find()` and in `findOne()`. This would * > just involve making it stateless.) * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - * * @param {Ref} WLModel * The live Waterline model. * * @param {Dictionary} s2q * Stage two query. * * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - * @constructs {Ref} * An "FQRunner" instance. * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */ function FQRunner(WLModel, s2q) { // Build up an internal record cache. this.cache = {}; // Build an initial stage three query (s3q) from the incoming stage 2 query (s2q). var s3q = forgeStageThreeQuery({ stageTwoQuery: s2q, identity: WLModel.identity, transformer: WLModel._transformer, originalModels: WLModel.waterline.collections }); // Expose a reference to this stage 3 query for use later on this.queryObj = s3q; // Hold a default value for pre-combined results (native joins) this.preCombined = false; // Expose a reference to the entire set of all WL models available // in the current ORM instance. this.collections = WLModel.waterline.collections; // Expose a reference to the primary model identity. this.currentIdentity = WLModel.identity; // Seed the record cache. this.seedCache(); // Build an array of dictionaries representing find operations // that will need to take place. Then expose it as `this.operations`. this.operations = this.buildOperations(); return this; } // ╦═╗╦ ╦╔╗╔ ┌─┐┌─┐┌─┐┬─┐┌─┐┌┬┐┬┌─┐┌┐┌┌─┐ // ╠╦╝║ ║║║║ │ │├─┘├┤ ├┬┘├─┤ │ ││ ││││└─┐ // ╩╚═╚═╝╝╚╝ └─┘┴ └─┘┴└─┴ ┴ ┴ ┴└─┘┘└┘└─┘ FQRunner.prototype.run = function (cb) { var self = this; // Validate that the options that will be used to run the query are valid. // Mainly that if a connection was passed in and the operation will be run // on more than a single connection that an error is retured. var usedConnections = _.uniq(_.map(this.operations, 'leasedConnection')); if (usedConnections.length > 1 && _.has(this.metaContainer, 'leasedConnection')) { setImmediate(function() { return cb(new Error('Cannot execute this query, because it would need to be run across two different datastores, but a db connection was also explicitly provided (e.g. `usingConnection()`). 
Either ensure that all relevant models are on the same datastore, or do not pass in an explicit db connection.')); }); return; }//-• // Grab the parent operation, it will always be the very first operation var parentOp = this.operations.shift(); // Run The Parent Operation this.runOperation(parentOp, function(err, results) { if (err) { return cb(err); } // If the values aren't an array, ensure they get stored as one if (!_.isArray(results)) { // TODO: replace this with code that rejects anything OTHER than an array results = [results]; } // Set the cache values self.cache[parentOp.collectionName] = results; // If results are empty, or we're already combined, nothing else to so do return if (!results || self.preCombined) { return cb(undefined, { combined: true, cache: self.cache }); } // Run child operations and populate the cache self.execChildOpts(results, function(err) { if (err) { return cb(err); } cb(undefined, { combined: self.preCombined, cache: self.cache }); }); }); }; // ╔═╗╔═╗╔═╗╔╦╗ ┌─┐┌─┐┌─┐┬ ┬┌─┐ // ╚═╗║╣ ║╣ ║║ │ ├─┤│ ├─┤├┤ // ╚═╝╚═╝╚═╝═╩╝ └─┘┴ ┴└─┘┴ ┴└─┘ // Builds an internal representation of result records on a per-model basis. // This holds intermediate results from any parent, junction, and child queries. FQRunner.prototype.seedCache = function () { var cache = {}; _.each(this.collections, function(val, collectionName) { cache[collectionName] = []; }); this.cache = cache; }; // ╔╗ ╦ ╦╦╦ ╔╦╗ ┌─┐┌─┐┌─┐┬─┐┌─┐┌┬┐┬┌─┐┌┐┌┌─┐ // ╠╩╗║ ║║║ ║║ │ │├─┘├┤ ├┬┘├─┤ │ ││ ││││└─┐ // ╚═╝╚═╝╩╩═╝═╩╝ └─┘┴ └─┘┴└─┴ ┴ ┴ ┴└─┘┘└┘└─┘ // Inspects the query object and determines which operations are needed to // fufill the query. FQRunner.prototype.buildOperations = function () { var operations = []; // Check is any populates were performed on the query. If there weren't any then // the operation can be run in a single query. if (!_.keys(this.queryObj.joins).length) { // Grab the collection var collection = this.collections[this.currentIdentity]; if (!collection) { throw new Error('Consistency violation: No such model (identity: `' + this.currentIdentity + '`) has been registered with the ORM.'); } // Find the name of the datastore to run the query on using the dictionary. // If this method can't be found, default to whatever the datastore used by // the `find` method would use. var datastoreName = collection.adapterDictionary[this.queryObj.method]; if (!datastoreName) { datastoreName = collection.adapterDictionary.find; } operations.push({ connectionName: datastoreName, collectionName: this.currentIdentity, queryObj: this.queryObj }); return operations; } // Otherwise populates were used in this operation. Lets grab the connections // needed for these queries. It may only be a single connection in a simple // case or it could be multiple connections in some cases. var connections = this.getConnections(); // Now that all the connections are created, build up the operations needed to // accomplish the end goal of getting all the results no matter which connection // they are on. To do this, figure out if a connection supports joins and if // so pass down a criteria object containing join instructions. If joins are // not supported by a connection, build a series of operations to achieve the // end result. operations = this.stageOperations(connections); return operations; }; // ╔═╗╔╦╗╔═╗╔═╗╔═╗ ┌─┐┌─┐┌─┐┬─┐┌─┐┌┬┐┬┌─┐┌┐┌┌─┐ // ╚═╗ ║ ╠═╣║ ╦║╣ │ │├─┘├┤ ├┬┘├─┤ │ ││ ││││└─┐ // ╚═╝ ╩ ╩ ╩╚═╝╚═╝ └─┘┴ └─┘┴└─┴ ┴ ┴ ┴└─┘┘└┘└─┘ // Figures out which piece of the query to run on each datastore. 
FQRunner.prototype.stageOperations = function stageOperations(datastores) { var self = this; var operations = []; // Build the parent operation and set it as the first operation in the array operations.push(this.createParentOperation(datastores)); // Grab access to the "parent" model, and the name of its datastore. var ParentWLModel = this.collections[this.currentIdentity]; var parentDatastoreName = ParentWLModel.adapterDictionary[this.queryObj.method]; // Parent operation var parentOperation = _.first(operations); // For each additional datastore, build operations. _.each(datastores, function(val, datastoreName) { // Ignore the datastore used for the parent operation if a join can be used // on it. This means all of the operations for the query can take place on a // single db connection, using a single query. if (datastoreName === parentDatastoreName && parentOperation.method === 'join') { return; }//-• // Operations are needed that will be run after the parent operation has been // completed. If there are more than a single join, set the parent join and // build up children operations. This occurs in a many-to-many relationship // when a join table is needed. // Criteria is omitted until after the parent operation has been run so that // an IN query can be formed on child operations. var localOpts = []; _.each(val.joins, function(join, idx) { // Grab the `find` datastore name for the child model being used // in the join method. var optModel = self.collections[join.childCollectionIdentity]; var optDatastoreName = optModel.adapterDictionary.find; var operation = { connectionName: optDatastoreName, collectionName: join.childCollectionIdentity, queryObj: { method: 'find', using: join.child, join: join } }; // If this is the first join, it can't have any parents if (idx === 0) { localOpts.push(operation); return; } // Look into the previous operations and see if this is a child of any // of them var child = false; _.each(localOpts, function(localOpt) { var childJoin = localOpt.queryObj.join.childCollectionIdentity; if (childJoin !== join.parentCollectionIdentity) { return; } // Flag the child operation localOpt.child = operation; child = true; }); // If this was a child join, it's already been set if (child) { return; } localOpts.push(operation); }); // Add the local child operations to the operations array operations = operations.concat(localOpts); }); return operations; }; // ╔═╗╦═╗╔═╗╔═╗╔╦╗╔═╗ ┌─┐┌─┐┬─┐┌─┐┌┐┌┌┬┐ // ║ ╠╦╝║╣ ╠═╣ ║ ║╣ ├─┘├─┤├┬┘├┤ │││ │ // ╚═╝╩╚═╚═╝╩ ╩ ╩ ╚═╝ ┴ ┴ ┴┴└─└─┘┘└┘ ┴ // ┌─┐┌─┐┌─┐┬─┐┌─┐┌┬┐┬┌─┐┌┐┌ // │ │├─┘├┤ ├┬┘├─┤ │ ││ ││││ // └─┘┴ └─┘┴└─┴ ┴ ┴ ┴└─┘┘└┘ /** * createParentOperation() * * * @param {Array} datastores * * @returns {Dictionary} * The parent operation. */ FQRunner.prototype.createParentOperation = function (datastores) { // Get a reference to the original stage three query. // (for the sake of familiarity) var originalS3Q = this.queryObj; // Look up the parent model. var ParentWLModel = this.collections[this.currentIdentity]; // Look up the datastore name. // (the name of the parent model's datastore) var datastoreName = ParentWLModel.adapterDictionary[originalS3Q.method]; // ============================================================================ // > Note: // > If findOne was used as the method, use the same datastore `find` is on. // > (This is a relic of when datastores might vary on a per-method basis. // > It is relatively pointless now, and only necessary here because it's not // > being normalized elsewhere. TODO: rip this out!) 
// > // > * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * // > For a quick fix, just use 'find' above instead of making it dynamic per-method. // > e.g. // > ``` // > ParentWLModel.adapterDictionary.find // > ``` // > * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * if (!datastoreName) { if (originalS3Q.method === 'findOne') { console.warn('Warning: For compatibility, falling back to an implementation detail of a deprecated, per-method approach to datastore access. If you are seeing this warning, please report it at http://sailsjs.com/bugs. Thanks!\nDetails:\n```\n'+((new Error('Here is a stack trace, for context')).stack)+'\n```\n'); datastoreName = ParentWLModel.adapterDictionary.find; }//>- if (!datastoreName) { throw new Error('Consistency violation: Failed to locate proper datastore name for stage 3 query. (This is probably not your fault- more than likely it\'s a bug in Waterline.) Here is the offending stage 3 query: \n```\n'+util.inspect(originalS3Q, {depth:5})+'\n```\n'); } }//>- // ============================================================================ // Look up the parent WL model's datastore from the provided array. var datastore = datastores[datastoreName]; if (!datastore) { throw new Error('Consistency violation: Unexpected Waterline error (bug) when determining the datastore to use for this query. Attempted to look up datastore `'+datastoreName+'` (for model `'+this.currentIdentity+'`) -- but it could not be found in the provided set of datastores: '+util.inspect(datastores, {depth:5})+''); }//-• // Determine if the adapter has a native `join` method. var doesAdapterSupportNativeJoin = _.has(ParentWLModel.adapterDictionary, 'join'); if (doesAdapterSupportNativeJoin) { if (!_.isEqual(ParentWLModel.adapterDictionary.join, datastoreName)) { throw new Error('Consistency violation: The `join` adapter method should not be pointed at a different datastore! (Per-method datastores are longer supported.)'); } // If so, verify that all of the "joins" can be run natively in one fell swoop. // If all the joins are supported, then go ahead and build & return a simple // operation that just sends the entire query down to a single datastore/adapter. var allJoinsAreSupported = _.any(datastore.joins, function(join) { return _.indexOf(datastore.collections, join.childCollectionIdentity) > -1; }); if (allJoinsAreSupported) { // Set the stage 3 query to have `method: 'join'` so it will use the // native `join` adapter method. originalS3Q.method = 'join'; // Set the preCombined flag on our "Operations" instance to indicate that // the integrator doesn't need to run. this.preCombined = true; // Build & return native join operation. return { connectionName: datastoreName, collectionName: this.currentIdentity, queryObj: originalS3Q }; }//-• // (Otherwise, we have to do an xD/A populate. So we just continue on below.) }//>- // --• // IWMIH we'll be doing an xD/A (in-memory) populate. // Make a shallow copy of our S3Q that has the `joins` key removed. // (because this will be an in-memory join now) var tmpS3Q = _.omit(originalS3Q, 'joins'); // Build initial ("parent") operation for xD/A populate. 
return { connectionName: datastoreName, collectionName: this.currentIdentity, queryObj: tmpS3Q }; }; // ╔═╗╔═╗╔╦╗ ┌─┐┌─┐┌┐┌┌┐┌┌─┐┌─┐┌┬┐┬┌─┐┌┐┌┌─┐ // ║ ╦║╣ ║ │ │ │││││││├┤ │ │ ││ ││││└─┐ // ╚═╝╚═╝ ╩ └─┘└─┘┘└┘┘└┘└─┘└─┘ ┴ ┴└─┘┘└┘└─┘ FQRunner.prototype.getConnections = function getConnections() { var self = this; var connections = {}; // Default structure for connection objects var defaultConnection = { collections: [], children: [], joins: [] }; // For each populate build a connection item to build up an entire collection/connection registry // for this query. Using this, queries should be able to be seperated into discrete queries // which can be run on connections in parallel. _.each(this.queryObj.joins, function(join) { var parentConnection; var childConnection; function getConnection(collName) { var collection = self.collections[collName]; var connectionName = collection.adapterDictionary.find; connections[connectionName] = connections[connectionName] || _.merge({}, defaultConnection); return connections[connectionName]; } // If this join is a junctionTable, find the parent operation and add it to that connection's // children instead of creating a new operation on another connection. This allows cross-connection // many-to-many joins to be used where the join relies on the results of the parent operation // being run first. if (join.junctionTable) { // Find the previous join var parentJoin = _.find(self.queryObj.joins, function(otherJoin) { return otherJoin.child === join.parent; }); // Grab the parent join connection var parentJoinConnection = getConnection(parentJoin.parentCollectionIdentity); // Find the connection the parent and child collections belongs to parentConnection = getConnection(join.parentCollectionIdentity); childConnection = getConnection(join.childCollectionIdentity); // Update the registry parentConnection.collections.push(join.parentCollectionIdentity); childConnection.collections.push(join.childCollectionIdentity); parentConnection.children.push(join.parentCollectionIdentity); // Ensure the arrays are made up only of unique values parentConnection.collections = _.uniq(parentConnection.collections); childConnection.collections = _.uniq(childConnection.collections); parentConnection.children = _.uniq(parentConnection.children); // Add the join to the correct joins array. We want it to be on the same // connection as the operation before so the timing is correct. 
parentJoinConnection.joins = parentJoinConnection.joins.concat(join); // Build up the connection registry like normal } else { parentConnection = getConnection(join.parentCollectionIdentity); childConnection = getConnection(join.childCollectionIdentity); parentConnection.collections.push(join.parentCollectionIdentity); childConnection.collections.push(join.childCollectionIdentity); parentConnection.joins = parentConnection.joins.concat(join); } }); return connections; }; // ╦═╗╦ ╦╔╗╔ ┌─┐┌─┐┌─┐┬─┐┌─┐┌┬┐┬┌─┐┌┐┌ // ╠╦╝║ ║║║║ │ │├─┘├┤ ├┬┘├─┤ │ ││ ││││ // ╩╚═╚═╝╝╚╝ └─┘┴ └─┘┴└─┴ ┴ ┴ ┴└─┘┘└┘ FQRunner.prototype.runOperation = function runOperation(operation, cb) { var collectionName = operation.collectionName; var queryObj = operation.queryObj; // Ensure the collection exist if (!_.has(this.collections, collectionName)) { return cb(new Error('Invalid Collection specfied in operation.')); } // Find the collection to use var collection = this.collections[collectionName]; // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // TODO: this should have already been dealt with in forgeStage3Query // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Send the findOne queries to the adapter's find method if (queryObj.method === 'findOne') { queryObj.method = 'find'; console.warn('TODO: this swapping of findOne=>find should have already been dealt with in forgeStage3Query. Stack trace for easy reference:'+(new Error()).stack); } // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Grab the adapter to perform the query on var datastoreName = collection.adapterDictionary[queryObj.method]; var adapter = collection.datastores[datastoreName].adapter; // Run the operation adapter[queryObj.method](datastoreName, queryObj, cb, this.metaContainer); }; // ╔═╗═╗ ╦╔═╗╔═╗╦ ╦╔╦╗╔═╗ ┌─┐┬ ┬┬┬ ┌┬┐ // ║╣ ╔╩╦╝║╣ ║ ║ ║ ║ ║╣ │ ├─┤││ ││ // ╚═╝╩ ╚═╚═╝╚═╝╚═╝ ╩ ╚═╝ └─┘┴ ┴┴┴─┘─┴┘ // ┌─┐┌─┐┌─┐┬─┐┌─┐┌┬┐┬┌─┐┌┐┌┌─┐ // │ │├─┘├┤ ├┬┘├─┤ │ ││ ││││└─┐ // └─┘┴ └─┘┴└─┴ ┴ ┴ ┴└─┘┘└┘└─┘ // If joins are used and an adapter doesn't support them, there will be child // operations that will need to be run. Parse each child operation and run them // along with any tree joins and return an array of children results that can be // combined with the parent results. FQRunner.prototype.execChildOpts = function execChildOpts(parentResults, cb) { var self = this; var childOperations = this.buildChildOpts(parentResults); // Run the generated operations in parallel async.each(childOperations, function(opt, next) { self.collectChildResults(opt, next); }, cb); }; // ╔╗ ╦ ╦╦╦ ╔╦╗ ┌─┐┬ ┬┬┬ ┌┬┐ // ╠╩╗║ ║║║ ║║ │ ├─┤││ ││ // ╚═╝╚═╝╩╩═╝═╩╝ └─┘┴ ┴┴┴─┘─┴┘ // ┌─┐┌─┐┌─┐┬─┐┌─┐┌┬┐┬┌─┐┌┐┌┌─┐ // │ │├─┘├┤ ├┬┘├─┤ │ ││ ││││└─┐ // └─┘┴ └─┘┴└─┴ ┴ ┴ ┴└─┘┘└┘└─┘ // Using the results of a parent operation, build up a set of operations that // contain criteria based on what is returned from a parent operation. These can // be arrays containing more than one operation for each child, which will happen // when "join tables" would be used. Each set should be able to be run in parallel. FQRunner.prototype.buildChildOpts = function buildChildOpts(parentResults) { var self = this; var opts = []; // Build up operations that can be run in parallel using the results of the parent operation _.each(this.operations, function(item) { var localOpts = []; var parents = []; var idx = 0; var using = self.collections[item.collectionName]; // Go through all the parent records and build up an array of keys to look in. 
// This will be used in an IN query to grab all the records needed for the "join". _.each(parentResults, function(result) { if (!_.has(result, item.queryObj.join.parentKey)) { return; } if (_.isNull(result[item.queryObj.join.parentKey]) || _.isUndefined(result[item.queryObj.join.parentKey])) { return; } parents.push(result[item.queryObj.join.parentKey]); }); // If no parents match the join criteria, don't build up an operation if (!parents.length) { return; } // Build up criteria that will be used inside an IN query var criteria = {}; criteria[item.queryObj.join.childKey] = parents; var _tmpCriteria = {}; // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // TODO: remove this halfway normalization code // (it doesn't actually cover all the edge cases anyway, and it shouldn't // be necessary because we normalize all this ahead of time when forging // the stage 2 query. If it IS necessary, then that means we're building // incomplete criteria in Waterline core, so that's an easy fix-- we'd just // need to find those spots and make them use the fully-expanded query language) // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // If the join instruction contains `criteria`... if (item.queryObj.join.criteria) { var userlandCriteria = _.extend({}, item.queryObj.join.criteria); _tmpCriteria = _.extend({}, userlandCriteria); // Ensure `where` criteria is properly formatted if (_.has(userlandCriteria, 'where')) { if (_.isUndefined(userlandCriteria.where)) { delete userlandCriteria.where; } else { // If an array of primary keys was passed in, normalize the criteria if (_.isArray(userlandCriteria.where)) { // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // TODO: verify that this is actually intended to be the pk attribute name // and not a column name: // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - var pkAttrName = self.collections[item.queryObj.join.childCollectionIdentity].primaryKey; var tmpPkWhereClause = {}; tmpPkWhereClause[pkAttrName] = _.extend({}, userlandCriteria.where); // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // TODO: we need to expand this into a proper query (i.e. with an `and` at the top level) // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - userlandCriteria.where = tmpPkWhereClause; } }//</else> }//>- </ if join instruction's criteria has a `where` clause > // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // TODO: replace this merge with explicit overriding. // (my guess is that we just want `_.extend({}, userlandCriteria, { where: criteria })`-- // but even that is a bit confusing b/c `criteria` isn't the same thing as the `where` // clause) // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - criteria = _.merge({}, userlandCriteria, { where: criteria }); }//>- </if join instruction contains `criteria`> // If criteria contains a skip or limit option, an operation will be needed for each parent. 
if (_.has(_tmpCriteria, 'skip') || _.has(_tmpCriteria, 'limit')) { _.each(parents, function(parent) { var tmpCriteria = _.merge({}, criteria); tmpCriteria.where[item.queryObj.join.childKey] = parent; // Mixin the user defined skip and limit if (_.has(_tmpCriteria, 'skip')) { tmpCriteria.skip = _tmpCriteria.skip; } if (_.has(_tmpCriteria, 'limit')) { tmpCriteria.limit = _tmpCriteria.limit; } // Build a simple operation to run with criteria from the parent results. // Give it an ID so that children operations can reference it if needed. localOpts.push({ id: idx, collectionName: item.collectionName, queryObj: { method: item.queryObj.method, using: using.tableName, criteria: tmpCriteria }, join: item.queryObj.join }); }); } else { // Build a simple operation to run with criteria from the parent results. // Give it an ID so that children operations can reference it if needed. localOpts.push({ id: idx, collectionName: item.collectionName, queryObj: { method: item.queryObj.method, using: using.tableName, criteria: criteria }, join: item.queryObj.join }); } // If there are child records, add the opt but don't add the criteria if (!item.child) { opts.push(localOpts); return; } localOpts.push({ collectionName: item.child.collectionName, queryObj: { method: item.queryObj.method, using: self.collections[item.child.collectionName].tableName, }, parent: idx, join: item.child.queryObj.join }); // Add the local opt to the opts array opts.push(localOpts); }); return opts; }; // ╔═╗╔═╗╦ ╦ ╔═╗╔═╗╔╦╗ ┌─┐┬ ┬┬┬ ┌┬┐ // ║ ║ ║║ ║ ║╣ ║ ║ │ ├─┤││ ││ // ╚═╝╚═╝╩═╝╩═╝╚═╝╚═╝ ╩ └─┘┴ ┴┴┴─┘─┴┘ // ┬─┐┌─┐┌─┐┬ ┬┬ ┌┬┐┌─┐ // ├┬┘├┤ └─┐│ ││ │ └─┐ // ┴└─└─┘└─┘└─┘┴─┘┴ └─┘ // Run a set of child operations and return the results in a namespaced array // that can later be used to do an in-memory join. FQRunner.prototype.collectChildResults = function collectChildResults(opts, cb) { var self = this; var intermediateResults = []; var i = 0; if (!opts || opts.length === 0) { return cb(undefined, {}); } // Run the operations and any child operations in series so that each can access the // results of the previous operation. async.eachSeries(opts, function(opt, next) { self.runChildOperations(intermediateResults, opt, function(err, values) { if (err) { return next(err); } // If the values aren't an array, ensure they get stored as one if (!_.isArray(values)) { // TODO: replace this with code that rejects anything other than an array values = [values]; } // If there are multiple operations and we are on the first one lets put the results // into an intermediate results array if (opts.length > 1 && i === 0) { intermediateResults = intermediateResults.concat(values); } // Add values to the cache key self.cache[opt.collectionName] = self.cache[opt.collectionName] || []; self.cache[opt.collectionName] = self.cache[opt.collectionName].concat(values); // Ensure the values are unique var pkColumnName = self.getPKColumnName(opt.collectionName); self.cache[opt.collectionName] = _.uniq(self.cache[opt.collectionName], pkColumnName); i++; next(); }); }, cb); }; // ╦═╗╦ ╦╔╗╔ ┌─┐┬ ┬┬┬ ┌┬┐ // ╠╦╝║ ║║║║ │ ├─┤││ ││ // ╩╚═╚═╝╝╚╝ └─┘┴ ┴┴┴─┘─┴┘ // ┌─┐┌─┐┌─┐┬─┐┌─┐┌┬┐┬┌─┐┌┐┌ // │ │├─┘├┤ ├┬┘├─┤ │ ││ ││││ // └─┘┴ └─┘┴└─┴ ┴ ┴ ┴└─┘┘└┘ // Executes a child operation and appends the results as a namespaced object to the // main operation results object. 
FQRunner.prototype.runChildOperations = function runChildOperations(intermediateResults, opt, cb) { var self = this; // Check if value has a parent, if so a join table was used and we need to build up dictionary // values that can be used to join the parent and the children together. // If the operation doesn't have a parent operation run it if (!_.has(opt, 'parent')) { return self.runOperation(opt, cb); } // If the operation has a parent, look into the optResults and build up a criteria // object using the results of a previous operation var parents = []; // Build criteria that can be used with an `in` query _.each(intermediateResults, function(result) { parents.push(result[opt.join.parentKey]); }); var criteria = {}; criteria[opt.join.childKey] = parents; // Check if the join contains any criteria if (opt.join.criteria) { var userlandCriteria = _.merge({}, opt.join.criteria); // Ensure `where` criteria is properly formatted if (_.has(userlandCriteria, 'where')) { if (_.isUndefined(userlandCriteria.where)) { delete userlandCriteria.where; } } delete userlandCriteria.sort; delete userlandCriteria.skip; delete userlandCriteria.limit; criteria = _.merge({}, userlandCriteria, { where: criteria }); } // Empty the cache for the join table so we can only add values used var cacheCopy = _.merge({}, self.cache[opt.join.parentCollectionIdentity]); self.cache[opt.join.parentCollectionIdentity] = []; // Run the operation self.runOperation(opt, function(err, values) { if (err) { return cb(err); } // If the values aren't an array, ensure they get stored as one if (!_.isArray(values)) { // TODO: replace this with code that rejects anything other than an array values = [values]; } // Build up the new join table result _.each(values, function(val) { _.each(cacheCopy, function(copy) { if (copy[opt.join.parentKey] === val[opt.join.childKey]) { self.cache[opt.join.parentCollectionIdentity].push(copy); } }); }); // Ensure the values are unique var pkColumnName = self.getPKColumnName(opt.join.parentCollectionIdentity); self.cache[opt.join.parentCollectionIdentity] = _.uniq(self.cache[opt.join.parentCollectionIdentity], pkColumnName); cb(undefined, values); }); }; // ╔═╗╦╔╗╔╔╦╗ ┌─┐┌─┐┬ ┬ ┌─┐┌─┐┌┬┐┬┌─┐┌┐┌ // ╠╣ ║║║║ ║║ │ │ ││ │ ├┤ │ │ ││ ││││ // ╚ ╩╝╚╝═╩╝ └─┘└─┘┴─┘┴─┘└─┘└─┘ ┴ ┴└─┘┘└┘ // ┌─┐┬─┐┬┌┬┐┌─┐┬─┐┬ ┬ ┬┌─┌─┐┬ ┬ // ├─┘├┬┘││││├─┤├┬┘└┬┘ ├┴┐├┤ └┬┘ // ┴ ┴└─┴┴ ┴┴ ┴┴└─ ┴ ┴ ┴└─┘ ┴ // (Note: this returns the column name of the pk -- not the attr name!) FQRunner.prototype.getPKColumnName = function (identity) { var WLModel = this.collections[identity]; return WLModel.schema[WLModel.primaryKey].columnName; };
lib/waterline/utils/query/help-find.js
/** * Module dependencies */ var util = require('util'); var _ = require('@sailshq/lodash'); var async = require('async'); var forgeStageThreeQuery = require('./forge-stage-three-query'); var InMemoryJoin = require('./in-memory-join'); var transformPopulatedChildRecords = require('./transform-populated-child-records'); /** * helpFind() * * Given a stage 2 "find" or "findOne" query, build and execute a sequence * of generated stage 3 queries (ska "find" operations)-- and then run them. * If disparate data sources need to be used, then perform in-memory joins * as needed. Afterwards, transform the normalized result set into an array * of records, and (potentially) populate them. * * > Fun facts: * > • This file is sometimes informally known as the "operations runner". * > • If particlebanana and mikermcneil were trees and you chopped us down, * > the months in 2013-2016 we spent figuring out the original implementation * > of the code in this file & the integrator would be a charred, necrotic * > ring that imparts frostbite when touched. * > • This is used for `.find()` and `.findOne()` queries. * > • It's a key piece of the puzzle when it comes to populating records in a * > cross-datastore/adapter (xD/A) fashion. * * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - * * @param {Ref} WLModel * The live Waterline model. * * @param {Dictionary} s2q * Stage two query. * * @param {Function} done * @param {Error?} err [if an error occured] * @param {Array} records * * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */ module.exports = function helpFind(WLModel, s2q, done) { if (!WLModel) { return done(new Error('Consistency violation: Live Waterline model should be provided as the 1st argument')); } if (!s2q) { return done(new Error('Consistency violation: Stage two query (S2Q) should be provided as the 2nd argument')); } if (!_.isFunction(done)) { return done(new Error('Consistency violation: `done` (3rd argument) should be a function')); } // Construct an FQRunner isntance. var fqRunner = new FQRunner(WLModel, s2q); // Get a hold of the initial stage 3 query. var initialS3Q = fqRunner.queryObj; // ╦═╗╦ ╦╔╗╔ ┌─┐┌─┐┌─┐┬─┐┌─┐┌┬┐┬┌─┐┌┐┌┌─┐ // ╠╦╝║ ║║║║ │ │├─┘├┤ ├┬┘├─┤ │ ││ ││││└─┐ // ╩╚═╚═╝╝╚╝ └─┘┴ └─┘┴└─┴ ┴ ┴ ┴└─┘┘└┘└─┘ fqRunner.run(function _afterRunningFindOperations(err, values) { if (err) { return done(err); } // If the values don't have a cache there is nothing to return if (!values.cache) { // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // TODO: check up on this-- pretty sure we need to send back an array here..? // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - return done(); } // Now round up the resuls (function _roundUpResults(proceed){ try { // If no joins are used, grab the only item from the cache and pass that on. if (!initialS3Q.joins || !initialS3Q.joins.length) { values = values.cache[WLModel.identity]; return proceed(undefined, values); }//-• // Otherwise, if the values are already combined, return the results. if (values.combined) { return proceed(undefined, values.cache[WLModel.identity]); }//-• // Otherwise, perform an in-memory join (run the integrator) on the values // returned from the operations, and then use that as our joined results. 
var joinedResults; try { var pkColumnName = WLModel.schema[WLModel.primaryKey].columnName; joinedResults = InMemoryJoin(initialS3Q, values.cache, pkColumnName); } catch (e) { return proceed(e); } return proceed(undefined, joinedResults); } catch (e) { return proceed(e); } })(function _afterRoundingUpResults(err, results){ if (err) { return done(err); } try { // ╔═╗╦═╗╔═╗╔═╗╔═╗╔═╗╔═╗ ┌─┐┌─┐┌┬┐┌┐ ┬┌┐┌┌─┐┌┬┐ ┬─┐┌─┐┌─┐┬ ┬┬ ┌┬┐┌─┐ // ╠═╝╠╦╝║ ║║ ║╣ ╚═╗╚═╗ │ │ ││││├┴┐││││├┤ ││ ├┬┘├┤ └─┐│ ││ │ └─┐ // ╩ ╩╚═╚═╝╚═╝╚═╝╚═╝╚═╝ └─┘└─┘┴ ┴└─┘┴┘└┘└─┘─┴┘ ┴└─└─┘└─┘└─┘┴─┘┴ └─┘ if (!results) { // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // TODO: figure out what's up here. Is this ever the expected behavior? // If not, we should send back an error instead. If so, we should send // back empty array ([]), right? // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - return done(); }//-• // Normalize results to an array if (!_.isArray(results)) { // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // TODO: why is this necessary? Move check below up to here if possible. // (the check below with the "Consistency violation"-style error, I mean) // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // if (!_.isArray(results)) { // return done(new Error('`results` from `find` method in adapter should always be an array. But it was not! If you are seeing this error, either there is a bug in this database adapter, or some heretofore unseen issue in Waterline.')); // } // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - results = [ results ]; }//>- // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Transform column names into attribute names for each of the result records // before attempting any in-memory join logic on them. var transformedRecords = _.map(results, function(result) { return WLModel._transformer.unserialize(result); }); // Transform column names into attribute names for all nested, populated records too. var joins = initialS3Q.joins ? initialS3Q.joins : []; if (!_.isArray(joins)) { return done(new Error('Consistency violation: `joins` must be an array at this point. But isntead, somehow it is this: '+util.inspect(joins, {depth:5})+'')); } var data; try { data = transformPopulatedChildRecords(joins, transformedRecords, WLModel); } catch (e) { return done(new Error('Unexpected error transforming populated child records. '+e.stack)); } // If `data` is invalid (not an array) return early to avoid getting into trouble. if (!data || !_.isArray(data)) { return done(new Error('Consistency violation: Result from operations runner should be an array, but instead got: '+util.inspect(data, {depth: 5})+'')); }//-• return done(undefined, data); } catch (e) { return done(e); } });//</ after rounding up results from appropriate source > });//</ .run() > }; /** * ``` * new FQRunner(...); * ``` * * Construct an "FQRunner" instance for use in fetching data * for `find` and `findOne`. * * This is used for accessing (A) a contextualized "run" method and (B) a stage 3 query. * These are, in turn, used to fetch data for `find` and `findOne` queries. * * > The primary responsibility of this class is taking a stage 2 query and determining * > how to fufill it using stage 3 queries. This could involve breaking it up to run * > on multiple datatstores, or simply passing it through after mapping attribute names * > to their column name equivalents. 
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * > FUTURE: This implementation will likely be simplified/superceded in future versions * > of Waterline. (For example, the "run" method could simply be exposed as a first-class * > citizen and required + called directly in `find()` and in `findOne()`. This would * > just involve making it stateless.) * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - * * @param {Ref} WLModel * The live Waterline model. * * @param {Dictionary} s2q * Stage two query. * * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - * @constructs {Ref} * An "FQRunner" instance. * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */ function FQRunner(WLModel, s2q) { // Build up an internal record cache. this.cache = {}; // Build an initial stage three query (s3q) from the incoming stage 2 query (s2q). var s3q = forgeStageThreeQuery({ stageTwoQuery: s2q, identity: WLModel.identity, transformer: WLModel._transformer, originalModels: WLModel.waterline.collections }); // Expose a reference to this stage 3 query for use later on this.queryObj = s3q; // Hold a default value for pre-combined results (native joins) this.preCombined = false; // Expose a reference to the entire set of all WL models available // in the current ORM instance. this.collections = WLModel.waterline.collections; // Expose a reference to the primary model identity. this.currentIdentity = WLModel.identity; // Seed the record cache. this.seedCache(); // Build an array of dictionaries representing find operations // that will need to take place. Then expose it as `this.operations`. this.operations = this.buildOperations(); return this; } // ╦═╗╦ ╦╔╗╔ ┌─┐┌─┐┌─┐┬─┐┌─┐┌┬┐┬┌─┐┌┐┌┌─┐ // ╠╦╝║ ║║║║ │ │├─┘├┤ ├┬┘├─┤ │ ││ ││││└─┐ // ╩╚═╚═╝╝╚╝ └─┘┴ └─┘┴└─┴ ┴ ┴ ┴└─┘┘└┘└─┘ FQRunner.prototype.run = function (cb) { var self = this; // Validate that the options that will be used to run the query are valid. // Mainly that if a connection was passed in and the operation will be run // on more than a single connection that an error is retured. var usedConnections = _.uniq(_.map(this.operations, 'leasedConnection')); if (usedConnections.length > 1 && _.has(this.metaContainer, 'leasedConnection')) { setImmediate(function() { return cb(new Error('Cannot execute this query, because it would need to be run across two different datastores, but a db connection was also explicitly provided (e.g. `usingConnection()`). 
Either ensure that all relevant models are on the same datastore, or do not pass in an explicit db connection.')); }); return; }//-• // Grab the parent operation, it will always be the very first operation var parentOp = this.operations.shift(); // Run The Parent Operation this.runOperation(parentOp, function(err, results) { if (err) { return cb(err); } // If the values aren't an array, ensure they get stored as one if (!_.isArray(results)) { // TODO: replace this with code that rejects anything OTHER than an array results = [results]; } // Set the cache values self.cache[parentOp.collectionName] = results; // If results are empty, or we're already combined, nothing else to so do return if (!results || self.preCombined) { return cb(undefined, { combined: true, cache: self.cache }); } // Run child operations and populate the cache self.execChildOpts(results, function(err) { if (err) { return cb(err); } cb(undefined, { combined: self.preCombined, cache: self.cache }); }); }); }; // ╔═╗╔═╗╔═╗╔╦╗ ┌─┐┌─┐┌─┐┬ ┬┌─┐ // ╚═╗║╣ ║╣ ║║ │ ├─┤│ ├─┤├┤ // ╚═╝╚═╝╚═╝═╩╝ └─┘┴ ┴└─┘┴ ┴└─┘ // Builds an internal representation of result records on a per-model basis. // This holds intermediate results from any parent, junction, and child queries. FQRunner.prototype.seedCache = function () { var cache = {}; _.each(this.collections, function(val, collectionName) { cache[collectionName] = []; }); this.cache = cache; }; // ╔╗ ╦ ╦╦╦ ╔╦╗ ┌─┐┌─┐┌─┐┬─┐┌─┐┌┬┐┬┌─┐┌┐┌┌─┐ // ╠╩╗║ ║║║ ║║ │ │├─┘├┤ ├┬┘├─┤ │ ││ ││││└─┐ // ╚═╝╚═╝╩╩═╝═╩╝ └─┘┴ └─┘┴└─┴ ┴ ┴ ┴└─┘┘└┘└─┘ // Inspects the query object and determines which operations are needed to // fufill the query. FQRunner.prototype.buildOperations = function () { var operations = []; // Check is any populates were performed on the query. If there weren't any then // the operation can be run in a single query. if (!_.keys(this.queryObj.joins).length) { // Grab the collection var collection = this.collections[this.currentIdentity]; if (!collection) { throw new Error('Consistency violation: No such model (identity: `' + this.currentIdentity + '`) has been registered with the ORM.'); } // Find the name of the datastore to run the query on using the dictionary. // If this method can't be found, default to whatever the datastore used by // the `find` method would use. var datastoreName = collection.adapterDictionary[this.queryObj.method]; if (!datastoreName) { datastoreName = collection.adapterDictionary.find; } operations.push({ connectionName: datastoreName, collectionName: this.currentIdentity, queryObj: this.queryObj }); return operations; } // Otherwise populates were used in this operation. Lets grab the connections // needed for these queries. It may only be a single connection in a simple // case or it could be multiple connections in some cases. var connections = this.getConnections(); // Now that all the connections are created, build up the operations needed to // accomplish the end goal of getting all the results no matter which connection // they are on. To do this, figure out if a connection supports joins and if // so pass down a criteria object containing join instructions. If joins are // not supported by a connection, build a series of operations to achieve the // end result. operations = this.stageOperations(connections); return operations; }; // ╔═╗╔╦╗╔═╗╔═╗╔═╗ ┌─┐┌─┐┌─┐┬─┐┌─┐┌┬┐┬┌─┐┌┐┌┌─┐ // ╚═╗ ║ ╠═╣║ ╦║╣ │ │├─┘├┤ ├┬┘├─┤ │ ││ ││││└─┐ // ╚═╝ ╩ ╩ ╩╚═╝╚═╝ └─┘┴ └─┘┴└─┴ ┴ ┴ ┴└─┘┘└┘└─┘ // Figures out which piece of the query to run on each datastore. 
FQRunner.prototype.stageOperations = function stageOperations(datastores) { var self = this; var operations = []; // Build the parent operation and set it as the first operation in the array operations.push(this.createParentOperation(datastores)); // Grab access to the "parent" model, and the name of its datastore. var ParentWLModel = this.collections[this.currentIdentity]; var parentDatastoreName = ParentWLModel.adapterDictionary[this.queryObj.method]; // Parent operation var parentOperation = _.first(operations); // For each additional datastore, build operations. _.each(datastores, function(val, datastoreName) { // Ignore the datastore used for the parent operation if a join can be used // on it. This means all of the operations for the query can take place on a // single db connection, using a single query. if (datastoreName === parentDatastoreName && parentOperation.method === 'join') { return; }//-• // Operations are needed that will be run after the parent operation has been // completed. If there are more than a single join, set the parent join and // build up children operations. This occurs in a many-to-many relationship // when a join table is needed. // Criteria is omitted until after the parent operation has been run so that // an IN query can be formed on child operations. var localOpts = []; _.each(val.joins, function(join, idx) { // Grab the `find` datastore name for the child model being used // in the join method. var optModel = self.collections[join.childCollectionIdentity]; var optDatastoreName = optModel.adapterDictionary.find; var operation = { connectionName: optDatastoreName, collectionName: join.childCollectionIdentity, queryObj: { method: 'find', using: join.child, join: join } }; // If this is the first join, it can't have any parents if (idx === 0) { localOpts.push(operation); return; } // Look into the previous operations and see if this is a child of any // of them var child = false; _.each(localOpts, function(localOpt) { var childJoin = localOpt.queryObj.join.childCollectionIdentity; if (childJoin !== join.parentCollectionIdentity) { return; } // Flag the child operation localOpt.child = operation; child = true; }); // If this was a child join, it's already been set if (child) { return; } localOpts.push(operation); }); // Add the local child operations to the operations array operations = operations.concat(localOpts); }); return operations; }; // ╔═╗╦═╗╔═╗╔═╗╔╦╗╔═╗ ┌─┐┌─┐┬─┐┌─┐┌┐┌┌┬┐ // ║ ╠╦╝║╣ ╠═╣ ║ ║╣ ├─┘├─┤├┬┘├┤ │││ │ // ╚═╝╩╚═╚═╝╩ ╩ ╩ ╚═╝ ┴ ┴ ┴┴└─└─┘┘└┘ ┴ // ┌─┐┌─┐┌─┐┬─┐┌─┐┌┬┐┬┌─┐┌┐┌ // │ │├─┘├┤ ├┬┘├─┤ │ ││ ││││ // └─┘┴ └─┘┴└─┴ ┴ ┴ ┴└─┘┘└┘ /** * createParentOperation() * * * @param {Array} datastores * * @returns {Dictionary} * The parent operation. */ FQRunner.prototype.createParentOperation = function (datastores) { // Get a reference to the original stage three query. // (for the sake of familiarity) var originalS3Q = this.queryObj; // Look up the parent model. var ParentWLModel = this.collections[this.currentIdentity]; // Look up the datastore name. // (the name of the parent model's datastore) var datastoreName = ParentWLModel.adapterDictionary[originalS3Q.method]; // ============================================================================ // > Note: // > If findOne was used as the method, use the same datastore `find` is on. // > (This is a relic of when datastores might vary on a per-method basis. // > It is relatively pointless now, and only necessary here because it's not // > being normalized elsewhere. TODO: rip this out!) 
// > // > * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * // > For a quick fix, just use 'find' above instead of making it dynamic per-method. // > e.g. // > ``` // > ParentWLModel.adapterDictionary.find // > ``` // > * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * if (!datastoreName) { if (originalS3Q.method === 'findOne') { console.warn('Warning: For compatibility, falling back to an implementation detail of a deprecated, per-method approach to datastore access. If you are seeing this warning, please report it at http://sailsjs.com/bugs. Thanks!\nDetails:\n```\n'+((new Error('Here is a stack trace, for context')).stack)+'\n```\n'); datastoreName = ParentWLModel.adapterDictionary.find; }//>- if (!datastoreName) { throw new Error('Consistency violation: Failed to locate proper datastore name for stage 3 query. (This is probably not your fault- more than likely it\'s a bug in Waterline.) Here is the offending stage 3 query: \n```\n'+util.inspect(originalS3Q, {depth:5})+'\n```\n'); } }//>- // ============================================================================ // Look up the parent WL model's datastore from the provided array. var datastore = datastores[datastoreName]; if (!datastore) { throw new Error('Consistency violation: Unexpected Waterline error (bug) when determining the datastore to use for this query. Attempted to look up datastore `'+datastoreName+'` (for model `'+this.currentIdentity+'`) -- but it could not be found in the provided set of datastores: '+util.inspect(datastores, {depth:5})+''); }//-• // Determine if the adapter has a native `join` method. var doesAdapterSupportNativeJoin = _.has(ParentWLModel.adapterDictionary, 'join'); if (doesAdapterSupportNativeJoin) { if (!_.isEqual(ParentWLModel.adapterDictionary.join, datastoreName)) { throw new Error('Consistency violation: The `join` adapter method should not be pointed at a different datastore! (Per-method datastores are longer supported.)'); } // If so, verify that all of the "joins" can be run natively in one fell swoop. // If all the joins are supported, then go ahead and build & return a simple // operation that just sends the entire query down to a single datastore/adapter. var allJoinsAreSupported = _.any(datastore.joins, function(join) { return _.indexOf(datastore.collections, join.childCollectionIdentity) > -1; }); if (allJoinsAreSupported) { // Set the stage 3 query to have `method: 'join'` so it will use the // native `join` adapter method. originalS3Q.method = 'join'; // Set the preCombined flag on our "Operations" instance to indicate that // the integrator doesn't need to run. this.preCombined = true; // Build & return native join operation. return { connectionName: datastoreName, collectionName: this.currentIdentity, queryObj: originalS3Q }; }//-• // (Otherwise, we have to do an xD/A populate. So we just continue on below.) }//>- // --• // IWMIH we'll be doing an xD/A (in-memory) populate. // Make a shallow copy of our S3Q that has the `joins` key removed. // (because this will be an in-memory join now) var tmpS3Q = _.omit(originalS3Q, 'joins'); // Build initial ("parent") operation for xD/A populate. 
return { connectionName: datastoreName, collectionName: this.currentIdentity, queryObj: tmpS3Q }; }; // ╔═╗╔═╗╔╦╗ ┌─┐┌─┐┌┐┌┌┐┌┌─┐┌─┐┌┬┐┬┌─┐┌┐┌┌─┐ // ║ ╦║╣ ║ │ │ │││││││├┤ │ │ ││ ││││└─┐ // ╚═╝╚═╝ ╩ └─┘└─┘┘└┘┘└┘└─┘└─┘ ┴ ┴└─┘┘└┘└─┘ FQRunner.prototype.getConnections = function getConnections() { var self = this; var connections = {}; // Default structure for connection objects var defaultConnection = { collections: [], children: [], joins: [] }; // For each populate build a connection item to build up an entire collection/connection registry // for this query. Using this, queries should be able to be seperated into discrete queries // which can be run on connections in parallel. _.each(this.queryObj.joins, function(join) { var parentConnection; var childConnection; function getConnection(collName) { var collection = self.collections[collName]; var connectionName = collection.adapterDictionary.find; connections[connectionName] = connections[connectionName] || _.merge({}, defaultConnection); return connections[connectionName]; } // If this join is a junctionTable, find the parent operation and add it to that connection's // children instead of creating a new operation on another connection. This allows cross-connection // many-to-many joins to be used where the join relies on the results of the parent operation // being run first. if (join.junctionTable) { // Find the previous join var parentJoin = _.find(self.queryObj.joins, function(otherJoin) { return otherJoin.child === join.parent; }); // Grab the parent join connection var parentJoinConnection = getConnection(parentJoin.parentCollectionIdentity); // Find the connection the parent and child collections belongs to parentConnection = getConnection(join.parentCollectionIdentity); childConnection = getConnection(join.childCollectionIdentity); // Update the registry parentConnection.collections.push(join.parentCollectionIdentity); childConnection.collections.push(join.childCollectionIdentity); parentConnection.children.push(join.parentCollectionIdentity); // Ensure the arrays are made up only of unique values parentConnection.collections = _.uniq(parentConnection.collections); childConnection.collections = _.uniq(childConnection.collections); parentConnection.children = _.uniq(parentConnection.children); // Add the join to the correct joins array. We want it to be on the same // connection as the operation before so the timing is correct. 
parentJoinConnection.joins = parentJoinConnection.joins.concat(join); // Build up the connection registry like normal } else { parentConnection = getConnection(join.parentCollectionIdentity); childConnection = getConnection(join.childCollectionIdentity); parentConnection.collections.push(join.parentCollectionIdentity); childConnection.collections.push(join.childCollectionIdentity); parentConnection.joins = parentConnection.joins.concat(join); } }); return connections; }; // ╦═╗╦ ╦╔╗╔ ┌─┐┌─┐┌─┐┬─┐┌─┐┌┬┐┬┌─┐┌┐┌ // ╠╦╝║ ║║║║ │ │├─┘├┤ ├┬┘├─┤ │ ││ ││││ // ╩╚═╚═╝╝╚╝ └─┘┴ └─┘┴└─┴ ┴ ┴ ┴└─┘┘└┘ FQRunner.prototype.runOperation = function runOperation(operation, cb) { var collectionName = operation.collectionName; var queryObj = operation.queryObj; // Ensure the collection exist if (!_.has(this.collections, collectionName)) { return cb(new Error('Invalid Collection specfied in operation.')); } // Find the collection to use var collection = this.collections[collectionName]; // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // TODO: this should have already been dealt with in forgeStage3Query // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Send the findOne queries to the adapter's find method if (queryObj.method === 'findOne') { queryObj.method = 'find'; console.warn('TODO: this swapping of findOne=>find should have already been dealt with in forgeStage3Query. Stack trace for easy reference:'+(new Error()).stack); } // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Grab the adapter to perform the query on var datastoreName = collection.adapterDictionary[queryObj.method]; var adapter = collection.datastores[datastoreName].adapter; // Run the operation adapter[queryObj.method](datastoreName, queryObj, cb, this.metaContainer); }; // ╔═╗═╗ ╦╔═╗╔═╗╦ ╦╔╦╗╔═╗ ┌─┐┬ ┬┬┬ ┌┬┐ // ║╣ ╔╩╦╝║╣ ║ ║ ║ ║ ║╣ │ ├─┤││ ││ // ╚═╝╩ ╚═╚═╝╚═╝╚═╝ ╩ ╚═╝ └─┘┴ ┴┴┴─┘─┴┘ // ┌─┐┌─┐┌─┐┬─┐┌─┐┌┬┐┬┌─┐┌┐┌┌─┐ // │ │├─┘├┤ ├┬┘├─┤ │ ││ ││││└─┐ // └─┘┴ └─┘┴└─┴ ┴ ┴ ┴└─┘┘└┘└─┘ // If joins are used and an adapter doesn't support them, there will be child // operations that will need to be run. Parse each child operation and run them // along with any tree joins and return an array of children results that can be // combined with the parent results. FQRunner.prototype.execChildOpts = function execChildOpts(parentResults, cb) { var self = this; var childOperations = this.buildChildOpts(parentResults); // Run the generated operations in parallel async.each(childOperations, function(opt, next) { self.collectChildResults(opt, next); }, cb); }; // ╔╗ ╦ ╦╦╦ ╔╦╗ ┌─┐┬ ┬┬┬ ┌┬┐ // ╠╩╗║ ║║║ ║║ │ ├─┤││ ││ // ╚═╝╚═╝╩╩═╝═╩╝ └─┘┴ ┴┴┴─┘─┴┘ // ┌─┐┌─┐┌─┐┬─┐┌─┐┌┬┐┬┌─┐┌┐┌┌─┐ // │ │├─┘├┤ ├┬┘├─┤ │ ││ ││││└─┐ // └─┘┴ └─┘┴└─┴ ┴ ┴ ┴└─┘┘└┘└─┘ // Using the results of a parent operation, build up a set of operations that // contain criteria based on what is returned from a parent operation. These can // be arrays containing more than one operation for each child, which will happen // when "join tables" would be used. Each set should be able to be run in parallel. FQRunner.prototype.buildChildOpts = function buildChildOpts(parentResults) { var self = this; var opts = []; // Build up operations that can be run in parallel using the results of the parent operation _.each(this.operations, function(item) { var localOpts = []; var parents = []; var idx = 0; var using = self.collections[item.collectionName]; // Go through all the parent records and build up an array of keys to look in. 
// This will be used in an IN query to grab all the records needed for the "join". _.each(parentResults, function(result) { if (!_.has(result, item.queryObj.join.parentKey)) { return; } if (_.isNull(result[item.queryObj.join.parentKey]) || _.isUndefined(result[item.queryObj.join.parentKey])) { return; } parents.push(result[item.queryObj.join.parentKey]); }); // If no parents match the join criteria, don't build up an operation if (!parents.length) { return; } // Build up criteria that will be used inside an IN query var criteria = {}; criteria[item.queryObj.join.childKey] = parents; var _tmpCriteria = {}; // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // TODO: remove this halfway normalization code // (it doesn't actually cover all the edge cases anyway, and it shouldn't // be necessary because we normalize all this ahead of time when forging // the stage 2 query. If it IS necessary, then that means we're building // incomplete criteria in Waterline core, so that's an easy fix-- we'd just // need to find those spots and make them use the fully-expanded query language) // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // If the join instruction contains `criteria`... if (item.queryObj.join.criteria) { var userlandCriteria = _.extend({}, item.queryObj.join.criteria); _tmpCriteria = _.extend({}, userlandCriteria); // Ensure `where` criteria is properly formatted if (_.has(userlandCriteria, 'where')) { if (_.isUndefined(userlandCriteria.where)) { delete userlandCriteria.where; } else { // If an array of primary keys was passed in, normalize the criteria if (_.isArray(userlandCriteria.where)) { // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // TODO: verify that this is actually intended to be the pk attribute name // and not a column name: // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - var pkAttrName = self.collections[item.queryObj.join.childCollectionIdentity].primaryKey; var tmpPkWhereClause = {}; tmpPkWhereClause[pkAttrName] = _.extend({}, userlandCriteria.where); // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // TODO: we need to expand this into a proper query (i.e. with an `and` at the top level) // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - userlandCriteria.where = tmpPkWhereClause; } }//</else> }//>- </ if join instruction's criteria has a `where` clause > // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // TODO: replace this merge with explicit overriding. // (my guess is that we just want `_.extend({}, userlandCriteria, { where: criteria })`-- // but even that is a bit confusing b/c `criteria` isn't the same thing as the `where` // clause) // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - criteria = _.merge({}, userlandCriteria, { where: criteria }); }//>- </if join instruction contains `criteria`> // If criteria contains a skip or limit option, an operation will be needed for each parent. 
if (_.has(_tmpCriteria, 'skip') || _.has(_tmpCriteria, 'limit')) { _.each(parents, function(parent) { var tmpCriteria = _.merge({}, criteria); tmpCriteria.where[item.queryObj.join.childKey] = parent; // Mixin the user defined skip and limit if (_.has(_tmpCriteria, 'skip')) { tmpCriteria.skip = _tmpCriteria.skip; } if (_.has(_tmpCriteria, 'limit')) { tmpCriteria.limit = _tmpCriteria.limit; } // Build a simple operation to run with criteria from the parent results. // Give it an ID so that children operations can reference it if needed. localOpts.push({ id: idx, collectionName: item.collectionName, queryObj: { method: item.queryObj.method, using: using.tableName, criteria: tmpCriteria }, join: item.queryObj.join }); }); } else { // Build a simple operation to run with criteria from the parent results. // Give it an ID so that children operations can reference it if needed. localOpts.push({ id: idx, collectionName: item.collectionName, queryObj: { method: item.queryObj.method, using: using.tableName, criteria: criteria }, join: item.queryObj.join }); } // If there are child records, add the opt but don't add the criteria if (!item.queryObj.child) { opts.push(localOpts); return; } localOpts.push({ collectionName: item.queryObj.child.collection, queryObj: { method: item.queryObj.method, using: self.collections[item.queryObj.child.collection].tableName }, parent: idx, join: item.queryObj.child.join }); // Add the local opt to the opts array opts.push(localOpts); }); return opts; }; // ╔═╗╔═╗╦ ╦ ╔═╗╔═╗╔╦╗ ┌─┐┬ ┬┬┬ ┌┬┐ // ║ ║ ║║ ║ ║╣ ║ ║ │ ├─┤││ ││ // ╚═╝╚═╝╩═╝╩═╝╚═╝╚═╝ ╩ └─┘┴ ┴┴┴─┘─┴┘ // ┬─┐┌─┐┌─┐┬ ┬┬ ┌┬┐┌─┐ // ├┬┘├┤ └─┐│ ││ │ └─┐ // ┴└─└─┘└─┘└─┘┴─┘┴ └─┘ // Run a set of child operations and return the results in a namespaced array // that can later be used to do an in-memory join. FQRunner.prototype.collectChildResults = function collectChildResults(opts, cb) { var self = this; var intermediateResults = []; var i = 0; if (!opts || opts.length === 0) { return cb(undefined, {}); } // Run the operations and any child operations in series so that each can access the // results of the previous operation. async.eachSeries(opts, function(opt, next) { self.runChildOperations(intermediateResults, opt, function(err, values) { if (err) { return next(err); } // If the values aren't an array, ensure they get stored as one if (!_.isArray(values)) { // TODO: replace this with code that rejects anything other than an array values = [values]; } // If there are multiple operations and we are on the first one lets put the results // into an intermediate results array if (opts.length > 1 && i === 0) { intermediateResults = intermediateResults.concat(values); } // Add values to the cache key self.cache[opt.collectionName] = self.cache[opt.collectionName] || []; self.cache[opt.collectionName] = self.cache[opt.collectionName].concat(values); // Ensure the values are unique var pkColumnName = self.getPKColumnName(opt.collectionName); self.cache[opt.collectionName] = _.uniq(self.cache[opt.collectionName], pkColumnName); i++; next(); }); }, cb); }; // ╦═╗╦ ╦╔╗╔ ┌─┐┬ ┬┬┬ ┌┬┐ // ╠╦╝║ ║║║║ │ ├─┤││ ││ // ╩╚═╚═╝╝╚╝ └─┘┴ ┴┴┴─┘─┴┘ // ┌─┐┌─┐┌─┐┬─┐┌─┐┌┬┐┬┌─┐┌┐┌ // │ │├─┘├┤ ├┬┘├─┤ │ ││ ││││ // └─┘┴ └─┘┴└─┴ ┴ ┴ ┴└─┘┘└┘ // Executes a child operation and appends the results as a namespaced object to the // main operation results object. 
FQRunner.prototype.runChildOperations = function runChildOperations(intermediateResults, opt, cb) { var self = this; // Check if value has a parent, if so a join table was used and we need to build up dictionary // values that can be used to join the parent and the children together. // If the operation doesn't have a parent operation run it if (!_.has(opt, 'parent')) { return self.runOperation(opt, cb); } // If the operation has a parent, look into the optResults and build up a criteria // object using the results of a previous operation var parents = []; // Build criteria that can be used with an `in` query _.each(intermediateResults, function(result) { parents.push(result[opt.join.parentKey]); }); var criteria = {}; criteria[opt.join.childKey] = parents; // Check if the join contains any criteria if (opt.join.criteria) { var userlandCriteria = _.merge({}, opt.join.criteria); // Ensure `where` criteria is properly formatted if (_.has(userlandCriteria, 'where')) { if (_.isUndefined(userlandCriteria.where)) { delete userlandCriteria.where; } } delete userlandCriteria.sort; delete userlandCriteria.skip; delete userlandCriteria.limit; criteria = _.merge({}, userlandCriteria, { where: criteria }); } // Empty the cache for the join table so we can only add values used var cacheCopy = _.merge({}, self.cache[opt.join.parentCollectionIdentity]); self.cache[opt.join.parentCollectionIdentity] = []; // Run the operation self.runOperation(opt, function(err, values) { if (err) { return cb(err); } // If the values aren't an array, ensure they get stored as one if (!_.isArray(values)) { // TODO: replace this with code that rejects anything other than an array values = [values]; } // Build up the new join table result _.each(values, function(val) { _.each(cacheCopy, function(copy) { if (copy[opt.join.parentKey] === val[opt.join.childKey]) { self.cache[opt.join.parentCollectionIdentity].push(copy); } }); }); // Ensure the values are unique var pkColumnName = self.getPKColumnName(opt.join.parentCollectionIdentity); self.cache[opt.join.parentCollectionIdentity] = _.uniq(self.cache[opt.join.parentCollectionIdentity], pkColumnName); cb(undefined, values); }); }; // ╔═╗╦╔╗╔╔╦╗ ┌─┐┌─┐┬ ┬ ┌─┐┌─┐┌┬┐┬┌─┐┌┐┌ // ╠╣ ║║║║ ║║ │ │ ││ │ ├┤ │ │ ││ ││││ // ╚ ╩╝╚╝═╩╝ └─┘└─┘┴─┘┴─┘└─┘└─┘ ┴ ┴└─┘┘└┘ // ┌─┐┬─┐┬┌┬┐┌─┐┬─┐┬ ┬ ┬┌─┌─┐┬ ┬ // ├─┘├┬┘││││├─┤├┬┘└┬┘ ├┴┐├┤ └┬┘ // ┴ ┴└─┴┴ ┴┴ ┴┴└─ ┴ ┴ ┴└─┘ ┴ // (Note: this returns the column name of the pk -- not the attr name!) FQRunner.prototype.getPKColumnName = function (identity) { var WLModel = this.collections[identity]; return WLModel.schema[WLModel.primaryKey].columnName; };
Fix some references
lib/waterline/utils/query/help-find.js
Fix some references
<ide><path>ib/waterline/utils/query/help-find.js <ide> } <ide> <ide> // If there are child records, add the opt but don't add the criteria <del> if (!item.queryObj.child) { <add> if (!item.child) { <ide> opts.push(localOpts); <ide> return; <ide> } <ide> <ide> localOpts.push({ <del> collectionName: item.queryObj.child.collection, <add> collectionName: item.child.collectionName, <ide> queryObj: { <ide> method: item.queryObj.method, <del> using: self.collections[item.queryObj.child.collection].tableName <add> using: self.collections[item.child.collectionName].tableName, <ide> }, <ide> parent: idx, <del> join: item.queryObj.child.join <add> join: item.child.queryObj.join <ide> }); <ide> <ide> // Add the local opt to the opts array
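The Waterline record above spends most of its doc-comments explaining the cross-adapter populate: collect the parent records' key values, turn them into an IN-style where clause for the child query, and then stitch the child rows back onto their parents in memory. A minimal sketch of that idea follows, in plain JavaScript; the function and option names (buildChildCriteria, inMemoryPopulate, joinSpec) are hypothetical stand-ins, not Waterline's actual integrator API.

// Hypothetical sketch (not Waterline's API): the two core steps of an
// in-memory, cross-adapter populate as described in the record above.

// Step 1: build an IN-style criteria for the child query from the parent
// records' key values, skipping null/undefined keys.
function buildChildCriteria(parentRecords, joinSpec) {
  var parentKeys = parentRecords
    .map(function (record) { return record[joinSpec.parentKey]; })
    .filter(function (value) { return value !== null && value !== undefined; });
  var criteria = {};
  criteria[joinSpec.childKey] = parentKeys; // behaves like `WHERE childKey IN (...)`
  return criteria;
}

// Step 2: group the fetched child records by foreign key and attach them to
// their parents -- roughly the job of the "integrator" step.
function inMemoryPopulate(parentRecords, childRecords, joinSpec) {
  var childrenByFk = {};
  childRecords.forEach(function (child) {
    var fk = child[joinSpec.childKey];
    (childrenByFk[fk] = childrenByFk[fk] || []).push(child);
  });
  return parentRecords.map(function (parent) {
    var populated = Object.assign({}, parent);
    populated[joinSpec.alias] = childrenByFk[parent[joinSpec.parentKey]] || [];
    return populated;
  });
}

// Usage:
// var users = [{ id: 1, name: 'ann' }, { id: 2, name: 'bob' }];
// var pets  = [{ id: 10, ownerId: 1 }, { id: 11, ownerId: 1 }];
// buildChildCriteria(users, { parentKey: 'id', childKey: 'ownerId' });
//   // => { ownerId: [1, 2] }
// inMemoryPopulate(users, pets, { parentKey: 'id', childKey: 'ownerId', alias: 'pets' });
//   // => ann carries both pets, bob carries an empty array

The runner in the record layers caching, per-parent skip/limit handling, and junction-table hops on top of this, but the key-collection and regrouping steps have the same shape.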
Java
apache-2.0
91da90ee9ed7f264fae2b14b581fee41c5821951
0
gurbuzali/hazelcast-jet,gurbuzali/hazelcast-jet
/* * Copyright (c) 2008-2017, Hazelcast, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hazelcast.jet.impl; import com.hazelcast.client.impl.HazelcastClientInstanceImpl; import com.hazelcast.client.impl.protocol.ClientMessage; import com.hazelcast.client.impl.protocol.codec.JetCancelJobCodec; import com.hazelcast.client.impl.protocol.codec.JetGetJobIdsCodec; import com.hazelcast.client.impl.protocol.codec.JetGetJobStatusCodec; import com.hazelcast.client.impl.protocol.codec.JetJoinSubmittedJobCodec; import com.hazelcast.client.impl.protocol.codec.JetSubmitJobCodec; import com.hazelcast.client.spi.impl.ClientInvocation; import com.hazelcast.client.spi.impl.ClientInvocationFuture; import com.hazelcast.core.ExecutionCallback; import com.hazelcast.core.ICompletableFuture; import com.hazelcast.core.Member; import com.hazelcast.jet.core.DAG; import com.hazelcast.jet.JetInstance; import com.hazelcast.jet.Job; import com.hazelcast.jet.core.JobStatus; import com.hazelcast.jet.config.JetConfig; import com.hazelcast.jet.config.JobConfig; import com.hazelcast.jet.impl.util.ExceptionUtil; import com.hazelcast.logging.ILogger; import com.hazelcast.nio.Address; import com.hazelcast.nio.serialization.Data; import com.hazelcast.spi.serialization.SerializationService; import javax.annotation.Nonnull; import java.util.Collection; import java.util.List; import java.util.Optional; import java.util.Set; import java.util.concurrent.ExecutionException; import java.util.concurrent.Executor; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import static com.hazelcast.jet.impl.util.ExceptionUtil.rethrow; import static com.hazelcast.jet.impl.util.Util.idToString; import static java.util.stream.Collectors.toList; /** * Client-side {@code JetInstance} implementation */ public class JetClientInstanceImpl extends AbstractJetInstance { private final HazelcastClientInstanceImpl client; private final ILogger logger; private SerializationService serializationService; public JetClientInstanceImpl(HazelcastClientInstanceImpl hazelcastInstance) { super(hazelcastInstance); this.client = hazelcastInstance; this.logger = getLogger(JetInstance.class); this.serializationService = client.getSerializationService(); ExceptionUtil.registerJetExceptions(hazelcastInstance.getClientExceptionFactory()); } @Override public JetConfig getConfig() { throw new UnsupportedOperationException("Jet Configuration is not available on the client"); } @Override public Job newJob(DAG dag) { SubmittedJobImpl job = new SubmittedJobImpl(this, getLogger(SubmittedJobImpl.class), dag, new JobConfig()); job.init(); return job; } @Override public Job newJob(DAG dag, JobConfig config) { SubmittedJobImpl job = new SubmittedJobImpl(this, getLogger(SubmittedJobImpl.class), dag, config); job.init(); return job; } @Override public Collection<Job> getJobs() { ClientMessage request = JetGetJobIdsCodec.encodeRequest(); ClientInvocation invocation = new ClientInvocation(client, request, null, masterAddress()); 
Set<Long> jobIds; try { ClientMessage clientMessage = invocation.invoke().get(); JetGetJobIdsCodec.ResponseParameters response = JetGetJobIdsCodec.decodeResponse(clientMessage); jobIds = serializationService.toObject(response.response); } catch (Exception e) { throw rethrow(e); } List<Job> jobs = jobIds.stream().map(jobId -> new TrackedJobImpl(getLogger(TrackedJobImpl.class), jobId)) .collect(toList()); jobs.forEach(job -> ((TrackedJobImpl) job).init()); return jobs; } private JobStatus sendJobStatusRequest(long jobId) { ClientMessage request = JetGetJobStatusCodec.encodeRequest(jobId); ClientInvocation invocation = new ClientInvocation(client, request, jobObjectName(jobId), masterAddress()); try { ClientMessage clientMessage = invocation.invoke().get(); JetGetJobStatusCodec.ResponseParameters response = JetGetJobStatusCodec.decodeResponse(clientMessage); return serializationService.toObject(response.response); } catch (Exception e) { throw rethrow(e); } } private ILogger getLogger(Class type) { return client.getLoggingService().getLogger(type); } private Address masterAddress() { Optional<Member> first = client.getCluster().getMembers().stream().findFirst(); return first.orElseThrow(() -> new IllegalStateException("No members found in cluster")).getAddress(); } private static String jobObjectName(long jobId) { return "jobId=" + idToString(jobId); } private class SubmittedJobImpl extends AbstractSubmittedJobImpl { SubmittedJobImpl(JetInstance jetInstance, ILogger logger, DAG dag, JobConfig config) { super(jetInstance, logger, dag, config); } @Override protected Address getMasterAddress() { return JetClientInstanceImpl.this.masterAddress(); } @Override protected ICompletableFuture<Void> sendJoinRequest(Address masterAddress) { ClientInvocation invocation = new ClientInvocation(client, createJoinJobRequest(), jobObjectName(getJobId()), masterAddress); return new ExecutionFuture(invocation.invoke(), getJobId(), masterAddress); } @Override protected JobStatus sendJobStatusRequest() { return JetClientInstanceImpl.this.sendJobStatusRequest(getJobId()); } private ClientMessage createJoinJobRequest() { Data serializedDag = serializationService.toData(dag); Data serializedConfig = serializationService.toData(config); return JetSubmitJobCodec.encodeRequest(getJobId(), serializedDag, serializedConfig); } } private class TrackedJobImpl extends AbstractTrackedJobImpl { TrackedJobImpl(ILogger logger, long jobId) { super(logger, jobId); } @Override protected Address getMasterAddress() { return JetClientInstanceImpl.this.masterAddress(); } @Override protected ICompletableFuture<Void> sendJoinRequest(Address masterAddress) { ClientMessage request = JetJoinSubmittedJobCodec.encodeRequest(getJobId()); ClientInvocation invocation = new ClientInvocation(client, request, jobObjectName(getJobId()), masterAddress); return new ExecutionFuture(invocation.invoke(), getJobId(), masterAddress); } @Override protected JobStatus sendJobStatusRequest() { return JetClientInstanceImpl.this.sendJobStatusRequest(getJobId()); } } private final class ExecutionFuture implements ICompletableFuture<Void> { private final ClientInvocationFuture future; private final long jobId; private final Address executionAddress; ExecutionFuture(ClientInvocationFuture future, long jobId, Address executionAddress) { this.future = future; this.jobId = jobId; this.executionAddress = executionAddress; } @Override public boolean cancel(boolean mayInterruptIfRunning) { boolean cancelled = future.cancel(true); if (!cancelled) { return false; } new 
ClientInvocation(client, JetCancelJobCodec.encodeRequest(jobId), jobObjectName(jobId), executionAddress) .invoke().andThen(new ExecutionCallback<ClientMessage>() { @Override public void onResponse(ClientMessage clientMessage) { //ignored } @Override public void onFailure(Throwable throwable) { logger.warning("Error cancelling job with jobId " + idToString(jobId), throwable); } }); return true; } @Override public boolean isCancelled() { return future.isCancelled(); } @Override public boolean isDone() { return future.isDone(); } @Override public Void get() throws InterruptedException, ExecutionException { future.get(); return null; } @Override public Void get(long timeout, @Nonnull TimeUnit unit) throws InterruptedException, ExecutionException, TimeoutException { future.get(timeout, unit); return null; } @Override public void andThen(ExecutionCallback<Void> callback) { future.andThen(new ExecutionCallback<ClientMessage>() { @Override public void onResponse(ClientMessage response) { callback.onResponse(null); } @Override public void onFailure(Throwable t) { callback.onFailure(t); } }); } @Override public void andThen(ExecutionCallback<Void> callback, Executor executor) { future.andThen(new ExecutionCallback<ClientMessage>() { @Override public void onResponse(ClientMessage response) { callback.onResponse(null); } @Override public void onFailure(Throwable t) { callback.onFailure(t); } }, executor); } } }
hazelcast-jet-core/src/main/java/com/hazelcast/jet/impl/JetClientInstanceImpl.java
/* * Copyright (c) 2008-2017, Hazelcast, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hazelcast.jet.impl; import com.hazelcast.client.impl.HazelcastClientInstanceImpl; import com.hazelcast.client.impl.protocol.ClientMessage; import com.hazelcast.client.impl.protocol.codec.JetCancelJobCodec; import com.hazelcast.client.impl.protocol.codec.JetGetJobIdsCodec; import com.hazelcast.client.impl.protocol.codec.JetGetJobStatusCodec; import com.hazelcast.client.impl.protocol.codec.JetJoinSubmittedJobCodec; import com.hazelcast.client.impl.protocol.codec.JetSubmitJobCodec; import com.hazelcast.client.spi.impl.ClientInvocation; import com.hazelcast.client.spi.impl.ClientInvocationFuture; import com.hazelcast.core.ExecutionCallback; import com.hazelcast.core.ICompletableFuture; import com.hazelcast.core.Member; import com.hazelcast.jet.core.DAG; import com.hazelcast.jet.JetInstance; import com.hazelcast.jet.Job; import com.hazelcast.jet.core.JobStatus; import com.hazelcast.jet.config.JetConfig; import com.hazelcast.jet.config.JobConfig; import com.hazelcast.jet.impl.util.ExceptionUtil; import com.hazelcast.logging.ILogger; import com.hazelcast.nio.Address; import com.hazelcast.nio.serialization.Data; import com.hazelcast.spi.serialization.SerializationService; import javax.annotation.Nonnull; import java.util.Collection; import java.util.List; import java.util.Optional; import java.util.Set; import java.util.concurrent.ExecutionException; import java.util.concurrent.Executor; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import static com.hazelcast.jet.impl.util.ExceptionUtil.rethrow; import static com.hazelcast.jet.impl.util.Util.idToString; import static java.util.stream.Collectors.toList; /** * Client-side {@code JetInstance} implementation */ public class JetClientInstanceImpl extends AbstractJetInstance { private final HazelcastClientInstanceImpl client; private final ILogger logger; private SerializationService serializationService; public JetClientInstanceImpl(HazelcastClientInstanceImpl hazelcastInstance) { super(hazelcastInstance); this.client = hazelcastInstance; this.logger = getLogger(JetInstance.class); this.serializationService = client.getSerializationService(); ExceptionUtil.registerJetExceptions(hazelcastInstance.getClientExceptionFactory()); } @Override public JetConfig getConfig() { throw new UnsupportedOperationException("Jet Configuration is not available on the client"); } @Override public Job newJob(DAG dag) { SubmittedJobImpl job = new SubmittedJobImpl(this, getLogger(SubmittedJobImpl.class), dag, new JobConfig()); job.init(); return job; } @Override public Job newJob(DAG dag, JobConfig config) { SubmittedJobImpl job = new SubmittedJobImpl(this, getLogger(SubmittedJobImpl.class), dag, config); job.init(); return job; } @Override public Collection<Job> getJobs() { ClientMessage request = JetGetJobIdsCodec.encodeRequest(); ClientInvocation invocation = new ClientInvocation(client, request, masterAddress()); 
Set<Long> jobIds; try { ClientMessage clientMessage = invocation.invoke().get(); JetGetJobIdsCodec.ResponseParameters response = JetGetJobIdsCodec.decodeResponse(clientMessage); jobIds = serializationService.toObject(response.response); } catch (Exception e) { throw rethrow(e); } List<Job> jobs = jobIds.stream().map(jobId -> new TrackedJobImpl(getLogger(TrackedJobImpl.class), jobId)) .collect(toList()); jobs.forEach(job -> ((TrackedJobImpl) job).init()); return jobs; } private JobStatus sendJobStatusRequest(long jobId) { ClientMessage request = JetGetJobStatusCodec.encodeRequest(jobId); ClientInvocation invocation = new ClientInvocation(client, request, masterAddress()); try { ClientMessage clientMessage = invocation.invoke().get(); JetGetJobStatusCodec.ResponseParameters response = JetGetJobStatusCodec.decodeResponse(clientMessage); return serializationService.toObject(response.response); } catch (Exception e) { throw rethrow(e); } } private ILogger getLogger(Class type) { return client.getLoggingService().getLogger(type); } private Address masterAddress() { Optional<Member> first = client.getCluster().getMembers().stream().findFirst(); return first.orElseThrow(() -> new IllegalStateException("No members found in cluster")).getAddress(); } private class SubmittedJobImpl extends AbstractSubmittedJobImpl { SubmittedJobImpl(JetInstance jetInstance, ILogger logger, DAG dag, JobConfig config) { super(jetInstance, logger, dag, config); } @Override protected Address getMasterAddress() { return JetClientInstanceImpl.this.masterAddress(); } @Override protected ICompletableFuture<Void> sendJoinRequest(Address masterAddress) { ClientInvocation invocation = new ClientInvocation(client, createJoinJobRequest(), masterAddress); return new ExecutionFuture(invocation.invoke(), getJobId(), masterAddress); } @Override protected JobStatus sendJobStatusRequest() { return JetClientInstanceImpl.this.sendJobStatusRequest(getJobId()); } private ClientMessage createJoinJobRequest() { Data serializedDag = serializationService.toData(dag); Data serializedConfig = serializationService.toData(config); return JetSubmitJobCodec.encodeRequest(getJobId(), serializedDag, serializedConfig); } } private class TrackedJobImpl extends AbstractTrackedJobImpl { TrackedJobImpl(ILogger logger, long jobId) { super(logger, jobId); } @Override protected Address getMasterAddress() { return JetClientInstanceImpl.this.masterAddress(); } @Override protected ICompletableFuture<Void> sendJoinRequest(Address masterAddress) { ClientMessage request = JetJoinSubmittedJobCodec.encodeRequest(getJobId()); ClientInvocation invocation = new ClientInvocation(client, request, masterAddress); return new ExecutionFuture(invocation.invoke(), getJobId(), masterAddress); } @Override protected JobStatus sendJobStatusRequest() { return JetClientInstanceImpl.this.sendJobStatusRequest(getJobId()); } } private final class ExecutionFuture implements ICompletableFuture<Void> { private final ClientInvocationFuture future; private final long executionId; private final Address executionAddress; ExecutionFuture(ClientInvocationFuture future, long executionId, Address executionAddress) { this.future = future; this.executionId = executionId; this.executionAddress = executionAddress; } @Override public boolean cancel(boolean mayInterruptIfRunning) { boolean cancelled = future.cancel(true); if (!cancelled) { return false; } new ClientInvocation(client, JetCancelJobCodec.encodeRequest(executionId), executionAddress) .invoke().andThen(new ExecutionCallback<ClientMessage>() 
{ @Override public void onResponse(ClientMessage clientMessage) { //ignored } @Override public void onFailure(Throwable throwable) { logger.warning("Error cancelling job with executionId " + idToString(executionId), throwable); } }); return true; } @Override public boolean isCancelled() { return future.isCancelled(); } @Override public boolean isDone() { return future.isDone(); } @Override public Void get() throws InterruptedException, ExecutionException { future.get(); return null; } @Override public Void get(long timeout, @Nonnull TimeUnit unit) throws InterruptedException, ExecutionException, TimeoutException { future.get(timeout, unit); return null; } @Override public void andThen(ExecutionCallback<Void> callback) { future.andThen(new ExecutionCallback<ClientMessage>() { @Override public void onResponse(ClientMessage response) { callback.onResponse(null); } @Override public void onFailure(Throwable t) { callback.onFailure(t); } }); } @Override public void andThen(ExecutionCallback<Void> callback, Executor executor) { future.andThen(new ExecutionCallback<ClientMessage>() { @Override public void onResponse(ClientMessage response) { callback.onResponse(null); } @Override public void onFailure(Throwable t) { callback.onFailure(t); } }, executor); } } }
Fix compilation for new imdg 3.9-SNAPSHOT
hazelcast-jet-core/src/main/java/com/hazelcast/jet/impl/JetClientInstanceImpl.java
Fix compilation for new imdg 3.9-SNAPSHOT
<ide><path>hazelcast-jet-core/src/main/java/com/hazelcast/jet/impl/JetClientInstanceImpl.java
<ide>     @Override
<ide>     public Collection<Job> getJobs() {
<ide>         ClientMessage request = JetGetJobIdsCodec.encodeRequest();
<del>        ClientInvocation invocation = new ClientInvocation(client, request, masterAddress());
<add>        ClientInvocation invocation = new ClientInvocation(client, request, null, masterAddress());
<ide>         Set<Long> jobIds;
<ide>         try {
<ide>             ClientMessage clientMessage = invocation.invoke().get();
<ide>
<ide>     private JobStatus sendJobStatusRequest(long jobId) {
<ide>         ClientMessage request = JetGetJobStatusCodec.encodeRequest(jobId);
<del>        ClientInvocation invocation = new ClientInvocation(client, request, masterAddress());
<add>        ClientInvocation invocation = new ClientInvocation(client, request, jobObjectName(jobId), masterAddress());
<ide>         try {
<ide>             ClientMessage clientMessage = invocation.invoke().get();
<ide>             JetGetJobStatusCodec.ResponseParameters response = JetGetJobStatusCodec.decodeResponse(clientMessage);
<ide>         return first.orElseThrow(() -> new IllegalStateException("No members found in cluster")).getAddress();
<ide>     }
<ide>
<add>    private static String jobObjectName(long jobId) {
<add>        return "jobId=" + idToString(jobId);
<add>    }
<add>
<ide>     private class SubmittedJobImpl extends AbstractSubmittedJobImpl {
<ide>
<ide>         SubmittedJobImpl(JetInstance jetInstance, ILogger logger, DAG dag, JobConfig config) {
<ide>
<ide>         @Override
<ide>         protected ICompletableFuture<Void> sendJoinRequest(Address masterAddress) {
<del>            ClientInvocation invocation = new ClientInvocation(client, createJoinJobRequest(), masterAddress);
<add>            ClientInvocation invocation = new ClientInvocation(client, createJoinJobRequest(), jobObjectName(getJobId()),
<add>                    masterAddress);
<ide>             return new ExecutionFuture(invocation.invoke(), getJobId(), masterAddress);
<ide>         }
<ide>
<ide>         @Override
<ide>         protected ICompletableFuture<Void> sendJoinRequest(Address masterAddress) {
<ide>             ClientMessage request = JetJoinSubmittedJobCodec.encodeRequest(getJobId());
<del>            ClientInvocation invocation = new ClientInvocation(client, request, masterAddress);
<add>            ClientInvocation invocation = new ClientInvocation(client, request, jobObjectName(getJobId()), masterAddress);
<ide>             return new ExecutionFuture(invocation.invoke(), getJobId(), masterAddress);
<ide>         }
<ide>
<ide>     private final class ExecutionFuture implements ICompletableFuture<Void> {
<ide>
<ide>         private final ClientInvocationFuture future;
<del>        private final long executionId;
<add>        private final long jobId;
<ide>         private final Address executionAddress;
<ide>
<del>        ExecutionFuture(ClientInvocationFuture future, long executionId, Address executionAddress) {
<add>        ExecutionFuture(ClientInvocationFuture future, long jobId, Address executionAddress) {
<ide>             this.future = future;
<del>            this.executionId = executionId;
<add>            this.jobId = jobId;
<ide>             this.executionAddress = executionAddress;
<ide>         }
<ide>
<ide>             if (!cancelled) {
<ide>                 return false;
<ide>             }
<del>            new ClientInvocation(client, JetCancelJobCodec.encodeRequest(executionId), executionAddress)
<add>            new ClientInvocation(client, JetCancelJobCodec.encodeRequest(jobId), jobObjectName(jobId), executionAddress)
<ide>                     .invoke().andThen(new ExecutionCallback<ClientMessage>() {
<ide>                         @Override
<ide>                         public void onResponse(ClientMessage clientMessage) {
<ide>
<ide>                         @Override
<ide>                         public void onFailure(Throwable throwable) {
<del>                            logger.warning("Error cancelling job with executionId " + idToString(executionId), throwable);
<add>                            logger.warning("Error cancelling job with jobId " + idToString(jobId), throwable);
<ide>                         }
<ide>                     });
<ide>             return true;
Java
epl-1.0
40381a949f6ce8340236bc842ec8fe7bc58e6b92
0
rherrmann/eclipse-extras,rherrmann/eclipse-extras
package com.codeaffine.extras.ide.internal.launch;

import org.eclipse.core.commands.AbstractHandler;
import org.eclipse.core.commands.ExecutionEvent;
import org.eclipse.core.commands.ExecutionException;
import org.eclipse.debug.core.ILaunchConfiguration;
import org.eclipse.debug.ui.DebugUITools;
import org.eclipse.jface.window.Window;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.ui.handlers.HandlerUtil;

public class LaunchHandler extends AbstractHandler {

  public static final String COMMAND_ID = "com.codeaffine.extras.ide.internal.LaunchCommand";

  @Override
  public Object execute( ExecutionEvent event ) throws ExecutionException {
    LaunchSelectionDialog dialog = createDialog( event );
    if( dialog.open() == Window.OK ) {
      launchSelectedElements( dialog.getLaunchModeId(), dialog.getResult() );
    }
    return null;
  }

  private static LaunchSelectionDialog createDialog( ExecutionEvent event ) {
    Shell shell = HandlerUtil.getActiveWorkbenchWindow( event ).getShell();
    return new LaunchSelectionDialog( shell );
  }

  private static void launchSelectedElements( String launchModeId, Object[] selectedElements ) {
    for( Object selectedElement : selectedElements ) {
      ILaunchConfiguration launchConfig = ( ILaunchConfiguration )selectedElement;
      DebugUITools.launch( launchConfig, launchModeId );
    }
  }
}
com.codeaffine.extras.ide/src/com/codeaffine/extras/ide/internal/launch/LaunchHandler.java
package com.codeaffine.extras.ide.internal.launch;

import org.eclipse.core.commands.AbstractHandler;
import org.eclipse.core.commands.ExecutionEvent;
import org.eclipse.core.commands.ExecutionException;
import org.eclipse.debug.core.ILaunchConfiguration;
import org.eclipse.debug.ui.DebugUITools;
import org.eclipse.jface.window.Window;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.ui.handlers.HandlerUtil;

public class LaunchHandler extends AbstractHandler {

  public static final String COMMAND_ID = "com.codeaffine.extras.ide.internal.LaunchCommand";

  @Override
  public Object execute( ExecutionEvent event ) throws ExecutionException {
    Shell shell = HandlerUtil.getActiveWorkbenchWindow( event ).getShell();
    LaunchSelectionDialog dialog = new LaunchSelectionDialog( shell );
    if( dialog.open() == Window.OK ) {
      Object[] selectedElements = dialog.getResult();
      for( Object selectedElement : selectedElements ) {
        ILaunchConfiguration launchConfig = ( ILaunchConfiguration )selectedElement;
        DebugUITools.launch( launchConfig, dialog.getLaunchModeId() );
      }
    }
    return null;
  }
}
Refactor LaunchHandler
com.codeaffine.extras.ide/src/com/codeaffine/extras/ide/internal/launch/LaunchHandler.java
Refactor LaunchHandler
<ide><path>com.codeaffine.extras.ide/src/com/codeaffine/extras/ide/internal/launch/LaunchHandler.java
<ide>
<ide>   @Override
<ide>   public Object execute( ExecutionEvent event ) throws ExecutionException {
<del>    Shell shell = HandlerUtil.getActiveWorkbenchWindow( event ).getShell();
<del>    LaunchSelectionDialog dialog = new LaunchSelectionDialog( shell );
<add>    LaunchSelectionDialog dialog = createDialog( event );
<ide>     if( dialog.open() == Window.OK ) {
<del>      Object[] selectedElements = dialog.getResult();
<del>      for( Object selectedElement : selectedElements ) {
<del>        ILaunchConfiguration launchConfig = ( ILaunchConfiguration )selectedElement;
<del>        DebugUITools.launch( launchConfig, dialog.getLaunchModeId() );
<del>      }
<add>      launchSelectedElements( dialog.getLaunchModeId(), dialog.getResult() );
<ide>     }
<ide>     return null;
<ide>   }
<add>
<add>  private static LaunchSelectionDialog createDialog( ExecutionEvent event ) {
<add>    Shell shell = HandlerUtil.getActiveWorkbenchWindow( event ).getShell();
<add>    return new LaunchSelectionDialog( shell );
<add>  }
<add>
<add>  private static void launchSelectedElements( String launchModeId, Object[] selectedElements ) {
<add>    for( Object selectedElement : selectedElements ) {
<add>      ILaunchConfiguration launchConfig = ( ILaunchConfiguration )selectedElement;
<add>      DebugUITools.launch( launchConfig, launchModeId );
<add>    }
<add>  }
<ide> }
JavaScript
isc
fb8eefd00d3f44af6e5bddf2856623350ed9fdf4
0
ripple/ripple-lib,ripple/ripple-lib,wilsonianb/ripple-lib,ripple/ripple-lib,darkdarkdragon/ripple-lib,wilsonianb/ripple-lib,ripple/ripple-lib,darkdarkdragon/ripple-lib,wilsonianb/ripple-lib,darkdarkdragon/ripple-lib
/* @flow */
'use strict';
const _ = require('lodash');
const transactionParser = require('ripple-lib-transactionparser');
const toTimestamp = require('../../../core/utils').toTimestamp;
const utils = require('../utils');
const BigNumber = require('bignumber.js');

function adjustQualityForXRP(quality: string, takerGetsCurrency: string,
  takerPaysCurrency: string) {
  // quality = takerPays.value/takerGets.value
  // using drops (1e-6 XRP) for XRP values
  const numeratorShift = (takerPaysCurrency === 'XRP' ? -6 : 0);
  const denominatorShift = (takerGetsCurrency === 'XRP' ? -6 : 0);
  const shift = numeratorShift - denominatorShift;
  return shift === 0 ? quality :
    (new BigNumber(quality)).shift(shift).toString();
}

function parseTimestamp(tx: {date: string}): string | void {
  return tx.date ? (new Date(toTimestamp(tx.date))).toISOString() : undefined;
}

function removeUndefined(obj: Object): Object {
  return _.omit(obj, _.isUndefined);
}

function removeEmptyCounterparty(amount) {
  if (amount.counterparty === '') {
    delete amount.counterparty;
  }
}

function removeEmptyCounterpartyInBalanceChanges(balanceChanges) {
  _.forEach(balanceChanges, (changes) => {
    _.forEach(changes, removeEmptyCounterparty);
  });
}

function removeEmptyCounterpartyInOrderbookChanges(orderbookChanges) {
  _.forEach(orderbookChanges, (changes) => {
    _.forEach(changes, (change) => {
      _.forEach(change, removeEmptyCounterparty);
    });
  });
}

function parseOutcome(tx: Object): ?Object {
  if (!tx.validated) {
    return undefined;
  }

  const balanceChanges = transactionParser.parseBalanceChanges(tx.meta);
  const orderbookChanges = transactionParser.parseOrderBookChanges(tx.meta);
  removeEmptyCounterpartyInBalanceChanges(balanceChanges);
  removeEmptyCounterpartyInOrderbookChanges(orderbookChanges);

  return {
    result: tx.meta.TransactionResult,
    timestamp: parseTimestamp(tx),
    fee: utils.common.dropsToXrp(tx.Fee),
    balanceChanges: balanceChanges,
    orderbookChanges: orderbookChanges,
    ledgerVersion: tx.ledger_index,
    indexInLedger: tx.meta.TransactionIndex,
    sequence: tx.Sequence
  };
}

module.exports = {
  parseOutcome,
  removeUndefined,
  adjustQualityForXRP,
  dropsToXrp: utils.common.dropsToXrp,
  constants: utils.common.constants,
  core: utils.common.core
};
src/api/ledger/parse/utils.js
/* @flow */
'use strict';
const _ = require('lodash');
const transactionParser = require('ripple-lib-transactionparser');
const toTimestamp = require('../../../core/utils').toTimestamp;
const utils = require('../utils');
const BigNumber = require('bignumber.js');

function adjustQualityForXRP(quality: string, takerGetsCurrency: string,
  takerPaysCurrency: string) {
  const shift = (takerGetsCurrency === 'XRP' ? 6 : 0)
    - (takerPaysCurrency === 'XRP' ? 6 : 0);
  return shift === 0 ? quality :
    (new BigNumber(quality)).shift(shift).toString();
}

function parseTimestamp(tx: {date: string}): string | void {
  return tx.date ? (new Date(toTimestamp(tx.date))).toISOString() : undefined;
}

function removeUndefined(obj: Object): Object {
  return _.omit(obj, _.isUndefined);
}

function removeEmptyCounterparty(amount) {
  if (amount.counterparty === '') {
    delete amount.counterparty;
  }
}

function removeEmptyCounterpartyInBalanceChanges(balanceChanges) {
  _.forEach(balanceChanges, (changes) => {
    _.forEach(changes, removeEmptyCounterparty);
  });
}

function removeEmptyCounterpartyInOrderbookChanges(orderbookChanges) {
  _.forEach(orderbookChanges, (changes) => {
    _.forEach(changes, (change) => {
      _.forEach(change, removeEmptyCounterparty);
    });
  });
}

function parseOutcome(tx: Object): ?Object {
  if (!tx.validated) {
    return undefined;
  }

  const balanceChanges = transactionParser.parseBalanceChanges(tx.meta);
  const orderbookChanges = transactionParser.parseOrderBookChanges(tx.meta);
  removeEmptyCounterpartyInBalanceChanges(balanceChanges);
  removeEmptyCounterpartyInOrderbookChanges(orderbookChanges);

  return {
    result: tx.meta.TransactionResult,
    timestamp: parseTimestamp(tx),
    fee: utils.common.dropsToXrp(tx.Fee),
    balanceChanges: balanceChanges,
    orderbookChanges: orderbookChanges,
    ledgerVersion: tx.ledger_index,
    indexInLedger: tx.meta.TransactionIndex,
    sequence: tx.Sequence
  };
}

module.exports = {
  parseOutcome,
  removeUndefined,
  adjustQualityForXRP,
  dropsToXrp: utils.common.dropsToXrp,
  constants: utils.common.constants,
  core: utils.common.core
};
Refactor to make quality adjustment for XRP more clear
src/api/ledger/parse/utils.js
Refactor to make quality adjustment for XRP more clear
<ide><path>src/api/ledger/parse/utils.js
<ide>
<ide> function adjustQualityForXRP(quality: string, takerGetsCurrency: string,
<ide>   takerPaysCurrency: string) {
<del>  const shift = (takerGetsCurrency === 'XRP' ? 6 : 0)
<del>    - (takerPaysCurrency === 'XRP' ? 6 : 0);
<add>  // quality = takerPays.value/takerGets.value
<add>  // using drops (1e-6 XRP) for XRP values
<add>  const numeratorShift = (takerPaysCurrency === 'XRP' ? -6 : 0);
<add>  const denominatorShift = (takerGetsCurrency === 'XRP' ? -6 : 0);
<add>  const shift = numeratorShift - denominatorShift;
<ide>   return shift === 0 ? quality :
<ide>     (new BigNumber(quality)).shift(shift).toString();
<ide> }
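The refactored adjustQualityForXRP above spells out that order quality is takerPays.value divided by takerGets.value, and that XRP amounts are carried in drops (1e-6 XRP), so each XRP leg contributes a decimal shift of -6. The sketch below reproduces only that arithmetic, using java.math.BigDecimal as a stand-in assumption for bignumber.js; it is an illustration, not ripple-lib code.

import java.math.BigDecimal;

// Worked sketch of the shift arithmetic in adjustQualityForXRP above.
// BigDecimal is a stand-in assumption for bignumber.js; this is not ripple-lib code.
public final class QualityShiftSketch {

    static BigDecimal adjustQualityForXRP(BigDecimal quality,
                                          String takerGetsCurrency,
                                          String takerPaysCurrency) {
        int numeratorShift = "XRP".equals(takerPaysCurrency) ? -6 : 0;   // takerPays kept in drops
        int denominatorShift = "XRP".equals(takerGetsCurrency) ? -6 : 0; // takerGets kept in drops
        int shift = numeratorShift - denominatorShift;
        // scaleByPowerOfTen(n) multiplies by 10^n, the same role BigNumber#shift plays above.
        return shift == 0 ? quality : quality.scaleByPowerOfTen(shift);
    }

    public static void main(String[] args) {
        // Offer selling 1,000,000 drops (1 XRP) for 1 USD: raw quality 1/1,000,000,
        // adjusted by 10^6 to read as 1 USD per XRP.
        System.out.println(adjustQualityForXRP(new BigDecimal("0.000001"), "XRP", "USD")); // 1
    }
}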
Java
apache-2.0
727dfcef1a40afb17b97f560cdf8cb41d640505c
0
jeorme/OG-Platform,jerome79/OG-Platform,McLeodMoores/starling,DevStreet/FinanceAnalytics,nssales/OG-Platform,codeaudit/OG-Platform,McLeodMoores/starling,DevStreet/FinanceAnalytics,McLeodMoores/starling,jeorme/OG-Platform,jerome79/OG-Platform,nssales/OG-Platform,ChinaQuants/OG-Platform,DevStreet/FinanceAnalytics,codeaudit/OG-Platform,jerome79/OG-Platform,jeorme/OG-Platform,nssales/OG-Platform,ChinaQuants/OG-Platform,nssales/OG-Platform,codeaudit/OG-Platform,McLeodMoores/starling,ChinaQuants/OG-Platform,DevStreet/FinanceAnalytics,codeaudit/OG-Platform,ChinaQuants/OG-Platform,jerome79/OG-Platform,jeorme/OG-Platform
/**
 * Copyright (C) 2012 - present by OpenGamma Inc. and the OpenGamma group of companies
 *
 * Please see distribution for license.
 */
package com.opengamma.analytics.financial;

/**
 * Enum representing the exercise decision type of single payoff options
 */
public enum ExerciseDecisionType {
  /**
   * European => decision to be made only on single Expiry
   */
  EUROPEAN,
  /**
   * American => decision may be made at any time before Expiry
   */
  AMERICAN;

  public static ExerciseDecisionType from(final String name) {
    final String capsName = name.toUpperCase();
    return ExerciseDecisionType.valueOf(capsName);
  }
}
projects/OG-Analytics/src/com/opengamma/analytics/financial/ExerciseDecisionType.java
/**
 * Copyright (C) 2012 - present by OpenGamma Inc. and the OpenGamma group of companies
 *
 * Please see distribution for license.
 */
package com.opengamma.analytics.financial;

/**
 * Enum representing the exercise decision type of single payoff options
 */
public enum ExerciseDecisionType {
  /**
   * European => decision to be made only on single Expiry
   */
  EUROPEAN,
  /**
   * American => decision may be made at any time before Expiry
   */
  AMERICAN;

}
Added from() to ExerciseDecisionType that will take any case string, esp take European from OG-Fin ExerciseType
projects/OG-Analytics/src/com/opengamma/analytics/financial/ExerciseDecisionType.java
Added from() to ExerciseDecisionType that will take any case string, esp take European from OG-Fin ExerciseType
<ide><path>projects/OG-Analytics/src/com/opengamma/analytics/financial/ExerciseDecisionType.java
<ide>    */
<ide>   AMERICAN;
<ide>
<add>  public static ExerciseDecisionType from(final String name) {
<add>    final String capsName = name.toUpperCase();
<add>    return ExerciseDecisionType.valueOf(capsName);
<add>  }
<ide> }
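The commit above adds a case-insensitive factory so inputs such as "European", as produced elsewhere in OG-Financial, resolve to the enum constant. The sketch below shows the same uppercase-then-valueOf pattern on a hypothetical stand-in enum so it compiles without OG-Analytics; it is not the OpenGamma source.

// Standalone sketch of the case-insensitive from(String) pattern added above.
// DecisionType is a hypothetical stand-in so the example runs without OG-Analytics.
public final class FromStringSketch {

    enum DecisionType {
        EUROPEAN, AMERICAN;

        static DecisionType from(final String name) {
            // Upper-casing first lets mixed-case inputs such as "European" resolve via valueOf.
            return DecisionType.valueOf(name.toUpperCase());
        }
    }

    public static void main(String[] args) {
        System.out.println(DecisionType.from("European")); // EUROPEAN
        System.out.println(DecisionType.from("american")); // AMERICAN
    }
}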
Java
apache-2.0
97f4df0bfe20be0dae85e71fa7a1eda2b2957fcf
0
d3sw/conductor,d3sw/conductor,d3sw/conductor,d3sw/conductor,d3sw/conductor,d3sw/conductor
/** * Copyright 2016 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * */ package com.netflix.conductor.core.execution; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; import com.netflix.conductor.annotations.Trace; import com.netflix.conductor.common.metadata.tasks.PollData; import com.netflix.conductor.common.metadata.tasks.Task; import com.netflix.conductor.common.metadata.tasks.Task.Status; import com.netflix.conductor.common.metadata.tasks.TaskResult; import com.netflix.conductor.common.metadata.workflow.RerunWorkflowRequest; import com.netflix.conductor.common.metadata.workflow.SkipTaskRequest; import com.netflix.conductor.common.metadata.workflow.WorkflowDef; import com.netflix.conductor.common.metadata.workflow.WorkflowTask; import com.netflix.conductor.common.run.Workflow; import com.netflix.conductor.common.run.Workflow.WorkflowStatus; import com.netflix.conductor.core.WorkflowContext; import com.netflix.conductor.core.config.Configuration; import com.netflix.conductor.core.events.EventQueues; import com.netflix.conductor.core.events.queue.Message; import com.netflix.conductor.core.events.queue.ObservableQueue; import com.netflix.conductor.core.execution.ApplicationException.Code; import com.netflix.conductor.core.execution.DeciderService.DeciderOutcome; import com.netflix.conductor.core.execution.tasks.WorkflowSystemTask; import com.netflix.conductor.core.execution.tasks.WorkflowSystemTask.PrePostAction; import com.netflix.conductor.core.utils.IDGenerator; import com.netflix.conductor.core.utils.QueueUtils; import com.netflix.conductor.dao.ExecutionDAO; import com.netflix.conductor.dao.MetadataDAO; import com.netflix.conductor.dao.QueueDAO; import com.netflix.conductor.metrics.Monitors; import org.apache.commons.lang.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.inject.Inject; import java.util.*; import java.util.stream.Collectors; /** * @author Viren Workflow services provider interface */ @Trace public class WorkflowExecutor { private static Logger logger = LoggerFactory.getLogger(WorkflowExecutor.class); private MetadataDAO metadata; private ExecutionDAO edao; private QueueDAO queue; private DeciderService decider; private Configuration config; public static final String deciderQueue = "_deciderQueue"; private int activeWorkerLastPollnSecs; @Inject public WorkflowExecutor(MetadataDAO metadata, ExecutionDAO edao, QueueDAO queue, ObjectMapper om, Configuration config) { this.metadata = metadata; this.edao = edao; this.queue = queue; this.config = config; activeWorkerLastPollnSecs = config.getIntProperty("tasks.active.worker.lastpoll", 10); this.decider = new DeciderService(metadata, om); } public String startWorkflow(String name, int version, String correlationId, Map<String, Object> input) throws Exception { return startWorkflow(name, version, correlationId, input, null); } public String startWorkflow(String name, int version, 
String correlationId, Map<String, Object> input, String event) throws Exception { return startWorkflow(name, version, input, correlationId, null, null, event); } public String startWorkflow(String name, int version, String correlationId, Map<String, Object> input, String event, Map<String, String> taskToDomain) throws Exception { return startWorkflow(name, version, input, correlationId, null, null, event, taskToDomain); } public String startWorkflow(String name, int version, Map<String, Object> input, String correlationId, String parentWorkflowId, String parentWorkflowTaskId, String event) throws Exception { return startWorkflow(name, version, input, correlationId, parentWorkflowId, parentWorkflowTaskId, event, null); } public String startWorkflow(String name, int version, Map<String, Object> input, String correlationId, String parentWorkflowId, String parentWorkflowTaskId, String event, Map<String, String> taskToDomain) throws Exception { try { if(input == null){ throw new ApplicationException(Code.INVALID_INPUT, "NULL input passed when starting workflow"); } WorkflowDef exists = metadata.get(name, version); if (exists == null) { throw new ApplicationException(Code.NOT_FOUND, "No such workflow defined. name=" + name + ", version=" + version); } Set<String> missingTaskDefs = exists.all().stream() .filter(wft -> wft.getType().equals(WorkflowTask.Type.SIMPLE.name())) .map(wft2 -> wft2.getName()).filter(task -> metadata.getTaskDef(task) == null) .collect(Collectors.toSet()); if(!missingTaskDefs.isEmpty()) { throw new ApplicationException(Code.INVALID_INPUT, "Cannot find the task definitions for the following tasks used in workflow: " + missingTaskDefs); } String workflowId = IDGenerator.generate(); // Persist the Workflow Workflow wf = new Workflow(); wf.setWorkflowId(workflowId); wf.setCorrelationId(correlationId); wf.setWorkflowType(name); wf.setVersion(version); wf.setInput(input); wf.setStatus(WorkflowStatus.RUNNING); wf.setParentWorkflowId(parentWorkflowId); wf.setParentWorkflowTaskId(parentWorkflowTaskId); wf.setOwnerApp(WorkflowContext.get().getClientApp()); wf.setCreateTime(System.currentTimeMillis()); wf.setUpdatedBy(null); wf.setUpdateTime(null); wf.setEvent(event); wf.setTaskToDomain(taskToDomain); edao.createWorkflow(wf); decide(workflowId); logger.info("Workflow has started.Current status=" + wf.getStatus() + ",workflowId=" + wf.getWorkflowId()+",CorrelationId=" + wf.getCorrelationId()+",input="+wf.getInput()); return workflowId; }catch (Exception e) { Monitors.recordWorkflowStartError(name); throw e; } } public String rerun(RerunWorkflowRequest request) throws Exception { Workflow reRunFromWorkflow = edao.getWorkflow(request.getReRunFromWorkflowId()); String workflowId = IDGenerator.generate(); // Persist the workflow and task First Workflow wf = new Workflow(); wf.setWorkflowId(workflowId); wf.setCorrelationId((request.getCorrelationId() == null) ? reRunFromWorkflow.getCorrelationId() : request.getCorrelationId()); wf.setWorkflowType(reRunFromWorkflow.getWorkflowType()); wf.setVersion(reRunFromWorkflow.getVersion()); wf.setInput((request.getWorkflowInput() == null) ? 
reRunFromWorkflow.getInput() : request.getWorkflowInput()); wf.setReRunFromWorkflowId(request.getReRunFromWorkflowId()); wf.setStatus(WorkflowStatus.RUNNING); wf.setOwnerApp(WorkflowContext.get().getClientApp()); wf.setCreateTime(System.currentTimeMillis()); wf.setUpdatedBy(null); wf.setUpdateTime(null); // If the "reRunFromTaskId" is not given in the RerunWorkflowRequest, // then the whole // workflow has to rerun if (request.getReRunFromTaskId() != null) { // We need to go thru the workflowDef and create tasks for // all tasks before request.getReRunFromTaskId() and marked them // skipped List<Task> newTasks = new LinkedList<>(); Map<String, Task> refNameToTask = new HashMap<String, Task>(); reRunFromWorkflow.getTasks().forEach(task -> refNameToTask.put(task.getReferenceTaskName(), task)); WorkflowDef wd = metadata.get(reRunFromWorkflow.getWorkflowType(), reRunFromWorkflow.getVersion()); Iterator<WorkflowTask> it = wd.getTasks().iterator(); int seq = wf.getTasks().size(); while (it.hasNext()) { WorkflowTask wt = it.next(); Task previousTask = refNameToTask.get(wt.getTaskReferenceName()); if (previousTask.getTaskId().equals(request.getReRunFromTaskId())) { Task theTask = new Task(); theTask.setTaskId(IDGenerator.generate()); theTask.setReferenceTaskName(previousTask.getReferenceTaskName()); theTask.setInputData((request.getTaskInput() == null) ? previousTask.getInputData() : request.getTaskInput()); theTask.setWorkflowInstanceId(workflowId); theTask.setStatus(Status.READY_FOR_RERUN); theTask.setTaskType(previousTask.getTaskType()); theTask.setCorrelationId(wf.getCorrelationId()); theTask.setSeq(seq++); theTask.setRetryCount(previousTask.getRetryCount() + 1); newTasks.add(theTask); break; } else { // Create with Skipped status Task theTask = new Task(); theTask.setTaskId(IDGenerator.generate()); theTask.setReferenceTaskName(previousTask.getReferenceTaskName()); theTask.setWorkflowInstanceId(workflowId); theTask.setStatus(Status.SKIPPED); theTask.setTaskType(previousTask.getTaskType()); theTask.setCorrelationId(wf.getCorrelationId()); theTask.setInputData(previousTask.getInputData()); theTask.setOutputData(previousTask.getOutputData()); theTask.setRetryCount(previousTask.getRetryCount() + 1); theTask.setSeq(seq++); newTasks.add(theTask); } } edao.createTasks(newTasks); } edao.createWorkflow(wf); decide(workflowId); return workflowId; } public void rewind(String workflowId) throws Exception { Workflow workflow = edao.getWorkflow(workflowId, true); if (!workflow.getStatus().isTerminal()) { logger.error("Workflow is still running. status=" + workflow.getStatus()+",workflowId="+workflow.getWorkflowId()+",correlationId="+workflow.getCorrelationId()); throw new ApplicationException(Code.CONFLICT, "Workflow is still running. status=" + workflow.getStatus()); } // Remove all the tasks... workflow.getTasks().forEach(t -> edao.removeTask(t.getTaskId())); workflow.getTasks().clear(); workflow.setReasonForIncompletion(null); workflow.setStartTime(System.currentTimeMillis()); workflow.setEndTime(0); // Change the status to running workflow.setStatus(WorkflowStatus.RUNNING); edao.updateWorkflow(workflow); decide(workflowId); } public void retry(String workflowId) throws Exception { Workflow workflow = edao.getWorkflow(workflowId, true); if (!workflow.getStatus().isTerminal()) { logger.error("Workflow is still running. 
status=" + workflow.getStatus()+",workflowId="+workflow.getWorkflowId()+",correlationId="+workflow.getCorrelationId()); throw new ApplicationException(Code.CONFLICT, "Workflow is still running. status=" + workflow.getStatus()); } if (workflow.getTasks().isEmpty()) { logger.error("Workflow has not started yet. status=" + workflow.getStatus()+",workflowId="+workflow.getWorkflowId()+",correlationId="+workflow.getCorrelationId()); throw new ApplicationException(Code.CONFLICT, "Workflow has not started yet"); } int lastIndex = workflow.getTasks().size() - 1; Task last = workflow.getTasks().get(lastIndex); if (!last.getStatus().isTerminal()) { throw new ApplicationException(Code.CONFLICT, "The last task is still not completed! I can only retry the last failed task. Use restart if you want to attempt entire workflow execution again."); } if (last.getStatus().isSuccessful()) { throw new ApplicationException(Code.CONFLICT, "The last task has not failed! I can only retry the last failed task. Use restart if you want to attempt entire workflow execution again."); } // Below is the situation where currently when the task failure causes // workflow to fail, the task's retried flag is not updated. This is to // update for these old tasks. List<Task> update = workflow.getTasks().stream().filter(task -> !task.isRetried()).collect(Collectors.toList()); update.forEach(task -> task.setRetried(true)); edao.updateTasks(update); Task retried = last.copy(); retried.setTaskId(IDGenerator.generate()); retried.setRetriedTaskId(last.getTaskId()); retried.setStatus(Status.SCHEDULED); retried.setRetryCount(last.getRetryCount() + 1); scheduleTask(workflow, Arrays.asList(retried)); workflow.setStatus(WorkflowStatus.RUNNING); edao.updateWorkflow(workflow); decide(workflowId); } public List<Workflow> getStatusByCorrelationId(String workflowName, String correlationId, boolean includeClosed) throws Exception { Preconditions.checkNotNull(correlationId, "correlation id is missing"); Preconditions.checkNotNull(workflowName, "workflow name is missing"); List<Workflow> workflows = edao.getWorkflowsByCorrelationId(correlationId); List<Workflow> result = new LinkedList<>(); for (Workflow wf : workflows) { if (wf.getWorkflowType().equals(workflowName) && (includeClosed || wf.getStatus().equals(WorkflowStatus.RUNNING))) { result.add(wf); } } return result; } public Task getPendingTaskByWorkflow(String taskReferenceName, String workflowId) { List<Task> tasks = edao.getTasksForWorkflow(workflowId).stream() .filter(task -> !task.getStatus().isTerminal() && task.getReferenceTaskName().equals(taskReferenceName)).collect(Collectors.toList()); if (!tasks.isEmpty()) { return tasks.get(0); // There can only be one task by a given // reference name running at a time. } return null; } public void completeWorkflow(Workflow wf) throws Exception { Workflow workflow = edao.getWorkflow(wf.getWorkflowId(), false); if (workflow.getStatus().equals(WorkflowStatus.COMPLETED)) { logger.warn("Workflow has already been completed. Current status=" + workflow.getStatus() + ", workflowId=" + wf.getWorkflowId()+",CorrelationId=" + wf.getCorrelationId()); return; } if (workflow.getStatus().isTerminal()) { String msg = "Workflow has already been completed. Current status " + workflow.getStatus(); logger.error("Workflow has already been completed. 
status=" + workflow.getStatus()+",workflowId="+workflow.getWorkflowId()+",correlationId="+workflow.getCorrelationId()); throw new ApplicationException(Code.CONFLICT, msg); } workflow.setStatus(WorkflowStatus.COMPLETED); workflow.setOutput(wf.getOutput()); edao.updateWorkflow(workflow); // If the following task, for some reason fails, the sweep will take // care of this again! if (workflow.getParentWorkflowId() != null) { Workflow parent = edao.getWorkflow(workflow.getParentWorkflowId(), false); decide(parent.getWorkflowId()); } Monitors.recordWorkflowCompletion(workflow.getWorkflowType(), workflow.getEndTime() - workflow.getStartTime()); queue.remove(deciderQueue, workflow.getWorkflowId()); //remove from the sweep queue logger.info("Workflow has completed, workflowId=" + wf.getWorkflowId()+",input="+wf.getInput()+",CorrelationId="+wf.getCorrelationId()+",output="+wf.getOutput()); } public String cancelWorkflow(String workflowId,Map<String, Object> inputbody) throws Exception { Workflow workflow = edao.getWorkflow(workflowId, true); workflow.setStatus(WorkflowStatus.CANCELLED); return cancelWorkflow(workflow,inputbody); } public String cancelWorkflow(Workflow workflow,Map<String, Object> inputbody) throws Exception { if (!workflow.getStatus().isTerminal()) { workflow.setStatus(WorkflowStatus.CANCELLED); } String workflowId = workflow.getWorkflowId(); edao.updateWorkflow(workflow); logger.error("Workflow is cancelled.workflowId="+workflowId+",correlationId="+workflow.getCorrelationId()); List<Task> tasks = workflow.getTasks(); for (Task task : tasks) { if (!task.getStatus().isTerminal()) { // Cancel the ones which are not completed yet.... task.setStatus(Status.CANCELED); if (SystemTaskType.is(task.getTaskType())) { WorkflowSystemTask stt = WorkflowSystemTask.get(task.getTaskType()); stt.cancel(workflow, task, this); //SystemTaskType.valueOf(task.getTaskType()).cancel(workflow, task, this); } edao.updateTask(task); applyTaskAction(task, PrePostAction.postTask); } // And remove from the task queue if they were there queue.remove(QueueUtils.getQueueName(task), task.getTaskId()); } // If the following lines, for some reason fails, the sweep will take // care of this again! if (workflow.getParentWorkflowId() != null) { Workflow parent = edao.getWorkflow(workflow.getParentWorkflowId(), false); decide(parent.getWorkflowId()); } WorkflowDef def = metadata.get(workflow.getWorkflowType(), workflow.getVersion()); String cancelWorkflow = def.getCancelWorkflow(); if (!StringUtils.isBlank(cancelWorkflow)) { // Backward compatible by default boolean expandInline = Boolean.parseBoolean(config.getProperty("workflow.failure.expandInline", "true")); Map<String, Object> input = new HashMap<>(); if (expandInline) { input.putAll(workflow.getInput()); } else { input.put("workflowInput", workflow.getInput()); } input.put("workflowId", workflowId); input.put("workflowType", workflow.getWorkflowType()); input.put("workflowVersion", workflow.getVersion()); try { WorkflowDef latestCancelWorkflow = metadata.getLatest(cancelWorkflow); String cancelWFId = startWorkflow(cancelWorkflow, latestCancelWorkflow.getVersion(), input, workflowId, null, null, null); workflow.getOutput().put("conductor.cancel_workflow", cancelWFId); } catch (Exception e) { logger.error("Error workflow " + cancelWorkflow + " failed to start. reason: " + e.getMessage()); workflow.getOutput().put("conductor.cancel_workflow", "Error workflow " + cancelWorkflow + " failed to start. 
reason: " + e.getMessage()); Monitors.recordWorkflowStartError(cancelWorkflow); } } queue.remove(deciderQueue, workflow.getWorkflowId()); //remove from the sweep queue // Send to atlas Monitors.recordWorkflowTermination(workflow.getWorkflowType(), workflow.getStatus()); return workflowId; } public void terminateWorkflow(String workflowId, String reason) throws Exception { Workflow workflow = edao.getWorkflow(workflowId, true); workflow.setStatus(WorkflowStatus.TERMINATED); terminateWorkflow(workflow, reason, null); } public void terminateWorkflow(Workflow workflow, String reason, String failureWorkflow) throws Exception { terminateWorkflow(workflow, reason, failureWorkflow, null); } public void terminateWorkflow(Workflow workflow, String reason, String failureWorkflow, Task failedTask) throws Exception { if (!workflow.getStatus().isTerminal()) { workflow.setStatus(WorkflowStatus.TERMINATED); } String workflowId = workflow.getWorkflowId(); workflow.setReasonForIncompletion(reason); edao.updateWorkflow(workflow); logger.error("Workflow is terminated.workflowId="+workflowId+",correlationId="+workflow.getCorrelationId()+",reasonForIncompletion="+reason); List<Task> tasks = workflow.getTasks(); for (Task task : tasks) { if (!task.getStatus().isTerminal()) { // Cancel the ones which are not completed yet.... task.setStatus(Status.CANCELED); if (SystemTaskType.is(task.getTaskType())) { WorkflowSystemTask stt = WorkflowSystemTask.get(task.getTaskType()); stt.cancel(workflow, task, this); //SystemTaskType.valueOf(task.getTaskType()).cancel(workflow, task, this); } edao.updateTask(task); applyTaskAction(task, PrePostAction.postTask); } // And remove from the task queue if they were there queue.remove(QueueUtils.getQueueName(task), task.getTaskId()); } // If the following lines, for some reason fails, the sweep will take // care of this again! 
if (workflow.getParentWorkflowId() != null) { Workflow parent = edao.getWorkflow(workflow.getParentWorkflowId(), false); decide(parent.getWorkflowId()); } if (!StringUtils.isBlank(failureWorkflow)) { // Backward compatible by default boolean expandInline = Boolean.parseBoolean(config.getProperty("workflow.failure.expandInline", "true")); Map<String, Object> input = new HashMap<>(); if (expandInline) { input.putAll(workflow.getInput()); } else { input.put("workflowInput", workflow.getInput()); } input.put("workflowId", workflowId); input.put("workflowType", workflow.getWorkflowType()); input.put("workflowVersion", workflow.getVersion()); input.put("reason", reason); input.put("failureStatus", workflow.getStatus().toString()); if (failedTask != null) { Map<String, Object> map = new HashMap<>(); map.put("taskId", failedTask.getTaskId()); map.put("input", failedTask.getInputData()); map.put("output", failedTask.getOutputData()); map.put("retryCount", failedTask.getRetryCount()); map.put("referenceName", failedTask.getReferenceTaskName()); map.put("reasonForIncompletion", failedTask.getReasonForIncompletion()); input.put("failedTask", map); logger.error("Error in task execution.workflowid="+workflowId+",correlationId="+workflow.getCorrelationId()+",failedTaskid="+failedTask.getTaskId()+",taskReferenceName="+failedTask.getReferenceTaskName()+"reasonForIncompletion="+failedTask.getReasonForIncompletion()); } try { WorkflowDef latestFailureWorkflow = metadata.getLatest(failureWorkflow); String failureWFId = startWorkflow(failureWorkflow, latestFailureWorkflow.getVersion(), input, workflowId, null, null, null); workflow.getOutput().put("conductor.failure_workflow", failureWFId); } catch (Exception e) { logger.error("Error workflow " + failureWorkflow + " failed to start. reason: " + e.getMessage()); workflow.getOutput().put("conductor.failure_workflow", "Error workflow " + failureWorkflow + " failed to start. reason: " + e.getMessage()); Monitors.recordWorkflowStartError(failureWorkflow); } } queue.remove(deciderQueue, workflow.getWorkflowId()); //remove from the sweep queue // Send to atlas Monitors.recordWorkflowTermination(workflow.getWorkflowType(), workflow.getStatus()); } public void updateTask(TaskResult result) throws Exception { if (result == null) { logger.error("null task given for update..." + result); throw new ApplicationException(Code.INVALID_INPUT, "Task object is null"); } String workflowId = result.getWorkflowInstanceId(); Workflow wf = edao.getWorkflow(workflowId); Task task = edao.getTask(result.getTaskId()); if (wf.getStatus().isTerminal()) { // Workflow is in terminal state queue.remove(QueueUtils.getQueueName(task), result.getTaskId()); if(!task.getStatus().isTerminal()) { task.setStatus(Status.COMPLETED); } task.setOutputData(result.getOutputData()); task.setReasonForIncompletion(result.getReasonForIncompletion()); task.setWorkerId(result.getWorkerId()); edao.updateTask(task); applyTaskAction(task, PrePostAction.postTask); String msg = "Workflow " + wf.getWorkflowId() + " is already completed as " + wf.getStatus() + ", task=" + task.getTaskType() + ", reason=" + wf.getReasonForIncompletion()+",correlationId="+wf.getCorrelationId(); logger.warn(msg); Monitors.recordUpdateConflict(task.getTaskType(), wf.getWorkflowType(), wf.getStatus()); return; } if (task.getStatus().isTerminal()) { // Task was already updated.... 
queue.remove(QueueUtils.getQueueName(task), result.getTaskId()); String msg = "Task is already completed as " + task.getStatus() + "@" + task.getEndTime() + ", workflow status=" + wf.getStatus() + ", workflowId=" + wf.getWorkflowId() + ", taskId=" + task.getTaskId()+",correlationId="+wf.getCorrelationId(); logger.warn(msg); Monitors.recordUpdateConflict(task.getTaskType(), wf.getWorkflowType(), task.getStatus()); return; } task.setStatus(Status.valueOf(result.getStatus().name())); task.setOutputData(result.getOutputData()); task.setReasonForIncompletion(result.getReasonForIncompletion()); task.setWorkerId(result.getWorkerId()); task.setCallbackAfterSeconds(result.getCallbackAfterSeconds()); if (task.getStatus().isTerminal()) { task.setEndTime(System.currentTimeMillis()); } else { if (result.isResetStartTime()) { task.setStartTime(System.currentTimeMillis()); // We must reset endtime only when it is set if (task.getEndTime() > 0) { task.setEndTime(System.currentTimeMillis()); } } } edao.updateTask(task); result.getLogs().forEach(tl -> tl.setTaskId(task.getTaskId())); edao.addTaskExecLog(result.getLogs()); switch (task.getStatus()) { case COMPLETED: queue.remove(QueueUtils.getQueueName(task), result.getTaskId()); applyTaskAction(task, PrePostAction.postTask); break; case CANCELED: queue.remove(QueueUtils.getQueueName(task), result.getTaskId()); applyTaskAction(task, PrePostAction.postTask); break; case FAILED: queue.remove(QueueUtils.getQueueName(task), result.getTaskId()); applyTaskAction(task, PrePostAction.postTask); break; case IN_PROGRESS: // put it back in queue based in callbackAfterSeconds long callBack = result.getCallbackAfterSeconds(); queue.remove(QueueUtils.getQueueName(task), task.getTaskId()); queue.push(QueueUtils.getQueueName(task), task.getTaskId(), callBack); // Milliseconds break; default: break; } decide(workflowId); if (task.getStatus().isTerminal()) { long duration = getTaskDuration(0, task); long lastDuration = task.getEndTime() - task.getStartTime(); Monitors.recordTaskExecutionTime(task.getTaskDefName(), duration, true, task.getStatus()); Monitors.recordTaskExecutionTime(task.getTaskDefName(), lastDuration, false, task.getStatus()); } } public List<Task> getTasks(String taskType, String startKey, int count) throws Exception { return edao.getTasks(taskType, startKey, count); } public List<Workflow> getRunningWorkflows(String workflowName) throws Exception { List<Workflow> allwf = edao.getPendingWorkflowsByType(workflowName); return allwf; } public List<String> getWorkflows(String name, Integer version, Long startTime, Long endTime) { List<Workflow> allwf = edao.getWorkflowsByType(name, startTime, endTime); List<String> workflows = allwf.stream().filter(wf -> wf.getVersion() == version).map(wf -> wf.getWorkflowId()).collect(Collectors.toList()); return workflows; } public List<String> getRunningWorkflowIds(String workflowName) throws Exception { return edao.getRunningWorkflowIds(workflowName); } /** * * @param workflowId ID of the workflow to evaluate the state for * @return true if the workflow has completed (success or failed), false otherwise. * @throws Exception If there was an error - caller should retry in this case. 
*/ public boolean decide(String workflowId) throws Exception { Workflow workflow = edao.getWorkflow(workflowId, true); WorkflowDef def = metadata.get(workflow.getWorkflowType(), workflow.getVersion()); try { DeciderOutcome outcome = decider.decide(workflow, def); if(outcome.isComplete) { completeWorkflow(workflow); return true; } List<Task> tasksToBeScheduled = outcome.tasksToBeScheduled; setTaskDomains(tasksToBeScheduled, workflow); List<Task> tasksToBeUpdated = outcome.tasksToBeUpdated; boolean stateChanged = false; workflow.getTasks().addAll(tasksToBeScheduled); for(Task task : tasksToBeScheduled) { if (SystemTaskType.is(task.getTaskType()) && !task.getStatus().isTerminal()) { WorkflowSystemTask stt = WorkflowSystemTask.get(task.getTaskType()); if (!stt.isAsync() && stt.execute(workflow, task, this)) { tasksToBeUpdated.add(task); stateChanged = true; } } } stateChanged = scheduleTask(workflow, tasksToBeScheduled) || stateChanged; if(!outcome.tasksToBeUpdated.isEmpty() || !outcome.tasksToBeScheduled.isEmpty()) { edao.updateTasks(tasksToBeUpdated); edao.updateWorkflow(workflow); queue.push(deciderQueue, workflow.getWorkflowId(), config.getSweepFrequency()); } if (outcome.startWorkflow != null) { DeciderService.StartWorkflowParams startWorkflow = outcome.startWorkflow; String workflowName = startWorkflow.name; int workflowVersion; if (startWorkflow.version == null) { WorkflowDef subFlowDef = metadata.getLatest(workflowName); workflowVersion = subFlowDef.getVersion(); } else { workflowVersion = startWorkflow.version; } startWorkflow(workflowName, workflowVersion, startWorkflow.params, null, workflow.getWorkflowId(), null,null); } if(stateChanged) { decide(workflowId); } } catch (TerminateWorkflow tw) { logger.error("Error in workflow execution:"+tw.getMessage(), tw); terminate(def, workflow, tw); return true; } return false; } public void pauseWorkflow(String workflowId) throws Exception { WorkflowStatus status = WorkflowStatus.PAUSED; Workflow workflow = edao.getWorkflow(workflowId, false); if(workflow.getStatus().isTerminal()){ throw new ApplicationException(Code.CONFLICT, "Workflow id " + workflowId + " has ended, status cannot be updated."); } if (workflow.getStatus().equals(status)) { return; //Already paused! } workflow.setStatus(status); edao.updateWorkflow(workflow); } public void resumeWorkflow(String workflowId) throws Exception{ Workflow workflow = edao.getWorkflow(workflowId, false); if(!workflow.getStatus().equals(WorkflowStatus.PAUSED)){ logger.error("Workflow is not is not PAUSED so cannot resume. 
Current status=" + workflow.getStatus() + ", workflowId=" + workflow.getWorkflowId()+",CorrelationId=" + workflow.getCorrelationId()); throw new IllegalStateException("The workflow " + workflowId + " is not is not PAUSED so cannot resume"); } workflow.setStatus(WorkflowStatus.RUNNING); edao.updateWorkflow(workflow); decide(workflowId); } public void skipTaskFromWorkflow(String workflowId, String taskReferenceName, SkipTaskRequest skipTaskRequest) throws Exception { Workflow wf = edao.getWorkflow(workflowId, true); // If the wf is not running then cannot skip any task if(!wf.getStatus().equals(WorkflowStatus.RUNNING)){ String errorMsg = String.format("The workflow %s is not running so the task referenced by %s cannot be skipped", workflowId, taskReferenceName); logger.error(errorMsg); throw new IllegalStateException(errorMsg); } // Check if the reference name is as per the workflowdef WorkflowDef wfd = metadata.get(wf.getWorkflowType(), wf.getVersion()); WorkflowTask wft = wfd.getTaskByRefName(taskReferenceName); if(wft == null){ String errorMsg = String.format("The task referenced by %s does not exist in the WorkflowDef %s", taskReferenceName, wf.getWorkflowType()); logger.error(errorMsg); throw new IllegalStateException(errorMsg); } // If the task is already started the again it cannot be skipped wf.getTasks().forEach(task -> { if(task.getReferenceTaskName().equals(taskReferenceName)){ String errorMsg = String.format("The task referenced %s has already been processed, cannot be skipped", taskReferenceName); logger.error(errorMsg); throw new IllegalStateException(errorMsg); } }); // Now create a "SKIPPED" task for this workflow Task theTask = new Task(); theTask.setTaskId(IDGenerator.generate()); theTask.setReferenceTaskName(taskReferenceName); theTask.setWorkflowInstanceId(workflowId); theTask.setStatus(Status.SKIPPED); theTask.setTaskType(wft.getName()); theTask.setCorrelationId(wf.getCorrelationId()); if(skipTaskRequest != null){ theTask.setInputData(skipTaskRequest.getTaskInput()); theTask.setOutputData(skipTaskRequest.getTaskOutput()); } edao.createTasks(Arrays.asList(theTask)); decide(workflowId); } public Workflow getWorkflow(String workflowId, boolean includeTasks) { return edao.getWorkflow(workflowId, includeTasks); } public void addTaskToQueue(Task task) throws Exception { // put in queue queue.remove(QueueUtils.getQueueName(task), task.getTaskId()); if (task.getCallbackAfterSeconds() > 0) { queue.push(QueueUtils.getQueueName(task), task.getTaskId(), task.getCallbackAfterSeconds()); } else { queue.push(QueueUtils.getQueueName(task), task.getTaskId(), 0); } } //Executes the async system task public void executeSystemTask(WorkflowSystemTask systemTask, String taskId, int unackTimeout) { try { Task task = edao.getTask(taskId); if(task.getStatus().isTerminal()) { //Tune the SystemTaskWorkerCoordinator's queues - if the queue size is very big this can happen! 
logger.warn("Task {}/{} was already completed.", task.getTaskType(), task.getTaskId()); queue.remove(QueueUtils.getQueueName(task), task.getTaskId()); return; } String workflowId = task.getWorkflowInstanceId(); Workflow workflow = edao.getWorkflow(workflowId, true); if (task.getStartTime() == 0) { task.setStartTime(System.currentTimeMillis()); Monitors.recordQueueWaitTime(task.getTaskDefName(), task.getQueueWaitTime()); } if(workflow.getStatus().isTerminal()) { logger.warn("Workflow {} has been completed for {}/{}", workflow.getWorkflowId(), systemTask.getName(), task.getTaskId()); if(!task.getStatus().isTerminal()) { task.setStatus(Status.CANCELED); applyTaskAction(task, PrePostAction.postTask); } edao.updateTask(task); queue.remove(QueueUtils.getQueueName(task), task.getTaskId()); return; } if(task.getStatus().equals(Status.SCHEDULED)) { if(edao.exceedsInProgressLimit(task)) { logger.warn("Rate limited for {}", task.getTaskDefName()); return; } } logger.info("Executing {}/{}-{}", task.getTaskType(), task.getTaskId(), task.getStatus()); queue.setUnackTimeout(QueueUtils.getQueueName(task), task.getTaskId(), systemTask.getRetryTimeInSecond() * 1000); task.setPollCount(task.getPollCount() + 1); edao.updateTask(task); switch (task.getStatus()) { case SCHEDULED: applyTaskAction(task, PrePostAction.preTask); systemTask.start(workflow, task, this); break; case IN_PROGRESS: systemTask.execute(workflow, task, this); break; default: break; } if(!task.getStatus().isTerminal()) { task.setCallbackAfterSeconds(unackTimeout); } updateTask(new TaskResult(task)); logger.info("Done Executing {}/{}-{} op={}", task.getTaskType(), task.getTaskId(), task.getStatus(), task.getOutputData().toString()); } catch (Exception e) { logger.error(e.getMessage(), e); } } public void setTaskDomains(List<Task> tasks, Workflow wf){ Map<String, String> taskToDomain = wf.getTaskToDomain(); if(taskToDomain != null){ // Check if all tasks have the same domain "*" String domainstr = taskToDomain.get("*"); if(domainstr != null){ String[] domains = domainstr.split(","); tasks.forEach(task -> { // Filter out SystemTask if(!(task instanceof SystemTask)){ // Check which domain worker is polling // Set the task domain task.setDomain(getActiveDomain(task.getTaskType(), domains)); } }); } else { tasks.forEach(task -> { if(!(task instanceof SystemTask)){ String taskDomainstr = taskToDomain.get(task.getTaskType()); if(taskDomainstr != null){ task.setDomain(getActiveDomain(task.getTaskType(), taskDomainstr.split(","))); } } }); } } } private String getActiveDomain(String taskType, String[] domains){ // The domain list has to be ordered. 
// In sequence check if any worker has polled for last 30 seconds, if so that is the Active domain String domain = null; // Default domain for(String d: domains){ PollData pd = edao.getPollData(taskType, d.trim()); if(pd != null){ if(pd.getLastPollTime() > System.currentTimeMillis() - (activeWorkerLastPollnSecs * 1000)){ domain = d.trim(); break; } } } return domain; } private long getTaskDuration(long s, Task task) { long duration = task.getEndTime() - task.getStartTime(); s += duration; if (task.getRetriedTaskId() == null) { return s; } return s + getTaskDuration(s, edao.getTask(task.getRetriedTaskId())); } @VisibleForTesting boolean scheduleTask(Workflow workflow, List<Task> tasks) throws Exception { if (tasks == null || tasks.isEmpty()) { return false; } int count = workflow.getTasks().size(); for (Task task : tasks) { task.setSeq(++count); } List<Task> created = edao.createTasks(tasks); List<Task> createdSystemTasks = created.stream().filter(task -> SystemTaskType.is(task.getTaskType())).collect(Collectors.toList()); List<Task> toBeQueued = created.stream().filter(task -> !SystemTaskType.is(task.getTaskType())).collect(Collectors.toList()); boolean startedSystemTasks = false; for(Task task : createdSystemTasks) { WorkflowSystemTask stt = WorkflowSystemTask.get(task.getTaskType()); if(stt == null) { throw new RuntimeException("No system task found by name " + task.getTaskType()); } task.setStartTime(System.currentTimeMillis()); if(!stt.isAsync()) { applyTaskAction(task, PrePostAction.preTask); stt.start(workflow, task, this); startedSystemTasks = true; edao.updateTask(task); if (task.getStatus().isTerminal()) { applyTaskAction(task, PrePostAction.postTask); } } else { toBeQueued.add(task); } } addTaskToQueue(toBeQueued); return startedSystemTasks; } private void addTaskToQueue(final List<Task> tasks) throws Exception { for (Task t : tasks) { addTaskToQueue(t); } } private void terminate(final WorkflowDef def, final Workflow workflow, TerminateWorkflow tw) throws Exception { if (!workflow.getStatus().isTerminal()) { workflow.setStatus(tw.workflowStatus); } String failureWorkflow = def.getFailureWorkflow(); if (failureWorkflow != null) { if (failureWorkflow.startsWith("$")) { String[] paramPathComponents = failureWorkflow.split("\\."); String name = paramPathComponents[2]; // name of the input parameter failureWorkflow = (String) workflow.getInput().get(name); } } if(tw.task != null){ edao.updateTask(tw.task); } terminateWorkflow(workflow, tw.getMessage(), failureWorkflow, tw.task); String taskId = (tw.task != null ? tw.task.getTaskId() : null); String taskRefName = (tw.task != null ? tw.task.getReferenceTaskName() : null); logger.error("Workflow failed. 
workflowId=" + workflow.getWorkflowId()+",correlationId="+workflow.getCorrelationId()+",Reason="+tw.getMessage()+",taskId="+taskId+",taskReferenceName="+taskRefName); } @SuppressWarnings("unchecked") private void applyTaskAction(Task task, WorkflowSystemTask.PrePostAction action) throws Exception { Object eventMessages = task.getInputData().get("event_messages"); if (eventMessages == null) { return; } Map<String, Object> eventMsgMap = (Map<String, Object>) eventMessages; if (eventMsgMap.containsKey(action.name())) { Map<String, Object> actionMap = (Map<String, Object>) eventMsgMap.get(action.name()); ObjectMapper mapper = new ObjectMapper(); Message msg = new Message(); msg.setId(UUID.randomUUID().toString()); String payload = mapper.writeValueAsString(actionMap.get("inputParameters")); msg.setPayload(payload); String sink = (String) actionMap.get("sink"); ObservableQueue queue = EventQueues.getQueue(sink, false); if (queue != null) { queue.publish(Collections.singletonList(msg)); } } } }
core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java
/** * Copyright 2016 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * */ package com.netflix.conductor.core.execution; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; import com.netflix.conductor.annotations.Trace; import com.netflix.conductor.common.metadata.tasks.PollData; import com.netflix.conductor.common.metadata.tasks.Task; import com.netflix.conductor.common.metadata.tasks.Task.Status; import com.netflix.conductor.common.metadata.tasks.TaskResult; import com.netflix.conductor.common.metadata.workflow.RerunWorkflowRequest; import com.netflix.conductor.common.metadata.workflow.SkipTaskRequest; import com.netflix.conductor.common.metadata.workflow.WorkflowDef; import com.netflix.conductor.common.metadata.workflow.WorkflowTask; import com.netflix.conductor.common.run.Workflow; import com.netflix.conductor.common.run.Workflow.WorkflowStatus; import com.netflix.conductor.core.WorkflowContext; import com.netflix.conductor.core.config.Configuration; import com.netflix.conductor.core.events.EventQueues; import com.netflix.conductor.core.events.queue.Message; import com.netflix.conductor.core.events.queue.ObservableQueue; import com.netflix.conductor.core.execution.ApplicationException.Code; import com.netflix.conductor.core.execution.DeciderService.DeciderOutcome; import com.netflix.conductor.core.execution.tasks.WorkflowSystemTask; import com.netflix.conductor.core.execution.tasks.WorkflowSystemTask.PrePostAction; import com.netflix.conductor.core.utils.IDGenerator; import com.netflix.conductor.core.utils.QueueUtils; import com.netflix.conductor.dao.ExecutionDAO; import com.netflix.conductor.dao.MetadataDAO; import com.netflix.conductor.dao.QueueDAO; import com.netflix.conductor.metrics.Monitors; import org.apache.commons.lang.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.inject.Inject; import java.util.*; import java.util.stream.Collectors; /** * @author Viren Workflow services provider interface */ @SuppressWarnings("ALL") @Trace public class WorkflowExecutor { private static Logger logger = LoggerFactory.getLogger(WorkflowExecutor.class); private MetadataDAO metadata; private ExecutionDAO edao; private QueueDAO queue; private DeciderService decider; private Configuration config; public static final String deciderQueue = "_deciderQueue"; private int activeWorkerLastPollnSecs; @Inject public WorkflowExecutor(MetadataDAO metadata, ExecutionDAO edao, QueueDAO queue, ObjectMapper om, Configuration config) { this.metadata = metadata; this.edao = edao; this.queue = queue; this.config = config; activeWorkerLastPollnSecs = config.getIntProperty("tasks.active.worker.lastpoll", 10); this.decider = new DeciderService(metadata, om); } public String startWorkflow(String name, int version, String correlationId, Map<String, Object> input) throws Exception { return startWorkflow(name, version, correlationId, input, null); } public String startWorkflow(String 
name, int version, String correlationId, Map<String, Object> input, String event) throws Exception { return startWorkflow(name, version, input, correlationId, null, null, event); } public String startWorkflow(String name, int version, String correlationId, Map<String, Object> input, String event, Map<String, String> taskToDomain) throws Exception { return startWorkflow(name, version, input, correlationId, null, null, event, taskToDomain); } public String startWorkflow(String name, int version, Map<String, Object> input, String correlationId, String parentWorkflowId, String parentWorkflowTaskId, String event) throws Exception { return startWorkflow(name, version, input, correlationId, parentWorkflowId, parentWorkflowTaskId, event, null); } public String startWorkflow(String name, int version, Map<String, Object> input, String correlationId, String parentWorkflowId, String parentWorkflowTaskId, String event, Map<String, String> taskToDomain) throws Exception { try { if(input == null){ throw new ApplicationException(Code.INVALID_INPUT, "NULL input passed when starting workflow"); } WorkflowDef exists = metadata.get(name, version); if (exists == null) { throw new ApplicationException(Code.NOT_FOUND, "No such workflow defined. name=" + name + ", version=" + version); } Set<String> missingTaskDefs = exists.all().stream() .filter(wft -> wft.getType().equals(WorkflowTask.Type.SIMPLE.name())) .map(wft2 -> wft2.getName()).filter(task -> metadata.getTaskDef(task) == null) .collect(Collectors.toSet()); if(!missingTaskDefs.isEmpty()) { throw new ApplicationException(Code.INVALID_INPUT, "Cannot find the task definitions for the following tasks used in workflow: " + missingTaskDefs); } String workflowId = IDGenerator.generate(); // Persist the Workflow Workflow wf = new Workflow(); wf.setWorkflowId(workflowId); wf.setCorrelationId(correlationId); wf.setWorkflowType(name); wf.setVersion(version); wf.setInput(input); wf.setStatus(WorkflowStatus.RUNNING); wf.setParentWorkflowId(parentWorkflowId); wf.setParentWorkflowTaskId(parentWorkflowTaskId); wf.setOwnerApp(WorkflowContext.get().getClientApp()); wf.setCreateTime(System.currentTimeMillis()); wf.setUpdatedBy(null); wf.setUpdateTime(null); wf.setEvent(event); wf.setTaskToDomain(taskToDomain); edao.createWorkflow(wf); decide(workflowId); logger.info("Workflow has started.Current status=" + wf.getStatus() + ",workflowId=" + wf.getWorkflowId()+",CorrelationId=" + wf.getCorrelationId()+",input="+wf.getInput()); return workflowId; }catch (Exception e) { Monitors.recordWorkflowStartError(name); throw e; } } public String rerun(RerunWorkflowRequest request) throws Exception { Workflow reRunFromWorkflow = edao.getWorkflow(request.getReRunFromWorkflowId()); String workflowId = IDGenerator.generate(); // Persist the workflow and task First Workflow wf = new Workflow(); wf.setWorkflowId(workflowId); wf.setCorrelationId((request.getCorrelationId() == null) ? reRunFromWorkflow.getCorrelationId() : request.getCorrelationId()); wf.setWorkflowType(reRunFromWorkflow.getWorkflowType()); wf.setVersion(reRunFromWorkflow.getVersion()); wf.setInput((request.getWorkflowInput() == null) ? 
reRunFromWorkflow.getInput() : request.getWorkflowInput()); wf.setReRunFromWorkflowId(request.getReRunFromWorkflowId()); wf.setStatus(WorkflowStatus.RUNNING); wf.setOwnerApp(WorkflowContext.get().getClientApp()); wf.setCreateTime(System.currentTimeMillis()); wf.setUpdatedBy(null); wf.setUpdateTime(null); // If the "reRunFromTaskId" is not given in the RerunWorkflowRequest, // then the whole // workflow has to rerun if (request.getReRunFromTaskId() != null) { // We need to go thru the workflowDef and create tasks for // all tasks before request.getReRunFromTaskId() and marked them // skipped List<Task> newTasks = new LinkedList<>(); Map<String, Task> refNameToTask = new HashMap<String, Task>(); reRunFromWorkflow.getTasks().forEach(task -> refNameToTask.put(task.getReferenceTaskName(), task)); WorkflowDef wd = metadata.get(reRunFromWorkflow.getWorkflowType(), reRunFromWorkflow.getVersion()); Iterator<WorkflowTask> it = wd.getTasks().iterator(); int seq = wf.getTasks().size(); while (it.hasNext()) { WorkflowTask wt = it.next(); Task previousTask = refNameToTask.get(wt.getTaskReferenceName()); if (previousTask.getTaskId().equals(request.getReRunFromTaskId())) { Task theTask = new Task(); theTask.setTaskId(IDGenerator.generate()); theTask.setReferenceTaskName(previousTask.getReferenceTaskName()); theTask.setInputData((request.getTaskInput() == null) ? previousTask.getInputData() : request.getTaskInput()); theTask.setWorkflowInstanceId(workflowId); theTask.setStatus(Status.READY_FOR_RERUN); theTask.setTaskType(previousTask.getTaskType()); theTask.setCorrelationId(wf.getCorrelationId()); theTask.setSeq(seq++); theTask.setRetryCount(previousTask.getRetryCount() + 1); newTasks.add(theTask); break; } else { // Create with Skipped status Task theTask = new Task(); theTask.setTaskId(IDGenerator.generate()); theTask.setReferenceTaskName(previousTask.getReferenceTaskName()); theTask.setWorkflowInstanceId(workflowId); theTask.setStatus(Status.SKIPPED); theTask.setTaskType(previousTask.getTaskType()); theTask.setCorrelationId(wf.getCorrelationId()); theTask.setInputData(previousTask.getInputData()); theTask.setOutputData(previousTask.getOutputData()); theTask.setRetryCount(previousTask.getRetryCount() + 1); theTask.setSeq(seq++); newTasks.add(theTask); } } edao.createTasks(newTasks); } edao.createWorkflow(wf); decide(workflowId); return workflowId; } public void rewind(String workflowId) throws Exception { Workflow workflow = edao.getWorkflow(workflowId, true); if (!workflow.getStatus().isTerminal()) { logger.error("Workflow is still running. status=" + workflow.getStatus()+",workflowId="+workflow.getWorkflowId()+",correlationId="+workflow.getCorrelationId()); throw new ApplicationException(Code.CONFLICT, "Workflow is still running. status=" + workflow.getStatus()); } // Remove all the tasks... workflow.getTasks().forEach(t -> edao.removeTask(t.getTaskId())); workflow.getTasks().clear(); workflow.setReasonForIncompletion(null); workflow.setStartTime(System.currentTimeMillis()); workflow.setEndTime(0); // Change the status to running workflow.setStatus(WorkflowStatus.RUNNING); edao.updateWorkflow(workflow); decide(workflowId); } public void retry(String workflowId) throws Exception { Workflow workflow = edao.getWorkflow(workflowId, true); if (!workflow.getStatus().isTerminal()) { logger.error("Workflow is still running. 
status=" + workflow.getStatus()+",workflowId="+workflow.getWorkflowId()+",correlationId="+workflow.getCorrelationId()); throw new ApplicationException(Code.CONFLICT, "Workflow is still running. status=" + workflow.getStatus()); } if (workflow.getTasks().isEmpty()) { logger.error("Workflow has not started yet. status=" + workflow.getStatus()+",workflowId="+workflow.getWorkflowId()+",correlationId="+workflow.getCorrelationId()); throw new ApplicationException(Code.CONFLICT, "Workflow has not started yet"); } int lastIndex = workflow.getTasks().size() - 1; Task last = workflow.getTasks().get(lastIndex); if (!last.getStatus().isTerminal()) { throw new ApplicationException(Code.CONFLICT, "The last task is still not completed! I can only retry the last failed task. Use restart if you want to attempt entire workflow execution again."); } if (last.getStatus().isSuccessful()) { throw new ApplicationException(Code.CONFLICT, "The last task has not failed! I can only retry the last failed task. Use restart if you want to attempt entire workflow execution again."); } // Below is the situation where currently when the task failure causes // workflow to fail, the task's retried flag is not updated. This is to // update for these old tasks. List<Task> update = workflow.getTasks().stream().filter(task -> !task.isRetried()).collect(Collectors.toList()); update.forEach(task -> task.setRetried(true)); edao.updateTasks(update); Task retried = last.copy(); retried.setTaskId(IDGenerator.generate()); retried.setRetriedTaskId(last.getTaskId()); retried.setStatus(Status.SCHEDULED); retried.setRetryCount(last.getRetryCount() + 1); scheduleTask(workflow, Arrays.asList(retried)); workflow.setStatus(WorkflowStatus.RUNNING); edao.updateWorkflow(workflow); decide(workflowId); } public List<Workflow> getStatusByCorrelationId(String workflowName, String correlationId, boolean includeClosed) throws Exception { Preconditions.checkNotNull(correlationId, "correlation id is missing"); Preconditions.checkNotNull(workflowName, "workflow name is missing"); List<Workflow> workflows = edao.getWorkflowsByCorrelationId(correlationId); List<Workflow> result = new LinkedList<>(); for (Workflow wf : workflows) { if (wf.getWorkflowType().equals(workflowName) && (includeClosed || wf.getStatus().equals(WorkflowStatus.RUNNING))) { result.add(wf); } } return result; } public Task getPendingTaskByWorkflow(String taskReferenceName, String workflowId) { List<Task> tasks = edao.getTasksForWorkflow(workflowId).stream() .filter(task -> !task.getStatus().isTerminal() && task.getReferenceTaskName().equals(taskReferenceName)).collect(Collectors.toList()); if (!tasks.isEmpty()) { return tasks.get(0); // There can only be one task by a given // reference name running at a time. } return null; } public void completeWorkflow(Workflow wf) throws Exception { Workflow workflow = edao.getWorkflow(wf.getWorkflowId(), false); if (workflow.getStatus().equals(WorkflowStatus.COMPLETED)) { logger.warn("Workflow has already been completed. Current status=" + workflow.getStatus() + ", workflowId=" + wf.getWorkflowId()+",CorrelationId=" + wf.getCorrelationId()); return; } if (workflow.getStatus().isTerminal()) { String msg = "Workflow has already been completed. Current status " + workflow.getStatus(); logger.error("Workflow has already been completed. 
status=" + workflow.getStatus()+",workflowId="+workflow.getWorkflowId()+",correlationId="+workflow.getCorrelationId()); throw new ApplicationException(Code.CONFLICT, msg); } workflow.setStatus(WorkflowStatus.COMPLETED); workflow.setOutput(wf.getOutput()); edao.updateWorkflow(workflow); // If the following task, for some reason fails, the sweep will take // care of this again! if (workflow.getParentWorkflowId() != null) { Workflow parent = edao.getWorkflow(workflow.getParentWorkflowId(), false); decide(parent.getWorkflowId()); } Monitors.recordWorkflowCompletion(workflow.getWorkflowType(), workflow.getEndTime() - workflow.getStartTime()); queue.remove(deciderQueue, workflow.getWorkflowId()); //remove from the sweep queue logger.info("Workflow has completed, workflowId=" + wf.getWorkflowId()+",input="+wf.getInput()+",CorrelationId="+wf.getCorrelationId()+",output="+wf.getOutput()); } public String cancelWorkflow(String workflowId,Map<String, Object> inputbody) throws Exception { Workflow workflow = edao.getWorkflow(workflowId, true); workflow.setStatus(WorkflowStatus.CANCELLED); return cancelWorkflow(workflow,inputbody); } public String cancelWorkflow(Workflow workflow,Map<String, Object> inputbody) throws Exception { if (!workflow.getStatus().isTerminal()) { workflow.setStatus(WorkflowStatus.CANCELLED); } String workflowId = workflow.getWorkflowId(); edao.updateWorkflow(workflow); logger.error("Workflow is cancelled.workflowId="+workflowId+",correlationId="+workflow.getCorrelationId()); List<Task> tasks = workflow.getTasks(); for (Task task : tasks) { if (!task.getStatus().isTerminal()) { // Cancel the ones which are not completed yet.... task.setStatus(Status.CANCELED); if (SystemTaskType.is(task.getTaskType())) { WorkflowSystemTask stt = WorkflowSystemTask.get(task.getTaskType()); stt.cancel(workflow, task, this); //SystemTaskType.valueOf(task.getTaskType()).cancel(workflow, task, this); } edao.updateTask(task); applyTaskAction(task, PrePostAction.postTask); } // And remove from the task queue if they were there queue.remove(QueueUtils.getQueueName(task), task.getTaskId()); } // If the following lines, for some reason fails, the sweep will take // care of this again! if (workflow.getParentWorkflowId() != null) { Workflow parent = edao.getWorkflow(workflow.getParentWorkflowId(), false); decide(parent.getWorkflowId()); } WorkflowDef def = metadata.get(workflow.getWorkflowType(), workflow.getVersion()); String cancelWorkflow = def.getCancelWorkflow(); if (!StringUtils.isBlank(cancelWorkflow)) { // Backward compatible by default boolean expandInline = Boolean.parseBoolean(config.getProperty("workflow.failure.expandInline", "true")); Map<String, Object> input = new HashMap<>(); if (expandInline) { input.putAll(workflow.getInput()); } else { input.put("workflowInput", workflow.getInput()); } input.put("workflowId", workflowId); input.put("workflowType", workflow.getWorkflowType()); input.put("workflowVersion", workflow.getVersion()); try { WorkflowDef latestCancelWorkflow = metadata.getLatest(cancelWorkflow); String cancelWFId = startWorkflow(cancelWorkflow, latestCancelWorkflow.getVersion(), input, workflowId, null, null, null); workflow.getOutput().put("conductor.cancel_workflow", cancelWFId); } catch (Exception e) { logger.error("Error workflow " + cancelWorkflow + " failed to start. reason: " + e.getMessage()); workflow.getOutput().put("conductor.cancel_workflow", "Error workflow " + cancelWorkflow + " failed to start. 
reason: " + e.getMessage()); Monitors.recordWorkflowStartError(cancelWorkflow); } } queue.remove(deciderQueue, workflow.getWorkflowId()); //remove from the sweep queue // Send to atlas Monitors.recordWorkflowTermination(workflow.getWorkflowType(), workflow.getStatus()); return workflowId; } public void terminateWorkflow(String workflowId, String reason) throws Exception { Workflow workflow = edao.getWorkflow(workflowId, true); workflow.setStatus(WorkflowStatus.TERMINATED); terminateWorkflow(workflow, reason, null); } public void terminateWorkflow(Workflow workflow, String reason, String failureWorkflow) throws Exception { terminateWorkflow(workflow, reason, failureWorkflow, null); } public void terminateWorkflow(Workflow workflow, String reason, String failureWorkflow, Task failedTask) throws Exception { if (!workflow.getStatus().isTerminal()) { workflow.setStatus(WorkflowStatus.TERMINATED); } String workflowId = workflow.getWorkflowId(); workflow.setReasonForIncompletion(reason); edao.updateWorkflow(workflow); logger.error("Workflow is terminated.workflowId="+workflowId+",correlationId="+workflow.getCorrelationId()+",reasonForIncompletion="+reason); List<Task> tasks = workflow.getTasks(); for (Task task : tasks) { if (!task.getStatus().isTerminal()) { // Cancel the ones which are not completed yet.... task.setStatus(Status.CANCELED); if (SystemTaskType.is(task.getTaskType())) { WorkflowSystemTask stt = WorkflowSystemTask.get(task.getTaskType()); stt.cancel(workflow, task, this); //SystemTaskType.valueOf(task.getTaskType()).cancel(workflow, task, this); } edao.updateTask(task); applyTaskAction(task, PrePostAction.postTask); } // And remove from the task queue if they were there queue.remove(QueueUtils.getQueueName(task), task.getTaskId()); } // If the following lines, for some reason fails, the sweep will take // care of this again! 
if (workflow.getParentWorkflowId() != null) { Workflow parent = edao.getWorkflow(workflow.getParentWorkflowId(), false); decide(parent.getWorkflowId()); } if (!StringUtils.isBlank(failureWorkflow)) { // Backward compatible by default boolean expandInline = Boolean.parseBoolean(config.getProperty("workflow.failure.expandInline", "true")); Map<String, Object> input = new HashMap<>(); if (expandInline) { input.putAll(workflow.getInput()); } else { input.put("workflowInput", workflow.getInput()); } input.put("workflowId", workflowId); input.put("workflowType", workflow.getWorkflowType()); input.put("workflowVersion", workflow.getVersion()); input.put("reason", reason); input.put("failureStatus", workflow.getStatus().toString()); if (failedTask != null) { Map<String, Object> map = new HashMap<>(); map.put("taskId", failedTask.getTaskId()); map.put("input", failedTask.getInputData()); map.put("output", failedTask.getOutputData()); map.put("retryCount", failedTask.getRetryCount()); map.put("referenceName", failedTask.getReferenceTaskName()); map.put("reasonForIncompletion", failedTask.getReasonForIncompletion()); input.put("failedTask", map); logger.error("Error in task execution.workflowid="+workflowId+",correlationId="+workflow.getCorrelationId()+",failedTaskid="+failedTask.getTaskId()+",taskReferenceName="+failedTask.getReferenceTaskName()+"reasonForIncompletion="+failedTask.getReasonForIncompletion()); } try { WorkflowDef latestFailureWorkflow = metadata.getLatest(failureWorkflow); String failureWFId = startWorkflow(failureWorkflow, latestFailureWorkflow.getVersion(), input, workflowId, null, null, null); workflow.getOutput().put("conductor.failure_workflow", failureWFId); } catch (Exception e) { logger.error("Error workflow " + failureWorkflow + " failed to start. reason: " + e.getMessage()); workflow.getOutput().put("conductor.failure_workflow", "Error workflow " + failureWorkflow + " failed to start. reason: " + e.getMessage()); Monitors.recordWorkflowStartError(failureWorkflow); } } queue.remove(deciderQueue, workflow.getWorkflowId()); //remove from the sweep queue // Send to atlas Monitors.recordWorkflowTermination(workflow.getWorkflowType(), workflow.getStatus()); } public void updateTask(TaskResult result) throws Exception { if (result == null) { logger.error("null task given for update..." + result); throw new ApplicationException(Code.INVALID_INPUT, "Task object is null"); } String workflowId = result.getWorkflowInstanceId(); Workflow wf = edao.getWorkflow(workflowId); Task task = edao.getTask(result.getTaskId()); if (wf.getStatus().isTerminal()) { // Workflow is in terminal state queue.remove(QueueUtils.getQueueName(task), result.getTaskId()); if(!task.getStatus().isTerminal()) { task.setStatus(Status.COMPLETED); } task.setOutputData(result.getOutputData()); task.setReasonForIncompletion(result.getReasonForIncompletion()); task.setWorkerId(result.getWorkerId()); edao.updateTask(task); applyTaskAction(task, PrePostAction.postTask); String msg = "Workflow " + wf.getWorkflowId() + " is already completed as " + wf.getStatus() + ", task=" + task.getTaskType() + ", reason=" + wf.getReasonForIncompletion()+",correlationId="+wf.getCorrelationId(); logger.warn(msg); Monitors.recordUpdateConflict(task.getTaskType(), wf.getWorkflowType(), wf.getStatus()); return; } if (task.getStatus().isTerminal()) { // Task was already updated.... 
queue.remove(QueueUtils.getQueueName(task), result.getTaskId()); String msg = "Task is already completed as " + task.getStatus() + "@" + task.getEndTime() + ", workflow status=" + wf.getStatus() + ", workflowId=" + wf.getWorkflowId() + ", taskId=" + task.getTaskId()+",correlationId="+wf.getCorrelationId(); logger.warn(msg); Monitors.recordUpdateConflict(task.getTaskType(), wf.getWorkflowType(), task.getStatus()); return; } task.setStatus(Status.valueOf(result.getStatus().name())); task.setOutputData(result.getOutputData()); task.setReasonForIncompletion(result.getReasonForIncompletion()); task.setWorkerId(result.getWorkerId()); task.setCallbackAfterSeconds(result.getCallbackAfterSeconds()); if (task.getStatus().isTerminal()) { task.setEndTime(System.currentTimeMillis()); } else { if (result.isResetStartTime()) { task.setStartTime(System.currentTimeMillis()); // We must reset endtime only when it is set if (task.getEndTime() > 0) { task.setEndTime(System.currentTimeMillis()); } } } edao.updateTask(task); result.getLogs().forEach(tl -> tl.setTaskId(task.getTaskId())); edao.addTaskExecLog(result.getLogs()); switch (task.getStatus()) { case COMPLETED: queue.remove(QueueUtils.getQueueName(task), result.getTaskId()); applyTaskAction(task, PrePostAction.postTask); break; case CANCELED: queue.remove(QueueUtils.getQueueName(task), result.getTaskId()); applyTaskAction(task, PrePostAction.postTask); break; case FAILED: queue.remove(QueueUtils.getQueueName(task), result.getTaskId()); applyTaskAction(task, PrePostAction.postTask); break; case IN_PROGRESS: // put it back in queue based in callbackAfterSeconds long callBack = result.getCallbackAfterSeconds(); queue.remove(QueueUtils.getQueueName(task), task.getTaskId()); queue.push(QueueUtils.getQueueName(task), task.getTaskId(), callBack); // Milliseconds break; default: break; } decide(workflowId); if (task.getStatus().isTerminal()) { long duration = getTaskDuration(0, task); long lastDuration = task.getEndTime() - task.getStartTime(); Monitors.recordTaskExecutionTime(task.getTaskDefName(), duration, true, task.getStatus()); Monitors.recordTaskExecutionTime(task.getTaskDefName(), lastDuration, false, task.getStatus()); } } public List<Task> getTasks(String taskType, String startKey, int count) throws Exception { return edao.getTasks(taskType, startKey, count); } public List<Workflow> getRunningWorkflows(String workflowName) throws Exception { List<Workflow> allwf = edao.getPendingWorkflowsByType(workflowName); return allwf; } public List<String> getWorkflows(String name, Integer version, Long startTime, Long endTime) { List<Workflow> allwf = edao.getWorkflowsByType(name, startTime, endTime); List<String> workflows = allwf.stream().filter(wf -> wf.getVersion() == version).map(wf -> wf.getWorkflowId()).collect(Collectors.toList()); return workflows; } public List<String> getRunningWorkflowIds(String workflowName) throws Exception { return edao.getRunningWorkflowIds(workflowName); } /** * * @param workflowId ID of the workflow to evaluate the state for * @return true if the workflow has completed (success or failed), false otherwise. * @throws Exception If there was an error - caller should retry in this case. 
*/ public boolean decide(String workflowId) throws Exception { Workflow workflow = edao.getWorkflow(workflowId, true); WorkflowDef def = metadata.get(workflow.getWorkflowType(), workflow.getVersion()); try { DeciderOutcome outcome = decider.decide(workflow, def); if(outcome.isComplete) { completeWorkflow(workflow); return true; } List<Task> tasksToBeScheduled = outcome.tasksToBeScheduled; setTaskDomains(tasksToBeScheduled, workflow); List<Task> tasksToBeUpdated = outcome.tasksToBeUpdated; boolean stateChanged = false; workflow.getTasks().addAll(tasksToBeScheduled); for(Task task : tasksToBeScheduled) { if (SystemTaskType.is(task.getTaskType()) && !task.getStatus().isTerminal()) { WorkflowSystemTask stt = WorkflowSystemTask.get(task.getTaskType()); if (!stt.isAsync() && stt.execute(workflow, task, this)) { tasksToBeUpdated.add(task); stateChanged = true; } } } stateChanged = scheduleTask(workflow, tasksToBeScheduled) || stateChanged; if(!outcome.tasksToBeUpdated.isEmpty() || !outcome.tasksToBeScheduled.isEmpty()) { edao.updateTasks(tasksToBeUpdated); edao.updateWorkflow(workflow); queue.push(deciderQueue, workflow.getWorkflowId(), config.getSweepFrequency()); } if (outcome.startWorkflow != null) { DeciderService.StartWorkflowParams startWorkflow = outcome.startWorkflow; String workflowName = startWorkflow.name; int workflowVersion; if (startWorkflow.version == null) { WorkflowDef subFlowDef = metadata.getLatest(workflowName); workflowVersion = subFlowDef.getVersion(); } else { workflowVersion = startWorkflow.version; } startWorkflow(workflowName, workflowVersion, startWorkflow.params, null, workflow.getWorkflowId(), null,null); } if(stateChanged) { decide(workflowId); } } catch (TerminateWorkflow tw) { logger.error("Error in workflow execution:"+tw.getMessage(), tw); terminate(def, workflow, tw); return true; } return false; } public void pauseWorkflow(String workflowId) throws Exception { WorkflowStatus status = WorkflowStatus.PAUSED; Workflow workflow = edao.getWorkflow(workflowId, false); if(workflow.getStatus().isTerminal()){ throw new ApplicationException(Code.CONFLICT, "Workflow id " + workflowId + " has ended, status cannot be updated."); } if (workflow.getStatus().equals(status)) { return; //Already paused! } workflow.setStatus(status); edao.updateWorkflow(workflow); } public void resumeWorkflow(String workflowId) throws Exception{ Workflow workflow = edao.getWorkflow(workflowId, false); if(!workflow.getStatus().equals(WorkflowStatus.PAUSED)){ logger.error("Workflow is not is not PAUSED so cannot resume. 
Current status=" + workflow.getStatus() + ", workflowId=" + workflow.getWorkflowId()+",CorrelationId=" + workflow.getCorrelationId()); throw new IllegalStateException("The workflow " + workflowId + " is not is not PAUSED so cannot resume"); } workflow.setStatus(WorkflowStatus.RUNNING); edao.updateWorkflow(workflow); decide(workflowId); } public void skipTaskFromWorkflow(String workflowId, String taskReferenceName, SkipTaskRequest skipTaskRequest) throws Exception { Workflow wf = edao.getWorkflow(workflowId, true); // If the wf is not running then cannot skip any task if(!wf.getStatus().equals(WorkflowStatus.RUNNING)){ String errorMsg = String.format("The workflow %s is not running so the task referenced by %s cannot be skipped", workflowId, taskReferenceName); logger.error(errorMsg); throw new IllegalStateException(errorMsg); } // Check if the reference name is as per the workflowdef WorkflowDef wfd = metadata.get(wf.getWorkflowType(), wf.getVersion()); WorkflowTask wft = wfd.getTaskByRefName(taskReferenceName); if(wft == null){ String errorMsg = String.format("The task referenced by %s does not exist in the WorkflowDef %s", taskReferenceName, wf.getWorkflowType()); logger.error(errorMsg); throw new IllegalStateException(errorMsg); } // If the task is already started the again it cannot be skipped wf.getTasks().forEach(task -> { if(task.getReferenceTaskName().equals(taskReferenceName)){ String errorMsg = String.format("The task referenced %s has already been processed, cannot be skipped", taskReferenceName); logger.error(errorMsg); throw new IllegalStateException(errorMsg); } }); // Now create a "SKIPPED" task for this workflow Task theTask = new Task(); theTask.setTaskId(IDGenerator.generate()); theTask.setReferenceTaskName(taskReferenceName); theTask.setWorkflowInstanceId(workflowId); theTask.setStatus(Status.SKIPPED); theTask.setTaskType(wft.getName()); theTask.setCorrelationId(wf.getCorrelationId()); if(skipTaskRequest != null){ theTask.setInputData(skipTaskRequest.getTaskInput()); theTask.setOutputData(skipTaskRequest.getTaskOutput()); } edao.createTasks(Arrays.asList(theTask)); decide(workflowId); } public Workflow getWorkflow(String workflowId, boolean includeTasks) { return edao.getWorkflow(workflowId, includeTasks); } public void addTaskToQueue(Task task) throws Exception { // put in queue queue.remove(QueueUtils.getQueueName(task), task.getTaskId()); if (task.getCallbackAfterSeconds() > 0) { queue.push(QueueUtils.getQueueName(task), task.getTaskId(), task.getCallbackAfterSeconds()); } else { queue.push(QueueUtils.getQueueName(task), task.getTaskId(), 0); } } //Executes the async system task public void executeSystemTask(WorkflowSystemTask systemTask, String taskId, int unackTimeout) { try { Task task = edao.getTask(taskId); if(task.getStatus().isTerminal()) { //Tune the SystemTaskWorkerCoordinator's queues - if the queue size is very big this can happen! 
logger.warn("Task {}/{} was already completed.", task.getTaskType(), task.getTaskId()); queue.remove(QueueUtils.getQueueName(task), task.getTaskId()); return; } String workflowId = task.getWorkflowInstanceId(); Workflow workflow = edao.getWorkflow(workflowId, true); if (task.getStartTime() == 0) { task.setStartTime(System.currentTimeMillis()); Monitors.recordQueueWaitTime(task.getTaskDefName(), task.getQueueWaitTime()); } if(workflow.getStatus().isTerminal()) { logger.warn("Workflow {} has been completed for {}/{}", workflow.getWorkflowId(), systemTask.getName(), task.getTaskId()); if(!task.getStatus().isTerminal()) { task.setStatus(Status.CANCELED); applyTaskAction(task, PrePostAction.postTask); } edao.updateTask(task); queue.remove(QueueUtils.getQueueName(task), task.getTaskId()); return; } if(task.getStatus().equals(Status.SCHEDULED)) { if(edao.exceedsInProgressLimit(task)) { logger.warn("Rate limited for {}", task.getTaskDefName()); return; } } logger.info("Executing {}/{}-{}", task.getTaskType(), task.getTaskId(), task.getStatus()); queue.setUnackTimeout(QueueUtils.getQueueName(task), task.getTaskId(), systemTask.getRetryTimeInSecond() * 1000); task.setPollCount(task.getPollCount() + 1); edao.updateTask(task); switch (task.getStatus()) { case SCHEDULED: applyTaskAction(task, PrePostAction.preTask); systemTask.start(workflow, task, this); break; case IN_PROGRESS: systemTask.execute(workflow, task, this); break; default: break; } if(!task.getStatus().isTerminal()) { task.setCallbackAfterSeconds(unackTimeout); } updateTask(new TaskResult(task)); logger.info("Done Executing {}/{}-{} op={}", task.getTaskType(), task.getTaskId(), task.getStatus(), task.getOutputData().toString()); } catch (Exception e) { logger.error(e.getMessage(), e); } } public void setTaskDomains(List<Task> tasks, Workflow wf){ Map<String, String> taskToDomain = wf.getTaskToDomain(); if(taskToDomain != null){ // Check if all tasks have the same domain "*" String domainstr = taskToDomain.get("*"); if(domainstr != null){ String[] domains = domainstr.split(","); tasks.forEach(task -> { // Filter out SystemTask if(!(task instanceof SystemTask)){ // Check which domain worker is polling // Set the task domain task.setDomain(getActiveDomain(task.getTaskType(), domains)); } }); } else { tasks.forEach(task -> { if(!(task instanceof SystemTask)){ String taskDomainstr = taskToDomain.get(task.getTaskType()); if(taskDomainstr != null){ task.setDomain(getActiveDomain(task.getTaskType(), taskDomainstr.split(","))); } } }); } } } private String getActiveDomain(String taskType, String[] domains){ // The domain list has to be ordered. 
// In sequence check if any worker has polled for last 30 seconds, if so that is the Active domain String domain = null; // Default domain for(String d: domains){ PollData pd = edao.getPollData(taskType, d.trim()); if(pd != null){ if(pd.getLastPollTime() > System.currentTimeMillis() - (activeWorkerLastPollnSecs * 1000)){ domain = d.trim(); break; } } } return domain; } private long getTaskDuration(long s, Task task) { long duration = task.getEndTime() - task.getStartTime(); s += duration; if (task.getRetriedTaskId() == null) { return s; } return s + getTaskDuration(s, edao.getTask(task.getRetriedTaskId())); } @VisibleForTesting boolean scheduleTask(Workflow workflow, List<Task> tasks) throws Exception { if (tasks == null || tasks.isEmpty()) { return false; } int count = workflow.getTasks().size(); for (Task task : tasks) { task.setSeq(++count); } List<Task> created = edao.createTasks(tasks); List<Task> createdSystemTasks = created.stream().filter(task -> SystemTaskType.is(task.getTaskType())).collect(Collectors.toList()); List<Task> toBeQueued = created.stream().filter(task -> !SystemTaskType.is(task.getTaskType())).collect(Collectors.toList()); boolean startedSystemTasks = false; for(Task task : createdSystemTasks) { WorkflowSystemTask stt = WorkflowSystemTask.get(task.getTaskType()); if(stt == null) { throw new RuntimeException("No system task found by name " + task.getTaskType()); } task.setStartTime(System.currentTimeMillis()); if(!stt.isAsync()) { applyTaskAction(task, PrePostAction.preTask); stt.start(workflow, task, this); startedSystemTasks = true; edao.updateTask(task); if (task.getStatus().isTerminal()) { applyTaskAction(task, PrePostAction.postTask); } } else { toBeQueued.add(task); } } addTaskToQueue(toBeQueued); return startedSystemTasks; } private void addTaskToQueue(final List<Task> tasks) throws Exception { for (Task t : tasks) { addTaskToQueue(t); } } private void terminate(final WorkflowDef def, final Workflow workflow, TerminateWorkflow tw) throws Exception { if (!workflow.getStatus().isTerminal()) { workflow.setStatus(tw.workflowStatus); } String failureWorkflow = def.getFailureWorkflow(); if (failureWorkflow != null) { if (failureWorkflow.startsWith("$")) { String[] paramPathComponents = failureWorkflow.split("\\."); String name = paramPathComponents[2]; // name of the input parameter failureWorkflow = (String) workflow.getInput().get(name); } } if(tw.task != null){ edao.updateTask(tw.task); } terminateWorkflow(workflow, tw.getMessage(), failureWorkflow, tw.task); String taskId = (tw.task != null ? tw.task.getTaskId() : null); String taskRefName = (tw.task != null ? tw.task.getReferenceTaskName() : null); logger.error("Workflow failed. 
workflowId=" + workflow.getWorkflowId()+",correlationId="+workflow.getCorrelationId()+",Reason="+tw.getMessage()+",taskId="+taskId+",taskReferenceName="+taskRefName); } @SuppressWarnings("unchecked") private void applyTaskAction(Task task, WorkflowSystemTask.PrePostAction action) throws Exception { Object eventMessages = task.getInputData().get("event_messages"); if (eventMessages == null) { return; } Map<String, Object> eventMsgMap = (Map<String, Object>) eventMessages; if (eventMsgMap.containsKey(action.name())) { Map<String, Object> actionMap = (Map<String, Object>) eventMsgMap.get(action.name()); ObjectMapper mapper = new ObjectMapper(); Message msg = new Message(); msg.setId(UUID.randomUUID().toString()); String payload = mapper.writeValueAsString(actionMap.get("inputParameters")); msg.setPayload(payload); String sink = (String) actionMap.get("sink"); ObservableQueue queue = EventQueues.getQueue(sink, false); if (queue != null) { queue.publish(Collections.singletonList(msg)); } } } }
minor optimization and additional places to fire the event have been added, including system tasks (event, wait, etc)
core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java
minor optimization and additional places to fire the event have been added, including system tasks (event, wait, etc)
<ide><path>ore/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java <ide> /** <ide> * @author Viren Workflow services provider interface <ide> */ <del>@SuppressWarnings("ALL") <ide> @Trace <ide> public class WorkflowExecutor { <ide>
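Note: the commit above fires task pre/post events through applyTaskAction, which only acts when the task's input data carries an event_messages map keyed by the action name (preTask / postTask), each entry providing a sink and inputParameters. A minimal sketch of such an input as a plain object; the field names come from the applyTaskAction code above, while the sink name and parameter values are placeholders, not an official Conductor schema.

// Hypothetical task input fragment. Field names ("event_messages", "preTask",
// "postTask", "sink", "inputParameters") are the ones read by applyTaskAction
// above; the sink name and parameter values are made up.
const taskInput = {
  event_messages: {
    preTask: {
      sink: 'sqs:task_status_events',          // looked up via EventQueues.getQueue(sink, false)
      inputParameters: { status: 'STARTED' }   // serialized to JSON and published as the message payload
    },
    postTask: {
      sink: 'sqs:task_status_events',
      inputParameters: { status: 'FINISHED' }
    }
  }
}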
JavaScript
mit
1c2b7db98687d9bbb9ec5ff03c46c9ec10a793b8
0
creynders/keystone,creynders/keystone
var demand = require('must'); var Path = require('../../../lib/path'); describe('Path', function () { describe('new', function () { it('must be an instance of Path', function () { new Path('').must.be.an.instanceof(Path); }); }); describe('.prototype.append', function () { it('must return the path appended to the given part', function () { var path = new Path('foo.example.dir'); path.append('.com').must.equal('foo.example.dir.com'); }); }); describe('.prototype.addTo', function () { it('must return an hierarchical object from path', function () { var path = new Path('foo.example.dir.file'); var obj = path.addTo({}, 42); obj.must.eql({ foo: { example: { dir: { file: 42 } } } }); }); it('must merge given an existing hierarchy', function () { var path = new Path('foo.example.dir.file'); var obj = path.addTo({ foo: { example: { link: 69 } } }, 42); obj.must.eql({ foo: { example: { link: 69, dir: { file: 42 } } } }); }); }); describe('.prototype.get', function () { it('must return a simple value', function () { var path = new Path('foo'); demand(path.get({ foo: 42 })).equal(42); }); it('must return a nested value', function () { var path = new Path('foo.example.dir'); demand(path.get({ foo: { example: { dir: 42 } } })).equal(42); }); it('must return undefined when a nested value isn\'t present', function () { var path = new Path('foo.example.dir'); demand(path.get({})).be.undefined(); }); it('must return a flat nested value', function () { var path = new Path('foo.example.dir'); demand(path.get({ 'foo.example.dir': 42 })).equal(42); }); it('must return an appended sub path', function () { var path = new Path('foo.example.dir'); demand(path.get({ foo: { example: { dir_ext: 42 } } }, '_ext')).equal(42); }); it('must return a flat appended sub path', function () { var path = new Path('foo.example.dir'); demand(path.get({ 'foo.example.dir_ext': 42 }, '_ext')).equal(42); }); it('must return a nested sub path', function () { var path = new Path('foo.example.dir'); demand(path.get({ foo: { example: { dir: { ext: 42 } } } }, '.ext')).equal(42); }); it('must return a flat nested sub path', function () { var path = new Path('foo.example.dir'); demand(path.get({ 'foo.example.dir.ext': 42 }, '.ext')).equal(42); }); }); });
test/unitTests/lib/path_test.js
var demand = require('must'); var Path = require("../../../lib/path"); describe("Path", function() { describe("new", function() { it("must be an instance of Path", function() { new Path("").must.be.an.instanceof(Path); }) }) describe(".prototype.append", function() { it("must return the path appended to the given part", function() { var path = new Path("foo.example.dir") path.append(".com").must.equal("foo.example.dir.com") }) }) describe(".prototype.addTo", function() { it("must return an hierarchical object from path", function() { var path = new Path("foo.example.dir.file") var obj = path.addTo({}, 42) obj.must.eql({foo: {example: {dir: {file: 42}}}}) }) it("must merge given an existing hierarchy", function() { var path = new Path("foo.example.dir.file") var obj = path.addTo({foo: {example: {link: 69}}}, 42) obj.must.eql({foo: {example: {link: 69, dir: {file: 42}}}}) }) }) describe(".prototype.get", function() { it("must walk hierarchy and return value", function() { var path = new Path("foo.example.dir") path.get({foo: {example: {dir: 42}}}).must.equal(42) }) it("must return undefined when a path isn't present", function() { var path = new Path("foo.example.dir") demand(path.get({})).be.undefined() }) }) })
Significantly expanding our Path tests
test/unitTests/lib/path_test.js
Significantly expanding our Path tests
<ide><path>est/unitTests/lib/path_test.js <ide> var demand = require('must'); <del>var Path = require("../../../lib/path"); <add>var Path = require('../../../lib/path'); <ide> <del>describe("Path", function() { <del> describe("new", function() { <del> it("must be an instance of Path", function() { <del> new Path("").must.be.an.instanceof(Path); <del> }) <del> }) <add>describe('Path', function () { <add> describe('new', function () { <add> it('must be an instance of Path', function () { <add> new Path('').must.be.an.instanceof(Path); <add> }); <add> }); <ide> <del> describe(".prototype.append", function() { <del> it("must return the path appended to the given part", function() { <del> var path = new Path("foo.example.dir") <del> path.append(".com").must.equal("foo.example.dir.com") <del> }) <del> }) <add> describe('.prototype.append', function () { <add> it('must return the path appended to the given part', function () { <add> var path = new Path('foo.example.dir'); <add> path.append('.com').must.equal('foo.example.dir.com'); <add> }); <add> }); <ide> <del> describe(".prototype.addTo", function() { <del> it("must return an hierarchical object from path", function() { <del> var path = new Path("foo.example.dir.file") <del> var obj = path.addTo({}, 42) <del> obj.must.eql({foo: {example: {dir: {file: 42}}}}) <del> }) <add> describe('.prototype.addTo', function () { <add> it('must return an hierarchical object from path', function () { <add> var path = new Path('foo.example.dir.file'); <add> var obj = path.addTo({}, 42); <add> obj.must.eql({ foo: { example: { dir: { file: 42 } } } }); <add> }); <ide> <del> it("must merge given an existing hierarchy", function() { <del> var path = new Path("foo.example.dir.file") <del> var obj = path.addTo({foo: {example: {link: 69}}}, 42) <del> obj.must.eql({foo: {example: {link: 69, dir: {file: 42}}}}) <del> }) <del> }) <add> it('must merge given an existing hierarchy', function () { <add> var path = new Path('foo.example.dir.file'); <add> var obj = path.addTo({ foo: { example: { link: 69 } } }, 42); <add> obj.must.eql({ foo: { example: { link: 69, dir: { file: 42 } } } }); <add> }); <add> }); <ide> <del> describe(".prototype.get", function() { <del> it("must walk hierarchy and return value", function() { <del> var path = new Path("foo.example.dir") <del> path.get({foo: {example: {dir: 42}}}).must.equal(42) <del> }) <del> it("must return undefined when a path isn't present", function() { <del> var path = new Path("foo.example.dir") <del> demand(path.get({})).be.undefined() <del> }) <del> }) <del>}) <add> describe('.prototype.get', function () { <add> it('must return a simple value', function () { <add> var path = new Path('foo'); <add> demand(path.get({ foo: 42 })).equal(42); <add> }); <add> it('must return a nested value', function () { <add> var path = new Path('foo.example.dir'); <add> demand(path.get({ foo: { example: { dir: 42 } } })).equal(42); <add> }); <add> it('must return undefined when a nested value isn\'t present', function () { <add> var path = new Path('foo.example.dir'); <add> demand(path.get({})).be.undefined(); <add> }); <add> it('must return a flat nested value', function () { <add> var path = new Path('foo.example.dir'); <add> demand(path.get({ 'foo.example.dir': 42 })).equal(42); <add> }); <add> it('must return an appended sub path', function () { <add> var path = new Path('foo.example.dir'); <add> demand(path.get({ foo: { example: { dir_ext: 42 } } }, '_ext')).equal(42); <add> }); <add> it('must return a flat appended sub path', function 
() { <add> var path = new Path('foo.example.dir'); <add> demand(path.get({ 'foo.example.dir_ext': 42 }, '_ext')).equal(42); <add> }); <add> it('must return a nested sub path', function () { <add> var path = new Path('foo.example.dir'); <add> demand(path.get({ foo: { example: { dir: { ext: 42 } } } }, '.ext')).equal(42); <add> }); <add> it('must return a flat nested sub path', function () { <add> var path = new Path('foo.example.dir'); <add> demand(path.get({ 'foo.example.dir.ext': 42 }, '.ext')).equal(42); <add> }); <add> }); <add>});
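For reference, the new test cases above pin down get() semantics for nested hierarchies, flat dotted keys, and appended ('_ext') or nested ('.ext') subpaths. A minimal sketch of a get() that would satisfy them, mirroring the path.get(obj, subpath) call shape; this is an illustration only, not keystone's actual lib/path implementation.

// Illustration only (not the real lib/path): resolve a dotted path against an
// object, preferring a flat key and falling back to walking the hierarchy.
function get(obj, path, subpath) {
  var full = subpath ? path + subpath : path;
  if (full in obj) return obj[full];                    // flat key, e.g. { 'foo.example.dir': 42 }
  return full.split('.').reduce(function (branch, part) {
    return branch === undefined ? undefined : branch[part];
  }, obj);                                              // nested lookup otherwise
}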
JavaScript
mit
6eb46143feddbf29f4052d0323c25c54654c620d
0
glumb/mrcl
import debug from 'debug' import EventEmitter from 'events' import protocol from '../src/protocolDefinitions' let numberOfMessages = 1 export default class MRIL extends EventEmitter { constructor(instruction = '') { super() this.instruction = instruction this.bytes = 0 this.preparedMRILMessage = '' this.number = numberOfMessages++ this.mrcp this.state = { executing: false, executed: false, } // remove whitespace and number this.preparedMRILMessage = instruction.split(' ').join('') .replace(new RegExp(`${protocol.MRIL.COMMAND_NUMBER}\\d+`, 'gi'), '') if (this.preparedMRILMessage.indexOf('#') > 0) { // remove comment this.preparedMRILMessage = this.preparedMRILMessage.substring(0, this.preparedMRILMessage.indexOf('#')) } if (instruction.length !== 0) { // no message number for an empty message this.preparedMRILMessage = protocol.MRIL.COMMAND_NUMBER + this.number + this.preparedMRILMessage } this.bytes = this.preparedMRILMessage.length } /** * the instruction send to the robot * @return {[string]} preparedMRILMessage */ getInstruction() { return this.preparedMRILMessage } /** * the instruction used to construct the MRIL object. (number and spacing may differ) * @return {[type]} raw instruction */ getRawInstruction() { return this.instruction } setExecuting() { this.state.executing = true this.emit('executing') } isExecuting() { return this.state.executing } onExecuting(cb) { this.on('executing', cb) } setExecuted(response = '') { this.state.executed = true this.response = response this.emit('executed', response) } isExecuted() { return this.state.executed } onExecuted(cb) { this.on('executed', cb) } getNumber() { return this.number } getBytes() { return this.bytes } }
src/MRIL.js
import debug from 'debug' import EventEmitter from 'events' import protocol from '../src/protocolDefinitions' let numberOfMessages = 1 export default class MRIL extends EventEmitter { constructor(instruction = '') { super() this.instruction = instruction this.bytes = 0 this.preparedMRILMessage = 0 this.number = numberOfMessages++ this.mrcp this.state = { executing: false, executed: false, } // remove whitespace and number this.preparedMRILMessage = instruction.split(' ').join('') .replace(new RegExp(`${protocol.MRIL.COMMAND_NUMBER}\\d+`, 'gi'), '') if (this.preparedMRILMessage.indexOf('#') > 0) { // remove comment this.preparedMRILMessage = this.preparedMRILMessage.substring(0, this.preparedMRILMessage.indexOf('#')) } this.preparedMRILMessage = protocol.MRIL.COMMAND_NUMBER + this.number + this.preparedMRILMessage this.bytes = this.preparedMRILMessage.length } /** * the instruction send to the robot * @return {[string]} preparedMRILMessage */ getInstruction() { return this.preparedMRILMessage } /** * the instruction used to construct the MRIL object. (number and spacing may differ) * @return {[type]} raw instruction */ getRawInstruction() { return this.instruction } setExecuting() { this.state.executing = true this.emit('executing') } isExecuting() { return this.state.executing } onExecuting(cb) { this.on('executing', cb) } setExecuted(response = '') { this.state.executed = true this.response = response this.emit('executed', response) } isExecuted() { return this.state.executed } onExecuted(cb) { this.on('executed', cb) } getNumber() { return this.number } getBytes() { return this.bytes } }
fix(MRIL): allow empty message
src/MRIL.js
fix(MRIL): allow empty message
<ide><path>rc/MRIL.js <ide> <ide> this.instruction = instruction <ide> this.bytes = 0 <del> this.preparedMRILMessage = 0 <add> this.preparedMRILMessage = '' <ide> this.number = numberOfMessages++ <ide> <ide> this.mrcp <ide> executed: false, <ide> } <ide> <add> <ide> // remove whitespace and number <ide> this.preparedMRILMessage = instruction.split(' ').join('') <ide> .replace(new RegExp(`${protocol.MRIL.COMMAND_NUMBER}\\d+`, 'gi'), '') <ide> this.preparedMRILMessage = this.preparedMRILMessage.substring(0, this.preparedMRILMessage.indexOf('#')) <ide> } <ide> <del> this.preparedMRILMessage = protocol.MRIL.COMMAND_NUMBER + this.number + this.preparedMRILMessage <add> if (instruction.length !== 0) { // no message number for an empty message <add> this.preparedMRILMessage = protocol.MRIL.COMMAND_NUMBER + this.number + this.preparedMRILMessage <add> } <ide> <ide> this.bytes = this.preparedMRILMessage.length <ide> }
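A short usage sketch of the fixed behaviour; the instruction string below is made up, and protocol.MRIL.COMMAND_NUMBER stands for whatever prefix character the protocol actually defines.

import MRIL from './src/MRIL'

// Non-empty instruction: whitespace and the trailing '#' comment are stripped,
// then COMMAND_NUMBER plus a running message number is prepended.
const move = new MRIL('M00 X10 Y20 # made-up instruction')
move.getInstruction() // e.g. '<COMMAND_NUMBER>1M00X10Y20'
move.getBytes()       // length of that prepared string

// Empty instruction: after this fix no message number is prepended,
// so the prepared message stays '' and getBytes() returns 0.
const empty = new MRIL()
empty.getInstruction() // ''
empty.getBytes()       // 0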
Java
apache-2.0
291e381531720b4aee8cc5ba995a15613c08802a
0
farmerbb/Taskbar,farmerbb/Taskbar
/* Copyright 2016 Braden Farmer * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.farmerbb.taskbar.service; import android.annotation.SuppressLint; import android.annotation.TargetApi; import android.app.SearchManager; import android.app.Service; import android.content.ActivityNotFoundException; import android.content.BroadcastReceiver; import android.content.ComponentName; import android.content.Context; import android.content.Intent; import android.content.IntentFilter; import android.content.SharedPreferences; import android.content.pm.LauncherActivityInfo; import android.content.pm.LauncherApps; import android.content.pm.PackageManager; import android.content.res.Configuration; import android.graphics.PixelFormat; import android.graphics.Rect; import android.graphics.drawable.Drawable; import android.net.Uri; import android.os.Build; import android.os.Handler; import android.os.IBinder; import android.os.UserHandle; import android.os.UserManager; import android.provider.Settings; import android.support.v4.content.LocalBroadcastManager; import android.support.v7.widget.SearchView; import android.util.DisplayMetrics; import android.util.Patterns; import android.view.ContextThemeWrapper; import android.view.Gravity; import android.view.LayoutInflater; import android.view.MotionEvent; import android.view.View; import android.view.ViewGroup; import android.view.WindowManager; import android.view.inputmethod.EditorInfo; import android.view.inputmethod.InputMethodManager; import android.webkit.URLUtil; import android.widget.EditText; import android.widget.FrameLayout; import android.widget.GridView; import android.widget.LinearLayout; import android.widget.ListAdapter; import android.widget.TextView; import com.farmerbb.taskbar.R; import com.farmerbb.taskbar.activity.ContextMenuActivity; import com.farmerbb.taskbar.activity.dark.ContextMenuActivityDark; import com.farmerbb.taskbar.activity.InvisibleActivity; import com.farmerbb.taskbar.activity.InvisibleActivityAlt; import com.farmerbb.taskbar.adapter.StartMenuAdapter; import com.farmerbb.taskbar.util.AppEntry; import com.farmerbb.taskbar.util.ApplicationType; import com.farmerbb.taskbar.util.Blacklist; import com.farmerbb.taskbar.util.FreeformHackHelper; import com.farmerbb.taskbar.util.IconCache; import com.farmerbb.taskbar.util.LauncherHelper; import com.farmerbb.taskbar.util.MenuHelper; import com.farmerbb.taskbar.util.TopApps; import com.farmerbb.taskbar.util.U; import com.farmerbb.taskbar.widget.StartMenuLayout; import java.text.Collator; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.List; public class StartMenuService extends Service { private WindowManager windowManager; private StartMenuLayout layout; private GridView startMenu; private SearchView searchView; private TextView textView; private PackageManager pm; private Handler handler; private Thread thread; private boolean shouldShowSearchBox = false; private boolean hasSubmittedQuery = false; private int layoutId = 
R.layout.start_menu_left; private List<String> currentStartMenuIds = new ArrayList<>(); private View.OnClickListener ocl = view -> toggleStartMenu(true); private BroadcastReceiver toggleReceiver = new BroadcastReceiver() { @Override public void onReceive(Context context, Intent intent) { toggleStartMenu(true); } }; private BroadcastReceiver toggleReceiverAlt = new BroadcastReceiver() { @Override public void onReceive(Context context, Intent intent) { toggleStartMenu(false); } }; private BroadcastReceiver showSpaceReceiver = new BroadcastReceiver() { @Override public void onReceive(Context context, Intent intent) { layout.findViewById(R.id.start_menu_space).setVisibility(View.VISIBLE); } }; private BroadcastReceiver hideSpaceReceiver = new BroadcastReceiver() { @Override public void onReceive(Context context, Intent intent) { layout.findViewById(R.id.start_menu_space).setVisibility(View.GONE); } }; private BroadcastReceiver hideReceiver = new BroadcastReceiver() { @Override public void onReceive(Context context, Intent intent) { hideStartMenu(); } }; private Comparator<LauncherActivityInfo> comparator = (ai1, ai2) -> { String label1; String label2; try { label1 = ai1.getLabel().toString(); label2 = ai2.getLabel().toString(); } catch (OutOfMemoryError e) { System.gc(); label1 = ai1.getApplicationInfo().packageName; label2 = ai2.getApplicationInfo().packageName; } return Collator.getInstance().compare(label1, label2); }; @Override public IBinder onBind(Intent intent) { return null; } @Override public int onStartCommand(Intent intent, int flags, int startId) { return START_STICKY; } @TargetApi(Build.VERSION_CODES.M) @Override public void onCreate() { super.onCreate(); SharedPreferences pref = U.getSharedPreferences(this); if(pref.getBoolean("taskbar_active", false) || LauncherHelper.getInstance().isOnHomeScreen()) { if(Build.VERSION.SDK_INT < Build.VERSION_CODES.M || Settings.canDrawOverlays(this)) drawStartMenu(); else { pref.edit().putBoolean("taskbar_active", false).apply(); stopSelf(); } } else stopSelf(); } @SuppressLint("RtlHardcoded") private void drawStartMenu() { IconCache.getInstance(this).clearCache(); final SharedPreferences pref = U.getSharedPreferences(this); final boolean hasHardwareKeyboard = getResources().getConfiguration().keyboard != Configuration.KEYBOARD_NOKEYS; switch(pref.getString("show_search_bar", "keyboard")) { case "always": shouldShowSearchBox = true; break; case "keyboard": shouldShowSearchBox = hasHardwareKeyboard; break; case "never": shouldShowSearchBox = false; break; } // Initialize layout params windowManager = (WindowManager) getSystemService(WINDOW_SERVICE); U.setCachedRotation(windowManager.getDefaultDisplay().getRotation()); final WindowManager.LayoutParams params = new WindowManager.LayoutParams( WindowManager.LayoutParams.WRAP_CONTENT, WindowManager.LayoutParams.WRAP_CONTENT, U.getOverlayType(), shouldShowSearchBox ? 
0 : WindowManager.LayoutParams.FLAG_NOT_FOCUSABLE | WindowManager.LayoutParams.FLAG_ALT_FOCUSABLE_IM, PixelFormat.TRANSLUCENT); // Determine where to show the start menu on screen switch(U.getTaskbarPosition(this)) { case "bottom_left": layoutId = R.layout.start_menu_left; params.gravity = Gravity.BOTTOM | Gravity.LEFT; break; case "bottom_vertical_left": layoutId = R.layout.start_menu_vertical_left; params.gravity = Gravity.BOTTOM | Gravity.LEFT; break; case "bottom_right": layoutId = R.layout.start_menu_right; params.gravity = Gravity.BOTTOM | Gravity.RIGHT; break; case "bottom_vertical_right": layoutId = R.layout.start_menu_vertical_right; params.gravity = Gravity.BOTTOM | Gravity.RIGHT; break; case "top_left": layoutId = R.layout.start_menu_top_left; params.gravity = Gravity.TOP | Gravity.LEFT; break; case "top_vertical_left": layoutId = R.layout.start_menu_vertical_left; params.gravity = Gravity.TOP | Gravity.LEFT; break; case "top_right": layoutId = R.layout.start_menu_top_right; params.gravity = Gravity.TOP | Gravity.RIGHT; break; case "top_vertical_right": layoutId = R.layout.start_menu_vertical_right; params.gravity = Gravity.TOP | Gravity.RIGHT; break; } // Initialize views int theme = 0; switch(pref.getString("theme", "light")) { case "light": theme = R.style.AppTheme; break; case "dark": theme = R.style.AppTheme_Dark; break; } ContextThemeWrapper wrapper = new ContextThemeWrapper(this, theme); layout = (StartMenuLayout) LayoutInflater.from(wrapper).inflate(layoutId, null); startMenu = layout.findViewById(R.id.start_menu); if((shouldShowSearchBox && !hasHardwareKeyboard) || Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP_MR1) layout.viewHandlesBackButton(); boolean scrollbar = pref.getBoolean("scrollbar", false); startMenu.setFastScrollEnabled(scrollbar); startMenu.setFastScrollAlwaysVisible(scrollbar); startMenu.setScrollBarStyle(scrollbar ? 
View.SCROLLBARS_OUTSIDE_INSET : View.SCROLLBARS_INSIDE_OVERLAY); if(pref.getBoolean("transparent_start_menu", false)) startMenu.setBackgroundColor(0); searchView = layout.findViewById(R.id.search); int backgroundTint = U.getBackgroundTint(this); FrameLayout startMenuFrame = layout.findViewById(R.id.start_menu_frame); FrameLayout searchViewLayout = layout.findViewById(R.id.search_view_layout); startMenuFrame.setBackgroundColor(backgroundTint); searchViewLayout.setBackgroundColor(backgroundTint); if(shouldShowSearchBox) { if(!hasHardwareKeyboard) searchView.setIconifiedByDefault(true); searchView.setOnQueryTextListener(new SearchView.OnQueryTextListener() { @Override public boolean onQueryTextSubmit(String query) { if(!hasSubmittedQuery) { ListAdapter adapter = startMenu.getAdapter(); if(adapter != null) { hasSubmittedQuery = true; if(adapter.getCount() > 0) { View view = adapter.getView(0, null, startMenu); LinearLayout layout = view.findViewById(R.id.entry); layout.performClick(); } else { if(pref.getBoolean("hide_taskbar", true) && !FreeformHackHelper.getInstance().isInFreeformWorkspace()) LocalBroadcastManager.getInstance(StartMenuService.this).sendBroadcast(new Intent("com.farmerbb.taskbar.HIDE_TASKBAR")); else LocalBroadcastManager.getInstance(StartMenuService.this).sendBroadcast(new Intent("com.farmerbb.taskbar.HIDE_START_MENU")); Intent intent; if(Patterns.WEB_URL.matcher(query).matches()) { intent = new Intent(Intent.ACTION_VIEW); intent.setData(Uri.parse(URLUtil.guessUrl(query))); intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK); } else { intent = new Intent(Intent.ACTION_WEB_SEARCH); intent.putExtra(SearchManager.QUERY, query); intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK); } if(intent.resolveActivity(getPackageManager()) != null) startActivity(intent); else { Uri uri = new Uri.Builder() .scheme("https") .authority("www.google.com") .path("search") .appendQueryParameter("q", query) .build(); intent = new Intent(Intent.ACTION_VIEW); intent.setData(uri); intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK); try { startActivity(intent); } catch (ActivityNotFoundException e) { /* Gracefully fail */ } } } } } return true; } @Override public boolean onQueryTextChange(String newText) { searchView.setIconified(false); View closeButton = searchView.findViewById(R.id.search_close_btn); if(closeButton != null) closeButton.setVisibility(View.GONE); refreshApps(newText, false); if(Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP_MR1) { new Handler().postDelayed(() -> { EditText editText = searchView.findViewById(R.id.search_src_text); if(editText != null) { editText.requestFocus(); editText.setSelection(editText.getText().length()); } }, 50); } return true; } }); searchView.setOnQueryTextFocusChangeListener((view, b) -> { if(!hasHardwareKeyboard) { ViewGroup.LayoutParams params1 = startMenu.getLayoutParams(); params1.height = getResources().getDimensionPixelSize( b && !U.isServiceRunning(this, "com.farmerbb.secondscreen.service.DisableKeyboardService") ? 
R.dimen.start_menu_height_half : R.dimen.start_menu_height); startMenu.setLayoutParams(params1); } if(!b) { if(hasHardwareKeyboard && Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP_MR1) LocalBroadcastManager.getInstance(StartMenuService.this).sendBroadcast(new Intent("com.farmerbb.taskbar.HIDE_START_MENU")); else { InputMethodManager imm = (InputMethodManager) getSystemService(INPUT_METHOD_SERVICE); imm.hideSoftInputFromWindow(view.getWindowToken(), 0); } } }); searchView.setImeOptions(EditorInfo.IME_ACTION_DONE | EditorInfo.IME_FLAG_NO_EXTRACT_UI); LinearLayout powerButton = layout.findViewById(R.id.power_button); powerButton.setOnClickListener(view -> { int[] location = new int[2]; view.getLocationOnScreen(location); openContextMenu(location); }); powerButton.setOnGenericMotionListener((view, motionEvent) -> { if(motionEvent.getAction() == MotionEvent.ACTION_BUTTON_PRESS && motionEvent.getButtonState() == MotionEvent.BUTTON_SECONDARY) { int[] location = new int[2]; view.getLocationOnScreen(location); openContextMenu(location); } return false; }); searchViewLayout.setOnClickListener(view -> searchView.setIconified(false)); startMenu.setOnItemClickListener((parent, view, position, id) -> { hideStartMenu(); AppEntry entry = (AppEntry) parent.getAdapter().getItem(position); U.launchApp(StartMenuService.this, entry.getPackageName(), entry.getComponentName(), entry.getUserId(StartMenuService.this), null, false, false); }); if(pref.getBoolean("transparent_start_menu", false)) layout.findViewById(R.id.search_view_child_layout).setBackgroundColor(0); } else searchViewLayout.setVisibility(View.GONE); textView = layout.findViewById(R.id.no_apps_found); LocalBroadcastManager lbm = LocalBroadcastManager.getInstance(this); lbm.unregisterReceiver(toggleReceiver); lbm.unregisterReceiver(toggleReceiverAlt); lbm.unregisterReceiver(hideReceiver); lbm.unregisterReceiver(showSpaceReceiver); lbm.unregisterReceiver(hideSpaceReceiver); lbm.registerReceiver(toggleReceiver, new IntentFilter("com.farmerbb.taskbar.TOGGLE_START_MENU")); lbm.registerReceiver(toggleReceiverAlt, new IntentFilter("com.farmerbb.taskbar.TOGGLE_START_MENU_ALT")); lbm.registerReceiver(hideReceiver, new IntentFilter("com.farmerbb.taskbar.HIDE_START_MENU")); lbm.registerReceiver(showSpaceReceiver, new IntentFilter("com.farmerbb.taskbar.SHOW_START_MENU_SPACE")); lbm.registerReceiver(hideSpaceReceiver, new IntentFilter("com.farmerbb.taskbar.HIDE_START_MENU_SPACE")); handler = new Handler(); refreshApps(true); windowManager.addView(layout, params); } private void refreshApps(boolean firstDraw) { refreshApps(null, firstDraw); } private void refreshApps(final String query, final boolean firstDraw) { if(thread != null) thread.interrupt(); handler = new Handler(); thread = new Thread(() -> { if(pm == null) pm = getPackageManager(); UserManager userManager = (UserManager) getSystemService(Context.USER_SERVICE); LauncherApps launcherApps = (LauncherApps) getSystemService(Context.LAUNCHER_APPS_SERVICE); final List<UserHandle> userHandles = userManager.getUserProfiles(); final List<LauncherActivityInfo> unfilteredList = new ArrayList<>(); for(UserHandle handle : userHandles) { unfilteredList.addAll(launcherApps.getActivityList(null, handle)); } final List<LauncherActivityInfo> topAppsList = new ArrayList<>(); final List<LauncherActivityInfo> allAppsList = new ArrayList<>(); final List<LauncherActivityInfo> list = new ArrayList<>(); TopApps topApps = TopApps.getInstance(StartMenuService.this); for(LauncherActivityInfo appInfo : unfilteredList) { 
if(topApps.isTopApp(appInfo.getComponentName().flattenToString()) || topApps.isTopApp(appInfo.getName())) topAppsList.add(appInfo); } Blacklist blacklist = Blacklist.getInstance(StartMenuService.this); for(LauncherActivityInfo appInfo : unfilteredList) { if(!(blacklist.isBlocked(appInfo.getComponentName().flattenToString()) || blacklist.isBlocked(appInfo.getName())) && !(topApps.isTopApp(appInfo.getComponentName().flattenToString()) || topApps.isTopApp(appInfo.getName()))) allAppsList.add(appInfo); } Collections.sort(topAppsList, comparator); Collections.sort(allAppsList, comparator); list.addAll(topAppsList); list.addAll(allAppsList); topAppsList.clear(); allAppsList.clear(); List<LauncherActivityInfo> queryList; if(query == null) queryList = list; else { queryList = new ArrayList<>(); for(LauncherActivityInfo appInfo : list) { if(appInfo.getLabel().toString().toLowerCase().contains(query.toLowerCase())) queryList.add(appInfo); } } // Now that we've generated the list of apps, // we need to determine if we need to redraw the start menu or not boolean shouldRedrawStartMenu = false; List<String> finalApplicationIds = new ArrayList<>(); if(query == null && !firstDraw) { for(LauncherActivityInfo appInfo : queryList) { finalApplicationIds.add(appInfo.getApplicationInfo().packageName); } if(finalApplicationIds.size() != currentStartMenuIds.size()) shouldRedrawStartMenu = true; else { for(int i = 0; i < finalApplicationIds.size(); i++) { if(!finalApplicationIds.get(i).equals(currentStartMenuIds.get(i))) { shouldRedrawStartMenu = true; break; } } } } else shouldRedrawStartMenu = true; if(shouldRedrawStartMenu) { if(query == null) currentStartMenuIds = finalApplicationIds; Drawable defaultIcon = pm.getDefaultActivityIcon(); final List<AppEntry> entries = new ArrayList<>(); for(LauncherActivityInfo appInfo : queryList) { // Attempt to work around frequently reported OutOfMemoryErrors String label; Drawable icon; try { label = appInfo.getLabel().toString(); icon = IconCache.getInstance(StartMenuService.this).getIcon(StartMenuService.this, pm, appInfo); } catch (OutOfMemoryError e) { System.gc(); label = appInfo.getApplicationInfo().packageName; icon = defaultIcon; } AppEntry newEntry = new AppEntry( appInfo.getApplicationInfo().packageName, new ComponentName( appInfo.getApplicationInfo().packageName, appInfo.getName()).flattenToString(), label, icon, false); newEntry.setUserId(userManager.getSerialNumberForUser(appInfo.getUser())); entries.add(newEntry); } handler.post(() -> { String queryText = searchView.getQuery().toString(); if(query == null && queryText.length() == 0 || query != null && query.equals(queryText)) { StartMenuAdapter adapter; SharedPreferences pref = U.getSharedPreferences(StartMenuService.this); if(pref.getString("start_menu_layout", "list").equals("grid")) { startMenu.setNumColumns(3); adapter = new StartMenuAdapter(StartMenuService.this, R.layout.row_alt, entries); } else adapter = new StartMenuAdapter(StartMenuService.this, R.layout.row, entries); int position = startMenu.getFirstVisiblePosition(); startMenu.setAdapter(adapter); startMenu.setSelection(position); if(adapter.getCount() > 0) textView.setText(null); else if(query != null) textView.setText(getString(Patterns.WEB_URL.matcher(query).matches() ? 
R.string.press_enter_alt : R.string.press_enter)); else textView.setText(getString(R.string.nothing_to_see_here)); } }); } }); thread.start(); } private void toggleStartMenu(boolean shouldReset) { if(shouldReset) startMenu.setSelection(0); if(layout.getVisibility() == View.GONE) showStartMenu(); else hideStartMenu(); } @SuppressWarnings("deprecation") @TargetApi(Build.VERSION_CODES.N) private void showStartMenu() { if(layout.getVisibility() == View.GONE) { layout.setOnClickListener(ocl); layout.setVisibility(View.VISIBLE); MenuHelper.getInstance().setStartMenuOpen(true); LocalBroadcastManager.getInstance(this).sendBroadcast(new Intent("com.farmerbb.taskbar.START_MENU_APPEARING")); boolean onHomeScreen = LauncherHelper.getInstance().isOnHomeScreen(); boolean inFreeformMode = FreeformHackHelper.getInstance().isInFreeformWorkspace(); if(!onHomeScreen || inFreeformMode) { Class clazz = inFreeformMode && Build.VERSION.SDK_INT < Build.VERSION_CODES.O ? InvisibleActivityAlt.class : InvisibleActivity.class; Intent intent = new Intent(this, clazz); intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK); intent.addFlags(Intent.FLAG_ACTIVITY_NO_ANIMATION); if(inFreeformMode) { if(clazz.equals(InvisibleActivity.class)) U.launchAppLowerRight(this, intent); else if(clazz.equals(InvisibleActivityAlt.class)) U.launchAppMaximized(this, intent); } else startActivity(intent); } if(searchView.getVisibility() == View.VISIBLE) searchView.requestFocus(); refreshApps(false); } } private void hideStartMenu() { if(layout.getVisibility() == View.VISIBLE) { layout.setOnClickListener(null); layout.setVisibility(View.INVISIBLE); MenuHelper.getInstance().setStartMenuOpen(false); LocalBroadcastManager.getInstance(this).sendBroadcast(new Intent("com.farmerbb.taskbar.START_MENU_DISAPPEARING")); layout.postDelayed(() -> { layout.setVisibility(View.GONE); searchView.setQuery(null, false); searchView.setIconified(true); hasSubmittedQuery = false; InputMethodManager imm = (InputMethodManager) getSystemService(INPUT_METHOD_SERVICE); imm.hideSoftInputFromWindow(layout.getWindowToken(), 0); }, 250); } } @Override public void onDestroy() { super.onDestroy(); if(layout != null) try { windowManager.removeView(layout); } catch (IllegalArgumentException e) { /* Gracefully fail */ } LocalBroadcastManager lbm = LocalBroadcastManager.getInstance(this); lbm.unregisterReceiver(toggleReceiver); lbm.unregisterReceiver(toggleReceiverAlt); lbm.unregisterReceiver(hideReceiver); lbm.unregisterReceiver(showSpaceReceiver); lbm.unregisterReceiver(hideSpaceReceiver); lbm.sendBroadcast(new Intent("com.farmerbb.taskbar.START_MENU_DISAPPEARING")); } @TargetApi(Build.VERSION_CODES.M) @Override public void onConfigurationChanged(Configuration newConfig) { super.onConfigurationChanged(newConfig); if(layout != null) { try { windowManager.removeView(layout); } catch (IllegalArgumentException e) { /* Gracefully fail */ } if(Build.VERSION.SDK_INT < Build.VERSION_CODES.M || Settings.canDrawOverlays(this)) drawStartMenu(); else { SharedPreferences pref = U.getSharedPreferences(this); pref.edit().putBoolean("taskbar_active", false).apply(); stopSelf(); } } } @SuppressWarnings("deprecation") private void openContextMenu(final int[] location) { LocalBroadcastManager.getInstance(this).sendBroadcast(new Intent("com.farmerbb.taskbar.HIDE_START_MENU")); new Handler().postDelayed(() -> { SharedPreferences pref = U.getSharedPreferences(StartMenuService.this); Intent intent = null; switch(pref.getString("theme", "light")) { case "light": intent = new 
Intent(StartMenuService.this, ContextMenuActivity.class); break; case "dark": intent = new Intent(StartMenuService.this, ContextMenuActivityDark.class); break; } if(intent != null) { intent.putExtra("launched_from_start_menu", true); intent.putExtra("is_overflow_menu", true); intent.putExtra("x", location[0]); intent.putExtra("y", location[1]); intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK); } if(Build.VERSION.SDK_INT >= Build.VERSION_CODES.N && FreeformHackHelper.getInstance().isInFreeformWorkspace()) { DisplayMetrics metrics = U.getRealDisplayMetrics(this); if(intent != null && Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) intent.putExtra("context_menu_fix", true); startActivity(intent, U.getActivityOptions(ApplicationType.CONTEXT_MENU).setLaunchBounds(new Rect(0, 0, metrics.widthPixels, metrics.heightPixels)).toBundle()); } else startActivity(intent); }, shouldDelay() ? 100 : 0); } private boolean shouldDelay() { SharedPreferences pref = U.getSharedPreferences(this); return Build.VERSION.SDK_INT >= Build.VERSION_CODES.N && pref.getBoolean("freeform_hack", false) && !FreeformHackHelper.getInstance().isFreeformHackActive(); } }
app/src/main/java/com/farmerbb/taskbar/service/StartMenuService.java
/* Copyright 2016 Braden Farmer * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.farmerbb.taskbar.service; import android.annotation.SuppressLint; import android.annotation.TargetApi; import android.app.SearchManager; import android.app.Service; import android.content.ActivityNotFoundException; import android.content.BroadcastReceiver; import android.content.ComponentName; import android.content.Context; import android.content.Intent; import android.content.IntentFilter; import android.content.SharedPreferences; import android.content.pm.LauncherActivityInfo; import android.content.pm.LauncherApps; import android.content.pm.PackageManager; import android.content.res.Configuration; import android.graphics.PixelFormat; import android.graphics.Rect; import android.graphics.drawable.Drawable; import android.net.Uri; import android.os.Build; import android.os.Handler; import android.os.IBinder; import android.os.UserHandle; import android.os.UserManager; import android.provider.Settings; import android.support.v4.content.LocalBroadcastManager; import android.support.v7.widget.SearchView; import android.util.DisplayMetrics; import android.util.Patterns; import android.view.ContextThemeWrapper; import android.view.Gravity; import android.view.LayoutInflater; import android.view.MotionEvent; import android.view.View; import android.view.ViewGroup; import android.view.WindowManager; import android.view.inputmethod.EditorInfo; import android.view.inputmethod.InputMethodManager; import android.webkit.URLUtil; import android.widget.EditText; import android.widget.FrameLayout; import android.widget.GridView; import android.widget.LinearLayout; import android.widget.ListAdapter; import android.widget.TextView; import com.farmerbb.taskbar.R; import com.farmerbb.taskbar.activity.ContextMenuActivity; import com.farmerbb.taskbar.activity.dark.ContextMenuActivityDark; import com.farmerbb.taskbar.activity.InvisibleActivity; import com.farmerbb.taskbar.activity.InvisibleActivityAlt; import com.farmerbb.taskbar.adapter.StartMenuAdapter; import com.farmerbb.taskbar.util.AppEntry; import com.farmerbb.taskbar.util.ApplicationType; import com.farmerbb.taskbar.util.Blacklist; import com.farmerbb.taskbar.util.FreeformHackHelper; import com.farmerbb.taskbar.util.IconCache; import com.farmerbb.taskbar.util.LauncherHelper; import com.farmerbb.taskbar.util.MenuHelper; import com.farmerbb.taskbar.util.TopApps; import com.farmerbb.taskbar.util.U; import com.farmerbb.taskbar.widget.StartMenuLayout; import java.text.Collator; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.List; public class StartMenuService extends Service { private WindowManager windowManager; private StartMenuLayout layout; private GridView startMenu; private SearchView searchView; private TextView textView; private PackageManager pm; private Handler handler; private Thread thread; private boolean shouldShowSearchBox = false; private boolean hasSubmittedQuery = false; private int layoutId = 
R.layout.start_menu_left; private List<String> currentStartMenuIds = new ArrayList<>(); private View.OnClickListener ocl = view -> toggleStartMenu(true); private BroadcastReceiver toggleReceiver = new BroadcastReceiver() { @Override public void onReceive(Context context, Intent intent) { toggleStartMenu(true); } }; private BroadcastReceiver toggleReceiverAlt = new BroadcastReceiver() { @Override public void onReceive(Context context, Intent intent) { toggleStartMenu(false); } }; private BroadcastReceiver showSpaceReceiver = new BroadcastReceiver() { @Override public void onReceive(Context context, Intent intent) { layout.findViewById(R.id.start_menu_space).setVisibility(View.VISIBLE); } }; private BroadcastReceiver hideSpaceReceiver = new BroadcastReceiver() { @Override public void onReceive(Context context, Intent intent) { layout.findViewById(R.id.start_menu_space).setVisibility(View.GONE); } }; private BroadcastReceiver hideReceiver = new BroadcastReceiver() { @Override public void onReceive(Context context, Intent intent) { hideStartMenu(); } }; private Comparator<LauncherActivityInfo> comparator = (ai1, ai2) -> { String label1; String label2; try { label1 = ai1.getLabel().toString(); label2 = ai2.getLabel().toString(); } catch (OutOfMemoryError e) { System.gc(); label1 = ai1.getApplicationInfo().packageName; label2 = ai2.getApplicationInfo().packageName; } return Collator.getInstance().compare(label1, label2); }; @Override public IBinder onBind(Intent intent) { return null; } @Override public int onStartCommand(Intent intent, int flags, int startId) { return START_STICKY; } @TargetApi(Build.VERSION_CODES.M) @Override public void onCreate() { super.onCreate(); SharedPreferences pref = U.getSharedPreferences(this); if(pref.getBoolean("taskbar_active", false) || LauncherHelper.getInstance().isOnHomeScreen()) { if(Build.VERSION.SDK_INT < Build.VERSION_CODES.M || Settings.canDrawOverlays(this)) drawStartMenu(); else { pref.edit().putBoolean("taskbar_active", false).apply(); stopSelf(); } } else stopSelf(); } @SuppressLint("RtlHardcoded") private void drawStartMenu() { IconCache.getInstance(this).clearCache(); final SharedPreferences pref = U.getSharedPreferences(this); final boolean hasHardwareKeyboard = getResources().getConfiguration().keyboard != Configuration.KEYBOARD_NOKEYS; switch(pref.getString("show_search_bar", "keyboard")) { case "always": shouldShowSearchBox = true; break; case "keyboard": shouldShowSearchBox = hasHardwareKeyboard; break; case "never": shouldShowSearchBox = false; break; } // Initialize layout params windowManager = (WindowManager) getSystemService(WINDOW_SERVICE); U.setCachedRotation(windowManager.getDefaultDisplay().getRotation()); final WindowManager.LayoutParams params = new WindowManager.LayoutParams( WindowManager.LayoutParams.WRAP_CONTENT, WindowManager.LayoutParams.WRAP_CONTENT, U.getOverlayType(), shouldShowSearchBox ? 
0 : WindowManager.LayoutParams.FLAG_NOT_FOCUSABLE | WindowManager.LayoutParams.FLAG_ALT_FOCUSABLE_IM, PixelFormat.TRANSLUCENT); // Determine where to show the start menu on screen switch(U.getTaskbarPosition(this)) { case "bottom_left": layoutId = R.layout.start_menu_left; params.gravity = Gravity.BOTTOM | Gravity.LEFT; break; case "bottom_vertical_left": layoutId = R.layout.start_menu_vertical_left; params.gravity = Gravity.BOTTOM | Gravity.LEFT; break; case "bottom_right": layoutId = R.layout.start_menu_right; params.gravity = Gravity.BOTTOM | Gravity.RIGHT; break; case "bottom_vertical_right": layoutId = R.layout.start_menu_vertical_right; params.gravity = Gravity.BOTTOM | Gravity.RIGHT; break; case "top_left": layoutId = R.layout.start_menu_top_left; params.gravity = Gravity.TOP | Gravity.LEFT; break; case "top_vertical_left": layoutId = R.layout.start_menu_vertical_left; params.gravity = Gravity.TOP | Gravity.LEFT; break; case "top_right": layoutId = R.layout.start_menu_top_right; params.gravity = Gravity.TOP | Gravity.RIGHT; break; case "top_vertical_right": layoutId = R.layout.start_menu_vertical_right; params.gravity = Gravity.TOP | Gravity.RIGHT; break; } // Initialize views int theme = 0; switch(pref.getString("theme", "light")) { case "light": theme = R.style.AppTheme; break; case "dark": theme = R.style.AppTheme_Dark; break; } ContextThemeWrapper wrapper = new ContextThemeWrapper(this, theme); layout = (StartMenuLayout) LayoutInflater.from(wrapper).inflate(layoutId, null); startMenu = layout.findViewById(R.id.start_menu); if((shouldShowSearchBox && !hasHardwareKeyboard) || Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP_MR1) layout.viewHandlesBackButton(); boolean scrollbar = pref.getBoolean("scrollbar", false); startMenu.setFastScrollEnabled(scrollbar); startMenu.setFastScrollAlwaysVisible(scrollbar); startMenu.setScrollBarStyle(scrollbar ? 
View.SCROLLBARS_OUTSIDE_INSET : View.SCROLLBARS_INSIDE_OVERLAY); if(pref.getBoolean("transparent_start_menu", false)) startMenu.setBackgroundColor(0); searchView = layout.findViewById(R.id.search); int backgroundTint = U.getBackgroundTint(this); FrameLayout startMenuFrame = layout.findViewById(R.id.start_menu_frame); FrameLayout searchViewLayout = layout.findViewById(R.id.search_view_layout); startMenuFrame.setBackgroundColor(backgroundTint); searchViewLayout.setBackgroundColor(backgroundTint); if(shouldShowSearchBox) { if(!hasHardwareKeyboard) searchView.setIconifiedByDefault(true); searchView.setOnQueryTextListener(new SearchView.OnQueryTextListener() { @Override public boolean onQueryTextSubmit(String query) { if(!hasSubmittedQuery) { ListAdapter adapter = startMenu.getAdapter(); if(adapter != null) { hasSubmittedQuery = true; if(adapter.getCount() > 0) { View view = adapter.getView(0, null, startMenu); LinearLayout layout = view.findViewById(R.id.entry); layout.performClick(); } else { if(pref.getBoolean("hide_taskbar", true) && !FreeformHackHelper.getInstance().isInFreeformWorkspace()) LocalBroadcastManager.getInstance(StartMenuService.this).sendBroadcast(new Intent("com.farmerbb.taskbar.HIDE_TASKBAR")); else LocalBroadcastManager.getInstance(StartMenuService.this).sendBroadcast(new Intent("com.farmerbb.taskbar.HIDE_START_MENU")); Intent intent; if(Patterns.WEB_URL.matcher(query).matches()) { intent = new Intent(Intent.ACTION_VIEW); intent.setData(Uri.parse(URLUtil.guessUrl(query))); intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK); } else { intent = new Intent(Intent.ACTION_WEB_SEARCH); intent.putExtra(SearchManager.QUERY, query); intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK); } if(intent.resolveActivity(getPackageManager()) != null) startActivity(intent); else { Uri uri = new Uri.Builder() .scheme("https") .authority("www.google.com") .path("search") .appendQueryParameter("q", query) .build(); intent = new Intent(Intent.ACTION_VIEW); intent.setData(uri); intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK); try { startActivity(intent); } catch (ActivityNotFoundException e) { /* Gracefully fail */ } } } } } return true; } @Override public boolean onQueryTextChange(String newText) { searchView.setIconified(false); View closeButton = searchView.findViewById(R.id.search_close_btn); if(closeButton != null) closeButton.setVisibility(View.GONE); refreshApps(newText, false); if(Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP_MR1) { new Handler().postDelayed(() -> { EditText editText = searchView.findViewById(R.id.search_src_text); if(editText != null) { editText.requestFocus(); editText.setSelection(editText.getText().length()); } }, 50); } return true; } }); searchView.setOnQueryTextFocusChangeListener((view, b) -> { if(!hasHardwareKeyboard) { ViewGroup.LayoutParams params1 = startMenu.getLayoutParams(); params1.height = getResources().getDimensionPixelSize( b && !U.isServiceRunning(this, "com.farmerbb.secondscreen.service.DisableKeyboardService") ? 
R.dimen.start_menu_height_half : R.dimen.start_menu_height); startMenu.setLayoutParams(params1); } if(!b) { if(hasHardwareKeyboard && Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP_MR1) LocalBroadcastManager.getInstance(StartMenuService.this).sendBroadcast(new Intent("com.farmerbb.taskbar.HIDE_START_MENU")); else { InputMethodManager imm = (InputMethodManager) getSystemService(INPUT_METHOD_SERVICE); imm.hideSoftInputFromWindow(view.getWindowToken(), 0); } } }); searchView.setImeOptions(EditorInfo.IME_ACTION_DONE | EditorInfo.IME_FLAG_NO_EXTRACT_UI); LinearLayout powerButton = layout.findViewById(R.id.power_button); powerButton.setOnClickListener(view -> { int[] location = new int[2]; view.getLocationOnScreen(location); openContextMenu(location); }); powerButton.setOnGenericMotionListener((view, motionEvent) -> { if(motionEvent.getAction() == MotionEvent.ACTION_BUTTON_PRESS && motionEvent.getButtonState() == MotionEvent.BUTTON_SECONDARY) { int[] location = new int[2]; view.getLocationOnScreen(location); openContextMenu(location); } return false; }); searchViewLayout.setOnClickListener(view -> searchView.setIconified(false)); startMenu.setOnItemClickListener((parent, view, position, id) -> { hideStartMenu(); AppEntry entry = (AppEntry) parent.getAdapter().getItem(position); U.launchApp(StartMenuService.this, entry.getPackageName(), entry.getComponentName(), entry.getUserId(StartMenuService.this), null, false, false); }); if(pref.getBoolean("transparent_start_menu", false)) layout.findViewById(R.id.search_view_child_layout).setBackgroundColor(0); } else searchViewLayout.setVisibility(View.GONE); textView = layout.findViewById(R.id.no_apps_found); LocalBroadcastManager lbm = LocalBroadcastManager.getInstance(this); lbm.unregisterReceiver(toggleReceiver); lbm.unregisterReceiver(toggleReceiverAlt); lbm.unregisterReceiver(hideReceiver); lbm.unregisterReceiver(showSpaceReceiver); lbm.unregisterReceiver(hideSpaceReceiver); lbm.registerReceiver(toggleReceiver, new IntentFilter("com.farmerbb.taskbar.TOGGLE_START_MENU")); lbm.registerReceiver(toggleReceiverAlt, new IntentFilter("com.farmerbb.taskbar.TOGGLE_START_MENU_ALT")); lbm.registerReceiver(hideReceiver, new IntentFilter("com.farmerbb.taskbar.HIDE_START_MENU")); lbm.registerReceiver(showSpaceReceiver, new IntentFilter("com.farmerbb.taskbar.SHOW_START_MENU_SPACE")); lbm.registerReceiver(hideSpaceReceiver, new IntentFilter("com.farmerbb.taskbar.HIDE_START_MENU_SPACE")); handler = new Handler(); refreshApps(true); windowManager.addView(layout, params); } private void refreshApps(boolean firstDraw) { refreshApps(null, firstDraw); } private void refreshApps(final String query, final boolean firstDraw) { if(thread != null) thread.interrupt(); handler = new Handler(); thread = new Thread(() -> { if(pm == null) pm = getPackageManager(); UserManager userManager = (UserManager) getSystemService(Context.USER_SERVICE); LauncherApps launcherApps = (LauncherApps) getSystemService(Context.LAUNCHER_APPS_SERVICE); final List<UserHandle> userHandles = userManager.getUserProfiles(); final List<LauncherActivityInfo> unfilteredList = new ArrayList<>(); for(UserHandle handle : userHandles) { unfilteredList.addAll(launcherApps.getActivityList(null, handle)); } final List<LauncherActivityInfo> topAppsList = new ArrayList<>(); final List<LauncherActivityInfo> allAppsList = new ArrayList<>(); final List<LauncherActivityInfo> list = new ArrayList<>(); TopApps topApps = TopApps.getInstance(StartMenuService.this); for(LauncherActivityInfo appInfo : unfilteredList) { 
if(topApps.isTopApp(appInfo.getComponentName().flattenToString()) || topApps.isTopApp(appInfo.getName())) topAppsList.add(appInfo); } Blacklist blacklist = Blacklist.getInstance(StartMenuService.this); for(LauncherActivityInfo appInfo : unfilteredList) { if(!(blacklist.isBlocked(appInfo.getComponentName().flattenToString()) || blacklist.isBlocked(appInfo.getName())) && !(topApps.isTopApp(appInfo.getComponentName().flattenToString()) || topApps.isTopApp(appInfo.getName()))) allAppsList.add(appInfo); } Collections.sort(topAppsList, comparator); Collections.sort(allAppsList, comparator); list.addAll(topAppsList); list.addAll(allAppsList); topAppsList.clear(); allAppsList.clear(); List<LauncherActivityInfo> queryList; if(query == null) queryList = list; else { queryList = new ArrayList<>(); for(LauncherActivityInfo appInfo : list) { if(appInfo.getLabel().toString().toLowerCase().contains(query.toLowerCase())) queryList.add(appInfo); } } // Now that we've generated the list of apps, // we need to determine if we need to redraw the start menu or not boolean shouldRedrawStartMenu = false; List<String> finalApplicationIds = new ArrayList<>(); if(query == null && !firstDraw) { for(LauncherActivityInfo appInfo : queryList) { finalApplicationIds.add(appInfo.getApplicationInfo().packageName); } if(finalApplicationIds.size() != currentStartMenuIds.size()) shouldRedrawStartMenu = true; else { for(int i = 0; i < finalApplicationIds.size(); i++) { if(!finalApplicationIds.get(i).equals(currentStartMenuIds.get(i))) { shouldRedrawStartMenu = true; break; } } } } else shouldRedrawStartMenu = true; if(shouldRedrawStartMenu) { if(query == null) currentStartMenuIds = finalApplicationIds; Drawable defaultIcon = pm.getDefaultActivityIcon(); final List<AppEntry> entries = new ArrayList<>(); for(LauncherActivityInfo appInfo : queryList) { // Attempt to work around frequently reported OutOfMemoryErrors String label; Drawable icon; try { label = appInfo.getLabel().toString(); icon = IconCache.getInstance(StartMenuService.this).getIcon(StartMenuService.this, pm, appInfo); } catch (OutOfMemoryError e) { System.gc(); label = appInfo.getApplicationInfo().packageName; icon = defaultIcon; } AppEntry newEntry = new AppEntry( appInfo.getApplicationInfo().packageName, new ComponentName( appInfo.getApplicationInfo().packageName, appInfo.getName()).flattenToString(), label, icon, false); newEntry.setUserId(userManager.getSerialNumberForUser(appInfo.getUser())); entries.add(newEntry); } handler.post(() -> { String queryText = searchView.getQuery().toString(); if(query == null && queryText.length() == 0 || query != null && query.equals(queryText)) { StartMenuAdapter adapter; SharedPreferences pref = U.getSharedPreferences(StartMenuService.this); if(pref.getString("start_menu_layout", "list").equals("grid")) { startMenu.setNumColumns(3); adapter = new StartMenuAdapter(StartMenuService.this, R.layout.row_alt, entries); } else adapter = new StartMenuAdapter(StartMenuService.this, R.layout.row, entries); int position = startMenu.getFirstVisiblePosition(); startMenu.setAdapter(adapter); startMenu.setSelection(position); if(adapter.getCount() > 0) textView.setText(null); else if(query != null) textView.setText(getString(Patterns.WEB_URL.matcher(query).matches() ? 
R.string.press_enter_alt : R.string.press_enter)); else textView.setText(getString(R.string.nothing_to_see_here)); } }); } }); thread.start(); } private void toggleStartMenu(boolean shouldReset) { if(layout.getVisibility() == View.GONE) showStartMenu(shouldReset); else hideStartMenu(); } @SuppressWarnings("deprecation") @TargetApi(Build.VERSION_CODES.N) private void showStartMenu(boolean shouldReset) { if(layout.getVisibility() == View.GONE) { if(shouldReset) startMenu.setSelection(0); layout.setOnClickListener(ocl); layout.setVisibility(View.VISIBLE); MenuHelper.getInstance().setStartMenuOpen(true); LocalBroadcastManager.getInstance(this).sendBroadcast(new Intent("com.farmerbb.taskbar.START_MENU_APPEARING")); boolean onHomeScreen = LauncherHelper.getInstance().isOnHomeScreen(); boolean inFreeformMode = FreeformHackHelper.getInstance().isInFreeformWorkspace(); if(!onHomeScreen || inFreeformMode) { Class clazz = inFreeformMode && Build.VERSION.SDK_INT < Build.VERSION_CODES.O ? InvisibleActivityAlt.class : InvisibleActivity.class; Intent intent = new Intent(this, clazz); intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK); intent.addFlags(Intent.FLAG_ACTIVITY_NO_ANIMATION); if(inFreeformMode) { if(clazz.equals(InvisibleActivity.class)) U.launchAppLowerRight(this, intent); else if(clazz.equals(InvisibleActivityAlt.class)) U.launchAppMaximized(this, intent); } else startActivity(intent); } if(searchView.getVisibility() == View.VISIBLE) searchView.requestFocus(); refreshApps(false); } } private void hideStartMenu() { if(layout.getVisibility() == View.VISIBLE) { layout.setOnClickListener(null); layout.setVisibility(View.INVISIBLE); MenuHelper.getInstance().setStartMenuOpen(false); LocalBroadcastManager.getInstance(this).sendBroadcast(new Intent("com.farmerbb.taskbar.START_MENU_DISAPPEARING")); layout.postDelayed(() -> { layout.setVisibility(View.GONE); searchView.setQuery(null, false); searchView.setIconified(true); hasSubmittedQuery = false; InputMethodManager imm = (InputMethodManager) getSystemService(INPUT_METHOD_SERVICE); imm.hideSoftInputFromWindow(layout.getWindowToken(), 0); }, 250); } } @Override public void onDestroy() { super.onDestroy(); if(layout != null) try { windowManager.removeView(layout); } catch (IllegalArgumentException e) { /* Gracefully fail */ } LocalBroadcastManager lbm = LocalBroadcastManager.getInstance(this); lbm.unregisterReceiver(toggleReceiver); lbm.unregisterReceiver(toggleReceiverAlt); lbm.unregisterReceiver(hideReceiver); lbm.unregisterReceiver(showSpaceReceiver); lbm.unregisterReceiver(hideSpaceReceiver); lbm.sendBroadcast(new Intent("com.farmerbb.taskbar.START_MENU_DISAPPEARING")); } @TargetApi(Build.VERSION_CODES.M) @Override public void onConfigurationChanged(Configuration newConfig) { super.onConfigurationChanged(newConfig); if(layout != null) { try { windowManager.removeView(layout); } catch (IllegalArgumentException e) { /* Gracefully fail */ } if(Build.VERSION.SDK_INT < Build.VERSION_CODES.M || Settings.canDrawOverlays(this)) drawStartMenu(); else { SharedPreferences pref = U.getSharedPreferences(this); pref.edit().putBoolean("taskbar_active", false).apply(); stopSelf(); } } } @SuppressWarnings("deprecation") private void openContextMenu(final int[] location) { LocalBroadcastManager.getInstance(this).sendBroadcast(new Intent("com.farmerbb.taskbar.HIDE_START_MENU")); new Handler().postDelayed(() -> { SharedPreferences pref = U.getSharedPreferences(StartMenuService.this); Intent intent = null; switch(pref.getString("theme", "light")) { case "light": intent = 
new Intent(StartMenuService.this, ContextMenuActivity.class); break; case "dark": intent = new Intent(StartMenuService.this, ContextMenuActivityDark.class); break; } if(intent != null) { intent.putExtra("launched_from_start_menu", true); intent.putExtra("is_overflow_menu", true); intent.putExtra("x", location[0]); intent.putExtra("y", location[1]); intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK); } if(Build.VERSION.SDK_INT >= Build.VERSION_CODES.N && FreeformHackHelper.getInstance().isInFreeformWorkspace()) { DisplayMetrics metrics = U.getRealDisplayMetrics(this); if(intent != null && Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) intent.putExtra("context_menu_fix", true); startActivity(intent, U.getActivityOptions(ApplicationType.CONTEXT_MENU).setLaunchBounds(new Rect(0, 0, metrics.widthPixels, metrics.heightPixels)).toBundle()); } else startActivity(intent); }, shouldDelay() ? 100 : 0); } private boolean shouldDelay() { SharedPreferences pref = U.getSharedPreferences(this); return Build.VERSION.SDK_INT >= Build.VERSION_CODES.N && pref.getBoolean("freeform_hack", false) && !FreeformHackHelper.getInstance().isFreeformHackActive(); } }
Move start menu reset out of showStartMenu call
app/src/main/java/com/farmerbb/taskbar/service/StartMenuService.java
Move start menu reset out of showStartMenu call
<ide><path>app/src/main/java/com/farmerbb/taskbar/service/StartMenuService.java <ide> } <ide> <ide> private void toggleStartMenu(boolean shouldReset) { <add> if(shouldReset) startMenu.setSelection(0); <add> <ide> if(layout.getVisibility() == View.GONE) <del> showStartMenu(shouldReset); <add> showStartMenu(); <ide> else <ide> hideStartMenu(); <ide> } <ide> <ide> @SuppressWarnings("deprecation") <ide> @TargetApi(Build.VERSION_CODES.N) <del> private void showStartMenu(boolean shouldReset) { <add> private void showStartMenu() { <ide> if(layout.getVisibility() == View.GONE) { <del> if(shouldReset) startMenu.setSelection(0); <del> <ide> layout.setOnClickListener(ocl); <ide> layout.setVisibility(View.VISIBLE); <ide>
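Note on the change above: before this commit the selection reset lived inside showStartMenu(), so it only ran when the toggle actually opened the menu; closing the menu via the same toggle left the grid scrolled to its old position. The patch moves the reset into toggleStartMenu() and drops the now-unused parameter. A trimmed sketch of the resulting flow, with comments added here for illustration (they are not part of the commit):

// Sketch of the patched methods, reduced to the lines relevant to the reset.
private void toggleStartMenu(boolean shouldReset) {
    // The reset now happens on every toggle request, whether the menu is about
    // to be shown or hidden, so the next open always starts at the top.
    if (shouldReset) startMenu.setSelection(0);

    if (layout.getVisibility() == View.GONE)
        showStartMenu();   // signature no longer carries the shouldReset flag
    else
        hideStartMenu();
}

private void showStartMenu() {
    if (layout.getVisibility() == View.GONE) {
        layout.setOnClickListener(ocl);
        layout.setVisibility(View.VISIBLE);
        // ... unchanged remainder of the original method ...
    }
}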
Java
apache-2.0
d96360abc0f40b5c72ed7c8b6a7f6ee4d6eea3cf
0
yombunker/SpanEZ
package com.bunk3r.spanez.locators; import org.junit.Test; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; /** * Part of SpanEZ * Created by joragu on 1/13/2017. */ public class TargetRangeTest { @Test public void valid_range() { final int rangeStart = 0; final int rangeEnd = 10; TargetRange targetRange = TargetRange.from(rangeStart, rangeEnd); assertNotNull("Failed to create range", targetRange); } @Test(expected = IllegalArgumentException.class) public void exception_thrown_when_range_is_invalid() { final int rangeStart = 10; final int rangeEnd = 9; TargetRange.from(rangeStart, rangeEnd); } @Test public void hashcode_must_match() { final int HASH_CODE_RESULT = 36; final int rangeStart = 1; final int rangeEnd = 5; TargetRange targetRange = TargetRange.from(rangeStart, rangeEnd); final int hashCode = targetRange.hashCode(); assertTrue("The hashcode function has changed", hashCode == HASH_CODE_RESULT); } @Test public void hashcode_must_not_match() { final int rangeStart = 1; final int rangeEnd = 5; TargetRange targetRange = TargetRange.from(rangeStart, rangeEnd); TargetRange otherRange = TargetRange.from(rangeStart, rangeStart); final int hashCode = targetRange.hashCode(); final int otherHashCode = otherRange.hashCode(); assertFalse("The hashcode function has changed", hashCode == otherHashCode); } @Test public void target_range_must_be_equal() { final int rangeStart = 1; final int rangeEnd = 5; TargetRange targetRange = TargetRange.from(rangeStart, rangeEnd); TargetRange expectedResult = TargetRange.from(rangeStart, rangeEnd); assertTrue("The two objects should be equal", expectedResult.equals(targetRange)); } @SuppressWarnings("UnnecessaryLocalVariable") @Test public void target_range_compare_same_instance() { final int rangeStart = 1; final int rangeEnd = 5; TargetRange targetRange = TargetRange.from(rangeStart, rangeEnd); TargetRange expectedResult = targetRange; assertTrue("The two objects should be equal", expectedResult.equals(targetRange)); } @Test public void target_range_must_not_be_equal() { final int rangeStart = 1; final int rangeEnd = 5; TargetRange targetRange = TargetRange.from(rangeStart, rangeEnd); TargetRange notExpectedResult = TargetRange.from(0, 0); assertFalse("The two objects should be different", notExpectedResult.equals(targetRange)); } @Test public void target_range_start_must_not_be_equal() { final int rangeStart = 1; final int rangeEnd = 5; TargetRange targetRange = TargetRange.from(rangeStart, rangeEnd); TargetRange notExpectedResult = TargetRange.from(rangeEnd, rangeEnd); assertFalse("The two objects should be different", notExpectedResult.equals(targetRange)); } // @SuppressWarnings("EqualsBetweenInconvertibleTypes") @Test public void target_range_must_not_be_equal_they_are_different_types() { final int rangeStart = 1; final int rangeEnd = 5; TargetRange targetRange = TargetRange.from(rangeStart, rangeEnd); Object notExpectedResult = new Object(); assertFalse("The two objects should be different", notExpectedResult.equals(targetRange)); } @Test public void target_range_end_must_not_be_equal() { final int rangeStart = 1; final int rangeEnd = 5; TargetRange targetRange = TargetRange.from(rangeStart, rangeEnd); TargetRange notExpectedResult = TargetRange.from(rangeStart, rangeStart); assertFalse("The two objects should be different", notExpectedResult.equals(targetRange)); } @Test public void getters_are_just_for_read_only() { final int rangeStart = 1; final int rangeEnd = 5; 
TargetRange targetRange = TargetRange.from(rangeStart, rangeEnd); assertTrue("The start range has changed", targetRange.getStart() == rangeStart); assertTrue("The end range has changed", targetRange.getEnd() == rangeEnd); } }
library/src/test/java/com/bunk3r/spanez/locators/TargetRangeTest.java
package com.bunk3r.spanez.locators; import org.junit.Test; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; /** * Part of SpanEZ * Created by joragu on 1/13/2017. */ public class TargetRangeTest { @Test public void valid_range() { final int rangeStart = 0; final int rangeEnd = 10; TargetRange targetRange = TargetRange.from(rangeStart, rangeEnd); assertNotNull("Failed to create range", targetRange); } @Test(expected = IllegalArgumentException.class) public void exception_thrown_when_range_is_invalid() { final int rangeStart = 10; final int rangeEnd = 9; TargetRange.from(rangeStart, rangeEnd); } @Test public void hashcode_must_match() { final int HASH_CODE_RESULT = 36; final int rangeStart = 1; final int rangeEnd = 5; TargetRange targetRange = TargetRange.from(rangeStart, rangeEnd); final int hashCode = targetRange.hashCode(); assertTrue("The hashcode function has changed", hashCode == HASH_CODE_RESULT); } @Test public void hashcode_must_not_match() { final int rangeStart = 1; final int rangeEnd = 5; TargetRange targetRange = TargetRange.from(rangeStart, rangeEnd); TargetRange otherRange = TargetRange.from(rangeStart, rangeStart); final int hashCode = targetRange.hashCode(); final int otherHashCode = otherRange.hashCode(); assertFalse("The hashcode function has changed", hashCode == otherHashCode); } @Test public void target_range_must_be_equal() { final int rangeStart = 1; final int rangeEnd = 5; TargetRange targetRange = TargetRange.from(rangeStart, rangeEnd); TargetRange expectedResult = TargetRange.from(rangeStart, rangeEnd); assertTrue("The two objects should be equal", expectedResult.equals(targetRange)); } @Test public void target_range_must_not_be_equal() { final int rangeStart = 1; final int rangeEnd = 5; TargetRange targetRange = TargetRange.from(rangeStart, rangeEnd); TargetRange expectedResult = TargetRange.from(rangeStart, rangeStart); assertFalse("The two objects should be different", expectedResult.equals(targetRange)); } @Test public void getters_are_just_for_read_only() { final int rangeStart = 1; final int rangeEnd = 5; TargetRange targetRange = TargetRange.from(rangeStart, rangeEnd); assertTrue("The start range has changed", targetRange.getStart() == rangeStart); assertTrue("The end range has changed", targetRange.getEnd() == rangeEnd); } }
- Adding missing if branches for TargetRange
library/src/test/java/com/bunk3r/spanez/locators/TargetRangeTest.java
- Adding missing if branches for TargetRange
<ide><path>library/src/test/java/com/bunk3r/spanez/locators/TargetRangeTest.java <ide> assertTrue("The two objects should be equal", expectedResult.equals(targetRange)); <ide> } <ide> <add> <add> @SuppressWarnings("UnnecessaryLocalVariable") <add> @Test <add> public void target_range_compare_same_instance() { <add> final int rangeStart = 1; <add> final int rangeEnd = 5; <add> <add> TargetRange targetRange = TargetRange.from(rangeStart, rangeEnd); <add> TargetRange expectedResult = targetRange; <add> <add> assertTrue("The two objects should be equal", expectedResult.equals(targetRange)); <add> } <add> <ide> @Test <ide> public void target_range_must_not_be_equal() { <ide> final int rangeStart = 1; <ide> final int rangeEnd = 5; <ide> <ide> TargetRange targetRange = TargetRange.from(rangeStart, rangeEnd); <del> TargetRange expectedResult = TargetRange.from(rangeStart, rangeStart); <add> TargetRange notExpectedResult = TargetRange.from(0, 0); <ide> <del> assertFalse("The two objects should be different", expectedResult.equals(targetRange)); <add> assertFalse("The two objects should be different", notExpectedResult.equals(targetRange)); <add> } <add> <add> @Test <add> public void target_range_start_must_not_be_equal() { <add> final int rangeStart = 1; <add> final int rangeEnd = 5; <add> <add> TargetRange targetRange = TargetRange.from(rangeStart, rangeEnd); <add> TargetRange notExpectedResult = TargetRange.from(rangeEnd, rangeEnd); <add> <add> assertFalse("The two objects should be different", notExpectedResult.equals(targetRange)); <add> } <add> <add>// @SuppressWarnings("EqualsBetweenInconvertibleTypes") <add> @Test <add> public void target_range_must_not_be_equal_they_are_different_types() { <add> final int rangeStart = 1; <add> final int rangeEnd = 5; <add> <add> TargetRange targetRange = TargetRange.from(rangeStart, rangeEnd); <add> Object notExpectedResult = new Object(); <add> <add> assertFalse("The two objects should be different", notExpectedResult.equals(targetRange)); <ide> } <ide> <ide> @Test
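The tests added in this commit each pin one branch of TargetRange.equals(): comparison against the same instance, against an unrelated type, against a range with a different start, and against one with a different end, plus an extra hashCode() mismatch case. TargetRange itself is not included in this record, so the snippet below is only an assumed sketch of the equality contract those branches (and the expected hash of 36 for the range 1..5) imply; the real class in com.bunk3r.spanez.locators may differ in detail:

// Hypothetical sketch, not the actual TargetRange source.
@Override
public boolean equals(Object o) {
    if (this == o) return true;                     // target_range_compare_same_instance
    if (!(o instanceof TargetRange)) return false;  // target_range_must_not_be_equal_they_are_different_types
    TargetRange other = (TargetRange) o;
    if (start != other.start) return false;         // target_range_start_must_not_be_equal
    return end == other.end;                        // target_range_end_must_not_be_equal
}

@Override
public int hashCode() {
    return 31 * start + end;                        // 31 * 1 + 5 == 36, matching hashcode_must_match
}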
JavaScript
mit
ca2e33f55239a3eb67e69373d2956eb759aeb13b
0
Verurteilt/core,ngx-translate/core,Verurteilt/core,ocombe/ng2-translate,ngx-translate/core,ocombe/ng2-translate
/** * Adapted from angular2-webpack-starter */ const helpers = require('./helpers'), webpack = require('webpack'), LoaderOptionsPlugin = require('webpack/lib/LoaderOptionsPlugin'); /** * Webpack Plugins */ module.exports = { /** * Source map for Karma from the help of karma-sourcemap-loader & karma-webpack * * Do not change, leave as is or it wont work. * See: https://github.com/webpack/karma-webpack#source-maps */ devtool: 'inline-source-map', resolve: { extensions: ['.ts', '.js'], modules: [helpers.root('src'), 'node_modules'] }, module: { rules: [{ enforce: 'pre', test: /\.ts$/, loader: 'tslint', exclude: [helpers.root('node_modules')] }, { enforce: 'pre', test: /\.js$/, loader: 'source-map-loader', exclude: [ // these packages have problems with their sourcemaps helpers.root('node_modules/rxjs'), helpers.root('node_modules/@angular') ] }, { test: /\.ts$/, loader: 'awesome-typescript-loader', query: { // use inline sourcemaps for "karma-remap-coverage" reporter sourceMap: false, inlineSourceMap: true, module: "commonjs", removeComments: true }, exclude: [/\.e2e\.ts$/] }, { enforce: 'post', test: /\.(js|ts)$/, loader: 'istanbul-instrumenter-loader', include: helpers.root('src'), exclude: [/\.spec\.ts$/, /\.e2e\.ts$/, /node_modules/] }], }, plugins: [ // fix the warning in ./~/@angular/core/src/linker/system_js_ng_module_factory_loader.js new webpack.ContextReplacementPlugin( /angular(\\|\/)core(\\|\/)(esm(\\|\/)src|src)(\\|\/)linker/, helpers.root('./src') ), new LoaderOptionsPlugin({ debug: true, options: { /** * Static analysis linter for TypeScript advanced options configuration * Description: An extensible linter for the TypeScript language. * * See: https://github.com/wbuchwalter/tslint-loader */ tslint: { emitErrors: false, failOnHint: false, resourcePath: 'src' }, } }) ] };
config/webpack.test.js
/** * Adapted from angular2-webpack-starter */ const helpers = require('./helpers'), LoaderOptionsPlugin = require('webpack/lib/LoaderOptionsPlugin'); /** * Webpack Plugins */ module.exports = { /** * Source map for Karma from the help of karma-sourcemap-loader & karma-webpack * * Do not change, leave as is or it wont work. * See: https://github.com/webpack/karma-webpack#source-maps */ devtool: 'inline-source-map', resolve: { extensions: ['.ts', '.js'], modules: [helpers.root('src'), 'node_modules'] }, module: { rules: [{ enforce: 'pre', test: /\.ts$/, loader: 'tslint', exclude: [helpers.root('node_modules')] }, { enforce: 'pre', test: /\.js$/, loader: 'source-map-loader', exclude: [ // these packages have problems with their sourcemaps helpers.root('node_modules/rxjs'), helpers.root('node_modules/@angular') ] }, { test: /\.ts$/, loader: 'awesome-typescript-loader', query: { // use inline sourcemaps for "karma-remap-coverage" reporter sourceMap: false, inlineSourceMap: true, module: "commonjs", removeComments: true }, exclude: [/\.e2e\.ts$/] }, { enforce: 'post', test: /\.(js|ts)$/, loader: 'istanbul-instrumenter-loader', include: helpers.root('src'), exclude: [/\.spec\.ts$/, /\.e2e\.ts$/, /node_modules/] }], }, plugins: [ // fix the warning in ./~/@angular/core/src/linker/system_js_ng_module_factory_loader.js new webpack.ContextReplacementPlugin( /angular(\\|\/)core(\\|\/)(esm(\\|\/)src|src)(\\|\/)linker/, helpers.root('./src') ), new LoaderOptionsPlugin({ debug: true, options: { /** * Static analysis linter for TypeScript advanced options configuration * Description: An extensible linter for the TypeScript language. * * See: https://github.com/wbuchwalter/tslint-loader */ tslint: { emitErrors: false, failOnHint: false, resourcePath: 'src' }, } }) ] };
chore: fixed tests config
config/webpack.test.js
chore: fixed tests config
<ide><path>config/webpack.test.js <ide> */ <ide> <ide> const helpers = require('./helpers'), <add> webpack = require('webpack'), <ide> LoaderOptionsPlugin = require('webpack/lib/LoaderOptionsPlugin'); <ide> <ide> /**
Java
apache-2.0
424d8ea3e51d967b7e75681eca6fd0f9c70d244e
0
hitakaken/novbank-store
package com.novbank.store.service.metadata.schema; /** * Created by HP on 2015/4/20. */ public enum PropertyType { STATIC_PROPERTY, DYNAMIC_PROPERTY, DIRECT_RELATION, DYNAMIC_RELATION, }
src/main/java/com/novbank/store/service/metadata/schema/PropertyType.java
package com.novbank.store.service.metadata.schema; /** * Created by HP on 2015/4/20. */ public enum PropertyType { STATIC_PROPERTY, STATIC_RELATIONSHIP, DYNAMIC_PROPERTY, DYNAMIC_RELATIONSHIP, }
Initial Commit
src/main/java/com/novbank/store/service/metadata/schema/PropertyType.java
Initial Commit
<ide><path>src/main/java/com/novbank/store/service/metadata/schema/PropertyType.java <ide> */ <ide> public enum PropertyType { <ide> STATIC_PROPERTY, <del> STATIC_RELATIONSHIP, <ide> DYNAMIC_PROPERTY, <del> DYNAMIC_RELATIONSHIP, <add> DIRECT_RELATION, <add> DYNAMIC_RELATION, <ide> }
Java
apache-2.0
468eef3202525272bf12c4558c5c94e9bf92649b
0
Sch3lp/entmob_examples
package be.pxl.spring.rest.fallout.logging; import org.springframework.jms.annotation.JmsListener; import org.springframework.stereotype.Component; import javax.jms.Message; @Component public class JMSMessageConsumer { @JmsListener(destination = "ItemLogQueue") public void onMessage(Message message) { //TODO: save to log table or something System.out.println("Hi, I'm JMSMessageConsumer and I got " + message); } }
src/main/java/be/pxl/spring/rest/fallout/logging/JMSMessageConsumer.java
package be.pxl.spring.rest.fallout.logging; import org.springframework.jms.annotation.JmsListener; import org.springframework.stereotype.Component; import javax.jms.Message; @Component public class JMSMessageConsumer { @JmsListener(destination = "ItemLogQueue") public void onMessage(Message message) { System.out.println("Hi, I'm JMSMessageConsumer and I got " + message); } }
Added TODO
src/main/java/be/pxl/spring/rest/fallout/logging/JMSMessageConsumer.java
Added TODO
<ide><path>src/main/java/be/pxl/spring/rest/fallout/logging/JMSMessageConsumer.java <ide> <ide> @JmsListener(destination = "ItemLogQueue") <ide> public void onMessage(Message message) { <add> //TODO: save to log table or something <ide> System.out.println("Hi, I'm JMSMessageConsumer and I got " + message); <ide> } <ide> }
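The commit above only records the intent as a TODO ("save to log table or something"). Purely as an illustration of one way that TODO might later be filled in; the item_log table assumed below is invented for this sketch and is not part of the repository:

package be.pxl.spring.rest.fallout.logging;

import javax.jms.JMSException;
import javax.jms.Message;
import javax.jms.TextMessage;

import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jms.annotation.JmsListener;
import org.springframework.stereotype.Component;

@Component
public class JMSMessageConsumer {

    private final JdbcTemplate jdbcTemplate;

    public JMSMessageConsumer(JdbcTemplate jdbcTemplate) {
        this.jdbcTemplate = jdbcTemplate;
    }

    @JmsListener(destination = "ItemLogQueue")
    public void onMessage(Message message) throws JMSException {
        // Persist the payload instead of only printing it.
        String body = (message instanceof TextMessage)
                ? ((TextMessage) message).getText()
                : message.toString();
        // Hypothetical item_log table with a single 'payload' column.
        jdbcTemplate.update("INSERT INTO item_log (payload) VALUES (?)", body);
    }
}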
Java
apache-2.0
1afc18be518f6e8eb9d574e0e141fce7a045dffb
0
dibagga/selenium,oddui/selenium,lmtierney/selenium,asashour/selenium,Ardesco/selenium,SeleniumHQ/selenium,dibagga/selenium,twalpole/selenium,tbeadle/selenium,juangj/selenium,krmahadevan/selenium,dibagga/selenium,kalyanjvn1/selenium,HtmlUnit/selenium,Jarob22/selenium,5hawnknight/selenium,kalyanjvn1/selenium,davehunt/selenium,Tom-Trumper/selenium,SeleniumHQ/selenium,GorK-ChO/selenium,lmtierney/selenium,GorK-ChO/selenium,Jarob22/selenium,mojwang/selenium,carlosroh/selenium,xmhubj/selenium,Dude-X/selenium,dibagga/selenium,Dude-X/selenium,Dude-X/selenium,gurayinan/selenium,5hawnknight/selenium,HtmlUnit/selenium,5hawnknight/selenium,markodolancic/selenium,mojwang/selenium,GorK-ChO/selenium,Ardesco/selenium,HtmlUnit/selenium,gurayinan/selenium,titusfortner/selenium,asolntsev/selenium,markodolancic/selenium,mach6/selenium,DrMarcII/selenium,bayandin/selenium,mach6/selenium,Tom-Trumper/selenium,asashour/selenium,HtmlUnit/selenium,lmtierney/selenium,mach6/selenium,joshbruning/selenium,mojwang/selenium,asolntsev/selenium,Herst/selenium,DrMarcII/selenium,dibagga/selenium,SeleniumHQ/selenium,jabbrwcky/selenium,xsyntrex/selenium,tbeadle/selenium,markodolancic/selenium,GorK-ChO/selenium,Jarob22/selenium,kalyanjvn1/selenium,jsakamoto/selenium,juangj/selenium,asashour/selenium,HtmlUnit/selenium,Herst/selenium,twalpole/selenium,dibagga/selenium,markodolancic/selenium,Dude-X/selenium,titusfortner/selenium,xmhubj/selenium,Jarob22/selenium,DrMarcII/selenium,Dude-X/selenium,gurayinan/selenium,gurayinan/selenium,asolntsev/selenium,titusfortner/selenium,davehunt/selenium,TikhomirovSergey/selenium,bayandin/selenium,joshmgrant/selenium,markodolancic/selenium,jabbrwcky/selenium,joshbruning/selenium,xsyntrex/selenium,HtmlUnit/selenium,markodolancic/selenium,5hawnknight/selenium,gurayinan/selenium,chrisblock/selenium,twalpole/selenium,tbeadle/selenium,asashour/selenium,twalpole/selenium,juangj/selenium,Herst/selenium,lmtierney/selenium,DrMarcII/selenium,Tom-Trumper/selenium,twalpole/selenium,twalpole/selenium,asolntsev/selenium,Dude-X/selenium,valfirst/selenium,jsakamoto/selenium,titusfortner/selenium,SeleniumHQ/selenium,oddui/selenium,oddui/selenium,mojwang/selenium,bayandin/selenium,twalpole/selenium,joshmgrant/selenium,valfirst/selenium,gurayinan/selenium,asashour/selenium,valfirst/selenium,chrisblock/selenium,jsakamoto/selenium,TikhomirovSergey/selenium,asolntsev/selenium,TikhomirovSergey/selenium,dibagga/selenium,xsyntrex/selenium,chrisblock/selenium,carlosroh/selenium,SeleniumHQ/selenium,krmahadevan/selenium,DrMarcII/selenium,davehunt/selenium,davehunt/selenium,jabbrwcky/selenium,5hawnknight/selenium,valfirst/selenium,valfirst/selenium,xsyntrex/selenium,bayandin/selenium,TikhomirovSergey/selenium,valfirst/selenium,asashour/selenium,SeleniumHQ/selenium,valfirst/selenium,Herst/selenium,titusfortner/selenium,Tom-Trumper/selenium,GorK-ChO/selenium,lmtierney/selenium,valfirst/selenium,Herst/selenium,asolntsev/selenium,xsyntrex/selenium,joshbruning/selenium,jabbrwcky/selenium,joshmgrant/selenium,jabbrwcky/selenium,Herst/selenium,5hawnknight/selenium,bayandin/selenium,asashour/selenium,Ardesco/selenium,SeleniumHQ/selenium,mojwang/selenium,oddui/selenium,juangj/selenium,HtmlUnit/selenium,jabbrwcky/selenium,joshmgrant/selenium,5hawnknight/selenium,krmahadevan/selenium,DrMarcII/selenium,juangj/selenium,asolntsev/selenium,bayandin/selenium,krmahadevan/selenium,tbeadle/selenium,GorK-ChO/selenium,carlosroh/selenium,xmhubj/selenium,SeleniumHQ/selenium,Ardesco/selenium,kalyanjvn1/selenium,kalyanjvn1/selenium,davehunt/selenium,tw
alpole/selenium,kalyanjvn1/selenium,chrisblock/selenium,joshmgrant/selenium,oddui/selenium,chrisblock/selenium,lmtierney/selenium,tbeadle/selenium,joshbruning/selenium,5hawnknight/selenium,Jarob22/selenium,jsakamoto/selenium,valfirst/selenium,5hawnknight/selenium,Ardesco/selenium,Dude-X/selenium,mach6/selenium,valfirst/selenium,xmhubj/selenium,Jarob22/selenium,bayandin/selenium,xsyntrex/selenium,GorK-ChO/selenium,asashour/selenium,xsyntrex/selenium,chrisblock/selenium,Tom-Trumper/selenium,tbeadle/selenium,DrMarcII/selenium,oddui/selenium,HtmlUnit/selenium,juangj/selenium,krmahadevan/selenium,dibagga/selenium,SeleniumHQ/selenium,xmhubj/selenium,SeleniumHQ/selenium,jabbrwcky/selenium,Herst/selenium,joshbruning/selenium,krmahadevan/selenium,titusfortner/selenium,gurayinan/selenium,DrMarcII/selenium,tbeadle/selenium,xsyntrex/selenium,jsakamoto/selenium,carlosroh/selenium,Herst/selenium,TikhomirovSergey/selenium,lmtierney/selenium,mach6/selenium,titusfortner/selenium,joshbruning/selenium,mach6/selenium,joshmgrant/selenium,juangj/selenium,dibagga/selenium,titusfortner/selenium,oddui/selenium,titusfortner/selenium,krmahadevan/selenium,davehunt/selenium,SeleniumHQ/selenium,tbeadle/selenium,davehunt/selenium,titusfortner/selenium,markodolancic/selenium,TikhomirovSergey/selenium,mojwang/selenium,xmhubj/selenium,kalyanjvn1/selenium,bayandin/selenium,mach6/selenium,jsakamoto/selenium,Dude-X/selenium,Jarob22/selenium,chrisblock/selenium,joshmgrant/selenium,joshbruning/selenium,lmtierney/selenium,jabbrwcky/selenium,mojwang/selenium,carlosroh/selenium,chrisblock/selenium,Ardesco/selenium,carlosroh/selenium,juangj/selenium,Ardesco/selenium,Tom-Trumper/selenium,titusfortner/selenium,GorK-ChO/selenium,DrMarcII/selenium,HtmlUnit/selenium,jsakamoto/selenium,oddui/selenium,Tom-Trumper/selenium,joshmgrant/selenium,xmhubj/selenium,carlosroh/selenium,mojwang/selenium,asolntsev/selenium,jsakamoto/selenium,tbeadle/selenium,TikhomirovSergey/selenium,HtmlUnit/selenium,lmtierney/selenium,krmahadevan/selenium,krmahadevan/selenium,joshbruning/selenium,Jarob22/selenium,joshmgrant/selenium,joshmgrant/selenium,markodolancic/selenium,joshmgrant/selenium,gurayinan/selenium,carlosroh/selenium,xsyntrex/selenium,asashour/selenium,twalpole/selenium,mach6/selenium,davehunt/selenium,jsakamoto/selenium,davehunt/selenium,oddui/selenium,markodolancic/selenium,Ardesco/selenium,gurayinan/selenium,Jarob22/selenium,juangj/selenium,joshbruning/selenium,Tom-Trumper/selenium,kalyanjvn1/selenium,Herst/selenium,carlosroh/selenium,Dude-X/selenium,xmhubj/selenium,asolntsev/selenium,TikhomirovSergey/selenium,chrisblock/selenium,TikhomirovSergey/selenium,bayandin/selenium,GorK-ChO/selenium,kalyanjvn1/selenium,mojwang/selenium,Tom-Trumper/selenium,jabbrwcky/selenium,Ardesco/selenium,mach6/selenium,valfirst/selenium,xmhubj/selenium
// Licensed to the Software Freedom Conservancy (SFC) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The SFC licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package org.openqa.grid.web.servlet.handler; import com.google.gson.Gson; import com.google.gson.JsonObject; import com.google.gson.JsonParser; import com.google.gson.JsonSyntaxException; import org.openqa.grid.common.exception.GridException; import org.openqa.grid.internal.ExternalSessionKey; import org.openqa.grid.internal.Registry; import org.openqa.grid.internal.TestSession; import org.openqa.selenium.remote.BeanToJsonConverter; import org.openqa.selenium.remote.JsonToBeanConverter; import java.util.Map; import javax.servlet.http.HttpServletRequest; public class WebDriverRequest extends SeleniumBasedRequest { public WebDriverRequest(HttpServletRequest httpServletRequest, Registry registry) { super(httpServletRequest, registry); } @Override public RequestType extractRequestType() { if ("/session".equals(getPathInfo())) { return RequestType.START_SESSION; } else if (getMethod().equalsIgnoreCase("DELETE")) { ExternalSessionKey externalKey = ExternalSessionKey.fromWebDriverRequest(getPathInfo()); if (externalKey != null && getPathInfo().endsWith("/session/" + externalKey.getKey())) { return RequestType.STOP_SESSION; } } return RequestType.REGULAR; } @Override public ExternalSessionKey extractSession() { if (getRequestType() == RequestType.START_SESSION) { throw new IllegalAccessError("Cannot call that method of a new session request."); } String path = getPathInfo(); return ExternalSessionKey.fromWebDriverRequest(path); } @Override public Map<String, Object> extractDesiredCapability() { String json = getBody(); try { JsonObject map = new JsonParser().parse(json).getAsJsonObject(); // Current W3C has required / desired capabilities wrapped in a 'capabilites' object. // This will need to be updated if/when https://github.com/w3c/webdriver/pull/327 gets merged if (map.has("capabilities")) { return new JsonToBeanConverter().convert(Map.class, map.getAsJsonObject("capabilities").getAsJsonObject("desiredCapabilities")); } JsonObject dc = map.get("desiredCapabilities").getAsJsonObject(); return new JsonToBeanConverter().convert(Map.class, dc); } catch (Exception e) { throw new GridException("Cannot extract a capabilities from the request: " + json, e); } } @Override public String getNewSessionRequestedCapability(TestSession session) { JsonObject c = new JsonObject(); c.add("desiredCapabilities", new BeanToJsonConverter().convertObject(session.getRequestedCapabilities())); return new Gson().toJson(c); } }
java/server/src/org/openqa/grid/web/servlet/handler/WebDriverRequest.java
// Licensed to the Software Freedom Conservancy (SFC) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The SFC licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package org.openqa.grid.web.servlet.handler; import com.google.gson.Gson; import com.google.gson.JsonObject; import com.google.gson.JsonParser; import com.google.gson.JsonSyntaxException; import org.openqa.grid.common.exception.GridException; import org.openqa.grid.internal.ExternalSessionKey; import org.openqa.grid.internal.Registry; import org.openqa.grid.internal.TestSession; import org.openqa.selenium.remote.BeanToJsonConverter; import org.openqa.selenium.remote.JsonToBeanConverter; import java.util.Map; import javax.servlet.http.HttpServletRequest; public class WebDriverRequest extends SeleniumBasedRequest { public WebDriverRequest(HttpServletRequest httpServletRequest, Registry registry) { super(httpServletRequest, registry); } @Override public RequestType extractRequestType() { if ("/session".equals(getPathInfo())) { return RequestType.START_SESSION; } else if (getMethod().equalsIgnoreCase("DELETE")) { ExternalSessionKey externalKey = ExternalSessionKey.fromWebDriverRequest(getPathInfo()); if (externalKey != null && getPathInfo().endsWith("/session/" + externalKey.getKey())) { return RequestType.STOP_SESSION; } } return RequestType.REGULAR; } @Override public ExternalSessionKey extractSession() { if (getRequestType() == RequestType.START_SESSION) { throw new IllegalAccessError("Cannot call that method of a new session request."); } String path = getPathInfo(); return ExternalSessionKey.fromWebDriverRequest(path); } @Override public Map<String, Object> extractDesiredCapability() { String json = getBody(); try { JsonObject map = new JsonParser().parse(json).getAsJsonObject(); JsonObject dc = map.get("desiredCapabilities").getAsJsonObject(); return new JsonToBeanConverter().convert(Map.class, dc); } catch (Exception e) { throw new GridException("Cannot extract a capabilities from the request: " + json, e); } } @Override public String getNewSessionRequestedCapability(TestSession session) { JsonObject c = new JsonObject(); c.add("desiredCapabilities", new BeanToJsonConverter().convertObject(session.getRequestedCapabilities())); return new Gson().toJson(c); } }
grid hub getting desired capabilities needs to account for w3c only requests.
java/server/src/org/openqa/grid/web/servlet/handler/WebDriverRequest.java
grid hub getting desired capabilities needs to account for w3c only requests.
<ide><path>java/server/src/org/openqa/grid/web/servlet/handler/WebDriverRequest.java <ide> String json = getBody(); <ide> try { <ide> JsonObject map = new JsonParser().parse(json).getAsJsonObject(); <add> // Current W3C has required / desired capabilities wrapped in a 'capabilites' object. <add> // This will need to be updated if/when https://github.com/w3c/webdriver/pull/327 gets merged <add> if (map.has("capabilities")) { <add> return new JsonToBeanConverter().convert(Map.class, map.getAsJsonObject("capabilities").getAsJsonObject("desiredCapabilities")); <add> } <ide> JsonObject dc = map.get("desiredCapabilities").getAsJsonObject(); <ide> return new JsonToBeanConverter().convert(Map.class, dc); <ide>
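For context on the two request shapes the patch handles: a legacy JSON Wire client posts desiredCapabilities at the top level of the new-session body, while the W3C-style payload referenced in the diff wraps it inside a capabilities object. The standalone sketch below mirrors the added branch using the same Gson calls the servlet already relies on; it is an illustration, not Selenium code:

import com.google.gson.JsonObject;
import com.google.gson.JsonParser;

public class CapabilityShapeDemo {

    // Mirrors the branch added to extractDesiredCapability(): prefer the W3C wrapper when present.
    static JsonObject extractDesired(String json) {
        JsonObject map = new JsonParser().parse(json).getAsJsonObject();
        if (map.has("capabilities")) {
            // W3C draft shape: {"capabilities": {"desiredCapabilities": {...}}}
            return map.getAsJsonObject("capabilities").getAsJsonObject("desiredCapabilities");
        }
        // Legacy JSON Wire shape: {"desiredCapabilities": {...}}
        return map.getAsJsonObject("desiredCapabilities");
    }

    public static void main(String[] args) {
        String legacy = "{\"desiredCapabilities\":{\"browserName\":\"firefox\"}}";
        String w3c = "{\"capabilities\":{\"desiredCapabilities\":{\"browserName\":\"firefox\"}}}";
        System.out.println(extractDesired(legacy)); // {"browserName":"firefox"}
        System.out.println(extractDesired(w3c));    // {"browserName":"firefox"}
    }
}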
Java
bsd-3-clause
3b4c0253a6deb66aa831e951a79115f08fb5b69e
0
vivo-project/Vitro,vivo-project/Vitro,vivo-project/Vitro,vivo-project/Vitro
/* $This file is distributed under the terms of the license in /doc/license.txt$ */ package edu.cornell.mannlib.vitro.webapp.edit.n3editing; import java.util.Calendar; import java.util.HashMap; import java.util.Map; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import com.hp.hpl.jena.datatypes.xsd.XSDDateTime; import com.hp.hpl.jena.rdf.model.Literal; import edu.cornell.mannlib.vitro.webapp.dao.VitroVocabulary; import edu.cornell.mannlib.vitro.webapp.dao.VitroVocabulary.Precision; import edu.cornell.mannlib.vitro.webapp.edit.elements.DateTimeWithPrecision; public class DateTimeIntervalValidation implements N3Validator { private static Log log = LogFactory.getLog(DateTimeIntervalValidation.class); private String startFieldName; private String endFieldName; private String startValueName; private String endValueName; private String startPrecisionName; private String endPrecisionName; public DateTimeIntervalValidation(String startFieldName, String endFieldName){ this.startFieldName = startFieldName; this.endFieldName = endFieldName; startValueName = startFieldName + ".value"; endValueName = endFieldName + ".value"; startPrecisionName = startFieldName + ".precision"; endPrecisionName = endFieldName + ".precision"; } public Map<String, String> validate(EditConfiguration editConfig, EditSubmission editSub) { Map<String, Literal> existingLiterals = editConfig.getLiteralsInScope(); Literal existingStartYear = existingLiterals.get(startValueName); Literal existingEndYear = existingLiterals.get(endValueName); Map<String, Literal> literalsFromForm = editSub.getLiteralsFromForm(); Literal formStartYear = literalsFromForm.get(startValueName); Literal formEndYear = literalsFromForm.get(endValueName); VitroVocabulary.Precision startPrecision = getPrecision(startPrecisionName, editConfig, editSub); VitroVocabulary.Precision endPrecision = getPrecision(endPrecisionName, editConfig, editSub); Map<String, String> errors = new HashMap<String, String>(); if (formStartYear != null && formEndYear != null) { errors.putAll(checkDateLiterals(formStartYear, formEndYear, startPrecision, endPrecision)); } else if (formStartYear != null && existingEndYear != null) { errors.putAll(checkDateLiterals(formStartYear, existingEndYear, startPrecision, endPrecision)); } else if (existingStartYear != null && formEndYear != null) { errors.putAll(checkDateLiterals(existingStartYear, formEndYear, startPrecision, endPrecision)); } else if (existingStartYear != null && existingEndYear != null) { errors.putAll(checkDateLiterals(existingStartYear, existingEndYear, startPrecision, endPrecision)); } if (errors.size() != 0) return errors; else return null; } private Precision getPrecision(String precisionVarName, EditConfiguration editConfig, EditSubmission editSub) { if( editSub != null && editSub.getUrisFromForm() != null && editSub.getUrisFromForm().containsKey(precisionVarName)){ String precisionStr = editSub.getUrisFromForm().get(precisionVarName); VitroVocabulary.Precision precision = DateTimeWithPrecision.toPrecision( precisionStr ); if( precision == null ) log.warn("cannot convert " + precisionStr + " to a precision"); else return precision; }else if( editConfig != null && editConfig.getUrisInScope() != null && editConfig.getUrisInScope().containsKey(precisionVarName)){ String precisionStr = editConfig.getUrisInScope().get(precisionVarName); VitroVocabulary.Precision precision = DateTimeWithPrecision.toPrecision( precisionStr ); if( precision == null ) log.warn("cannot convert " + 
precisionStr + " to a precision"); else return precision; } //this is what is returned if a precision was not found in the config or submission return null; } private Map<String, String> checkDateLiterals( Literal startLit, Literal endLit, VitroVocabulary.Precision startPrecision, VitroVocabulary.Precision endPrecision) { Map<String, String> errors = new HashMap<String, String>(); //check to make sure that there are precisions if( startPrecision == null ) errors.put("startFieldName", "could not determine start precision"); if( endPrecision == null ) errors.put("endFieldName" , "could not determine end precision"); if( errors.size() > 0 ) return errors; try{ XSDDateTime startDate = (XSDDateTime)startLit.getValue(); XSDDateTime endDate = (XSDDateTime)endLit.getValue(); if( startDate != null && endDate!= null ){ Calendar startCal = startDate.asCalendar(); Calendar endCal = endDate.asCalendar(); if( endCal != null ){ if( !startCal.before( endCal ) ){ if( startPrecision == VitroVocabulary.Precision.YEAR && endPrecision == VitroVocabulary.Precision.YEAR ){ errors.putAll( checkYears(startCal,endCal)); }else{ errors.put(startFieldName, "Start must be before end"); errors.put(endFieldName, "End must be after start"); } } } } }catch(ClassCastException cce){ errors.put(startFieldName, "could not format star or end date"); errors.put(endFieldName, "could not format star or end date"); log.debug("could not format dates " + cce); } return errors; } private Map<? extends String, ? extends String> checkYears( Calendar startCal, Calendar endCal) { Map<String, String> errors = new HashMap<String, String>(); if( ! (endCal.get(Calendar.YEAR) >= startCal.get(Calendar.YEAR) )){ errors.put(startFieldName, "Start must be before end"); errors.put(endFieldName, "End must be after start"); } return errors; } }
webapp/src/edu/cornell/mannlib/vitro/webapp/edit/n3editing/DateTimeIntervalValidation.java
/* $This file is distributed under the terms of the license in /doc/license.txt$ */ package edu.cornell.mannlib.vitro.webapp.edit.n3editing; import java.util.Calendar; import java.util.HashMap; import java.util.Map; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import com.hp.hpl.jena.datatypes.xsd.XSDDateTime; import com.hp.hpl.jena.rdf.model.Literal; public class DateTimeIntervalValidation implements N3Validator { private static Log log = LogFactory.getLog(DateTimeIntervalValidation.class); private String startFieldName; private String endFieldName; private String startValueName; private String endValueName; public DateTimeIntervalValidation(String startFieldName, String endFieldName){ this.startFieldName = startFieldName; this.endFieldName = endFieldName; startValueName = startFieldName + ".value"; endValueName = endFieldName + ".value"; } public Map<String, String> validate(EditConfiguration editConfig, EditSubmission editSub) { Map<String, Literal> existingLiterals = editConfig.getLiteralsInScope(); Literal existingStartYear = existingLiterals.get(startValueName); Literal existingEndYear = existingLiterals.get(endValueName); Map<String, Literal> literalsFromForm = editSub.getLiteralsFromForm(); Literal formStartYear = literalsFromForm.get(startValueName); Literal formEndYear = literalsFromForm.get(endValueName); Map<String, String> errors = new HashMap<String, String>(); if (formStartYear != null && formEndYear != null) { errors.putAll(checkDateLiterals(formStartYear, formEndYear)); } else if (formStartYear != null && existingEndYear != null) { errors.putAll(checkDateLiterals(formStartYear, existingEndYear)); } else if (existingStartYear != null && formEndYear != null) { errors.putAll(checkDateLiterals(existingStartYear, formEndYear)); } else if (existingStartYear != null && existingEndYear != null) { errors.putAll(checkDateLiterals(existingStartYear, existingEndYear)); } if (errors.size() != 0) return errors; else return null; } private Map<String, String> checkDateLiterals(Literal startLit, Literal endLit) { Map<String, String> errors = new HashMap<String, String>(); try{ XSDDateTime startDate = (XSDDateTime)startLit.getValue(); XSDDateTime endDate = (XSDDateTime)endLit.getValue(); if( startDate != null && endDate!= null ){ Calendar startCal = startDate.asCalendar(); Calendar endCal = endDate.asCalendar(); if( endCal != null && ! endCal.after( startCal ) ){ errors.put(startFieldName, "Start year must be before end year"); errors.put(endFieldName, "End year must be after start year"); } } }catch(ClassCastException cce){ errors.put(startFieldName, "could not format star or end date"); errors.put(endFieldName, "could not format star or end date"); log.debug("could not format dates " + cce); } return errors; } }
Fixing bug: can now enter the same year for start and end (NIHVIVO-1743)
webapp/src/edu/cornell/mannlib/vitro/webapp/edit/n3editing/DateTimeIntervalValidation.java
Fixing bug: can now enter the same year for start and end (NIHVIVO-1743)
<ide><path>ebapp/src/edu/cornell/mannlib/vitro/webapp/edit/n3editing/DateTimeIntervalValidation.java <ide> import com.hp.hpl.jena.datatypes.xsd.XSDDateTime; <ide> import com.hp.hpl.jena.rdf.model.Literal; <ide> <add>import edu.cornell.mannlib.vitro.webapp.dao.VitroVocabulary; <add>import edu.cornell.mannlib.vitro.webapp.dao.VitroVocabulary.Precision; <add>import edu.cornell.mannlib.vitro.webapp.edit.elements.DateTimeWithPrecision; <add> <ide> public class DateTimeIntervalValidation implements N3Validator { <ide> private static Log log = LogFactory.getLog(DateTimeIntervalValidation.class); <ide> <ide> <ide> private String startValueName; <ide> private String endValueName; <add> <add> private String startPrecisionName; <add> private String endPrecisionName; <ide> <ide> public DateTimeIntervalValidation(String startFieldName, String endFieldName){ <ide> this.startFieldName = startFieldName; <ide> this.endFieldName = endFieldName; <ide> startValueName = startFieldName + ".value"; <ide> endValueName = endFieldName + ".value"; <add> startPrecisionName = startFieldName + ".precision"; <add> endPrecisionName = endFieldName + ".precision"; <ide> } <ide> <ide> public Map<String, String> validate(EditConfiguration editConfig, <ide> Literal formStartYear = literalsFromForm.get(startValueName); <ide> Literal formEndYear = literalsFromForm.get(endValueName); <ide> <add> VitroVocabulary.Precision startPrecision = getPrecision(startPrecisionName, editConfig, editSub); <add> VitroVocabulary.Precision endPrecision = getPrecision(endPrecisionName, editConfig, editSub); <add> <ide> Map<String, String> errors = new HashMap<String, String>(); <ide> <ide> if (formStartYear != null && formEndYear != null) { <del> errors.putAll(checkDateLiterals(formStartYear, formEndYear)); <add> errors.putAll(checkDateLiterals(formStartYear, formEndYear, startPrecision, endPrecision)); <ide> } else if (formStartYear != null && existingEndYear != null) { <del> errors.putAll(checkDateLiterals(formStartYear, existingEndYear)); <add> errors.putAll(checkDateLiterals(formStartYear, existingEndYear, startPrecision, endPrecision)); <ide> } else if (existingStartYear != null && formEndYear != null) { <del> errors.putAll(checkDateLiterals(existingStartYear, formEndYear)); <add> errors.putAll(checkDateLiterals(existingStartYear, formEndYear, startPrecision, endPrecision)); <ide> } else if (existingStartYear != null && existingEndYear != null) { <del> errors.putAll(checkDateLiterals(existingStartYear, existingEndYear)); <add> errors.putAll(checkDateLiterals(existingStartYear, existingEndYear, startPrecision, endPrecision)); <ide> } <ide> <ide> if (errors.size() != 0) <ide> return null; <ide> } <ide> <del> private Map<String, String> checkDateLiterals(Literal startLit, Literal endLit) { <add> private Precision getPrecision(String precisionVarName, <add> EditConfiguration editConfig, EditSubmission editSub) { <add> if( editSub != null <add> && editSub.getUrisFromForm() != null <add> && editSub.getUrisFromForm().containsKey(precisionVarName)){ <add> String precisionStr = editSub.getUrisFromForm().get(precisionVarName); <add> VitroVocabulary.Precision precision = DateTimeWithPrecision.toPrecision( precisionStr ); <add> if( precision == null ) <add> log.warn("cannot convert " + precisionStr + " to a precision"); <add> else <add> return precision; <add> }else if( editConfig != null <add> && editConfig.getUrisInScope() != null <add> && editConfig.getUrisInScope().containsKey(precisionVarName)){ <add> String precisionStr = 
editConfig.getUrisInScope().get(precisionVarName); <add> VitroVocabulary.Precision precision = DateTimeWithPrecision.toPrecision( precisionStr ); <add> if( precision == null ) <add> log.warn("cannot convert " + precisionStr + " to a precision"); <add> else <add> return precision; <add> } <add> //this is what is returned if a precision was not found in the config or submission <add> return null; <add> } <add> <add> private Map<String, String> checkDateLiterals( <add> Literal startLit, Literal endLit, <add> VitroVocabulary.Precision startPrecision, VitroVocabulary.Precision endPrecision) { <ide> Map<String, String> errors = new HashMap<String, String>(); <add> <add> //check to make sure that there are precisions <add> if( startPrecision == null ) <add> errors.put("startFieldName", "could not determine start precision"); <add> if( endPrecision == null ) <add> errors.put("endFieldName" , "could not determine end precision"); <add> if( errors.size() > 0 ) <add> return errors; <add> <add> <ide> try{ <ide> XSDDateTime startDate = (XSDDateTime)startLit.getValue(); <ide> XSDDateTime endDate = (XSDDateTime)endLit.getValue(); <ide> Calendar startCal = startDate.asCalendar(); <ide> Calendar endCal = endDate.asCalendar(); <ide> <del> if( endCal != null && ! endCal.after( startCal ) ){ <del> errors.put(startFieldName, "Start year must be before end year"); <del> errors.put(endFieldName, "End year must be after start year"); <add> if( endCal != null ){ <add> if( !startCal.before( endCal ) ){ <add> if( startPrecision == VitroVocabulary.Precision.YEAR <add> && endPrecision == VitroVocabulary.Precision.YEAR ){ <add> errors.putAll( checkYears(startCal,endCal)); <add> }else{ <add> errors.put(startFieldName, "Start must be before end"); <add> errors.put(endFieldName, "End must be after start"); <add> } <add> } <ide> } <ide> } <ide> }catch(ClassCastException cce){ <ide> return errors; <ide> } <ide> <add> private Map<? extends String, ? extends String> checkYears( <add> Calendar startCal, Calendar endCal) { <add> <add> Map<String, String> errors = new HashMap<String, String>(); <add> <add> if( ! (endCal.get(Calendar.YEAR) >= startCal.get(Calendar.YEAR) )){ <add> errors.put(startFieldName, "Start must be before end"); <add> errors.put(endFieldName, "End must be after start"); <add> } <add> <add> return errors; <add> } <ide> }
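The diff above relaxes the interval check: when both the start and the end value carry YEAR precision, an equal year is accepted, while at any finer precision the start must still be strictly before the end. Below is a small JavaScript sketch of just that comparison rule; the real validator is Java and works on XSDDateTime literals, so the names and plain Date objects here are illustrative only.

// Illustrative sketch of the validation rule from the commit above: at YEAR
// precision the start and end may share a year; at any finer precision the
// start must be strictly before the end.
function checkInterval(start, end, startPrecision, endPrecision) {
  var errors = {};
  if (start < end) {
    return errors; // strictly before: always fine
  }
  var bothYearPrecision = startPrecision === 'YEAR' && endPrecision === 'YEAR';
  if (bothYearPrecision && end.getUTCFullYear() >= start.getUTCFullYear()) {
    return errors; // same (or later) year is acceptable at YEAR precision
  }
  errors.start = 'Start must be before end';
  errors.end = 'End must be after start';
  return errors;
}

// Same year at YEAR precision: no errors (this is the NIHVIVO-1743 fix)
console.log(checkInterval(new Date(Date.UTC(2011, 0, 1)), new Date(Date.UTC(2011, 0, 1)), 'YEAR', 'YEAR'));
// Same instant at day precision: rejected
console.log(checkInterval(new Date(Date.UTC(2011, 0, 1)), new Date(Date.UTC(2011, 0, 1)), 'DAY', 'DAY'));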
JavaScript
mit
34eea1a9ecf2b034fc5cbc3a4f4040af7554f5b3
0
redding/romo,redding/romo,redding/romo
$.fn.romoDatepicker = function() { return $.map(this, function(element) { return new RomoDatepicker(element); }); } var RomoDatepicker = function(element) { this.elem = $(element); this.defaultFormat = 'yyyy-mm-dd' this.monthNames = [ "January", "February", "March", "April", "May", "June", "July", "August", "September", "October", "November", "December" ] this.defaultPrevClass = undefined; this.defaultNextClass = undefined; this.defaultIndicatorClass = undefined; this.itemSelector = 'TD.romo-datepicker-day:not(.disabled)'; this.calTable = $(); this.date = undefined; this.today = new Date; this.doInit(); this.doBindElem(); this.doSetFormat(); this.doSetDate(this.elem.val()); this.doBindDropdown(); this.doBuildUI(); this.elem.trigger('datepicker:ready', [this]); } RomoDatepicker.prototype.doInit = function() { // override as needed } RomoDatepicker.prototype.doBindElem = function() { var elemWrapper = $('<div class="romo-datepicker-wrapper"></div>'); elemWrapper.css({'display': this.elem.css('display')}); this.elem.before(elemWrapper); elemWrapper.append(this.elem); var indicatorClass = this.elem.data('romo-datepicker-indicator') || this.defaultIndicatorClass; if (indicatorClass !== undefined) { var indicator = $('<i class="romo-datepicker-indicator '+indicatorClass+'"></i>'); indicator.css({'line-height': this.elem.css('height')}); this.elem.css({'padding-right': '30px'}); this.elem.after(indicator); } } RomoDatepicker.prototype.doSetFormat = function() { var format = this.elem.data('romo-datepicker-format') || this.defaultFormat; this.formatValues = this._parseFormatValues(format); } RomoDatepicker.prototype.doSetDate = function(value) { this.date = this._parseDate(value); if (this.date !== undefined) { this.elem.val(this._formatDate(this.date)); } else { this.elem.val(value); } } RomoDatepicker.prototype.doBindDropdown = function() { this.elem.addClass('romo'); this.elem.attr('data-romo-dropdown-disable-toggle', 'true'); if (this.elem.data('romo-dropdown-width') === undefined) { this.elem.attr('data-romo-dropdown-width', 'elem'); } this.romoDropdown = this.elem.romoDropdown()[0]; this.romoDropdown.doSetPopupZIndex(this.elem); this.romoDropdown.bodyElem.addClass('romo-datepicker-calendar'); this.romoDropdown.elem.on('dropdown:popupOpen', $.proxy(this.onPopupOpen, this)); this.romoDropdown.elem.on('dropdown:popupClose', $.proxy(this.onPopupClose, this)); this.romoDropdown.elem.on('dropdown:toggle', $.proxy(function(e, dropdown) { this.elem.trigger('datepicker:dropdown:toggle', [dropdown, this]); }, this)); this.romoDropdown.elem.on('dropdown:popupOpen', $.proxy(function(e, dropdown) { this.elem.trigger('datepicker:dropdown:popupOpen', [dropdown, this]); }, this)); this.romoDropdown.elem.on('dropdown:popupClose', $.proxy(function(e, dropdown) { this.elem.trigger('datepicker:dropdown:popupClose', [dropdown, this]); }, this)); this.elem.on('datepicker:triggerToggle', $.proxy(function(e) { this.romoDropdown.elem.trigger('dropdown:triggerToggle', []); }, this)); this.elem.on('datepicker:triggerPopupOpen', $.proxy(function(e) { this.romoDropdown.elem.trigger('dropdown:triggerPopupOpen', []); }, this)); this.elem.on('datepicker:triggerPopupClose', $.proxy(function(e) { this.romoDropdown.elem.trigger('dropdown:triggerPopupClose', []); }, this)); } RomoDatepicker.prototype.doBuildUI = function() { this.calTable = this._buildCalendar(); this.romoDropdown.bodyElem.html(''); this.romoDropdown.bodyElem.append(this.calTable); this.calTable.find('.romo-datepicker-prev').on('click', 
$.proxy(this.onPrevClick, this)); this.calTable.find('.romo-datepicker-next').on('click', $.proxy(this.onNextClick, this)); } RomoDatepicker.prototype.doRefreshUI = function(date) { var rDate = date || this.date || (new Date); this._refreshCalendar(rDate); this.elem.trigger('datepicker:refresh', [rDate, this]); this.calTable.find(this.itemSelector).on('hover', $.proxy(this.onItemHover, this)); this.calTable.find(this.itemSelector).on('click', $.proxy(this.onItemClick, this)); } RomoDatepicker.prototype.doRefreshToPrevMonth = function() { var date = this.refreshDate || this.date || (new Date); var year = date.getUTCFullYear(); var month = date.getUTCMonth() - 1; if (month < 0) { year -= 1; month = 11; } var pDate = this._UTCDate(year, month, 1); this.doRefreshUI(pDate); this.elem.trigger('datepicker:prevRefresh', [pDate, this]); } RomoDatepicker.prototype.doRefreshToNextMonth = function() { var date = this.refreshDate || this.date || (new Date); var year = date.getUTCFullYear(); var month = date.getUTCMonth() + 1; if (month > 11) { year += 1; month = 0; } var nDate = this._UTCDate(year, month, 1); this.doRefreshUI(nDate); this.elem.trigger('datepicker:nextRefresh', [nDate, this]); } RomoDatepicker.prototype.doSelectHighlightedItem = function() { var prevValue = this.elem.val(); var newValue = this.calTable.find('TD.romo-datepicker-highlight').data('romo-datepicker-value'); this.romoDropdown.doPopupClose(); this.elem.trigger('datepicker:itemSelected', [newValue, prevValue, this]); if (newValue !== prevValue) { this.doSetDate(newValue); this.elem.trigger('change'); this.elem.trigger('datepicker:change', [newValue, prevValue, this]); } } RomoDatepicker.prototype.onPopupOpen = function(e) { if (this.elem.hasClass('disabled') === false) { this.doSetDate(this.elem.val()); this.doRefreshUI(); } } RomoDatepicker.prototype.onPopupClose = function(e) { this._highlightItem($()); } RomoDatepicker.prototype.onItemHover = function(e) { if (e !== undefined) { e.preventDefault(); } this._highlightItem($(e.target)); } RomoDatepicker.prototype.onItemClick = function(e) { if (e !== undefined) { e.preventDefault(); } this.doSelectHighlightedItem(); } RomoDatepicker.prototype.onPrevClick = function(e) { if (e !== undefined) { e.preventDefault(); } this.doRefreshToPrevMonth(); } RomoDatepicker.prototype.onNextClick = function(e) { if (e !== undefined) { e.preventDefault(); } this.doRefreshToNextMonth(); } RomoDatepicker.prototype._refreshCalendar = function(date) { this.calTable.find('.romo-datepicker-title').html(this._buildCalendarTitle(date)); this.calTable.find('tbody').empty().append(this._buildCalendarBody(date)); this.refreshDate = date; } RomoDatepicker.prototype._buildCalendar = function() { var table = $('<table></table>'); table.append(this._buildCalendarHeader()); table.append($('<tbody></tbody>')); return table; } RomoDatepicker.prototype._buildCalendarHeader = function() { var prevClass = this.elem.data('romo-datepicker-prev') || this.defaultPrevClass; var nextClass = this.elem.data('romo-datepicker-next') || this.defaultNextClass; var header = $('<thead></thead'); var row = $('<tr></tr>'); var th = $('<th class="romo-datepicker-prev" title="Previous Month"></th>'); if (prevClass) { th.append('<i class="'+prevClass+'"></i>'); } else { th.text('<<'); } row.append(th); row.append($('<th class="romo-datepicker-title" colspan="5"></th>')); var th = $('<th class="romo-datepicker-next" title="Next Month"></th>'); if (nextClass) { th.append('<i class="'+nextClass+'"></i>'); } else { th.text('>>'); } 
row.append(th); header.append(row); row = $('<tr></tr>'); row.append($('<th class="romo-datepicker-day">Su</th>')); row.append($('<th class="romo-datepicker-day">M</th>')); row.append($('<th class="romo-datepicker-day">T</th>')); row.append($('<th class="romo-datepicker-day">W</th>')); row.append($('<th class="romo-datepicker-day">Th</th>')); row.append($('<th class="romo-datepicker-day">F</th>')); row.append($('<th class="romo-datepicker-day">S</th>')); header.append(row); return header; } RomoDatepicker.prototype._buildCalendarTitle = function(date) { return this.monthNames[date.getUTCMonth()] + ' ' + date.getUTCFullYear().toString(); } RomoDatepicker.prototype._buildCalendarBody = function(date) { var ty = this.today.getUTCFullYear(); var tm = this.today.getUTCMonth(); var td = this.today.getUTCDate(); var year = date.getUTCFullYear(); var month = date.getUTCMonth(); var day = date.getUTCDate(); var fomdow = this._UTCDate(year, month, 1).getUTCDay(); // first-of-the-month day-of-the-week if (fomdow == 0) { fomdow = 7; // don't start calendar on the first-of-the-month, show last week of prev month } var iDate = this._UTCDate(year, month, 1 - fomdow); var iWeek = 0; var html = []; while (iWeek < 6) { // render 6 weeks in the calendar var y = iDate.getUTCFullYear(); var m = iDate.getUTCMonth(); var d = iDate.getUTCDate(); var dow = iDate.getUTCDay(); var cls = []; if (dow === 0) { html.push('<tr>'); } cls.push('romo-datepicker-day'); if (dow === 0 || dow === 6) { cls.push('romo-datepicker-day-weekend'); } if (y !== year || m !== month) { cls.push('romo-datepicker-day-other'); } if (y === ty && m === tm && d === td) { cls.push('romo-datepicker-day-today'); } if (this.date && y === this.date.getUTCFullYear() && m === this.date.getUTCMonth() && d === this.date.getUTCDate()) { cls.push('selected'); } html.push('<td'); html.push(' class="'+cls.join(' ')+'"'); var dt = this._formatDate(iDate); html.push(' title="'+dt+'"'); html.push(' data-romo-datepicker-value="'+dt+'"'); html.push('>'); html.push(d.toString()); html.push('</td>'); if (dow === 6) { html.push('</tr>'); iWeek += 1; } iDate.setUTCDate(iDate.getUTCDate()+1); } return $(html.join('')); } RomoDatepicker.prototype._addIndicatorToElem = function() { // TODO, if opt and indicator icon class // create icon // append to elem } RomoDatepicker.prototype._highlightItem = function(item) { this.calTable.find('TD.romo-datepicker-highlight').removeClass('romo-datepicker-highlight'); item.addClass('romo-datepicker-highlight'); } RomoDatepicker.prototype._formatDate = function(date) { var year = date.getUTCFullYear(); var month = date.getUTCMonth() + 1; var day = date.getUTCDate(); return this.formatValues.reduce(function(prev, curr) { switch (curr) { case "yyyy": case "yyy": prev += year.toString(); break; case "yy": case "y": prev += year.toString().slice(-2); break; case "mm": prev += ("00"+ month.toString()).slice(-2); // pad 2 with "0"s break; case "m": prev += month.toString(); break; case "dd": prev += ("00"+ day.toString()).slice(-2); // pad 2 with "0"s break; case "d": prev += day.toString(); break; default: prev += curr; // delimeter, pass-thru } return prev; }, ''); } RomoDatepicker.prototype._parseFormatValues = function(value) { var regex, matches; regex = /^([m]{1,2})([^md]+)([d]{1,2})([^dy]+)([y]{2,4})$/; // mm dd yyyy or mm dd yy matches = this._regexMatches(value, regex); if (matches.length === 5) { return matches; } regex = /^([y]{3,4})([^ym]+)([m]{1,2})([^md]+)([d]{1,2})$/; // yyyy mm dd matches = this._regexMatches(value, 
regex); if (matches.length === 5) { return matches; } return ['yyyy', '-', 'mm', '-', 'dd']; } RomoDatepicker.prototype._parseDate = function(value) { if (value.trim() === '') { return undefined; } var dateValues = this._parseDateValues(value.trim()); if (dateValues.length === 0) { return undefined; } var year = parseInt(dateValues[0]); if (year < 0) { return undefined; } if (dateValues[0].length > 2 && year < 100) { return undefined; } if (dateValues[0].length === 2 && year < 100) { year = this._currentYear() - (this._currentYear() % 1000) + year; } var month = parseInt(dateValues[1]) - 1; if (month < 0 || month > 11) { return undefined; } var day = parseInt(dateValues[2]); var date = this._UTCDate(year, month, day); if (date.getUTCMonth() !== month) { return undefined; } return date; } RomoDatepicker.prototype._parseDateValues = function(value) { var regex, matches; regex = /^([0-9]{1,2})[^0-9]+([0-9]{1,2})[^0-9]+([0-9]{2,4})$/; // mm dd yyyy or mm dd yy matches = this._regexMatches(value, regex); if (matches.length === 3) { return [matches[2], matches[0], matches[1]]; } regex = /^([0-9]{3,4})[^0-9]+([0-9]{1,2})[^0-9]+([0-9]{1,2})$/; // yyyy mm dd matches = this._regexMatches(value, regex); if (matches.length === 3) { return matches; } regex = /^([0-9]{1,2})[^0-9]+([0-9]{1,2})$/; // mm dd matches = this._regexMatches(value, regex); if (matches.length === 2) { return [this._currentYear(), matches[0], matches[1]]; } return []; } RomoDatepicker.prototype._regexMatches = function(value, regex) { if (regex.test(value) === true) { return regex.exec(value).slice(1); } return []; } RomoDatepicker.prototype._currentYear = function() { return (new Date).getUTCFullYear(); } RomoDatepicker.prototype._UTCDate = function(year, month, day) { return new Date(Date.UTC.apply(Date, [year, month, day])); } Romo.onInitUI(function(e) { $(e.target).find('[data-romo-datepicker-auto="true"]').romoDatepicker(); });
assets/js/romo/datepicker.js
$.fn.romoDatepicker = function() { return $.map(this, function(element) { return new RomoDatepicker(element); }); } var RomoDatepicker = function(element) { this.elem = $(element); this.defaultFormat = 'yyyy-mm-dd' this.monthNames = [ "January", "February", "March", "April", "May", "June", "July", "August", "September", "October", "November", "December" ] this.defaultLeftArrowClass = undefined; this.defaultRightArrowClass = undefined; this.defaultIndicatorClass = undefined; this.itemSelector = 'TD.romo-datepicker-day:not(.disabled)'; this.calTable = $(); this.date = undefined; this.today = new Date; this.doInit(); this.doBindElem(); this.doSetFormat(); this.doSetDate(this.elem.val()); this.doBindDropdown(); this.doBuildUI(); this.elem.trigger('datepicker:ready', [this]); } RomoDatepicker.prototype.doInit = function() { // override as needed } RomoDatepicker.prototype.doBindElem = function() { var elemWrapper = $('<div class="romo-datepicker-wrapper"></div>'); elemWrapper.css({'display': this.elem.css('display')}); this.elem.before(elemWrapper); elemWrapper.append(this.elem); var indicatorClass = this.elem.data('romo-datepicker-indicator') || this.defaultIndicatorClass; if (indicatorClass !== undefined) { var indicator = $('<i class="romo-datepicker-indicator '+indicatorClass+'"></i>'); indicator.css({'line-height': this.elem.css('height')}); this.elem.css({'padding-right': '30px'}); this.elem.after(indicator); } } RomoDatepicker.prototype.doSetFormat = function() { var format = this.elem.data('romo-datepicker-format') || this.defaultFormat; this.formatValues = this._parseFormatValues(format); } RomoDatepicker.prototype.doSetDate = function(value) { this.date = this._parseDate(value); if (this.date !== undefined) { this.elem.val(this._formatDate(this.date)); } else { this.elem.val(value); } } RomoDatepicker.prototype.doBindDropdown = function() { this.elem.addClass('romo'); this.elem.attr('data-romo-dropdown-disable-toggle', 'true'); if (this.elem.data('romo-dropdown-width') === undefined) { this.elem.attr('data-romo-dropdown-width', 'elem'); } this.romoDropdown = this.elem.romoDropdown()[0]; this.romoDropdown.doSetPopupZIndex(this.elem); this.romoDropdown.bodyElem.addClass('romo-datepicker-calendar'); this.romoDropdown.elem.on('dropdown:popupOpen', $.proxy(this.onPopupOpen, this)); this.romoDropdown.elem.on('dropdown:popupClose', $.proxy(this.onPopupClose, this)); this.romoDropdown.elem.on('dropdown:toggle', $.proxy(function(e, dropdown) { this.elem.trigger('datepicker:dropdown:toggle', [dropdown, this]); }, this)); this.romoDropdown.elem.on('dropdown:popupOpen', $.proxy(function(e, dropdown) { this.elem.trigger('datepicker:dropdown:popupOpen', [dropdown, this]); }, this)); this.romoDropdown.elem.on('dropdown:popupClose', $.proxy(function(e, dropdown) { this.elem.trigger('datepicker:dropdown:popupClose', [dropdown, this]); }, this)); this.elem.on('datepicker:triggerToggle', $.proxy(function(e) { this.romoDropdown.elem.trigger('dropdown:triggerToggle', []); }, this)); this.elem.on('datepicker:triggerPopupOpen', $.proxy(function(e) { this.romoDropdown.elem.trigger('dropdown:triggerPopupOpen', []); }, this)); this.elem.on('datepicker:triggerPopupClose', $.proxy(function(e) { this.romoDropdown.elem.trigger('dropdown:triggerPopupClose', []); }, this)); } RomoDatepicker.prototype.doBuildUI = function() { this.calTable = this._buildCalendar(); this.romoDropdown.bodyElem.html(''); this.romoDropdown.bodyElem.append(this.calTable); this.calTable.find('.romo-datepicker-prev').on('click', 
$.proxy(this.onPrevClick, this)); this.calTable.find('.romo-datepicker-next').on('click', $.proxy(this.onNextClick, this)); } RomoDatepicker.prototype.doRefreshUI = function(date) { var rDate = date || this.date || (new Date); this._refreshCalendar(rDate); this.elem.trigger('datepicker:refresh', [rDate, this]); this.calTable.find(this.itemSelector).on('hover', $.proxy(this.onItemHover, this)); this.calTable.find(this.itemSelector).on('click', $.proxy(this.onItemClick, this)); } RomoDatepicker.prototype.doRefreshToPrevMonth = function() { var date = this.refreshDate || this.date || (new Date); var year = date.getUTCFullYear(); var month = date.getUTCMonth() - 1; if (month < 0) { year -= 1; month = 11; } var pDate = this._UTCDate(year, month, 1); this.doRefreshUI(pDate); this.elem.trigger('datepicker:prevRefresh', [pDate, this]); } RomoDatepicker.prototype.doRefreshToNextMonth = function() { var date = this.refreshDate || this.date || (new Date); var year = date.getUTCFullYear(); var month = date.getUTCMonth() + 1; if (month > 11) { year += 1; month = 0; } var nDate = this._UTCDate(year, month, 1); this.doRefreshUI(nDate); this.elem.trigger('datepicker:nextRefresh', [nDate, this]); } RomoDatepicker.prototype.doSelectHighlightedItem = function() { var prevValue = this.elem.val(); var newValue = this.calTable.find('TD.romo-datepicker-highlight').data('romo-datepicker-value'); this.romoDropdown.doPopupClose(); this.elem.trigger('datepicker:itemSelected', [newValue, prevValue, this]); if (newValue !== prevValue) { this.doSetDate(newValue); this.elem.trigger('change'); this.elem.trigger('datepicker:change', [newValue, prevValue, this]); } } RomoDatepicker.prototype.onPopupOpen = function(e) { if (this.elem.hasClass('disabled') === false) { this.doSetDate(this.elem.val()); this.doRefreshUI(); } } RomoDatepicker.prototype.onPopupClose = function(e) { this._highlightItem($()); } RomoDatepicker.prototype.onItemHover = function(e) { if (e !== undefined) { e.preventDefault(); } this._highlightItem($(e.target)); } RomoDatepicker.prototype.onItemClick = function(e) { if (e !== undefined) { e.preventDefault(); } this.doSelectHighlightedItem(); } RomoDatepicker.prototype.onPrevClick = function(e) { if (e !== undefined) { e.preventDefault(); } this.doRefreshToPrevMonth(); } RomoDatepicker.prototype.onNextClick = function(e) { if (e !== undefined) { e.preventDefault(); } this.doRefreshToNextMonth(); } RomoDatepicker.prototype._refreshCalendar = function(date) { this.calTable.find('.romo-datepicker-title').html(this._buildCalendarTitle(date)); this.calTable.find('tbody').empty().append(this._buildCalendarBody(date)); this.refreshDate = date; } RomoDatepicker.prototype._buildCalendar = function() { var table = $('<table></table>'); table.append(this._buildCalendarHeader()); table.append($('<tbody></tbody>')); return table; } RomoDatepicker.prototype._buildCalendarHeader = function() { var header = $('<thead></thead'); var row = $('<tr></tr>'); row.append($('<th class="romo-datepicker-prev"><i class="'+this.defaultLeftArrowClass+'"></i>')); row.append($('<th class="romo-datepicker-title" colspan="5"></th>')); row.append($('<th class="romo-datepicker-next"><i class="'+this.defaultRightArrowClass+'"></i>')); header.append(row); row = $('<tr></tr>'); row.append($('<th class="romo-datepicker-day">Su</th>')); row.append($('<th class="romo-datepicker-day">M</th>')); row.append($('<th class="romo-datepicker-day">T</th>')); row.append($('<th class="romo-datepicker-day">W</th>')); row.append($('<th 
class="romo-datepicker-day">Th</th>')); row.append($('<th class="romo-datepicker-day">F</th>')); row.append($('<th class="romo-datepicker-day">S</th>')); header.append(row); return header; } RomoDatepicker.prototype._buildCalendarTitle = function(date) { return this.monthNames[date.getUTCMonth()] + ' ' + date.getUTCFullYear().toString(); } RomoDatepicker.prototype._buildCalendarBody = function(date) { var ty = this.today.getUTCFullYear(); var tm = this.today.getUTCMonth(); var td = this.today.getUTCDate(); var year = date.getUTCFullYear(); var month = date.getUTCMonth(); var day = date.getUTCDate(); var fomdow = this._UTCDate(year, month, 1).getUTCDay(); // first-of-the-month day-of-the-week if (fomdow == 0) { fomdow = 7; // don't start calendar on the first-of-the-month, show last week of prev month } var iDate = this._UTCDate(year, month, 1 - fomdow); var iWeek = 0; var html = []; while (iWeek < 6) { // render 6 weeks in the calendar var y = iDate.getUTCFullYear(); var m = iDate.getUTCMonth(); var d = iDate.getUTCDate(); var dow = iDate.getUTCDay(); var cls = []; if (dow === 0) { html.push('<tr>'); } cls.push('romo-datepicker-day'); if (dow === 0 || dow === 6) { cls.push('romo-datepicker-day-weekend'); } if (y !== year || m !== month) { cls.push('romo-datepicker-day-other'); } if (y === ty && m === tm && d === td) { cls.push('romo-datepicker-day-today'); } if (this.date && y === this.date.getUTCFullYear() && m === this.date.getUTCMonth() && d === this.date.getUTCDate()) { cls.push('selected'); } html.push('<td'); html.push(' class="'+cls.join(' ')+'"'); var dt = this._formatDate(iDate); html.push(' title="'+dt+'"'); html.push(' data-romo-datepicker-value="'+dt+'"'); html.push('>'); html.push(d.toString()); html.push('</td>'); if (dow === 6) { html.push('</tr>'); iWeek += 1; } iDate.setUTCDate(iDate.getUTCDate()+1); } return $(html.join('')); } RomoDatepicker.prototype._addIndicatorToElem = function() { // TODO, if opt and indicator icon class // create icon // append to elem } RomoDatepicker.prototype._highlightItem = function(item) { this.calTable.find('TD.romo-datepicker-highlight').removeClass('romo-datepicker-highlight'); item.addClass('romo-datepicker-highlight'); } RomoDatepicker.prototype._formatDate = function(date) { var year = date.getUTCFullYear(); var month = date.getUTCMonth() + 1; var day = date.getUTCDate(); return this.formatValues.reduce(function(prev, curr) { switch (curr) { case "yyyy": case "yyy": prev += year.toString(); break; case "yy": case "y": prev += year.toString().slice(-2); break; case "mm": prev += ("00"+ month.toString()).slice(-2); // pad 2 with "0"s break; case "m": prev += month.toString(); break; case "dd": prev += ("00"+ day.toString()).slice(-2); // pad 2 with "0"s break; case "d": prev += day.toString(); break; default: prev += curr; // delimeter, pass-thru } return prev; }, ''); } RomoDatepicker.prototype._parseFormatValues = function(value) { var regex, matches; regex = /^([m]{1,2})([^md]+)([d]{1,2})([^dy]+)([y]{2,4})$/; // mm dd yyyy or mm dd yy matches = this._regexMatches(value, regex); if (matches.length === 5) { return matches; } regex = /^([y]{3,4})([^ym]+)([m]{1,2})([^md]+)([d]{1,2})$/; // yyyy mm dd matches = this._regexMatches(value, regex); if (matches.length === 5) { return matches; } return ['yyyy', '-', 'mm', '-', 'dd']; } RomoDatepicker.prototype._parseDate = function(value) { if (value.trim() === '') { return undefined; } var dateValues = this._parseDateValues(value.trim()); if (dateValues.length === 0) { return undefined; } var 
year = parseInt(dateValues[0]); if (year < 0) { return undefined; } if (dateValues[0].length > 2 && year < 100) { return undefined; } if (dateValues[0].length === 2 && year < 100) { year = this._currentYear() - (this._currentYear() % 1000) + year; } var month = parseInt(dateValues[1]) - 1; if (month < 0 || month > 11) { return undefined; } var day = parseInt(dateValues[2]); var date = this._UTCDate(year, month, day); if (date.getUTCMonth() !== month) { return undefined; } return date; } RomoDatepicker.prototype._parseDateValues = function(value) { var regex, matches; regex = /^([0-9]{1,2})[^0-9]+([0-9]{1,2})[^0-9]+([0-9]{2,4})$/; // mm dd yyyy or mm dd yy matches = this._regexMatches(value, regex); if (matches.length === 3) { return [matches[2], matches[0], matches[1]]; } regex = /^([0-9]{3,4})[^0-9]+([0-9]{1,2})[^0-9]+([0-9]{1,2})$/; // yyyy mm dd matches = this._regexMatches(value, regex); if (matches.length === 3) { return matches; } regex = /^([0-9]{1,2})[^0-9]+([0-9]{1,2})$/; // mm dd matches = this._regexMatches(value, regex); if (matches.length === 2) { return [this._currentYear(), matches[0], matches[1]]; } return []; } RomoDatepicker.prototype._regexMatches = function(value, regex) { if (regex.test(value) === true) { return regex.exec(value).slice(1); } return []; } RomoDatepicker.prototype._currentYear = function() { return (new Date).getUTCFullYear(); } RomoDatepicker.prototype._UTCDate = function(year, month, day) { return new Date(Date.UTC.apply(Date, [year, month, day])); } Romo.onInitUI(function(e) { $(e.target).find('[data-romo-datepicker-auto="true"]').romoDatepicker(); });
datepicker: better handling of prev and next buttons This renames the class settings more appropriately. It also handles the case where no default was given by putting ASCII arrows into the buttons. This also allows for setting the class at the component level. All this is to allow more flexibility when specifying the prev/next button content.
assets/js/romo/datepicker.js
datepicker: better handling of prev and next buttons
<ide><path>ssets/js/romo/datepicker.js <ide> "January", "February", "March", "April", "May", "June", <ide> "July", "August", "September", "October", "November", "December" <ide> ] <del> this.defaultLeftArrowClass = undefined; <del> this.defaultRightArrowClass = undefined; <add> this.defaultPrevClass = undefined; <add> this.defaultNextClass = undefined; <ide> this.defaultIndicatorClass = undefined; <ide> this.itemSelector = 'TD.romo-datepicker-day:not(.disabled)'; <ide> this.calTable = $(); <ide> } <ide> <ide> RomoDatepicker.prototype._buildCalendarHeader = function() { <add> var prevClass = this.elem.data('romo-datepicker-prev') || this.defaultPrevClass; <add> var nextClass = this.elem.data('romo-datepicker-next') || this.defaultNextClass; <ide> var header = $('<thead></thead'); <ide> <ide> var row = $('<tr></tr>'); <del> row.append($('<th class="romo-datepicker-prev"><i class="'+this.defaultLeftArrowClass+'"></i>')); <add> var th = $('<th class="romo-datepicker-prev" title="Previous Month"></th>'); <add> if (prevClass) { <add> th.append('<i class="'+prevClass+'"></i>'); <add> } else { <add> th.text('<<'); <add> } <add> row.append(th); <ide> row.append($('<th class="romo-datepicker-title" colspan="5"></th>')); <del> row.append($('<th class="romo-datepicker-next"><i class="'+this.defaultRightArrowClass+'"></i>')); <add> var th = $('<th class="romo-datepicker-next" title="Next Month"></th>'); <add> if (nextClass) { <add> th.append('<i class="'+nextClass+'"></i>'); <add> } else { <add> th.text('>>'); <add> } <add> row.append(th); <ide> header.append(row); <ide> <ide> row = $('<tr></tr>');
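The commit above resolves the prev/next icon class in three steps: a data attribute on the input wins, then the component-level default, and when neither is set the header cell falls back to plain ASCII text. A stripped-down, browser-only sketch of that fallback chain follows; the real implementation builds the header with jQuery inside RomoDatepicker, and the helper below is hypothetical.

// Stripped-down sketch of the fallback chain from the commit above (the real
// code builds the header with jQuery). Assumes a browser environment; the
// helper name and arguments are illustrative.
function buildNavCell(elem, dataAttr, defaultClass, asciiFallback, title) {
  var th = document.createElement('th');
  th.title = title;
  // data attribute wins, then the component-level default, then plain ASCII
  var iconClass = elem.getAttribute(dataAttr) || defaultClass;
  if (iconClass) {
    var icon = document.createElement('i');
    icon.className = iconClass;
    th.appendChild(icon);
  } else {
    th.textContent = asciiFallback;
  }
  return th;
}

// Example: an input with no data attribute and no default gets the '<<' text
var input = document.createElement('input');
var prevCell = buildNavCell(input, 'data-romo-datepicker-prev', undefined, '<<', 'Previous Month');
console.log(prevCell.outerHTML); // <th title="Previous Month">&lt;&lt;</th>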
Java
mit
61f17f4980305f18a39874a83021bb3e786c5ea7
0
hsz/idea-gitignore,digideskio/idea-gitignore,hsz/idea-gitignore,halirutan/idea-gitignore,dragon788/idea-gitignore,halirutan/idea-gitignore,digideskio/idea-gitignore,dragon788/idea-gitignore
/* * The MIT License (MIT) * * Copyright (c) 2015 hsz Jakub Chrzanowski <[email protected]> * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package mobi.hsz.idea.gitignore; import com.intellij.openapi.components.AbstractProjectComponent; import com.intellij.openapi.progress.BackgroundTaskQueue; import com.intellij.openapi.progress.ProgressIndicator; import com.intellij.openapi.progress.Task; import com.intellij.openapi.project.Project; import com.intellij.openapi.vcs.ProjectLevelVcsManager; import com.intellij.openapi.vcs.VcsListener; import com.intellij.openapi.vfs.*; import com.intellij.psi.*; import com.intellij.psi.impl.PsiManagerImpl; import com.intellij.psi.impl.file.impl.FileManager; import com.intellij.psi.impl.file.impl.FileManagerImpl; import com.intellij.psi.search.FileTypeIndex; import com.intellij.psi.search.GlobalSearchScope; import com.intellij.util.Alarm; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.messages.MessageBusConnection; import mobi.hsz.idea.gitignore.file.type.IgnoreFileType; import mobi.hsz.idea.gitignore.lang.IgnoreLanguage; import mobi.hsz.idea.gitignore.psi.IgnoreFile; import mobi.hsz.idea.gitignore.settings.IgnoreSettings; import mobi.hsz.idea.gitignore.util.CacheMap; import mobi.hsz.idea.gitignore.util.Utils; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.util.Collection; import java.util.List; import static mobi.hsz.idea.gitignore.settings.IgnoreSettings.KEY; /** * {@link IgnoreManager} handles ignore files indexing and status caching. * * @author Jakub Chrzanowski <[email protected]> * @since 1.0 */ public class IgnoreManager extends AbstractProjectComponent { private final CacheMap cache; private final PsiManagerImpl psiManager; private final VirtualFileManager virtualFileManager; private final Alarm alarm = new Alarm(); private final BackgroundTaskQueue queue; private final IgnoreSettings settings; private MessageBusConnection messageBus; private boolean working; private final VirtualFileListener virtualFileListener = new VirtualFileAdapter() { public boolean wasIgnoreFileType; /** * Fired when a virtual file is renamed from within IDEA, or its writable status is changed. * For files renamed externally, {@link #fileCreated} and {@link #fileDeleted} events will be fired. * * @param event the event object containing information about the change. 
*/ @Override public void propertyChanged(@NotNull VirtualFilePropertyEvent event) { if (event.getPropertyName().equals("name")) { boolean isIgnoreFileType = isIgnoreFileType(event); if (isIgnoreFileType && !wasIgnoreFileType) { addFile(event); } else if (!isIgnoreFileType && wasIgnoreFileType) { removeFile(event); } } } /** * Fired before the change of a name or writable status of a file is processed. * * @param event the event object containing information about the change. */ @Override final public void beforePropertyChange(@NotNull VirtualFilePropertyEvent event) { wasIgnoreFileType = isIgnoreFileType(event); } /** * Fired when a virtual file is created. This event is not fired for files discovered during initial VFS initialization. * * @param event the event object containing information about the change. */ @Override public void fileCreated(@NotNull VirtualFileEvent event) { addFile(event); } @Override public void beforeFileDeletion(@NotNull VirtualFileEvent event) { removeFile(event); } /** * Fired when a virtual file is copied from within IDEA. * * @param event the event object containing information about the change. */ @Override public void fileCopied(@NotNull VirtualFileCopyEvent event) { addFile(event); } /** * Adds {@link IgnoreFile} to the {@link CacheMap}. */ private void addFile(VirtualFileEvent event) { if (isIgnoreFileType(event)) { IgnoreFile file = getIgnoreFile(event.getFile()); if (file != null) { cache.add(file); } } } /** * Removes {@link IgnoreFile} from the {@link CacheMap}. */ private void removeFile(VirtualFileEvent event) { if (isIgnoreFileType(event)) { cache.remove(getIgnoreFile(event.getFile())); } } /** * Checks if event was fired on the {@link IgnoreFileType} file. * * @param event current event * @return event called on {@link IgnoreFileType} */ protected boolean isIgnoreFileType(VirtualFileEvent event) { return event.getFile().getFileType() instanceof IgnoreFileType; } }; private final PsiTreeChangeListener psiTreeChangeListener = new PsiTreeChangeAdapter() { @Override public void childrenChanged(@NotNull PsiTreeChangeEvent event) { if (event.getParent() instanceof IgnoreFile) { IgnoreFile ignoreFile = (IgnoreFile) event.getParent(); if (((IgnoreLanguage) ignoreFile.getLanguage()).isEnabled()) { cache.hasChanged(ignoreFile); } } } }; private final IgnoreSettings.Listener settingsListener = new IgnoreSettings.Listener() { @Override public void onChange(@NotNull KEY key, Object value) { switch (key) { case IGNORED_FILE_STATUS: toggle((Boolean) value); break; case OUTER_IGNORE_RULES: case LANGUAGES: if (isEnabled()) { if (working) { cache.clear(); retrieve(); } else { enable(); } } break; } } }; private final VcsListener vcsListener = new VcsListener() { private boolean initialized; @Override public void directoryMappingChanged() { if (working && initialized) { cache.clear(); retrieve(); } initialized = true; } }; /** * Returns {@link IgnoreManager} service instance. * * @param project current project * @return {@link IgnoreManager instance} */ public static IgnoreManager getInstance(@NotNull final Project project) { return project.getComponent(IgnoreManager.class); } /** * Constructor builds {@link IgnoreManager} instance. 
* * @param project current project */ public IgnoreManager(@NotNull final Project project) { super(project); cache = new CacheMap(project); psiManager = (PsiManagerImpl) PsiManager.getInstance(project); virtualFileManager = VirtualFileManager.getInstance(); queue = new BackgroundTaskQueue(project, IgnoreBundle.message("cache.indexing")); settings = IgnoreSettings.getInstance(); } /** * Helper for fetching {@link IgnoreFile} using {@link VirtualFile}. * * @param file current file * @return {@link IgnoreFile} */ @Nullable private IgnoreFile getIgnoreFile(@Nullable VirtualFile file) { if (file == null || !file.exists()) { return null; } PsiFile psiFile = psiManager.findFile(file); if (psiFile == null || !(psiFile instanceof IgnoreFile)) { return null; } return (IgnoreFile) psiFile; } /** * Checks if file is ignored. * * @param file current file * @return file is ignored */ public boolean isFileIgnored(@NotNull final VirtualFile file) { return isEnabled() && cache.isFileIgnored(file); } /** * Checks if file's parents are ignored. * * @param file current file * @return file's parents are ignored */ public boolean isParentIgnored(@NotNull final VirtualFile file) { if (!isEnabled()) { return false; } VirtualFile parent = file.getParent(); while (parent != null && Utils.isInProject(parent, myProject)) { if (isFileIgnored(parent)) { return true; } parent = parent.getParent(); } return false; } /** * Checks if ignored files watching is enabled. * * @return enabled */ private boolean isEnabled() { return settings.isIgnoredFileStatus(); } /** * Invoked when the project corresponding to this component instance is opened.<p> * Note that components may be created for even unopened projects and this method can be never * invoked for a particular component instance (for example for default project). */ @Override public void projectOpened() { if (isEnabled() && !working) { enable(); } } /** * Enable manager. */ private void enable() { if (working) { return; } virtualFileManager.addVirtualFileListener(virtualFileListener); psiManager.addPsiTreeChangeListener(psiTreeChangeListener); settings.addListener(settingsListener); messageBus = myProject.getMessageBus().connect(); messageBus.subscribe(ProjectLevelVcsManager.VCS_CONFIGURATION_CHANGED, vcsListener); working = true; retrieve(); } /** * Invoked when the project corresponding to this component instance is closed.<p> * Note that components may be created for even unopened projects and this method can be never * invoked for a particular component instance (for example for default project). */ @Override public void projectClosed() { disable(); } /** * Disable manager. */ private void disable() { alarm.cancelAllRequests(); virtualFileManager.removeVirtualFileListener(virtualFileListener); psiManager.removePsiTreeChangeListener(psiTreeChangeListener); settings.removeListener(settingsListener); if (messageBus != null) { messageBus.disconnect(); } cache.clear(); working = false; } /** * Runs {@link #enable()} or {@link #disable()} depending on the passed value. * * @param enable or disable */ private void toggle(Boolean enable) { if (enable) { enable(); } else { disable(); } } /** * Triggers caching actions. 
*/ private void retrieve() { if (!Alarm.isEventDispatchThread()) { return; } alarm.cancelAllRequests(); alarm.addRequest(new Runnable() { @Override public void run() { FileManager fileManager = psiManager.getFileManager(); if (!(fileManager instanceof FileManagerImpl)) { return; } if (((FileManagerImpl) psiManager.getFileManager()).isInitialized()) { alarm.cancelAllRequests(); queue.clear(); // Search for Ignore files in the project final GlobalSearchScope scope = GlobalSearchScope.allScope(myProject); final List<IgnoreFile> files = ContainerUtil.newArrayList(); for (final IgnoreLanguage language : IgnoreBundle.LANGUAGES) { if (language.isEnabled()) { try { Collection<VirtualFile> virtualFiles = FileTypeIndex.getFiles(language.getFileType(), scope); for (VirtualFile virtualFile : virtualFiles) { ContainerUtil.addIfNotNull(getIgnoreFile(virtualFile), files); } } catch (IndexOutOfBoundsException ignored) { } } } Utils.ignoreFilesSort(files); addTasksFor(files); // Search for outer files if (settings.isOuterIgnoreRules()) { for (IgnoreLanguage language : IgnoreBundle.LANGUAGES) { if (language.isEnabled()) { VirtualFile outerFile = language.getOuterFile(myProject); addTaskFor(getIgnoreFile(outerFile)); } } } } } /** * Adds {@link IgnoreFile} to the cache processor queue. * * @param files to cache */ private void addTasksFor(@NotNull final List<IgnoreFile> files) { if (files.isEmpty()) { return; } addTaskFor(files.remove(0), files); } /** * Adds {@link IgnoreFile} to the cache processor queue. * * @param file to cache */ private void addTaskFor(@Nullable final IgnoreFile file) { addTaskFor(file, null); } /** * Adds {@link IgnoreFile} to the cache processor queue. * * @param file to cache * @param dependentFiles files to cache if not ignored by given file */ private void addTaskFor(@Nullable final IgnoreFile file, @Nullable final List<IgnoreFile> dependentFiles) { if (file == null) { return; } queue.run(new Task.Backgroundable(myProject, IgnoreBundle.message("cache.indexing")) { @Override public void run(@NotNull ProgressIndicator indicator) { if (isFileIgnored(file.getVirtualFile())) { indicator.cancel(); } else { String path = Utils.getRelativePath(myProject.getBaseDir(), file.getVirtualFile()); indicator.setText(path); cache.add(file); } if (dependentFiles == null || dependentFiles.isEmpty()) { return; } for (IgnoreFile dependentFile : dependentFiles) { if (!isFileIgnored(dependentFile.getVirtualFile()) && !isParentIgnored(dependentFile.getVirtualFile())) { addTaskFor(dependentFile); } } } }); } }, 200); } /** * Unique name of this component. If there is another component with the same name or * name is null internal assertion will occur. * * @return the name of this component */ @NonNls @NotNull @Override public String getComponentName() { return "IgnoreManager"; } }
src/mobi/hsz/idea/gitignore/IgnoreManager.java
/* * The MIT License (MIT) * * Copyright (c) 2015 hsz Jakub Chrzanowski <[email protected]> * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package mobi.hsz.idea.gitignore; import com.intellij.openapi.components.AbstractProjectComponent; import com.intellij.openapi.progress.BackgroundTaskQueue; import com.intellij.openapi.progress.ProgressIndicator; import com.intellij.openapi.progress.Task; import com.intellij.openapi.project.Project; import com.intellij.openapi.vcs.ProjectLevelVcsManager; import com.intellij.openapi.vcs.VcsListener; import com.intellij.openapi.vfs.*; import com.intellij.psi.*; import com.intellij.psi.impl.PsiManagerImpl; import com.intellij.psi.impl.file.impl.FileManager; import com.intellij.psi.impl.file.impl.FileManagerImpl; import com.intellij.psi.search.FileTypeIndex; import com.intellij.psi.search.GlobalSearchScope; import com.intellij.util.Alarm; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.messages.MessageBusConnection; import mobi.hsz.idea.gitignore.file.type.IgnoreFileType; import mobi.hsz.idea.gitignore.lang.IgnoreLanguage; import mobi.hsz.idea.gitignore.psi.IgnoreFile; import mobi.hsz.idea.gitignore.settings.IgnoreSettings; import mobi.hsz.idea.gitignore.util.CacheMap; import mobi.hsz.idea.gitignore.util.Utils; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.util.List; import static mobi.hsz.idea.gitignore.settings.IgnoreSettings.KEY; /** * {@link IgnoreManager} handles ignore files indexing and status caching. * * @author Jakub Chrzanowski <[email protected]> * @since 1.0 */ public class IgnoreManager extends AbstractProjectComponent { private final CacheMap cache; private final PsiManagerImpl psiManager; private final VirtualFileManager virtualFileManager; private final Alarm alarm = new Alarm(); private final BackgroundTaskQueue queue; private final IgnoreSettings settings; private MessageBusConnection messageBus; private boolean working; private final VirtualFileListener virtualFileListener = new VirtualFileAdapter() { public boolean wasIgnoreFileType; /** * Fired when a virtual file is renamed from within IDEA, or its writable status is changed. * For files renamed externally, {@link #fileCreated} and {@link #fileDeleted} events will be fired. * * @param event the event object containing information about the change. 
*/ @Override public void propertyChanged(@NotNull VirtualFilePropertyEvent event) { if (event.getPropertyName().equals("name")) { boolean isIgnoreFileType = isIgnoreFileType(event); if (isIgnoreFileType && !wasIgnoreFileType) { addFile(event); } else if (!isIgnoreFileType && wasIgnoreFileType) { removeFile(event); } } } /** * Fired before the change of a name or writable status of a file is processed. * * @param event the event object containing information about the change. */ @Override final public void beforePropertyChange(@NotNull VirtualFilePropertyEvent event) { wasIgnoreFileType = isIgnoreFileType(event); } /** * Fired when a virtual file is created. This event is not fired for files discovered during initial VFS initialization. * * @param event the event object containing information about the change. */ @Override public void fileCreated(@NotNull VirtualFileEvent event) { addFile(event); } @Override public void beforeFileDeletion(@NotNull VirtualFileEvent event) { removeFile(event); } /** * Fired when a virtual file is copied from within IDEA. * * @param event the event object containing information about the change. */ @Override public void fileCopied(@NotNull VirtualFileCopyEvent event) { addFile(event); } /** * Adds {@link IgnoreFile} to the {@link CacheMap}. */ private void addFile(VirtualFileEvent event) { if (isIgnoreFileType(event)) { IgnoreFile file = getIgnoreFile(event.getFile()); if (file != null) { cache.add(file); } } } /** * Removes {@link IgnoreFile} from the {@link CacheMap}. */ private void removeFile(VirtualFileEvent event) { if (isIgnoreFileType(event)) { cache.remove(getIgnoreFile(event.getFile())); } } /** * Checks if event was fired on the {@link IgnoreFileType} file. * * @param event current event * @return event called on {@link IgnoreFileType} */ protected boolean isIgnoreFileType(VirtualFileEvent event) { return event.getFile().getFileType() instanceof IgnoreFileType; } }; private final PsiTreeChangeListener psiTreeChangeListener = new PsiTreeChangeAdapter() { @Override public void childrenChanged(@NotNull PsiTreeChangeEvent event) { if (event.getParent() instanceof IgnoreFile) { IgnoreFile ignoreFile = (IgnoreFile) event.getParent(); if (((IgnoreLanguage) ignoreFile.getLanguage()).isEnabled()) { cache.hasChanged(ignoreFile); } } } }; private final IgnoreSettings.Listener settingsListener = new IgnoreSettings.Listener() { @Override public void onChange(@NotNull KEY key, Object value) { switch (key) { case IGNORED_FILE_STATUS: toggle((Boolean) value); break; case OUTER_IGNORE_RULES: case LANGUAGES: if (isEnabled()) { if (working) { cache.clear(); retrieve(); } else { enable(); } } break; } } }; private final VcsListener vcsListener = new VcsListener() { private boolean initialized; @Override public void directoryMappingChanged() { if (working && initialized) { cache.clear(); retrieve(); } initialized = true; } }; /** * Returns {@link IgnoreManager} service instance. * * @param project current project * @return {@link IgnoreManager instance} */ public static IgnoreManager getInstance(@NotNull final Project project) { return project.getComponent(IgnoreManager.class); } /** * Constructor builds {@link IgnoreManager} instance. 
* * @param project current project */ public IgnoreManager(@NotNull final Project project) { super(project); cache = new CacheMap(project); psiManager = (PsiManagerImpl) PsiManager.getInstance(project); virtualFileManager = VirtualFileManager.getInstance(); queue = new BackgroundTaskQueue(project, IgnoreBundle.message("cache.indexing")); settings = IgnoreSettings.getInstance(); } /** * Helper for fetching {@link IgnoreFile} using {@link VirtualFile}. * * @param file current file * @return {@link IgnoreFile} */ @Nullable private IgnoreFile getIgnoreFile(@Nullable VirtualFile file) { if (file == null || !file.exists()) { return null; } PsiFile psiFile = psiManager.findFile(file); if (psiFile == null || !(psiFile instanceof IgnoreFile)) { return null; } return (IgnoreFile) psiFile; } /** * Checks if file is ignored. * * @param file current file * @return file is ignored */ public boolean isFileIgnored(@NotNull final VirtualFile file) { return isEnabled() && cache.isFileIgnored(file); } /** * Checks if file's parents are ignored. * * @param file current file * @return file's parents are ignored */ public boolean isParentIgnored(@NotNull final VirtualFile file) { if (!isEnabled()) { return false; } VirtualFile parent = file.getParent(); while (parent != null && Utils.isInProject(parent, myProject)) { if (isFileIgnored(parent)) { return true; } parent = parent.getParent(); } return false; } /** * Checks if ignored files watching is enabled. * * @return enabled */ private boolean isEnabled() { return settings.isIgnoredFileStatus(); } /** * Invoked when the project corresponding to this component instance is opened.<p> * Note that components may be created for even unopened projects and this method can be never * invoked for a particular component instance (for example for default project). */ @Override public void projectOpened() { if (isEnabled() && !working) { enable(); } } /** * Enable manager. */ private void enable() { if (working) { return; } virtualFileManager.addVirtualFileListener(virtualFileListener); psiManager.addPsiTreeChangeListener(psiTreeChangeListener); settings.addListener(settingsListener); messageBus = myProject.getMessageBus().connect(); messageBus.subscribe(ProjectLevelVcsManager.VCS_CONFIGURATION_CHANGED, vcsListener); working = true; retrieve(); } /** * Invoked when the project corresponding to this component instance is closed.<p> * Note that components may be created for even unopened projects and this method can be never * invoked for a particular component instance (for example for default project). */ @Override public void projectClosed() { disable(); } /** * Disable manager. */ private void disable() { alarm.cancelAllRequests(); virtualFileManager.removeVirtualFileListener(virtualFileListener); psiManager.removePsiTreeChangeListener(psiTreeChangeListener); settings.removeListener(settingsListener); if (messageBus != null) { messageBus.disconnect(); } cache.clear(); working = false; } /** * Runs {@link #enable()} or {@link #disable()} depending on the passed value. * * @param enable or disable */ private void toggle(Boolean enable) { if (enable) { enable(); } else { disable(); } } /** * Triggers caching actions. 
*/ private void retrieve() { if (!Alarm.isEventDispatchThread()) { return; } alarm.cancelAllRequests(); alarm.addRequest(new Runnable() { @Override public void run() { FileManager fileManager = psiManager.getFileManager(); if (!(fileManager instanceof FileManagerImpl)) { return; } if (((FileManagerImpl) psiManager.getFileManager()).isInitialized()) { alarm.cancelAllRequests(); queue.clear(); // Search for Ignore files in the project final GlobalSearchScope scope = GlobalSearchScope.allScope(myProject); final List<IgnoreFile> files = ContainerUtil.newArrayList(); for (final IgnoreLanguage language : IgnoreBundle.LANGUAGES) { if (language.isEnabled()) { for (VirtualFile virtualFile : FileTypeIndex.getFiles(language.getFileType(), scope)) { ContainerUtil.addIfNotNull(getIgnoreFile(virtualFile), files); } } } Utils.ignoreFilesSort(files); addTasksFor(files); // Search for outer files if (settings.isOuterIgnoreRules()) { for (IgnoreLanguage language : IgnoreBundle.LANGUAGES) { if (language.isEnabled()) { VirtualFile outerFile = language.getOuterFile(myProject); addTaskFor(getIgnoreFile(outerFile)); } } } } } /** * Adds {@link IgnoreFile} to the cache processor queue. * * @param files to cache */ private void addTasksFor(@NotNull final List<IgnoreFile> files) { if (files.isEmpty()) { return; } addTaskFor(files.remove(0), files); } /** * Adds {@link IgnoreFile} to the cache processor queue. * * @param file to cache */ private void addTaskFor(@Nullable final IgnoreFile file) { addTaskFor(file, null); } /** * Adds {@link IgnoreFile} to the cache processor queue. * * @param file to cache * @param dependentFiles files to cache if not ignored by given file */ private void addTaskFor(@Nullable final IgnoreFile file, @Nullable final List<IgnoreFile> dependentFiles) { if (file == null) { return; } queue.run(new Task.Backgroundable(myProject, IgnoreBundle.message("cache.indexing")) { @Override public void run(@NotNull ProgressIndicator indicator) { if (isFileIgnored(file.getVirtualFile())) { indicator.cancel(); } else { String path = Utils.getRelativePath(myProject.getBaseDir(), file.getVirtualFile()); indicator.setText(path); cache.add(file); } if (dependentFiles == null || dependentFiles.isEmpty()) { return; } for (IgnoreFile dependentFile : dependentFiles) { if (!isFileIgnored(dependentFile.getVirtualFile()) && !isParentIgnored(dependentFile.getVirtualFile())) { addTaskFor(dependentFile); } } } }); } }, 200); } /** * Unique name of this component. If there is another component with the same name or * name is null internal assertion will occur. * * @return the name of this component */ @NonNls @NotNull @Override public String getComponentName() { return "IgnoreManager"; } }
Fixed #144 IndexOutOfBoundsException
src/mobi/hsz/idea/gitignore/IgnoreManager.java
Fixed #144 IndexOutOfBoundsException
<ide><path>src/mobi/hsz/idea/gitignore/IgnoreManager.java
<ide> import org.jetbrains.annotations.NotNull;
<ide> import org.jetbrains.annotations.Nullable;
<ide>
<add>import java.util.Collection;
<ide> import java.util.List;
<ide>
<ide> import static mobi.hsz.idea.gitignore.settings.IgnoreSettings.KEY;
<ide> final List<IgnoreFile> files = ContainerUtil.newArrayList();
<ide> for (final IgnoreLanguage language : IgnoreBundle.LANGUAGES) {
<ide> if (language.isEnabled()) {
<del> for (VirtualFile virtualFile : FileTypeIndex.getFiles(language.getFileType(), scope)) {
<del> ContainerUtil.addIfNotNull(getIgnoreFile(virtualFile), files);
<add> try {
<add> Collection<VirtualFile> virtualFiles = FileTypeIndex.getFiles(language.getFileType(), scope);
<add> for (VirtualFile virtualFile : virtualFiles) {
<add> ContainerUtil.addIfNotNull(getIgnoreFile(virtualFile), files);
<add> }
<add> } catch (IndexOutOfBoundsException ignored) {
<ide> }
<ide> }
<ide> }
Java
apache-2.0
9c5227943b43bdb1e63fc4d3926e9b1f729b3402
0
Sausure/WIFIADB
package adb.wifi.woaiwhz.wifiadbandroid.activity; import android.animation.Animator; import android.animation.AnimatorListenerAdapter; import android.animation.AnimatorSet; import android.animation.ArgbEvaluator; import android.animation.ObjectAnimator; import android.animation.TimeInterpolator; import android.animation.TypeEvaluator; import android.content.Intent; import android.graphics.drawable.GradientDrawable; import android.net.Uri; import android.os.Build; import android.os.Bundle; import android.support.annotation.IdRes; import android.support.annotation.NonNull; import android.support.design.widget.Snackbar; import android.support.v4.content.res.ResourcesCompat; import android.support.v7.app.AppCompatActivity; import android.support.v7.widget.PopupMenu; import android.support.v7.widget.SwitchCompat; import android.support.v7.widget.Toolbar; import android.text.TextUtils; import android.util.Property; import android.view.MenuItem; import android.view.View; import android.view.animation.AccelerateDecelerateInterpolator; import android.view.animation.Interpolator; import android.widget.CompoundButton; import android.widget.ImageButton; import android.widget.TextView; import adb.wifi.woaiwhz.wifiadbandroid.R; import adb.wifi.woaiwhz.wifiadbandroid.base.CircularRevealDrawable; import adb.wifi.woaiwhz.wifiadbandroid.base.OnTouchInterceptor; import adb.wifi.woaiwhz.wifiadbandroid.base.WiFiModule; import adb.wifi.woaiwhz.wifiadbandroid.bean.AlphaProperty; import adb.wifi.woaiwhz.wifiadbandroid.bean.BottomProperty; import adb.wifi.woaiwhz.wifiadbandroid.bean.ColorProperty; import adb.wifi.woaiwhz.wifiadbandroid.bean.ScaleProperty; import adb.wifi.woaiwhz.wifiadbandroid.bean.State; import adb.wifi.woaiwhz.wifiadbandroid.bean.XProperty; import adb.wifi.woaiwhz.wifiadbandroid.bean.YProperty; import adb.wifi.woaiwhz.wifiadbandroid.presenter.MainPresenter; public class MainActivity extends AppCompatActivity implements MainPresenter.MainView,View.OnClickListener, CompoundButton.OnCheckedChangeListener,PopupMenu.OnMenuItemClickListener{ private View mSplashContainer; private View mRevealHolderView; private View mMaskView; private View mLoading; private TextView mIpValue; private ImageButton mCenterButton; private View mIpContainer; private Toolbar mToolbar; private SwitchCompat mSplashSwitch; private SwitchCompat mToolbarSwitch; private View mIconPortReady; private View mIconPortUnready; private View mShowMenu; private PopupMenu mPopupMenu; private CircularRevealDrawable mRevelDrawable; private MainPresenter mPresenter; private SwitchCompat mCurrentSwitch; private SwitchCompat mHidingSwitch; private AnimatorSet mWifiReadyAnimate; private AnimatorSet mWifiUnreadyAnimate; private AnimatorSet mPortReadyAnimate; private AnimatorSet mPortUnreadyAnimate; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); mSplashContainer = $(R.id.splash_container); mSplashSwitch = $(R.id.splash_switch_wifi); mRevealHolderView = $(R.id.reveal_layout); mMaskView = $(R.id.mask); mCenterButton = $(R.id.center_button); mIconPortReady = $(R.id.ic_port_ready); mIconPortUnready = $(R.id.ic_port_unready); mLoading = $(R.id.loading); mIpValue = $(R.id.ip_value); mIpContainer = $(R.id.ip_layout); mToolbarSwitch = $(R.id.switch_real); mToolbar = $(R.id.toolbar); mShowMenu = $(R.id.main_menu); init(); } private void init(){ setSupportActionBar(mToolbar); mRevelDrawable = new CircularRevealDrawable(); 
mRevelDrawable.putColor(State.PORT_READY,R.color.port_ready_primary); mRevelDrawable.putColor(State.PORT_UNREADY,R.color.port_unready_primary); if(Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) { mRevealHolderView.setBackground(mRevelDrawable); }else { mRevealHolderView.setBackgroundDrawable(mRevelDrawable); } mMaskView.setOnTouchListener(new OnTouchInterceptor()); mCenterButton.setOnClickListener(this); mShowMenu.setOnClickListener(this); mSplashSwitch.setOnCheckedChangeListener(this); mToolbarSwitch.setOnCheckedChangeListener(this); mSplashSwitch.setOnClickListener(this); mToolbarSwitch.setOnClickListener(this); mCurrentSwitch = mSplashSwitch; mHidingSwitch = mToolbarSwitch; } @Override public void onWindowFocusChanged(boolean hasFocus) { super.onWindowFocusChanged(hasFocus); if (!isAnimateReady() && hasFocus) { initAnimate(); mPresenter.onStart(); } } private void initAnimate(){ initWifiStateAnimate(); initPortStateAnimate(); } private void initPortStateAnimate(){ final GradientDrawable centerDrawable = (GradientDrawable) mCenterButton.getDrawable(); final int portReadyAccent = ResourcesCompat.getColor(getResources(),R.color.port_ready_accent,null); final int portUnreadyAccent = ResourcesCompat.getColor(getResources(),R.color.port_unready_accent,null); final Property<GradientDrawable,Integer> colorProperty = new ColorProperty(Integer.class,"colorProperty"); final Property<View,Float> alphaProperty = new AlphaProperty(Float.class,"alphaProperty"); final Interpolator interpolator = new AccelerateDecelerateInterpolator(); final TypeEvaluator typeEvaluator = new ArgbEvaluator(); final ObjectAnimator readyCenterButtonColor = ObjectAnimator.ofInt(centerDrawable,colorProperty,portUnreadyAccent,portReadyAccent); readyCenterButtonColor.setEvaluator(typeEvaluator); mPortReadyAnimate = new AnimatorSet(); mPortReadyAnimate.setInterpolator(interpolator); AnimatorSet iconPortReadyAnimateSet = new AnimatorSet(); iconPortReadyAnimateSet.play(ObjectAnimator.ofFloat(mIconPortUnready,alphaProperty,0f)) .before(ObjectAnimator.ofFloat(mIconPortReady,alphaProperty,1f)); mPortReadyAnimate.play(readyCenterButtonColor) .with(ObjectAnimator.ofFloat(mIpContainer,alphaProperty,1f)) .with(iconPortReadyAnimateSet); mPortReadyAnimate.addListener(new AnimatorListenerAdapter() { @Override public void onAnimationStart(Animator animation) { showMaskWithoutLoading(); mRevelDrawable.changeState(State.PORT_READY); } @Override public void onAnimationEnd(Animator animation) { hideMask(); } }); final ObjectAnimator unreadyCenterButton = ObjectAnimator.ofInt(centerDrawable,colorProperty,portReadyAccent,portUnreadyAccent); unreadyCenterButton.setEvaluator(typeEvaluator); AnimatorSet iconPortUnreadyAnimateSet = new AnimatorSet(); iconPortUnreadyAnimateSet.play(ObjectAnimator.ofFloat(mIconPortReady,alphaProperty,0f)) .before(ObjectAnimator.ofFloat(mIconPortUnready,alphaProperty,1f)); mPortUnreadyAnimate = new AnimatorSet(); mPortUnreadyAnimate.setInterpolator(interpolator); mPortUnreadyAnimate.play(unreadyCenterButton) .with(ObjectAnimator.ofFloat(mIpContainer,alphaProperty,0f)) .with(iconPortUnreadyAnimateSet); mPortUnreadyAnimate.addListener(new AnimatorListenerAdapter() { @Override public void onAnimationStart(Animator animation) { showMaskWithoutLoading(); mRevelDrawable.changeState(State.PORT_UNREADY); } @Override public void onAnimationEnd(Animator animation) { hideMask(); } }); } private void initWifiStateAnimate(){ final int toolbarBottom = mToolbar.getBottom(); final int splashBottom = 
mSplashContainer.getBottom(); final float noScale = getResources().getInteger(R.integer.switch_no_scale); final float maxScale = getResources().getInteger(R.integer.switch_max_scale); final float bigSwitchX = mSplashSwitch.getX(); final float bigSwitchY = mSplashSwitch.getY(); final float smallSwitchX = mToolbarSwitch.getX(); final float smallSwitchY = mToolbarSwitch.getY(); final long duration = 600L; final TimeInterpolator interpolator = new AccelerateDecelerateInterpolator(); final Property<View,Float> xProperty = new XProperty(Float.class,"xProperty"); final Property<View,Float> yProperty = new YProperty(Float.class,"yProperty"); final Property<View,Integer> bottomProperty = new BottomProperty(Integer.class,"bottomProperty"); final Property<View,Float> scaleProperty = new ScaleProperty(Float.class,"scaleProperty"); mWifiReadyAnimate = new AnimatorSet(); mWifiReadyAnimate.play(ObjectAnimator.ofFloat(mSplashSwitch,scaleProperty,maxScale,noScale)) .with(ObjectAnimator.ofFloat(mSplashSwitch,xProperty,bigSwitchX,smallSwitchX)) .with(ObjectAnimator.ofFloat(mSplashSwitch,yProperty,bigSwitchY,smallSwitchY)) .with(ObjectAnimator.ofInt(mSplashContainer,bottomProperty,splashBottom,toolbarBottom)); mWifiReadyAnimate.setDuration(duration) .setInterpolator(interpolator); mWifiReadyAnimate.addListener(new AnimatorListenerAdapter() { @Override public void onAnimationStart(Animator animation) { mRevealHolderView.setVisibility(View.VISIBLE); showMaskWithoutLoading(); } @Override public void onAnimationEnd(Animator animation) { mToolbar.setVisibility(View.VISIBLE); mSplashContainer.setVisibility(View.GONE); mSplashContainer.setBottom(splashBottom); mSplashSwitch.setScaleX(maxScale); mSplashSwitch.setScaleY(maxScale); mSplashSwitch.setX(bigSwitchX); mSplashSwitch.setY(bigSwitchY); mCurrentSwitch = mToolbarSwitch; mHidingSwitch = mSplashSwitch; hideMask(); } }); mWifiUnreadyAnimate = new AnimatorSet(); mWifiUnreadyAnimate .play(ObjectAnimator.ofFloat(mToolbarSwitch,scaleProperty,noScale,maxScale)) .with(ObjectAnimator.ofFloat(mToolbarSwitch,xProperty,smallSwitchX,bigSwitchX)) .with(ObjectAnimator.ofFloat(mToolbarSwitch,yProperty,smallSwitchY,bigSwitchY)) .with(ObjectAnimator.ofInt(mToolbar,bottomProperty,toolbarBottom,splashBottom)); mWifiUnreadyAnimate.addListener(new AnimatorListenerAdapter() { @Override public void onAnimationStart(Animator animation) { showMaskWithoutLoading(); } @Override public void onAnimationEnd(Animator animation) { mSplashContainer.setVisibility(View.VISIBLE); mRevealHolderView.setVisibility(View.GONE); mToolbar.setVisibility(View.GONE); mToolbar.setBottom(toolbarBottom); mToolbarSwitch.setScaleX(noScale); mToolbarSwitch.setScaleY(noScale); mToolbarSwitch.setX(smallSwitchX); mToolbarSwitch.setY(smallSwitchY); mCurrentSwitch = mSplashSwitch; mHidingSwitch = mToolbarSwitch; hideMask(); Snackbar.make(mRevealHolderView,R.string.wifi_no_ready,Snackbar.LENGTH_SHORT).show(); } }); mWifiUnreadyAnimate.setDuration(duration) .setInterpolator(interpolator); mToolbar.setVisibility(View.GONE); } @Override protected void onStart() { super.onStart(); if(mPresenter == null){ mPresenter = new MainPresenter(this); } if(isAnimateReady()) { mPresenter.onStart(); } } @SuppressWarnings("unchecked") private <T extends View> T $(@IdRes int id){ return (T) findViewById(id); } @Override protected void onStop() { super.onStop(); mPresenter.onStop(); } @Override public void pageLoading(boolean show) { if(show){ showMaskWithLoading(); }else { hideMask(); } } private void showMaskWithLoading(){ 
setVisible(mLoading,View.VISIBLE); setVisible(mMaskView,View.VISIBLE); } private void showMaskWithoutLoading(){ setVisible(mLoading,View.GONE); setVisible(mMaskView,View.VISIBLE); } private void hideMask(){ setVisible(mMaskView,View.GONE); } private void setVisible(@NonNull View view,int visibility){ if(view.getVisibility() != visibility){ view.setVisibility(visibility); } } @Override public void onWifiUnready() { mCurrentSwitch.setChecked(false); wifiStateAnimate(false); } private void onWifiReady() { mCurrentSwitch.setChecked(true); wifiStateAnimate(true); } @Override public void onPortReady(String ip) { onWifiReady(); mIpValue.setText(ip); portStateAnimate(true); } @Override public void onPortUnready() { onWifiReady(); portStateAnimate(false); } private void portStateAnimate(boolean ready){ if(!isAnimateReady()){ return; } if(mPortReadyAnimate.isRunning()){ mPortReadyAnimate.cancel(); } if(mPortUnreadyAnimate.isRunning()){ mPortUnreadyAnimate.cancel(); } if(ready){ mPortReadyAnimate.start(); }else { mPortUnreadyAnimate.start(); } } @Override public void onActionFail(@NonNull String message) { Snackbar.make(mRevealHolderView,message,Snackbar.LENGTH_SHORT).show(); } @Override public void onClick(View v) { if(v == mCenterButton){ mPresenter.toggle(); }else if(v == mCurrentSwitch){ final boolean isChecked = mCurrentSwitch.isChecked(); WiFiModule.getInstance().enable(isChecked); }else if(v == mShowMenu){ if(mPopupMenu == null){ mPopupMenu = new PopupMenu(this,mShowMenu); mPopupMenu.getMenuInflater().inflate(R.menu.menu_main,mPopupMenu.getMenu()); mPopupMenu.setOnMenuItemClickListener(this); } mPopupMenu.show(); } } @Override public void onCheckedChanged(CompoundButton v, final boolean isChecked) { if(v != mCurrentSwitch){ return; } mHidingSwitch.setChecked(isChecked); } private boolean isAnimateReady(){ //只需判断一个就好 return mWifiUnreadyAnimate != null; } private void wifiStateAnimate(boolean ready){ if(!isAnimateReady()){ return; } if(mWifiReadyAnimate.isRunning()){ mWifiReadyAnimate.cancel(); } if(mWifiUnreadyAnimate.isRunning()){ mWifiUnreadyAnimate.cancel(); } if(ready){ mWifiReadyAnimate.start(); }else { mWifiUnreadyAnimate.start(); } } @Override public boolean onMenuItemClick(MenuItem item) { final int id = item.getItemId(); switch (id){ case R.id.get_start: browse("https://github.com/Sausure/WIFIADB/blob/master/WIFIADBAndroid/README.md"); return true; case R.id.about: browse("https://github.com/Sausure/WIFIADB/blob/master/README.md"); return true; case R.id.get_intellij_plugin: browse("https://github.com/Sausure/WIFIADB/tree/master/WIFIADBIntelliJPlugin"); return true; default: return false; } } private void browse(String url){ if(TextUtils.isEmpty(url)){ return; } final Uri uri = Uri.parse(url); final Intent intent = new Intent(Intent.ACTION_VIEW,uri); startActivity(intent); } }
WIFIADBAndroid/app/src/main/java/adb/wifi/woaiwhz/wifiadbandroid/activity/MainActivity.java
package adb.wifi.woaiwhz.wifiadbandroid.activity; import android.animation.Animator; import android.animation.AnimatorListenerAdapter; import android.animation.AnimatorSet; import android.animation.ArgbEvaluator; import android.animation.ObjectAnimator; import android.animation.TimeInterpolator; import android.animation.TypeEvaluator; import android.content.Intent; import android.graphics.drawable.GradientDrawable; import android.net.Uri; import android.os.Build; import android.os.Bundle; import android.support.annotation.IdRes; import android.support.annotation.NonNull; import android.support.design.widget.Snackbar; import android.support.v4.content.res.ResourcesCompat; import android.support.v7.app.AppCompatActivity; import android.support.v7.widget.PopupMenu; import android.support.v7.widget.SwitchCompat; import android.support.v7.widget.Toolbar; import android.text.TextUtils; import android.util.Property; import android.view.MenuItem; import android.view.View; import android.view.animation.AccelerateDecelerateInterpolator; import android.view.animation.Interpolator; import android.widget.CompoundButton; import android.widget.ImageButton; import android.widget.TextView; import adb.wifi.woaiwhz.wifiadbandroid.R; import adb.wifi.woaiwhz.wifiadbandroid.base.CircularRevealDrawable; import adb.wifi.woaiwhz.wifiadbandroid.base.OnTouchInterceptor; import adb.wifi.woaiwhz.wifiadbandroid.base.WiFiModule; import adb.wifi.woaiwhz.wifiadbandroid.bean.AlphaProperty; import adb.wifi.woaiwhz.wifiadbandroid.bean.BottomProperty; import adb.wifi.woaiwhz.wifiadbandroid.bean.ColorProperty; import adb.wifi.woaiwhz.wifiadbandroid.bean.ScaleProperty; import adb.wifi.woaiwhz.wifiadbandroid.bean.State; import adb.wifi.woaiwhz.wifiadbandroid.bean.XProperty; import adb.wifi.woaiwhz.wifiadbandroid.bean.YProperty; import adb.wifi.woaiwhz.wifiadbandroid.presenter.MainPresenter; public class MainActivity extends AppCompatActivity implements MainPresenter.MainView,View.OnClickListener, CompoundButton.OnCheckedChangeListener,PopupMenu.OnMenuItemClickListener{ private View mSplashContainer; private View mRevealHolderView; private View mMaskView; private View mLoading; private TextView mIpValue; private ImageButton mCenterButton; private View mIpContainer; private Toolbar mToolbar; private SwitchCompat mSplashSwitch; private SwitchCompat mToolbarSwitch; private View mIconPortReady; private View mIconPortUnready; private View mShowMenu; private PopupMenu mPopupMenu; private CircularRevealDrawable mRevelDrawable; private MainPresenter mPresenter; private SwitchCompat mCurrentSwitch; private SwitchCompat mHidingSwitch; private AnimatorSet mWifiReadyAnimate; private AnimatorSet mWifiUnreadyAnimate; private AnimatorSet mPortReadyAnimate; private AnimatorSet mPortUnreadyAnimate; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); mSplashContainer = $(R.id.splash_container); mSplashSwitch = $(R.id.splash_switch_wifi); mRevealHolderView = $(R.id.reveal_layout); mMaskView = $(R.id.mask); mCenterButton = $(R.id.center_button); mIconPortReady = $(R.id.ic_port_ready); mIconPortUnready = $(R.id.ic_port_unready); mLoading = $(R.id.loading); mIpValue = $(R.id.ip_value); mIpContainer = $(R.id.ip_layout); mToolbarSwitch = $(R.id.switch_real); mToolbar = $(R.id.toolbar); mShowMenu = $(R.id.main_menu); init(); } private void init(){ setSupportActionBar(mToolbar); mRevelDrawable = new CircularRevealDrawable(); 
mRevelDrawable.putColor(State.PORT_READY,R.color.port_ready_primary); mRevelDrawable.putColor(State.PORT_UNREADY,R.color.port_unready_primary); if(Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) { mRevealHolderView.setBackground(mRevelDrawable); }else { mRevealHolderView.setBackgroundDrawable(mRevelDrawable); } mMaskView.setOnTouchListener(new OnTouchInterceptor()); mCenterButton.setOnClickListener(this); mShowMenu.setOnClickListener(this); mSplashSwitch.setOnCheckedChangeListener(this); mToolbarSwitch.setOnCheckedChangeListener(this); mSplashSwitch.setOnClickListener(this); mToolbarSwitch.setOnClickListener(this); mCurrentSwitch = mSplashSwitch; mHidingSwitch = mToolbarSwitch; } @Override public void onWindowFocusChanged(boolean hasFocus) { super.onWindowFocusChanged(hasFocus); if (!isAnimateReady() && hasFocus) { initAnimate(); mPresenter.onStart(); } } private void initAnimate(){ initWifiStateAnimate(); initPortStateAnimate(); } private void initPortStateAnimate(){ final GradientDrawable centerDrawable = (GradientDrawable) mCenterButton.getDrawable(); final int portReadyAccent = ResourcesCompat.getColor(getResources(),R.color.port_ready_accent,null); final int portUnreadyAccent = ResourcesCompat.getColor(getResources(),R.color.port_unready_accent,null); final Property<GradientDrawable,Integer> colorProperty = new ColorProperty(Integer.class,"colorProperty"); final Property<View,Float> alphaProperty = new AlphaProperty(Float.class,"alphaProperty"); final Interpolator interpolator = new AccelerateDecelerateInterpolator(); final TypeEvaluator typeEvaluator = new ArgbEvaluator(); final ObjectAnimator readyCenterButtonColor = ObjectAnimator.ofInt(centerDrawable,colorProperty,portUnreadyAccent,portReadyAccent); readyCenterButtonColor.setEvaluator(typeEvaluator); mPortReadyAnimate = new AnimatorSet(); mPortReadyAnimate.setInterpolator(interpolator); AnimatorSet iconPortReadyAnimateSet = new AnimatorSet(); iconPortReadyAnimateSet.play(ObjectAnimator.ofFloat(mIconPortUnready,alphaProperty,0f)) .before(ObjectAnimator.ofFloat(mIconPortReady,alphaProperty,1f)); mPortReadyAnimate.play(readyCenterButtonColor) .with(ObjectAnimator.ofFloat(mIpContainer,alphaProperty,1f)) .with(iconPortReadyAnimateSet); mPortReadyAnimate.addListener(new AnimatorListenerAdapter() { @Override public void onAnimationStart(Animator animation) { showMaskWithoutLoading(); mRevelDrawable.changeState(State.PORT_READY); } @Override public void onAnimationEnd(Animator animation) { hideMask(); } }); final ObjectAnimator unreadyCenterButton = ObjectAnimator.ofInt(centerDrawable,colorProperty,portReadyAccent,portUnreadyAccent); unreadyCenterButton.setEvaluator(typeEvaluator); AnimatorSet iconPortUnreadyAnimateSet = new AnimatorSet(); iconPortUnreadyAnimateSet.play(ObjectAnimator.ofFloat(mIconPortReady,alphaProperty,0f)) .before(ObjectAnimator.ofFloat(mIconPortUnready,alphaProperty,1f)); mPortUnreadyAnimate = new AnimatorSet(); mPortUnreadyAnimate.setInterpolator(interpolator); mPortUnreadyAnimate.play(unreadyCenterButton) .with(ObjectAnimator.ofFloat(mIpContainer,alphaProperty,0f)) .with(iconPortUnreadyAnimateSet); mPortUnreadyAnimate.addListener(new AnimatorListenerAdapter() { @Override public void onAnimationStart(Animator animation) { showMaskWithoutLoading(); mRevelDrawable.changeState(State.PORT_UNREADY); } @Override public void onAnimationEnd(Animator animation) { hideMask(); } }); } private void initWifiStateAnimate(){ final int toolbarBottom = mToolbar.getBottom(); final int splashBottom = 
mSplashContainer.getBottom(); final float noScale = getResources().getInteger(R.integer.switch_no_scale); final float maxScale = getResources().getInteger(R.integer.switch_max_scale); final float bigSwitchX = mSplashSwitch.getX(); final float bigSwitchY = mSplashSwitch.getY(); final float smallSwitchX = mToolbarSwitch.getX(); final float smallSwitchY = mToolbarSwitch.getY(); final long duration = 600L; final TimeInterpolator interpolator = new AccelerateDecelerateInterpolator(); final Property<View,Float> xProperty = new XProperty(Float.class,"xProperty"); final Property<View,Float> yProperty = new YProperty(Float.class,"yProperty"); final Property<View,Integer> bottomProperty = new BottomProperty(Integer.class,"bottomProperty"); final Property<View,Float> scaleProperty = new ScaleProperty(Float.class,"scaleProperty"); mWifiReadyAnimate = new AnimatorSet(); mWifiReadyAnimate.play(ObjectAnimator.ofFloat(mSplashSwitch,scaleProperty,maxScale,noScale)) .with(ObjectAnimator.ofFloat(mSplashSwitch,xProperty,bigSwitchX,smallSwitchX)) .with(ObjectAnimator.ofFloat(mSplashSwitch,yProperty,bigSwitchY,smallSwitchY)) .with(ObjectAnimator.ofInt(mSplashContainer,bottomProperty,splashBottom,toolbarBottom)); mWifiReadyAnimate.setDuration(duration) .setInterpolator(interpolator); mWifiReadyAnimate.addListener(new AnimatorListenerAdapter() { @Override public void onAnimationStart(Animator animation) { mRevealHolderView.setVisibility(View.VISIBLE); showMaskWithoutLoading(); } @Override public void onAnimationEnd(Animator animation) { mToolbar.setVisibility(View.VISIBLE); mSplashContainer.setVisibility(View.GONE); mSplashContainer.setBottom(splashBottom); mSplashSwitch.setScaleX(maxScale); mSplashSwitch.setScaleY(maxScale); mSplashSwitch.setX(bigSwitchX); mSplashSwitch.setY(bigSwitchY); mCurrentSwitch = mToolbarSwitch; mHidingSwitch = mSplashSwitch; hideMask(); } }); mWifiUnreadyAnimate = new AnimatorSet(); mWifiUnreadyAnimate .play(ObjectAnimator.ofFloat(mToolbarSwitch,scaleProperty,noScale,maxScale)) .with(ObjectAnimator.ofFloat(mToolbarSwitch,xProperty,smallSwitchX,bigSwitchX)) .with(ObjectAnimator.ofFloat(mToolbarSwitch,yProperty,smallSwitchY,bigSwitchY)) .with(ObjectAnimator.ofInt(mToolbar,bottomProperty,toolbarBottom,splashBottom)); mWifiUnreadyAnimate.addListener(new AnimatorListenerAdapter() { @Override public void onAnimationStart(Animator animation) { showMaskWithoutLoading(); } @Override public void onAnimationEnd(Animator animation) { mSplashContainer.setVisibility(View.VISIBLE); mRevealHolderView.setVisibility(View.GONE); mToolbar.setVisibility(View.GONE); mToolbar.setBottom(toolbarBottom); mToolbarSwitch.setScaleX(noScale); mToolbarSwitch.setScaleY(noScale); mToolbarSwitch.setX(smallSwitchX); mToolbarSwitch.setY(smallSwitchY); mCurrentSwitch = mSplashSwitch; mHidingSwitch = mToolbarSwitch; hideMask(); Snackbar.make(mRevealHolderView,R.string.wifi_no_ready,Snackbar.LENGTH_SHORT).show(); } }); mWifiUnreadyAnimate.setDuration(duration) .setInterpolator(interpolator); mToolbar.setVisibility(View.GONE); } @Override protected void onStart() { super.onStart(); if(mPresenter == null){ mPresenter = new MainPresenter(this); } if(isAnimateReady()) { mPresenter.onStart(); } } @SuppressWarnings("unchecked") private <T extends View> T $(@IdRes int id){ return (T) findViewById(id); } @Override protected void onStop() { super.onStop(); mPresenter.onStop(); } @Override public void pageLoading(boolean show) { if(show){ showMaskWithLoading(); }else { hideMask(); } } private void showMaskWithLoading(){ 
setVisible(mLoading,View.VISIBLE); setVisible(mMaskView,View.VISIBLE); } private void showMaskWithoutLoading(){ setVisible(mLoading,View.GONE); setVisible(mMaskView,View.VISIBLE); } private void hideMask(){ setVisible(mMaskView,View.GONE); } private void setVisible(@NonNull View view,int visibility){ if(view.getVisibility() != visibility){ view.setVisibility(visibility); } } @Override public void onWifiUnready() { mCurrentSwitch.setChecked(false); wifiStateAnimate(false); } private void onWifiReady() { mCurrentSwitch.setChecked(true); wifiStateAnimate(true); } @Override public void onPortReady(String ip) { onWifiReady(); mIpValue.setText(ip); portStateAnimate(true); } @Override public void onPortUnready() { onWifiReady(); portStateAnimate(false); } private void portStateAnimate(boolean ready){ if(!isAnimateReady()){ return; } if(mPortReadyAnimate.isRunning()){ mPortReadyAnimate.cancel(); } if(mPortUnreadyAnimate.isRunning()){ mPortUnreadyAnimate.cancel(); } if(ready){ mPortReadyAnimate.start(); }else { mPortUnreadyAnimate.start(); } } @Override public void onActionFail(@NonNull String message) { Snackbar.make(mRevealHolderView,message,Snackbar.LENGTH_SHORT).show(); } @Override public void onClick(View v) { if(v == mCenterButton){ mPresenter.toggle(); }else if(v == mCurrentSwitch){ final boolean isChecked = mCurrentSwitch.isChecked(); WiFiModule.getInstance().enable(isChecked); }else if(v == mShowMenu){ if(mPopupMenu == null){ mPopupMenu = new PopupMenu(this,mShowMenu); mPopupMenu.getMenuInflater().inflate(R.menu.menu_main,mPopupMenu.getMenu()); mPopupMenu.setOnMenuItemClickListener(this); } mPopupMenu.show(); } } @Override public void onCheckedChanged(CompoundButton v, final boolean isChecked) { if(v != mCurrentSwitch){ return; } mHidingSwitch.setChecked(isChecked); } private boolean isAnimateReady(){ //只需判断一个就好 return mWifiUnreadyAnimate != null; } private void wifiStateAnimate(boolean ready){ if(!isAnimateReady()){ return; } if(mWifiReadyAnimate.isRunning()){ mWifiReadyAnimate.cancel(); } if(mWifiUnreadyAnimate.isRunning()){ mWifiUnreadyAnimate.cancel(); } if(ready){ mWifiReadyAnimate.start(); }else { mWifiUnreadyAnimate.start(); } } @Override public boolean onMenuItemClick(MenuItem item) { final int id = item.getItemId(); switch (id){ case R.id.get_start: browse("https://github.com/Sausure/WIFIADB"); return true; case R.id.about: browse("https://github.com/Sausure/WIFIADB/tree/master/WIFIADBAndroid"); return true; case R.id.get_intellij_plugin: browse("https://github.com/Sausure/WIFIADB/tree/master/WIFIADBIntelliJPlugin"); return true; default: return false; } } private void browse(String url){ if(TextUtils.isEmpty(url)){ return; } final Uri uri = Uri.parse(url); final Intent intent = new Intent(Intent.ACTION_VIEW,uri); startActivity(intent); } }
something unimportant
WIFIADBAndroid/app/src/main/java/adb/wifi/woaiwhz/wifiadbandroid/activity/MainActivity.java
something unimportant
<ide><path>WIFIADBAndroid/app/src/main/java/adb/wifi/woaiwhz/wifiadbandroid/activity/MainActivity.java
<ide>
<ide> switch (id){
<ide> case R.id.get_start:
<del> browse("https://github.com/Sausure/WIFIADB");
<add> browse("https://github.com/Sausure/WIFIADB/blob/master/WIFIADBAndroid/README.md");
<ide> return true;
<ide>
<ide> case R.id.about:
<del> browse("https://github.com/Sausure/WIFIADB/tree/master/WIFIADBAndroid");
<add> browse("https://github.com/Sausure/WIFIADB/blob/master/README.md");
<ide> return true;
<ide>
<ide> case R.id.get_intellij_plugin:
Java
apache-2.0
90ca40af7205835ccc864f6ee013c884ddf25754
0
phac-nml/irida,phac-nml/irida,phac-nml/irida,phac-nml/irida,phac-nml/irida,phac-nml/irida,phac-nml/irida,phac-nml/irida
package ca.corefacility.bioinformatics.irida.ria.integration; import ca.corefacility.bioinformatics.irida.ria.integration.pages.AbstractPage; import ca.corefacility.bioinformatics.irida.ria.integration.pages.LoginPage; import ca.corefacility.bioinformatics.irida.ria.integration.pages.user.PasswordResetPage; import com.github.springtestdbunit.annotation.DatabaseSetup; import org.junit.Test; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; /** * <p> Integration test to ensure that the Login Page works and redirects the user to the dashboard. </p> * */ @DatabaseSetup("/ca/corefacility/bioinformatics/irida/ria/web/LoginPageIT.xml") public class LoginPageIT extends AbstractIridaUIITChromeDriver { private static final String EXPIRED_USERNAME = "expiredGuy"; private static final String EXPIRED_PASSWORD = "Password1"; @Test public void testBadUsername() throws Exception { LoginPage page = LoginPage.to(driver()); page.login(LoginPage.BAD_USERNAME, LoginPage.GOOD_PASSWORD); assertTrue("Should update the url with '?error=true'", driver().getCurrentUrl().contains("login?error=true")); assertEquals("Should display error on bad login", page.getErrors(), "Incorrect Email or Password"); } @Test public void testBadPassword() throws Exception { LoginPage page = LoginPage.to(driver()); page.login(LoginPage.USER_USERNAME, LoginPage.BAD_PASSWORD); assertTrue("Should update the url with '?error=true'", driver().getCurrentUrl().contains("login?error=true")); assertEquals("Should display error on bad login", page.getErrors(), "Incorrect Email or Password"); } @Test public void testGoodLogin() throws Exception { LoginPage.login(driver(), LoginPage.MANAGER_USERNAME, LoginPage.GOOD_PASSWORD); assertTrue("The 'test' user is logged in and redirected.", driver().getCurrentUrl().contains("dashboard")); } @Test public void testExpiredCredentialsLogin() throws Exception { LoginPage page = LoginPage.to(driver()); page.login(EXPIRED_USERNAME, EXPIRED_PASSWORD); assertTrue("The 'expiredGuy' user should be sent to a password reset page.", driver().getCurrentUrl().contains("password_reset/")); } @Test public void testLoginWithChangedCredentials() { String newPassword = "aGoodP@ssW0rD"; LoginPage page = LoginPage.to(driver()); page.login(EXPIRED_USERNAME, EXPIRED_PASSWORD); PasswordResetPage passwordResetPage = new PasswordResetPage(driver()); passwordResetPage.enterPassword(newPassword, newPassword); assertTrue("Should have succeeded in changing password.", passwordResetPage.checkSuccess()); AbstractPage.logout(driver()); page = LoginPage.to(driver()); page.login(EXPIRED_USERNAME, newPassword); assertTrue("The user is logged in and redirected.", driver().getCurrentUrl().contains("dashboard")); } }
src/test/java/ca/corefacility/bioinformatics/irida/ria/integration/LoginPageIT.java
package ca.corefacility.bioinformatics.irida.ria.integration; import ca.corefacility.bioinformatics.irida.ria.integration.pages.AbstractPage; import ca.corefacility.bioinformatics.irida.ria.integration.pages.LoginPage; import ca.corefacility.bioinformatics.irida.ria.integration.pages.user.PasswordResetPage; import com.github.springtestdbunit.annotation.DatabaseSetup; import org.junit.Test; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; /** * <p> Integration test to ensure that the Login Page works and redirects the user to the dashboard. </p> * */ @DatabaseSetup("/ca/corefacility/bioinformatics/irida/ria/web/LoginPageIT.xml") public class LoginPageIT extends AbstractIridaUIITChromeDriver { private static final String EXPIRED_USERNAME = "expiredGuy"; private static final String EXPIRED_PASSWORD = "Password1"; private final int TIMES_UNTIL_PASS = 3; private static int timesRerun = 0; @Test public void testBadUsername() throws Exception { LoginPage page = LoginPage.to(driver()); page.login(LoginPage.BAD_USERNAME, LoginPage.GOOD_PASSWORD); assertTrue("Should update the url with '?error=true'", driver().getCurrentUrl().contains("login?error=true")); assertEquals("Should display error on bad login", page.getErrors(), "Incorrect Email or Password"); } @Test public void testBadPassword() throws Exception { LoginPage page = LoginPage.to(driver()); page.login(LoginPage.USER_USERNAME, LoginPage.BAD_PASSWORD); assertTrue("Should update the url with '?error=true'", driver().getCurrentUrl().contains("login?error=true")); assertEquals("Should display error on bad login", page.getErrors(), "Incorrect Email or Password"); } @Test public void testGoodLogin() throws Exception { LoginPage.login(driver(), LoginPage.MANAGER_USERNAME, LoginPage.GOOD_PASSWORD); assertTrue("The 'test' user is logged in and redirected.", driver().getCurrentUrl().contains("dashboard")); } @Test public void testExpiredCredentialsLogin() throws Exception { LoginPage page = LoginPage.to(driver()); page.login(EXPIRED_USERNAME, EXPIRED_PASSWORD); assertTrue("The 'expiredGuy' user should be sent to a password reset page.", driver().getCurrentUrl().contains("password_reset/")); } @Test public void testLoginWithChangedCredentials() { String newPassword = "aGoodP@ssW0rD"; LoginPage page = LoginPage.to(driver()); page.login(EXPIRED_USERNAME, EXPIRED_PASSWORD); PasswordResetPage passwordResetPage = new PasswordResetPage(driver()); passwordResetPage.enterPassword(newPassword, newPassword); assertTrue("Should have succeeded in changing password.", passwordResetPage.checkSuccess()); AbstractPage.logout(driver()); page = LoginPage.to(driver()); page.login(EXPIRED_USERNAME, newPassword); assertTrue("The user is logged in and redirected.", driver().getCurrentUrl().contains("dashboard")); } @Test public void testFailUntil3rdTime() { timesRerun++; if (timesRerun < TIMES_UNTIL_PASS) { throw new NullPointerException(); } } }
Removed unnecessary test for flakiness.
src/test/java/ca/corefacility/bioinformatics/irida/ria/integration/LoginPageIT.java
Removed unnecessary test for flakiness.
<ide><path>src/test/java/ca/corefacility/bioinformatics/irida/ria/integration/LoginPageIT.java
<ide>
<ide> private static final String EXPIRED_USERNAME = "expiredGuy";
<ide> private static final String EXPIRED_PASSWORD = "Password1";
<del>
<del> private final int TIMES_UNTIL_PASS = 3;
<del> private static int timesRerun = 0;
<ide>
<ide> @Test
<ide> public void testBadUsername() throws Exception {
<ide> page.login(EXPIRED_USERNAME, newPassword);
<ide> assertTrue("The user is logged in and redirected.", driver().getCurrentUrl().contains("dashboard"));
<ide> }
<del>
<del> @Test
<del> public void testFailUntil3rdTime() {
<del> timesRerun++;
<del> if (timesRerun < TIMES_UNTIL_PASS) {
<del> throw new NullPointerException();
<del> }
<del> }
<ide> }
JavaScript
mit
10b12f6c59479967631ff0f7010ba462b98b2e1f
0
carbon-io/carbon-client-js,carbon-io/carbon-client-js
var path = require('path').posix var url = require('url') var _ = require('lodash') var ejson = require('@carbon-io/ejson') var Endpoint = require('./Endpoint') /*************************************************************************************************** * @namespace carbon-client */ /*************************************************************************************************** * @class Collection * @description A Collection is a wrapper around an Endpoint that exposes a higher-level * set of methods that abstracts the endpoint as a resource collection. * * Abstract interface * insert: function(objects, options, cb) {}, * insertObject: function(obj, options, cb) {}, * find: function(query, options, cb) {}, * findObject: function(id, options, cb) {}, * update: function(query, update, options, cb) {}, * updateObject: function(id, update, options, cb) {}, * save: function(objects, options, cb) {}, * saveObject: function(object, options, cb) {}, * remove: function(query, options, cb) {}, * removeObject: function(id, options, cb) {} * @memberof carbon-client */ function Collection(endpoint) { this.endpoint = endpoint } /****************************************************************************** * @method find * @description Supported calling forms: * find() * find(query) * find(query, options) * @param {Object} query -- (optional) * @param {Object} options -- (optional) * @returns {xxx} -- Cursor */ Collection.prototype.find = function() { return new Cursor(this, arguments) } /****************************************************************************** * @method _doFind * @description Supported calling forms: * find(cb) * find(query, cb) * find(query, options, cb) * @param {Object} query -- (optional) * @param {Object} options -- (optional) * @param {Function} cb -- xxx * @throws {Error} * @returns {xxx} -- xxx */ Collection.prototype._doFind = function() { var query = undefined var options = {} var cb = undefined switch (arguments.length) { case 3: options = arguments[1] case 2: query = arguments[0] case 1: cb = arguments[arguments.length - 1] break default: throw new Error('_doFind takes 1 to 3 arguments, got ' + arguments.length) } _.set(options, "parameters.query", query) // return this.endpoint.get({}, function(err, res) { // return this.endpoint.get(function(err, res) { return this.endpoint.get(options, function(err, res) { // XXX using same error for now but may want to abstract away HttpError cb(err, res ? res.body : null) }) } /*************************************************************************************************** * @method insert * @description Supported calling forms: * insert(objects, cb) * insert(objects, options, cb) * @param {Array} objects -- * @param {Object} options -- * @param {Function} cb -- xxx * @returns {xxx} -- xxx */ Collection.prototype.insert = function() { var objects = arguments[0] var options = {} var cb = arguments[arguments.length - 1] if (arguments.length == 3) { options = arguments[1] } return this.endpoint.post(objects, options, function(err, res) { try { cb(err, res ? 
res.body : undefined) } catch (e) { cb(e) } }) } /*************************************************************************************************** * @method insertObject * @description Supported calling forms: * insertObject(obj, cb) * insertObject(obj, options, cb) * @param {Object} obj -- * @param {Object} options -- * @param {Function} cb -- xxx * @returns {xxx} -- xxx */ Collection.prototype.insertObject = function() { var obj = arguments[0] var options = {} var cb = arguments[arguments.length - 1] if (arguments.length == 3) { options = arguments[1] } if (_.isArray(obj)) { throw new Error('insertObject: obj cannot be an array. Use Collection.insert() instead.') } // delegate to to insert return this.insert([obj], options, function(e, result) { cb(e, result ? result[0] : undefined) }) } /*************************************************************************************************** * @method update * @description Supported calling forms: * update(query, obj, cb) * update(query, obj, options, cb) * @param {Object} query -- xxx * @param {Object} obj -- xxx * @param {Object} options -- * @param {Function} cb -- xxx * @throws {Error} * @returns {xxx} -- xxx */ Collection.prototype.update = function() { var query = {} var obj = undefined var options = {} var cb = undefined switch (arguments.length) { case 4: options = arguments[2] case 3: query = arguments[0] obj = arguments[1] cb = arguments[arguments.length - 1] break default: throw new Error("Must supply a query and an obj.") } _.set(options, "parameters.query", query) var body = obj return this.endpoint.patch(body, options, function(err, res) { // XXX put or patch depending on obj? Hmmm? // XXX using same error for now but may want to abstract away HttpError cb(err, res && res.body) }) } /*************************************************************************************************** * @method remove * @description Supported calling forms: * remove(query, cb) * remove(query, options, cb) * @param {Object} query -- xxx * @param {Object} options -- * @param {Function} cb -- xxx * @throws {Error} * @returns {xxx} -- xxx */ Collection.prototype.remove = function() { var query = {} var options = {} var cb = undefined switch (arguments.length) { case 3: options = arguments[1] case 2: query = arguments[0] cb = arguments[arguments.length - 1] break default: throw new Error("Must supply a query and a cb.") } _.set(options, "parameters.query", query) return this.endpoint.delete({}, options, function(err, res) { // XXX using same error for now but may want to abstract away HttpError cb(err, res && res.body) }) } /*************************************************************************************************** * @method removeObject * @description Supported calling forms: * removeObject(id, cb) * removeObject(id, options, cb) * @param {Object} id -- xxx * @param {Function} cb -- xxx * @param {Object} options -- xxx * @returns {xxx} -- xxx */ Collection.prototype.removeObject = function() { var id = arguments[0] var options = {} var cb = arguments[arguments.length - 1] if (arguments.length == 3) { options = arguments[1] } return this._getObjectEndpoint(id).delete({}, options, function(err, res) { cb(err, null) }) } /*************************************************************************************************** * @method findObject * @description Supported calling forms: * findObject(id, cb) * @param {Object} id -- xxx * @param {Object} options -- xxx * @param {Function} cb -- xxx * @throws {Error} * @returns {xxx} -- xxx */ 
Collection.prototype.findObject = function() { var id = arguments[0] var options = {} var cb = arguments[arguments.length - 1] if (arguments.length == 3) { options = arguments[1] } if(!id || !cb) { throw new Error("Must supply an id and a cb.") } return this._getObjectEndpoint(id).get(options, function(err, res) { cb(err, res ? res.body : null) }) } /*************************************************************************************************** * @method save * @description Supported calling forms: * save(objects, cb) * save(objects, options, cb) * @param {Array} objects -- xxx * @param {Object} options -- xxx * @param {Function} cb -- xxx * @throws {error} * @returns {xxx} -- xxx */ Collection.prototype.save = function() { var objects = arguments[0] var options = {} var cb = arguments[arguments.length - 1] if (arguments.length == 3) { options = arguments[1] } if (!_.isArray(objects)) { throw new Error("objects must be an Array") } return this.endpoint.put(objects, options, function(err, res) { try { cb(err, res ? res.body : undefined) } catch (e) { cb(e) } }) } /*************************************************************************************************** * @method saveObject * @description Supported calling forms: * saveObject(id, obj, cb) * @param {Object} id -- xxx * @param {Object} obj -- xxx * @param {Object} options -- xxx * @param {Function} cb -- xxx * @throws {error} * @returns {xxx} -- xxx */ Collection.prototype.saveObject = function() { var id = arguments[0] var obj = arguments[1] var options = {} var cb = arguments[arguments.length - 1] if (arguments.length == 4) { options = arguments[2] } if(!id || !cb || !obj) { throw new Error("Must supply id, obj and cb.") } return this._getObjectEndpoint(id).put(obj, options, function(err, res) { try { cb(err, res ? 
res.body : undefined) } catch (e) { cb(e) } }) } /*************************************************************************************************** * @method updateObject * @description Supported calling forms: * updateObject(id, update, cb) * updateObject(id, update, options, cb) * @param {Object} id -- xxx * @param {Object} update -- xxx * @param {Object} options -- xxx * @param {Function} cb -- xxx * @throws {Error} * @returns {xxx} -- xxx */ Collection.prototype.updateObject = function() { var id = arguments[0] var update = arguments[1] var options = {} var cb = arguments[arguments.length - 1] if (arguments.length == 4) { options = arguments[2] } if(!id || !cb || !update) { throw new Error("Must supply id, update and cb.") } return this._getObjectEndpoint(id).patch(update, options, function(err, res) { cb(err, null) }) } /*************************************************************************************************** * @method _getObjectEndpoint() * @param {xxx} id -- xxx * @throws {Error} * @returns {xxx} -- xxx */ Collection.prototype._getObjectEndpoint = function(id) { var idPath = null if(_.isObject(id)) { if (id.$oid) { idPath = id.$oid } else if(id.toHexString) { idPath = id.toHexString() } else { throw Error("Invalid ObjectId: " + id) } if(!ejson.types.ObjectId.isValid(idPath)) { throw Error("Invalid ObjectId: " + id) } } else { idPath = id } return this.endpoint.getEndpoint(idPath) } /*************************************************************************************************** * @class Cursor * @memberof carbon-client * * @description A Cursor is a cursor object returned by Collection.find() method and used for iterating over * results from find() * Abstract interface * each: function(cb) {}, * next: function(cb) {}, * toArray: function(cb) {} * * * **** How it works: * - When you call collection.find() this will only construct a Cursor object without any data being loaded yet. * Data will be loaded when its requested, which is either by the each(), toArray(), or the next() methods. * * - Data is loaded in batches. This is implemented through paginating the server-side Carbon Collection by passing * skip/limit options of. Default batch size is 100 (Cursor.bufferLimit). * * - If a "limit" option was passed to the find() method option, then there will be one batch which contain "limit" * number of items * * - Cursor object holds on to the current loaded batch (Cursor.items) and holds on the next cursor position * (Cursor.nextItemPos). When next() called, then it will return Cursor.items[nextItemPos] and increment nextItemPos. * * - When the current page finishes, then a new page is loaded and so on until the Cursor._needToGetMore() returns false * * - toArray(): returns an array of items contain items of next() until end. So items that have already been fetched * through next() won't be returned in toArray(). e.g. 
* collection has ['a', 'b', 'c'] * calling * cursor = collection.find() * cursor.next() returns 'a' * calling cursor.toArray() after returns ['b', 'c'] * * */ function Cursor(collection, findArguments) { // the collection object this.collection = collection this.findArguments = parseFindArgs(findArguments) // limit for items to be fetched this.bufferLimit = 100 // skip value to be applied when fetching the next batch this.bufferSkip = 0 // current items fetched this.items = null // cursor's next item position within self.items this.nextItemPos = null } /*************************************************************************************************** * @method forEach * @param {Function} iterator -- for processing each item * @param {Function} cb -- end callback * @returns {undefined} -- undefined */ Cursor.prototype.forEach = function(iterator, cb) { var self = this function callForeachItem(e) { if (e) { cb(e) } else { var item while ((item = self._nextObject()) != null) { iterator(item) } if (self._needToGetMore()) { self.getMore(callForeachItem) } else { // make last call cb(null) } } } if (this._needToGetMore()) { this.getMore(callForeachItem) } else { callForeachItem() } } /*************************************************************************************************** * @method toArray * @param {Function} cb -- xxx * @returns {undefined} -- undefined */ Cursor.prototype.toArray = function(cb) { var self = this var result = [] function appendRemaining() { var item while ((item = self._nextObject()) != null) { result.push(item) } } // append whats currently available appendRemaining() function appendRemainingAndCallback(e) { if (e) { cb(e) return } appendRemaining() cb(null, result) } // append whatever is currently available // fetch remaining and append it to the result if (this._needToGetMore()) { this.getMore(appendRemainingAndCallback, true) } else { cb(null, result) } } /*************************************************************************************************** * @method next * @param {Function} cb -- xxx * @returns {undefined} -- undefined */ Cursor.prototype.next = function(cb) { var self = this function nextCallback(e) { if (e) { cb(e) } else { cb(null, self._nextObject()) } } if (this._needToGetMore()) { this.getMore(nextCallback) } else { nextCallback() } } /*************************************************************************************************** * @method getMore * @param {Function} cb -- xxx * @param {xxx} exhaust -- indicates whether to exhaust the cursor. defaults to false. 
* @returns {undefined} -- undefined */ Cursor.prototype.getMore = function(cb, exhaust) { var options = this._constructFindOptions(exhaust) var self = this this.collection._doFind(this.findArguments.query, options, function(e, data) { if (e) { cb(e) } else { self.items = data self.bufferSkip = self.bufferSkip + self.bufferLimit self.nextItemPos = 0 cb() } }) } /*************************************************************************************************** * @method _constructFindOptions * @param {xxx} exhaust -- xxx * @returns {xxx} -- xxx */ Cursor.prototype._constructFindOptions = function(exhaust) { var options = _.cloneDeep(this.findArguments.options || {}) if (!options.skip) { options.skip = this.bufferSkip } if (!options.limit && !exhaust) { options.limit = this.bufferLimit } return options } /*************************************************************************************************** * @method _nextObject * @returns {xxx} -- xxx */ Cursor.prototype._nextObject = function() { if (this.items && this.nextItemPos < this.items.length) { var item = this.items[this.nextItemPos] this.nextItemPos++ return item } else { return null } } /*************************************************************************************************** * @method _needToGetMore * @returns {boolean} -- xxx */ Cursor.prototype._needToGetMore = function() { return this.items == null|| (!this.findArguments.options.limit && this.items.length == this.bufferLimit && this.nextItemPos >= this.items.length) } /*************************************************************************************************** * @method parseFindArgs * @param {xxx} findArgs -- xxx * @returns {object} -- {query: undefined, options: {}, cb: undefined} */ function parseFindArgs(findArgs) { var query = undefined var options = {} switch (findArgs.length) { case 2: options = findArgs[1] case 1: query = findArgs[0] break default: } return { query: query, options: options } } Collection.cursorClass = Cursor /*************************************************************************************************** * exports */ module.exports = Collection
lib/Collection.js
var path = require('path').posix var url = require('url') var _ = require('lodash') var ejson = require('@carbon-io/ejson') var Endpoint = require('./Endpoint') DEFAULT_ID_HEADER = 'carbonio-id' /*************************************************************************************************** * @namespace carbon-client */ /*************************************************************************************************** * @class Collection * @description A Collection is a wrapper around an Endpoint that exposes a higher-level * set of methods that abstracts the endpoint as a resource collection. * * Abstract interface * insert: function(objects, options, cb) {}, * insertObject: function(obj, options, cb) {}, * find: function(query, options, cb) {}, * findObject: function(id, options, cb) {}, * update: function(query, update, options, cb) {}, * updateObject: function(id, update, options, cb) {}, * save: function(objects, options, cb) {}, * saveObject: function(object, options, cb) {}, * remove: function(query, options, cb) {}, * removeObject: function(id, options, cb) {} * @memberof carbon-client */ function Collection(endpoint) { this.endpoint = endpoint } Collection.idHeader = DEFAULT_ID_HEADER Collection._reconcileId = function(obj, res) { var _id = undefined if (res.statusCode === 201) { if (_.isNil(res.headers)) { throw new Error('Got 201 response without headers') } // support InsertConfig.returnsInsertedObject if (!_.isNil(res.body)) { obj = res.body } if (!_.isNil(res.headers[Collection.idHeader])) { _id = ejson.parse(res.headers[Collection.idHeader]) } else { try { _id = path.basename( path.normalize( url.parse(res.headers['location']).pathname)) } catch (e) { // if TypeError, then location header was bad, fall through if (!(e instanceof TypeError)) { // unknown error throw e } } } if (_.isNil(_id)) { throw new Error('Did not find expected _id in "location" or "' + Collection.idHeader + '" headers on 201 response') } // if obj is an array of objects then ensure that _id is an array and matches obj length if (_.isArray(obj) && (!_.isArray(_id) || obj.length != _id.length)) { throw new Error("Returned _id in headers is not array or does not match length of inserted objects") } if (!_.isNil(obj._id)) { if (ejson.isObjectId(_id) && !_id.equals(obj._id) || ejson.stringify(_id) !== ejson.stringify(obj._id)) { throw new Error( ejson.stringify(_id) + ' != ' + ejson.stringify(obj._id)) } } else { if (_.isArray(obj)) { for (var i = 0; i < obj.length; i++) { obj[i]._id = _id[i] } } else { obj._id = _id } } } return obj } /****************************************************************************** * @method find * @description Supported calling forms: * find() * find(query) * find(query, options) * @param {Object} query -- (optional) * @param {Object} options -- (optional) * @returns {xxx} -- Cursor */ Collection.prototype.find = function() { return new Cursor(this, arguments) } /****************************************************************************** * @method _doFind * @description Supported calling forms: * find(cb) * find(query, cb) * find(query, options, cb) * @param {Object} query -- (optional) * @param {Object} options -- (optional) * @param {Function} cb -- xxx * @throws {Error} * @returns {xxx} -- xxx */ Collection.prototype._doFind = function() { var query = undefined var options = {} var cb = undefined switch (arguments.length) { case 3: options = arguments[1] case 2: query = arguments[0] case 1: cb = arguments[arguments.length - 1] break default: throw new 
Error('_doFind takes 1 to 3 arguments, got ' + arguments.length) } _.set(options, "parameters.query", query) // return this.endpoint.get({}, function(err, res) { // return this.endpoint.get(function(err, res) { return this.endpoint.get(options, function(err, res) { // XXX using same error for now but may want to abstract away HttpError cb(err, res ? res.body : null) }) } /*************************************************************************************************** * @method insert * @description Supported calling forms: * insert(objects, cb) * insert(objects, options, cb) * @param {Array} objects -- * @param {Object} options -- * @param {Function} cb -- xxx * @returns {xxx} -- xxx */ Collection.prototype.insert = function() { var objects = arguments[0] var options = {} var cb = arguments[arguments.length - 1] if (arguments.length == 3) { options = arguments[1] } return this.endpoint.post(objects, options, function(err, res) { try { cb(err, res ? Collection._reconcileId(objects, res) : undefined) } catch (e) { cb(e) } }) } /*************************************************************************************************** * @method insertObject * @description Supported calling forms: * insertObject(obj, cb) * insertObject(obj, options, cb) * @param {Object} obj -- * @param {Object} options -- * @param {Function} cb -- xxx * @returns {xxx} -- xxx */ Collection.prototype.insertObject = function() { var obj = arguments[0] var options = {} var cb = arguments[arguments.length - 1] if (arguments.length == 3) { options = arguments[1] } if (_.isArray(obj)) { throw new Error('insertObject: obj cannot be an array. Use Collection.insert() instead.') } // delegate to to insert return this.insert([obj], options, function(e, result) { cb(e, result ? result[0] : undefined) }) } /*************************************************************************************************** * @method update * @description Supported calling forms: * update(query, obj, cb) * update(query, obj, options, cb) * @param {Object} query -- xxx * @param {Object} obj -- xxx * @param {Object} options -- * @param {Function} cb -- xxx * @throws {Error} * @returns {xxx} -- xxx */ Collection.prototype.update = function() { var query = {} var obj = undefined var options = {} var cb = undefined switch (arguments.length) { case 4: options = arguments[2] case 3: query = arguments[0] obj = arguments[1] cb = arguments[arguments.length - 1] break default: throw new Error("Must supply a query and an obj.") } _.set(options, "parameters.query", query) var body = obj return this.endpoint.patch(body, options, function(err, res) { // XXX put or patch depending on obj? Hmmm? 
// XXX using same error for now but may want to abstract away HttpError cb(err, res && res.body) }) } /*************************************************************************************************** * @method remove * @description Supported calling forms: * remove(query, cb) * remove(query, options, cb) * @param {Object} query -- xxx * @param {Object} options -- * @param {Function} cb -- xxx * @throws {Error} * @returns {xxx} -- xxx */ Collection.prototype.remove = function() { var query = {} var options = {} var cb = undefined switch (arguments.length) { case 3: options = arguments[1] case 2: query = arguments[0] cb = arguments[arguments.length - 1] break default: throw new Error("Must supply a query and a cb.") } _.set(options, "parameters.query", query) return this.endpoint.delete({}, options, function(err, res) { // XXX using same error for now but may want to abstract away HttpError cb(err, res && res.body) }) } /*************************************************************************************************** * @method removeObject * @description Supported calling forms: * removeObject(id, cb) * removeObject(id, options, cb) * @param {Object} id -- xxx * @param {Function} cb -- xxx * @param {Object} options -- xxx * @returns {xxx} -- xxx */ Collection.prototype.removeObject = function() { var id = arguments[0] var options = {} var cb = arguments[arguments.length - 1] if (arguments.length == 3) { options = arguments[1] } return this._getObjectEndpoint(id).delete({}, options, function(err, res) { cb(err, null) }) } /*************************************************************************************************** * @method findObject * @description Supported calling forms: * findObject(id, cb) * @param {Object} id -- xxx * @param {Object} options -- xxx * @param {Function} cb -- xxx * @throws {Error} * @returns {xxx} -- xxx */ Collection.prototype.findObject = function() { var id = arguments[0] var options = {} var cb = arguments[arguments.length - 1] if (arguments.length == 3) { options = arguments[1] } if(!id || !cb) { throw new Error("Must supply an id and a cb.") } return this._getObjectEndpoint(id).get(options, function(err, res) { cb(err, res ? res.body : null) }) } /*************************************************************************************************** * @method save * @description Supported calling forms: * save(objects, cb) * save(objects, options, cb) * @param {Array} objects -- xxx * @param {Object} options -- xxx * @param {Function} cb -- xxx * @throws {error} * @returns {xxx} -- xxx */ Collection.prototype.save = function() { var objects = arguments[0] var options = {} var cb = arguments[arguments.length - 1] if (arguments.length == 3) { options = arguments[1] } if (!_.isArray(objects)) { throw new Error("objects must be an Array") } return this.endpoint.put(objects, options, function(err, res) { try { cb(err, res ? 
Collection._reconcileId(objects, res) : undefined) } catch (e) { cb(e) } }) } /*************************************************************************************************** * @method saveObject * @description Supported calling forms: * saveObject(id, obj, cb) * @param {Object} id -- xxx * @param {Object} obj -- xxx * @param {Object} options -- xxx * @param {Function} cb -- xxx * @throws {error} * @returns {xxx} -- xxx */ Collection.prototype.saveObject = function() { var id = arguments[0] var obj = arguments[1] var options = {} var cb = arguments[arguments.length - 1] if (arguments.length == 4) { options = arguments[2] } if(!id || !cb || !obj) { throw new Error("Must supply id, obj and cb.") } return this._getObjectEndpoint(id).put(obj, options, function(err, res) { try { cb(err, res ? Collection._reconcileId(obj, res) : undefined) } catch (e) { cb(e) } }) } /*************************************************************************************************** * @method updateObject * @description Supported calling forms: * updateObject(id, update, cb) * updateObject(id, update, options, cb) * @param {Object} id -- xxx * @param {Object} update -- xxx * @param {Object} options -- xxx * @param {Function} cb -- xxx * @throws {Error} * @returns {xxx} -- xxx */ Collection.prototype.updateObject = function() { var id = arguments[0] var update = arguments[1] var options = {} var cb = arguments[arguments.length - 1] if (arguments.length == 4) { options = arguments[2] } if(!id || !cb || !update) { throw new Error("Must supply id, update and cb.") } return this._getObjectEndpoint(id).patch(update, options, function(err, res) { cb(err, null) }) } /*************************************************************************************************** * @method _getObjectEndpoint() * @param {xxx} id -- xxx * @throws {Error} * @returns {xxx} -- xxx */ Collection.prototype._getObjectEndpoint = function(id) { var idPath = null if(_.isObject(id)) { if (id.$oid) { idPath = id.$oid } else if(id.toHexString) { idPath = id.toHexString() } else { throw Error("Invalid ObjectId: " + id) } if(!ejson.types.ObjectId.isValid(idPath)) { throw Error("Invalid ObjectId: " + id) } } else { idPath = id } return this.endpoint.getEndpoint(idPath) } /*************************************************************************************************** * @class Cursor * @memberof carbon-client * * @description A Cursor is a cursor object returned by Collection.find() method and used for iterating over * results from find() * Abstract interface * each: function(cb) {}, * next: function(cb) {}, * toArray: function(cb) {} * * * **** How it works: * - When you call collection.find() this will only construct a Cursor object without any data being loaded yet. * Data will be loaded when its requested, which is either by the each(), toArray(), or the next() methods. * * - Data is loaded in batches. This is implemented through paginating the server-side Carbon Collection by passing * skip/limit options of. Default batch size is 100 (Cursor.bufferLimit). * * - If a "limit" option was passed to the find() method option, then there will be one batch which contain "limit" * number of items * * - Cursor object holds on to the current loaded batch (Cursor.items) and holds on the next cursor position * (Cursor.nextItemPos). When next() called, then it will return Cursor.items[nextItemPos] and increment nextItemPos. 
* * - When the current page finishes, then a new page is loaded and so on until the Cursor._needToGetMore() returns false * * - toArray(): returns an array of items contain items of next() until end. So items that have already been fetched * through next() won't be returned in toArray(). e.g. * collection has ['a', 'b', 'c'] * calling * cursor = collection.find() * cursor.next() returns 'a' * calling cursor.toArray() after returns ['b', 'c'] * * */ function Cursor(collection, findArguments) { // the collection object this.collection = collection this.findArguments = parseFindArgs(findArguments) // limit for items to be fetched this.bufferLimit = 100 // skip value to be applied when fetching the next batch this.bufferSkip = 0 // current items fetched this.items = null // cursor's next item position within self.items this.nextItemPos = null } /*************************************************************************************************** * @method forEach * @param {Function} iterator -- for processing each item * @param {Function} cb -- end callback * @returns {undefined} -- undefined */ Cursor.prototype.forEach = function(iterator, cb) { var self = this function callForeachItem(e) { if (e) { cb(e) } else { var item while ((item = self._nextObject()) != null) { iterator(item) } if (self._needToGetMore()) { self.getMore(callForeachItem) } else { // make last call cb(null) } } } if (this._needToGetMore()) { this.getMore(callForeachItem) } else { callForeachItem() } } /*************************************************************************************************** * @method toArray * @param {Function} cb -- xxx * @returns {undefined} -- undefined */ Cursor.prototype.toArray = function(cb) { var self = this var result = [] function appendRemaining() { var item while ((item = self._nextObject()) != null) { result.push(item) } } // append whats currently available appendRemaining() function appendRemainingAndCallback(e) { if (e) { cb(e) return } appendRemaining() cb(null, result) } // append whatever is currently available // fetch remaining and append it to the result if (this._needToGetMore()) { this.getMore(appendRemainingAndCallback, true) } else { cb(null, result) } } /*************************************************************************************************** * @method next * @param {Function} cb -- xxx * @returns {undefined} -- undefined */ Cursor.prototype.next = function(cb) { var self = this function nextCallback(e) { if (e) { cb(e) } else { cb(null, self._nextObject()) } } if (this._needToGetMore()) { this.getMore(nextCallback) } else { nextCallback() } } /*************************************************************************************************** * @method getMore * @param {Function} cb -- xxx * @param {xxx} exhaust -- indicates whether to exhaust the cursor. defaults to false. 
* @returns {undefined} -- undefined */ Cursor.prototype.getMore = function(cb, exhaust) { var options = this._constructFindOptions(exhaust) var self = this this.collection._doFind(this.findArguments.query, options, function(e, data) { if (e) { cb(e) } else { self.items = data self.bufferSkip = self.bufferSkip + self.bufferLimit self.nextItemPos = 0 cb() } }) } /*************************************************************************************************** * @method _constructFindOptions * @param {xxx} exhaust -- xxx * @returns {xxx} -- xxx */ Cursor.prototype._constructFindOptions = function(exhaust) { var options = _.cloneDeep(this.findArguments.options || {}) if (!options.skip) { options.skip = this.bufferSkip } if (!options.limit && !exhaust) { options.limit = this.bufferLimit } return options } /*************************************************************************************************** * @method _nextObject * @returns {xxx} -- xxx */ Cursor.prototype._nextObject = function() { if (this.items && this.nextItemPos < this.items.length) { var item = this.items[this.nextItemPos] this.nextItemPos++ return item } else { return null } } /*************************************************************************************************** * @method _needToGetMore * @returns {boolean} -- xxx */ Cursor.prototype._needToGetMore = function() { return this.items == null|| (!this.findArguments.options.limit && this.items.length == this.bufferLimit && this.nextItemPos >= this.items.length) } /*************************************************************************************************** * @method parseFindArgs * @param {xxx} findArgs -- xxx * @returns {object} -- {query: undefined, options: {}, cb: undefined} */ function parseFindArgs(findArgs) { var query = undefined var options = {} switch (findArgs.length) { case 2: options = findArgs[1] case 1: query = findArgs[0] break default: } return { query: query, options: options } } Collection.cursorClass = Cursor /*************************************************************************************************** * exports */ module.exports = Collection
return results from insert/save as is
lib/Collection.js
return results from insert/save as is
<ide><path>ib/Collection.js <ide> var ejson = require('@carbon-io/ejson') <ide> <ide> var Endpoint = require('./Endpoint') <del> <del>DEFAULT_ID_HEADER = 'carbonio-id' <ide> <ide> /*************************************************************************************************** <ide> * @namespace carbon-client <ide> this.endpoint = endpoint <ide> } <ide> <del>Collection.idHeader = DEFAULT_ID_HEADER <del>Collection._reconcileId = function(obj, res) { <del> var _id = undefined <del> <del> if (res.statusCode === 201) { <del> if (_.isNil(res.headers)) { <del> throw new Error('Got 201 response without headers') <del> } <del> <del> // support InsertConfig.returnsInsertedObject <del> if (!_.isNil(res.body)) { <del> obj = res.body <del> } <del> <del> if (!_.isNil(res.headers[Collection.idHeader])) { <del> _id = ejson.parse(res.headers[Collection.idHeader]) <del> } else { <del> try { <del> _id = path.basename( <del> path.normalize( <del> url.parse(res.headers['location']).pathname)) <del> } catch (e) { <del> // if TypeError, then location header was bad, fall through <del> if (!(e instanceof TypeError)) { <del> // unknown error <del> throw e <del> } <del> } <del> } <del> <del> if (_.isNil(_id)) { <del> throw new Error('Did not find expected _id in "location" or "' + <del> Collection.idHeader + <del> '" headers on 201 response') <del> <del> } <del> <del> // if obj is an array of objects then ensure that _id is an array and matches obj length <del> if (_.isArray(obj) && (!_.isArray(_id) || obj.length != _id.length)) { <del> throw new Error("Returned _id in headers is not array or does not match length of inserted objects") <del> } <del> <del> <del> if (!_.isNil(obj._id)) { <del> if (ejson.isObjectId(_id) && !_id.equals(obj._id) || <del> ejson.stringify(_id) !== ejson.stringify(obj._id)) { <del> throw new Error( <del> ejson.stringify(_id) + ' != ' + ejson.stringify(obj._id)) <del> } <del> } else { <del> if (_.isArray(obj)) { <del> for (var i = 0; i < obj.length; i++) { <del> obj[i]._id = _id[i] <del> } <del> } else { <del> obj._id = _id <del> } <del> <del> } <del> } <del> <del> return obj <del>} <del> <ide> /****************************************************************************** <ide> * @method find <ide> * @description Supported calling forms: <ide> <ide> return this.endpoint.post(objects, options, function(err, res) { <ide> try { <del> cb(err, res ? Collection._reconcileId(objects, res) : undefined) <add> cb(err, res ? res.body : undefined) <ide> } catch (e) { <ide> cb(e) <ide> } <ide> <ide> return this.endpoint.put(objects, options, function(err, res) { <ide> try { <del> cb(err, res ? Collection._reconcileId(objects, res) : undefined) <add> cb(err, res ? res.body : undefined) <ide> } catch (e) { <ide> cb(e) <ide> } <ide> <ide> return this._getObjectEndpoint(id).put(obj, options, function(err, res) { <ide> try { <del> cb(err, res ? Collection._reconcileId(obj, res) : undefined) <add> cb(err, res ? res.body : undefined) <ide> } catch (e) { <ide> cb(e) <ide> }
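The Collection.js record above documents carbon-client's Collection/Cursor surface (insertObject, find, next, toArray, batched fetching in pages of bufferLimit = 100) and a commit that makes the insert/save callbacks pass res.body through unchanged. The JavaScript sketch below only illustrates that surface and is not taken from the repository: the endpoint variable is assumed to be an already-constructed Endpoint wired to a running Carbon service (its construction is not part of this record), and the sample data is invented.

// Usage sketch for the Collection / Cursor API shown in the record above.
// Assumption: "endpoint" is an existing Endpoint instance pointing at a live service.
var Collection = require('./lib/Collection')

var users = new Collection(endpoint)   // wrap the endpoint as a collection

// After the "return results from insert/save as is" commit, the callback
// receives the raw response body (res.body) rather than a reconciled _id.
users.insertObject({ name: 'alice' }, function(err, created) {
  if (err) return console.error(err)
  console.log('server returned:', created)
})

// find() builds a Cursor; nothing is fetched until next()/toArray()/forEach().
var cursor = users.find({ active: true }, { limit: 10 })
cursor.next(function(err, first) {     // loads the first batch, yields items[0]
  if (err) return console.error(err)
  cursor.toArray(function(err, rest) { // yields only the items not yet consumed
    if (err) return console.error(err)
    console.log(first, rest)
  })
})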
Java
bsd-3-clause
a1756c5c51b2c0432929744a57bb2d6e3291abe7
0
Lucky-Dhakad/semanticvectors,sabitaacharya/semanticvectors,anhth12/semanticvectors
/** Copyright (c) 2008, University of Pittsburgh All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of the University of Pittsburgh nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. **/ package pitt.search.semanticvectors; import java.util.Arrays; import java.util.Collection; import java.util.Enumeration; import java.util.Random; import java.util.logging.Logger; import java.io.File; import java.io.IOException; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.Term; import org.apache.lucene.index.TermEnum; import org.apache.lucene.index.TermPositionVector; import org.apache.lucene.store.FSDirectory; import pitt.search.semanticvectors.vectors.PermutationUtils; import pitt.search.semanticvectors.vectors.Vector; import pitt.search.semanticvectors.vectors.VectorFactory; import pitt.search.semanticvectors.vectors.VectorType; /** * Implementation of vector store that creates term by term * co-occurrence vectors by iterating through all the documents in a * Lucene index. This class implements a sliding context window * approach, as used by Burgess and Lund (HAL) and Schutze amongst * others Uses a sparse representation for the basic document vectors, * which saves considerable space for collections with many individual * documents. * * @author Trevor Cohen, Dominic Widdows. */ public class TermTermVectorsFromLucene implements VectorStore { private static final Logger logger = Logger.getLogger( TermTermVectorsFromLucene.class.getCanonicalName()); private int dimension; private VectorType vectorType; private boolean retraining = false; private VectorStoreRAM termVectors; private VectorStore indexVectors; private String luceneIndexDir; private IndexReader luceneIndexReader; private int seedLength; private String[] fieldsToIndex; private int minFreq; private int maxFreq; private int windowSize; private Vector[] localindexvectors; private LuceneUtils lUtils; private int maxNonAlphabet; private String positionalmethod; /** * Used to store permutations we'll use in training. If positional method is one of the * permutations, this contains the shift for all the focus positions. 
*/ private int[][] permutationCache; static final short NONEXISTENT = -1; @Override public VectorType getVectorType() { return vectorType; } @Override public int getDimension() { return dimension; } /** * @return The object's indexReader. */ public IndexReader getIndexReader(){ return this.luceneIndexReader; } /** * @return The object's basicTermVectors. */ public VectorStore getBasicTermVectors(){ return this.termVectors; } public String[] getFieldsToIndex(){ return this.fieldsToIndex; } // Basic VectorStore interface methods implemented through termVectors. public Vector getVector(Object term) { return termVectors.getVector(term); } public Enumeration<ObjectVector> getAllVectors() { return termVectors.getAllVectors(); } public int getNumVectors() { return termVectors.getNumVectors(); } /** * This constructor uses only the values passed, no parameters from Flag. * @param luceneIndexDir Directory containing Lucene index. * @param vectorType type of vector * @param dimension number of dimension to use for the vectors * @param seedLength Number of +1 or -1 entries in basic * vectors. Should be even to give same number of each. * @param minFreq The minimum term frequency for a term to be indexed. * @param maxFreq The minimum term frequency for a term to be indexed. * @param maxNonAlphabet * @param windowSize The size of the sliding context window. * @param positionalmethod * @param indexVectors * @param fieldsToIndex These fields will be indexed. * @throws IOException */ public TermTermVectorsFromLucene( String luceneIndexDir, VectorType vectorType, int dimension, int seedLength, int minFreq, int maxFreq, int maxNonAlphabet, int windowSize, String positionalmethod, VectorStore indexVectors, String[] fieldsToIndex) throws IOException { this.luceneIndexDir = luceneIndexDir; this.vectorType = vectorType; this.dimension = dimension; this.positionalmethod = positionalmethod; this.minFreq = minFreq; this.maxFreq = maxFreq; this.maxNonAlphabet = maxNonAlphabet; this.fieldsToIndex = fieldsToIndex; this.seedLength = seedLength; this.windowSize = windowSize; this.indexVectors = indexVectors; // TODO(widdows): This clearly demonstrates the need for catching flag values and // turning them into enums earlier in the pipeline. This would be a very silly place to // have a programming typo cause an error! if (positionalmethod.equals("permutation") || positionalmethod.equals("permutation_plus_basic")) { initializePermutations();} else if (positionalmethod.equals("directional")) { initializeDirectionalPermutations(); } trainTermTermVectors(); } /** * Initialize all permutations that might be used. */ private void initializePermutations() { permutationCache = new int[windowSize][PermutationUtils.getPermutationLength(vectorType, dimension)]; for (int i = 0; i < windowSize; ++i) { permutationCache[i] = PermutationUtils.getShiftPermutation( vectorType, dimension, i - windowSize/2); } } /** * Initialize all permutations that might be used (i.e +1 and -1). */ private void initializeDirectionalPermutations() { permutationCache = new int[2][PermutationUtils.getPermutationLength(vectorType, dimension)]; permutationCache[0] = PermutationUtils.getShiftPermutation( vectorType, dimension, -1); permutationCache[1] = PermutationUtils.getShiftPermutation( vectorType, dimension, 1); } private void trainTermTermVectors() throws IOException, RuntimeException { // Check that the Lucene index contains Term Positions. 
LuceneUtils.compressIndex(luceneIndexDir); this.luceneIndexReader = IndexReader.open(FSDirectory.open(new File(luceneIndexDir))); Collection<String> fields_with_positions = luceneIndexReader.getFieldNames(IndexReader.FieldOption.TERMVECTOR_WITH_POSITION); if (fields_with_positions.isEmpty()) { logger.warning("Term-term indexing requires a Lucene index containing TermPositionVectors." + "\nTry rebuilding Lucene index using pitt.search.lucene.IndexFilePositions"); throw new IOException("Lucene indexes not built correctly."); } lUtils = new LuceneUtils(luceneIndexDir); // If basicTermVectors was passed in, set state accordingly. if (indexVectors != null) { retraining = true; VerbatimLogger.info("Reusing basic term vectors; number of terms: " + indexVectors.getNumVectors() + "\n"); } else { this.indexVectors = new VectorStoreRAM(vectorType, dimension); } Random random = new Random(); this.termVectors = new VectorStoreRAM(vectorType, dimension); // Iterate through an enumeration of terms and allocate initial term vectors. // If not retraining, create random elemental vectors as well. TermEnum terms = this.luceneIndexReader.terms(); int tc = 0; while(terms.next()) { Term term = terms.term(); // Skip terms that don't pass the filter. if (!lUtils.termFilter(terms.term(), fieldsToIndex, minFreq, maxFreq, maxNonAlphabet)) { continue; } tc++; Vector termVector = VectorFactory.createZeroVector(Flags.vectortype, dimension); // Place each term vector in the vector store. this.termVectors.putVector(term.text(), termVector); // Do the same for random index vectors unless retraining with trained term vectors if (!retraining) { Vector indexVector = VectorFactory.generateRandomVector( vectorType, dimension, seedLength, random); ((VectorStoreRAM) this.indexVectors).putVector(term.text(), indexVector); } } VerbatimLogger.info("Created basic term vectors for " + tc + " terms (and " + luceneIndexReader.numDocs() + " docs).\n"); // Iterate through documents. int numdocs = this.luceneIndexReader.numDocs(); for (int dc = 0; dc < numdocs; ++dc) { // Output progress counter. if ((dc % 10000 == 0) || (dc < 10000 && dc % 1000 == 0)) { VerbatimLogger.info("Processed " + dc + " documents ... "); } for (String field: fieldsToIndex) { TermPositionVector vex = (TermPositionVector) luceneIndexReader.getTermFreqVector(dc, field); if (vex != null) processTermPositionVector(vex); } } VerbatimLogger.info("Created " + termVectors.getNumVectors() + " term vectors ...\n"); VerbatimLogger.info("Normalizing term vectors.\n"); Enumeration<ObjectVector> e = termVectors.getAllVectors(); while (e.hasMoreElements()) { e.nextElement().getVector().normalize(); } String randFile = "randomvectors.bin"; // If building a permutation index, these need to be written out to be reused. // // TODO(widdows): It is odd to do this here while not writing out the semantic // term vectors here. We should redesign this. 
if ((positionalmethod.equals("permutation") || (positionalmethod.equals("permutation_plus_basic"))) && !retraining) { VerbatimLogger.info("Normalizing and writing random vectors to " + randFile + "\n"); Enumeration<ObjectVector> f = indexVectors.getAllVectors(); while (f.hasMoreElements()) { f.nextElement().getVector().normalize(); } new VectorStoreWriter().writeVectors(randFile, this.indexVectors); } } /** * For each term, add term index vector * for any term occurring within a window of size windowSize such * that for example if windowSize = 5 with the window over the * phrase "your life is your life" the index vectors for terms * "your" and "life" would each be added to the term vector for * "is" twice. * * TermPositionVectors contain arrays of (1) terms as text (2) * term frequencies and (3) term positions within a * document. The index of a particular term within this array * will be referred to as the 'local index' in comments. */ private void processTermPositionVector(TermPositionVector vex) throws ArrayIndexOutOfBoundsException { int[] freqs = vex.getTermFrequencies(); // Find number of positions in document (across all terms). int numwords = freqs.length; int numpositions = 0; for (short tcn = 0; tcn < numwords; ++tcn) { int[] posns = vex.getTermPositions(tcn); for (int pc = 0; pc < posns.length; ++pc) { numpositions = Math.max(numpositions, posns[pc]); } } numpositions += 1; //convert from zero-based index to count // Create local random index and term vectors for relevant terms. localindexvectors = new Vector[numwords]; Vector[] localtermvectors = new Vector[numwords]; // Create index with one space for each position. short[] positions = new short[numpositions]; Arrays.fill(positions, NONEXISTENT); String[] docterms = vex.getTerms(); for (short tcn = 0; tcn < numwords; ++tcn) { // Insert local term indices in position vector. int[] posns = vex.getTermPositions(tcn); // Get all positions of term in document for (int pc = 0; pc < posns.length; ++pc) { // Set position of index vector to local // (document-specific) index of term in this position. int position = posns[pc]; positions[position] = tcn; } // Only terms that have passed the term filter are included in the VectorStores. if (this.indexVectors.getVector(docterms[tcn]) != null) { // Retrieve relevant random index vectors. localindexvectors[tcn] = indexVectors.getVector(docterms[tcn]); // Retrieve relevant term vectors. localtermvectors[tcn] = termVectors.getVector(docterms[tcn]); } } /** Iterate through positions adding index vectors of terms * occurring within window to term vector for focus term **/ int windowRadius = windowSize / 2; for (int focusposn = 0; focusposn < positions.length; ++focusposn) { int focusterm = positions[focusposn]; if (focusterm == NONEXISTENT) continue; int windowstart = Math.max(0, focusposn - windowRadius); int windowend = Math.min(focusposn + windowRadius, positions.length - 1); for (int cursor = windowstart; cursor <= windowend; cursor++) { if (cursor == focusposn) continue; int coterm = positions[cursor]; if (coterm == NONEXISTENT) continue; if (this.indexVectors.getVector(docterms[coterm]) == null || localtermvectors[focusterm] == null) { continue; } // calculate permutation required for either Sahlgren (2008) implementation // encoding word order, or encoding direction as in Burgess and Lund's HAL if (positionalmethod.equals("permutation_plus_basic") || positionalmethod.equals("basic")) { // docterms[coterm] contains the term in position[w] in this document. 
localtermvectors[focusterm].superpose(localindexvectors[coterm], 1, null); } if ((positionalmethod.equals("permutation")) || (positionalmethod.equals("permutation_plus_basic"))) { int[] permutation = permutationCache[cursor - focusposn + windowRadius]; localtermvectors[focusterm].superpose(localindexvectors[coterm], 1, permutation); } else if (positionalmethod.equals("directional")) { int[] permutation = permutationCache[(int) Math.max(0,Math.signum(cursor - focusposn))]; localtermvectors[focusterm].superpose(localindexvectors[coterm], 1, permutation); } } } } }
src/pitt/search/semanticvectors/TermTermVectorsFromLucene.java
/** Copyright (c) 2008, University of Pittsburgh All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of the University of Pittsburgh nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. **/ package pitt.search.semanticvectors; import java.util.Arrays; import java.util.Collection; import java.util.Enumeration; import java.util.Random; import java.util.logging.Logger; import java.io.File; import java.io.IOException; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.Term; import org.apache.lucene.index.TermEnum; import org.apache.lucene.index.TermPositionVector; import org.apache.lucene.store.FSDirectory; import pitt.search.semanticvectors.vectors.PermutationUtils; import pitt.search.semanticvectors.vectors.Vector; import pitt.search.semanticvectors.vectors.VectorFactory; import pitt.search.semanticvectors.vectors.VectorType; /** * Implementation of vector store that creates term by term * co-occurrence vectors by iterating through all the documents in a * Lucene index. This class implements a sliding context window * approach, as used by Burgess and Lund (HAL) and Schutze amongst * others Uses a sparse representation for the basic document vectors, * which saves considerable space for collections with many individual * documents. * * @author Trevor Cohen, Dominic Widdows. */ public class TermTermVectorsFromLucene implements VectorStore { private static final Logger logger = Logger.getLogger( TermTermVectorsFromLucene.class.getCanonicalName()); private int dimension; private VectorType vectorType; private boolean retraining = false; private VectorStoreRAM termVectors; private VectorStore indexVectors; private String luceneIndexDir; private IndexReader luceneIndexReader; private int seedLength; private String[] fieldsToIndex; private int minFreq; private int maxFreq; private int windowSize; private Vector[] localindexvectors; private LuceneUtils lUtils; private int maxNonAlphabet; private String positionalmethod; /** * Used to store permutations we'll use in training. If positional method is one of the * permutations, this contains the shift for all the focus positions. 
*/ private int[][] permutationCache; static final short NONEXISTENT = -1; @Override public VectorType getVectorType() { return vectorType; } @Override public int getDimension() { return dimension; } /** * @return The object's indexReader. */ public IndexReader getIndexReader(){ return this.luceneIndexReader; } /** * @return The object's basicTermVectors. */ public VectorStore getBasicTermVectors(){ return this.termVectors; } public String[] getFieldsToIndex(){ return this.fieldsToIndex; } // Basic VectorStore interface methods implemented through termVectors. public Vector getVector(Object term) { return termVectors.getVector(term); } public Enumeration<ObjectVector> getAllVectors() { return termVectors.getAllVectors(); } public int getNumVectors() { return termVectors.getNumVectors(); } /** * This constructor uses only the values passed, no parameters from Flag. * @param luceneIndexDir Directory containing Lucene index. * @param vectorType type of vector * @param dimension number of dimension to use for the vectors * @param seedLength Number of +1 or -1 entries in basic * vectors. Should be even to give same number of each. * @param minFreq The minimum term frequency for a term to be indexed. * @param maxFreq The minimum term frequency for a term to be indexed. * @param maxNonAlphabet * @param windowSize The size of the sliding context window. * @param positionalmethod * @param indexVectors * @param fieldsToIndex These fields will be indexed. * @throws IOException */ public TermTermVectorsFromLucene( String luceneIndexDir, VectorType vectorType, int dimension, int seedLength, int minFreq, int maxFreq, int maxNonAlphabet, int windowSize, String positionalmethod, VectorStore indexVectors, String[] fieldsToIndex) throws IOException { this.luceneIndexDir = luceneIndexDir; this.vectorType = vectorType; this.dimension = dimension; this.positionalmethod = positionalmethod; this.minFreq = minFreq; this.maxFreq = maxFreq; this.maxNonAlphabet = maxNonAlphabet; this.fieldsToIndex = fieldsToIndex; this.seedLength = seedLength; this.windowSize = windowSize; this.indexVectors = indexVectors; // TODO(widdows): This clearly demonstrates the need for catching flag values and // turning them into enums earlier in the pipeline. This would be a very silly place to // have a programming typo cause an error! if (positionalmethod.equals("permutation") || positionalmethod.equals("permutation_plus_basic")) { initializePermutations(); } trainTermTermVectors(); } /** * Initialize all permutations that might be used. */ private void initializePermutations() { permutationCache = new int[windowSize][PermutationUtils.getPermutationLength(vectorType, dimension)]; for (int i = 0; i < windowSize; ++i) { permutationCache[i] = PermutationUtils.getShiftPermutation( vectorType, dimension, i - windowSize/2); } } private void trainTermTermVectors() throws IOException, RuntimeException { // Check that the Lucene index contains Term Positions. LuceneUtils.compressIndex(luceneIndexDir); this.luceneIndexReader = IndexReader.open(FSDirectory.open(new File(luceneIndexDir))); Collection<String> fields_with_positions = luceneIndexReader.getFieldNames(IndexReader.FieldOption.TERMVECTOR_WITH_POSITION); if (fields_with_positions.isEmpty()) { logger.warning("Term-term indexing requires a Lucene index containing TermPositionVectors." 
+ "\nTry rebuilding Lucene index using pitt.search.lucene.IndexFilePositions"); throw new IOException("Lucene indexes not built correctly."); } lUtils = new LuceneUtils(luceneIndexDir); // If basicTermVectors was passed in, set state accordingly. if (indexVectors != null) { retraining = true; VerbatimLogger.info("Reusing basic term vectors; number of terms: " + indexVectors.getNumVectors() + "\n"); } else { this.indexVectors = new VectorStoreRAM(vectorType, dimension); } Random random = new Random(); this.termVectors = new VectorStoreRAM(vectorType, dimension); // Iterate through an enumeration of terms and allocate initial term vectors. // If not retraining, create random elemental vectors as well. TermEnum terms = this.luceneIndexReader.terms(); int tc = 0; while(terms.next()) { Term term = terms.term(); // Skip terms that don't pass the filter. if (!lUtils.termFilter(terms.term(), fieldsToIndex, minFreq, maxFreq, maxNonAlphabet)) { continue; } tc++; Vector termVector = VectorFactory.createZeroVector(Flags.vectortype, dimension); // Place each term vector in the vector store. this.termVectors.putVector(term.text(), termVector); // Do the same for random index vectors unless retraining with trained term vectors if (!retraining) { Vector indexVector = VectorFactory.generateRandomVector( vectorType, dimension, seedLength, random); ((VectorStoreRAM) this.indexVectors).putVector(term.text(), indexVector); } } VerbatimLogger.info("Created basic term vectors for " + tc + " terms (and " + luceneIndexReader.numDocs() + " docs).\n"); // Iterate through documents. int numdocs = this.luceneIndexReader.numDocs(); for (int dc = 0; dc < numdocs; ++dc) { // Output progress counter. if ((dc % 10000 == 0) || (dc < 10000 && dc % 1000 == 0)) { VerbatimLogger.info("Processed " + dc + " documents ... "); } for (String field: fieldsToIndex) { TermPositionVector vex = (TermPositionVector) luceneIndexReader.getTermFreqVector(dc, field); if (vex != null) processTermPositionVector(vex); } } VerbatimLogger.info("Created " + termVectors.getNumVectors() + " term vectors ...\n"); VerbatimLogger.info("Normalizing term vectors.\n"); Enumeration<ObjectVector> e = termVectors.getAllVectors(); while (e.hasMoreElements()) { e.nextElement().getVector().normalize(); } String randFile = "randomvectors.bin"; // If building a permutation index, these need to be written out to be reused. // // TODO(widdows): It is odd to do this here while not writing out the semantic // term vectors here. We should redesign this. if ((positionalmethod.equals("permutation") || (positionalmethod.equals("permutation_plus_basic"))) && !retraining) { VerbatimLogger.info("Normalizing and writing random vectors to " + randFile + "\n"); Enumeration<ObjectVector> f = indexVectors.getAllVectors(); while (f.hasMoreElements()) { f.nextElement().getVector().normalize(); } new VectorStoreWriter().writeVectors(randFile, this.indexVectors); } } /** * For each term, add term index vector * for any term occurring within a window of size windowSize such * that for example if windowSize = 5 with the window over the * phrase "your life is your life" the index vectors for terms * "your" and "life" would each be added to the term vector for * "is" twice. * * TermPositionVectors contain arrays of (1) terms as text (2) * term frequencies and (3) term positions within a * document. The index of a particular term within this array * will be referred to as the 'local index' in comments. 
*/ private void processTermPositionVector(TermPositionVector vex) throws ArrayIndexOutOfBoundsException { int[] freqs = vex.getTermFrequencies(); // Find number of positions in document (across all terms). int numwords = freqs.length; int numpositions = 0; for (short tcn = 0; tcn < numwords; ++tcn) { int[] posns = vex.getTermPositions(tcn); for (int pc = 0; pc < posns.length; ++pc) { numpositions = Math.max(numpositions, posns[pc]); } } numpositions += 1; //convert from zero-based index to count // Create local random index and term vectors for relevant terms. localindexvectors = new Vector[numwords]; Vector[] localtermvectors = new Vector[numwords]; // Create index with one space for each position. short[] positions = new short[numpositions]; Arrays.fill(positions, NONEXISTENT); String[] docterms = vex.getTerms(); for (short tcn = 0; tcn < numwords; ++tcn) { // Insert local term indices in position vector. int[] posns = vex.getTermPositions(tcn); // Get all positions of term in document for (int pc = 0; pc < posns.length; ++pc) { // Set position of index vector to local // (document-specific) index of term in this position. int position = posns[pc]; positions[position] = tcn; } // Only terms that have passed the term filter are included in the VectorStores. if (this.indexVectors.getVector(docterms[tcn]) != null) { // Retrieve relevant random index vectors. localindexvectors[tcn] = indexVectors.getVector(docterms[tcn]); // Retrieve relevant term vectors. localtermvectors[tcn] = termVectors.getVector(docterms[tcn]); } } /** Iterate through positions adding index vectors of terms * occurring within window to term vector for focus term **/ int windowRadius = windowSize / 2; for (int focusposn = 0; focusposn < positions.length; ++focusposn) { int focusterm = positions[focusposn]; if (focusterm == NONEXISTENT) continue; int windowstart = Math.max(0, focusposn - windowRadius); int windowend = Math.min(focusposn + windowRadius, positions.length - 1); for (int cursor = windowstart; cursor <= windowend; cursor++) { if (cursor == focusposn) continue; int coterm = positions[cursor]; if (coterm == NONEXISTENT) continue; if (this.indexVectors.getVector(docterms[coterm]) == null || localtermvectors[focusterm] == null) { continue; } // calculate permutation required for either Sahlgren (2008) implementation // encoding word order, or encoding direction as in Burgess and Lund's HAL if (positionalmethod.equals("permutation_plus_basic") || positionalmethod.equals("basic")) { // docterms[coterm] contains the term in position[w] in this document. localtermvectors[focusterm].superpose(localindexvectors[coterm], 1, null); } if ((positionalmethod.equals("permutation")) || (positionalmethod.equals("permutation_plus_basic"))) { int[] permutation = permutationCache[cursor - focusposn + windowRadius]; localtermvectors[focusterm].superpose(localindexvectors[coterm], 1, permutation); } else if (positionalmethod.equals("directional")) { localtermvectors[focusterm].bind(localindexvectors[coterm], cursor - focusposn); } } } } }
Quick fix to the error I introduced by changing "bind": TermTermVectors now uses permutations exclusively, although we may wish to introduce the option of other binding operators once these are implemented.
src/pitt/search/semanticvectors/TermTermVectorsFromLucene.java
Quick fix to the error I introduced by changing "bind": TermTermVectors now uses permutations exclusively, although we may wish to introduce the option of other binding operators once these are implemented.
<ide><path>rc/pitt/search/semanticvectors/TermTermVectorsFromLucene.java <ide> // have a programming typo cause an error! <ide> if (positionalmethod.equals("permutation") <ide> || positionalmethod.equals("permutation_plus_basic")) { <del> initializePermutations(); <add> initializePermutations();} <add> else if (positionalmethod.equals("directional")) { <add> initializeDirectionalPermutations(); <ide> } <ide> trainTermTermVectors(); <ide> } <ide> permutationCache[i] = PermutationUtils.getShiftPermutation( <ide> vectorType, dimension, i - windowSize/2); <ide> } <add> } <add> <add> /** <add> * Initialize all permutations that might be used (i.e +1 and -1). <add> */ <add> private void initializeDirectionalPermutations() { <add> permutationCache = <add> new int[2][PermutationUtils.getPermutationLength(vectorType, dimension)]; <add> <add> permutationCache[0] = PermutationUtils.getShiftPermutation( <add> vectorType, dimension, -1); <add> <add> permutationCache[1] = PermutationUtils.getShiftPermutation( <add> vectorType, dimension, 1); <add> <ide> } <ide> <ide> private void trainTermTermVectors() throws IOException, RuntimeException { <ide> int[] permutation = permutationCache[cursor - focusposn + windowRadius]; <ide> localtermvectors[focusterm].superpose(localindexvectors[coterm], 1, permutation); <ide> } else if (positionalmethod.equals("directional")) { <del> localtermvectors[focusterm].bind(localindexvectors[coterm], cursor - focusposn); <add> int[] permutation = permutationCache[(int) Math.max(0,Math.signum(cursor - focusposn))]; <add> localtermvectors[focusterm].superpose(localindexvectors[coterm], 1, permutation); <add> <ide> } <ide> } <ide> }
JavaScript
mit
8cefdfd6119637a4ef7e7339afdfd90757629749
0
egjs/egjs
(function(global) { global.eg = {}; var dependency = { "jQuery": { "url": "http://jquery.com/" }, "Hammer": { "url": "http://hammerjs.github.io/" }, "Outlayer": { "url": "https://github.com/metafizzy/outlayer/" } }; // jscs:disable maximumLineLength var templateMessage = [ "[egjs] The {{name}} library must be loaded before {{componentName}}.", "[egjs] For AMD environment (like RequireJS), \"{{name}}\" must be declared, which is required by {{componentName}}.", "[egjs] The {{index}} argument of {{componentName}} is missing.\n\rDownload {{name}} from [{{url}}].", "[egjs] The {{name}} parameter of {{componentName}} is not valid.\n\rPlease check and try again.", "[egjs] The {{index}} argument of {{componentName}} is undefined.\n\rPlease check and try again." ]; // jscs:enable maximumLineLength var ordinal = [ "1st", "2nd", "3rd"]; function changeOrdinal(index) { return index > 2 ? (index + 1) + "th" : ordinal[index]; } function replaceStr(str, obj) { var i; for (i in obj) { str = str.replace(new RegExp("{{" + i + "}}","gi"), obj[i]); } return str; } function checkDependency(componentName, di) { var i = 0; var l = di.length; var message = []; var paramList = []; var require = global.require; var dependencyInfo; var param; var messageInfo; var isString; var isUndefined; var registedDependency; var isNotGlobal; var specifiedAMD; for (; i < l; i++) { param = di[i]; messageInfo = { "index": changeOrdinal(i), "name": param, "componentName": componentName }; isString = typeof di[i] === "string"; isUndefined = di[i] === undefined; registedDependency = isString && (dependencyInfo = dependency[di[i]]); isNotGlobal = isString && dependencyInfo && !global[di[i]]; specifiedAMD = isNotGlobal && require && require.specified(di[i]); // Message decision flow // argument // |--------------|--------------| // undefined string !string&&!undefined // | | | // msg(4) | (OK) // defined dependency // | // |-----------------------------| // | | // msg(3) in global // | // |------------------------------| // use AMD (OK) // | // |------------------------------| // msg(2) require.specified // | // |------------------------------| // msg(1) require.defined // | // |------------------------------| // msg(0) (OK) if (!isString && !isUndefined) { paramList.push(param); continue; } if (specifiedAMD && require.defined(di[i])) { param = require(di[i]); paramList.push(param); continue; } if (specifiedAMD && !require.defined(di[i])) { messageInfo.url = dependencyInfo.url; message.push(replaceStr(templateMessage[0], messageInfo)); continue; } if (isNotGlobal && require && !require.specified(di[i])) { messageInfo.url = dependencyInfo.url; message.push(replaceStr(templateMessage[1], messageInfo)); continue; } if (isNotGlobal && !require) { messageInfo.url = dependencyInfo.url; message.push(replaceStr(templateMessage[2], messageInfo)); continue; } if (registedDependency && global[di[i]]) { param = global[di[i]]; paramList.push(param); continue; } if (isString && !dependencyInfo) { message.push(replaceStr(templateMessage[3], messageInfo)); continue; } if (di[i] === undefined) { message.push(replaceStr(templateMessage[4], messageInfo)); continue; } } return [paramList, message]; } /** * Regist module. * @private */ global.eg.module = function(name, di, fp) { var result = checkDependency(name, di); if (result[1].length) { throw new Error(result[1].join("\n\r")); } else { fp.apply(global, result[0]); } }; })(window);
src/module.js
(function(global) { global.eg = {}; var dependency = { "jQuery": { "url": "http://jquery.com/" }, "Hammer": { "url": "http://hammerjs.github.io/" }, "Outlayer": { "url": "https://github.com/metafizzy/outlayer/" } }; // jscs:disable maximumLineLength var templateMessage = [ "[egjs] The {{name}} library must be loaded before {{componentName}}.", "[egjs] For AMD evnronment (like RequireJS), \"{{name}}\" must be declared, which is required by {{componentName}}.", "[egjs] The {{componentName}} in {{index}} argument is missing.\n\rDownload {{name}} from [{{url}}].", "[egjs] The {{name}} parameter of {{componentName}} is not valid.\n\rPlease check and try again.", "[egjs] The {{componentName}} in {{index}} argument is undefined.\n\rPlease check and try again." ]; // jscs:enable maximumLineLength var ordinal = [ "1st", "2nd", "3rd"]; function changeOdinal(index) { return index > 2 ? index + "th" : ordinal[index]; } function replaceStr(str, obj) { var i; for (i in obj) { str = str.replace(new RegExp("{{" + i + "}}","gi"), obj[i]); } return str; } function checkDependency(componentName, di) { var i = 0; var l = di.length; var message = []; var paramList = []; var require = global.require; var dependencyInfo; var param; var messageInfo; var isString; var isUndefined; var registedDependency; var isNotGlobal; var specifiedAMD; for (; i < l; i++) { param = di[i]; messageInfo = { "index": changeOdinal(i), "name": param, "componentName": componentName }; isString = typeof di[i] === "string"; isUndefined = di[i] === undefined; registedDependency = isString && (dependencyInfo = dependency[di[i]]); isNotGlobal = isString && dependencyInfo && !global[di[i]]; specifiedAMD = isNotGlobal && require && require.specified(di[i]); // Message decision flow // argument // |--------------|--------------| // undefined string !string&&!undefined // | | | // msg(4) | (OK) // defined dependency // | // |-----------------------------| // | | // msg(3) in global // | // |------------------------------| // use AMD (OK) // | // |------------------------------| // msg(2) require.specified // | // |------------------------------| // msg(1) require.defined // | // |------------------------------| // msg(0) (OK) if (!isString && !isUndefined) { paramList.push(param); continue; } if (specifiedAMD && require.defined(di[i])) { param = require(di[i]); paramList.push(param); continue; } if (specifiedAMD && !require.defined(di[i])) { messageInfo.url = dependencyInfo.url; message.push(replaceStr(templateMessage[0], messageInfo)); continue; } if (isNotGlobal && require && !require.specified(di[i])) { messageInfo.url = dependencyInfo.url; message.push(replaceStr(templateMessage[1], messageInfo)); continue; } if (isNotGlobal && !require) { messageInfo.url = dependencyInfo.url; message.push(replaceStr(templateMessage[2], messageInfo)); continue; } if (registedDependency && global[di[i]]) { param = global[di[i]]; paramList.push(param); continue; } if (isString && !dependencyInfo) { message.push(replaceStr(templateMessage[3], messageInfo)); continue; } if (di[i] === undefined) { message.push(replaceStr(templateMessage[4], messageInfo)); continue; } } return [paramList, message]; } /** * Regist module. * @private */ global.eg.module = function(name, di, fp) { var result = checkDependency(name, di); if (result[1].length) { throw new Error(result[1].join("\n\r")); } else { fp.apply(global, result[0]); } }; })(window);
fix(module): Correction on error message handling

Fixed ordinal number handling. Modified awkward error message.

Ref g-48
src/module.js
fix(module): Correction on error message handling
<ide><path>src/module.js
<ide> // jscs:disable maximumLineLength
<ide> var templateMessage = [
<ide> "[egjs] The {{name}} library must be loaded before {{componentName}}.",
<del> "[egjs] For AMD evnronment (like RequireJS), \"{{name}}\" must be declared, which is required by {{componentName}}.",
<del> "[egjs] The {{componentName}} in {{index}} argument is missing.\n\rDownload {{name}} from [{{url}}].",
<add> "[egjs] For AMD environment (like RequireJS), \"{{name}}\" must be declared, which is required by {{componentName}}.",
<add> "[egjs] The {{index}} argument of {{componentName}} is missing.\n\rDownload {{name}} from [{{url}}].",
<ide> "[egjs] The {{name}} parameter of {{componentName}} is not valid.\n\rPlease check and try again.",
<del> "[egjs] The {{componentName}} in {{index}} argument is undefined.\n\rPlease check and try again."
<add> "[egjs] The {{index}} argument of {{componentName}} is undefined.\n\rPlease check and try again."
<ide> ];
<ide>
<ide> // jscs:enable maximumLineLength
<ide>
<ide> var ordinal = [ "1st", "2nd", "3rd"];
<ide>
<del> function changeOdinal(index) {
<del> return index > 2 ? index + "th" : ordinal[index];
<add> function changeOrdinal(index) {
<add> return index > 2 ? (index + 1) + "th" : ordinal[index];
<ide> }
<ide>
<ide> function replaceStr(str, obj) {
<ide> for (; i < l; i++) {
<ide> param = di[i];
<ide> messageInfo = {
<del> "index": changeOdinal(i),
<add> "index": changeOrdinal(i),
<ide> "name": param,
<ide> "componentName": componentName
<ide> };
Java
apache-2.0
4b572cc4e909c6d04805c8a49af7250e44ad3008
0
apache/commons-digester,apache/commons-digester,apache/commons-digester,mohanaraosv/commons-digester,mohanaraosv/commons-digester,callMeDimit/commons-digester,callMeDimit/commons-digester,callMeDimit/commons-digester,mohanaraosv/commons-digester
package org.apache.commons.digester3; /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.io.Reader; import java.lang.reflect.InvocationTargetException; import java.net.MalformedURLException; import java.net.URL; import java.net.URLConnection; import java.util.ArrayList; import java.util.Collections; import java.util.EmptyStackException; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Stack; import javax.xml.parsers.ParserConfigurationException; import javax.xml.parsers.SAXParser; import javax.xml.parsers.SAXParserFactory; import javax.xml.validation.Schema; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.xml.sax.Attributes; import org.xml.sax.ContentHandler; import org.xml.sax.EntityResolver; import org.xml.sax.ErrorHandler; import org.xml.sax.InputSource; import org.xml.sax.Locator; import org.xml.sax.SAXException; import org.xml.sax.SAXNotRecognizedException; import org.xml.sax.SAXNotSupportedException; import org.xml.sax.SAXParseException; import org.xml.sax.XMLReader; import org.xml.sax.helpers.DefaultHandler; /** * <p> * A <strong>Digester</strong> processes an XML input stream by matching a series of element nesting patterns to execute * Rules that have been added prior to the start of parsing. * </p> * <p> * See the <a href="package-summary.html#package_description">Digester Developer Guide</a> for more information. * </p> * <p> * <strong>IMPLEMENTATION NOTE</strong> - A single Digester instance may only be used within the context of a single * thread at a time, and a call to <code>parse()</code> must be completed before another can be initiated even from the * same thread. * </p> * <p> * A Digester instance should not be used for parsing more than one input document. The problem is that the Digester * class has quite a few member variables whose values "evolve" as SAX events are received during a parse. When reusing * the Digester instance, all these members must be reset back to their initial states before the second parse begins. * The "clear()" method makes a stab at resetting these, but it is actually rather a difficult problem. If you are * determined to reuse Digester instances, then at the least you should call the clear() method before each parse, and * must call it if the Digester parse terminates due to an exception during a parse. * </p> * <p> * <strong>LEGACY IMPLEMENTATION NOTE</strong> - When using the legacy XML schema support (instead of using the * {@link Schema} class), a bug in Xerces 2.0.2 prevents the support of XML schema. You need Xerces 2.1/2.3 and up to * make this class work with the legacy XML schema support. 
* </p> * <p> * This package was inspired by the <code>XmlMapper</code> class that was part of Tomcat 3.0 and 3.1, but is organized * somewhat differently. * </p> */ public class Digester extends DefaultHandler { // --------------------------------------------------------- Constructors /** * Construct a new Digester with default properties. */ public Digester() { super(); } /** * Construct a new Digester, allowing a SAXParser to be passed in. This allows Digester to be used in environments * which are unfriendly to JAXP1.1 (such as WebLogic 6.0). This may help in places where you are able to load JAXP * 1.1 classes yourself. * * @param parser The SAXParser used to parse XML streams */ public Digester( SAXParser parser ) { super(); this.parser = parser; } /** * Construct a new Digester, allowing an XMLReader to be passed in. This allows Digester to be used in environments * which are unfriendly to JAXP1.1 (such as WebLogic 6.0). Note that if you use this option you have to configure * namespace and validation support yourself, as these properties only affect the SAXParser and emtpy constructor. * * @param reader The XMLReader used to parse XML streams */ public Digester( XMLReader reader ) { super(); this.reader = reader; } // --------------------------------------------------- Instance Variables /** * The body text of the current element. */ private StringBuilder bodyText = new StringBuilder(); /** * The stack of body text string buffers for surrounding elements. */ private final Stack<StringBuilder> bodyTexts = new Stack<StringBuilder>(); /** * Stack whose elements are List objects, each containing a list of Rule objects as returned from Rules.getMatch(). * As each xml element in the input is entered, the matching rules are pushed onto this stack. After the end tag is * reached, the matches are popped again. The depth of is stack is therefore exactly the same as the current * "nesting" level of the input xml. * * @since 1.6 */ private final Stack<List<Rule>> matches = new Stack<List<Rule>>(); /** * The class loader to use for instantiating application objects. If not specified, the context class loader, or the * class loader used to load Digester itself, is used, based on the value of the <code>useContextClassLoader</code> * variable. */ private ClassLoader classLoader = null; /** * Has this Digester been configured yet. */ private boolean configured = false; /** * The EntityResolver used by the SAX parser. By default it use this class */ private EntityResolver entityResolver; /** * The URLs of entityValidator that have been registered, keyed by the public identifier that corresponds. */ private final HashMap<String, URL> entityValidator = new HashMap<String, URL>(); /** * The application-supplied error handler that is notified when parsing warnings, errors, or fatal errors occur. */ private ErrorHandler errorHandler = null; /** * The SAXParserFactory that is created the first time we need it. */ private SAXParserFactory factory = null; /** * The Locator associated with our parser. */ private Locator locator = null; /** * The current match pattern for nested element processing. */ private String match = ""; /** * Do we want a "namespace aware" parser. */ private boolean namespaceAware = false; /** * Registered namespaces we are currently processing. The key is the namespace prefix that was declared in the * document. The value is an Stack of the namespace URIs this prefix has been mapped to -- the top Stack element is * the most current one. 
(This architecture is required because documents can declare nested uses of the same prefix * for different Namespace URIs). */ private final HashMap<String, Stack<String>> namespaces = new HashMap<String, Stack<String>>(); /** * Do we want a "XInclude aware" parser. */ private boolean xincludeAware = false; /** * The parameters stack being utilized by CallMethodRule and CallParamRule rules. * * @since 2.0 */ private final Stack<Object[]> params = new Stack<Object[]>(); /** * The SAXParser we will use to parse the input stream. */ private SAXParser parser = null; /** * The public identifier of the DTD we are currently parsing under (if any). */ private String publicId = null; /** * The XMLReader used to parse digester rules. */ private XMLReader reader = null; /** * The "root" element of the stack (in other words, the last object that was popped. */ private Object root = null; /** * The <code>Rules</code> implementation containing our collection of <code>Rule</code> instances and associated * matching policy. If not established before the first rule is added, a default implementation will be provided. */ private Rules rules = null; /** * The XML schema to use for validating an XML instance. * * @since 2.0 */ private Schema schema = null; /** * The object stack being constructed. */ private final Stack<Object> stack = new Stack<Object>(); /** * Do we want to use the Context ClassLoader when loading classes for instantiating new objects. Default is * <code>false</code>. */ private boolean useContextClassLoader = false; /** * Do we want to use a validating parser. */ private boolean validating = false; /** * The Log to which most logging calls will be made. */ private Log log = LogFactory.getLog( "org.apache.commons.digester3.Digester" ); /** * The Log to which all SAX event related logging calls will be made. */ private Log saxLog = LogFactory.getLog( "org.apache.commons.digester3.Digester.sax" ); /** * The schema language supported. By default, we use this one. */ protected static final String W3C_XML_SCHEMA = "http://www.w3.org/2001/XMLSchema"; /** * An optional class that substitutes values in attributes and body text. This may be null and so a null check is * always required before use. */ private Substitutor substitutor; /** Stacks used for interrule communication, indexed by name String */ private final HashMap<String, Stack<Object>> stacksByName = new HashMap<String, Stack<Object>>(); /** * If not null, then calls by the parser to this object's characters, startElement, endElement and * processingInstruction methods are forwarded to the specified object. This is intended to allow rules to * temporarily "take control" of the sax events. In particular, this is used by NodeCreateRule. * <p> * See setCustomContentHandler. */ private ContentHandler customContentHandler = null; /** * Object which will receive callbacks for every pop/push action on the default stack or named stacks. */ private StackAction stackAction = null; // ------------------------------------------------------------- Properties /** * Return the currently mapped namespace URI for the specified prefix, if any; otherwise return <code>null</code>. * These mappings come and go dynamically as the document is parsed. 
* * @param prefix Prefix to look up * @return the currently mapped namespace URI for the specified prefix */ public String findNamespaceURI( String prefix ) { Stack<String> nsStack = namespaces.get( prefix ); if ( nsStack == null ) { return null; } try { return ( nsStack.peek() ); } catch ( EmptyStackException e ) { return null; } } /** * Return the class loader to be used for instantiating application objects when required. This is determined based * upon the following rules: * <ul> * <li>The class loader set by <code>setClassLoader()</code>, if any</li> * <li>The thread context class loader, if it exists and the <code>useContextClassLoader</code> property is set to * true</li> * <li>The class loader used to load the Digester class itself. * </ul> * * @return the class loader to be used for instantiating application objects. */ public ClassLoader getClassLoader() { if ( this.classLoader != null ) { return ( this.classLoader ); } if ( this.useContextClassLoader ) { ClassLoader classLoader = Thread.currentThread().getContextClassLoader(); if ( classLoader != null ) { return ( classLoader ); } } return ( this.getClass().getClassLoader() ); } /** * Set the class loader to be used for instantiating application objects when required. * * @param classLoader The new class loader to use, or <code>null</code> to revert to the standard rules */ public void setClassLoader( ClassLoader classLoader ) { this.classLoader = classLoader; } /** * Return the current depth of the element stack. * * @return the current depth of the element stack. */ public int getCount() { return ( stack.size() ); } /** * Return the name of the XML element that is currently being processed. * * @return the name of the XML element that is currently being processed. */ public String getCurrentElementName() { String elementName = match; int lastSlash = elementName.lastIndexOf( '/' ); if ( lastSlash >= 0 ) { elementName = elementName.substring( lastSlash + 1 ); } return ( elementName ); } /** * Return the error handler for this Digester. * * @return the error handler for this Digester. */ public ErrorHandler getErrorHandler() { return ( this.errorHandler ); } /** * Set the error handler for this Digester. * * @param errorHandler The new error handler */ public void setErrorHandler( ErrorHandler errorHandler ) { this.errorHandler = errorHandler; } /** * Return the SAXParserFactory we will use, creating one if necessary. * * @return the SAXParserFactory we will use, creating one if necessary. */ public SAXParserFactory getFactory() { if ( factory == null ) { factory = SAXParserFactory.newInstance(); factory.setNamespaceAware( namespaceAware ); factory.setXIncludeAware( xincludeAware ); factory.setValidating( validating ); factory.setSchema( schema ); } return ( factory ); } /** * Returns a flag indicating whether the requested feature is supported by the underlying implementation of * <code>org.xml.sax.XMLReader</code>. See <a href="http://www.saxproject.org">the saxproject website</a> for * information about the standard SAX2 feature flags. 
* * @param feature Name of the feature to inquire about * @return true, if the requested feature is supported by the underlying implementation of * <code>org.xml.sax.XMLReader</code>, false otherwise * @exception ParserConfigurationException if a parser configuration error occurs * @exception SAXNotRecognizedException if the property name is not recognized * @exception SAXNotSupportedException if the property name is recognized but not supported */ public boolean getFeature( String feature ) throws ParserConfigurationException, SAXNotRecognizedException, SAXNotSupportedException { return ( getFactory().getFeature( feature ) ); } /** * Sets a flag indicating whether the requested feature is supported by the underlying implementation of * <code>org.xml.sax.XMLReader</code>. See <a href="http://www.saxproject.org">the saxproject website</a> for * information about the standard SAX2 feature flags. In order to be effective, this method must be called * <strong>before</strong> the <code>getParser()</code> method is called for the first time, either directly or * indirectly. * * @param feature Name of the feature to set the status for * @param value The new value for this feature * @exception ParserConfigurationException if a parser configuration error occurs * @exception SAXNotRecognizedException if the property name is not recognized * @exception SAXNotSupportedException if the property name is recognized but not supported */ public void setFeature( String feature, boolean value ) throws ParserConfigurationException, SAXNotRecognizedException, SAXNotSupportedException { getFactory().setFeature( feature, value ); } /** * Return the current Logger associated with this instance of the Digester * * @return the current Logger associated with this instance of the Digester */ public Log getLogger() { return log; } /** * Set the current logger for this Digester. * * @param log the current logger for this Digester. */ public void setLogger( Log log ) { this.log = log; } /** * Gets the logger used for logging SAX-related information. <strong>Note</strong> the output is finely grained. * * @return the logger used for logging SAX-related information * @since 1.6 */ public Log getSAXLogger() { return saxLog; } /** * Sets the logger used for logging SAX-related information. <strong>Note</strong> the output is finely grained. * * @param saxLog the logger used for logging SAX-related information, not null * @since 1.6 */ public void setSAXLogger( Log saxLog ) { this.saxLog = saxLog; } /** * Return the current rule match path * * @return the current rule match path */ public String getMatch() { return match; } /** * Return the "namespace aware" flag for parsers we create. * * @return the "namespace aware" flag for parsers we create. */ public boolean getNamespaceAware() { return ( this.namespaceAware ); } /** * Set the "namespace aware" flag for parsers we create. * * @param namespaceAware The new "namespace aware" flag */ public void setNamespaceAware( boolean namespaceAware ) { this.namespaceAware = namespaceAware; } /** * Return the XInclude-aware flag for parsers we create. XInclude functionality additionally requires * namespace-awareness. * * @return The XInclude-aware flag * @see #getNamespaceAware() * @since 2.0 */ public boolean getXIncludeAware() { return ( this.xincludeAware ); } /** * Set the XInclude-aware flag for parsers we create. This additionally requires namespace-awareness. 
* * @param xincludeAware The new XInclude-aware flag * @see #setNamespaceAware(boolean) * @since 2.0 */ public void setXIncludeAware( boolean xincludeAware ) { this.xincludeAware = xincludeAware; } /** * Set the public id of the current file being parse. * * @param publicId the DTD/Schema public's id. */ public void setPublicId( String publicId ) { this.publicId = publicId; } /** * Return the public identifier of the DTD we are currently parsing under, if any. * * @return the public identifier of the DTD we are currently parsing under, if any. */ public String getPublicId() { return ( this.publicId ); } /** * Return the namespace URI that will be applied to all subsequently added <code>Rule</code> objects. * * @return the namespace URI that will be applied to all subsequently added <code>Rule</code> objects. */ public String getRuleNamespaceURI() { return ( getRules().getNamespaceURI() ); } /** * Set the namespace URI that will be applied to all subsequently added <code>Rule</code> objects. * * @param ruleNamespaceURI Namespace URI that must match on all subsequently added rules, or <code>null</code> for * matching regardless of the current namespace URI */ public void setRuleNamespaceURI( String ruleNamespaceURI ) { getRules().setNamespaceURI( ruleNamespaceURI ); } /** * Return the SAXParser we will use to parse the input stream. * * If there is a problem creating the parser, return <code>null</code>. * * @return the SAXParser we will use to parse the input stream */ public SAXParser getParser() { // Return the parser we already created (if any) if ( parser != null ) { return ( parser ); } // Create a new parser try { parser = getFactory().newSAXParser(); } catch ( Exception e ) { log.error( "Digester.getParser: ", e ); return ( null ); } return ( parser ); } /** * Return the current value of the specified property for the underlying <code>XMLReader</code> implementation. * * See <a href="http://www.saxproject.org">the saxproject website</a> for information about the standard SAX2 * properties. * * @param property Property name to be retrieved * @return the current value of the specified property for the underlying <code>XMLReader</code> implementation. * @exception SAXNotRecognizedException if the property name is not recognized * @exception SAXNotSupportedException if the property name is recognized but not supported */ public Object getProperty( String property ) throws SAXNotRecognizedException, SAXNotSupportedException { return ( getParser().getProperty( property ) ); } /** * Set the current value of the specified property for the underlying <code>XMLReader</code> implementation. See <a * href="http://www.saxproject.org">the saxproject website</a> for information about the standard SAX2 properties. * * @param property Property name to be set * @param value Property value to be set * @exception SAXNotRecognizedException if the property name is not recognized * @exception SAXNotSupportedException if the property name is recognized but not supported */ public void setProperty( String property, Object value ) throws SAXNotRecognizedException, SAXNotSupportedException { getParser().setProperty( property, value ); } /** * Return the <code>Rules</code> implementation object containing our rules collection and associated matching * policy. If none has been established, a default implementation will be created and returned. * * @return the <code>Rules</code> implementation object. 
*/ public Rules getRules() { if ( this.rules == null ) { this.rules = new RulesBase(); this.rules.setDigester( this ); } return ( this.rules ); } /** * Set the <code>Rules</code> implementation object containing our rules collection and associated matching policy. * * @param rules New Rules implementation */ public void setRules( Rules rules ) { this.rules = rules; this.rules.setDigester( this ); } /** * Return the XML Schema used when parsing. * * @return The {@link Schema} instance in use. * @since 2.0 */ public Schema getXMLSchema() { return ( this.schema ); } /** * Set the XML Schema to be used when parsing. * * @param schema The {@link Schema} instance to use. * @since 2.0 */ public void setXMLSchema( Schema schema ) { this.schema = schema; } /** * Return the boolean as to whether the context ClassLoader should be used. * * @return true, if the context ClassLoader should be used, false otherwise. */ public boolean getUseContextClassLoader() { return useContextClassLoader; } /** * Determine whether to use the Context ClassLoader (the one found by calling * <code>Thread.currentThread().getContextClassLoader()</code>) to resolve/load classes that are defined in various * rules. If not using Context ClassLoader, then the class-loading defaults to using the calling-class' ClassLoader. * * @param use determines whether to use Context ClassLoader. */ public void setUseContextClassLoader( boolean use ) { useContextClassLoader = use; } /** * Return the validating parser flag. * * @return the validating parser flag. */ public boolean getValidating() { return ( this.validating ); } /** * Set the validating parser flag. This must be called before <code>parse()</code> is called the first time. * * @param validating The new validating parser flag. */ public void setValidating( boolean validating ) { this.validating = validating; } /** * Return the XMLReader to be used for parsing the input document. * * FIXME: there is a bug in JAXP/XERCES that prevent the use of a parser that contains a schema with a DTD. * * @return the XMLReader to be used for parsing the input document. * @exception SAXException if no XMLReader can be instantiated */ public XMLReader getXMLReader() throws SAXException { if ( reader == null ) { reader = getParser().getXMLReader(); } reader.setDTDHandler( this ); reader.setContentHandler( this ); if ( entityResolver == null ) { reader.setEntityResolver( this ); } else { reader.setEntityResolver( entityResolver ); } reader.setErrorHandler( this ); return reader; } /** * Gets the <code>Substitutor</code> used to convert attributes and body text. * * @return the <code>Substitutor</code> used to convert attributes and body text, * null if not substitutions are to be performed. */ public Substitutor getSubstitutor() { return substitutor; } /** * Sets the <code>Substitutor</code> to be used to convert attributes and body text. * * @param substitutor the Substitutor to be used to convert attributes and body text or null if not substitution of * these values is to be performed. */ public void setSubstitutor( Substitutor substitutor ) { this.substitutor = substitutor; } /** * returns the custom SAX ContentHandler where events are redirected. * * @return the custom SAX ContentHandler where events are redirected. * @see #setCustomContentHandler(ContentHandler) * @since 1.7 */ public ContentHandler getCustomContentHandler() { return customContentHandler; } /** * Redirects (or cancels redirecting) of SAX ContentHandler events to an external object. 
* <p> * When this object's customContentHandler is non-null, any SAX events received from the parser will simply be * passed on to the specified object instead of this object handling them. This allows Rule classes to take control * of the SAX event stream for a while in order to do custom processing. Such a rule should save the old value * before setting a new one, and restore the old value in order to resume normal digester processing. * <p> * An example of a Rule which needs this feature is NodeCreateRule. * <p> * Note that saving the old value is probably not needed as it should always be null; a custom rule that wants to * take control could only have been called when there was no custom content handler. But it seems cleaner to * properly save/restore the value and maybe some day this will come in useful. * <p> * Note also that this is not quite equivalent to * * <pre> * digester.getXMLReader().setContentHandler( handler ) * </pre> * * for these reasons: * <ul> * <li>Some xml parsers don't like having setContentHandler called after parsing has started. The Aelfred parser is * one example.</li> * <li>Directing the events via the Digester object potentially allows us to log information about those SAX events * at the digester level.</li> * </ul> * * @param handler the custom SAX ContentHandler where events are redirected. * @since 1.7 */ public void setCustomContentHandler( ContentHandler handler ) { customContentHandler = handler; } /** * Define a callback object which is invoked whenever an object is pushed onto a digester object stack, * or popped off one. * * @param stackAction the callback object which is invoked whenever an object is pushed onto a digester * object stack, or popped off one. * @since 1.8 */ public void setStackAction( StackAction stackAction ) { this.stackAction = stackAction; } /** * Return the callback object which is invoked whenever an object is pushed onto a digester object stack, * or popped off one. * * @return the callback object which is invoked whenever an object is pushed onto a digester object stack, * or popped off one. * @see #setStackAction(StackAction). * @since 1.8 */ public StackAction getStackAction() { return stackAction; } /** * Get the most current namespaces for all prefixes. 
* * @return Map A map with namespace prefixes as keys and most current namespace URIs for the corresponding prefixes * as values * @since 1.8 */ public Map<String, String> getCurrentNamespaces() { if ( !namespaceAware ) { log.warn( "Digester is not namespace aware" ); } Map<String, String> currentNamespaces = new HashMap<String, String>(); for ( Map.Entry<String, Stack<String>> nsEntry : namespaces.entrySet() ) { try { currentNamespaces.put( nsEntry.getKey(), nsEntry.getValue().peek() ); } catch ( RuntimeException e ) { // rethrow, after logging log.error( e.getMessage(), e ); throw e; } } return currentNamespaces; } // ------------------------------------------------- ContentHandler Methods /** * {@inheritDoc} */ @Override public void characters( char buffer[], int start, int length ) throws SAXException { if ( customContentHandler != null ) { // forward calls instead of handling them here customContentHandler.characters( buffer, start, length ); return; } if ( saxLog.isDebugEnabled() ) { saxLog.debug( "characters(" + new String( buffer, start, length ) + ")" ); } bodyText.append( buffer, start, length ); } /** * {@inheritDoc} */ @Override public void endDocument() throws SAXException { if ( saxLog.isDebugEnabled() ) { if ( getCount() > 1 ) { saxLog.debug( "endDocument(): " + getCount() + " elements left" ); } else { saxLog.debug( "endDocument()" ); } } // Fire "finish" events for all defined rules for ( Rule rule : getRules().rules() ) { try { rule.finish(); } catch ( Exception e ) { log.error( "Finish event threw exception", e ); throw createSAXException( e ); } catch ( Error e ) { log.error( "Finish event threw error", e ); throw e; } } // Perform final cleanup clear(); } /** * {@inheritDoc} */ @Override public void endElement( String namespaceURI, String localName, String qName ) throws SAXException { if ( customContentHandler != null ) { // forward calls instead of handling them here customContentHandler.endElement( namespaceURI, localName, qName ); return; } boolean debug = log.isDebugEnabled(); if ( debug ) { if ( saxLog.isDebugEnabled() ) { saxLog.debug( "endElement(" + namespaceURI + "," + localName + "," + qName + ")" ); } log.debug( " match='" + match + "'" ); log.debug( " bodyText='" + bodyText + "'" ); } // the actual element name is either in localName or qName, depending // on whether the parser is namespace aware String name = localName; if ( ( name == null ) || ( name.length() < 1 ) ) { name = qName; } // Fire "body" events for all relevant rules List<Rule> rules = matches.pop(); if ( ( rules != null ) && ( rules.size() > 0 ) ) { String bodyText = this.bodyText.toString(); Substitutor substitutor = getSubstitutor(); if ( substitutor != null ) { bodyText = substitutor.substitute( bodyText ); } for ( int i = 0; i < rules.size(); i++ ) { try { Rule rule = rules.get( i ); if ( debug ) { log.debug( " Fire body() for " + rule ); } rule.body( namespaceURI, name, bodyText ); } catch ( Exception e ) { log.error( "Body event threw exception", e ); throw createSAXException( e ); } catch ( Error e ) { log.error( "Body event threw error", e ); throw e; } } } else { if ( debug ) { log.debug( " No rules found matching '" + match + "'." 
); } } // Recover the body text from the surrounding element bodyText = bodyTexts.pop(); if ( debug ) { log.debug( " Popping body text '" + bodyText.toString() + "'" ); } // Fire "end" events for all relevant rules in reverse order if ( rules != null ) { for ( int i = 0; i < rules.size(); i++ ) { int j = ( rules.size() - i ) - 1; try { Rule rule = rules.get( j ); if ( debug ) { log.debug( " Fire end() for " + rule ); } rule.end( namespaceURI, name ); } catch ( Exception e ) { log.error( "End event threw exception", e ); throw createSAXException( e ); } catch ( Error e ) { log.error( "End event threw error", e ); throw e; } } } // Recover the previous match expression int slash = match.lastIndexOf( '/' ); if ( slash >= 0 ) { match = match.substring( 0, slash ); } else { match = ""; } } /** * {@inheritDoc} */ @Override public void endPrefixMapping( String prefix ) throws SAXException { if ( saxLog.isDebugEnabled() ) { saxLog.debug( "endPrefixMapping(" + prefix + ")" ); } // Deregister this prefix mapping Stack<String> stack = namespaces.get( prefix ); if ( stack == null ) { return; } try { stack.pop(); if ( stack.empty() ) { namespaces.remove( prefix ); } } catch ( EmptyStackException e ) { throw createSAXException( "endPrefixMapping popped too many times" ); } } /** * {@inheritDoc} */ @Override public void ignorableWhitespace( char buffer[], int start, int len ) throws SAXException { if ( saxLog.isDebugEnabled() ) { saxLog.debug( "ignorableWhitespace(" + new String( buffer, start, len ) + ")" ); } // No processing required } /** * {@inheritDoc} */ @Override public void processingInstruction( String target, String data ) throws SAXException { if ( customContentHandler != null ) { // forward calls instead of handling them here customContentHandler.processingInstruction( target, data ); return; } if ( saxLog.isDebugEnabled() ) { saxLog.debug( "processingInstruction('" + target + "','" + data + "')" ); } // No processing is required } /** * Gets the document locator associated with our parser. * * @return the Locator supplied by the document parser */ public Locator getDocumentLocator() { return locator; } /** * {@inheritDoc} */ @Override public void setDocumentLocator( Locator locator ) { if ( saxLog.isDebugEnabled() ) { saxLog.debug( "setDocumentLocator(" + locator + ")" ); } this.locator = locator; } /** * {@inheritDoc} */ @Override public void skippedEntity( String name ) throws SAXException { if ( saxLog.isDebugEnabled() ) { saxLog.debug( "skippedEntity(" + name + ")" ); } // No processing required } /** * {@inheritDoc} */ @Override public void startDocument() throws SAXException { if ( saxLog.isDebugEnabled() ) { saxLog.debug( "startDocument()" ); } // ensure that the digester is properly configured, as // the digester could be used as a SAX ContentHandler // rather than via the parse() methods. 
configure(); } /** * {@inheritDoc} */ @Override public void startElement( String namespaceURI, String localName, String qName, Attributes list ) throws SAXException { boolean debug = log.isDebugEnabled(); if ( customContentHandler != null ) { // forward calls instead of handling them here customContentHandler.startElement( namespaceURI, localName, qName, list ); return; } if ( saxLog.isDebugEnabled() ) { saxLog.debug( "startElement(" + namespaceURI + "," + localName + "," + qName + ")" ); } // Save the body text accumulated for our surrounding element bodyTexts.push( bodyText ); if ( debug ) { log.debug( " Pushing body text '" + bodyText.toString() + "'" ); } bodyText = new StringBuilder(); // the actual element name is either in localName or qName, depending // on whether the parser is namespace aware String name = localName; if ( ( name == null ) || ( name.length() < 1 ) ) { name = qName; } // Compute the current matching rule StringBuilder sb = new StringBuilder( match ); if ( match.length() > 0 ) { sb.append( '/' ); } sb.append( name ); match = sb.toString(); if ( debug ) { log.debug( " New match='" + match + "'" ); } // Fire "begin" events for all relevant rules List<Rule> rules = getRules().match( namespaceURI, match ); matches.push( rules ); if ( ( rules != null ) && ( rules.size() > 0 ) ) { Substitutor substitutor = getSubstitutor(); if ( substitutor != null ) { list = substitutor.substitute( list ); } for ( int i = 0; i < rules.size(); i++ ) { try { Rule rule = rules.get( i ); if ( debug ) { log.debug( " Fire begin() for " + rule ); } rule.begin( namespaceURI, name, list ); } catch ( Exception e ) { log.error( "Begin event threw exception", e ); throw createSAXException( e ); } catch ( Error e ) { log.error( "Begin event threw error", e ); throw e; } } } else { if ( debug ) { log.debug( " No rules found matching '" + match + "'." ); } } } /** * {@inheritDoc} */ @Override public void startPrefixMapping( String prefix, String namespaceURI ) throws SAXException { if ( saxLog.isDebugEnabled() ) { saxLog.debug( "startPrefixMapping(" + prefix + "," + namespaceURI + ")" ); } // Register this prefix mapping Stack<String> stack = namespaces.get( prefix ); if ( stack == null ) { stack = new Stack<String>(); namespaces.put( prefix, stack ); } stack.push( namespaceURI ); } // ----------------------------------------------------- DTDHandler Methods /** * {@inheritDoc} */ @Override public void notationDecl( String name, String publicId, String systemId ) { if ( saxLog.isDebugEnabled() ) { saxLog.debug( "notationDecl(" + name + "," + publicId + "," + systemId + ")" ); } } /** * {@inheritDoc} */ @Override public void unparsedEntityDecl( String name, String publicId, String systemId, String notation ) { if ( saxLog.isDebugEnabled() ) { saxLog.debug( "unparsedEntityDecl(" + name + "," + publicId + "," + systemId + "," + notation + ")" ); } } // ----------------------------------------------- EntityResolver Methods /** * Set the <code>EntityResolver</code> used by SAX when resolving public id and system id. This must be called * before the first call to <code>parse()</code>. * * @param entityResolver a class that implement the <code>EntityResolver</code> interface. */ public void setEntityResolver( EntityResolver entityResolver ) { this.entityResolver = entityResolver; } /** * Return the Entity Resolver used by the SAX parser. * * @return the Entity Resolver used by the SAX parser. 
*/ public EntityResolver getEntityResolver() { return entityResolver; } /** * {@inheritDoc} */ @Override public InputSource resolveEntity( String publicId, String systemId ) throws SAXException { if ( saxLog.isDebugEnabled() ) { saxLog.debug( "resolveEntity('" + publicId + "', '" + systemId + "')" ); } if ( publicId != null ) { this.publicId = publicId; } // Has this system identifier been registered? URL entityURL = null; if ( publicId != null ) { entityURL = entityValidator.get( publicId ); } // Redirect the schema location to a local destination if ( entityURL == null && systemId != null ) { entityURL = entityValidator.get( systemId ); } if ( entityURL == null ) { if ( systemId == null ) { // cannot resolve if ( log.isDebugEnabled() ) { log.debug( " Cannot resolve null entity, returning null InputSource" ); } return ( null ); } // try to resolve using system ID if ( log.isDebugEnabled() ) { log.debug( " Trying to resolve using system ID '" + systemId + "'" ); } try { entityURL = new URL( systemId ); } catch ( MalformedURLException e ) { throw new IllegalArgumentException( "Malformed URL '" + systemId + "' : " + e.getMessage() ); } } // Return an input source to our alternative URL if ( log.isDebugEnabled() ) { log.debug( " Resolving to alternate DTD '" + entityURL + "'" ); } try { return createInputSourceFromURL( entityURL ); } catch ( Exception e ) { throw createSAXException( e ); } } // ------------------------------------------------- ErrorHandler Methods /** * {@inheritDoc} */ @Override public void error( SAXParseException exception ) throws SAXException { log.error( "Parse Error at line " + exception.getLineNumber() + " column " + exception.getColumnNumber() + ": " + exception.getMessage(), exception ); if ( errorHandler != null ) { errorHandler.error( exception ); } } /** * {@inheritDoc} */ @Override public void fatalError( SAXParseException exception ) throws SAXException { log.error( "Parse Fatal Error at line " + exception.getLineNumber() + " column " + exception.getColumnNumber() + ": " + exception.getMessage(), exception ); if ( errorHandler != null ) { errorHandler.fatalError( exception ); } } /** * {@inheritDoc} */ @Override public void warning( SAXParseException exception ) throws SAXException { if ( errorHandler != null ) { log.warn( "Parse Warning Error at line " + exception.getLineNumber() + " column " + exception.getColumnNumber() + ": " + exception.getMessage(), exception ); errorHandler.warning( exception ); } } // ------------------------------------------------------- Public Methods /** * Parse the content of the specified file using this Digester. Returns the root element from the object stack (if * any). * * @param <T> the type used to auto-cast the returned object to the assigned variable type * @param file File containing the XML data to be parsed * @return the root element from the object stack (if any) * @exception IOException if an input/output error occurs * @exception SAXException if a parsing exception occurs */ public <T> T parse( File file ) throws IOException, SAXException { if ( file == null ) { throw new IllegalArgumentException( "File to parse is null" ); } configure(); InputSource input = new InputSource( new FileInputStream( file ) ); input.setSystemId( file.toURI().toURL().toString() ); getXMLReader().parse( input ); cleanup(); return ( this.<T> getRoot() ); } /** * Parse the content of the specified input source using this Digester. Returns the root element from the object * stack (if any). 
* * @param <T> the type used to auto-cast the returned object to the assigned variable type * @param input Input source containing the XML data to be parsed * @return the root element from the object stack (if any) * @exception IOException if an input/output error occurs * @exception SAXException if a parsing exception occurs */ public <T> T parse( InputSource input ) throws IOException, SAXException { if ( input == null ) { throw new IllegalArgumentException( "InputSource to parse is null" ); } configure(); getXMLReader().parse( input ); cleanup(); return this.<T> getRoot(); } /** * Parse the content of the specified input stream using this Digester. Returns the root element from the object * stack (if any). * * @param <T> the type used to auto-cast the returned object to the assigned variable type * @param input Input stream containing the XML data to be parsed * @return the root element from the object stack (if any) * @exception IOException if an input/output error occurs * @exception SAXException if a parsing exception occurs */ public <T> T parse( InputStream input ) throws IOException, SAXException { if ( input == null ) { throw new IllegalArgumentException( "InputStream to parse is null" ); } configure(); InputSource is = new InputSource( input ); getXMLReader().parse( is ); cleanup(); return ( this.<T> getRoot() ); } /** * Parse the content of the specified reader using this Digester. Returns the root element from the object stack (if * any). * * @param <T> the type used to auto-cast the returned object to the assigned variable type * @param reader Reader containing the XML data to be parsed * @return the root element from the object stack (if any) * @exception IOException if an input/output error occurs * @exception SAXException if a parsing exception occurs */ public <T> T parse( Reader reader ) throws IOException, SAXException { if ( reader == null ) { throw new IllegalArgumentException( "Reader to parse is null" ); } configure(); InputSource is = new InputSource( reader ); getXMLReader().parse( is ); cleanup(); return ( this.<T> getRoot() ); } /** * Parse the content of the specified URI using this Digester. Returns the root element from the object stack (if * any). * * @param <T> the type used to auto-cast the returned object to the assigned variable type * @param uri URI containing the XML data to be parsed * @return the root element from the object stack (if any) * @exception IOException if an input/output error occurs * @exception SAXException if a parsing exception occurs */ public <T> T parse( String uri ) throws IOException, SAXException { if ( uri == null ) { throw new IllegalArgumentException( "String URI to parse is null" ); } configure(); InputSource is = createInputSourceFromURL( uri ); getXMLReader().parse( is ); cleanup(); return ( this.<T> getRoot() ); } /** * Parse the content of the specified URL using this Digester. Returns the root element from the object stack (if * any). 
* * @param <T> the type used to auto-cast the returned object to the assigned variable type * @param url URL containing the XML data to be parsed * @return the root element from the object stack (if any) * @exception IOException if an input/output error occurs * @exception SAXException if a parsing exception occurs * @since 1.8 */ public <T> T parse( URL url ) throws IOException, SAXException { if ( url == null ) { throw new IllegalArgumentException( "URL to parse is null" ); } configure(); InputSource is = createInputSourceFromURL( url ); getXMLReader().parse( is ); cleanup(); return ( this.<T> getRoot() ); } /** * <p> * Register the specified DTD URL for the specified public identifier. This must be called before the first call to * <code>parse()</code>. * </p> * <p> * <code>Digester</code> contains an internal <code>EntityResolver</code> implementation. This maps * <code>PUBLICID</code>'s to URLs (from which the resource will be loaded). A common use case for this method is to * register local URLs (possibly computed at runtime by a classloader) for DTDs. This allows the performance * advantage of using a local version without having to ensure every <code>SYSTEM</code> URI on every processed xml * document is local. This implementation provides only basic functionality. If more sophisticated features are * required, using {@link #setEntityResolver} to set a custom resolver is recommended. * </p> * <p> * <strong>Note:</strong> This method will have no effect when a custom <code>EntityResolver</code> has been set. * (Setting a custom <code>EntityResolver</code> overrides the internal implementation.) * </p> * * @param publicId Public identifier of the DTD to be resolved * @param entityURL The URL to use for reading this DTD * @since 1.8 */ public void register( String publicId, URL entityURL ) { if ( log.isDebugEnabled() ) { log.debug( "register('" + publicId + "', '" + entityURL + "'" ); } entityValidator.put( publicId, entityURL ); } /** * <p> * Convenience method that registers the string version of an entity URL instead of a URL version. * </p> * * @param publicId Public identifier of the entity to be resolved * @param entityURL The URL to use for reading this entity */ public void register( String publicId, String entityURL ) { if ( log.isDebugEnabled() ) { log.debug( "register('" + publicId + "', '" + entityURL + "'" ); } try { entityValidator.put( publicId, new URL( entityURL ) ); } catch ( MalformedURLException e ) { throw new IllegalArgumentException( "Malformed URL '" + entityURL + "' : " + e.getMessage() ); } } /** * Convenience method that registers DTD URLs for the specified public identifiers. * * @param entityValidator The URLs of entityValidator that have been registered, keyed by the public * identifier that corresponds. * @since 3.0 */ public void registerAll( Map<String, URL> entityValidator ) { this.entityValidator.putAll( entityValidator ); } /** * <p> * <code>List</code> of <code>InputSource</code> instances created by a <code>createInputSourceFromURL()</code> * method call. These represent open input streams that need to be closed to avoid resource leaks, as well as * potentially locked JAR files on Windows. * </p> */ protected List<InputSource> inputSources = new ArrayList<InputSource>( 5 ); /** * Given a URL, return an InputSource that reads from that URL. * <p> * Ideally this function would not be needed and code could just use <code>new InputSource(entityURL)</code>. 
* Unfortunately it appears that when the entityURL points to a file within a jar archive a caching mechanism inside * the InputSource implementation causes a file-handle to the jar file to remain open. On Windows systems this then * causes the jar archive file to be locked on disk ("in use") which makes it impossible to delete the jar file - * and that really stuffs up "undeploy" in webapps in particular. * <p> * In JDK1.4 and later, Apache XercesJ is used as the xml parser. The InputSource object provided is converted into * an XMLInputSource, and eventually passed to an instance of XMLDocumentScannerImpl to specify the source data to * be converted into tokens for the rest of the XMLReader code to handle. XMLDocumentScannerImpl calls * fEntityManager.startDocumentEntity(source), where fEntityManager is declared in ancestor class XMLScanner to be * an XMLEntityManager. In that class, if the input source stream is null, then: * * <pre> * URL location = new URL( expandedSystemId ); * URLConnection connect = location.openConnection(); * if ( connect instanceof HttpURLConnection ) * { * setHttpProperties( connect, xmlInputSource ); * } * stream = connect.getInputStream(); * </pre> * * This method pretty much duplicates the standard behaviour, except that it calls URLConnection.setUseCaches(false) * before opening the connection. * * @param url The URL has to be read * @return The InputSource that reads from the input URL * @throws IOException if any error occurs while reading the input URL * @since 1.8 */ public InputSource createInputSourceFromURL( URL url ) throws IOException { URLConnection connection = url.openConnection(); connection.setUseCaches( false ); InputStream stream = connection.getInputStream(); InputSource source = new InputSource( stream ); source.setSystemId( url.toExternalForm() ); inputSources.add( source ); return source; } /** * <p> * Convenience method that creates an <code>InputSource</code> from the string version of a URL. * </p> * * @param url URL for which to create an <code>InputSource</code> * @return The InputSource that reads from the input URL * @throws IOException if any error occurs while reading the input URL * @since 1.8 */ public InputSource createInputSourceFromURL( String url ) throws IOException { return createInputSourceFromURL( new URL( url ) ); } // --------------------------------------------------------- Rule Methods /** * <p> * Register a new Rule matching the specified pattern. This method sets the <code>Digester</code> property on the * rule. * </p> * * @param pattern Element matching pattern * @param rule Rule to be registered */ public void addRule( String pattern, Rule rule ) { rule.setDigester( this ); getRules().add( pattern, rule ); } /** * Register a set of Rule instances defined in a RuleSet. * * @param ruleSet The RuleSet instance to configure from */ public void addRuleSet( RuleSet ruleSet ) { String oldNamespaceURI = getRuleNamespaceURI(); String newNamespaceURI = ruleSet.getNamespaceURI(); if ( log.isDebugEnabled() ) { if ( newNamespaceURI == null ) { log.debug( "addRuleSet() with no namespace URI" ); } else { log.debug( "addRuleSet() with namespace URI " + newNamespaceURI ); } } setRuleNamespaceURI( newNamespaceURI ); ruleSet.addRuleInstances( this ); setRuleNamespaceURI( oldNamespaceURI ); } /** * Add a "bean property setter" rule for the specified parameters. 
* * @param pattern Element matching pattern * @see BeanPropertySetterRule */ public void addBeanPropertySetter( String pattern ) { addRule( pattern, new BeanPropertySetterRule() ); } /** * Add a "bean property setter" rule for the specified parameters. * * @param pattern Element matching pattern * @param propertyName Name of property to set * @see BeanPropertySetterRule */ public void addBeanPropertySetter( String pattern, String propertyName ) { addRule( pattern, new BeanPropertySetterRule( propertyName ) ); } /** * Add an "call method" rule for a method which accepts no arguments. * * @param pattern Element matching pattern * @param methodName Method name to be called * @see CallMethodRule */ public void addCallMethod( String pattern, String methodName ) { addRule( pattern, new CallMethodRule( methodName ) ); } /** * Add an "call method" rule for the specified parameters. * * @param pattern Element matching pattern * @param methodName Method name to be called * @param paramCount Number of expected parameters (or zero for a single parameter from the body of this element) * @see CallMethodRule */ public void addCallMethod( String pattern, String methodName, int paramCount ) { addRule( pattern, new CallMethodRule( methodName, paramCount ) ); } /** * Add an "call method" rule for the specified parameters. If <code>paramCount</code> is set to zero the rule will * use the body of the matched element as the single argument of the method, unless <code>paramTypes</code> is null * or empty, in this case the rule will call the specified method with no arguments. * * @param pattern Element matching pattern * @param methodName Method name to be called * @param paramCount Number of expected parameters (or zero for a single parameter from the body of this element) * @param paramTypes Set of Java class names for the types of the expected parameters (if you wish to use a * primitive type, specify the corresonding Java wrapper class instead, such as * <code>java.lang.Boolean</code> for a <code>boolean</code> parameter) * @see CallMethodRule */ public void addCallMethod( String pattern, String methodName, int paramCount, String paramTypes[] ) { addRule( pattern, new CallMethodRule( methodName, paramCount, paramTypes ) ); } /** * Add an "call method" rule for the specified parameters. If <code>paramCount</code> is set to zero the rule will * use the body of the matched element as the single argument of the method, unless <code>paramTypes</code> is null * or empty, in this case the rule will call the specified method with no arguments. * * @param pattern Element matching pattern * @param methodName Method name to be called * @param paramCount Number of expected parameters (or zero for a single parameter from the body of this element) * @param paramTypes The Java class names of the arguments (if you wish to use a primitive type, specify the * corresonding Java wrapper class instead, such as <code>java.lang.Boolean</code> for a * <code>boolean</code> parameter) * @see CallMethodRule */ public void addCallMethod( String pattern, String methodName, int paramCount, Class<?> paramTypes[] ) { addRule( pattern, new CallMethodRule( methodName, paramCount, paramTypes ) ); } /** * Add a "call parameter" rule for the specified parameters. 
* * @param pattern Element matching pattern * @param paramIndex Zero-relative parameter index to set (from the body of this element) * @see CallParamRule */ public void addCallParam( String pattern, int paramIndex ) { addRule( pattern, new CallParamRule( paramIndex ) ); } /** * Add a "call parameter" rule for the specified parameters. * * @param pattern Element matching pattern * @param paramIndex Zero-relative parameter index to set (from the specified attribute) * @param attributeName Attribute whose value is used as the parameter value * @see CallParamRule */ public void addCallParam( String pattern, int paramIndex, String attributeName ) { addRule( pattern, new CallParamRule( paramIndex, attributeName ) ); } /** * Add a "call parameter" rule. This will either take a parameter from the stack or from the current element body * text. * * @param pattern Element matching pattern * @param paramIndex The zero-relative parameter number * @param fromStack Should the call parameter be taken from the top of the stack? * @see CallParamRule */ public void addCallParam( String pattern, int paramIndex, boolean fromStack ) { addRule( pattern, new CallParamRule( paramIndex, fromStack ) ); } /** * Add a "call parameter" rule that sets a parameter from the stack. This takes a parameter from the given position * on the stack. * * @param pattern Element matching pattern * @param paramIndex The zero-relative parameter number * @param stackIndex set the call parameter to the stackIndex'th object down the stack, where 0 is the top of the * stack, 1 the next element down and so on * @see CallMethodRule */ public void addCallParam( String pattern, int paramIndex, int stackIndex ) { addRule( pattern, new CallParamRule( paramIndex, stackIndex ) ); } /** * Add a "call parameter" rule that sets a parameter from the current <code>Digester</code> matching path. This is * sometimes useful when using rules that support wildcards. * * @param pattern the pattern that this rule should match * @param paramIndex The zero-relative parameter number * @see CallMethodRule */ public void addCallParamPath( String pattern, int paramIndex ) { addRule( pattern, new PathCallParamRule( paramIndex ) ); } /** * Add a "call parameter" rule that sets a parameter from a caller-provided object. This can be used to pass * constants such as strings to methods; it can also be used to pass mutable objects, providing ways for objects to * do things like "register" themselves with some shared object. * <p> * Note that when attempting to locate a matching method to invoke, the true type of the paramObj is used, so that * despite the paramObj being passed in here as type Object, the target method can declare its parameters as being * the true type of the object (or some ancestor type, according to the usual type-conversion rules). * * @param pattern Element matching pattern * @param paramIndex The zero-relative parameter number * @param paramObj Any arbitrary object to be passed to the target method. * @see CallMethodRule * @since 1.6 */ public void addObjectParam( String pattern, int paramIndex, Object paramObj ) { addRule( pattern, new ObjectParamRule( paramIndex, paramObj ) ); } /** * Add a "factory create" rule for the specified parameters. Exceptions thrown during the object creation process * will be propagated. 
* * @param pattern Element matching pattern * @param className Java class name of the object creation factory class * @see FactoryCreateRule */ public void addFactoryCreate( String pattern, String className ) { addFactoryCreate( pattern, className, false ); } /** * Add a "factory create" rule for the specified parameters. Exceptions thrown during the object creation process * will be propagated. * * @param pattern Element matching pattern * @param clazz Java class of the object creation factory class * @see FactoryCreateRule */ public void addFactoryCreate( String pattern, Class<? extends ObjectCreationFactory<?>> clazz ) { addFactoryCreate( pattern, clazz, false ); } /** * Add a "factory create" rule for the specified parameters. Exceptions thrown during the object creation process * will be propagated. * * @param pattern Element matching pattern * @param className Java class name of the object creation factory class * @param attributeName Attribute name which, if present, overrides the value specified by <code>className</code> * @see FactoryCreateRule */ public void addFactoryCreate( String pattern, String className, String attributeName ) { addFactoryCreate( pattern, className, attributeName, false ); } /** * Add a "factory create" rule for the specified parameters. Exceptions thrown during the object creation process * will be propagated. * * @param pattern Element matching pattern * @param clazz Java class of the object creation factory class * @param attributeName Attribute name which, if present, overrides the value specified by <code>className</code> * @see FactoryCreateRule */ public void addFactoryCreate( String pattern, Class<? extends ObjectCreationFactory<?>> clazz, String attributeName ) { addFactoryCreate( pattern, clazz, attributeName, false ); } /** * Add a "factory create" rule for the specified parameters. Exceptions thrown during the object creation process * will be propagated. * * @param pattern Element matching pattern * @param creationFactory Previously instantiated ObjectCreationFactory to be utilized * @see FactoryCreateRule */ public void addFactoryCreate( String pattern, ObjectCreationFactory<?> creationFactory ) { addFactoryCreate( pattern, creationFactory, false ); } /** * Add a "factory create" rule for the specified parameters. * * @param pattern Element matching pattern * @param className Java class name of the object creation factory class * @param ignoreCreateExceptions when <code>true</code> any exceptions thrown during object creation will be * ignored. * @see FactoryCreateRule */ public void addFactoryCreate( String pattern, String className, boolean ignoreCreateExceptions ) { addRule( pattern, new FactoryCreateRule( className, ignoreCreateExceptions ) ); } /** * Add a "factory create" rule for the specified parameters. * * @param pattern Element matching pattern * @param clazz Java class of the object creation factory class * @param ignoreCreateExceptions when <code>true</code> any exceptions thrown during object creation will be * ignored. * @see FactoryCreateRule */ public void addFactoryCreate( String pattern, Class<? extends ObjectCreationFactory<?>> clazz, boolean ignoreCreateExceptions ) { addRule( pattern, new FactoryCreateRule( clazz, ignoreCreateExceptions ) ); } /** * Add a "factory create" rule for the specified parameters. 
* * @param pattern Element matching pattern * @param className Java class name of the object creation factory class * @param attributeName Attribute name which, if present, overrides the value specified by <code>className</code> * @param ignoreCreateExceptions when <code>true</code> any exceptions thrown during object creation will be * ignored. * @see FactoryCreateRule */ public void addFactoryCreate( String pattern, String className, String attributeName, boolean ignoreCreateExceptions ) { addRule( pattern, new FactoryCreateRule( className, attributeName, ignoreCreateExceptions ) ); } /** * Add a "factory create" rule for the specified parameters. * * @param pattern Element matching pattern * @param clazz Java class of the object creation factory class * @param attributeName Attribute name which, if present, overrides the value specified by <code>className</code> * @param ignoreCreateExceptions when <code>true</code> any exceptions thrown during object creation will be * ignored. * @see FactoryCreateRule */ public void addFactoryCreate( String pattern, Class<? extends ObjectCreationFactory<?>> clazz, String attributeName, boolean ignoreCreateExceptions ) { addRule( pattern, new FactoryCreateRule( clazz, attributeName, ignoreCreateExceptions ) ); } /** * Add a "factory create" rule for the specified parameters. * * @param pattern Element matching pattern * @param creationFactory Previously instantiated ObjectCreationFactory to be utilized * @param ignoreCreateExceptions when <code>true</code> any exceptions thrown during object creation will be * ignored. * @see FactoryCreateRule */ public void addFactoryCreate( String pattern, ObjectCreationFactory<?> creationFactory, boolean ignoreCreateExceptions ) { creationFactory.setDigester( this ); addRule( pattern, new FactoryCreateRule( creationFactory, ignoreCreateExceptions ) ); } /** * Add an "object create" rule for the specified parameters. * * @param pattern Element matching pattern * @param className Java class name to be created * @see ObjectCreateRule */ public void addObjectCreate( String pattern, String className ) { addRule( pattern, new ObjectCreateRule( className ) ); } /** * Add an "object create" rule for the specified parameters. * * @param pattern Element matching pattern * @param clazz Java class to be created * @see ObjectCreateRule */ public void addObjectCreate( String pattern, Class<?> clazz ) { addRule( pattern, new ObjectCreateRule( clazz ) ); } /** * Add an "object create" rule for the specified parameters. * * @param pattern Element matching pattern * @param className Default Java class name to be created * @param attributeName Attribute name that optionally overrides the default Java class name to be created * @see ObjectCreateRule */ public void addObjectCreate( String pattern, String className, String attributeName ) { addRule( pattern, new ObjectCreateRule( className, attributeName ) ); } /** * Add an "object create" rule for the specified parameters. * * @param pattern Element matching pattern * @param attributeName Attribute name that optionally overrides * @param clazz Default Java class to be created the default Java class name to be created * @see ObjectCreateRule */ public void addObjectCreate( String pattern, String attributeName, Class<?> clazz ) { addRule( pattern, new ObjectCreateRule( attributeName, clazz ) ); } /** * Adds an {@link SetNestedPropertiesRule}. 
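 * <p>
 * Illustrative sketch only (not part of the original Javadoc): the <code>employee</code> element and the
 * <code>Employee</code> bean are hypothetical. Child elements such as <code>&lt;firstName&gt;</code> and
 * <code>&lt;age&gt;</code> are mapped onto bean properties of the same name:
 * </p>
 * <pre>
 * Digester digester = new Digester();
 * digester.addObjectCreate( "employee", Employee.class );
 * // copies the body text of each child element of &lt;employee&gt; into the
 * // matching Employee property (firstName, age, ...)
 * digester.addSetNestedProperties( "employee" );
 * </pre>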
* * @param pattern register the rule with this pattern * @since 1.6 */ public void addSetNestedProperties( String pattern ) { addRule( pattern, new SetNestedPropertiesRule() ); } /** * Adds an {@link SetNestedPropertiesRule}. * * @param pattern register the rule with this pattern * @param elementName elment name that a property maps to * @param propertyName property name of the element mapped from * @since 1.6 */ public void addSetNestedProperties( String pattern, String elementName, String propertyName ) { addRule( pattern, new SetNestedPropertiesRule( elementName, propertyName ) ); } /** * Adds an {@link SetNestedPropertiesRule}. * * @param pattern register the rule with this pattern * @param elementNames elment names that (in order) map to properties * @param propertyNames property names that (in order) elements are mapped to * @since 1.6 */ public void addSetNestedProperties( String pattern, String[] elementNames, String[] propertyNames ) { addRule( pattern, new SetNestedPropertiesRule( elementNames, propertyNames ) ); } /** * Add a "set next" rule for the specified parameters. * * @param pattern Element matching pattern * @param methodName Method name to call on the parent element * @see SetNextRule */ public void addSetNext( String pattern, String methodName ) { addRule( pattern, new SetNextRule( methodName ) ); } /** * Add a "set next" rule for the specified parameters. * * @param pattern Element matching pattern * @param methodName Method name to call on the parent element * @param paramType Java class name of the expected parameter type (if you wish to use a primitive type, specify the * corresonding Java wrapper class instead, such as <code>java.lang.Boolean</code> for a * <code>boolean</code> parameter) * @see SetNextRule */ public void addSetNext( String pattern, String methodName, String paramType ) { addRule( pattern, new SetNextRule( methodName, paramType ) ); } /** * Add {@link SetRootRule} with the specified parameters. * * @param pattern Element matching pattern * @param methodName Method name to call on the root object * @see SetRootRule */ public void addSetRoot( String pattern, String methodName ) { addRule( pattern, new SetRootRule( methodName ) ); } /** * Add {@link SetRootRule} with the specified parameters. * * @param pattern Element matching pattern * @param methodName Method name to call on the root object * @param paramType Java class name of the expected parameter type * @see SetRootRule */ public void addSetRoot( String pattern, String methodName, String paramType ) { addRule( pattern, new SetRootRule( methodName, paramType ) ); } /** * Add a "set properties" rule for the specified parameters. * * @param pattern Element matching pattern * @see SetPropertiesRule */ public void addSetProperties( String pattern ) { addRule( pattern, new SetPropertiesRule() ); } /** * Add a "set properties" rule with a single overridden parameter. See * {@link SetPropertiesRule#SetPropertiesRule(String attributeName, String propertyName)} * * @param pattern Element matching pattern * @param attributeName map this attribute * @param propertyName to this property * @see SetPropertiesRule */ public void addSetProperties( String pattern, String attributeName, String propertyName ) { addRule( pattern, new SetPropertiesRule( attributeName, propertyName ) ); } /** * Add a "set properties" rule with overridden parameters. 
See * {@link SetPropertiesRule#SetPropertiesRule(String [] attributeNames, String [] propertyNames)} * * @param pattern Element matching pattern * @param attributeNames names of attributes with custom mappings * @param propertyNames property names these attributes map to * @see SetPropertiesRule */ public void addSetProperties( String pattern, String[] attributeNames, String[] propertyNames ) { addRule( pattern, new SetPropertiesRule( attributeNames, propertyNames ) ); } /** * Add a "set property" rule for the specified parameters. * * @param pattern Element matching pattern * @param name Attribute name containing the property name to be set * @param value Attribute name containing the property value to set * @see SetPropertyRule */ public void addSetProperty( String pattern, String name, String value ) { addRule( pattern, new SetPropertyRule( name, value ) ); } /** * Add a "set top" rule for the specified parameters. * * @param pattern Element matching pattern * @param methodName Method name to call on the parent element * @see SetTopRule */ public void addSetTop( String pattern, String methodName ) { addRule( pattern, new SetTopRule( methodName ) ); } /** * Add a "set top" rule for the specified parameters. * * @param pattern Element matching pattern * @param methodName Method name to call on the parent element * @param paramType Java class name of the expected parameter type (if you wish to use a primitive type, specify the * corresonding Java wrapper class instead, such as <code>java.lang.Boolean</code> for a * <code>boolean</code> parameter) * @see SetTopRule */ public void addSetTop( String pattern, String methodName, String paramType ) { addRule( pattern, new SetTopRule( methodName, paramType ) ); } // --------------------------------------------------- Object Stack Methods /** * Clear the current contents of the default object stack, the param stack, all named stacks, and other internal * variables. * <p> * Calling this method <i>might</i> allow another document of the same type to be correctly parsed. However this * method was not intended for this purpose (just to tidy up memory usage). In general, a separate Digester object * should be created for each document to be parsed. * <p> * Note that this method is called automatically after a document has been successfully parsed by a Digester * instance. However it is not invoked automatically when a parse fails, so when reusing a Digester instance (which * is not recommended) this method <i>must</i> be called manually after a parse failure. */ public void clear() { match = ""; bodyTexts.clear(); params.clear(); publicId = null; stack.clear(); stacksByName.clear(); customContentHandler = null; } /** * Return the top object on the stack without removing it. * * If there are no objects on the stack, return <code>null</code>. * * @param <T> the type used to auto-cast the returned object to the assigned variable type * @return the top object on the stack without removing it. */ public <T> T peek() { try { return this.<T> npeSafeCast( stack.peek() ); } catch ( EmptyStackException e ) { log.warn( "Empty stack (returning null)" ); return ( null ); } } /** * Return the n'th object down the stack, where 0 is the top element and [getCount()-1] is the bottom element. If * the specified index is out of range, return <code>null</code>. * * @param <T> the type used to auto-cast the returned object to the assigned variable type * @param n Index of the desired element, where 0 is the top of the stack, 1 is the next element down, and so on. 
* @return the n'th object down the stack */ public <T> T peek( int n ) { int index = ( stack.size() - 1 ) - n; if ( index < 0 ) { log.warn( "Empty stack (returning null)" ); return ( null ); } try { return this.<T> npeSafeCast( stack.get( index ) ); } catch ( EmptyStackException e ) { log.warn( "Empty stack (returning null)" ); return ( null ); } } /** * Pop the top object off of the stack, and return it. If there are no objects on the stack, return * <code>null</code>. * * @param <T> the type used to auto-cast the returned object to the assigned variable type * @return the top object popped off of the stack */ public <T> T pop() { try { T popped = this.<T> npeSafeCast( stack.pop() ); if ( stackAction != null ) { popped = stackAction.onPop( this, null, popped ); } return popped; } catch ( EmptyStackException e ) { log.warn( "Empty stack (returning null)" ); return ( null ); } } /** * Push a new object onto the top of the object stack. * * @param <T> any type of the pushed object * @param object The new object */ public <T> void push( T object ) { if ( stackAction != null ) { object = stackAction.onPush( this, null, object ); } if ( stack.size() == 0 ) { root = object; } stack.push( object ); } /** * Pushes the given object onto the stack with the given name. If no stack already exists with the given name then * one will be created. * * @param <T> any type of the pushed object * @param stackName the name of the stack onto which the object should be pushed * @param value the Object to be pushed onto the named stack. * @since 1.6 */ public <T> void push( String stackName, T value ) { if ( stackAction != null ) { value = stackAction.onPush( this, stackName, value ); } Stack<Object> namedStack = stacksByName.get( stackName ); if ( namedStack == null ) { namedStack = new Stack<Object>(); stacksByName.put( stackName, namedStack ); } namedStack.push( value ); } /** * <p> * Pops (gets and removes) the top object from the stack with the given name. * </p> * <p> * <strong>Note:</strong> a stack is considered empty if no objects have been pushed onto it yet. * </p> * * @param <T> the type used to auto-cast the returned object to the assigned variable type * @param stackName the name of the stack from which the top value is to be popped. * @return the top <code>Object</code> on the stack or or null if the stack is either empty or has not been created * yet * @throws EmptyStackException if the named stack is empty * @since 1.6 */ public <T> T pop( String stackName ) { T result = null; Stack<Object> namedStack = stacksByName.get( stackName ); if ( namedStack == null ) { if ( log.isDebugEnabled() ) { log.debug( "Stack '" + stackName + "' is empty" ); } throw new EmptyStackException(); } result = this.<T> npeSafeCast( namedStack.pop() ); if ( stackAction != null ) { result = stackAction.onPop( this, stackName, result ); } return result; } /** * <p> * Gets the top object from the stack with the given name. This method does not remove the object from the stack. * </p> * <p> * <strong>Note:</strong> a stack is considered empty if no objects have been pushed onto it yet. 
* </p> * * @param <T> the type used to auto-cast the returned object to the assigned variable type * @param stackName the name of the stack to be peeked * @return the top <code>Object</code> on the stack or null if the stack is either empty or has not been created yet * @throws EmptyStackException if the named stack is empty * @since 1.6 */ public <T> T peek( String stackName ) { return this.<T> npeSafeCast( peek( stackName, 0 ) ); } /** * <p> * Gets the top object from the stack with the given name. This method does not remove the object from the stack. * </p> * <p> * <strong>Note:</strong> a stack is considered empty if no objects have been pushed onto it yet. * </p> * * @param <T> the type used to auto-cast the returned object to the assigned variable type * @param stackName the name of the stack to be peeked * @param n Index of the desired element, where 0 is the top of the stack, 1 is the next element down, and so on. * @return the specified <code>Object</code> on the stack. * @throws EmptyStackException if the named stack is empty * @since 1.6 */ public <T> T peek( String stackName, int n ) { T result = null; Stack<Object> namedStack = stacksByName.get( stackName ); if ( namedStack == null ) { if ( log.isDebugEnabled() ) { log.debug( "Stack '" + stackName + "' is empty" ); } throw new EmptyStackException(); } int index = ( namedStack.size() - 1 ) - n; if ( index < 0 ) { throw new EmptyStackException(); } result = this.<T> npeSafeCast( namedStack.get( index ) ); return result; } /** * <p> * Is the stack with the given name empty? * </p> * <p> * <strong>Note:</strong> a stack is considered empty if no objects have been pushed onto it yet. * </p> * * @param stackName the name of the stack whose emptiness should be evaluated * @return true if the given stack if empty * @since 1.6 */ public boolean isEmpty( String stackName ) { boolean result = true; Stack<Object> namedStack = stacksByName.get( stackName ); if ( namedStack != null ) { result = namedStack.isEmpty(); } return result; } /** * Returns the root element of the tree of objects created as a result of applying the rule objects to the input * XML. * <p> * If the digester stack was "primed" by explicitly pushing a root object onto the stack before parsing started, * then that root object is returned here. * <p> * Alternatively, if a Rule which creates an object (eg ObjectCreateRule) matched the root element of the xml, then * the object created will be returned here. * <p> * In other cases, the object most recently pushed onto an empty digester stack is returned. This would be a most * unusual use of digester, however; one of the previous configurations is much more likely. * <p> * Note that when using one of the Digester.parse methods, the return value from the parse method is exactly the * same as the return value from this method. However when the Digester is being used as a SAXContentHandler, no * such return value is available; in this case, this method allows you to access the root object that has been * created after parsing has completed. * * @param <T> the type used to auto-cast the returned object to the assigned variable type * @return the root object that has been created after parsing or null if the digester has not parsed any XML yet. */ public <T> T getRoot() { return this.<T> npeSafeCast( root ); } /** * This method allows the "root" variable to be reset to null. * <p> * It is not considered safe for a digester instance to be reused to parse multiple xml documents. 
However if you * are determined to do so, then you should call both clear() and resetRoot() before each parse. * * @since 1.7 */ public void resetRoot() { root = null; } // ------------------------------------------------ Parameter Stack Methods // ------------------------------------------------------ Protected Methods /** * <p> * Clean up allocated resources after parsing is complete. The default method closes input streams that have been * created by Digester itself. If you override this method in a subclass, be sure to call * <code>super.cleanup()</code> to invoke this logic. * </p> * * @since 1.8 */ protected void cleanup() { // If we created any InputSource objects in this instance, // they each have an input stream that should be closed for ( InputSource source : inputSources ) { try { source.getByteStream().close(); } catch ( IOException e ) { // Fall through so we get them all } } inputSources.clear(); } /** * <p> * Provide a hook for lazy configuration of this <code>Digester</code> instance. The default implementation does * nothing, but subclasses can override as needed. * </p> * <p> * <strong>Note</strong> This method may be called more than once. Once only initialization code should be placed in * {@link #initialize} or the code should take responsibility by checking and setting the {@link #configured} flag. * </p> */ protected void configure() { // Do not configure more than once if ( configured ) { return; } // Perform lazy configuration as needed initialize(); // call hook method for subclasses that want to be initialized once only // Nothing else required by default // Set the configuration flag to avoid repeating configured = true; } /** * Checks the Digester instance has been configured. * * @return true, if the Digester instance has been configured, false otherwise * @since 3.0 */ public boolean isConfigured() { return configured; } /** * <p> * Provides a hook for lazy initialization of this <code>Digester</code> instance. The default implementation does * nothing, but subclasses can override as needed. Digester (by default) only calls this method once. * </p> * <p> * <strong>Note</strong> This method will be called by {@link #configure} only when the {@link #configured} flag is * false. Subclasses that override <code>configure</code> or who set <code>configured</code> may find that this * method may be called more than once. * </p> * * @since 1.6 */ protected void initialize() { // Perform lazy initialization as needed // Nothing required by default } // -------------------------------------------------------- Package Methods /** * Return the set of DTD URL registrations, keyed by public identifier. NOTE: the returned map is in read-only mode. * * @return the read-only Map of DTD URL registrations. */ Map<String, URL> getRegistrations() { return Collections.unmodifiableMap( entityValidator ); } /** * <p> * Return the top object on the parameters stack without removing it. If there are no objects on the stack, return * <code>null</code>. * </p> * <p> * The parameters stack is used to store <code>CallMethodRule</code> parameters. See {@link #params}. * </p> * * @return the top object on the parameters stack without removing it. */ public Object[] peekParams() { try { return ( params.peek() ); } catch ( EmptyStackException e ) { log.warn( "Empty stack (returning null)" ); return ( null ); } } /** * <p> * Return the n'th object down the parameters stack, where 0 is the top element and [getCount()-1] is the bottom * element. 
If the specified index is out of range, return <code>null</code>. * </p> * <p> * The parameters stack is used to store <code>CallMethodRule</code> parameters. See {@link #params}. * </p> * * @param n Index of the desired element, where 0 is the top of the stack, 1 is the next element down, and so on. * @return the n'th object down the parameters stack */ public Object[] peekParams( int n ) { int index = ( params.size() - 1 ) - n; if ( index < 0 ) { log.warn( "Empty stack (returning null)" ); return ( null ); } try { return ( params.get( index ) ); } catch ( EmptyStackException e ) { log.warn( "Empty stack (returning null)" ); return ( null ); } } /** * <p> * Pop the top object off of the parameters stack, and return it. If there are no objects on the stack, return * <code>null</code>. * </p> * <p> * The parameters stack is used to store <code>CallMethodRule</code> parameters. See {@link #params}. * </p> * * @return the top object popped off of the parameters stack */ public Object[] popParams() { try { if ( log.isTraceEnabled() ) { log.trace( "Popping params" ); } return ( params.pop() ); } catch ( EmptyStackException e ) { log.warn( "Empty stack (returning null)" ); return ( null ); } } /** * <p> * Push a new object onto the top of the parameters stack. * </p> * <p> * The parameters stack is used to store <code>CallMethodRule</code> parameters. See {@link #params}. * </p> * * @param object The new object */ public void pushParams( Object... object ) { if ( log.isTraceEnabled() ) { log.trace( "Pushing params" ); } params.push( object ); } /** * Create a SAX exception which also understands about the location in the digester file where the exception occurs * * @param message the custom SAX exception message * @param e the exception cause * @return the new SAX exception */ public SAXException createSAXException( String message, Exception e ) { if ( ( e != null ) && ( e instanceof InvocationTargetException ) ) { Throwable t = ( (InvocationTargetException) e ).getTargetException(); if ( ( t != null ) && ( t instanceof Exception ) ) { e = (Exception) t; } } if ( locator != null ) { String error = "Error at line " + locator.getLineNumber() + " char " + locator.getColumnNumber() + ": " + message; if ( e != null ) { return new SAXParseException( error, locator, e ); } return new SAXParseException( error, locator ); } log.error( "No Locator!" ); if ( e != null ) { return new SAXException( message, e ); } return new SAXException( message ); } /** * Create a SAX exception which also understands about the location in the digester file where the exception occurs * * @param e the exception cause * @return the new SAX exception */ public SAXException createSAXException( Exception e ) { if ( e instanceof InvocationTargetException ) { Throwable t = ( (InvocationTargetException) e ).getTargetException(); if ( ( t != null ) && ( t instanceof Exception ) ) { e = (Exception) t; } } return createSAXException( e.getMessage(), e ); } /** * Create a SAX exception which also understands about the location in the digester file where the exception occurs * * @param message the custom SAX exception message * @return the new SAX exception */ public SAXException createSAXException( String message ) { return createSAXException( message, null ); } /** * Helps casting the input object to given type, avoiding NPEs. * * @since 3.0 * @param <T> the type the input object has to be cast. * @param obj the object has to be cast. * @return the casted object, if input object is not null, null otherwise. 
     */
    private <T> T npeSafeCast( Object obj )
    {
        if ( obj == null )
        {
            return null;
        }

        @SuppressWarnings( "unchecked" )
        T result = (T) obj;

        return result;
    }

}
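/*
 * Illustrative end-to-end usage sketch (not part of the Digester source). The Catalog and Book classes, their
 * addBook/title members and the XML layout are hypothetical; only the Digester calls themselves are real API.
 *
 *     import java.io.File;
 *     import org.apache.commons.digester3.Digester;
 *
 *     public class CatalogLoader
 *     {
 *         public static Catalog load( File file ) throws Exception
 *         {
 *             Digester digester = new Digester();
 *             digester.setValidating( false );
 *
 *             digester.addObjectCreate( "catalog", Catalog.class );
 *             digester.addObjectCreate( "catalog/book", Book.class );
 *             digester.addSetProperties( "catalog/book" );                     // attributes -> Book properties
 *             digester.addBeanPropertySetter( "catalog/book/title", "title" ); // element body -> setTitle(...)
 *             digester.addSetNext( "catalog/book", "addBook" );                // attach each Book to the Catalog
 *
 *             return digester.parse( file );                                   // root of the object stack
 *         }
 *     }
 */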
src/main/java/org/apache/commons/digester3/Digester.java
package org.apache.commons.digester3; /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.io.Reader; import java.lang.reflect.InvocationTargetException; import java.net.MalformedURLException; import java.net.URL; import java.net.URLConnection; import java.util.ArrayList; import java.util.Collections; import java.util.EmptyStackException; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Stack; import javax.xml.parsers.ParserConfigurationException; import javax.xml.parsers.SAXParser; import javax.xml.parsers.SAXParserFactory; import javax.xml.validation.Schema; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.xml.sax.Attributes; import org.xml.sax.ContentHandler; import org.xml.sax.EntityResolver; import org.xml.sax.ErrorHandler; import org.xml.sax.InputSource; import org.xml.sax.Locator; import org.xml.sax.SAXException; import org.xml.sax.SAXNotRecognizedException; import org.xml.sax.SAXNotSupportedException; import org.xml.sax.SAXParseException; import org.xml.sax.XMLReader; import org.xml.sax.helpers.DefaultHandler; /** * <p> * A <strong>Digester</strong> processes an XML input stream by matching a series of element nesting patterns to execute * Rules that have been added prior to the start of parsing. * </p> * <p> * See the <a href="package-summary.html#package_description">Digester Developer Guide</a> for more information. * </p> * <p> * <strong>IMPLEMENTATION NOTE</strong> - A single Digester instance may only be used within the context of a single * thread at a time, and a call to <code>parse()</code> must be completed before another can be initiated even from the * same thread. * </p> * <p> * A Digester instance should not be used for parsing more than one input document. The problem is that the Digester * class has quite a few member variables whose values "evolve" as SAX events are received during a parse. When reusing * the Digester instance, all these members must be reset back to their initial states before the second parse begins. * The "clear()" method makes a stab at resetting these, but it is actually rather a difficult problem. If you are * determined to reuse Digester instances, then at the least you should call the clear() method before each parse, and * must call it if the Digester parse terminates due to an exception during a parse. * </p> * <p> * <strong>LEGACY IMPLEMENTATION NOTE</strong> - When using the legacy XML schema support (instead of using the * {@link Schema} class), a bug in Xerces 2.0.2 prevents the support of XML schema. You need Xerces 2.1/2.3 and up to * make this class work with the legacy XML schema support. 
* </p> * <p> * This package was inspired by the <code>XmlMapper</code> class that was part of Tomcat 3.0 and 3.1, but is organized * somewhat differently. * </p> */ public class Digester extends DefaultHandler { // --------------------------------------------------------- Constructors /** * Construct a new Digester with default properties. */ public Digester() { super(); } /** * Construct a new Digester, allowing a SAXParser to be passed in. This allows Digester to be used in environments * which are unfriendly to JAXP1.1 (such as WebLogic 6.0). This may help in places where you are able to load JAXP * 1.1 classes yourself. * * @param parser The SAXParser used to parse XML streams */ public Digester( SAXParser parser ) { super(); this.parser = parser; } /** * Construct a new Digester, allowing an XMLReader to be passed in. This allows Digester to be used in environments * which are unfriendly to JAXP1.1 (such as WebLogic 6.0). Note that if you use this option you have to configure * namespace and validation support yourself, as these properties only affect the SAXParser and emtpy constructor. * * @param reader The XMLReader used to parse XML streams */ public Digester( XMLReader reader ) { super(); this.reader = reader; } // --------------------------------------------------- Instance Variables /** * The body text of the current element. */ private StringBuilder bodyText = new StringBuilder(); /** * The stack of body text string buffers for surrounding elements. */ private final Stack<StringBuilder> bodyTexts = new Stack<StringBuilder>(); /** * Stack whose elements are List objects, each containing a list of Rule objects as returned from Rules.getMatch(). * As each xml element in the input is entered, the matching rules are pushed onto this stack. After the end tag is * reached, the matches are popped again. The depth of is stack is therefore exactly the same as the current * "nesting" level of the input xml. * * @since 1.6 */ private final Stack<List<Rule>> matches = new Stack<List<Rule>>(); /** * The class loader to use for instantiating application objects. If not specified, the context class loader, or the * class loader used to load Digester itself, is used, based on the value of the <code>useContextClassLoader</code> * variable. */ private ClassLoader classLoader = null; /** * Has this Digester been configured yet. */ private boolean configured = false; /** * The EntityResolver used by the SAX parser. By default it use this class */ private EntityResolver entityResolver; /** * The URLs of entityValidator that have been registered, keyed by the public identifier that corresponds. */ private final HashMap<String, URL> entityValidator = new HashMap<String, URL>(); /** * The application-supplied error handler that is notified when parsing warnings, errors, or fatal errors occur. */ private ErrorHandler errorHandler = null; /** * The SAXParserFactory that is created the first time we need it. */ private SAXParserFactory factory = null; /** * The Locator associated with our parser. */ private Locator locator = null; /** * The current match pattern for nested element processing. */ private String match = ""; /** * Do we want a "namespace aware" parser. */ private boolean namespaceAware = false; /** * Registered namespaces we are currently processing. The key is the namespace prefix that was declared in the * document. The value is an Stack of the namespace URIs this prefix has been mapped to -- the top Stack element is * the most current one. 
(This architecture is required because documents can declare nested uses of the same prefix * for different Namespace URIs). */ private final HashMap<String, Stack<String>> namespaces = new HashMap<String, Stack<String>>(); /** * Do we want a "XInclude aware" parser. */ private boolean xincludeAware = false; /** * The parameters stack being utilized by CallMethodRule and CallParamRule rules. * * @since 2.0 */ private final Stack<Object[]> params = new Stack<Object[]>(); /** * The SAXParser we will use to parse the input stream. */ private SAXParser parser = null; /** * The public identifier of the DTD we are currently parsing under (if any). */ private String publicId = null; /** * The XMLReader used to parse digester rules. */ private XMLReader reader = null; /** * The "root" element of the stack (in other words, the last object that was popped. */ private Object root = null; /** * The <code>Rules</code> implementation containing our collection of <code>Rule</code> instances and associated * matching policy. If not established before the first rule is added, a default implementation will be provided. */ private Rules rules = null; /** * The XML schema to use for validating an XML instance. * * @since 2.0 */ private Schema schema = null; /** * The object stack being constructed. */ private final Stack<Object> stack = new Stack<Object>(); /** * Do we want to use the Context ClassLoader when loading classes for instantiating new objects. Default is * <code>false</code>. */ private boolean useContextClassLoader = false; /** * Do we want to use a validating parser. */ private boolean validating = false; /** * The Log to which most logging calls will be made. */ private Log log = LogFactory.getLog( "org.apache.commons.digester3.Digester" ); /** * The Log to which all SAX event related logging calls will be made. */ private Log saxLog = LogFactory.getLog( "org.apache.commons.digester3.Digester.sax" ); /** * The schema language supported. By default, we use this one. */ protected static final String W3C_XML_SCHEMA = "http://www.w3.org/2001/XMLSchema"; /** * An optional class that substitutes values in attributes and body text. This may be null and so a null check is * always required before use. */ private Substitutor substitutor; /** Stacks used for interrule communication, indexed by name String */ private final HashMap<String, Stack<Object>> stacksByName = new HashMap<String, Stack<Object>>(); /** * If not null, then calls by the parser to this object's characters, startElement, endElement and * processingInstruction methods are forwarded to the specified object. This is intended to allow rules to * temporarily "take control" of the sax events. In particular, this is used by NodeCreateRule. * <p> * See setCustomContentHandler. */ private ContentHandler customContentHandler = null; /** * Object which will receive callbacks for every pop/push action on the default stack or named stacks. */ private StackAction stackAction = null; // ------------------------------------------------------------- Properties /** * Return the currently mapped namespace URI for the specified prefix, if any; otherwise return <code>null</code>. * These mappings come and go dynamically as the document is parsed. 
* * @param prefix Prefix to look up * @return the currently mapped namespace URI for the specified prefix */ public String findNamespaceURI( String prefix ) { Stack<String> nsStack = namespaces.get( prefix ); if ( nsStack == null ) { return null; } try { return ( nsStack.peek() ); } catch ( EmptyStackException e ) { return null; } } /** * Return the class loader to be used for instantiating application objects when required. This is determined based * upon the following rules: * <ul> * <li>The class loader set by <code>setClassLoader()</code>, if any</li> * <li>The thread context class loader, if it exists and the <code>useContextClassLoader</code> property is set to * true</li> * <li>The class loader used to load the Digester class itself. * </ul> * * @return the class loader to be used for instantiating application objects. */ public ClassLoader getClassLoader() { if ( this.classLoader != null ) { return ( this.classLoader ); } if ( this.useContextClassLoader ) { ClassLoader classLoader = Thread.currentThread().getContextClassLoader(); if ( classLoader != null ) { return ( classLoader ); } } return ( this.getClass().getClassLoader() ); } /** * Set the class loader to be used for instantiating application objects when required. * * @param classLoader The new class loader to use, or <code>null</code> to revert to the standard rules */ public void setClassLoader( ClassLoader classLoader ) { this.classLoader = classLoader; } /** * Return the current depth of the element stack. * * @return the current depth of the element stack. */ public int getCount() { return ( stack.size() ); } /** * Return the name of the XML element that is currently being processed. * * @return the name of the XML element that is currently being processed. */ public String getCurrentElementName() { String elementName = match; int lastSlash = elementName.lastIndexOf( '/' ); if ( lastSlash >= 0 ) { elementName = elementName.substring( lastSlash + 1 ); } return ( elementName ); } /** * Return the error handler for this Digester. * * @return the error handler for this Digester. */ public ErrorHandler getErrorHandler() { return ( this.errorHandler ); } /** * Set the error handler for this Digester. * * @param errorHandler The new error handler */ public void setErrorHandler( ErrorHandler errorHandler ) { this.errorHandler = errorHandler; } /** * Return the SAXParserFactory we will use, creating one if necessary. * * @return the SAXParserFactory we will use, creating one if necessary. */ public SAXParserFactory getFactory() { if ( factory == null ) { factory = SAXParserFactory.newInstance(); factory.setNamespaceAware( namespaceAware ); factory.setXIncludeAware( xincludeAware ); factory.setValidating( validating ); factory.setSchema( schema ); } return ( factory ); } /** * Returns a flag indicating whether the requested feature is supported by the underlying implementation of * <code>org.xml.sax.XMLReader</code>. See <a href="http://www.saxproject.org">the saxproject website</a> for * information about the standard SAX2 feature flags. 
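 * <p>
 * Illustrative sketch only (not part of the original Javadoc): feature names use the standard SAX2 URIs and must be
 * set before the underlying parser is first created, for example
 * </p>
 * <pre>
 * Digester digester = new Digester();
 * // disable loading of external general entities before the parser is created
 * digester.setFeature( "http://xml.org/sax/features/external-general-entities", false );
 * boolean enabled = digester.getFeature( "http://xml.org/sax/features/external-general-entities" );
 * </pre>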
* * @param feature Name of the feature to inquire about * @return true, if the requested feature is supported by the underlying implementation of * <code>org.xml.sax.XMLReader</code>, false otherwise * @exception ParserConfigurationException if a parser configuration error occurs * @exception SAXNotRecognizedException if the property name is not recognized * @exception SAXNotSupportedException if the property name is recognized but not supported */ public boolean getFeature( String feature ) throws ParserConfigurationException, SAXNotRecognizedException, SAXNotSupportedException { return ( getFactory().getFeature( feature ) ); } /** * Sets a flag indicating whether the requested feature is supported by the underlying implementation of * <code>org.xml.sax.XMLReader</code>. See <a href="http://www.saxproject.org">the saxproject website</a> for * information about the standard SAX2 feature flags. In order to be effective, this method must be called * <strong>before</strong> the <code>getParser()</code> method is called for the first time, either directly or * indirectly. * * @param feature Name of the feature to set the status for * @param value The new value for this feature * @exception ParserConfigurationException if a parser configuration error occurs * @exception SAXNotRecognizedException if the property name is not recognized * @exception SAXNotSupportedException if the property name is recognized but not supported */ public void setFeature( String feature, boolean value ) throws ParserConfigurationException, SAXNotRecognizedException, SAXNotSupportedException { getFactory().setFeature( feature, value ); } /** * Return the current Logger associated with this instance of the Digester * * @return the current Logger associated with this instance of the Digester */ public Log getLogger() { return log; } /** * Set the current logger for this Digester. * * @param log the current logger for this Digester. */ public void setLogger( Log log ) { this.log = log; } /** * Gets the logger used for logging SAX-related information. <strong>Note</strong> the output is finely grained. * * @return the logger used for logging SAX-related information * @since 1.6 */ public Log getSAXLogger() { return saxLog; } /** * Sets the logger used for logging SAX-related information. <strong>Note</strong> the output is finely grained. * * @param saxLog the logger used for logging SAX-related information, not null * @since 1.6 */ public void setSAXLogger( Log saxLog ) { this.saxLog = saxLog; } /** * Return the current rule match path * * @return the current rule match path */ public String getMatch() { return match; } /** * Return the "namespace aware" flag for parsers we create. * * @return the "namespace aware" flag for parsers we create. */ public boolean getNamespaceAware() { return ( this.namespaceAware ); } /** * Set the "namespace aware" flag for parsers we create. * * @param namespaceAware The new "namespace aware" flag */ public void setNamespaceAware( boolean namespaceAware ) { this.namespaceAware = namespaceAware; } /** * Return the XInclude-aware flag for parsers we create. XInclude functionality additionally requires * namespace-awareness. * * @return The XInclude-aware flag * @see #getNamespaceAware() * @since 2.0 */ public boolean getXIncludeAware() { return ( this.xincludeAware ); } /** * Set the XInclude-aware flag for parsers we create. This additionally requires namespace-awareness. 
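 * <p>
 * Illustrative sketch only (not part of the original Javadoc): both flags must be set before the first parse, and
 * namespace awareness has to be enabled for XInclude processing to take effect, for example
 * </p>
 * <pre>
 * Digester digester = new Digester();
 * digester.setNamespaceAware( true );   // required for XInclude
 * digester.setXIncludeAware( true );
 * </pre>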
* * @param xincludeAware The new XInclude-aware flag * @see #setNamespaceAware(boolean) * @since 2.0 */ public void setXIncludeAware( boolean xincludeAware ) { this.xincludeAware = xincludeAware; } /** * Set the public id of the current file being parse. * * @param publicId the DTD/Schema public's id. */ public void setPublicId( String publicId ) { this.publicId = publicId; } /** * Return the public identifier of the DTD we are currently parsing under, if any. * * @return the public identifier of the DTD we are currently parsing under, if any. */ public String getPublicId() { return ( this.publicId ); } /** * Return the namespace URI that will be applied to all subsequently added <code>Rule</code> objects. * * @return the namespace URI that will be applied to all subsequently added <code>Rule</code> objects. */ public String getRuleNamespaceURI() { return ( getRules().getNamespaceURI() ); } /** * Set the namespace URI that will be applied to all subsequently added <code>Rule</code> objects. * * @param ruleNamespaceURI Namespace URI that must match on all subsequently added rules, or <code>null</code> for * matching regardless of the current namespace URI */ public void setRuleNamespaceURI( String ruleNamespaceURI ) { getRules().setNamespaceURI( ruleNamespaceURI ); } /** * Return the SAXParser we will use to parse the input stream. * * If there is a problem creating the parser, return <code>null</code>. * * @return the SAXParser we will use to parse the input stream */ public SAXParser getParser() { // Return the parser we already created (if any) if ( parser != null ) { return ( parser ); } // Create a new parser try { parser = getFactory().newSAXParser(); } catch ( Exception e ) { log.error( "Digester.getParser: ", e ); return ( null ); } return ( parser ); } /** * Return the current value of the specified property for the underlying <code>XMLReader</code> implementation. * * See <a href="http://www.saxproject.org">the saxproject website</a> for information about the standard SAX2 * properties. * * @param property Property name to be retrieved * @return the current value of the specified property for the underlying <code>XMLReader</code> implementation. * @exception SAXNotRecognizedException if the property name is not recognized * @exception SAXNotSupportedException if the property name is recognized but not supported */ public Object getProperty( String property ) throws SAXNotRecognizedException, SAXNotSupportedException { return ( getParser().getProperty( property ) ); } /** * Set the current value of the specified property for the underlying <code>XMLReader</code> implementation. See <a * href="http://www.saxproject.org">the saxproject website</a> for information about the standard SAX2 properties. * * @param property Property name to be set * @param value Property value to be set * @exception SAXNotRecognizedException if the property name is not recognized * @exception SAXNotSupportedException if the property name is recognized but not supported */ public void setProperty( String property, Object value ) throws SAXNotRecognizedException, SAXNotSupportedException { getParser().setProperty( property, value ); } /** * Return the <code>Rules</code> implementation object containing our rules collection and associated matching * policy. If none has been established, a default implementation will be created and returned. * * @return the <code>Rules</code> implementation object. 
*/ public Rules getRules() { if ( this.rules == null ) { this.rules = new RulesBase(); this.rules.setDigester( this ); } return ( this.rules ); } /** * Set the <code>Rules</code> implementation object containing our rules collection and associated matching policy. * * @param rules New Rules implementation */ public void setRules( Rules rules ) { this.rules = rules; this.rules.setDigester( this ); } /** * Return the XML Schema used when parsing. * * @return The {@link Schema} instance in use. * @since 2.0 */ public Schema getXMLSchema() { return ( this.schema ); } /** * Set the XML Schema to be used when parsing. * * @param schema The {@link Schema} instance to use. * @since 2.0 */ public void setXMLSchema( Schema schema ) { this.schema = schema; } /** * Return the boolean as to whether the context ClassLoader should be used. * * @return true, if the context ClassLoader should be used, false otherwise. */ public boolean getUseContextClassLoader() { return useContextClassLoader; } /** * Determine whether to use the Context ClassLoader (the one found by calling * <code>Thread.currentThread().getContextClassLoader()</code>) to resolve/load classes that are defined in various * rules. If not using Context ClassLoader, then the class-loading defaults to using the calling-class' ClassLoader. * * @param use determines whether to use Context ClassLoader. */ public void setUseContextClassLoader( boolean use ) { useContextClassLoader = use; } /** * Return the validating parser flag. * * @return the validating parser flag. */ public boolean getValidating() { return ( this.validating ); } /** * Set the validating parser flag. This must be called before <code>parse()</code> is called the first time. * * @param validating The new validating parser flag. */ public void setValidating( boolean validating ) { this.validating = validating; } /** * Return the XMLReader to be used for parsing the input document. * * FIXME: there is a bug in JAXP/XERCES that prevent the use of a parser that contains a schema with a DTD. * * @return the XMLReader to be used for parsing the input document. * @exception SAXException if no XMLReader can be instantiated */ public XMLReader getXMLReader() throws SAXException { if ( reader == null ) { reader = getParser().getXMLReader(); } reader.setDTDHandler( this ); reader.setContentHandler( this ); if ( entityResolver == null ) { reader.setEntityResolver( this ); } else { reader.setEntityResolver( entityResolver ); } reader.setErrorHandler( this ); return reader; } /** * Gets the <code>Substitutor</code> used to convert attributes and body text. * * @return the <code>Substitutor</code> used to convert attributes and body text, * null if not substitutions are to be performed. */ public Substitutor getSubstitutor() { return substitutor; } /** * Sets the <code>Substitutor</code> to be used to convert attributes and body text. * * @param substitutor the Substitutor to be used to convert attributes and body text or null if not substitution of * these values is to be performed. */ public void setSubstitutor( Substitutor substitutor ) { this.substitutor = substitutor; } /** * returns the custom SAX ContentHandler where events are redirected. * * @return the custom SAX ContentHandler where events are redirected. * @see #setCustomContentHandler(ContentHandler) * @since 1.7 */ public ContentHandler getCustomContentHandler() { return customContentHandler; } /** * Redirects (or cancels redirecting) of SAX ContentHandler events to an external object. 
* <p> * When this object's customContentHandler is non-null, any SAX events received from the parser will simply be * passed on to the specified object instead of this object handling them. This allows Rule classes to take control * of the SAX event stream for a while in order to do custom processing. Such a rule should save the old value * before setting a new one, and restore the old value in order to resume normal digester processing. * <p> * An example of a Rule which needs this feature is NodeCreateRule. * <p> * Note that saving the old value is probably not needed as it should always be null; a custom rule that wants to * take control could only have been called when there was no custom content handler. But it seems cleaner to * properly save/restore the value and maybe some day this will come in useful. * <p> * Note also that this is not quite equivalent to * * <pre> * digester.getXMLReader().setContentHandler( handler ) * </pre> * * for these reasons: * <ul> * <li>Some xml parsers don't like having setContentHandler called after parsing has started. The Aelfred parser is * one example.</li> * <li>Directing the events via the Digester object potentially allows us to log information about those SAX events * at the digester level.</li> * </ul> * * @param handler the custom SAX ContentHandler where events are redirected. * @since 1.7 */ public void setCustomContentHandler( ContentHandler handler ) { customContentHandler = handler; } /** * Define a callback object which is invoked whenever an object is pushed onto a digester object stack, * or popped off one. * * @param stackAction the callback object which is invoked whenever an object is pushed onto a digester * object stack, or popped off one. * @since 1.8 */ public void setStackAction( StackAction stackAction ) { this.stackAction = stackAction; } /** * Return the callback object which is invoked whenever an object is pushed onto a digester object stack, * or popped off one. * * @return the callback object which is invoked whenever an object is pushed onto a digester object stack, * or popped off one. * @see #setStackAction(StackAction). * @since 1.8 */ public StackAction getStackAction() { return stackAction; } /** * Get the most current namespaces for all prefixes. 
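 * <p>
 * Illustrative sketch only (not part of the original Javadoc): this is typically called from inside a {@link Rule}
 * while a namespace-aware parse is in progress, for example
 * </p>
 * <pre>
 * Map&lt;String, String&gt; bindings = getDigester().getCurrentNamespaces();
 * String xsiUri = bindings.get( "xsi" );   // null if the "xsi" prefix is not currently bound
 * </pre>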
* * @return Map A map with namespace prefixes as keys and most current namespace URIs for the corresponding prefixes * as values * @since 1.8 */ public Map<String, String> getCurrentNamespaces() { if ( !namespaceAware ) { log.warn( "Digester is not namespace aware" ); } Map<String, String> currentNamespaces = new HashMap<String, String>(); for ( Map.Entry<String, Stack<String>> nsEntry : namespaces.entrySet() ) { try { currentNamespaces.put( nsEntry.getKey(), nsEntry.getValue().peek() ); } catch ( RuntimeException e ) { // rethrow, after logging log.error( e.getMessage(), e ); throw e; } } return currentNamespaces; } // ------------------------------------------------- ContentHandler Methods /** * {@inheritDoc} */ @Override public void characters( char buffer[], int start, int length ) throws SAXException { if ( customContentHandler != null ) { // forward calls instead of handling them here customContentHandler.characters( buffer, start, length ); return; } if ( saxLog.isDebugEnabled() ) { saxLog.debug( "characters(" + new String( buffer, start, length ) + ")" ); } bodyText.append( buffer, start, length ); } /** * {@inheritDoc} */ @Override public void endDocument() throws SAXException { if ( saxLog.isDebugEnabled() ) { if ( getCount() > 1 ) { saxLog.debug( "endDocument(): " + getCount() + " elements left" ); } else { saxLog.debug( "endDocument()" ); } } // Fire "finish" events for all defined rules for ( Rule rule : getRules().rules() ) { try { rule.finish(); } catch ( Exception e ) { log.error( "Finish event threw exception", e ); throw createSAXException( e ); } catch ( Error e ) { log.error( "Finish event threw error", e ); throw e; } } // Perform final cleanup clear(); } /** * {@inheritDoc} */ @Override public void endElement( String namespaceURI, String localName, String qName ) throws SAXException { if ( customContentHandler != null ) { // forward calls instead of handling them here customContentHandler.endElement( namespaceURI, localName, qName ); return; } boolean debug = log.isDebugEnabled(); if ( debug ) { if ( saxLog.isDebugEnabled() ) { saxLog.debug( "endElement(" + namespaceURI + "," + localName + "," + qName + ")" ); } log.debug( " match='" + match + "'" ); log.debug( " bodyText='" + bodyText + "'" ); } // the actual element name is either in localName or qName, depending // on whether the parser is namespace aware String name = localName; if ( ( name == null ) || ( name.length() < 1 ) ) { name = qName; } // Fire "body" events for all relevant rules List<Rule> rules = matches.pop(); if ( ( rules != null ) && ( rules.size() > 0 ) ) { String bodyText = this.bodyText.toString(); Substitutor substitutor = getSubstitutor(); if ( substitutor != null ) { bodyText = substitutor.substitute( bodyText ); } for ( int i = 0; i < rules.size(); i++ ) { try { Rule rule = rules.get( i ); if ( debug ) { log.debug( " Fire body() for " + rule ); } rule.body( namespaceURI, name, bodyText ); } catch ( Exception e ) { log.error( "Body event threw exception", e ); throw createSAXException( e ); } catch ( Error e ) { log.error( "Body event threw error", e ); throw e; } } } else { if ( debug ) { log.debug( " No rules found matching '" + match + "'." 
); } } // Recover the body text from the surrounding element bodyText = bodyTexts.pop(); if ( debug ) { log.debug( " Popping body text '" + bodyText.toString() + "'" ); } // Fire "end" events for all relevant rules in reverse order if ( rules != null ) { for ( int i = 0; i < rules.size(); i++ ) { int j = ( rules.size() - i ) - 1; try { Rule rule = rules.get( j ); if ( debug ) { log.debug( " Fire end() for " + rule ); } rule.end( namespaceURI, name ); } catch ( Exception e ) { log.error( "End event threw exception", e ); throw createSAXException( e ); } catch ( Error e ) { log.error( "End event threw error", e ); throw e; } } } // Recover the previous match expression int slash = match.lastIndexOf( '/' ); if ( slash >= 0 ) { match = match.substring( 0, slash ); } else { match = ""; } } /** * {@inheritDoc} */ @Override public void endPrefixMapping( String prefix ) throws SAXException { if ( saxLog.isDebugEnabled() ) { saxLog.debug( "endPrefixMapping(" + prefix + ")" ); } // Deregister this prefix mapping Stack<String> stack = namespaces.get( prefix ); if ( stack == null ) { return; } try { stack.pop(); if ( stack.empty() ) { namespaces.remove( prefix ); } } catch ( EmptyStackException e ) { throw createSAXException( "endPrefixMapping popped too many times" ); } } /** * {@inheritDoc} */ @Override public void ignorableWhitespace( char buffer[], int start, int len ) throws SAXException { if ( saxLog.isDebugEnabled() ) { saxLog.debug( "ignorableWhitespace(" + new String( buffer, start, len ) + ")" ); } // No processing required } /** * {@inheritDoc} */ @Override public void processingInstruction( String target, String data ) throws SAXException { if ( customContentHandler != null ) { // forward calls instead of handling them here customContentHandler.processingInstruction( target, data ); return; } if ( saxLog.isDebugEnabled() ) { saxLog.debug( "processingInstruction('" + target + "','" + data + "')" ); } // No processing is required } /** * Gets the document locator associated with our parser. * * @return the Locator supplied by the document parser */ public Locator getDocumentLocator() { return locator; } /** * {@inheritDoc} */ @Override public void setDocumentLocator( Locator locator ) { if ( saxLog.isDebugEnabled() ) { saxLog.debug( "setDocumentLocator(" + locator + ")" ); } this.locator = locator; } /** * {@inheritDoc} */ @Override public void skippedEntity( String name ) throws SAXException { if ( saxLog.isDebugEnabled() ) { saxLog.debug( "skippedEntity(" + name + ")" ); } // No processing required } /** * {@inheritDoc} */ @Override public void startDocument() throws SAXException { if ( saxLog.isDebugEnabled() ) { saxLog.debug( "startDocument()" ); } // ensure that the digester is properly configured, as // the digester could be used as a SAX ContentHandler // rather than via the parse() methods. 
configure(); } /** * {@inheritDoc} */ @Override public void startElement( String namespaceURI, String localName, String qName, Attributes list ) throws SAXException { boolean debug = log.isDebugEnabled(); if ( customContentHandler != null ) { // forward calls instead of handling them here customContentHandler.startElement( namespaceURI, localName, qName, list ); return; } if ( saxLog.isDebugEnabled() ) { saxLog.debug( "startElement(" + namespaceURI + "," + localName + "," + qName + ")" ); } // Save the body text accumulated for our surrounding element bodyTexts.push( bodyText ); if ( debug ) { log.debug( " Pushing body text '" + bodyText.toString() + "'" ); } bodyText = new StringBuilder(); // the actual element name is either in localName or qName, depending // on whether the parser is namespace aware String name = localName; if ( ( name == null ) || ( name.length() < 1 ) ) { name = qName; } // Compute the current matching rule StringBuilder sb = new StringBuilder( match ); if ( match.length() > 0 ) { sb.append( '/' ); } sb.append( name ); match = sb.toString(); if ( debug ) { log.debug( " New match='" + match + "'" ); } // Fire "begin" events for all relevant rules List<Rule> rules = getRules().match( namespaceURI, match ); matches.push( rules ); if ( ( rules != null ) && ( rules.size() > 0 ) ) { Substitutor substitutor = getSubstitutor(); if ( substitutor != null ) { list = substitutor.substitute( list ); } for ( int i = 0; i < rules.size(); i++ ) { try { Rule rule = rules.get( i ); if ( debug ) { log.debug( " Fire begin() for " + rule ); } rule.begin( namespaceURI, name, list ); } catch ( Exception e ) { log.error( "Begin event threw exception", e ); throw createSAXException( e ); } catch ( Error e ) { log.error( "Begin event threw error", e ); throw e; } } } else { if ( debug ) { log.debug( " No rules found matching '" + match + "'." ); } } } /** * {@inheritDoc} */ @Override public void startPrefixMapping( String prefix, String namespaceURI ) throws SAXException { if ( saxLog.isDebugEnabled() ) { saxLog.debug( "startPrefixMapping(" + prefix + "," + namespaceURI + ")" ); } // Register this prefix mapping Stack<String> stack = namespaces.get( prefix ); if ( stack == null ) { stack = new Stack<String>(); namespaces.put( prefix, stack ); } stack.push( namespaceURI ); } // ----------------------------------------------------- DTDHandler Methods /** * {@inheritDoc} */ @Override public void notationDecl( String name, String publicId, String systemId ) { if ( saxLog.isDebugEnabled() ) { saxLog.debug( "notationDecl(" + name + "," + publicId + "," + systemId + ")" ); } } /** * {@inheritDoc} */ @Override public void unparsedEntityDecl( String name, String publicId, String systemId, String notation ) { if ( saxLog.isDebugEnabled() ) { saxLog.debug( "unparsedEntityDecl(" + name + "," + publicId + "," + systemId + "," + notation + ")" ); } } // ----------------------------------------------- EntityResolver Methods /** * Set the <code>EntityResolver</code> used by SAX when resolving public id and system id. This must be called * before the first call to <code>parse()</code>. * * @param entityResolver a class that implement the <code>EntityResolver</code> interface. */ public void setEntityResolver( EntityResolver entityResolver ) { this.entityResolver = entityResolver; } /** * Return the Entity Resolver used by the SAX parser. * * @return the Entity Resolver used by the SAX parser. 
*/ public EntityResolver getEntityResolver() { return entityResolver; } /** * {@inheritDoc} */ @Override public InputSource resolveEntity( String publicId, String systemId ) throws SAXException { if ( saxLog.isDebugEnabled() ) { saxLog.debug( "resolveEntity('" + publicId + "', '" + systemId + "')" ); } if ( publicId != null ) { this.publicId = publicId; } // Has this system identifier been registered? URL entityURL = null; if ( publicId != null ) { entityURL = entityValidator.get( publicId ); } // Redirect the schema location to a local destination if ( entityURL == null && systemId != null ) { entityURL = entityValidator.get( systemId ); } if ( entityURL == null ) { if ( systemId == null ) { // cannot resolve if ( log.isDebugEnabled() ) { log.debug( " Cannot resolve null entity, returning null InputSource" ); } return ( null ); } // try to resolve using system ID if ( log.isDebugEnabled() ) { log.debug( " Trying to resolve using system ID '" + systemId + "'" ); } try { entityURL = new URL( systemId ); } catch ( MalformedURLException e ) { throw new IllegalArgumentException( "Malformed URL '" + systemId + "' : " + e.getMessage() ); } } // Return an input source to our alternative URL if ( log.isDebugEnabled() ) { log.debug( " Resolving to alternate DTD '" + entityURL + "'" ); } try { return createInputSourceFromURL( entityURL ); } catch ( Exception e ) { throw createSAXException( e ); } } // ------------------------------------------------- ErrorHandler Methods /** * {@inheritDoc} */ @Override public void error( SAXParseException exception ) throws SAXException { log.error( "Parse Error at line " + exception.getLineNumber() + " column " + exception.getColumnNumber() + ": " + exception.getMessage(), exception ); if ( errorHandler != null ) { errorHandler.error( exception ); } } /** * {@inheritDoc} */ @Override public void fatalError( SAXParseException exception ) throws SAXException { log.error( "Parse Fatal Error at line " + exception.getLineNumber() + " column " + exception.getColumnNumber() + ": " + exception.getMessage(), exception ); if ( errorHandler != null ) { errorHandler.fatalError( exception ); } } /** * {@inheritDoc} */ @Override public void warning( SAXParseException exception ) throws SAXException { if ( errorHandler != null ) { log.warn( "Parse Warning Error at line " + exception.getLineNumber() + " column " + exception.getColumnNumber() + ": " + exception.getMessage(), exception ); errorHandler.warning( exception ); } } // ------------------------------------------------------- Public Methods /** * Parse the content of the specified file using this Digester. Returns the root element from the object stack (if * any). * * @param <T> the type used to auto-cast the returned object to the assigned variable type * @param file File containing the XML data to be parsed * @return the root element from the object stack (if any) * @exception IOException if an input/output error occurs * @exception SAXException if a parsing exception occurs */ public <T> T parse( File file ) throws IOException, SAXException { if ( file == null ) { throw new IllegalArgumentException( "File to parse is null" ); } configure(); InputSource input = new InputSource( new FileInputStream( file ) ); input.setSystemId( file.toURI().toURL().toString() ); getXMLReader().parse( input ); cleanup(); return ( this.<T> getRoot() ); } /** * Parse the content of the specified input source using this Digester. Returns the root element from the object * stack (if any). 
* * @param <T> the type used to auto-cast the returned object to the assigned variable type * @param input Input source containing the XML data to be parsed * @return the root element from the object stack (if any) * @exception IOException if an input/output error occurs * @exception SAXException if a parsing exception occurs */ public <T> T parse( InputSource input ) throws IOException, SAXException { if ( input == null ) { throw new IllegalArgumentException( "InputSource to parse is null" ); } configure(); getXMLReader().parse( input ); cleanup(); return this.<T> getRoot(); } /** * Parse the content of the specified input stream using this Digester. Returns the root element from the object * stack (if any). * * @param <T> the type used to auto-cast the returned object to the assigned variable type * @param input Input stream containing the XML data to be parsed * @return the root element from the object stack (if any) * @exception IOException if an input/output error occurs * @exception SAXException if a parsing exception occurs */ public <T> T parse( InputStream input ) throws IOException, SAXException { if ( input == null ) { throw new IllegalArgumentException( "InputStream to parse is null" ); } configure(); InputSource is = new InputSource( input ); getXMLReader().parse( is ); cleanup(); return ( this.<T> getRoot() ); } /** * Parse the content of the specified reader using this Digester. Returns the root element from the object stack (if * any). * * @param <T> the type used to auto-cast the returned object to the assigned variable type * @param reader Reader containing the XML data to be parsed * @return the root element from the object stack (if any) * @exception IOException if an input/output error occurs * @exception SAXException if a parsing exception occurs */ public <T> T parse( Reader reader ) throws IOException, SAXException { if ( reader == null ) { throw new IllegalArgumentException( "Reader to parse is null" ); } configure(); InputSource is = new InputSource( reader ); getXMLReader().parse( is ); cleanup(); return ( this.<T> getRoot() ); } /** * Parse the content of the specified URI using this Digester. Returns the root element from the object stack (if * any). * * @param <T> the type used to auto-cast the returned object to the assigned variable type * @param uri URI containing the XML data to be parsed * @return the root element from the object stack (if any) * @exception IOException if an input/output error occurs * @exception SAXException if a parsing exception occurs */ public <T> T parse( String uri ) throws IOException, SAXException { if ( uri == null ) { throw new IllegalArgumentException( "String URI to parse is null" ); } configure(); InputSource is = createInputSourceFromURL( uri ); getXMLReader().parse( is ); cleanup(); return ( this.<T> getRoot() ); } /** * Parse the content of the specified URL using this Digester. Returns the root element from the object stack (if * any). 
* * @param <T> the type used to auto-cast the returned object to the assigned variable type * @param url URL containing the XML data to be parsed * @return the root element from the object stack (if any) * @exception IOException if an input/output error occurs * @exception SAXException if a parsing exception occurs * @since 1.8 */ public <T> T parse( URL url ) throws IOException, SAXException { if ( url == null ) { throw new IllegalArgumentException( "URL to parse is null" ); } configure(); InputSource is = createInputSourceFromURL( url ); getXMLReader().parse( is ); cleanup(); return ( this.<T> getRoot() ); } /** * <p> * Register the specified DTD URL for the specified public identifier. This must be called before the first call to * <code>parse()</code>. * </p> * <p> * <code>Digester</code> contains an internal <code>EntityResolver</code> implementation. This maps * <code>PUBLICID</code>'s to URLs (from which the resource will be loaded). A common use case for this method is to * register local URLs (possibly computed at runtime by a classloader) for DTDs. This allows the performance * advantage of using a local version without having to ensure every <code>SYSTEM</code> URI on every processed xml * document is local. This implementation provides only basic functionality. If more sophisticated features are * required, using {@link #setEntityResolver} to set a custom resolver is recommended. * </p> * <p> * <strong>Note:</strong> This method will have no effect when a custom <code>EntityResolver</code> has been set. * (Setting a custom <code>EntityResolver</code> overrides the internal implementation.) * </p> * * @param publicId Public identifier of the DTD to be resolved * @param entityURL The URL to use for reading this DTD * @since 1.8 */ public void register( String publicId, URL entityURL ) { if ( log.isDebugEnabled() ) { log.debug( "register('" + publicId + "', '" + entityURL + "'" ); } entityValidator.put( publicId, entityURL ); } /** * <p> * Convenience method that registers the string version of an entity URL instead of a URL version. * </p> * * @param publicId Public identifier of the entity to be resolved * @param entityURL The URL to use for reading this entity */ public void register( String publicId, String entityURL ) { if ( log.isDebugEnabled() ) { log.debug( "register('" + publicId + "', '" + entityURL + "'" ); } try { entityValidator.put( publicId, new URL( entityURL ) ); } catch ( MalformedURLException e ) { throw new IllegalArgumentException( "Malformed URL '" + entityURL + "' : " + e.getMessage() ); } } /** * Convenience method that registers DTD URLs for the specified public identifiers. * * @param entityValidator The URLs of entityValidator that have been registered, keyed by the public * identifier that corresponds. * @since 3.0 */ public void registerAll( Map<String, URL> entityValidator ) { this.entityValidator.putAll( entityValidator ); } /** * <p> * <code>List</code> of <code>InputSource</code> instances created by a <code>createInputSourceFromURL()</code> * method call. These represent open input streams that need to be closed to avoid resource leaks, as well as * potentially locked JAR files on Windows. * </p> */ protected List<InputSource> inputSources = new ArrayList<InputSource>( 5 ); /** * Given a URL, return an InputSource that reads from that URL. * <p> * Ideally this function would not be needed and code could just use <code>new InputSource(entityURL)</code>. 
* Unfortunately it appears that when the entityURL points to a file within a jar archive a caching mechanism inside * the InputSource implementation causes a file-handle to the jar file to remain open. On Windows systems this then * causes the jar archive file to be locked on disk ("in use") which makes it impossible to delete the jar file - * and that really stuffs up "undeploy" in webapps in particular. * <p> * In JDK1.4 and later, Apache XercesJ is used as the xml parser. The InputSource object provided is converted into * an XMLInputSource, and eventually passed to an instance of XMLDocumentScannerImpl to specify the source data to * be converted into tokens for the rest of the XMLReader code to handle. XMLDocumentScannerImpl calls * fEntityManager.startDocumentEntity(source), where fEntityManager is declared in ancestor class XMLScanner to be * an XMLEntityManager. In that class, if the input source stream is null, then: * * <pre> * URL location = new URL( expandedSystemId ); * URLConnection connect = location.openConnection(); * if ( connect instanceof HttpURLConnection ) * { * setHttpProperties( connect, xmlInputSource ); * } * stream = connect.getInputStream(); * </pre> * * This method pretty much duplicates the standard behaviour, except that it calls URLConnection.setUseCaches(false) * before opening the connection. * * @param url The URL has to be read * @return The InputSource that reads from the input URL * @throws IOException if any error occurs while reading the input URL * @since 1.8 */ public InputSource createInputSourceFromURL( URL url ) throws IOException { URLConnection connection = url.openConnection(); connection.setUseCaches( false ); InputStream stream = connection.getInputStream(); InputSource source = new InputSource( stream ); source.setSystemId( url.toExternalForm() ); inputSources.add( source ); return source; } /** * <p> * Convenience method that creates an <code>InputSource</code> from the string version of a URL. * </p> * * @param url URL for which to create an <code>InputSource</code> * @return The InputSource that reads from the input URL * @throws IOException if any error occurs while reading the input URL * @since 1.8 */ public InputSource createInputSourceFromURL( String url ) throws IOException { return createInputSourceFromURL( new URL( url ) ); } // --------------------------------------------------------- Rule Methods /** * <p> * Register a new Rule matching the specified pattern. This method sets the <code>Digester</code> property on the * rule. * </p> * * @param pattern Element matching pattern * @param rule Rule to be registered */ public void addRule( String pattern, Rule rule ) { rule.setDigester( this ); getRules().add( pattern, rule ); } /** * Register a set of Rule instances defined in a RuleSet. * * @param ruleSet The RuleSet instance to configure from */ public void addRuleSet( RuleSet ruleSet ) { String oldNamespaceURI = getRuleNamespaceURI(); String newNamespaceURI = ruleSet.getNamespaceURI(); if ( log.isDebugEnabled() ) { if ( newNamespaceURI == null ) { log.debug( "addRuleSet() with no namespace URI" ); } else { log.debug( "addRuleSet() with namespace URI " + newNamespaceURI ); } } setRuleNamespaceURI( newNamespaceURI ); ruleSet.addRuleInstances( this ); setRuleNamespaceURI( oldNamespaceURI ); } /** * Add a "bean property setter" rule for the specified parameters. 
* * @param pattern Element matching pattern * @see BeanPropertySetterRule */ public void addBeanPropertySetter( String pattern ) { addRule( pattern, new BeanPropertySetterRule() ); } /** * Add a "bean property setter" rule for the specified parameters. * * @param pattern Element matching pattern * @param propertyName Name of property to set * @see BeanPropertySetterRule */ public void addBeanPropertySetter( String pattern, String propertyName ) { addRule( pattern, new BeanPropertySetterRule( propertyName ) ); } /** * Add an "call method" rule for a method which accepts no arguments. * * @param pattern Element matching pattern * @param methodName Method name to be called * @see CallMethodRule */ public void addCallMethod( String pattern, String methodName ) { addRule( pattern, new CallMethodRule( methodName ) ); } /** * Add an "call method" rule for the specified parameters. * * @param pattern Element matching pattern * @param methodName Method name to be called * @param paramCount Number of expected parameters (or zero for a single parameter from the body of this element) * @see CallMethodRule */ public void addCallMethod( String pattern, String methodName, int paramCount ) { addRule( pattern, new CallMethodRule( methodName, paramCount ) ); } /** * Add an "call method" rule for the specified parameters. If <code>paramCount</code> is set to zero the rule will * use the body of the matched element as the single argument of the method, unless <code>paramTypes</code> is null * or empty, in this case the rule will call the specified method with no arguments. * * @param pattern Element matching pattern * @param methodName Method name to be called * @param paramCount Number of expected parameters (or zero for a single parameter from the body of this element) * @param paramTypes Set of Java class names for the types of the expected parameters (if you wish to use a * primitive type, specify the corresonding Java wrapper class instead, such as * <code>java.lang.Boolean</code> for a <code>boolean</code> parameter) * @see CallMethodRule */ public void addCallMethod( String pattern, String methodName, int paramCount, String paramTypes[] ) { addRule( pattern, new CallMethodRule( methodName, paramCount, paramTypes ) ); } /** * Add an "call method" rule for the specified parameters. If <code>paramCount</code> is set to zero the rule will * use the body of the matched element as the single argument of the method, unless <code>paramTypes</code> is null * or empty, in this case the rule will call the specified method with no arguments. * * @param pattern Element matching pattern * @param methodName Method name to be called * @param paramCount Number of expected parameters (or zero for a single parameter from the body of this element) * @param paramTypes The Java class names of the arguments (if you wish to use a primitive type, specify the * corresonding Java wrapper class instead, such as <code>java.lang.Boolean</code> for a * <code>boolean</code> parameter) * @see CallMethodRule */ public void addCallMethod( String pattern, String methodName, int paramCount, Class<?> paramTypes[] ) { addRule( pattern, new CallMethodRule( methodName, paramCount, paramTypes ) ); } /** * Add a "call parameter" rule for the specified parameters. 
* * @param pattern Element matching pattern * @param paramIndex Zero-relative parameter index to set (from the body of this element) * @see CallParamRule */ public void addCallParam( String pattern, int paramIndex ) { addRule( pattern, new CallParamRule( paramIndex ) ); } /** * Add a "call parameter" rule for the specified parameters. * * @param pattern Element matching pattern * @param paramIndex Zero-relative parameter index to set (from the specified attribute) * @param attributeName Attribute whose value is used as the parameter value * @see CallParamRule */ public void addCallParam( String pattern, int paramIndex, String attributeName ) { addRule( pattern, new CallParamRule( paramIndex, attributeName ) ); } /** * Add a "call parameter" rule. This will either take a parameter from the stack or from the current element body * text. * * @param pattern Element matching pattern * @param paramIndex The zero-relative parameter number * @param fromStack Should the call parameter be taken from the top of the stack? * @see CallParamRule */ public void addCallParam( String pattern, int paramIndex, boolean fromStack ) { addRule( pattern, new CallParamRule( paramIndex, fromStack ) ); } /** * Add a "call parameter" rule that sets a parameter from the stack. This takes a parameter from the given position * on the stack. * * @param pattern Element matching pattern * @param paramIndex The zero-relative parameter number * @param stackIndex set the call parameter to the stackIndex'th object down the stack, where 0 is the top of the * stack, 1 the next element down and so on * @see CallMethodRule */ public void addCallParam( String pattern, int paramIndex, int stackIndex ) { addRule( pattern, new CallParamRule( paramIndex, stackIndex ) ); } /** * Add a "call parameter" rule that sets a parameter from the current <code>Digester</code> matching path. This is * sometimes useful when using rules that support wildcards. * * @param pattern the pattern that this rule should match * @param paramIndex The zero-relative parameter number * @see CallMethodRule */ public void addCallParamPath( String pattern, int paramIndex ) { addRule( pattern, new PathCallParamRule( paramIndex ) ); } /** * Add a "call parameter" rule that sets a parameter from a caller-provided object. This can be used to pass * constants such as strings to methods; it can also be used to pass mutable objects, providing ways for objects to * do things like "register" themselves with some shared object. * <p> * Note that when attempting to locate a matching method to invoke, the true type of the paramObj is used, so that * despite the paramObj being passed in here as type Object, the target method can declare its parameters as being * the true type of the object (or some ancestor type, according to the usual type-conversion rules). * * @param pattern Element matching pattern * @param paramIndex The zero-relative parameter number * @param paramObj Any arbitrary object to be passed to the target method. * @see CallMethodRule * @since 1.6 */ public void addObjectParam( String pattern, int paramIndex, Object paramObj ) { addRule( pattern, new ObjectParamRule( paramIndex, paramObj ) ); } /** * Add a "factory create" rule for the specified parameters. Exceptions thrown during the object creation process * will be propagated. 
* * @param pattern Element matching pattern * @param className Java class name of the object creation factory class * @see FactoryCreateRule */ public void addFactoryCreate( String pattern, String className ) { addFactoryCreate( pattern, className, false ); } /** * Add a "factory create" rule for the specified parameters. Exceptions thrown during the object creation process * will be propagated. * * @param pattern Element matching pattern * @param clazz Java class of the object creation factory class * @see FactoryCreateRule */ public void addFactoryCreate( String pattern, Class<? extends ObjectCreationFactory<?>> clazz ) { addFactoryCreate( pattern, clazz, false ); } /** * Add a "factory create" rule for the specified parameters. Exceptions thrown during the object creation process * will be propagated. * * @param pattern Element matching pattern * @param className Java class name of the object creation factory class * @param attributeName Attribute name which, if present, overrides the value specified by <code>className</code> * @see FactoryCreateRule */ public void addFactoryCreate( String pattern, String className, String attributeName ) { addFactoryCreate( pattern, className, attributeName, false ); } /** * Add a "factory create" rule for the specified parameters. Exceptions thrown during the object creation process * will be propagated. * * @param pattern Element matching pattern * @param clazz Java class of the object creation factory class * @param attributeName Attribute name which, if present, overrides the value specified by <code>className</code> * @see FactoryCreateRule */ public void addFactoryCreate( String pattern, Class<? extends ObjectCreationFactory<?>> clazz, String attributeName ) { addFactoryCreate( pattern, clazz, attributeName, false ); } /** * Add a "factory create" rule for the specified parameters. Exceptions thrown during the object creation process * will be propagated. * * @param pattern Element matching pattern * @param creationFactory Previously instantiated ObjectCreationFactory to be utilized * @see FactoryCreateRule */ public void addFactoryCreate( String pattern, ObjectCreationFactory<?> creationFactory ) { addFactoryCreate( pattern, creationFactory, false ); } /** * Add a "factory create" rule for the specified parameters. * * @param pattern Element matching pattern * @param className Java class name of the object creation factory class * @param ignoreCreateExceptions when <code>true</code> any exceptions thrown during object creation will be * ignored. * @see FactoryCreateRule */ public void addFactoryCreate( String pattern, String className, boolean ignoreCreateExceptions ) { addRule( pattern, new FactoryCreateRule( className, ignoreCreateExceptions ) ); } /** * Add a "factory create" rule for the specified parameters. * * @param pattern Element matching pattern * @param clazz Java class of the object creation factory class * @param ignoreCreateExceptions when <code>true</code> any exceptions thrown during object creation will be * ignored. * @see FactoryCreateRule */ public void addFactoryCreate( String pattern, Class<? extends ObjectCreationFactory<?>> clazz, boolean ignoreCreateExceptions ) { addRule( pattern, new FactoryCreateRule( clazz, ignoreCreateExceptions ) ); } /** * Add a "factory create" rule for the specified parameters. 
* * @param pattern Element matching pattern * @param className Java class name of the object creation factory class * @param attributeName Attribute name which, if present, overrides the value specified by <code>className</code> * @param ignoreCreateExceptions when <code>true</code> any exceptions thrown during object creation will be * ignored. * @see FactoryCreateRule */ public void addFactoryCreate( String pattern, String className, String attributeName, boolean ignoreCreateExceptions ) { addRule( pattern, new FactoryCreateRule( className, attributeName, ignoreCreateExceptions ) ); } /** * Add a "factory create" rule for the specified parameters. * * @param pattern Element matching pattern * @param clazz Java class of the object creation factory class * @param attributeName Attribute name which, if present, overrides the value specified by <code>className</code> * @param ignoreCreateExceptions when <code>true</code> any exceptions thrown during object creation will be * ignored. * @see FactoryCreateRule */ public void addFactoryCreate( String pattern, Class<? extends ObjectCreationFactory<?>> clazz, String attributeName, boolean ignoreCreateExceptions ) { addRule( pattern, new FactoryCreateRule( clazz, attributeName, ignoreCreateExceptions ) ); } /** * Add a "factory create" rule for the specified parameters. * * @param pattern Element matching pattern * @param creationFactory Previously instantiated ObjectCreationFactory to be utilized * @param ignoreCreateExceptions when <code>true</code> any exceptions thrown during object creation will be * ignored. * @see FactoryCreateRule */ public void addFactoryCreate( String pattern, ObjectCreationFactory<?> creationFactory, boolean ignoreCreateExceptions ) { creationFactory.setDigester( this ); addRule( pattern, new FactoryCreateRule( creationFactory, ignoreCreateExceptions ) ); } /** * Add an "object create" rule for the specified parameters. * * @param pattern Element matching pattern * @param className Java class name to be created * @see ObjectCreateRule */ public void addObjectCreate( String pattern, String className ) { addRule( pattern, new ObjectCreateRule( className ) ); } /** * Add an "object create" rule for the specified parameters. * * @param pattern Element matching pattern * @param clazz Java class to be created * @see ObjectCreateRule */ public void addObjectCreate( String pattern, Class<?> clazz ) { addRule( pattern, new ObjectCreateRule( clazz ) ); } /** * Add an "object create" rule for the specified parameters. * * @param pattern Element matching pattern * @param className Default Java class name to be created * @param attributeName Attribute name that optionally overrides the default Java class name to be created * @see ObjectCreateRule */ public void addObjectCreate( String pattern, String className, String attributeName ) { addRule( pattern, new ObjectCreateRule( className, attributeName ) ); } /** * Add an "object create" rule for the specified parameters. * * @param pattern Element matching pattern * @param attributeName Attribute name that optionally overrides * @param clazz Default Java class to be created the default Java class name to be created * @see ObjectCreateRule */ public void addObjectCreate( String pattern, String attributeName, Class<?> clazz ) { addRule( pattern, new ObjectCreateRule( attributeName, clazz ) ); } /** * Adds an {@link SetNestedPropertiesRule}. 
* * @param pattern register the rule with this pattern * @since 1.6 */ public void addSetNestedProperties( String pattern ) { addRule( pattern, new SetNestedPropertiesRule() ); } /** * Adds an {@link SetNestedPropertiesRule}. * * @param pattern register the rule with this pattern * @param elementName elment name that a property maps to * @param propertyName property name of the element mapped from * @since 1.6 */ public void addSetNestedProperties( String pattern, String elementName, String propertyName ) { addRule( pattern, new SetNestedPropertiesRule( elementName, propertyName ) ); } /** * Adds an {@link SetNestedPropertiesRule}. * * @param pattern register the rule with this pattern * @param elementNames elment names that (in order) map to properties * @param propertyNames property names that (in order) elements are mapped to * @since 1.6 */ public void addSetNestedProperties( String pattern, String[] elementNames, String[] propertyNames ) { addRule( pattern, new SetNestedPropertiesRule( elementNames, propertyNames ) ); } /** * Add a "set next" rule for the specified parameters. * * @param pattern Element matching pattern * @param methodName Method name to call on the parent element * @see SetNextRule */ public void addSetNext( String pattern, String methodName ) { addRule( pattern, new SetNextRule( methodName ) ); } /** * Add a "set next" rule for the specified parameters. * * @param pattern Element matching pattern * @param methodName Method name to call on the parent element * @param paramType Java class name of the expected parameter type (if you wish to use a primitive type, specify the * corresonding Java wrapper class instead, such as <code>java.lang.Boolean</code> for a * <code>boolean</code> parameter) * @see SetNextRule */ public void addSetNext( String pattern, String methodName, String paramType ) { addRule( pattern, new SetNextRule( methodName, paramType ) ); } /** * Add {@link SetRootRule} with the specified parameters. * * @param pattern Element matching pattern * @param methodName Method name to call on the root object * @see SetRootRule */ public void addSetRoot( String pattern, String methodName ) { addRule( pattern, new SetRootRule( methodName ) ); } /** * Add {@link SetRootRule} with the specified parameters. * * @param pattern Element matching pattern * @param methodName Method name to call on the root object * @param paramType Java class name of the expected parameter type * @see SetRootRule */ public void addSetRoot( String pattern, String methodName, String paramType ) { addRule( pattern, new SetRootRule( methodName, paramType ) ); } /** * Add a "set properties" rule for the specified parameters. * * @param pattern Element matching pattern * @see SetPropertiesRule */ public void addSetProperties( String pattern ) { addRule( pattern, new SetPropertiesRule() ); } /** * Add a "set properties" rule with a single overridden parameter. See * {@link SetPropertiesRule#SetPropertiesRule(String attributeName, String propertyName)} * * @param pattern Element matching pattern * @param attributeName map this attribute * @param propertyName to this property * @see SetPropertiesRule */ public void addSetProperties( String pattern, String attributeName, String propertyName ) { addRule( pattern, new SetPropertiesRule( attributeName, propertyName ) ); } /** * Add a "set properties" rule with overridden parameters. 
See * {@link SetPropertiesRule#SetPropertiesRule(String [] attributeNames, String [] propertyNames)} * * @param pattern Element matching pattern * @param attributeNames names of attributes with custom mappings * @param propertyNames property names these attributes map to * @see SetPropertiesRule */ public void addSetProperties( String pattern, String[] attributeNames, String[] propertyNames ) { addRule( pattern, new SetPropertiesRule( attributeNames, propertyNames ) ); } /** * Add a "set property" rule for the specified parameters. * * @param pattern Element matching pattern * @param name Attribute name containing the property name to be set * @param value Attribute name containing the property value to set * @see SetPropertyRule */ public void addSetProperty( String pattern, String name, String value ) { addRule( pattern, new SetPropertyRule( name, value ) ); } /** * Add a "set top" rule for the specified parameters. * * @param pattern Element matching pattern * @param methodName Method name to call on the parent element * @see SetTopRule */ public void addSetTop( String pattern, String methodName ) { addRule( pattern, new SetTopRule( methodName ) ); } /** * Add a "set top" rule for the specified parameters. * * @param pattern Element matching pattern * @param methodName Method name to call on the parent element * @param paramType Java class name of the expected parameter type (if you wish to use a primitive type, specify the * corresonding Java wrapper class instead, such as <code>java.lang.Boolean</code> for a * <code>boolean</code> parameter) * @see SetTopRule */ public void addSetTop( String pattern, String methodName, String paramType ) { addRule( pattern, new SetTopRule( methodName, paramType ) ); } // --------------------------------------------------- Object Stack Methods /** * Clear the current contents of the default object stack, the param stack, all named stacks, and other internal * variables. * <p> * Calling this method <i>might</i> allow another document of the same type to be correctly parsed. However this * method was not intended for this purpose (just to tidy up memory usage). In general, a separate Digester object * should be created for each document to be parsed. * <p> * Note that this method is called automatically after a document has been successfully parsed by a Digester * instance. However it is not invoked automatically when a parse fails, so when reusing a Digester instance (which * is not recommended) this method <i>must</i> be called manually after a parse failure. */ public void clear() { match = ""; bodyTexts.clear(); params.clear(); publicId = null; stack.clear(); stacksByName.clear(); customContentHandler = null; } /** * Return the top object on the stack without removing it. * * If there are no objects on the stack, return <code>null</code>. * * @param <T> the type used to auto-cast the returned object to the assigned variable type * @return the top object on the stack without removing it. */ public <T> T peek() { try { return this.<T> npeSafeCast( stack.peek() ); } catch ( EmptyStackException e ) { log.warn( "Empty stack (returning null)" ); return ( null ); } } /** * Return the n'th object down the stack, where 0 is the top element and [getCount()-1] is the bottom element. If * the specified index is out of range, return <code>null</code>. * * @param <T> the type used to auto-cast the returned object to the assigned variable type * @param n Index of the desired element, where 0 is the top of the stack, 1 is the next element down, and so on. 
* @return the n'th object down the stack */ public <T> T peek( int n ) { int index = ( stack.size() - 1 ) - n; if ( index < 0 ) { log.warn( "Empty stack (returning null)" ); return ( null ); } try { return this.<T> npeSafeCast( stack.get( index ) ); } catch ( EmptyStackException e ) { log.warn( "Empty stack (returning null)" ); return ( null ); } } /** * Pop the top object off of the stack, and return it. If there are no objects on the stack, return * <code>null</code>. * * @param <T> the type used to auto-cast the returned object to the assigned variable type * @return the top object popped off of the stack */ public <T> T pop() { try { T popped = this.<T> npeSafeCast( stack.pop() ); if ( stackAction != null ) { popped = stackAction.onPop( this, null, popped ); } return popped; } catch ( EmptyStackException e ) { log.warn( "Empty stack (returning null)" ); return ( null ); } } /** * Push a new object onto the top of the object stack. * * @param <T> any type of the pushed object * @param object The new object */ public <T> void push( T object ) { if ( stackAction != null ) { object = stackAction.onPush( this, null, object ); } if ( stack.size() == 0 ) { root = object; } stack.push( object ); } /** * Pushes the given object onto the stack with the given name. If no stack already exists with the given name then * one will be created. * * @param <T> any type of the pushed object * @param stackName the name of the stack onto which the object should be pushed * @param value the Object to be pushed onto the named stack. * @since 1.6 */ public <T> void push( String stackName, T value ) { if ( stackAction != null ) { value = stackAction.onPush( this, stackName, value ); } Stack<Object> namedStack = stacksByName.get( stackName ); if ( namedStack == null ) { namedStack = new Stack<Object>(); stacksByName.put( stackName, namedStack ); } namedStack.push( value ); } /** * <p> * Pops (gets and removes) the top object from the stack with the given name. * </p> * <p> * <strong>Note:</strong> a stack is considered empty if no objects have been pushed onto it yet. * </p> * * @param <T> the type used to auto-cast the returned object to the assigned variable type * @param stackName the name of the stack from which the top value is to be popped. * @return the top <code>Object</code> on the stack or or null if the stack is either empty or has not been created * yet * @throws EmptyStackException if the named stack is empty * @since 1.6 */ public <T> T pop( String stackName ) { T result = null; Stack<Object> namedStack = stacksByName.get( stackName ); if ( namedStack == null ) { if ( log.isDebugEnabled() ) { log.debug( "Stack '" + stackName + "' is empty" ); } throw new EmptyStackException(); } result = this.<T> npeSafeCast( namedStack.pop() ); if ( stackAction != null ) { result = stackAction.onPop( this, stackName, result ); } return result; } /** * <p> * Gets the top object from the stack with the given name. This method does not remove the object from the stack. * </p> * <p> * <strong>Note:</strong> a stack is considered empty if no objects have been pushed onto it yet. 
* </p> * * @param <T> the type used to auto-cast the returned object to the assigned variable type * @param stackName the name of the stack to be peeked * @return the top <code>Object</code> on the stack or null if the stack is either empty or has not been created yet * @throws EmptyStackException if the named stack is empty * @since 1.6 */ public <T> T peek( String stackName ) { return this.<T> npeSafeCast( peek( stackName, 0 ) ); } /** * <p> * Gets the top object from the stack with the given name. This method does not remove the object from the stack. * </p> * <p> * <strong>Note:</strong> a stack is considered empty if no objects have been pushed onto it yet. * </p> * * @param <T> the type used to auto-cast the returned object to the assigned variable type * @param stackName the name of the stack to be peeked * @param n Index of the desired element, where 0 is the top of the stack, 1 is the next element down, and so on. * @return the specified <code>Object</code> on the stack. * @throws EmptyStackException if the named stack is empty * @since 1.6 */ public <T> T peek( String stackName, int n ) { T result = null; Stack<Object> namedStack = stacksByName.get( stackName ); if ( namedStack == null ) { if ( log.isDebugEnabled() ) { log.debug( "Stack '" + stackName + "' is empty" ); } throw new EmptyStackException(); } int index = ( namedStack.size() - 1 ) - n; if ( index < 0 ) { throw new EmptyStackException(); } result = this.<T> npeSafeCast( namedStack.get( index ) ); return result; } /** * <p> * Is the stack with the given name empty? * </p> * <p> * <strong>Note:</strong> a stack is considered empty if no objects have been pushed onto it yet. * </p> * * @param stackName the name of the stack whose emptiness should be evaluated * @return true if the given stack if empty * @since 1.6 */ public boolean isEmpty( String stackName ) { boolean result = true; Stack<Object> namedStack = stacksByName.get( stackName ); if ( namedStack != null ) { result = namedStack.isEmpty(); } return result; } /** * Returns the root element of the tree of objects created as a result of applying the rule objects to the input * XML. * <p> * If the digester stack was "primed" by explicitly pushing a root object onto the stack before parsing started, * then that root object is returned here. * <p> * Alternatively, if a Rule which creates an object (eg ObjectCreateRule) matched the root element of the xml, then * the object created will be returned here. * <p> * In other cases, the object most recently pushed onto an empty digester stack is returned. This would be a most * unusual use of digester, however; one of the previous configurations is much more likely. * <p> * Note that when using one of the Digester.parse methods, the return value from the parse method is exactly the * same as the return value from this method. However when the Digester is being used as a SAXContentHandler, no * such return value is available; in this case, this method allows you to access the root object that has been * created after parsing has completed. * * @param <T> the type used to auto-cast the returned object to the assigned variable type * @return the root object that has been created after parsing or null if the digester has not parsed any XML yet. */ public <T> T getRoot() { return this.<T> npeSafeCast( root ); } /** * This method allows the "root" variable to be reset to null. * <p> * It is not considered safe for a digester instance to be reused to parse multiple xml documents. 
However if you * are determined to do so, then you should call both clear() and resetRoot() before each parse. * * @since 1.7 */ public void resetRoot() { root = null; } // ------------------------------------------------ Parameter Stack Methods // ------------------------------------------------------ Protected Methods /** * <p> * Clean up allocated resources after parsing is complete. The default method closes input streams that have been * created by Digester itself. If you override this method in a subclass, be sure to call * <code>super.cleanup()</code> to invoke this logic. * </p> * * @since 1.8 */ protected void cleanup() { // If we created any InputSource objects in this instance, // they each have an input stream that should be closed for ( InputSource source : inputSources ) { try { source.getByteStream().close(); } catch ( IOException e ) { // Fall through so we get them all } } inputSources.clear(); } /** * <p> * Provide a hook for lazy configuration of this <code>Digester</code> instance. The default implementation does * nothing, but subclasses can override as needed. * </p> * <p> * <strong>Note</strong> This method may be called more than once. Once only initialization code should be placed in * {@link #initialize} or the code should take responsibility by checking and setting the {@link #configured} flag. * </p> */ protected void configure() { // Do not configure more than once if ( configured ) { return; } // Perform lazy configuration as needed initialize(); // call hook method for subclasses that want to be initialized once only // Nothing else required by default // Set the configuration flag to avoid repeating configured = true; } /** * Checks the Digester instance has been configured. * * @return true, if the Digester instance has been configured, false otherwise * @since 3.0 */ public boolean isConfigured() { return configured; } /** * <p> * Provides a hook for lazy initialization of this <code>Digester</code> instance. The default implementation does * nothing, but subclasses can override as needed. Digester (by default) only calls this method once. * </p> * <p> * <strong>Note</strong> This method will be called by {@link #configure} only when the {@link #configured} flag is * false. Subclasses that override <code>configure</code> or who set <code>configured</code> may find that this * method may be called more than once. * </p> * * @since 1.6 */ protected void initialize() { // Perform lazy initialization as needed // Nothing required by default } // -------------------------------------------------------- Package Methods /** * Return the set of DTD URL registrations, keyed by public identifier. NOTE: the returned map is in read-only mode. * * @return the read-only Map of DTD URL registrations. */ Map<String, URL> getRegistrations() { return Collections.unmodifiableMap( entityValidator ); } /** * <p> * Return the top object on the parameters stack without removing it. If there are no objects on the stack, return * <code>null</code>. * </p> * <p> * The parameters stack is used to store <code>CallMethodRule</code> parameters. See {@link #params}. * </p> * * @return the top object on the parameters stack without removing it. */ public Object[] peekParams() { try { return ( params.peek() ); } catch ( EmptyStackException e ) { log.warn( "Empty stack (returning null)" ); return ( null ); } } /** * <p> * Return the n'th object down the parameters stack, where 0 is the top element and [getCount()-1] is the bottom * element. 
If the specified index is out of range, return <code>null</code>. * </p> * <p> * The parameters stack is used to store <code>CallMethodRule</code> parameters. See {@link #params}. * </p> * * @param n Index of the desired element, where 0 is the top of the stack, 1 is the next element down, and so on. * @return the n'th object down the parameters stack */ public Object[] peekParams( int n ) { int index = ( params.size() - 1 ) - n; if ( index < 0 ) { log.warn( "Empty stack (returning null)" ); return ( null ); } try { return ( params.get( index ) ); } catch ( EmptyStackException e ) { log.warn( "Empty stack (returning null)" ); return ( null ); } } /** * <p> * Pop the top object off of the parameters stack, and return it. If there are no objects on the stack, return * <code>null</code>. * </p> * <p> * The parameters stack is used to store <code>CallMethodRule</code> parameters. See {@link #params}. * </p> * * @param the top object popped off of the parameters stack */ public Object[] popParams() { try { if ( log.isTraceEnabled() ) { log.trace( "Popping params" ); } return ( params.pop() ); } catch ( EmptyStackException e ) { log.warn( "Empty stack (returning null)" ); return ( null ); } } /** * <p> * Push a new object onto the top of the parameters stack. * </p> * <p> * The parameters stack is used to store <code>CallMethodRule</code> parameters. See {@link #params}. * </p> * * @param object The new object */ public void pushParams( Object... object ) { if ( log.isTraceEnabled() ) { log.trace( "Pushing params" ); } params.push( object ); } /** * Create a SAX exception which also understands about the location in the digester file where the exception occurs * * @param message the custom SAX exception message * @param e the exception cause * @return the new SAX exception */ public SAXException createSAXException( String message, Exception e ) { if ( ( e != null ) && ( e instanceof InvocationTargetException ) ) { Throwable t = ( (InvocationTargetException) e ).getTargetException(); if ( ( t != null ) && ( t instanceof Exception ) ) { e = (Exception) t; } } if ( locator != null ) { String error = "Error at line " + locator.getLineNumber() + " char " + locator.getColumnNumber() + ": " + message; if ( e != null ) { return new SAXParseException( error, locator, e ); } return new SAXParseException( error, locator ); } log.error( "No Locator!" ); if ( e != null ) { return new SAXException( message, e ); } return new SAXException( message ); } /** * Create a SAX exception which also understands about the location in the digester file where the exception occurs * * @param e the exception cause * @return the new SAX exception */ public SAXException createSAXException( Exception e ) { if ( e instanceof InvocationTargetException ) { Throwable t = ( (InvocationTargetException) e ).getTargetException(); if ( ( t != null ) && ( t instanceof Exception ) ) { e = (Exception) t; } } return createSAXException( e.getMessage(), e ); } /** * Create a SAX exception which also understands about the location in the digester file where the exception occurs * * @param message the custom SAX exception message * @return the new SAX exception */ public SAXException createSAXException( String message ) { return createSAXException( message, null ); } /** * Helps casting the input object to given type, avoiding NPEs. * * @since 3.0 * @param <T> the type the input object has to be cast. * @param obj the object has to be cast. * @return the casted object, if input object is not null, null otherwise. 
*/ private <T> T npeSafeCast( Object obj ) { if ( obj == null ) { return null; } @SuppressWarnings( "unchecked" ) T result = (T) obj; return result; } }
fixed @return tag on popParams() method git-svn-id: c3d1f7498fb08a2885afe49e111c402c6cd8f5f6@1128639 13f79535-47bb-0310-9956-ffa450edef68
src/main/java/org/apache/commons/digester3/Digester.java
fixed @return tag on popParams() method
<ide><path>rc/main/java/org/apache/commons/digester3/Digester.java
<ide> * The parameters stack is used to store <code>CallMethodRule</code> parameters. See {@link #params}.
<ide> * </p>
<ide> *
<del> * @param the top object popped off of the parameters stack
<add> * @return the top object popped off of the parameters stack
<ide> */
<ide> public Object[] popParams()
<ide> {
Java
mit
error: pathspec 'src/main/java/seedu/address/storage/ConfigStorage.java' did not match any file(s) known to git
5c51b308c88dd17b88cc2de8c1f89c99fed33f66
1
CS2103JAN2017-F14-B3/main,CS2103JAN2017-F14-B3/main
package seedu.address.storage;

import java.io.IOException;

import seedu.address.commons.core.Config;
import seedu.address.commons.exceptions.DataConversionException;

public interface ConfigStorage {
    Config readConfig() throws DataConversionException, IOException;
    void saveConfig(Config config) throws IOException;
}
src/main/java/seedu/address/storage/ConfigStorage.java
configstorage interface
src/main/java/seedu/address/storage/ConfigStorage.java
configstorage interface
<ide><path>rc/main/java/seedu/address/storage/ConfigStorage.java
<add>package seedu.address.storage;
<add>
<add>import java.io.IOException;
<add>
<add>import seedu.address.commons.core.Config;
<add>import seedu.address.commons.exceptions.DataConversionException;
<add>
<add>public interface ConfigStorage {
<add> Config readConfig() throws DataConversionException, IOException;
<add> void saveConfig(Config config) throws IOException;
<add>}
JavaScript
mit
c61ae73672c71ff190f85152a91ddd7703361a49
0
phated/grunt-jade
/* * grunt-jade * https://github.com/phated/grunt-jade * * Copyright (c) 2012 Blaine Bublitz * Licensed under the MIT license. */ module.exports = function(grunt) { var jade = require('jade') , path = require('path') , jadeRuntimePath = require.resolve('jade/lib/runtime'); // ========================================================================== // TASKS // ========================================================================== grunt.registerMultiTask('jade', 'Your task description goes here.', function() { // Options object for jade var options = grunt.utils._.extend({ client: true, runtime: true, compileDebug: false }, this.data.options); var wrapper = grunt.utils._.extend({ wrap: true, amd: false, dependencies: '' }, this.data.wrapper); // Reference to the dest dir var dest = path.normalize(this.file.dest + '/') , files = grunt.file.expandFiles(this.file.src); // Make the dest dir if it doesn't exist grunt.file.mkdir(dest); // Loop through all files and write them to files files.forEach(function(filepath) { options.filename = filepath; var fileExtname = path.extname(filepath) , src = grunt.file.read(filepath) , outputFilename = path.basename(filepath, fileExtname) , outputExtension = options.client ? '.js' : '.html' , outputFilepath = dest + outputFilename + outputExtension , compiled = grunt.helper('compile', src, options, wrapper, outputFilename, filepath); grunt.file.write(outputFilepath, compiled); }); if(options.client && options.runtime){ grunt.helper('runtime', dest, wrapper); } }); // ========================================================================== // HELPERS // ========================================================================== grunt.registerHelper('compile', function(src, options, wrapper, filename, extraMsg) { var msg = 'Compiling' + (extraMsg ? ' ' + extraMsg : '') + '...'; grunt.verbose.write(msg); var compiled = jade.compile(src, options); grunt.verbose.ok(); var output; // Was compilation successful? if(compiled){ // Are we writing JS? if(options.client){ compiled = String(compiled); // Are we wrapping it? if(wrapper.wrap){ output = grunt.helper('wrap', compiled, wrapper, filename); } else { output = compiled; } } else { // Spit out output = compiled(options); } } return output; }); grunt.registerHelper('wrap', function(compiled, wrapper, filename){ // Generate path for wrapper template var templatePath = __dirname + '/../support/' + (wrapper.amd ? 'amd' : 'no-amd') + '.template'; // Read in the correct wrapper template var template = grunt.file.read(templatePath); grunt.verbose.write('Wrapping ' + filename + ' template...'); // Compile template with params var wrappedTemplate = grunt.template.process(template, { compiledTemplate: compiled, filename: filename, dependencies: wrapper.dependencies }); grunt.verbose.ok(); return wrappedTemplate; }); grunt.registerHelper('runtime', function(dest, wrapper){ // Generate path for wrapper template var templatePath = __dirname + '/../support/' + (wrapper.amd ? 'amd' : 'no-amd') + '-runtime.template'; // Read in the correct wrapper template var template = grunt.file.read(templatePath); var runtime = grunt.file.read(jadeRuntimePath); grunt.verbose.write('Wrapping runtime.js...'); // Compile template with params var wrappedTemplate = grunt.template.process(template, { runtime: runtime }); var filename = wrapper.dependencies ? wrapper.dependencies : 'runtime'; grunt.file.write(dest + filename + '.js', wrappedTemplate); return; }); };
tasks/jade.js
/* * grunt-jade * https://github.com/phated/grunt-jade * * Copyright (c) 2012 Blaine Bublitz * Licensed under the MIT license. */ module.exports = function(grunt) { var jade = require('jade') , path = require('path') , jadeRuntimePath = require.resolve('jade/lib/runtime'); // ========================================================================== // TASKS // ========================================================================== grunt.registerMultiTask('jade', 'Your task description goes here.', function() { // Options object for jade var options = grunt.utils._.extend({ client: true, runtime: true, compileDebug: false }, this.data.options); var wrapper = grunt.utils._.extend({ wrap: true, amd: false, dependencies: '' }, this.data.wrapper); // Reference to the dest dir var dest = path.normalize(this.file.dest + '/') , files = grunt.file.expandFiles(this.file.src); // Make the dest dir if it doesn't exist grunt.file.mkdir(dest); // Loop through all files and write them to files files.forEach(function(filepath) { var fileExtname = path.extname(filepath) , src = grunt.file.read(filepath) , outputFilename = path.basename(filepath, fileExtname) , outputExtension = options.client ? '.js' : '.html' , outputFilepath = dest + outputFilename + outputExtension , compiled = grunt.helper('compile', src, options, wrapper, outputFilename, filepath); grunt.file.write(outputFilepath, compiled); }); if(options.client && options.runtime){ grunt.helper('runtime', dest, wrapper); } }); // ========================================================================== // HELPERS // ========================================================================== grunt.registerHelper('compile', function(src, options, wrapper, filename, extraMsg) { var msg = 'Compiling' + (extraMsg ? ' ' + extraMsg : '') + '...'; grunt.verbose.write(msg); var compiled = jade.compile(src, options); grunt.verbose.ok(); var output; // Was compilation successful? if(compiled){ // Are we writing JS? if(options.client){ compiled = String(compiled); // Are we wrapping it? if(wrapper.wrap){ output = grunt.helper('wrap', compiled, wrapper, filename); } else { output = compiled; } } else { // Spit out output = compiled(options); } } return output; }); grunt.registerHelper('wrap', function(compiled, wrapper, filename){ // Generate path for wrapper template var templatePath = __dirname + '/../support/' + (wrapper.amd ? 'amd' : 'no-amd') + '.template'; // Read in the correct wrapper template var template = grunt.file.read(templatePath); grunt.verbose.write('Wrapping ' + filename + ' template...'); // Compile template with params var wrappedTemplate = grunt.template.process(template, { compiledTemplate: compiled, filename: filename, dependencies: wrapper.dependencies }); grunt.verbose.ok(); return wrappedTemplate; }); grunt.registerHelper('runtime', function(dest, wrapper){ // Generate path for wrapper template var templatePath = __dirname + '/../support/' + (wrapper.amd ? 'amd' : 'no-amd') + '-runtime.template'; // Read in the correct wrapper template var template = grunt.file.read(templatePath); var runtime = grunt.file.read(jadeRuntimePath); grunt.verbose.write('Wrapping runtime.js...'); // Compile template with params var wrappedTemplate = grunt.template.process(template, { runtime: runtime }); var filename = wrapper.dependencies ? wrapper.dependencies : 'runtime'; grunt.file.write(dest + filename + '.js', wrappedTemplate); return; }); };
Added options.filename to enable the use of "extends" in Jade templates
tasks/jade.js
Added options.filename to enable the use of "extends" in Jade templates
<ide><path>tasks/jade.js <ide> <ide> // Loop through all files and write them to files <ide> files.forEach(function(filepath) { <add> options.filename = filepath; <ide> var fileExtname = path.extname(filepath) <ide> , src = grunt.file.read(filepath) <ide> , outputFilename = path.basename(filepath, fileExtname)
Java
mit
15e4d56d96ee55a477ae1377560c0b888cdd50bf
0
mhradek/aurkitu,mhradek/aurkitu
package com.michaelhradek.aurkitu; import com.michaelhradek.aurkitu.plugin.Application; import org.apache.maven.plugin.testing.AbstractMojoTestCase; import org.apache.maven.plugin.testing.MojoRule; import org.apache.maven.project.MavenProject; import org.junit.Assert; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.Mock; import org.mockito.Mockito; import org.mockito.MockitoAnnotations; import org.mockito.runners.MockitoJUnitRunner; import java.io.File; import java.util.ArrayList; /** * @author m.hradek */ @RunWith(MockitoJUnitRunner.class) public class ApplicationTest extends AbstractMojoTestCase { @Mock private MavenProject mockProject; /** * @see junit.framework.TestCase#setUp() */ @Override protected void setUp() throws Exception { // required for mojo lookups to work super.setUp(); } protected void tearDown() throws Exception { super.tearDown(); } @Rule public MojoRule rule = new MojoRule() { @Override protected void before() throws Throwable {} @Override protected void after() {} }; @Before public void before() { MockitoAnnotations.initMocks(this); } /** * @throws Exception Unable to locate file. */ @Test public void testBasicRead() throws Exception { Mockito.when(mockProject.getCompileClasspathElements()).thenReturn(new ArrayList<>()); File testPom = new File(getBasedir(),"src/test/resources/plugin-basic/pom.xml"); Assert.assertNotNull(testPom); Assert.assertTrue(testPom.exists()); Assert.assertTrue(testPom.isFile()); Application mojo = new Application(); // mojo = (Application) this.configureMojo( // mojo, extractPluginConfiguration(Application.MOJO_NAME, testPom) // ); // // assertNotNull(mojo); // mojo.execute(); } @Test public void testBasicWithProjectRead() throws Exception { File testPom = new File(getBasedir(), "src/test/resources/plugin-basic-with-project/pom.xml"); Assert.assertNotNull(testPom); Assert.assertTrue(testPom.exists()); Assert.assertTrue(testPom.isFile()); // Application mojo = new Application(); // mojo = (Application) this.configureMojo( // mojo, extractPluginConfiguration(Application.MOJO_NAME, testPom) // ); // // Field projectField = mojo.getClass().getDeclaredField("project"); // projectField.setAccessible(true); // MavenProject mavenProject = (MavenProject) projectField.get(mojo); // // Assert.assertNotNull(mavenProject); } // @Test(expected = MojoExecutionException.class) // public void testBasicReadWithProject() throws Exception { // File testPom = new File(getBasedir(),"src/test/resources/plugin-basic-with-project/pom.xml"); // Assert.assertNotNull(testPom); Assert.assertTrue(testPom.exists()); // Assert.assertTrue(testPom.isFile()); // // Application mojo = new Application(); // mojo = (Application) this.configureMojo( // mojo, extractPluginConfiguration(Application.MOJO_NAME, testPom) // ); // // assertNotNull(mojo); // mojo.execute(); // } }
aurkitu-maven-plugin/src/test/java/com/michaelhradek/aurkitu/ApplicationTest.java
package com.michaelhradek.aurkitu; import com.michaelhradek.aurkitu.plugin.Application; import org.apache.maven.plugin.testing.AbstractMojoTestCase; import org.apache.maven.plugin.testing.MojoRule; import org.apache.maven.project.MavenProject; import org.junit.Assert; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.Mock; import org.mockito.Mockito; import org.mockito.MockitoAnnotations; import org.mockito.runners.MockitoJUnitRunner; import java.io.File; import java.lang.reflect.Field; import java.util.ArrayList; /** * @author m.hradek */ @RunWith(MockitoJUnitRunner.class) public class ApplicationTest extends AbstractMojoTestCase { @Mock private MavenProject mockProject; /** * @see junit.framework.TestCase#setUp() */ @Override protected void setUp() throws Exception { // required for mojo lookups to work super.setUp(); } protected void tearDown() throws Exception { super.tearDown(); } @Rule public MojoRule rule = new MojoRule() { @Override protected void before() throws Throwable {} @Override protected void after() {} }; @Before public void before() { MockitoAnnotations.initMocks(this); } /** * @throws Exception Unable to locate file. */ @Test public void testBasicRead() throws Exception { Mockito.when(mockProject.getCompileClasspathElements()).thenReturn(new ArrayList<>()); File testPom = new File(getBasedir(),"src/test/resources/plugin-basic/pom.xml"); Assert.assertNotNull(testPom); Assert.assertTrue(testPom.exists()); Assert.assertTrue(testPom.isFile()); Application mojo = new Application(); // mojo = (Application) this.configureMojo( // mojo, extractPluginConfiguration(Application.MOJO_NAME, testPom) // ); // // assertNotNull(mojo); // mojo.execute(); } @Test public void testBasicWithProjectRead() throws Exception { File testPom = new File(getBasedir(), "src/test/resources/plugin-basic-with-project/pom.xml"); Assert.assertNotNull(testPom); Assert.assertTrue(testPom.exists()); Assert.assertTrue(testPom.isFile()); Application mojo = new Application(); // mojo = (Application) this.configureMojo( // mojo, extractPluginConfiguration(Application.MOJO_NAME, testPom) // ); // // Field projectField = mojo.getClass().getDeclaredField("project"); // projectField.setAccessible(true); // MavenProject mavenProject = (MavenProject) projectField.get(mojo); // // Assert.assertNotNull(mavenProject); } // @Test(expected = MojoExecutionException.class) // public void testBasicReadWithProject() throws Exception { // File testPom = new File(getBasedir(),"src/test/resources/plugin-basic-with-project/pom.xml"); // Assert.assertNotNull(testPom); Assert.assertTrue(testPom.exists()); // Assert.assertTrue(testPom.isFile()); // // Application mojo = new Application(); // mojo = (Application) this.configureMojo( // mojo, extractPluginConfiguration(Application.MOJO_NAME, testPom) // ); // // assertNotNull(mojo); // mojo.execute(); // } }
Removed unused imports and commented out unused code.
aurkitu-maven-plugin/src/test/java/com/michaelhradek/aurkitu/ApplicationTest.java
Removed unused imports and commented out unused code.
<ide><path>aurkitu-maven-plugin/src/test/java/com/michaelhradek/aurkitu/ApplicationTest.java <ide> import org.mockito.runners.MockitoJUnitRunner; <ide> <ide> import java.io.File; <del>import java.lang.reflect.Field; <ide> import java.util.ArrayList; <ide> <ide> /** <ide> Assert.assertTrue(testPom.exists()); <ide> Assert.assertTrue(testPom.isFile()); <ide> <del> Application mojo = new Application(); <add>// Application mojo = new Application(); <ide> // mojo = (Application) this.configureMojo( <ide> // mojo, extractPluginConfiguration(Application.MOJO_NAME, testPom) <ide> // );
Java
mit
13e377ab55eba5c5d704f37ac72ca986b03c8303
0
mzmine/mzmine3,mzmine/mzmine3
/* * Copyright 2006-2021 The MZmine Development Team * * This file is part of MZmine. * * MZmine is free software; you can redistribute it and/or modify it under the terms of the GNU * General Public License as published by the Free Software Foundation; either version 2 of the * License, or (at your option) any later version. * * MZmine is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even * the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * General Public License for more details. * * You should have received a copy of the GNU General Public License along with MZmine; if not, * write to the Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * */ package io.github.mzmine.modules.visualization.chromatogram; import com.google.common.collect.Range; import io.github.mzmine.datamodel.FeatureIdentity; import io.github.mzmine.datamodel.FeatureStatus; import io.github.mzmine.datamodel.MZmineProject; import io.github.mzmine.datamodel.RawDataFile; import io.github.mzmine.datamodel.Scan; import io.github.mzmine.datamodel.features.Feature; import io.github.mzmine.datamodel.features.ModularFeature; import io.github.mzmine.datamodel.features.ModularFeatureListRow; import io.github.mzmine.main.MZmineCore; import io.github.mzmine.modules.MZmineModuleCategory; import io.github.mzmine.modules.MZmineRunnableModule; import io.github.mzmine.parameters.ParameterSet; import io.github.mzmine.parameters.parametertypes.selectors.ScanSelection; import io.github.mzmine.taskcontrol.Task; import io.github.mzmine.util.ExitCode; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.stream.Collectors; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; /** * TIC/XIC visualizer using JFreeChart library */ public class ChromatogramVisualizerModule implements MZmineRunnableModule { private static final String MODULE_NAME = "TIC/XIC visualizer"; private static final String MODULE_DESCRIPTION = "TIC/XIC visualizer."; // TODO public static void setupNewTICVisualizer(final RawDataFile dataFile) { setupNewTICVisualizer(new RawDataFile[]{dataFile}); } public static void setupNewTICVisualizer(final RawDataFile[] dataFiles) { setupNewTICVisualizer(MZmineCore.getProjectManager().getCurrentProject().getDataFiles(), dataFiles, new Feature[0], new Feature[0], null, null, null); } public static void setupNewTICVisualizer(final RawDataFile[] allFiles, final RawDataFile[] selectedFiles, final Feature[] allPeaks, final Feature[] selectedPeaks, final Map<Feature, String> peakLabels, ScanSelection scanSelection, final Range<Double> mzRange) { assert allFiles != null; final ChromatogramVisualizerModule myInstance = MZmineCore.getModuleInstance(ChromatogramVisualizerModule.class); final TICVisualizerParameters myParameters = (TICVisualizerParameters) MZmineCore .getConfiguration().getModuleParameters(ChromatogramVisualizerModule.class); myParameters.getParameter(TICVisualizerParameters.PLOT_TYPE).setValue(TICPlotType.BASEPEAK); if (scanSelection != null) { myParameters.getParameter(TICVisualizerParameters.scanSelection).setValue(scanSelection); } if (mzRange != null) { myParameters.getParameter(TICVisualizerParameters.MZ_RANGE).setValue(mzRange); } if (myParameters.showSetupDialog(true, allFiles, selectedFiles, 
allPeaks, selectedPeaks) == ExitCode.OK) { final TICVisualizerParameters p = (TICVisualizerParameters) myParameters.cloneParameterSet(); if (peakLabels != null) { p.setPeakLabelMap(peakLabels); } myInstance.runModule(MZmineCore.getProjectManager().getCurrentProject(), p, new ArrayList<Task>(), new Date()); // date is irrelevant } } public static void showNewTICVisualizerWindow(final RawDataFile[] dataFiles, final Feature[] selectionPeaks, final Map<Feature, String> peakLabels, final ScanSelection scanSelection, final TICPlotType plotType, final Range<Double> mzRange) { TICVisualizerTab window = new TICVisualizerTab(dataFiles, plotType, scanSelection, mzRange, Arrays.asList(selectionPeaks), peakLabels); MZmineCore.getDesktop().addTab(window); } public static void showNewTICVisualizerWindow(final RawDataFile[] dataFiles, final List<Feature> selectionPeaks, final Map<Feature, String> peakLabels, final ScanSelection scanSelection, final TICPlotType plotType, final Range<Double> mzRange) { TICVisualizerTab window = new TICVisualizerTab(dataFiles, plotType, scanSelection, mzRange, selectionPeaks, peakLabels); MZmineCore.getDesktop().addTab(window); } public static void visualizeFeatureListRows(Collection<ModularFeatureListRow> rows) { final Map<Feature, String> labelsMap = new HashMap<>(); final Set<RawDataFile> files = new HashSet<>(); Range<Double> mzRange = null; final List<Feature> selectedFeatures = new ArrayList<>(); for (ModularFeatureListRow row : rows) { for (final Feature f : row.getFeatures()) { final ModularFeature feature = (ModularFeature) f; if(feature == null || feature.getFeatureStatus() == FeatureStatus.UNKNOWN) { continue; } if (mzRange == null) { mzRange = feature.getRawDataPointsMZRange(); double upper = mzRange.upperEndpoint(); double lower = mzRange.lowerEndpoint(); if ((upper - lower) < 0.000001) { // Workaround to make ultra narrow mzRanges (e.g. from imported mzTab peaklist), // a more reasonable default for a HRAM instrument (~5ppm) double fiveppm = (upper * 5E-6); mzRange = Range.closed(lower - fiveppm, upper + fiveppm); } } else { mzRange = mzRange.span(feature.getRawDataPointsMZRange()); } selectedFeatures.add(feature); // Label the peak with the row's preferred identity. final FeatureIdentity identity = row.getPreferredFeatureIdentity(); if (identity != null) { labelsMap.put(feature, identity.getName()); } files.add(feature.getRawDataFile()); } } ScanSelection scanSelection = new ScanSelection(1); showNewTICVisualizerWindow(files.toArray(new RawDataFile[0]), selectedFeatures.toArray(new Feature[selectedFeatures.size()]), labelsMap, scanSelection, TICPlotType.BASEPEAK, mzRange); } public static void setUpVisualiserFromFeatures(Collection<ModularFeatureListRow> rows, @Nullable RawDataFile selectedFile) { // Map peaks to their identity labels. final Map<Feature, String> labelsMap = new HashMap<>(); Range<Double> mzRange = null; final ArrayList<Feature> allFeatures = new ArrayList<>(); final ArrayList<Feature> selectedFeatures = new ArrayList<>(); final Set<RawDataFile> allFiles = new HashSet<>(); allFiles.addAll(rows.stream().flatMap(row -> row.getRawDataFiles().stream()). collect(Collectors.toSet())); for (final ModularFeatureListRow row : rows) { // Label the peak with the row's preferred identity. 
final FeatureIdentity identity = row.getPreferredFeatureIdentity(); for (final Feature feature : row.getFeatures()) { if(feature == null || feature.getFeatureStatus() == FeatureStatus.UNKNOWN) { continue; } allFeatures.add(feature); if (feature.getRawDataFile() == selectedFile) { selectedFeatures.add(feature); } if (mzRange == null) { mzRange = feature.getRawDataPointsMZRange(); } else { mzRange = mzRange.span(feature.getRawDataPointsMZRange()); } if (identity != null) { labelsMap.put(feature, identity.getName()); } // allFiles.add(feature.getRawDataFile()); } } ScanSelection scanSelection = new ScanSelection(1); setupNewTICVisualizer( MZmineCore.getProjectManager().getCurrentProject().getDataFiles(), allFiles.toArray(new RawDataFile[0]), allFeatures.toArray(new Feature[allFeatures.size()]), selectedFeatures.toArray(new Feature[selectedFeatures.size()]), labelsMap, scanSelection, mzRange); } @Override public @NotNull String getName() { return MODULE_NAME; } @Override public @NotNull String getDescription() { return MODULE_DESCRIPTION; } @Override @NotNull public ExitCode runModule(@NotNull MZmineProject project, @NotNull ParameterSet parameters, @NotNull Collection<Task> tasks, @NotNull Date moduleCallDate) { final RawDataFile[] dataFiles = parameters.getParameter(TICVisualizerParameters.DATA_FILES) .getValue().getMatchingRawDataFiles(); final Range<Double> mzRange = parameters.getParameter(TICVisualizerParameters.MZ_RANGE).getValue(); final ScanSelection scanSelection = parameters.getParameter(TICVisualizerParameters.scanSelection).getValue(); final TICPlotType plotType = parameters.getParameter(TICVisualizerParameters.PLOT_TYPE).getValue(); final List<Feature> selectionPeaks = parameters.getParameter(TICVisualizerParameters.PEAKS).getValue(); // Add the window to the desktop only if we actually have any raw // data to show. boolean weHaveData = false; for (RawDataFile dataFile : dataFiles) { Scan selectedScans[] = scanSelection.getMatchingScans(dataFile); if (selectedScans.length > 0) { weHaveData = true; } } if (weHaveData) { TICVisualizerTab window = new TICVisualizerTab(dataFiles, plotType, scanSelection, mzRange, selectionPeaks, ((TICVisualizerParameters) parameters).getPeakLabelMap()); MZmineCore.getDesktop().addTab(window); } else { MZmineCore.getDesktop().displayErrorMessage("No scans found"); } return ExitCode.OK; } @Override public @NotNull MZmineModuleCategory getModuleCategory() { return MZmineModuleCategory.VISUALIZATIONRAWDATA; } @Override public @NotNull Class<? extends ParameterSet> getParameterSetClass() { return TICVisualizerParameters.class; } }
src/main/java/io/github/mzmine/modules/visualization/chromatogram/ChromatogramVisualizerModule.java
/* * Copyright 2006-2021 The MZmine Development Team * * This file is part of MZmine. * * MZmine is free software; you can redistribute it and/or modify it under the terms of the GNU * General Public License as published by the Free Software Foundation; either version 2 of the * License, or (at your option) any later version. * * MZmine is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even * the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * General Public License for more details. * * You should have received a copy of the GNU General Public License along with MZmine; if not, * write to the Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * */ package io.github.mzmine.modules.visualization.chromatogram; import com.google.common.collect.Range; import io.github.mzmine.datamodel.FeatureIdentity; import io.github.mzmine.datamodel.FeatureStatus; import io.github.mzmine.datamodel.MZmineProject; import io.github.mzmine.datamodel.RawDataFile; import io.github.mzmine.datamodel.Scan; import io.github.mzmine.datamodel.features.Feature; import io.github.mzmine.datamodel.features.ModularFeature; import io.github.mzmine.datamodel.features.ModularFeatureListRow; import io.github.mzmine.main.MZmineCore; import io.github.mzmine.modules.MZmineModuleCategory; import io.github.mzmine.modules.MZmineRunnableModule; import io.github.mzmine.parameters.ParameterSet; import io.github.mzmine.parameters.parametertypes.selectors.ScanSelection; import io.github.mzmine.taskcontrol.Task; import io.github.mzmine.util.ExitCode; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.stream.Collectors; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; /** * TIC/XIC visualizer using JFreeChart library */ public class ChromatogramVisualizerModule implements MZmineRunnableModule { private static final String MODULE_NAME = "TIC/XIC visualizer"; private static final String MODULE_DESCRIPTION = "TIC/XIC visualizer."; // TODO public static void setupNewTICVisualizer(final RawDataFile dataFile) { setupNewTICVisualizer(new RawDataFile[]{dataFile}); } public static void setupNewTICVisualizer(final RawDataFile[] dataFiles) { setupNewTICVisualizer(MZmineCore.getProjectManager().getCurrentProject().getDataFiles(), dataFiles, new Feature[0], new Feature[0], null, null, null); } public static void setupNewTICVisualizer(final RawDataFile[] allFiles, final RawDataFile[] selectedFiles, final Feature[] allPeaks, final Feature[] selectedPeaks, final Map<Feature, String> peakLabels, ScanSelection scanSelection, final Range<Double> mzRange) { assert allFiles != null; final ChromatogramVisualizerModule myInstance = MZmineCore.getModuleInstance(ChromatogramVisualizerModule.class); final TICVisualizerParameters myParameters = (TICVisualizerParameters) MZmineCore .getConfiguration().getModuleParameters(ChromatogramVisualizerModule.class); myParameters.getParameter(TICVisualizerParameters.PLOT_TYPE).setValue(TICPlotType.BASEPEAK); if (scanSelection != null) { myParameters.getParameter(TICVisualizerParameters.scanSelection).setValue(scanSelection); } if (mzRange != null) { myParameters.getParameter(TICVisualizerParameters.MZ_RANGE).setValue(mzRange); } if (myParameters.showSetupDialog(true, allFiles, selectedFiles, 
allPeaks, selectedPeaks) == ExitCode.OK) { final TICVisualizerParameters p = (TICVisualizerParameters) myParameters.cloneParameterSet(); if (peakLabels != null) { p.setPeakLabelMap(peakLabels); } myInstance.runModule(MZmineCore.getProjectManager().getCurrentProject(), p, new ArrayList<Task>(), new Date()); // date is irrelevant } } public static void showNewTICVisualizerWindow(final RawDataFile[] dataFiles, final Feature[] selectionPeaks, final Map<Feature, String> peakLabels, final ScanSelection scanSelection, final TICPlotType plotType, final Range<Double> mzRange) { TICVisualizerTab window = new TICVisualizerTab(dataFiles, plotType, scanSelection, mzRange, Arrays.asList(selectionPeaks), peakLabels); MZmineCore.getDesktop().addTab(window); } public static void showNewTICVisualizerWindow(final RawDataFile[] dataFiles, final List<Feature> selectionPeaks, final Map<Feature, String> peakLabels, final ScanSelection scanSelection, final TICPlotType plotType, final Range<Double> mzRange) { TICVisualizerTab window = new TICVisualizerTab(dataFiles, plotType, scanSelection, mzRange, selectionPeaks, peakLabels); MZmineCore.getDesktop().addTab(window); } public static void visualizeFeatureListRows(Collection<ModularFeatureListRow> rows) { final Map<Feature, String> labelsMap = new HashMap<>(); final Set<RawDataFile> files = new HashSet<>(); Range<Double> mzRange = null; final List<Feature> selectedFeatures = new ArrayList<>(); for (ModularFeatureListRow row : rows) { for (final Feature f : row.getFeatures()) { final ModularFeature feature = (ModularFeature) f; if(feature == null || feature.getFeatureStatus() == FeatureStatus.UNKNOWN) { continue; } if (mzRange == null) { mzRange = feature.getRawDataPointsMZRange(); double upper = mzRange.upperEndpoint(); double lower = mzRange.lowerEndpoint(); if ((upper - lower) < 0.000001) { // Workaround to make ultra narrow mzRanges (e.g. from imported mzTab peaklist), // a more reasonable default for a HRAM instrument (~5ppm) double fiveppm = (upper * 5E-6); mzRange = Range.closed(lower - fiveppm, upper + fiveppm); } } else { mzRange = mzRange.span(feature.getRawDataPointsMZRange()); } selectedFeatures.add(feature); // Label the peak with the row's preferred identity. final FeatureIdentity identity = row.getPreferredFeatureIdentity(); if (identity != null) { labelsMap.put(feature, identity.getName()); } files.add(feature.getRawDataFile()); } } ScanSelection scanSelection = new ScanSelection(1); showNewTICVisualizerWindow(files.toArray(new RawDataFile[0]), selectedFeatures.toArray(new Feature[selectedFeatures.size()]), labelsMap, scanSelection, TICPlotType.BASEPEAK, mzRange); } public static void setUpVisualiserFromFeatures(Collection<ModularFeatureListRow> rows, @Nullable RawDataFile selectedFile) { // Map peaks to their identity labels. final Map<Feature, String> labelsMap = new HashMap<>(); Range<Double> mzRange = null; final ArrayList<Feature> allFeatures = new ArrayList<>(); final ArrayList<Feature> selectedFeatures = new ArrayList<>(); final Set<RawDataFile> allFiles = new HashSet<>(); allFiles.addAll(rows.stream().flatMap(row -> row.getRawDataFiles().stream()). collect(Collectors.toSet())); for (final ModularFeatureListRow row : rows) { // Label the peak with the row's preferred identity. 
final FeatureIdentity identity = row.getPreferredFeatureIdentity(); for (final Feature feature : row.getFeatures()) { allFeatures.add(feature); if (feature.getRawDataFile() == selectedFile) { selectedFeatures.add(feature); } if (mzRange == null) { mzRange = feature.getRawDataPointsMZRange(); } else { mzRange = mzRange.span(feature.getRawDataPointsMZRange()); } if (identity != null) { labelsMap.put(feature, identity.getName()); } // allFiles.add(feature.getRawDataFile()); } } ScanSelection scanSelection = new ScanSelection(1); setupNewTICVisualizer( MZmineCore.getProjectManager().getCurrentProject().getDataFiles(), allFiles.toArray(new RawDataFile[0]), allFeatures.toArray(new Feature[allFeatures.size()]), selectedFeatures.toArray(new Feature[selectedFeatures.size()]), labelsMap, scanSelection, mzRange); } @Override public @NotNull String getName() { return MODULE_NAME; } @Override public @NotNull String getDescription() { return MODULE_DESCRIPTION; } @Override @NotNull public ExitCode runModule(@NotNull MZmineProject project, @NotNull ParameterSet parameters, @NotNull Collection<Task> tasks, @NotNull Date moduleCallDate) { final RawDataFile[] dataFiles = parameters.getParameter(TICVisualizerParameters.DATA_FILES) .getValue().getMatchingRawDataFiles(); final Range<Double> mzRange = parameters.getParameter(TICVisualizerParameters.MZ_RANGE).getValue(); final ScanSelection scanSelection = parameters.getParameter(TICVisualizerParameters.scanSelection).getValue(); final TICPlotType plotType = parameters.getParameter(TICVisualizerParameters.PLOT_TYPE).getValue(); final List<Feature> selectionPeaks = parameters.getParameter(TICVisualizerParameters.PEAKS).getValue(); // Add the window to the desktop only if we actually have any raw // data to show. boolean weHaveData = false; for (RawDataFile dataFile : dataFiles) { Scan selectedScans[] = scanSelection.getMatchingScans(dataFile); if (selectedScans.length > 0) { weHaveData = true; } } if (weHaveData) { TICVisualizerTab window = new TICVisualizerTab(dataFiles, plotType, scanSelection, mzRange, selectionPeaks, ((TICVisualizerParameters) parameters).getPeakLabelMap()); MZmineCore.getDesktop().addTab(window); } else { MZmineCore.getDesktop().displayErrorMessage("No scans found"); } return ExitCode.OK; } @Override public @NotNull MZmineModuleCategory getModuleCategory() { return MZmineModuleCategory.VISUALIZATIONRAWDATA; } @Override public @NotNull Class<? extends ParameterSet> getParameterSetClass() { return TICVisualizerParameters.class; } }
Fix undetected features being added to the XIC dialog
src/main/java/io/github/mzmine/modules/visualization/chromatogram/ChromatogramVisualizerModule.java
Fix undetected features being added to the XIC dialog
<ide><path>src/main/java/io/github/mzmine/modules/visualization/chromatogram/ChromatogramVisualizerModule.java <ide> final FeatureIdentity identity = row.getPreferredFeatureIdentity(); <ide> <ide> for (final Feature feature : row.getFeatures()) { <add> if(feature == null || feature.getFeatureStatus() == FeatureStatus.UNKNOWN) { <add> continue; <add> } <ide> <ide> allFeatures.add(feature); <ide> if (feature.getRawDataFile() == selectedFile) {
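The guard added in this diff can be illustrated with a minimal, self-contained sketch. DetectionStatus and FeatureStub below are hypothetical stand-ins for mzmine's FeatureStatus and Feature classes, not the project's actual API:

import java.util.ArrayList;
import java.util.List;

// DetectionStatus and FeatureStub are hypothetical stand-ins, for illustration only.
enum DetectionStatus { DETECTED, UNKNOWN }

class FeatureStub {
    private final DetectionStatus status;
    FeatureStub(DetectionStatus status) { this.status = status; }
    DetectionStatus getStatus() { return status; }
}

public class UndetectedFeatureFilter {

    // Mirrors the guard added in the diff: drop null or undetected features
    // before they are collected for the chromatogram (XIC) dialog.
    static List<FeatureStub> keepDetected(List<FeatureStub> features) {
        List<FeatureStub> detected = new ArrayList<>();
        for (FeatureStub feature : features) {
            if (feature == null || feature.getStatus() == DetectionStatus.UNKNOWN) {
                continue;
            }
            detected.add(feature);
        }
        return detected;
    }

    public static void main(String[] args) {
        List<FeatureStub> input = new ArrayList<>();
        input.add(new FeatureStub(DetectionStatus.DETECTED));
        input.add(null);
        input.add(new FeatureStub(DetectionStatus.UNKNOWN));
        System.out.println(keepDetected(input).size()); // prints 1
    }
}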
Java
agpl-3.0
9ec6a6d1aad7b623728bc65a6a836bc9ee21a9ce
0
isokissa3/mcMMO,Maximvdw/mcMMO,jhonMalcom79/mcMMO_pers,virustotalop/mcMMO,EvilOlaf/mcMMO
package com.gmail.nossr50.events.experience; import org.bukkit.entity.Player; import com.gmail.nossr50.skills.utilities.SkillType; /** * Called when a user levels up in a skill */ public class McMMOPlayerLevelUpEvent extends McMMOPlayerExperienceEvent { private int levelsGained; public McMMOPlayerLevelUpEvent(Player player, SkillType skill) { super(player, skill); this.levelsGained = 1; } public McMMOPlayerLevelUpEvent(Player player, SkillType skill, int levelsGained) { super(player, skill); this.levelsGained = levelsGained; } /** * @return The number of levels gained in this event */ public int getLevelsGained() { return levelsGained; } /** * @param levelsGained int number of levels gained in this event */ public void setLevelsGained(int levelsGained) { this.levelsGained = levelsGained; } }
src/main/java/com/gmail/nossr50/events/experience/McMMOPlayerLevelUpEvent.java
package com.gmail.nossr50.events.experience; import org.bukkit.entity.Player; import com.gmail.nossr50.skills.utilities.SkillType; /** * Called when a user levels up in a skill */ public class McMMOPlayerLevelUpEvent extends McMMOPlayerExperienceEvent { private int levelsGained; public McMMOPlayerLevelUpEvent(Player player, SkillType skill) { super(player, skill); this.levelsGained = 1; // Always 1 for now as we call in the loop where the levelups are calculated, could change later! } /** * @return The number of levels gained in this event */ public int getLevelsGained() { return levelsGained; } }
Add a constructor for gaining multiple levels at once. However, level-up handling still needs to be worked out for this event to be cancelled properly.
src/main/java/com/gmail/nossr50/events/experience/McMMOPlayerLevelUpEvent.java
Add a constructor for gaining multiple levels at once. However, level-up handling still needs to be worked out for this event to be cancelled properly.
<ide><path>src/main/java/com/gmail/nossr50/events/experience/McMMOPlayerLevelUpEvent.java <ide> <ide> public McMMOPlayerLevelUpEvent(Player player, SkillType skill) { <ide> super(player, skill); <del> this.levelsGained = 1; // Always 1 for now as we call in the loop where the levelups are calculated, could change later! <add> this.levelsGained = 1; <add> } <add> <add> public McMMOPlayerLevelUpEvent(Player player, SkillType skill, int levelsGained) { <add> super(player, skill); <add> this.levelsGained = levelsGained; <ide> } <ide> <ide> /** <ide> public int getLevelsGained() { <ide> return levelsGained; <ide> } <add> <add> /** <add> * @param levelsGained int number of levels gained in this event <add> */ <add> public void setLevelsGained(int levelsGained) { <add> this.levelsGained = levelsGained; <add> } <ide> }
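A hypothetical listener sketch showing how a consumer might read the new levels-gained count. It assumes the Bukkit and mcMMO jars are on the classpath and that McMMOPlayerExperienceEvent is a regular Bukkit event with a handler list, which is not shown in this record:

import org.bukkit.event.EventHandler;
import org.bukkit.event.Listener;

import com.gmail.nossr50.events.experience.McMMOPlayerLevelUpEvent;

// Hypothetical listener; assumes the event plugs into Bukkit's event system as usual.
public class LevelUpListener implements Listener {

    @EventHandler
    public void onLevelUp(McMMOPlayerLevelUpEvent event) {
        // With the new overload, a single event can report several levels at once,
        // so consumers should read the count instead of assuming one level per event.
        int gained = event.getLevelsGained();
        if (gained > 1) {
            // e.g. grant a bulk reward here
        }
    }
}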
Java
mit
e4fbf5a2d66470e0d72b0a1eecac85a389fefca4
0
greboid/DMDirc,greboid/DMDirc,csmith/DMDirc,csmith/DMDirc,DMDirc/DMDirc,greboid/DMDirc,ShaneMcC/DMDirc-Client,csmith/DMDirc,ShaneMcC/DMDirc-Client,greboid/DMDirc,DMDirc/DMDirc,csmith/DMDirc,DMDirc/DMDirc,ShaneMcC/DMDirc-Client,DMDirc/DMDirc,ShaneMcC/DMDirc-Client
/* * Copyright (c) 2006-2007 Chris Smith, Shane Mc Cormack, Gregory Holmes * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. * * SVN: $Id$ */ package uk.org.ownage.dmdirc.plugins; import java.util.Hashtable; import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; import java.io.BufferedReader; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.net.URL; public class PluginManager { /** * List of known plugins. */ private Hashtable<String,Plugin> knownPlugins = new Hashtable<String,Plugin>(); /** * List of known plugin classNames. */ private Hashtable<String,String> knownPluginNames = new Hashtable<String,String>(); /** * Directory where plugins are stored. */ private String myDir; /** * Create a new PluginManager. */ public PluginManager() { myDir = "."; } /** * Create a new PluginManager. */ public PluginManager(final String directory) { myDir = directory; } /** * Add a new plugin. * * @param pluginName Name of plugin * @param className Class Name of Plugin object * @return True if loaded. */ public boolean addPlugin(final String pluginName, final String className) { if (knownPlugins.containsKey(pluginName.toLowerCase())) { return false; } Plugin plugin = loadPlugin(className); if (plugin == null) { return false; } plugin.onLoad(); knownPlugins.put(pluginName.toLowerCase(), plugin); knownPluginNames.put(pluginName.toLowerCase(), className); return true; } /** * Remove a plugin. * * @param pluginName Name of plugin * @return True if removed. */ public boolean delPlugin(final String pluginName) { if (!knownPlugins.containsKey(pluginName.toLowerCase())) { return false; } Plugin plugin = getPlugin(pluginName); try { plugin.onUnload(); } catch (Exception e) { // TODO: Log Unload Errors somewhere. } knownPlugins.remove(pluginName.toLowerCase()); knownPluginNames.remove(pluginName.toLowerCase()); plugin = null; return true; } /** * Reload a plugin. * * @param pluginName Name of plugin * @return True if reloaded. */ public boolean reloadPlugin(final String pluginName) { if (!knownPlugins.containsKey(pluginName.toLowerCase())) { return false; } final String filename = knownPluginNames.get(pluginName.toLowerCase()); delPlugin(pluginName); return addPlugin(pluginName, filename); } /** * Get a plugin instance. 
* * @param pluginName Name of plugin * @return Plugin instance, or null */ public Plugin getPlugin(final String pluginName) { if (!knownPlugins.containsKey(pluginName.toLowerCase())) { return null; } return knownPlugins.get(pluginName.toLowerCase()); } /** * Get string[] of known plugin names. * * @return string[] of known plugin names. */ public String[] getNames() { final String[] result = new String[knownPlugins.size()]; int i = 0; for (String name : knownPlugins.keySet()) { result[i++] = name; } return result; } /** * Get classname of a given plugin name. * * @return classname of a given plugin name. */ public String getClassName(final String pluginName) { if (!knownPluginNames.containsKey(pluginName)) { return ""; } else { return knownPluginNames.get(pluginName); } } /** * Load a plugin with a given className * * @param className Class Name of plugin to load. */ private Plugin loadPlugin(final String className) { Plugin result; try { ClassLoader cl = new PluginClassLoader(myDir); Class<?> c = (Class<?>)cl.loadClass(className); Constructor<?> constructor = c.getConstructor(new Class[] {}); result = (Plugin)constructor.newInstance(new Object[] {}); } catch (ClassNotFoundException cnfe) { System.out.println("ClassNotFoundException "+cnfe.getMessage()); cnfe.printStackTrace(); result = null; } catch (NoSuchMethodException nsme) { System.out.println("NoSuchMethodException"); result = null; } catch (IllegalAccessException iae) { System.out.println("IllegalAccessException"); result = null; } catch (InvocationTargetException ite) { System.out.println("InvocationTargetException"); result = null; } catch (InstantiationException ie) { System.out.println("InstantiationException"); result = null; } return result; } }
src/uk/org/ownage/dmdirc/plugins/PluginManager.java
/* * Copyright (c) 2006-2007 Chris Smith, Shane Mc Cormack, Gregory Holmes * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. * * SVN: $Id$ */ package uk.org.ownage.dmdirc.plugins; import java.util.Hashtable; import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; import java.io.BufferedReader; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.net.URL; public class PluginManager { /** * List of known plugins. */ private Hashtable<String,Plugin> knownPlugins = new Hashtable<String,Plugin>(); /** * List of known plugin classNames. */ private Hashtable<String,String> knownPluginNames = new Hashtable<String,String>(); /** * Directory where plugins are stored. */ private String myDir; /** * Create a new PluginManager. */ public PluginManager() { myDir = "."; } /** * Create a new PluginManager. */ public PluginManager(final String directory) { myDir = directory; } /** * Add a new plugin. * * @param pluginName Name of plugin * @param className Class Name of Plugin object * @return True if loaded. */ public boolean addPlugin(final String pluginName, final String className) { if (knownPlugins.containsKey(pluginName.toLowerCase())) { return false; } Plugin plugin = loadPlugin(className); if (plugin == null) { return false; } knownPlugins.put(pluginName.toLowerCase(), plugin); knownPluginNames.put(pluginName.toLowerCase(), className); return true; } /** * Remove a plugin. * * @param pluginName Name of plugin * @return True if removed. */ public boolean delPlugin(final String pluginName) { if (!knownPlugins.containsKey(pluginName.toLowerCase())) { return false; } Plugin plugin = getPlugin(pluginName); plugin.onUnload(); knownPlugins.remove(pluginName.toLowerCase()); knownPluginNames.remove(pluginName.toLowerCase()); plugin = null; return true; } /** * Reload a plugin. * * @param pluginName Name of plugin * @return True if reloaded. */ public boolean reloadPlugin(final String pluginName) { if (!knownPlugins.containsKey(pluginName.toLowerCase())) { return false; } final String filename = knownPluginNames.get(pluginName.toLowerCase()); delPlugin(pluginName); return addPlugin(pluginName, filename); } /** * Get a plugin instance. 
* * @param pluginName Name of plugin * @return Plugin instance, or null */ public Plugin getPlugin(final String pluginName) { if (!knownPlugins.containsKey(pluginName.toLowerCase())) { return null; } return knownPlugins.get(pluginName.toLowerCase()); } /** * Get string[] of known plugin names. * * @return string[] of known plugin names. */ public String[] getNames() { final String[] result = new String[knownPlugins.size()]; int i = 0; for (String name : knownPlugins.keySet()) { result[i++] = name; } return result; } /** * Get classname of a given plugin name. * * @return classname of a given plugin name. */ public String getClassName(final String pluginName) { if (!knownPluginNames.containsKey(pluginName)) { return ""; } else { return knownPluginNames.get(pluginName); } } /** * Load a plugin with a given className * * @param className Class Name of plugin to load. */ private Plugin loadPlugin(final String className) { Plugin result; try { ClassLoader cl = new PluginClassLoader(myDir); Class<?> c = (Class<?>)cl.loadClass(className); Constructor<?> constructor = c.getConstructor(new Class[] {}); result = (Plugin)constructor.newInstance(new Object[] {}); } catch (ClassNotFoundException cnfe) { System.out.println("ClassNotFoundException "+cnfe.getMessage()); cnfe.printStackTrace(); result = null; } catch (NoSuchMethodException nsme) { System.out.println("NoSuchMethodException"); result = null; } catch (IllegalAccessException iae) { System.out.println("IllegalAccessException"); result = null; } catch (InvocationTargetException ite) { System.out.println("InvocationTargetException"); result = null; } catch (InstantiationException ie) { System.out.println("InstantiationException"); result = null; } return result; } }
re-added onLoad() call to addPlugin git-svn-id: 50f83ef66c13f323b544ac924010c921a9f4a0f7@836 00569f92-eb28-0410-84fd-f71c24880f43
src/uk/org/ownage/dmdirc/plugins/PluginManager.java
re-added onLoad() call to addPlugin
<ide><path>src/uk/org/ownage/dmdirc/plugins/PluginManager.java <ide> if (knownPlugins.containsKey(pluginName.toLowerCase())) { return false; } <ide> Plugin plugin = loadPlugin(className); <ide> if (plugin == null) { return false; } <add> plugin.onLoad(); <ide> knownPlugins.put(pluginName.toLowerCase(), plugin); <ide> knownPluginNames.put(pluginName.toLowerCase(), className); <ide> return true; <ide> public boolean delPlugin(final String pluginName) { <ide> if (!knownPlugins.containsKey(pluginName.toLowerCase())) { return false; } <ide> Plugin plugin = getPlugin(pluginName); <del> plugin.onUnload(); <add> try { <add> plugin.onUnload(); <add> } catch (Exception e) { <add> // TODO: Log Unload Errors somewhere. <add> } <ide> knownPlugins.remove(pluginName.toLowerCase()); <ide> knownPluginNames.remove(pluginName.toLowerCase()); <ide> plugin = null;
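A minimal driver showing how the updated PluginManager behaves after this change; com.example.DemoPlugin is a made-up class name used purely for illustration:

import uk.org.ownage.dmdirc.plugins.PluginManager;

// Sketch only: "demo" and com.example.DemoPlugin are hypothetical names.
public class PluginManagerDemo {

    public static void main(String[] args) {
        PluginManager manager = new PluginManager("plugins");

        // addPlugin now calls onLoad() on the freshly constructed plugin,
        // so a plugin that was added successfully is also initialised.
        boolean loaded = manager.addPlugin("demo", "com.example.DemoPlugin");
        System.out.println("loaded: " + loaded);

        // delPlugin now swallows exceptions thrown by onUnload(), so a misbehaving
        // plugin can no longer block its own removal; reloadPlugin builds on both.
        if (loaded) {
            manager.reloadPlugin("demo");
            manager.delPlugin("demo");
        }
    }
}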
Java
mit
707a5da0b6dc5fcdb4325f4176f795397255493a
0
flintproject/Flint,flintproject/Flint
/* -*- Mode: Java; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- vim:set ts=4 sw=4 sts=4 et: */ package jp.oist.flint.form.job; import jp.oist.flint.form.sub.JobWindow; import jp.oist.flint.garuda.GarudaClient; import jp.oist.flint.job.Job; import jp.oist.flint.job.Progress; import jp.oist.flint.util.DurationFormat; import jp.oist.flint.util.PeriodFormat; import java.awt.BorderLayout; import java.awt.Color; import java.awt.Dimension; import java.awt.FlowLayout; import java.awt.Font; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.io.IOException; import javax.swing.BorderFactory; import javax.swing.BoxLayout; import javax.swing.ImageIcon; import javax.swing.JButton; import javax.swing.JLabel; import javax.swing.JPanel; import javax.swing.JProgressBar; import javax.swing.SwingConstants; import javax.swing.UIManager; public class JobCell extends JPanel { private final JobWindow mJobWindow; private final int mIndex; private boolean mIsCancelled = false; public JobCell(JobWindow jobWindow, int index) { mJobWindow = jobWindow; mIndex = index; initComponents(); setBackground(UIManager.getColor("List.background")); if (!GarudaClient.isRunning()) { btn_SendViaGaruda.setEnabled(false); } } @SuppressWarnings("unchecked") // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents private void initComponents() { pnl_Top = new JPanel(); lbl_Title = new JLabel(); pnl_Middle = new JPanel(); jPanel2 = new JPanel(); mProgressBar = new JProgressBar(); btn_Cancel = new JButton(); pnl_Bottom = new JPanel(); lbl_Detail = new JLabel(); jPanel4 = new JPanel(); btn_SendViaGaruda = new JButton(); btn_Export = new JButton(); btn_View = new JButton(); setBorder(BorderFactory.createEmptyBorder(0, 10, 0, 10)); setMinimumSize(new Dimension(0, 70)); setPreferredSize(new Dimension(200, 70)); setLayout(new BoxLayout(this, BoxLayout.PAGE_AXIS)); pnl_Top.setMaximumSize(new Dimension(32767, 20)); pnl_Top.setOpaque(false); pnl_Top.setPreferredSize(new Dimension(800, 20)); pnl_Top.setLayout(new BorderLayout()); pnl_Top.add(lbl_Title, BorderLayout.CENTER); add(pnl_Top); pnl_Middle.setMinimumSize(new Dimension(0, 0)); pnl_Middle.setOpaque(false); pnl_Middle.setPreferredSize(new Dimension(800, 20)); pnl_Middle.setLayout(new BoxLayout(pnl_Middle, BoxLayout.LINE_AXIS)); jPanel2.setBorder(BorderFactory.createEmptyBorder(0, 0, 0, 10)); jPanel2.setMinimumSize(new Dimension(0, 20)); jPanel2.setOpaque(false); jPanel2.setPreferredSize(new Dimension(0, 23)); jPanel2.setLayout(new BoxLayout(jPanel2, BoxLayout.LINE_AXIS)); mProgressBar.setMinimumSize(new Dimension(0, 20)); mProgressBar.setPreferredSize(new Dimension(0, 20)); mProgressBar.setStringPainted(true); jPanel2.add(mProgressBar); pnl_Middle.add(jPanel2); btn_Cancel.setIcon(new ImageIcon(getClass().getResource("/jp/oist/flint/image/cancel.png"))); // NOI18N btn_Cancel.setActionCommand("jobcell.action.cancel"); btn_Cancel.setIconTextGap(0); btn_Cancel.setMaximumSize(new Dimension(20, 20)); btn_Cancel.setMinimumSize(new Dimension(20, 20)); btn_Cancel.setPreferredSize(new Dimension(20, 20)); btn_Cancel.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent evt) { btn_CancelActionPerformed(evt); } }); pnl_Middle.add(btn_Cancel); add(pnl_Middle); pnl_Bottom.setMaximumSize(new Dimension(65534, 30)); pnl_Bottom.setMinimumSize(new Dimension(0, 30)); pnl_Bottom.setOpaque(false); pnl_Bottom.setPreferredSize(new Dimension(800, 30)); pnl_Bottom.setLayout(new BoxLayout(pnl_Bottom, 
BoxLayout.LINE_AXIS)); lbl_Detail.setFont(new Font("Lucida Grande", 0, 12)); // NOI18N lbl_Detail.setForeground(Color.gray); lbl_Detail.setVerticalAlignment(SwingConstants.TOP); lbl_Detail.setMaximumSize(new Dimension(32333, 20)); lbl_Detail.setMinimumSize(new Dimension(0, 20)); lbl_Detail.setPreferredSize(new Dimension(600, 20)); pnl_Bottom.add(lbl_Detail); jPanel4.setMaximumSize(new Dimension(250, 20)); jPanel4.setMinimumSize(new Dimension(250, 30)); jPanel4.setOpaque(false); jPanel4.setPreferredSize(new Dimension(400, 30)); FlowLayout flowLayout1 = new FlowLayout(FlowLayout.RIGHT, 2, 0); flowLayout1.setAlignOnBaseline(true); jPanel4.setLayout(flowLayout1); btn_SendViaGaruda.setText("Send via Garuda"); btn_SendViaGaruda.setActionCommand("jobcell.action.sendviagaruda"); btn_SendViaGaruda.setMaximumSize(new Dimension(110, 20)); btn_SendViaGaruda.setMinimumSize(new Dimension(110, 20)); btn_SendViaGaruda.setPreferredSize(new Dimension(130, 20)); btn_SendViaGaruda.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent evt) { btn_SendViaGarudaActionPerformed(evt); } }); jPanel4.add(btn_SendViaGaruda); btn_Export.setText("Export"); btn_Export.setActionCommand("jobcell.action.export"); btn_Export.setMaximumSize(new Dimension(75, 20)); btn_Export.setMinimumSize(new Dimension(75, 20)); btn_Export.setPreferredSize(new Dimension(75, 20)); btn_Export.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent evt) { btn_ExportActionPerformed(evt); } }); jPanel4.add(btn_Export); btn_View.setText("View"); btn_View.setActionCommand("jobcell.action.view"); btn_View.setMaximumSize(new Dimension(75, 20)); btn_View.setMinimumSize(new Dimension(75, 20)); btn_View.setPreferredSize(new Dimension(75, 20)); btn_View.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent evt) { btn_ViewActionPerformed(evt); } }); jPanel4.add(btn_View); pnl_Bottom.add(jPanel4); add(pnl_Bottom); }// </editor-fold>//GEN-END:initComponents private void btn_CancelActionPerformed(ActionEvent evt) {//GEN-FIRST:event_btn_CancelActionPerformed mJobWindow.cancelJobPerformed(mIndex); }//GEN-LAST:event_btn_CancelActionPerformed private void btn_ExportActionPerformed(ActionEvent evt) {//GEN-FIRST:event_btn_ExportActionPerformed mJobWindow.exportPerformed(mIndex); }//GEN-LAST:event_btn_ExportActionPerformed private void btn_SendViaGarudaActionPerformed(ActionEvent evt) {//GEN-FIRST:event_btn_SendViaGarudaActionPerformed mJobWindow.sendViaGarudaPerformed(mIndex); }//GEN-LAST:event_btn_SendViaGarudaActionPerformed private void btn_ViewActionPerformed(ActionEvent evt) {//GEN-FIRST:event_btn_ViewActionPerformed mJobWindow.plotPerformed(mIndex); }//GEN-LAST:event_btn_ViewActionPerformed /* * Return true if finished, false otherwise. 
*/ public boolean setProgress(Job job) { Progress progress = job.getProgress(); int percent = progress.getPercent(); mProgressBar.setValue(percent); StringBuilder sb = new StringBuilder(); if (mIsCancelled) { sb.append("cancelled | "); } sb.append(String.format("%1$3d", percent)); sb.append(" % | "); sb.append(PeriodFormat.fromTo(progress.getStarted(), progress.getLastUpdated())); sb.append(" ("); sb.append(DurationFormat.fromMillis(progress.getElapsedMillis())); sb.append(")"); mProgressBar.setString(sb.toString()); mProgressBar.repaint(); if (lbl_Detail.getText().isEmpty()) { try { lbl_Detail.setText(job.getParameterDescription()); } catch (IOException ex) { // give up } } boolean b = isFinished(); if (b) { btn_Cancel.setEnabled(false); } return b; } public void setCancelled(boolean cancelled) { mIsCancelled = cancelled; btn_Cancel.setEnabled(!cancelled); } private boolean isFinished () { return mIsCancelled || isCompleted(); } private boolean isCompleted () { return mProgressBar.getMaximum() == mProgressBar.getValue(); } // Variables declaration - do not modify//GEN-BEGIN:variables private JButton btn_Cancel; private JButton btn_Export; private JButton btn_SendViaGaruda; private JButton btn_View; private JPanel jPanel2; private JPanel jPanel4; private JLabel lbl_Detail; private JLabel lbl_Title; private JProgressBar mProgressBar; private JPanel pnl_Bottom; private JPanel pnl_Middle; private JPanel pnl_Top; // End of variables declaration//GEN-END:variables }
flint/src/jp/oist/flint/form/job/JobCell.java
/* -*- Mode: Java; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- vim:set ts=4 sw=4 sts=4 et: */ package jp.oist.flint.form.job; import jp.oist.flint.form.sub.JobWindow; import jp.oist.flint.garuda.GarudaClient; import jp.oist.flint.job.Job; import jp.oist.flint.job.Progress; import jp.oist.flint.util.DurationFormat; import jp.oist.flint.util.PeriodFormat; import java.awt.BorderLayout; import java.awt.Color; import java.awt.Dimension; import java.awt.FlowLayout; import java.awt.Font; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.io.IOException; import javax.swing.BorderFactory; import javax.swing.BoxLayout; import javax.swing.ImageIcon; import javax.swing.JButton; import javax.swing.JLabel; import javax.swing.JPanel; import javax.swing.JProgressBar; import javax.swing.SwingConstants; import javax.swing.UIManager; public class JobCell extends JPanel { private final JobWindow mJobWindow; private final int mIndex; private boolean mIsCancelled = false; public JobCell(JobWindow jobWindow, int index) { mJobWindow = jobWindow; mIndex = index; initComponents(); setBackground(UIManager.getColor("List.background")); if (!GarudaClient.isRunning()) { btn_SendViaGaruda.setEnabled(false); } } @SuppressWarnings("unchecked") // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents private void initComponents() { pnl_Top = new JPanel(); lbl_Title = new JLabel(); pnl_Middle = new JPanel(); jPanel2 = new JPanel(); mProgressBar = new JProgressBar(); btn_Cancel = new JButton(); pnl_Bottom = new JPanel(); lbl_Detail = new JLabel(); jPanel4 = new JPanel(); btn_SendViaGaruda = new JButton(); btn_Export = new JButton(); btn_View = new JButton(); setBorder(BorderFactory.createEmptyBorder(0, 10, 0, 10)); setMinimumSize(new Dimension(0, 70)); setPreferredSize(new Dimension(200, 70)); setLayout(new BoxLayout(this, BoxLayout.PAGE_AXIS)); pnl_Top.setMaximumSize(new Dimension(32767, 20)); pnl_Top.setOpaque(false); pnl_Top.setPreferredSize(new Dimension(800, 20)); pnl_Top.setLayout(new BorderLayout()); pnl_Top.add(lbl_Title, BorderLayout.CENTER); add(pnl_Top); pnl_Middle.setMinimumSize(new Dimension(0, 0)); pnl_Middle.setOpaque(false); pnl_Middle.setPreferredSize(new Dimension(800, 20)); pnl_Middle.setLayout(new BoxLayout(pnl_Middle, BoxLayout.LINE_AXIS)); jPanel2.setBorder(BorderFactory.createEmptyBorder(0, 0, 0, 10)); jPanel2.setMinimumSize(new Dimension(0, 20)); jPanel2.setOpaque(false); jPanel2.setPreferredSize(new Dimension(0, 23)); jPanel2.setLayout(new BoxLayout(jPanel2, BoxLayout.LINE_AXIS)); mProgressBar.setMinimumSize(new Dimension(0, 20)); mProgressBar.setPreferredSize(new Dimension(0, 20)); mProgressBar.setStringPainted(true); jPanel2.add(mProgressBar); pnl_Middle.add(jPanel2); btn_Cancel.setIcon(new ImageIcon(getClass().getResource("/jp/oist/flint/image/cancel.png"))); // NOI18N btn_Cancel.setActionCommand("jobcell.action.cancel"); btn_Cancel.setIconTextGap(0); btn_Cancel.setMaximumSize(new Dimension(20, 20)); btn_Cancel.setMinimumSize(new Dimension(20, 20)); btn_Cancel.setPreferredSize(new Dimension(20, 20)); btn_Cancel.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent evt) { btn_CancelActionPerformed(evt); } }); pnl_Middle.add(btn_Cancel); add(pnl_Middle); pnl_Bottom.setMaximumSize(new Dimension(65534, 30)); pnl_Bottom.setMinimumSize(new Dimension(0, 30)); pnl_Bottom.setOpaque(false); pnl_Bottom.setPreferredSize(new Dimension(800, 30)); pnl_Bottom.setLayout(new BoxLayout(pnl_Bottom, 
BoxLayout.LINE_AXIS)); lbl_Detail.setFont(new Font("Lucida Grande", 0, 12)); // NOI18N lbl_Detail.setForeground(Color.gray); lbl_Detail.setVerticalAlignment(SwingConstants.TOP); lbl_Detail.setMaximumSize(new Dimension(32333, 20)); lbl_Detail.setMinimumSize(new Dimension(0, 20)); lbl_Detail.setPreferredSize(new Dimension(600, 20)); pnl_Bottom.add(lbl_Detail); jPanel4.setMaximumSize(new Dimension(250, 20)); jPanel4.setMinimumSize(new Dimension(250, 30)); jPanel4.setOpaque(false); jPanel4.setPreferredSize(new Dimension(400, 30)); FlowLayout flowLayout1 = new FlowLayout(FlowLayout.RIGHT, 2, 0); flowLayout1.setAlignOnBaseline(true); jPanel4.setLayout(flowLayout1); btn_SendViaGaruda.setText("Send via Garuda"); btn_SendViaGaruda.setActionCommand("jobcell.action.sendviagaruda"); btn_SendViaGaruda.setMaximumSize(new Dimension(110, 20)); btn_SendViaGaruda.setMinimumSize(new Dimension(110, 20)); btn_SendViaGaruda.setPreferredSize(new Dimension(130, 20)); btn_SendViaGaruda.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent evt) { btn_SendViaGarudaActionPerformed(evt); } }); jPanel4.add(btn_SendViaGaruda); btn_Export.setText("Export"); btn_Export.setActionCommand("jobcell.action.export"); btn_Export.setMaximumSize(new Dimension(75, 20)); btn_Export.setMinimumSize(new Dimension(75, 20)); btn_Export.setPreferredSize(new Dimension(75, 20)); btn_Export.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent evt) { btn_ExportActionPerformed(evt); } }); jPanel4.add(btn_Export); btn_View.setText("View"); btn_View.setActionCommand("jobcell.action.view"); btn_View.setMaximumSize(new Dimension(75, 20)); btn_View.setMinimumSize(new Dimension(75, 20)); btn_View.setPreferredSize(new Dimension(75, 20)); btn_View.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent evt) { btn_ViewActionPerformed(evt); } }); jPanel4.add(btn_View); pnl_Bottom.add(jPanel4); add(pnl_Bottom); }// </editor-fold>//GEN-END:initComponents private void btn_CancelActionPerformed(ActionEvent evt) {//GEN-FIRST:event_btn_CancelActionPerformed mJobWindow.cancelJobPerformed(mIndex); }//GEN-LAST:event_btn_CancelActionPerformed private void btn_ExportActionPerformed(ActionEvent evt) {//GEN-FIRST:event_btn_ExportActionPerformed mJobWindow.exportPerformed(mIndex); }//GEN-LAST:event_btn_ExportActionPerformed private void btn_SendViaGarudaActionPerformed(ActionEvent evt) {//GEN-FIRST:event_btn_SendViaGarudaActionPerformed mJobWindow.sendViaGarudaPerformed(mIndex); }//GEN-LAST:event_btn_SendViaGarudaActionPerformed private void btn_ViewActionPerformed(ActionEvent evt) {//GEN-FIRST:event_btn_ViewActionPerformed mJobWindow.plotPerformed(mIndex); }//GEN-LAST:event_btn_ViewActionPerformed public void setProgress(Job job) { Progress progress = job.getProgress(); int percent = progress.getPercent(); mProgressBar.setValue(percent); StringBuilder sb = new StringBuilder(); if (mIsCancelled) { sb.append("cancelled | "); } sb.append(String.format("%1$3d", percent)); sb.append(" % | "); sb.append(PeriodFormat.fromTo(progress.getStarted(), progress.getLastUpdated())); sb.append(" ("); sb.append(DurationFormat.fromMillis(progress.getElapsedMillis())); sb.append(")"); if (isFinished()) { btn_Cancel.setEnabled(false); } mProgressBar.setString(sb.toString()); mProgressBar.repaint(); if (lbl_Detail.getText().isEmpty()) { try { lbl_Detail.setText(job.getParameterDescription()); } catch (IOException ex) { // give up } } } public void setCancelled(boolean cancelled) { mIsCancelled = cancelled; 
btn_Cancel.setEnabled(!cancelled); } private boolean isFinished () { return mIsCancelled || isCompleted(); } private boolean isCompleted () { return mProgressBar.getMaximum() == mProgressBar.getValue(); } // Variables declaration - do not modify//GEN-BEGIN:variables private JButton btn_Cancel; private JButton btn_Export; private JButton btn_SendViaGaruda; private JButton btn_View; private JPanel jPanel2; private JPanel jPanel4; private JLabel lbl_Detail; private JLabel lbl_Title; private JProgressBar mProgressBar; private JPanel pnl_Bottom; private JPanel pnl_Middle; private JPanel pnl_Top; // End of variables declaration//GEN-END:variables }
Return a boolean to indicate whether it has finished or not
flint/src/jp/oist/flint/form/job/JobCell.java
Return a boolean to indicate whether it has finished or not
<ide><path>lint/src/jp/oist/flint/form/job/JobCell.java <ide> mJobWindow.plotPerformed(mIndex); <ide> }//GEN-LAST:event_btn_ViewActionPerformed <ide> <del> public void setProgress(Job job) { <add> /* <add> * Return true if finished, false otherwise. <add> */ <add> public boolean setProgress(Job job) { <ide> Progress progress = job.getProgress(); <ide> int percent = progress.getPercent(); <ide> mProgressBar.setValue(percent); <ide> sb.append(" ("); <ide> sb.append(DurationFormat.fromMillis(progress.getElapsedMillis())); <ide> sb.append(")"); <del> <del> if (isFinished()) { <del> btn_Cancel.setEnabled(false); <del> } <del> <ide> mProgressBar.setString(sb.toString()); <ide> mProgressBar.repaint(); <ide> <ide> // give up <ide> } <ide> } <add> <add> boolean b = isFinished(); <add> if (b) { <add> btn_Cancel.setEnabled(false); <add> } <add> return b; <ide> } <ide> <ide> public void setCancelled(boolean cancelled) {
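With the change above, setProgress(Job) now reports completion to its caller instead of only updating the widgets. As a rough, self-contained sketch of how a caller can exploit that return value (this demo class is hypothetical and does not use the Flint JobCell or Job types), a Swing timer can keep polling an update method and stop itself once it reports completion:

import javax.swing.JFrame;
import javax.swing.JProgressBar;
import javax.swing.SwingUtilities;
import javax.swing.Timer;

public class ProgressPollingDemo {

    private final JProgressBar bar = new JProgressBar(0, 100);

    /** Advances the bar and returns true once the simulated work is done. */
    private boolean setProgress(int percent) {
        bar.setValue(Math.min(percent, bar.getMaximum()));
        return bar.getValue() >= bar.getMaximum();
    }

    private void start() {
        JFrame frame = new JFrame("Progress polling demo");
        frame.add(bar);
        frame.pack();
        frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        frame.setVisible(true);

        final int[] percent = {0};
        Timer timer = new Timer(100, null);
        timer.addActionListener(e -> {
            // Stop polling as soon as the update method reports completion,
            // which is what a boolean-returning setProgress enables.
            if (setProgress(percent[0] += 5)) {
                timer.stop();
            }
        });
        timer.start();
    }

    public static void main(String[] args) {
        SwingUtilities.invokeLater(() -> new ProgressPollingDemo().start());
    }
}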
Java
apache-2.0
58001f916e5b4dbbf01b679f4c24db83f8ee1208
0
inigoillan/libanalytics
package com.inigoillan.libanalytics.algorithms.oddsketch; import com.google.common.base.Preconditions; import com.inigoillan.libanalytics.algorithms.hashers.Hash; import org.apache.log4j.Logger; import javax.annotation.Nonnull; import java.util.BitSet; /** * For details, please refer to the <a href="http://www.itu.dk/people/pagh/papers/oddsketch.pdf">OddSketch paper</a> * * @param <K> The type of {@link Hash} elements this sketch accepts */ public class OddSketch<K extends Hash> { private static final Logger LOG = Logger.getLogger(OddSketch.class); private BitSet sketch; private int size; private int elementsAdded = 0; /** * Constructor * * @param size The size of the sketch */ public OddSketch(int size) { Preconditions.checkArgument(size >= 0); this.setSize(size); BitSet sketch = new BitSet(size); sketch.clear(); this.setSketch(sketch); } /** * Adds a hashed element to the sketch */ public void addHashed(@Nonnull K hashed) { int bucket = getBucket(hashed); xorIthBit(bucket); elementsAdded++; } /** * Estimates the set size of this Odd sketch. * Uses the Poisson Approximation by default to be consistent with the Jaccard Index estimation * * @return Returns the estimated set size */ public int estimateSetSize() { return this.estimateSetSizeMarkovApproximation(); } /** * Estimates the set size for this Odd sketch based on the Markov Chain Model: * * <math xmlns="http://www.w3.org/1998/Math/MathML"><mover><mi>m</mi><mo>^</mo></mover><mo>=</mo><mfrac><mrow><mi>ln</mi><mfenced><mrow><mn>1</mn><mo>-</mo><mn>2</mn><mi>z</mi><mo>/</mo><mi>n</mi></mrow></mfenced></mrow><mrow><mi>ln</mi><mfenced><mrow><mn>1</mn><mo>-</mo><mn>2</mn><mo>/</mo><mi>n</mi></mrow></mfenced></mrow></mfrac></math> * <br> * where <math><mi>m</mi></math> is the size of the set, * <math><mi>z</mi></math> is the number of odd bins in the sketch, * and <math><mi>n</mi></math> is the total number of bins in the sketch * <br><br> * * Refer to the <a href="http://www.itu.dk/people/pagh/papers/oddsketch.pdf">OddSketch paper</a> for details. * * @return The set size estimated by the Odd sketch using the Markov Chain Model */ protected int estimateSetSizeMarkovApproximation() { int z = sketch.cardinality(); LOG.debug(String.format("Sketch cardinality is %d", z)); double numerator = Math.log(1.0 - (2.0 * z / size)); double denominator = Math.log(1.0 - (2.0 / size)); double sizeEstimation = numerator / denominator; LOG.debug(String.format("When computing set size, the numerator is %f, denominator is %f and estimation is %f", numerator, denominator, sizeEstimation)); return (int) Math.round(sizeEstimation); } /** * Estimates the set size for this Odd sketch based on the Poisson approximation: * * <math xmlns="http://www.w3.org/1998/Math/MathML"><mover><mi>m</mi><mo>^</mo></mover><mo>=</mo><mo>-</mo><mfrac><mi>n</mi><mn>2</mn></mfrac><mi>ln</mi><mfenced><mrow><mn>1</mn><mo>-</mo><mn>2</mn><mi>z</mi><mo>/</mo><mi>n</mi></mrow></mfenced></math> * <br> * where <math><mi>m</mi></math> is the size of the set, * <math><mi>z</mi></math> is the number of odd bins in the sketch, * and <math><mi>n</mi></math> is the number of bins in the sketch * <br><br> * * Refer to the <a href="http://www.itu.dk/people/pagh/papers/oddsketch.pdf">OddSketch paper</a> for details. 
* * @return The set size estimated by the Odd sketch using the Markov Chain Model */ protected int estimateSetSizePoissonApproximation() { int z = sketch.cardinality(); LOG.debug(String.format("Sketch cardinality is %d", z)); double ln = Math.log(1.0 - (2.0 * z / size)); double sizeEstimation = -size * ln / 2; LOG.debug(String.format("Set size estimation is %f", sizeEstimation)); return (int) Math.round(sizeEstimation); } /** * Estimates the Jaccard Index for this Odd sketch: * * <math xmlns="http://www.w3.org/1998/Math/MathML"><mover><mi>J</mi><mo>^</mo></mover><mo>=</mo><mn>1</mn><mo>+</mo><mstyle displaystyle="false"><mfrac><mi>n</mi><mrow><mn>4</mn><mi>k</mi></mrow></mfrac></mstyle><mi>ln</mi><mfenced><mrow><mn>1</mn><mo>-</mo><mstyle displaystyle="false"><mfrac><mrow><mn>2</mn><mfenced open="|" close="|"><mrow><mi>o</mi><mi>d</mi><mi>d</mi><mo>(</mo><msub><mi>S</mi><mn>1</mn></msub><mo>)</mo><mo>&#8710;</mo><mi>o</mi><mi>d</mi><mi>d</mi><mo>(</mo><msub><mi>S</mi><mn>2</mn></msub><mo>)</mo></mrow></mfenced></mrow><mi>n</mi></mfrac></mstyle></mrow></mfenced><mspace linebreak="newline"/></math> * <br> * where <math><mi>J</mi></math> is the estimated Jaccard Index, * <math><mi>k</mi></math> is the number of elements added to the sketch, * <math><mfenced open="|" close="|"><mrow><mi>o</mi><mi>d</mi><mi>d</mi><mo>(</mo><msub><mi>S</mi><mn>1</mn></msub><mo>)</mo><mo>&#8710;</mo><mi>o</mi><mi>d</mi><mi>d</mi><mo>(</mo><msub><mi>S</mi><mn>2</mn></msub><mo>)</mo></mrow></mfenced><mspace linebreak="newline"/></math> is the symmetric difference (number of 1's in the xor-ed sketch) * and <math><mi>n</mi></math> is the number of bins in the sketch * <br><br> * * Refer to the <a href="http://www.itu.dk/people/pagh/papers/oddsketch.pdf">OddSketch paper</a> for details. * * @param other * @return */ public double estimateJaccardIndex(@Nonnull OddSketch<Hash> other) { Preconditions.checkArgument(other.getSize() == this.getSize()); Preconditions.checkArgument(other.elementsAdded == this.elementsAdded); if (this.elementsAdded == 0) return 1.0; int symmetricDifference = computeSymmetricDifference(other); int k = elementsAdded; int n = this.getSize(); double inner = 1.0 - (2.0 * symmetricDifference / n); if (inner <= 0.0) return 0.0; double ln = Math.log(inner); return 1.0 + (n / (4.0 * k)) * ln; } /** * The symmetric difference of two odd sketches denoted by * <math xmlns="http://www.w3.org/1998/Math/MathML"><mo>&#8710;</mo></math> or * <math xmlns="http://www.w3.org/1998/Math/MathML"><mo>&#8853;</mo></math> * symbols, is the operation resulting of unifying the two sets and subtracting the elements in the intersection. 
* <br><br> * So if you have sets * <math xmlns="http://www.w3.org/1998/Math/MathML"><msub><mi>S</mi><mn>1</mn></msub><mo>=</mo><mfenced open="{" close="}"><mrow><mn>1</mn><mo>,</mo><mo>&#160;</mo><mn>2</mn><mo>,</mo><mo>&#160;</mo><mn>3</mn><mo>,</mo><mo>&#160;</mo><mn>4</mn></mrow></mfenced></math> * and * <math xmlns="http://www.w3.org/1998/Math/MathML"><msub><mi>S</mi><mn>2</mn></msub><mo>=</mo><mfenced open="{" close="}"><mrow><mn>3</mn><mo>,</mo><mo>&#160;</mo><mn>4</mn><mo>,</mo><mo>&#160;</mo><mn>5</mn><mo>,</mo><mo>&#160;</mo><mn>6</mn></mrow></mfenced></math> * the symmetric difference is going to be the set * <math xmlns="http://www.w3.org/1998/Math/MathML"><mi>S</mi><mo>=</mo><mfenced open="{" close="}"><mrow><mn>1</mn><mo>,</mo><mo>&#160;</mo><mn>2</mn><mo>,</mo><mo>&#160;</mo><mn>5</mn><mo>,</mo><mo>&#160;</mo><mn>6</mn></mrow></mfenced></math> * * @param other * @return */ protected int computeSymmetricDifference(@Nonnull OddSketch<Hash> other) { BitSet sketch = (BitSet) this.getSketch().clone(); sketch.xor(other.getSketch()); return sketch.cardinality(); } protected int getBucket(Hash hash) { return hash.mod(size); } /** * Flips (XOR) the bit in the ith position of the sketch * * @param index The posistion in the sketch to be flipped */ protected void xorIthBit(int index) { Preconditions.checkArgument(index >= 0, "The index needs to be positive"); Preconditions.checkArgument(index < size, "Index has to be in the bounds set in the size parameter"); sketch.flip(index); } /** * Gets the size of the sketch. * * @return The size of the sketch */ protected int getSize() { return size; } /** * Sets the size of the sketch. Take into account, you should instantiate a new sketch with the * {@link #setSketch(BitSet)} method in case the new size is bigger than the old one. * <br> * Tipically, you would use this method in case you are building a (de)serialization mechanism for this class * * @param size New size of the sketch */ protected void setSize(int size) { this.size = size; } /** * Gets the sketch in the form of a {@link BitSet} * * @return The sketch representation */ protected BitSet getSketch() { return this.sketch; } /** * Sets the underlying sketch * <br> * Tipically, you would use this method in case you are building a (de)serialization mechanism for this class * * @param sketch The representation of the underlying sketch */ protected void setSketch(@Nonnull BitSet sketch) { this.sketch = sketch; } }
src/main/java/com/inigoillan/libanalytics/algorithms/oddsketch/OddSketch.java
package com.inigoillan.libanalytics.algorithms.oddsketch; import com.google.common.base.Preconditions; import com.inigoillan.libanalytics.algorithms.hashers.Hash; import org.apache.log4j.Logger; import javax.annotation.Nonnull; import java.util.BitSet; /** * For details, please refer to the <a href="http://www.itu.dk/people/pagh/papers/oddsketch.pdf">OddSketch paper</a> * * @param <K> The type of {@link Hash} elements this sketch accepts */ public class OddSketch<K extends Hash> { private static final Logger LOG = Logger.getLogger(OddSketch.class); private BitSet sketch; private int size; private int elementsAdded = 0; /** * Constructor * * @param size The size of the sketch */ public OddSketch(int size) { Preconditions.checkArgument(size >= 0); this.setSize(size); BitSet sketch = new BitSet(size); sketch.clear(); this.setSketch(sketch); } /** * Adds a hashed element to the sketch */ public void addHashed(@Nonnull K hashed) { int bucket = getBucket(hashed); xorIthBit(bucket); elementsAdded++; } /** * Estimates the set size of this Odd sketch. * Uses the Poisson Approximation by default to be consistent with the Jaccard Index estimation * * @return Returns the estimated set size */ public int estimateSetSize() { return this.estimateSetSizeMarkovApproximation(); } /** * Estimates the set size for this Odd sketch based on the Markov Chain Model: * * <math xmlns="http://www.w3.org/1998/Math/MathML"><mover><mi>m</mi><mo>^</mo></mover><mo>=</mo><mfrac><mrow><mi>ln</mi><mfenced><mrow><mn>1</mn><mo>-</mo><mn>2</mn><mi>z</mi><mo>/</mo><mi>n</mi></mrow></mfenced></mrow><mrow><mi>ln</mi><mfenced><mrow><mn>1</mn><mo>-</mo><mn>2</mn><mo>/</mo><mi>n</mi></mrow></mfenced></mrow></mfrac></math> * <br> * where <math><mi>m</mi></math> is the size of the set, * <math><mi>z</mi></math> is the number of odd bins in the sketch, * and <math><mi>n</mi></math> is the total number of bins in the sketch * <br><br> * * Refer to the <a href="http://www.itu.dk/people/pagh/papers/oddsketch.pdf">OddSketch paper</a> for details. * * @return The set size estimated by the Odd sketch using the Markov Chain Model */ protected int estimateSetSizeMarkovApproximation() { int z = sketch.cardinality(); LOG.debug(String.format("Sketch cardinality is %d", z)); double numerator = Math.log(1.0 - (2.0 * z / size)); double denominator = Math.log(1.0 - (2.0 / size)); double sizeEstimation = numerator / denominator; LOG.debug(String.format("When computing set size, the numerator is %f, denominator is %f and estimation is %f", numerator, denominator, sizeEstimation)); return (int) Math.round(sizeEstimation); } /** * Estimates the set size for this Odd sketch based on the Poisson approximation: * * <math xmlns="http://www.w3.org/1998/Math/MathML"><mover><mi>m</mi><mo>^</mo></mover><mo>=</mo><mo>-</mo><mfrac><mi>n</mi><mn>2</mn></mfrac><mi>ln</mi><mfenced><mrow><mn>1</mn><mo>-</mo><mn>2</mn><mi>z</mi><mo>/</mo><mi>n</mi></mrow></mfenced></math> * <br> * where <math><mi>m</mi></math> is the size of the set, * <math><mi>z</mi></math> is the number of odd bins in the sketch, * and <math><mi>n</mi></math> is the number of bins in the sketch * <br><br> * * Refer to the <a href="http://www.itu.dk/people/pagh/papers/oddsketch.pdf">OddSketch paper</a> for details. 
* * @return The set size estimated by the Odd sketch using the Markov Chain Model */ protected int estimateSetSizePoissonApproximation() { int z = sketch.cardinality(); LOG.debug(String.format("Sketch cardinality is %d", z)); double ln = Math.log(1.0 - (2.0 * z / size)); double sizeEstimation = -size * ln / 2; LOG.debug(String.format("Set size estimation is %f", sizeEstimation)); return (int) Math.round(sizeEstimation); } /** * Estimates the Jaccard Index for this Odd sketch: * * <math xmlns="http://www.w3.org/1998/Math/MathML"><mover><mi>J</mi><mo>^</mo></mover><mo>=</mo><mn>1</mn><mo>+</mo><mstyle displaystyle="false"><mfrac><mi>n</mi><mrow><mn>4</mn><mi>k</mi></mrow></mfrac></mstyle><mi>ln</mi><mfenced><mrow><mn>1</mn><mo>-</mo><mstyle displaystyle="false"><mfrac><mrow><mn>2</mn><mfenced open="|" close="|"><mrow><mi>o</mi><mi>d</mi><mi>d</mi><mo>(</mo><msub><mi>S</mi><mn>1</mn></msub><mo>)</mo><mo>&#8710;</mo><mi>o</mi><mi>d</mi><mi>d</mi><mo>(</mo><msub><mi>S</mi><mn>2</mn></msub><mo>)</mo></mrow></mfenced></mrow><mi>n</mi></mfrac></mstyle></mrow></mfenced><mspace linebreak="newline"/></math> * <br> * where <math><mi>J</mi></math> is the estimated Jaccard Index, * <math><mi>k</mi></math> is the number of elements added to the sketch, * <math><mfenced open="|" close="|"><mrow><mi>o</mi><mi>d</mi><mi>d</mi><mo>(</mo><msub><mi>S</mi><mn>1</mn></msub><mo>)</mo><mo>&#8710;</mo><mi>o</mi><mi>d</mi><mi>d</mi><mo>(</mo><msub><mi>S</mi><mn>2</mn></msub><mo>)</mo></mrow></mfenced><mspace linebreak="newline"/></math> is the symmetric difference (number of 1's in the xor-ed sketch) * and <math><mi>n</mi></math> is the number of bins in the sketch * <br><br> * * Refer to the <a href="http://www.itu.dk/people/pagh/papers/oddsketch.pdf">OddSketch paper</a> for details. * * @param other * @return */ public double estimateJaccardIndex(@Nonnull OddSketch<Hash> other) { Preconditions.checkArgument(other.getSize() == this.getSize()); Preconditions.checkArgument(other.elementsAdded == this.elementsAdded); if (this.elementsAdded == 0) return 1.0; int symmetricDifference = computeSymmetricDifference(other); int k = elementsAdded; int n = this.getSize(); double inner = 1.0 - (2.0 * symmetricDifference / n); if (inner <= 0.0) return 0.0; double ln = Math.log(inner); return 1.0 + (n / (4.0 * k)) * ln; } /** * The symmetric difference of two odd sketches denoted by * <math xmlns="http://www.w3.org/1998/Math/MathML"><mo>&#8710;</mo></math> or * <math xmlns="http://www.w3.org/1998/Math/MathML"><mo>&#8853;</mo></math> * symbols, is the operation resulting of unifying the two sets and subtracting the elements in the intersection. 
* <br><br> * So if you have sets * <math xmlns="http://www.w3.org/1998/Math/MathML"><msub><mi>S</mi><mn>1</mn></msub><mo>=</mo><mfenced open="{" close="}"><mrow><mn>1</mn><mo>,</mo><mo>&#160;</mo><mn>2</mn><mo>,</mo><mo>&#160;</mo><mn>3</mn><mo>,</mo><mo>&#160;</mo><mn>4</mn></mrow></mfenced></math> * and * <math xmlns="http://www.w3.org/1998/Math/MathML"><msub><mi>S</mi><mn>2</mn></msub><mo>=</mo><mfenced open="{" close="}"><mrow><mn>3</mn><mo>,</mo><mo>&#160;</mo><mn>4</mn><mo>,</mo><mo>&#160;</mo><mn>5</mn><mo>,</mo><mo>&#160;</mo><mn>6</mn></mrow></mfenced></math> * the symmetric difference is going to be the set * <math xmlns="http://www.w3.org/1998/Math/MathML"><mi>S</mi><mo>=</mo><mfenced open="{" close="}"><mrow><mn>1</mn><mo>,</mo><mo>&#160;</mo><mn>2</mn><mo>,</mo><mo>&#160;</mo><mn>5</mn><mo>,</mo><mo>&#160;</mo><mn>6</mn></mrow></mfenced></math> * * @param other * @return */ protected int computeSymmetricDifference(@Nonnull OddSketch<Hash> other) { BitSet sketch = (BitSet) this.getSketch().clone(); sketch.xor(other.getSketch()); return sketch.cardinality(); } protected int getBucket(Hash hash) { return hash.mod(size); } /** * Flips (XOR) the bit in the ith position of the sketch * * @param index The posistion in the sketch to be flipped */ protected void xorIthBit(int index) { Preconditions.checkArgument(index >= 0, "The index needs to be positive"); Preconditions.checkArgument(index < size, "Index has to be in the bounds set in the size parameter"); sketch.flip(index); } /** * Gets the size of the sketch. * * @return The size of the sketch */ protected int getSize() { return size; } /** * Sets the size of the sketch. Take into account, you should instantiate a new sketch with the * {@link #setSketch(BitSet)} method in case the new size is bigger than the old one. * <br> * Tipically, you would use this method in case you are building a (de)serialization mechanism for this class * * @param size New size of the sketch */ protected void setSize(int size) { this.size = size; } /** * Gets the sketch in the form of a {@link BitSet} * * @return The sketch representation */ protected BitSet getSketch() { return this.sketch; } /** * Sets the underlying sketch * <br> * Tipically, you would use this method in case you are building a (de)serialization mechanism for this class * * @param sketch The representation of the underlying sketch */ protected void setSketch(BitSet sketch) { this.sketch = sketch; } }
Added non-null annotation
src/main/java/com/inigoillan/libanalytics/algorithms/oddsketch/OddSketch.java
Added non-null annotation
<ide><path>rc/main/java/com/inigoillan/libanalytics/algorithms/oddsketch/OddSketch.java <ide> * <ide> * @param sketch The representation of the underlying sketch <ide> */ <del> protected void setSketch(BitSet sketch) { <add> protected void setSketch(@Nonnull BitSet sketch) { <ide> this.sketch = sketch; <ide> } <ide> }
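The javadoc above spells out the three estimators in MathML. As a standalone arithmetic check (this helper class is illustrative only and does not depend on the libanalytics types), the same formulas can be written directly in terms of the number of bins n, the number of odd bins z, the number of added elements k, and the symmetric difference:

public class OddSketchEstimators {

    /** Markov-chain estimate: ln(1 - 2z/n) / ln(1 - 2/n). */
    static double markovSetSize(int n, int z) {
        return Math.log(1.0 - (2.0 * z / n)) / Math.log(1.0 - (2.0 / n));
    }

    /** Poisson estimate: -(n / 2) * ln(1 - 2z/n). */
    static double poissonSetSize(int n, int z) {
        return -n / 2.0 * Math.log(1.0 - (2.0 * z / n));
    }

    /** Jaccard estimate: 1 + (n / 4k) * ln(1 - 2 * symmetricDifference / n). */
    static double jaccardIndex(int n, int k, int symmetricDifference) {
        return 1.0 + (n / (4.0 * k)) * Math.log(1.0 - (2.0 * symmetricDifference / n));
    }

    public static void main(String[] args) {
        int n = 1024; // bins in the sketch
        int z = 100;  // odd bins observed
        System.out.printf("Markov set size : %.1f%n", markovSetSize(n, z));
        System.out.printf("Poisson set size: %.1f%n", poissonSetSize(n, z));
        System.out.printf("Jaccard index   : %.3f%n", jaccardIndex(n, 128, 40));
    }
}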
Java
apache-2.0
0a690e2434164fea968ce53815f82d699f5d2269
0
morfeo8marc/mapdb,ringoluo/mapdb,olupas/MapDB,jankotek/mapdb,hangum/mapdb,pettermahlen/mapdb,jankotek/mapdb,mebigfatguy/mapdb,jankotek/MapDB,jankotek/MapDB,mebigfatguy/mapdb,SleimanJneidi/mapdb,codeaudit/mapdb,binarytemple/mapdb,cocosli/mapdb,lamfire/mapdb,olupas/MapDB,sumit784/mapdb,ffjiao/mapdb
package org.mapdb; /** * Transaction factory * * @author Jan Kotek */ public class TxMaker { /** marker for deleted records*/ protected static final Object DELETED = new Object(); /** parent engine under which modifications are stored */ protected org.mapdb.TxEngine engine; public TxMaker(org.mapdb.TxEngine engine) { if(engine==null) throw new IllegalArgumentException(); if(engine.isReadOnly()) throw new IllegalArgumentException("read only"); if(!engine.canRollback()) throw new IllegalArgumentException("no rollback"); this.engine = engine; } public DB makeTx(){ return new DB(engine.snapshot()); } public void close() { engine.close(); engine = null; } /** * Executes given block withing single transaction. * If block throws {@code TxRollbackException} execution is repeated until it does not fail. * * @param txBlock */ public void execute(TxBlock txBlock) { for(;;){ DB tx = makeTx(); try{ txBlock.tx(tx); if(!tx.isClosed()) tx.commit(); return; }catch(TxRollbackException e){ //failed, so try again if(!tx.isClosed()) tx.close(); } } } /** * Executes given block withing single transaction. * If block throws {@code TxRollbackException} execution is repeated until it does not fail. * * This method returns result returned by txBlock. * * @param txBlock */ public <A> A execute(Fun.Function1<A, DB> txBlock) { for(;;){ DB tx = makeTx(); try{ A a = txBlock.run(tx); if(!tx.isClosed()) tx.commit(); return a; }catch(TxRollbackException e){ //failed, so try again if(!tx.isClosed()) tx.close(); } } } }
src/main/java/org/mapdb/TxMaker.java
package org.mapdb; /** * Transaction factory * * @author Jan Kotek */ public class TxMaker { /** marker for deleted records*/ protected static final Object DELETED = new Object(); /** parent engine under which modifications are stored */ protected org.mapdb.TxEngine engine; public TxMaker(org.mapdb.TxEngine engine) { if(engine==null) throw new IllegalArgumentException(); if(engine.isReadOnly()) throw new IllegalArgumentException("read only"); if(!engine.canRollback()) throw new IllegalArgumentException("no rollback"); this.engine = engine; } public DB makeTx(){ return new DB(engine.snapshot()); } public void close() { engine.close(); engine = null; } /** * Executes given block withing single transaction. * If block throws {@code TxRollbackException} execution is repeated until it does not fail. * * @param txBlock */ public void execute(TxBlock txBlock) { for(;;){ DB tx = makeTx(); try{ txBlock.tx(tx); if(!tx.isClosed()) tx.commit(); return; }catch(TxRollbackException e){ //failed, so try again if(!tx.isClosed()) tx.close(); } } } }
TxMaker: execute can return a result
src/main/java/org/mapdb/TxMaker.java
TxMaker: execute can return a result
<ide><path>rc/main/java/org/mapdb/TxMaker.java <ide> } <ide> } <ide> <add> /** <add> * Executes given block withing single transaction. <add> * If block throws {@code TxRollbackException} execution is repeated until it does not fail. <add> * <add> * This method returns result returned by txBlock. <add> * <add> * @param txBlock <add> */ <add> public <A> A execute(Fun.Function1<A, DB> txBlock) { <add> for(;;){ <add> DB tx = makeTx(); <add> try{ <add> A a = txBlock.run(tx); <add> if(!tx.isClosed()) <add> tx.commit(); <add> return a; <add> }catch(TxRollbackException e){ <add> //failed, so try again <add> if(!tx.isClosed()) tx.close(); <add> } <add> } <add> } <ide> }
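A possible caller of the new execute(Fun.Function1<A, DB>) overload, sketched under the assumption that the surrounding MapDB 1.x API (DBMaker.newMemoryDB(), makeTxMaker(), DB.getHashMap()) is available; only the execute call itself comes from the commit above:

import java.util.Map;

import org.mapdb.DB;
import org.mapdb.DBMaker;
import org.mapdb.Fun;
import org.mapdb.TxMaker;

public class TxMakerExample {

    public static void main(String[] args) {
        // Assumption: DBMaker.newMemoryDB().makeTxMaker() is the MapDB 1.x way
        // to obtain a TxMaker; it is not part of the commit shown above.
        TxMaker txMaker = DBMaker.newMemoryDB().makeTxMaker();

        // The block is retried on TxRollbackException; its return value is
        // handed back by the new execute overload.
        Integer entries = txMaker.execute(new Fun.Function1<Integer, DB>() {
            @Override
            public Integer run(DB tx) {
                Map<String, String> settings = tx.getHashMap("settings");
                settings.put("theme", "dark");
                return settings.size();
            }
        });

        System.out.println("entries committed: " + entries);
        txMaker.close();
    }
}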
JavaScript
mit
e208ab7f0e497fb989c9341ab5b0cf4af3589bfc
0
le0pard/pgtune,le0pard/pgtune,le0pard/pgtune
// Example webpack configuration with asset fingerprinting in production. 'use strict'; const path = require('path'); const webpack = require('webpack'); const TerserPlugin = require('terser-webpack-plugin'); const MiniCssExtractPlugin = require('mini-css-extract-plugin'); const WorkboxPlugin = require('workbox-webpack-plugin'); const WebpackAssetsManifest = require('webpack-assets-manifest'); const browserList = require('./browserslist.config'); // set NODE_ENV=production on the environment to add asset fingerprints const currentEnv = process.env.NODE_ENV || 'development'; const isProduction = currentEnv === 'production'; const preScripts = { development: [], production: [] }; const preScriptsEnv = isProduction ? preScripts['production'] : preScripts['development']; const cssLoaders = [ MiniCssExtractPlugin.loader, { loader: 'css-loader', options: { modules: false, sourceMap: true } }, { loader: 'postcss-loader', options: { sourceMap: true, postcssOptions: (loaderContext) => { const plugins = [ ['postcss-import'], ['postcss-preset-env', { stage: 1, browsers: browserList, features: { 'custom-properties': { strict: false, warnings: false, preserve: true } } }], ['lost', { flexbox: 'flex' }], ['rucksack-css'], ['postcss-browser-reporter'], ['postcss-reporter'] ]; if (isProduction) { return { plugins: plugins.concat([ ['cssnano', { preset: 'default' }] ]) }; } return {plugins}; } } }, { loader: 'sass-loader', options: { sourceMap: true, webpackImporter: true, implementation: require('sass'), sassOptions: { fiber: require('fibers'), includePaths: [ path.join(__dirname, 'webpack', 'css') ] } } } ]; const config = { target: 'web', mode: currentEnv, performance: { hints: false }, entry: { 'app': preScriptsEnv.concat(['./webpack/app.js']) }, output: { // Build assets directly in to public/webpack/, let webpack know // that all webpacked assets start with webpack/ // must match config.webpack.output_dir path: path.join(__dirname, '.tmp', 'dist'), publicPath: '/', filename: isProduction ? '[name]-[chunkhash].js' : '[name].js' }, resolve: { modules: [ path.join(__dirname, 'webpack'), path.join(__dirname, 'node_modules') ], extensions: ['.js', '.jsx', '.json', '.css', '.sass'] }, module: { rules: [ { test: /\.(js|jsx)$/, exclude: /node_modules/, use: [ 'babel-loader' ] }, { test: /\.(gif|jpg|png|woff|woff2|eot|ttf|svg|ico)$/, use: [{ loader: 'url-loader', options: { limit: 10000, name: '[name]-[hash].[ext]', outputPath: 'assets/' } }] }, { test: /\.(css|scss|sass)$/, use: cssLoaders } ] }, plugins: [ new MiniCssExtractPlugin({ filename: isProduction ? 
'[name]-[contenthash].css' : '[name].css' }) ], optimization: { splitChunks: { cacheGroups: { styles: { name: 'styles', test: /\.css$/, chunks: 'all', enforce: true } } } } }; if (isProduction) { config.plugins.push( new webpack.NoEmitOnErrorsPlugin(), new webpack.DefinePlugin({ 'process.env': {NODE_ENV: JSON.stringify('production')} }), new webpack.optimize.ModuleConcatenationPlugin() ); config.optimization = config.optimization || {}; config.optimization.minimizer = [ new TerserPlugin({ parallel: 2 }) ]; // Source maps config.devtool = 'source-map'; } else { config.optimization = config.optimization || {}; config.optimization.moduleIds = 'named'; // Source maps config.devtool = 'inline-source-map'; } config.plugins.push( new WebpackAssetsManifest({ output: 'assets-manifest.json', publicPath: config.output.publicPath, writeToDisk: true }), new WorkboxPlugin.InjectManifest({ swSrc: './webpack/sw.js', swDest: 'sw.js', compileSrc: true, maximumFileSizeToCacheInBytes: (isProduction ? 2097152 : 15730000) }) ) module.exports = config;
webpack.config.js
// Example webpack configuration with asset fingerprinting in production. 'use strict'; const path = require('path'); const webpack = require('webpack'); const TerserPlugin = require('terser-webpack-plugin'); const MiniCssExtractPlugin = require('mini-css-extract-plugin'); const WorkboxPlugin = require('workbox-webpack-plugin'); const WebpackAssetsManifest = require('webpack-assets-manifest'); const browserList = require('./browserslist.config'); // set NODE_ENV=production on the environment to add asset fingerprints const currentEnv = process.env.NODE_ENV || 'development'; const isProduction = currentEnv === 'production'; const preScripts = { development: [], production: [] }; const preScriptsEnv = isProduction ? preScripts['production'] : preScripts['development']; const cssLoaders = [ MiniCssExtractPlugin.loader, { loader: 'css-loader', options: { modules: false, sourceMap: true } }, { loader: 'postcss-loader', options: { sourceMap: true, postcssOptions: (loaderContext) => { const plugins = [ ['postcss-import'], ['postcss-preset-env', { stage: 1, browsers: browserList, features: { 'custom-properties': { strict: false, warnings: false, preserve: true } } }], ['lost', { flexbox: 'flex' }], ['rucksack-css'], ['postcss-browser-reporter'], ['postcss-reporter'] ]; if (isProduction) { return { plugins: plugins.concat([ ['cssnano', { preset: 'default' }] ]) }; } return {plugins}; } } }, { loader: 'sass-loader', options: { sourceMap: true, webpackImporter: true, implementation: require('sass'), sassOptions: { fiber: require('fibers'), includePaths: [ path.join(__dirname, 'webpack', 'css') ] } } } ]; const config = { target: 'web', mode: currentEnv, performance: { hints: false }, entry: { 'app': preScriptsEnv.concat(['./webpack/app.js']) }, output: { // Build assets directly in to public/webpack/, let webpack know // that all webpacked assets start with webpack/ // must match config.webpack.output_dir path: path.join(__dirname, '.tmp', 'dist'), publicPath: '/', filename: isProduction ? '[name]-[chunkhash].js' : '[name].js' }, resolve: { modules: [ path.join(__dirname, 'webpack'), path.join(__dirname, 'node_modules') ], extensions: ['.js', '.jsx', '.json', '.css', '.sass'] }, module: { rules: [ { test: /\.(js|jsx)$/, exclude: /node_modules/, use: [ 'babel-loader' ] }, { test: /\.(gif|jpg|png|woff|woff2|eot|ttf|svg|ico)$/, use: [{ loader: 'url-loader', options: { limit: 10000, name: '[name]-[hash].[ext]', outputPath: 'assets/' } }] }, { test: /\.(css|scss|sass)$/, use: cssLoaders } ] }, plugins: [ new MiniCssExtractPlugin({ filename: isProduction ? 
'[name]-[contenthash].css' : '[name].css' }) ], optimization: { splitChunks: { cacheGroups: { styles: { name: 'styles', test: /\.css$/, chunks: 'all', enforce: true } } } } }; if (isProduction) { config.plugins.push( new webpack.NoEmitOnErrorsPlugin(), new webpack.DefinePlugin({ 'process.env': {NODE_ENV: JSON.stringify('production')} }), new webpack.optimize.ModuleConcatenationPlugin() ); config.optimization = config.optimization || {}; config.optimization.minimizer = [ new TerserPlugin({ parallel: 2 }) ]; // Source maps config.devtool = 'source-map'; } else { config.optimization = config.optimization || {}; config.optimization.moduleIds = 'named'; // Source maps config.devtool = 'inline-source-map'; } config.plugins.push( new WebpackAssetsManifest({ output: 'assets-manifest.json', publicPath: config.output.publicPath, writeToDisk: true }), new WorkboxPlugin.InjectManifest({ swSrc: './webpack/sw.js', swDest: 'sw.js', compileSrc: true, maximumFileSizeToCacheInBytes: (isProduction ? 3145728 : 15730000) }) ) module.exports = config;
update node.js and limit
webpack.config.js
update node.js and limit
<ide><path>ebpack.config.js <ide> swSrc: './webpack/sw.js', <ide> swDest: 'sw.js', <ide> compileSrc: true, <del> maximumFileSizeToCacheInBytes: (isProduction ? 3145728 : 15730000) <add> maximumFileSizeToCacheInBytes: (isProduction ? 2097152 : 15730000) <ide> }) <ide> ) <ide>
Java
apache-2.0
b120ec37fda55110a4092a900071eaa722d14f48
0
mulesoft-consulting/sumtotal-connector,mulesoft-consulting/sumtotal-connector,mulesoft-consulting/sumtotal-connector,mulesoft-consulting/sumtotal-connector,mulesoft-consulting/sumtotal-connector
package com.mulesoft.demo.sumtotal.client; import com.sumtotalsystems.sumtotal7.sumtotalbo.User; import com.sumtotalsystems.sumtotal7.sumtotalws.AspenContextIdentity; import com.sumtotalsystems.sumtotal7.sumtotalws.UserSecurityContext; import com.sumtotalsystems.sumtotal7.sumtotalws.UserToken; import com.sumtotalsystems.sumtotal7.sumtotalws.authentication.*; import com.sumtotalsystems.sumtotal7.sumtotalws.usermanagement.CreateUser; import com.sumtotalsystems.sumtotal7.sumtotalws.usermanagement.CreateUserResponse; import com.sumtotalsystems.sumtotal7.sumtotalws.usermanagement.UserManagement; import com.sumtotalsystems.sumtotal7.sumtotalws.usermanagement.UserManagementSoap; ; import javax.jws.WebParam; import javax.xml.datatype.XMLGregorianCalendar; import javax.xml.ws.Holder; import javax.xml.ws.soap.SOAPFaultException; /** * */ public class SanityCheck { static final String USER = "wstest"; static final String PASS = "learning"; static final String DOMAIN_ID = "3"; static final int POPULATION = 1; public static void main(String [] args) throws Exception { UserToken token = doAuth(); System.out.println("Token is " + token.getValue()); User authUser = getUser(USER, token); System.out.println("Logged in User's sec role id is " + authUser.getSecurityRoleId()); String newUserName = "MuleSoft_" + new Object().hashCode(); User createdUser = doCreateUser(newUserName, authUser, token); System.out.println("New User is " + createdUser.getUsername()); logOff(token); //brief test to make sure this fails when logged out try { getUser(USER, token); } catch (SOAPFaultException sfe) { System.out.println("User token successfully invalidated"); //sfe.printStackTrace(); return; } System.out.println("If this line is reached an invalidated token has been used successfully - that's not a good thing"); } static UserSecurityContext getUserSecurityContext(UserToken userToken) { UserSecurityContext securityContext = new UserSecurityContext(); securityContext.setToken(userToken); return securityContext; } /** * RefUser is a an existing User object whose values we can copy (eg, TZ preference) as they are painful to lookup * manually. 
*/ static User doCreateUser(String userName, User refUser, UserToken userToken) { UserManagementSoap soapClient = new UserManagement().getUserManagementSoap(); User user = new User(); user.setUsername(userName); user.setFirstName("Test"); user.setLastName("User"); user.setIsLoginEnabled(true); user.setPassword("learning"); user.setDomainId("3"); user.setSecurityRoleId(refUser.getSecurityRoleId()); user.setLanguagePreferenceId(refUser.getLanguagePreferenceId()); user.setTimeZonePreferenceId(refUser.getTimeZonePreferenceId()); User created = soapClient.createUser(user, getUserSecurityContext(userToken)); return created; } static User getUser(String userName, UserToken token) { UserManagementSoap soapClient = new UserManagement().getUserManagementSoap(); return soapClient.getUserByUserName(DOMAIN_ID, USER, POPULATION, getUserSecurityContext(token)); } static UserToken doAuth() { AuthenticationSoap soapClient = new Authentication().getAuthenticationSoap(); UserCredentials credentials = new UserCredentials(); credentials.setUsername(USER); credentials.setPasscode(PASS); credentials.setAuthenticationType(UserAuthenticationType.NOT_SPECIFIED); credentials.setAccountType(UserType.NOT_SPECIFIED); Holder<UserToken> holder = new Holder<UserToken>(new UserToken()); soapClient.login(credentials, holder); return holder.value; } static void logOff(UserToken token) { AuthenticationSoap soapClient = new Authentication().getAuthenticationSoap(); soapClient.logout(getUserSecurityContext(token)); } }
src/main/java/com/mulesoft/demo/sumtotal/client/SanityCheck.java
package com.mulesoft.demo.sumtotal.client; import com.sumtotalsystems.sumtotal7.sumtotalbo.User; import com.sumtotalsystems.sumtotal7.sumtotalws.AspenContextIdentity; import com.sumtotalsystems.sumtotal7.sumtotalws.UserSecurityContext; import com.sumtotalsystems.sumtotal7.sumtotalws.UserToken; import com.sumtotalsystems.sumtotal7.sumtotalws.authentication.*; import com.sumtotalsystems.sumtotal7.sumtotalws.usermanagement.CreateUser; import com.sumtotalsystems.sumtotal7.sumtotalws.usermanagement.CreateUserResponse; import com.sumtotalsystems.sumtotal7.sumtotalws.usermanagement.UserManagement; import com.sumtotalsystems.sumtotal7.sumtotalws.usermanagement.UserManagementSoap; ; import javax.jws.WebParam; import javax.xml.datatype.XMLGregorianCalendar; import javax.xml.ws.Holder; /** * */ public class SanityCheck { static final String USER = "wstest"; static final String PASS = "learning"; static final String DOMAIN_ID = "3"; static final int POPULATION = 1; public static void main(String [] args) throws Exception { UserToken token = doAuth(); System.out.println("Token is " + token.getValue()); User authUser = getUser(USER, token); System.out.println("Logged in User's sec role id is " + authUser.getSecurityRoleId()); String newUserName = "MuleSoft_" + new Object().hashCode(); User createdUser = doCreateUser(newUserName, authUser, token); System.out.println("New User is " + createdUser.getUsername()); } static UserSecurityContext getUserSecurityContext(UserToken userToken) { UserSecurityContext securityContext = new UserSecurityContext(); securityContext.setToken(userToken); return securityContext; } /** * RefUser is a an existing User object whose values we can copy (eg, TZ preference) as they are painful to lookup * manually. */ static User doCreateUser(String userName, User refUser, UserToken userToken) { UserManagementSoap soapClient = new UserManagement().getUserManagementSoap(); User user = new User(); user.setUsername(userName); user.setFirstName("Test"); user.setLastName("User"); user.setIsLoginEnabled(true); user.setPassword("learning"); user.setDomainId("3"); user.setSecurityRoleId(refUser.getSecurityRoleId()); user.setLanguagePreferenceId(refUser.getLanguagePreferenceId()); user.setTimeZonePreferenceId(refUser.getTimeZonePreferenceId()); User created = soapClient.createUser(user, getUserSecurityContext(userToken)); return created; } static User getUser(String userName, UserToken token) { UserManagementSoap soapClient = new UserManagement().getUserManagementSoap(); return soapClient.getUserByUserName(DOMAIN_ID, USER, POPULATION, getUserSecurityContext(token)); } static UserToken doAuth() { AuthenticationSoap soapClient = new Authentication().getAuthenticationSoap(); UserCredentials credentials = new UserCredentials(); credentials.setUsername(USER); credentials.setPasscode(PASS); credentials.setAuthenticationType(UserAuthenticationType.NOT_SPECIFIED); credentials.setAccountType(UserType.NOT_SPECIFIED); Holder<UserToken> holder = new Holder<UserToken>(new UserToken()); soapClient.login(credentials, holder); return holder.value; } }
added WS logout test
src/main/java/com/mulesoft/demo/sumtotal/client/SanityCheck.java
added WS logout test
<ide><path>rc/main/java/com/mulesoft/demo/sumtotal/client/SanityCheck.java <ide> import javax.jws.WebParam; <ide> import javax.xml.datatype.XMLGregorianCalendar; <ide> import javax.xml.ws.Holder; <add>import javax.xml.ws.soap.SOAPFaultException; <ide> <ide> /** <ide> * <ide> String newUserName = "MuleSoft_" + new Object().hashCode(); <ide> User createdUser = doCreateUser(newUserName, authUser, token); <ide> System.out.println("New User is " + createdUser.getUsername()); <add> logOff(token); <add> <add> //brief test to make sure this fails when logged out <add> try { <add> getUser(USER, token); <add> } catch (SOAPFaultException sfe) { <add> System.out.println("User token successfully invalidated"); <add> //sfe.printStackTrace(); <add> return; <add> } <add> System.out.println("If this line is reached an invalidated token has been used successfully - that's not a good thing"); <ide> <ide> } <ide> <ide> soapClient.login(credentials, holder); <ide> return holder.value; <ide> } <add> <add> static void logOff(UserToken token) { <add> AuthenticationSoap soapClient = new Authentication().getAuthenticationSoap(); <add> soapClient.logout(getUserSecurityContext(token)); <add> } <ide> }
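A hedged variant of the flow exercised above (this class is not part of the repository): because SanityCheck's helper methods are package-private statics, a sibling class in the same package can reuse them and wrap the calls in try/finally so the token is always invalidated, even when a check throws:

package com.mulesoft.demo.sumtotal.client;

import com.sumtotalsystems.sumtotal7.sumtotalbo.User;
import com.sumtotalsystems.sumtotal7.sumtotalws.UserToken;

public class SanityCheckWithFinally {

    public static void main(String[] args) {
        UserToken token = SanityCheck.doAuth();
        try {
            User authUser = SanityCheck.getUser(SanityCheck.USER, token);
            System.out.println("Logged in User's sec role id is " + authUser.getSecurityRoleId());
        } finally {
            // Runs even if getUser throws, so the token is always invalidated.
            SanityCheck.logOff(token);
        }
    }
}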
JavaScript
mit
5760b3b696ec13e20e57f02d4d0d20d1df91842b
0
marcosmoura/generator-scaffold,marcosmoura/generator-scaffold
(function() { 'use strict'; var util = require('util'), path = require('path'), slug = require('slug'), esformatter = require('esformatter'), esOptions = { indent: { value: ' ' }, lineBreak: { after: { 'ArrayExpressionComma' : 1 } } }, fs = require('fs'), yeoman = require('yeoman-generator'), chalk = require('chalk'); var ScaffoldGenerator = module.exports = function ScaffoldGenerator(args, options) { var _this = this; yeoman.generators.Base.apply(this, arguments); this.on('end', function() { this.installDependencies({ skipInstall: options['skipInstall'], callback: function() { var command = _this.spawnCommand('npm', ['install', 'glob']); command.on('exit', function() { _this.log(chalk.cyan(' \n \n All done and no errors! Enjoy! \n \n')); _this.spawnCommand('grunt'); }); } }); }); this.pkg = JSON.parse(this.readFileAsString(path.join(__dirname, '../package.json'))); }; util.inherits(ScaffoldGenerator, yeoman.generators.Base); ScaffoldGenerator.prototype.init = function init() { var cb = this.async(); this.log(chalk.cyan('\t \t ___ __ __ _ _ ')); this.log(chalk.cyan('\t \t / __| __ __ _ / _|/ _|___| |__| |')); this.log(chalk.cyan('\t \t \\__ \\/ _/ _` | _| _/ _ \\ / _` |')); this.log(chalk.cyan('\t \t |___/\\__\\__,_|_| |_| \\___/_\\__,_|')); this.log('\n'); this.log(chalk.cyan('\t \t [ Welcome to Scaffold Generator ] \n \n')); this.log(chalk.green('I will guide you to generate your best workflow. Come with me... \n \n')); cb(); }; ScaffoldGenerator.prototype.askFor = function askFor() { var cb = this.async(), _this = this, attempts = { name: 0, description: 0, members: 0 }, prompts = [{ name: 'projectName', message: 'What is the name of your project?', validate: function(input) { var done = this.async(); if (input.trim() === '') { if (attempts.name === 0) { attempts.name = 1; done('Hey dude! You forgot to enter the project name!'); } else if (attempts.name === 1) { attempts.name = 2; done('Come on. Just write the name of the project.'); } else if (attempts.name === 2) { attempts.name = 3; done('Alright. No problem. I\'ll wait here.'); } else if (attempts.name === 3) { attempts.name = 4; done('Come on buddy! Write that name. NOW!'); } else { done('Ok! :('); } return; } done(true); } }, { name: 'projectDescription', message: 'What is the description?', validate: function(input) { var done = this.async(); if (input.trim() === '') { if (attempts.name > 0) { done('You also forgot the description.'); } else { if (attempts.description === 0) { attempts.description = 1; done('You forgot the description. Write here.'); } else if (attempts.description === 1) { done('You forgot the description again! Just write.'); } } return; } done(true); } }, { name: 'projectMember', message: 'What are people going to work on this project? (Separated by commas)', validate: function(input) { var done = this.async(); if (input.trim() === '') { if (attempts.name > 0 && attempts.description > 0) { done('Forgot again? :/ Who will work with you on this? Write separating the names with commas.'); } else { if (attempts.members === 0) { attempts.members = 1; done('Hey man. Who will work with you on this? 
Write separating the names with commas.'); } else if (attempts.members === 1) { done('Type the name of who will work with you, separated by commas.'); } } return; } done(true); } }, { type: 'list', name: 'projectType', message: 'What kind of project?', choices: ['Mobile Only', 'Web Only', 'Responsive', 'Single Page'], default: 0 }, { type: 'confirm', name: 'jquery', message: 'Want to include jQuery?', default: 0 }]; this.prompt(prompts, function(props) { for(var item in props) { this[item] = props[item]; } this.log(chalk.yellow(' \n \n Good! Now I will download everything you need. Time to take a coffee! \n \n')); cb(); }.bind(this)); }; ScaffoldGenerator.prototype.getScaffoldCore = function getScaffoldCore() { var cb = this.async(); this.log(chalk.green('\n \n Downloading core of scaffold')); this.tarball('https://github.com/marcosmoura/scaffold/archive/v2.zip', '.', cb); }; ScaffoldGenerator.prototype.processPackage = function processPackage() { var cb = this.async(), pkgPath = path.join(this.env.cwd, 'package.json'), pkg = JSON.parse(this.readFileAsString(pkgPath)); pkg.projectName = this.projectName; pkg.name = slug(this.projectName.toLowerCase()); pkg.description = this.projectDescription; pkg.developers = this.projectMember; var dev = this.projectMember.split(','), devList = []; for(var member in dev) { devList.push({ 'name': dev[member].trim() }); } pkg.author = devList; fs.unlink(pkgPath); this.write(pkgPath, JSON.stringify(pkg, null, 2)); cb(); }; ScaffoldGenerator.prototype.processBower = function processBower() { var cb = this.async(), bowerJson = path.join(this.env.cwd, 'bower.json'), bower = JSON.parse(this.readFileAsString(bowerJson)); if (this.projectType === 'Web Only') { delete bower.dependencies.fastclick; } if (!this.jquery) { delete bower.dependencies.jquery; } fs.unlink(bowerJson); this.write(bowerJson, JSON.stringify(bower, null, 2)); cb(); }; ScaffoldGenerator.prototype.processGruntOptions = function processGruntOptions() { var cb = this.async(), gruntPath = path.join(this.env.cwd, 'grunt'); if (this.projectType === 'Single Page') { var buildTask = path.join(gruntPath, 'tasks/build.js'), build = this.readFileAsString(buildTask), stagingTask = path.join(gruntPath, 'tasks/default.js'), staging = this.readFileAsString(stagingTask), build = this.readFileAsString(buildTask), watchOption = path.join(gruntPath, 'options/watch.js'), watch = this.readFileAsString(watchOption); build = build.replace('\'assemble:build\',', ''); build = build.replace('\'clean:build\',', '\'clean:build\', \'copy:buildHtml\','); fs.unlink(buildTask); this.write(buildTask, esformatter.format(build, esOptions)); staging = staging.replace('\'assemble:staging\',', '\'newer:copy:stagingHtml\','); fs.unlink(stagingTask); this.write(stagingTask, esformatter.format(staging, esOptions)); this.write(buildTask, esformatter.format(build, esOptions)); watch = watch.replace('\'assemble:staging\',', '\'newer:copy:stagingHtml\','); fs.unlink(watch); this.write(watch, esformatter.format(watch, esOptions)); fs.unlink(path.join(gruntPath, 'options/assemble.js')); } cb(); }; ScaffoldGenerator.prototype.getScaffoldVersion = function getScaffoldVersion() { var cb = this.async(); if (this.projectType === 'Mobile Only') { this.log(chalk.green('\n \n Downloading Mobile version')); this.tarball('https://github.com/marcosmoura/scaffold-mobile/archive/master.zip', 'dev/', cb); } else if (this.projectType === 'Web Only') { this.log(chalk.green('\n \n Downloading Web version')); 
this.tarball('https://github.com/marcosmoura/scaffold-web/archive/master.zip', 'dev/', cb); } else if (this.projectType === 'Responsive') { this.log(chalk.green('\n \n Downloading Responsive version')); this.tarball('https://github.com/marcosmoura/scaffold-responsive/archive/master.zip', 'dev/', cb); } else if (this.projectType === 'Single Page') { this.log(chalk.green('\n \n Downloading Single Page version')); this.tarball('https://github.com/marcosmoura/scaffold-singlepage/archive/master.zip', 'dev/', cb); } }; ScaffoldGenerator.prototype.garbageRemoval = function garbageRemoval() { var cb = this.async(), devPath = path.join(this.env.cwd, 'dev'); this.log(chalk.yellow('\n \n Removing garbage and temporary files')); fs.unlink(path.join(devPath, 'LICENSE')); fs.unlink(path.join(devPath, 'README.md')); this.log(chalk.green('\n \n Now I will install the dependencies. This may take a while. Time to go to the bathroom! \n \n ')); cb(); }; })();
app/index.js
(function() { 'use strict'; var util = require('util'), path = require('path'), slug = require('slug'), esformatter = require('esformatter'), esOptions = { indent: { value: ' ' }, lineBreak: { after: { "ArrayExpressionComma" : 1 } } }, fs = require('fs'), yeoman = require('yeoman-generator'), chalk = require('chalk'); var ScaffoldGenerator = module.exports = function ScaffoldGenerator(args, options) { var _this = this; yeoman.generators.Base.apply(this, arguments); this.on('end', function() { this.installDependencies({ skipInstall: options['skipInstall'], callback: function() { var command = _this.spawnCommand('npm', ['install', 'glob']); command.on('exit', function() { _this.log(chalk.cyan(' \n \n All done and no errors! Enjoy! \n \n')); _this.spawnCommand('grunt'); }); } }); }); this.pkg = JSON.parse(this.readFileAsString(path.join(__dirname, '../package.json'))); }; util.inherits(ScaffoldGenerator, yeoman.generators.Base); ScaffoldGenerator.prototype.init = function init() { var cb = this.async(); this.log(chalk.cyan('\t \t ___ __ __ _ _ ')); this.log(chalk.cyan('\t \t / __| __ __ _ / _|/ _|___| |__| |')); this.log(chalk.cyan('\t \t \\__ \\/ _/ _` | _| _/ _ \\ / _` |')); this.log(chalk.cyan('\t \t |___/\\__\\__,_|_| |_| \\___/_\\__,_|')); this.log('\n'); this.log(chalk.cyan('\t \t [ Welcome to Scaffold Generator ] \n \n')); this.log(chalk.green('I will guide you to generate your best workflow. Come with me... \n \n')); cb(); }; ScaffoldGenerator.prototype.askFor = function askFor() { var cb = this.async(), _this = this, attempts = { name: 0, description: 0, members: 0 }, prompts = [{ name: 'projectName', message: 'What is the name of your project?', validate: function(input) { var done = this.async(); if (input.trim() === '') { if (attempts.name === 0) { attempts.name = 1; done('Hey dude! You forgot to enter the project name!'); } else if (attempts.name === 1) { attempts.name = 2; done('Come on. Just write the name of the project.'); } else if (attempts.name === 2) { attempts.name = 3; done('Alright. No problem. I\'ll wait here.'); } else if (attempts.name === 3) { attempts.name = 4; done('Come on buddy! Write that name. NOW!'); } else { done('Ok! :('); } return; } done(true); } }, { name: 'projectDescription', message: 'What is the description?', validate: function(input) { var done = this.async(); if (input.trim() === '') { if (attempts.name > 0) { done('You also forgot the description.'); } else { if (attempts.description === 0) { attempts.description = 1; done('You forgot the description. Write here.'); } else if (attempts.description === 1) { done('You forgot the description again! Just write.'); } } return; } done(true); } }, { name: 'projectMember', message: 'What are people going to work on this project? (Separated by commas)', validate: function(input) { var done = this.async(); if (input.trim() === '') { if (attempts.name > 0 && attempts.description > 0) { done('Forgot again? :/ Who will work with you on this? Write separating the names with commas.'); } else { if (attempts.members === 0) { attempts.members = 1; done('Hey man. Who will work with you on this? 
Write separating the names with commas.'); } else if (attempts.members === 1) { done('Type the name of who will work with you, separated by commas.'); } } return; } done(true); } }, { type: 'list', name: 'projectType', message: 'What kind of project?', choices: ['Mobile Only', 'Web Only', 'Responsive', 'Single Page'], default: 0 }, { type: 'confirm', name: 'jquery', message: 'Want to include jQuery?', default: 0 }]; this.prompt(prompts, function(props) { for(var item in props) { this[item] = props[item]; } this.log(chalk.yellow(' \n \n Good! Now I will download everything you need. Time to take a coffee! \n \n')); cb(); }.bind(this)); }; ScaffoldGenerator.prototype.getScaffoldCore = function getScaffoldCore() { var cb = this.async(); this.log(chalk.green('\n \n Downloading core of scaffold')); this.tarball('https://github.com/marcosmoura/scaffold/archive/v2.zip', '.', cb); }; ScaffoldGenerator.prototype.processPackage = function processPackage() { var cb = this.async(), pkgPath = path.join(this.env.cwd, 'package.json'), pkg = JSON.parse(this.readFileAsString(pkgPath)); pkg.projectName = this.projectName; pkg.name = slug(this.projectName.toLowerCase()); pkg.description = this.projectDescription; pkg.developers = this.projectMember; var dev = this.projectMember.split(','), devList = []; for(var member in dev) { devList.push({ 'name': dev[member].trim() }); } pkg.author = devList; fs.unlink(pkgPath); this.write(pkgPath, JSON.stringify(pkg, null, 2)); cb(); }; ScaffoldGenerator.prototype.processBower = function processBower() { var cb = this.async(), bowerJson = path.join(this.env.cwd, 'bower.json'), bower = JSON.parse(this.readFileAsString(bowerJson)); if (this.projectType === 'Web Only') { delete bower.dependencies.fastclick; } if (!this.jquery) { delete bower.dependencies.jquery; } fs.unlink(bowerJson); this.write(bowerJson, JSON.stringify(bower, null, 2)); cb(); }; ScaffoldGenerator.prototype.processGruntOptions = function processGruntOptions() { var cb = this.async(), gruntPath = path.join(this.env.cwd, 'grunt'); if (this.projectType === 'Single Page') { var buildTask = path.join(gruntPath, 'tasks/build.js'), build = this.readFileAsString(buildTask), stagingTask = path.join(gruntPath, 'tasks/default.js'), staging = this.readFileAsString(stagingTask), build = this.readFileAsString(buildTask), watchOption = path.join(gruntPath, 'options/watch.js'), watch = this.readFileAsString(watchOption); build = build.replace("'assemble:build',", ''); build = build.replace("'clean:build',", "'clean:build', 'copy:buildHtml',"); fs.unlink(buildTask); this.write(buildTask, esformatter.format(build, esOptions)); staging = staging.replace("'assemble:staging',", "'newer:copy:stagingHtml',"); fs.unlink(stagingTask); this.write(stagingTask, esformatter.format(staging, esOptions)); this.write(buildTask, esformatter.format(build, esOptions)); watch = watch.replace("'assemble:staging',", "'newer:copy:stagingHtml',"); fs.unlink(watch); this.write(watch, esformatter.format(watch, esOptions)); fs.unlink(path.join(gruntPath, 'options/assemble.js')); } cb(); }; ScaffoldGenerator.prototype.getScaffoldVersion = function getScaffoldVersion() { var cb = this.async(); if (this.projectType === 'Mobile Only') { this.log(chalk.green('\n \n Downloading Mobile version')); this.tarball('https://github.com/marcosmoura/scaffold-mobile/archive/master.zip', 'dev/', cb); } else if (this.projectType === 'Web Only') { this.log(chalk.green('\n \n Downloading Web version')); 
this.tarball('https://github.com/marcosmoura/scaffold-web/archive/master.zip', 'dev/', cb); } else if (this.projectType === 'Responsive') { this.log(chalk.green('\n \n Downloading Responsive version')); this.tarball('https://github.com/marcosmoura/scaffold-responsive/archive/master.zip', 'dev/', cb); } else if (this.projectType === 'Single Page') { this.log(chalk.green('\n \n Downloading Single Page version')); this.tarball('https://github.com/marcosmoura/scaffold-singlepage/archive/master.zip', 'dev/', cb); } }; ScaffoldGenerator.prototype.garbageRemoval = function garbageRemoval() { var cb = this.async(), devPath = path.join(this.env.cwd, 'dev'); this.log(chalk.yellow('\n \n Removing garbage and temporary files')); fs.unlink(path.join(devPath, 'LICENSE')); fs.unlink(path.join(devPath, 'README.md')); this.log(chalk.green('\n \n Now I will install the dependencies. This may take a while. Time to go to the bathroom! \n \n ')); cb(); }; })();
Use single quotes instead of double
app/index.js
Use single quotes instead of double
<ide><path>pp/index.js <ide> }, <ide> lineBreak: { <ide> after: { <del> "ArrayExpressionComma" : 1 <add> 'ArrayExpressionComma' : 1 <ide> } <ide> } <ide> }, <ide> watchOption = path.join(gruntPath, 'options/watch.js'), <ide> watch = this.readFileAsString(watchOption); <ide> <del> build = build.replace("'assemble:build',", ''); <del> build = build.replace("'clean:build',", "'clean:build', 'copy:buildHtml',"); <add> build = build.replace('\'assemble:build\',', ''); <add> build = build.replace('\'clean:build\',', '\'clean:build\', \'copy:buildHtml\','); <ide> fs.unlink(buildTask); <ide> this.write(buildTask, esformatter.format(build, esOptions)); <ide> <del> staging = staging.replace("'assemble:staging',", "'newer:copy:stagingHtml',"); <add> staging = staging.replace('\'assemble:staging\',', '\'newer:copy:stagingHtml\','); <ide> fs.unlink(stagingTask); <ide> this.write(stagingTask, esformatter.format(staging, esOptions)); <ide> this.write(buildTask, esformatter.format(build, esOptions)); <ide> <del> watch = watch.replace("'assemble:staging',", "'newer:copy:stagingHtml',"); <add> watch = watch.replace('\'assemble:staging\',', '\'newer:copy:stagingHtml\','); <ide> fs.unlink(watch); <ide> this.write(watch, esformatter.format(watch, esOptions)); <ide>
Java
mit
2a73245d882bf17264774a3967f7fecd8ae49a7d
0
fbelzunc/build-blocker-plugin,jenkinsci/build-blocker-plugin,fbelzunc/build-blocker-plugin,jenkinsci/build-blocker-plugin
/* * The MIT License * * Copyright (c) 2004-2011, Sun Microsystems, Inc., Frederik Fromm * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package hudson.plugins.buildblocker; import hudson.Extension; import hudson.matrix.MatrixConfiguration; import hudson.model.AbstractProject; import hudson.model.Job; import hudson.model.Node; import hudson.model.Queue; import hudson.model.queue.CauseOfBlockage; import hudson.model.queue.QueueTaskDispatcher; import hudson.model.queue.SubTask; import javax.annotation.CheckForNull; import java.util.logging.Logger; import static java.util.logging.Level.FINE; /** * Queue task dispatcher that evaluates the given blocking jobs in the config of the * actual job. If a blocking job is detected, the actual job will stay in the build queue. */ @Extension public class BuildBlockerQueueTaskDispatcher extends QueueTaskDispatcher { private static final Logger LOG = Logger.getLogger(BuildBlockerQueueTaskDispatcher.class.getName()); private MonitorFactory monitorFactory; public BuildBlockerQueueTaskDispatcher() { monitorFactory = new DefaultMonitorFactory(); } //default scope for testability BuildBlockerQueueTaskDispatcher(MonitorFactory monitorFactory) { this.monitorFactory = monitorFactory; } /** * Called whenever {@link hudson.model.Queue} is considering if {@link hudson.model.Queue.Item} is ready to * execute immediately * (which doesn't necessarily mean that it gets executed right away &mdash; it's still subject to * executor availability), or if it should be considered blocked. * <p/> * <p/> * Compared to {@link #canTake(hudson.model.Node, hudson.model.Queue.BuildableItem)}, this version tells Jenkins * that the task is * simply not ready to execute, even if there's available executor. This is more efficient * than {@link #canTake(hudson.model.Node, hudson.model.Queue.BuildableItem)}, and it sends the right signal to * Jenkins so that * it won't use {@link hudson.slaves.Cloud} to try to provision new executors. * <p/> * <p/> * Vetos are additive. When multiple {@link hudson.model.queue.QueueTaskDispatcher}s are in the system, * the task is considered blocked if any one of them returns a non-null value. * (This relationship is also the same with built-in check logic.) * <p/> * <p/> * If a {@link hudson.model.queue.QueueTaskDispatcher} returns non-null from this method, the task is placed into * the 'blocked' state, and generally speaking it stays in this state for a few seconds before * its state gets re-evaluated. 
If a {@link hudson.model.queue.QueueTaskDispatcher} wants the blockage condition * to be re-evaluated earlier, call {@link hudson.model.Queue#scheduleMaintenance()} to initiate that process. * * @return null to indicate that the item is ready to proceed to the buildable state as far as this * {@link hudson.model.queue.QueueTaskDispatcher} is concerned. Otherwise return an object that indicates why * the build is blocked. * @since 1.427 */ @Override public CauseOfBlockage canRun(Queue.Item item) { if (item.task instanceof AbstractProject) { BuildBlockerProperty property = getBuildBlockerProperty(item); if (property != null && property.isUseBuildBlocker()) { CauseOfBlockage subTask = checkForBlock(item, property); if (subTask != null) { return subTask; } } } return super.canRun(item); } @Override public CauseOfBlockage canTake(Node node, Queue.BuildableItem item) { BuildBlockerProperty property = getBuildBlockerProperty(item); if (property != null && property.isUseBuildBlocker()) { CauseOfBlockage causeOfBlockage = checkForBlock(node, item, property); if (causeOfBlockage != null) { return causeOfBlockage; } } return super.canTake(node, item); } private CauseOfBlockage checkForBlock(Queue.Item item, BuildBlockerProperty blockingJobs) { return checkForBlock(null, item, blockingJobs); } private CauseOfBlockage checkForBlock(Node node, Queue.Item item, BuildBlockerProperty property) { if (property.getBlockingJobs() == null) { return null; } SubTask result = checkAccordingToProperties(node, item, property); if (result != null) { if (result instanceof MatrixConfiguration) { result = ((MatrixConfiguration) result).getParent(); } return CauseOfBlockage.fromMessage(Messages._BlockingJobIsRunning(item.getInQueueForString(), result.getDisplayName())); } return null; } private SubTask checkAccordingToProperties(Node node, Queue.Item item, BuildBlockerProperty properties) { BlockingJobsMonitor jobsMonitor = monitorFactory.build(properties.getBlockingJobs()); if (checkWasCalledInGlobalContext(node) && properties.getBlockLevel().isGlobal()) { LOG.logp(FINE, getClass().getName(), "checkAccordingToProperties", "calling checkAllNodesForRunningBuilds"); SubTask checkAllNodesForRunningBuildsResult = jobsMonitor.checkAllNodesForRunningBuilds(); if (foundBlocker(checkAllNodesForRunningBuildsResult)) { return checkAllNodesForRunningBuildsResult; } if (properties.getScanQueueFor().isAll()) { LOG.logp(FINE, getClass().getName(), "checkAccordingToProperties", "calling checkForQueueEntries"); SubTask checkForQueueEntriesResult = jobsMonitor.checkForQueueEntries(item); if (foundBlocker(checkForQueueEntriesResult)) { return checkForQueueEntriesResult; } } else if (properties.getScanQueueFor().isBuildable()) { LOG.logp(FINE, getClass().getName(), "checkAccordingToProperties", "calling checkForBuildableQueueEntries"); SubTask checkForBuildableQueueEntriesResult = jobsMonitor.checkForBuildableQueueEntries(item); if (foundBlocker(checkForBuildableQueueEntriesResult)) { return checkForBuildableQueueEntriesResult; } } } if (checkWasCalledInNodeContext(node) && properties.getBlockLevel().isNode() && !properties.getBlockLevel().isGlobal()) { LOG.logp(FINE, getClass().getName(), "checkAccordingToProperties", "calling checkNodeForRunningBuilds"); SubTask checkNodeForRunningBuildsResult = jobsMonitor.checkNodeForRunningBuilds(node); if (foundBlocker(checkNodeForRunningBuildsResult)) { return checkNodeForRunningBuildsResult; } if (properties.getScanQueueFor().isAll()) { LOG.logp(FINE, getClass().getName(), 
"checkAccordingToProperties", "calling checkNodeForQueueEntries"); SubTask checkNodeForQueueEntriesResult = jobsMonitor.checkNodeForQueueEntries(item, node); if (foundBlocker(checkNodeForQueueEntriesResult)) { return checkNodeForQueueEntriesResult; } } else if (properties.getScanQueueFor().isBuildable()) { LOG.logp(FINE, getClass().getName(), "checkAccordingToProperties", "calling checkNodeFOrBuildableQueueEntries"); SubTask checkNodeForBuildableQueueEntriesResult = jobsMonitor.checkNodeForBuildableQueueEntries(item, node); if (foundBlocker(checkNodeForBuildableQueueEntriesResult)) { return checkNodeForBuildableQueueEntriesResult; } } } return null; } private boolean checkWasCalledInNodeContext(Node node) { return node != null; } private boolean checkWasCalledInGlobalContext(Node node) { return node == null; } private boolean foundBlocker(SubTask result) { return result != null; } @CheckForNull private BuildBlockerProperty getBuildBlockerProperty(Queue.Item item) { if (!(item.task instanceof Job)) { return null; } Job job = (Job) item.task; return (BuildBlockerProperty) job.getProperty(BuildBlockerProperty.class); } }
src/main/java/hudson/plugins/buildblocker/BuildBlockerQueueTaskDispatcher.java
/* * The MIT License * * Copyright (c) 2004-2011, Sun Microsystems, Inc., Frederik Fromm * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package hudson.plugins.buildblocker; import hudson.Extension; import hudson.matrix.MatrixConfiguration; import hudson.model.AbstractProject; import hudson.model.Node; import hudson.model.Queue; import hudson.model.queue.CauseOfBlockage; import hudson.model.queue.QueueTaskDispatcher; import hudson.model.queue.SubTask; import java.util.logging.Logger; import static java.util.logging.Level.FINE; /** * Queue task dispatcher that evaluates the given blocking jobs in the config of the * actual job. If a blocking job is detected, the actual job will stay in the build queue. */ @Extension public class BuildBlockerQueueTaskDispatcher extends QueueTaskDispatcher { private static final Logger LOG = Logger.getLogger(BuildBlockerQueueTaskDispatcher.class.getName()); private MonitorFactory monitorFactory; public BuildBlockerQueueTaskDispatcher() { monitorFactory = new DefaultMonitorFactory(); } //default scope for testability BuildBlockerQueueTaskDispatcher(MonitorFactory monitorFactory) { this.monitorFactory = monitorFactory; } /** * Called whenever {@link hudson.model.Queue} is considering if {@link hudson.model.Queue.Item} is ready to * execute immediately * (which doesn't necessarily mean that it gets executed right away &mdash; it's still subject to * executor availability), or if it should be considered blocked. * <p/> * <p/> * Compared to {@link #canTake(hudson.model.Node, hudson.model.Queue.BuildableItem)}, this version tells Jenkins * that the task is * simply not ready to execute, even if there's available executor. This is more efficient * than {@link #canTake(hudson.model.Node, hudson.model.Queue.BuildableItem)}, and it sends the right signal to * Jenkins so that * it won't use {@link hudson.slaves.Cloud} to try to provision new executors. * <p/> * <p/> * Vetos are additive. When multiple {@link hudson.model.queue.QueueTaskDispatcher}s are in the system, * the task is considered blocked if any one of them returns a non-null value. * (This relationship is also the same with built-in check logic.) * <p/> * <p/> * If a {@link hudson.model.queue.QueueTaskDispatcher} returns non-null from this method, the task is placed into * the 'blocked' state, and generally speaking it stays in this state for a few seconds before * its state gets re-evaluated. 
If a {@link hudson.model.queue.QueueTaskDispatcher} wants the blockage condition * to be re-evaluated earlier, call {@link hudson.model.Queue#scheduleMaintenance()} to initiate that process. * * @return null to indicate that the item is ready to proceed to the buildable state as far as this * {@link hudson.model.queue.QueueTaskDispatcher} is concerned. Otherwise return an object that indicates why * the build is blocked. * @since 1.427 */ @Override public CauseOfBlockage canRun(Queue.Item item) { if (item.task instanceof AbstractProject) { BuildBlockerProperty property = getBuildBlockerProperty(item); if (property != null && property.isUseBuildBlocker()) { CauseOfBlockage subTask = checkForBlock(item, property); if (subTask != null) { return subTask; } } } return super.canRun(item); } @Override public CauseOfBlockage canTake(Node node, Queue.BuildableItem item) { if (item.task instanceof AbstractProject) { BuildBlockerProperty property = getBuildBlockerProperty(item); if (property != null && property.isUseBuildBlocker()) { CauseOfBlockage causeOfBlockage = checkForBlock(node, item, property); if (causeOfBlockage != null) { return causeOfBlockage; } } } return super.canTake(node, item); } private CauseOfBlockage checkForBlock(Queue.Item item, BuildBlockerProperty blockingJobs) { return checkForBlock(null, item, blockingJobs); } private CauseOfBlockage checkForBlock(Node node, Queue.Item item, BuildBlockerProperty property) { if (property.getBlockingJobs() == null) { return null; } SubTask result = checkAccordingToProperties(node, item, property); if (result != null) { if (result instanceof MatrixConfiguration) { result = ((MatrixConfiguration) result).getParent(); } return CauseOfBlockage.fromMessage(Messages._BlockingJobIsRunning(item.getInQueueForString(), result.getDisplayName())); } return null; } private SubTask checkAccordingToProperties(Node node, Queue.Item item, BuildBlockerProperty properties) { BlockingJobsMonitor jobsMonitor = monitorFactory.build(properties.getBlockingJobs()); if (checkWasCalledInGlobalContext(node) && properties.getBlockLevel().isGlobal()) { LOG.logp(FINE, getClass().getName(), "checkAccordingToProperties", "calling checkAllNodesForRunningBuilds"); SubTask checkAllNodesForRunningBuildsResult = jobsMonitor.checkAllNodesForRunningBuilds(); if (foundBlocker(checkAllNodesForRunningBuildsResult)) { return checkAllNodesForRunningBuildsResult; } if (properties.getScanQueueFor().isAll()) { LOG.logp(FINE, getClass().getName(), "checkAccordingToProperties", "calling checkForQueueEntries"); SubTask checkForQueueEntriesResult = jobsMonitor.checkForQueueEntries(item); if (foundBlocker(checkForQueueEntriesResult)) { return checkForQueueEntriesResult; } } else if (properties.getScanQueueFor().isBuildable()) { LOG.logp(FINE, getClass().getName(), "checkAccordingToProperties", "calling checkForBuildableQueueEntries"); SubTask checkForBuildableQueueEntriesResult = jobsMonitor.checkForBuildableQueueEntries(item); if (foundBlocker(checkForBuildableQueueEntriesResult)) { return checkForBuildableQueueEntriesResult; } } } if (checkWasCalledInNodeContext(node) && properties.getBlockLevel().isNode() && !properties.getBlockLevel().isGlobal()) { LOG.logp(FINE, getClass().getName(), "checkAccordingToProperties", "calling checkNodeForRunningBuilds"); SubTask checkNodeForRunningBuildsResult = jobsMonitor.checkNodeForRunningBuilds(node); if (foundBlocker(checkNodeForRunningBuildsResult)) { return checkNodeForRunningBuildsResult; } if (properties.getScanQueueFor().isAll()) { LOG.logp(FINE, 
getClass().getName(), "checkAccordingToProperties", "calling checkNodeForQueueEntries"); SubTask checkNodeForQueueEntriesResult = jobsMonitor.checkNodeForQueueEntries(item, node); if (foundBlocker(checkNodeForQueueEntriesResult)) { return checkNodeForQueueEntriesResult; } } else if (properties.getScanQueueFor().isBuildable()) { LOG.logp(FINE, getClass().getName(), "checkAccordingToProperties", "calling checkNodeFOrBuildableQueueEntries"); SubTask checkNodeForBuildableQueueEntriesResult = jobsMonitor.checkNodeForBuildableQueueEntries(item, node); if (foundBlocker(checkNodeForBuildableQueueEntriesResult)) { return checkNodeForBuildableQueueEntriesResult; } } } return null; } private boolean checkWasCalledInNodeContext(Node node) { return node != null; } private boolean checkWasCalledInGlobalContext(Node node) { return node == null; } private boolean foundBlocker(SubTask result) { return result != null; } private BuildBlockerProperty getBuildBlockerProperty(Queue.Item item) { AbstractProject project = (AbstractProject) item.task; return (BuildBlockerProperty) project.getProperty(BuildBlockerProperty.class); } }
Fix at the good level
src/main/java/hudson/plugins/buildblocker/BuildBlockerQueueTaskDispatcher.java
Fix at the good level
<ide><path>rc/main/java/hudson/plugins/buildblocker/BuildBlockerQueueTaskDispatcher.java <ide> import hudson.Extension; <ide> import hudson.matrix.MatrixConfiguration; <ide> import hudson.model.AbstractProject; <add>import hudson.model.Job; <ide> import hudson.model.Node; <ide> import hudson.model.Queue; <ide> import hudson.model.queue.CauseOfBlockage; <ide> import hudson.model.queue.QueueTaskDispatcher; <ide> import hudson.model.queue.SubTask; <ide> <add>import javax.annotation.CheckForNull; <ide> import java.util.logging.Logger; <ide> <ide> import static java.util.logging.Level.FINE; <ide> <ide> @Override <ide> public CauseOfBlockage canTake(Node node, Queue.BuildableItem item) { <del> if (item.task instanceof AbstractProject) { <del> BuildBlockerProperty property = getBuildBlockerProperty(item); <del> if (property != null && property.isUseBuildBlocker()) { <del> CauseOfBlockage causeOfBlockage = checkForBlock(node, item, property); <del> if (causeOfBlockage != null) { <del> return causeOfBlockage; <del> } <add> BuildBlockerProperty property = getBuildBlockerProperty(item); <add> if (property != null && property.isUseBuildBlocker()) { <add> CauseOfBlockage causeOfBlockage = checkForBlock(node, item, property); <add> if (causeOfBlockage != null) { <add> return causeOfBlockage; <ide> } <ide> } <ide> return super.canTake(node, item); <ide> return result != null; <ide> } <ide> <add> @CheckForNull <ide> private BuildBlockerProperty getBuildBlockerProperty(Queue.Item item) { <del> AbstractProject project = (AbstractProject) item.task; <del> <del> return (BuildBlockerProperty) project.getProperty(BuildBlockerProperty.class); <add> if (!(item.task instanceof Job)) { <add> return null; <add> } <add> Job job = (Job) item.task; <add> <add> return (BuildBlockerProperty) job.getProperty(BuildBlockerProperty.class); <ide> } <ide> }
Java
apache-2.0
3ef6e6e04aab101154c993860e605f4ae009ae93
0
lorenamgUMU/sakai,noondaysun/sakai,whumph/sakai,willkara/sakai,rodriguezdevera/sakai,joserabal/sakai,liubo404/sakai,rodriguezdevera/sakai,tl-its-umich-edu/sakai,pushyamig/sakai,willkara/sakai,OpenCollabZA/sakai,noondaysun/sakai,wfuedu/sakai,Fudan-University/sakai,hackbuteer59/sakai,introp-software/sakai,noondaysun/sakai,duke-compsci290-spring2016/sakai,wfuedu/sakai,zqian/sakai,frasese/sakai,kwedoff1/sakai,clhedrick/sakai,colczr/sakai,colczr/sakai,colczr/sakai,OpenCollabZA/sakai,conder/sakai,bzhouduke123/sakai,introp-software/sakai,frasese/sakai,noondaysun/sakai,wfuedu/sakai,whumph/sakai,surya-janani/sakai,hackbuteer59/sakai,OpenCollabZA/sakai,introp-software/sakai,liubo404/sakai,OpenCollabZA/sakai,surya-janani/sakai,kwedoff1/sakai,bkirschn/sakai,willkara/sakai,rodriguezdevera/sakai,hackbuteer59/sakai,whumph/sakai,hackbuteer59/sakai,pushyamig/sakai,bzhouduke123/sakai,OpenCollabZA/sakai,willkara/sakai,zqian/sakai,pushyamig/sakai,puramshetty/sakai,udayg/sakai,joserabal/sakai,kingmook/sakai,puramshetty/sakai,Fudan-University/sakai,lorenamgUMU/sakai,kingmook/sakai,hackbuteer59/sakai,clhedrick/sakai,noondaysun/sakai,rodriguezdevera/sakai,zqian/sakai,OpenCollabZA/sakai,rodriguezdevera/sakai,kwedoff1/sakai,OpenCollabZA/sakai,pushyamig/sakai,buckett/sakai-gitflow,rodriguezdevera/sakai,whumph/sakai,duke-compsci290-spring2016/sakai,liubo404/sakai,Fudan-University/sakai,whumph/sakai,tl-its-umich-edu/sakai,willkara/sakai,udayg/sakai,frasese/sakai,frasese/sakai,buckett/sakai-gitflow,ouit0408/sakai,pushyamig/sakai,surya-janani/sakai,lorenamgUMU/sakai,clhedrick/sakai,duke-compsci290-spring2016/sakai,bkirschn/sakai,buckett/sakai-gitflow,ouit0408/sakai,Fudan-University/sakai,pushyamig/sakai,joserabal/sakai,whumph/sakai,zqian/sakai,tl-its-umich-edu/sakai,kwedoff1/sakai,noondaysun/sakai,bzhouduke123/sakai,lorenamgUMU/sakai,bkirschn/sakai,puramshetty/sakai,puramshetty/sakai,puramshetty/sakai,willkara/sakai,liubo404/sakai,clhedrick/sakai,frasese/sakai,wfuedu/sakai,kingmook/sakai,ouit0408/sakai,colczr/sakai,pushyamig/sakai,udayg/sakai,duke-compsci290-spring2016/sakai,puramshetty/sakai,duke-compsci290-spring2016/sakai,ktakacs/sakai,rodriguezdevera/sakai,introp-software/sakai,Fudan-University/sakai,udayg/sakai,kwedoff1/sakai,tl-its-umich-edu/sakai,joserabal/sakai,ktakacs/sakai,conder/sakai,udayg/sakai,pushyamig/sakai,ouit0408/sakai,udayg/sakai,surya-janani/sakai,ouit0408/sakai,willkara/sakai,ktakacs/sakai,Fudan-University/sakai,zqian/sakai,tl-its-umich-edu/sakai,ktakacs/sakai,kingmook/sakai,bkirschn/sakai,ouit0408/sakai,udayg/sakai,surya-janani/sakai,kingmook/sakai,introp-software/sakai,kingmook/sakai,introp-software/sakai,rodriguezdevera/sakai,liubo404/sakai,tl-its-umich-edu/sakai,bkirschn/sakai,joserabal/sakai,puramshetty/sakai,clhedrick/sakai,noondaysun/sakai,bzhouduke123/sakai,kwedoff1/sakai,joserabal/sakai,bzhouduke123/sakai,colczr/sakai,wfuedu/sakai,Fudan-University/sakai,lorenamgUMU/sakai,frasese/sakai,tl-its-umich-edu/sakai,colczr/sakai,bzhouduke123/sakai,clhedrick/sakai,Fudan-University/sakai,ktakacs/sakai,zqian/sakai,clhedrick/sakai,hackbuteer59/sakai,willkara/sakai,conder/sakai,whumph/sakai,buckett/sakai-gitflow,conder/sakai,puramshetty/sakai,conder/sakai,zqian/sakai,liubo404/sakai,surya-janani/sakai,liubo404/sakai,surya-janani/sakai,noondaysun/sakai,tl-its-umich-edu/sakai,frasese/sakai,bkirschn/sakai,OpenCollabZA/sakai,bkirschn/sakai,bzhouduke123/sakai,ktakacs/sakai,ktakacs/sakai,kwedoff1/sakai,buckett/sakai-gitflow,buckett/sakai-gitflow,conder/sakai,hackbuteer59/sakai,bzhouduke123/sakai,wfuedu/sakai,du
ke-compsci290-spring2016/sakai,buckett/sakai-gitflow,clhedrick/sakai,introp-software/sakai,kingmook/sakai,whumph/sakai,lorenamgUMU/sakai,colczr/sakai,introp-software/sakai,zqian/sakai,liubo404/sakai,kwedoff1/sakai,lorenamgUMU/sakai,wfuedu/sakai,conder/sakai,frasese/sakai,hackbuteer59/sakai,colczr/sakai,buckett/sakai-gitflow,joserabal/sakai,ktakacs/sakai,conder/sakai,bkirschn/sakai,duke-compsci290-spring2016/sakai,kingmook/sakai,lorenamgUMU/sakai,udayg/sakai,surya-janani/sakai,ouit0408/sakai,wfuedu/sakai,duke-compsci290-spring2016/sakai,joserabal/sakai,ouit0408/sakai
package org.sakaiproject.profile2.tool.pages.panels; import java.io.IOException; import java.io.ObjectInputStream; import org.apache.log4j.Logger; import org.apache.wicket.AttributeModifier; import org.apache.wicket.ajax.AjaxRequestTarget; import org.apache.wicket.ajax.markup.html.AjaxLink; import org.apache.wicket.ajax.markup.html.navigation.paging.AjaxPagingNavigator; import org.apache.wicket.extensions.ajax.markup.html.modal.ModalWindow; import org.apache.wicket.markup.html.WebMarkupContainer; import org.apache.wicket.markup.html.basic.Label; import org.apache.wicket.markup.html.image.ContextImage; import org.apache.wicket.markup.html.link.Link; import org.apache.wicket.markup.html.panel.Panel; import org.apache.wicket.markup.repeater.Item; import org.apache.wicket.markup.repeater.data.DataView; import org.apache.wicket.model.IModel; import org.apache.wicket.model.Model; import org.apache.wicket.model.ResourceModel; import org.apache.wicket.model.StringResourceModel; import org.sakaiproject.profile2.logic.ProfileLogic; import org.sakaiproject.profile2.logic.SakaiProxy; import org.sakaiproject.profile2.model.ProfilePrivacy; import org.sakaiproject.profile2.tool.ProfileApplication; import org.sakaiproject.profile2.tool.components.ProfileImageRenderer; import org.sakaiproject.profile2.tool.components.ProfileStatusRenderer; import org.sakaiproject.profile2.tool.dataproviders.ConfirmedFriendsDataProvider; import org.sakaiproject.profile2.tool.models.FriendAction; import org.sakaiproject.profile2.tool.pages.ViewProfile; import org.sakaiproject.profile2.tool.pages.windows.RemoveFriend; import org.sakaiproject.profile2.util.ProfileConstants; public class ConfirmedFriends extends Panel { private static final long serialVersionUID = 1L; private static final Logger log = Logger.getLogger(ConfirmedFriends.class); private transient SakaiProxy sakaiProxy; private transient ProfileLogic profileLogic; private int numConfirmedFriends = 0; private boolean ownList = false; public ConfirmedFriends(final String id, final String userUuid) { super(id); log.debug("ConfirmedFriends()"); //get SakaiProxy sakaiProxy = ProfileApplication.get().getSakaiProxy(); //get ProfileLogic profileLogic = ProfileApplication.get().getProfileLogic(); //setup model to store the actions in the modal windows final FriendAction friendActionModel = new FriendAction(); //get id of user viewing this page (will be the same if user is viewing own list, different if viewing someone else's) final String currentUserUuid = sakaiProxy.getCurrentUserId(); //if viewing own friends, you can manage them. 
if(userUuid.equals(currentUserUuid)) { ownList = true; } //get our list of confirmed friends as an IDataProvider ConfirmedFriendsDataProvider provider = new ConfirmedFriendsDataProvider(userUuid); //init number of friends numConfirmedFriends = provider.size(); //model so we can update the number of friends IModel numConfirmedFriendsModel = new Model() { private static final long serialVersionUID = 1L; public Object getObject() { return numConfirmedFriends; } }; //heading final WebMarkupContainer confirmedFriendsHeading = new WebMarkupContainer("confirmedFriendsHeading"); Label confirmedFriendsLabel = new Label("confirmedFriendsLabel"); //if viewing own list, "my friends", else, "their name's friends" if(ownList) { confirmedFriendsLabel.setModel(new ResourceModel("heading.friends.my")); } else { String displayName = sakaiProxy.getUserDisplayName(userUuid); confirmedFriendsLabel.setModel(new StringResourceModel("heading.friends.view", null, new Object[]{ displayName } )); } confirmedFriendsHeading.add(confirmedFriendsLabel); confirmedFriendsHeading.add(new Label("confirmedFriendsNumber", numConfirmedFriendsModel)); confirmedFriendsHeading.setOutputMarkupId(true); add(confirmedFriendsHeading); //no friends message (only show if viewing own list) /* final WebMarkupContainer noFriendsContainer = new WebMarkupContainer("noFriendsContainer"); noFriendsContainer.setOutputMarkupId(true); final Link noFriendsLink = new Link("noFriendsLink", new ResourceModel("link.friend.search")) { private static final long serialVersionUID = 1L; public void onClick() { setResponsePage(new MySearch()); } }; noFriendsContainer.add(noFriendsLink); noFriendsContainer.setVisible(false); add(noFriendsContainer); */ //container which wraps list final WebMarkupContainer confirmedFriendsContainer = new WebMarkupContainer("confirmedFriendsContainer"); confirmedFriendsContainer.setOutputMarkupId(true); //results DataView confirmedFriendsDataView = new DataView("results-list", provider) { private static final long serialVersionUID = 1L; protected void populateItem(final Item item) { //get friendUuid final String friendUuid = (String)item.getModelObject(); //setup values String displayName = sakaiProxy.getUserDisplayName(friendUuid); boolean friend; //get friend status if(ownList) { friend = true; //viewing own page of conenctions, must be friend! } else { friend = profileLogic.isUserXFriendOfUserY(userUuid, friendUuid); //other person viewing, check if they are friends } //get privacy record for the friend ProfilePrivacy privacy = profileLogic.getPrivacyRecordForUser(friendUuid); //is profile image allowed to be viewed by this user/friend? 
final boolean isProfileImageAllowed = profileLogic.isUserXProfileImageVisibleByUserY(friendUuid, privacy, currentUserUuid, friend); //image item.add(new ProfileImageRenderer("result-photo", friendUuid, isProfileImageAllowed, ProfileConstants.PROFILE_IMAGE_THUMBNAIL, true)); //name and link to profile Link profileLink = new Link("result-profileLink") { private static final long serialVersionUID = 1L; public void onClick() { setResponsePage(new ViewProfile(friendUuid)); } }; profileLink.add(new Label("result-name", displayName)); item.add(profileLink); //status component ProfileStatusRenderer status = new ProfileStatusRenderer("result-status", friendUuid, privacy, currentUserUuid, friend, "friendsListInfoStatusMessage", "friendsListInfoStatusDate"); status.setOutputMarkupId(true); item.add(status); /* ACTIONS */ //REMOVE FRIEND MODAL WINDOW final ModalWindow removeFriendWindow = new ModalWindow("removeFriendWindow"); removeFriendWindow.setContent(new RemoveFriend(removeFriendWindow.getContentId(), removeFriendWindow, friendActionModel, userUuid, friendUuid)); //REMOVE FRIEND LINK final AjaxLink removeFriendLink = new AjaxLink("removeFriendLink") { private static final long serialVersionUID = 1L; public void onClick(AjaxRequestTarget target) { //target.appendJavascript("Wicket.Window.get().window.style.width='800px';"); removeFriendWindow.show(target); target.appendJavascript("fixWindowVertical();"); } }; ContextImage removeFriendIcon = new ContextImage("removeFriendIcon",new Model(ProfileConstants.DELETE_IMG)); removeFriendLink.add(removeFriendIcon); removeFriendLink.add(new AttributeModifier("title", true,new ResourceModel("link.title.removefriend"))); item.add(removeFriendLink); //can only delete if own friends if(!ownList) { removeFriendLink.setEnabled(true); removeFriendLink.setVisible(false); } // REMOVE FRIEND MODAL WINDOW HANDLER removeFriendWindow.setWindowClosedCallback(new ModalWindow.WindowClosedCallback() { private static final long serialVersionUID = 1L; public void onClose(AjaxRequestTarget target){ if(friendActionModel.isRemoved()) { //decrement number of friends numConfirmedFriends--; //remove friend item from display target.appendJavascript("$('#" + item.getMarkupId() + "').slideUp();"); //update label target.addComponent(confirmedFriendsHeading); //if none left, hide whole thing if(numConfirmedFriends==0) { target.appendJavascript("$('#" + confirmedFriendsContainer.getMarkupId() + "').fadeOut();"); } } } }); item.add(removeFriendWindow); item.setOutputMarkupId(true); } }; confirmedFriendsDataView.setOutputMarkupId(true); confirmedFriendsDataView.setItemsPerPage(ProfileConstants.MAX_CONNECTIONS_PER_PAGE); confirmedFriendsContainer.add(confirmedFriendsDataView); //add results container add(confirmedFriendsContainer); //add pager AjaxPagingNavigator pager = new AjaxPagingNavigator("navigator", confirmedFriendsDataView); add(pager); //initially, if no friends, hide container if(numConfirmedFriends == 0) { confirmedFriendsContainer.setVisible(false); pager.setVisible(false); } } /* reinit for deserialisation (ie back button) */ private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException { in.defaultReadObject(); log.debug("ConfirmedFriends has been deserialized."); //re-init our transient objects profileLogic = ProfileApplication.get().getProfileLogic(); sakaiProxy = ProfileApplication.get().getSakaiProxy(); } }
profile2/tool/src/java/org/sakaiproject/profile2/tool/pages/panels/ConfirmedFriends.java
package org.sakaiproject.profile2.tool.pages.panels; import java.io.IOException; import java.io.ObjectInputStream; import org.apache.log4j.Logger; import org.apache.wicket.AttributeModifier; import org.apache.wicket.ajax.AjaxRequestTarget; import org.apache.wicket.ajax.markup.html.AjaxLink; import org.apache.wicket.extensions.ajax.markup.html.modal.ModalWindow; import org.apache.wicket.markup.html.WebMarkupContainer; import org.apache.wicket.markup.html.basic.Label; import org.apache.wicket.markup.html.image.ContextImage; import org.apache.wicket.markup.html.link.Link; import org.apache.wicket.markup.html.navigation.paging.PagingNavigator; import org.apache.wicket.markup.html.panel.Panel; import org.apache.wicket.markup.repeater.Item; import org.apache.wicket.markup.repeater.data.DataView; import org.apache.wicket.model.IModel; import org.apache.wicket.model.Model; import org.apache.wicket.model.ResourceModel; import org.apache.wicket.model.StringResourceModel; import org.sakaiproject.profile2.logic.ProfileLogic; import org.sakaiproject.profile2.logic.SakaiProxy; import org.sakaiproject.profile2.model.ProfilePrivacy; import org.sakaiproject.profile2.tool.ProfileApplication; import org.sakaiproject.profile2.tool.components.ProfileImageRenderer; import org.sakaiproject.profile2.tool.components.ProfileStatusRenderer; import org.sakaiproject.profile2.tool.dataproviders.ConfirmedFriendsDataProvider; import org.sakaiproject.profile2.tool.models.FriendAction; import org.sakaiproject.profile2.tool.pages.ViewProfile; import org.sakaiproject.profile2.tool.pages.windows.RemoveFriend; import org.sakaiproject.profile2.util.ProfileConstants; public class ConfirmedFriends extends Panel { private static final long serialVersionUID = 1L; private static final Logger log = Logger.getLogger(ConfirmedFriends.class); private transient SakaiProxy sakaiProxy; private transient ProfileLogic profileLogic; private int numConfirmedFriends = 0; private boolean ownList = false; public ConfirmedFriends(final String id, final String userUuid) { super(id); log.debug("ConfirmedFriends()"); //get SakaiProxy sakaiProxy = ProfileApplication.get().getSakaiProxy(); //get ProfileLogic profileLogic = ProfileApplication.get().getProfileLogic(); //setup model to store the actions in the modal windows final FriendAction friendActionModel = new FriendAction(); //get id of user viewing this page (will be the same if user is viewing own list, different if viewing someone else's) final String currentUserUuid = sakaiProxy.getCurrentUserId(); //if viewing own friends, you can manage them. 
if(userUuid.equals(currentUserUuid)) { ownList = true; } //get our list of confirmed friends as an IDataProvider ConfirmedFriendsDataProvider provider = new ConfirmedFriendsDataProvider(userUuid); //init number of friends numConfirmedFriends = provider.size(); //model so we can update the number of friends IModel numConfirmedFriendsModel = new Model() { private static final long serialVersionUID = 1L; public Object getObject() { return numConfirmedFriends; } }; //heading final WebMarkupContainer confirmedFriendsHeading = new WebMarkupContainer("confirmedFriendsHeading"); Label confirmedFriendsLabel = new Label("confirmedFriendsLabel"); //if viewing own list, "my friends", else, "their name's friends" if(ownList) { confirmedFriendsLabel.setModel(new ResourceModel("heading.friends.my")); } else { String displayName = sakaiProxy.getUserDisplayName(userUuid); confirmedFriendsLabel.setModel(new StringResourceModel("heading.friends.view", null, new Object[]{ displayName } )); } confirmedFriendsHeading.add(confirmedFriendsLabel); confirmedFriendsHeading.add(new Label("confirmedFriendsNumber", numConfirmedFriendsModel)); confirmedFriendsHeading.setOutputMarkupId(true); add(confirmedFriendsHeading); //no friends message (only show if viewing own list) /* final WebMarkupContainer noFriendsContainer = new WebMarkupContainer("noFriendsContainer"); noFriendsContainer.setOutputMarkupId(true); final Link noFriendsLink = new Link("noFriendsLink", new ResourceModel("link.friend.search")) { private static final long serialVersionUID = 1L; public void onClick() { setResponsePage(new MySearch()); } }; noFriendsContainer.add(noFriendsLink); noFriendsContainer.setVisible(false); add(noFriendsContainer); */ //container which wraps list final WebMarkupContainer confirmedFriendsContainer = new WebMarkupContainer("confirmedFriendsContainer"); confirmedFriendsContainer.setOutputMarkupId(true); //results DataView confirmedFriendsDataView = new DataView("results-list", provider) { private static final long serialVersionUID = 1L; protected void populateItem(final Item item) { //get friendUuid final String friendUuid = (String)item.getModelObject(); //setup values String displayName = sakaiProxy.getUserDisplayName(friendUuid); boolean friend; //get friend status if(ownList) { friend = true; //viewing own page of conenctions, must be friend! } else { friend = profileLogic.isUserXFriendOfUserY(userUuid, friendUuid); //other person viewing, check if they are friends } //get privacy record for the friend ProfilePrivacy privacy = profileLogic.getPrivacyRecordForUser(friendUuid); //is profile image allowed to be viewed by this user/friend? 
final boolean isProfileImageAllowed = profileLogic.isUserXProfileImageVisibleByUserY(friendUuid, privacy, currentUserUuid, friend); //image item.add(new ProfileImageRenderer("result-photo", friendUuid, isProfileImageAllowed, ProfileConstants.PROFILE_IMAGE_THUMBNAIL, true)); //name and link to profile Link profileLink = new Link("result-profileLink") { private static final long serialVersionUID = 1L; public void onClick() { setResponsePage(new ViewProfile(friendUuid)); } }; profileLink.add(new Label("result-name", displayName)); item.add(profileLink); //status component ProfileStatusRenderer status = new ProfileStatusRenderer("result-status", friendUuid, privacy, currentUserUuid, friend, "friendsListInfoStatusMessage", "friendsListInfoStatusDate"); status.setOutputMarkupId(true); item.add(status); /* ACTIONS */ //REMOVE FRIEND MODAL WINDOW final ModalWindow removeFriendWindow = new ModalWindow("removeFriendWindow"); removeFriendWindow.setContent(new RemoveFriend(removeFriendWindow.getContentId(), removeFriendWindow, friendActionModel, userUuid, friendUuid)); //REMOVE FRIEND LINK final AjaxLink removeFriendLink = new AjaxLink("removeFriendLink") { private static final long serialVersionUID = 1L; public void onClick(AjaxRequestTarget target) { //target.appendJavascript("Wicket.Window.get().window.style.width='800px';"); removeFriendWindow.show(target); target.appendJavascript("fixWindowVertical();"); } }; ContextImage removeFriendIcon = new ContextImage("removeFriendIcon",new Model(ProfileConstants.DELETE_IMG)); removeFriendLink.add(removeFriendIcon); removeFriendLink.add(new AttributeModifier("title", true,new ResourceModel("link.title.removefriend"))); item.add(removeFriendLink); //can only delete if own friends if(!ownList) { removeFriendLink.setEnabled(true); removeFriendLink.setVisible(false); } // REMOVE FRIEND MODAL WINDOW HANDLER removeFriendWindow.setWindowClosedCallback(new ModalWindow.WindowClosedCallback() { private static final long serialVersionUID = 1L; public void onClose(AjaxRequestTarget target){ if(friendActionModel.isRemoved()) { //decrement number of friends numConfirmedFriends--; //remove friend item from display target.appendJavascript("$('#" + item.getMarkupId() + "').slideUp();"); //update label target.addComponent(confirmedFriendsHeading); //if none left, hide whole thing if(numConfirmedFriends==0) { target.appendJavascript("$('#" + confirmedFriendsContainer.getMarkupId() + "').fadeOut();"); } } } }); item.add(removeFriendWindow); item.setOutputMarkupId(true); } }; confirmedFriendsDataView.setOutputMarkupId(true); confirmedFriendsDataView.setItemsPerPage(ProfileConstants.MAX_CONNECTIONS_PER_PAGE); confirmedFriendsContainer.add(confirmedFriendsDataView); //add results container add(confirmedFriendsContainer); //add pager PagingNavigator pager = new PagingNavigator("navigator", confirmedFriendsDataView); add(pager); //initially, if no friends, hide container if(numConfirmedFriends == 0) { confirmedFriendsContainer.setVisible(false); pager.setVisible(false); } } /* reinit for deserialisation (ie back button) */ private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException { in.defaultReadObject(); log.debug("ConfirmedFriends has been deserialized."); //re-init our transient objects profileLogic = ProfileApplication.get().getProfileLogic(); sakaiProxy = ProfileApplication.get().getSakaiProxy(); } }
PRFL-81 paging via AJAX rather than normal requests git-svn-id: 30a950ac960002fda4f107a8e0f4b5b25bfa9e10@70613 66ffb92e-73f9-0310-93c1-f5514f145a0a
profile2/tool/src/java/org/sakaiproject/profile2/tool/pages/panels/ConfirmedFriends.java
PRFL-81 paging via AJAX rather than normal requests
<ide><path>rofile2/tool/src/java/org/sakaiproject/profile2/tool/pages/panels/ConfirmedFriends.java <ide> import org.apache.wicket.AttributeModifier; <ide> import org.apache.wicket.ajax.AjaxRequestTarget; <ide> import org.apache.wicket.ajax.markup.html.AjaxLink; <add>import org.apache.wicket.ajax.markup.html.navigation.paging.AjaxPagingNavigator; <ide> import org.apache.wicket.extensions.ajax.markup.html.modal.ModalWindow; <ide> import org.apache.wicket.markup.html.WebMarkupContainer; <ide> import org.apache.wicket.markup.html.basic.Label; <ide> import org.apache.wicket.markup.html.image.ContextImage; <ide> import org.apache.wicket.markup.html.link.Link; <del>import org.apache.wicket.markup.html.navigation.paging.PagingNavigator; <ide> import org.apache.wicket.markup.html.panel.Panel; <ide> import org.apache.wicket.markup.repeater.Item; <ide> import org.apache.wicket.markup.repeater.data.DataView; <ide> add(confirmedFriendsContainer); <ide> <ide> //add pager <del> PagingNavigator pager = new PagingNavigator("navigator", confirmedFriendsDataView); <add> AjaxPagingNavigator pager = new AjaxPagingNavigator("navigator", confirmedFriendsDataView); <ide> add(pager); <ide> <ide> //initially, if no friends, hide container
Java
mit
d6eec4a7d22b2786161c59260978b62971bcad90
0
Phazyck/JGlyphs,Phazyck/JGlyphs,Phazyck/JGlyphs
package dk.itu.jglyph.swing; import java.awt.BorderLayout; import java.awt.Container; import java.awt.Dimension; import java.awt.GridLayout; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import javax.swing.JButton; import javax.swing.JFrame; import javax.swing.JPanel; import javax.swing.SwingUtilities; import javax.swing.UIManager; import javax.swing.UnsupportedLookAndFeelException; public class GlyphFrame extends JFrame { /** * Auto-generated serial version UID. */ private static final long serialVersionUID = -3005890889450279723L; private final static String FRAME_TITLE = "JGlyph"; private final static int MIN_WIDTH = 320; private final static int MIN_HEIGHT = 380; private GlyphPanel glyphPanel; // private GlyphPanel glyphPanel2; public GlyphFrame() { setLayout(new BorderLayout()); JPanel panelCenter = new JPanel(); add(panelCenter, BorderLayout.CENTER); JPanel panelSouth = new JPanel(); add(panelSouth, BorderLayout.SOUTH); addGlyphs(panelCenter); addButtons(panelSouth); setTitle(FRAME_TITLE); setSize(MIN_WIDTH, MIN_HEIGHT); Dimension minimumSize = new Dimension(MIN_WIDTH, MIN_HEIGHT); setMinimumSize(minimumSize); setLocationRelativeTo(null); setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); } private void addGlyph(Container container) { glyphPanel = new GlyphPanel(); container.add(glyphPanel); } private void addGlyphs(Container container) { GridLayout gridLayout = new GridLayout(1, 0); container.setLayout(gridLayout); glyphPanel = new GlyphPanel(); container.add(glyphPanel); // glyphPanel2 = new GlyphPanel(); // container.add(glyphPanel2); } private void addButtons(Container container) { // addRandomizerButton(container); // addMutatorButton(container); addButtonPass(container); addButtonFail(container); // addCrossButton(container); } // private void addRandomizerButton(Container container) // { // JButton buttonRandomize = new JButton("RANDOMIZE"); // // buttonRandomize.addActionListener(new ActionListener() { // @Override // public void actionPerformed(ActionEvent e) { // glyphPanel.randomizeGlyph(); // } // }); // // container.add(buttonRandomize); // } private void addButtonPass(Container container) { JButton buttonPass = new JButton("PASS"); buttonPass.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { glyphPanel.passGlyph(); } }); container.add(buttonPass); } private void addButtonFail(Container container) { JButton buttonFail = new JButton("FAIL"); buttonFail.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { glyphPanel.failGlyph(); } }); container.add(buttonFail); } // private void addMutatorButton(Container container) // { // JButton buttonMutate = new JButton("MUTATE"); // // buttonMutate.addActionListener(new ActionListener() { // @Override // public void actionPerformed(ActionEvent e) { // glyphPanel.mutateGlyph(); // } // }); // // container.add(buttonMutate); // } // private void addCrossButton(Container container) // { // JButton buttonCross = new JButton("CROSS"); // // buttonCross.addActionListener(new ActionListener() { // @Override // public void actionPerformed(ActionEvent e) { // glyphPanel.crossGlyph(glyphPanel2); // } // }); // // container.add(buttonCross); // } public static void main(String[] args) { try { String laf = UIManager.getSystemLookAndFeelClassName(); // laf = "javax.swing.plaf.nimbus.NimbusLookAndFeel"; // System.out.println(laf); UIManager.setLookAndFeel(laf); } catch (ClassNotFoundException | InstantiationException | 
IllegalAccessException | UnsupportedLookAndFeelException e) { // TODO Auto-generated catch block e.printStackTrace(); } GlyphFrame frame = new GlyphFrame(); SwingUtilities.updateComponentTreeUI(frame); frame.setVisible(true); } }
src/dk/itu/jglyph/swing/GlyphFrame.java
package dk.itu.jglyph.swing; import java.awt.BorderLayout; import java.awt.Container; import java.awt.Dimension; import java.awt.GridLayout; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import javax.swing.JButton; import javax.swing.JFrame; import javax.swing.JPanel; import javax.swing.SwingUtilities; import javax.swing.UIManager; import javax.swing.UnsupportedLookAndFeelException; public class GlyphFrame extends JFrame { /** * Auto-generated serial version UID. */ private static final long serialVersionUID = -3005890889450279723L; private final static String FRAME_TITLE = "JGlyph"; private final static int MIN_WIDTH = 320; private final static int MIN_HEIGHT = 380; private GlyphPanel glyphPanel; // private GlyphPanel glyphPanel2; public GlyphFrame() { setLayout(new BorderLayout()); JPanel panelCenter = new JPanel(); add(panelCenter, BorderLayout.CENTER); JPanel panelSouth = new JPanel(); add(panelSouth, BorderLayout.SOUTH); addGlyphs(panelCenter); addButtons(panelSouth); setTitle(FRAME_TITLE); setSize(MIN_WIDTH, MIN_HEIGHT); Dimension minimumSize = new Dimension(MIN_WIDTH, MIN_HEIGHT); setMinimumSize(minimumSize); setLocationRelativeTo(null); setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); } private void addGlyph(Container container) { glyphPanel = new GlyphPanel(); container.add(glyphPanel); } private void addGlyphs(Container container) { GridLayout gridLayout = new GridLayout(1, 0); container.setLayout(gridLayout); glyphPanel = new GlyphPanel(); container.add(glyphPanel); // glyphPanel2 = new GlyphPanel(); // container.add(glyphPanel2); } private void addButtons(Container container) { // addRandomizerButton(container); // addMutatorButton(container); addButtonPass(container); addButtonFail(container); // addCrossButton(container); } // private void addRandomizerButton(Container container) // { // JButton buttonRandomize = new JButton("RANDOMIZE"); // // buttonRandomize.addActionListener(new ActionListener() { // @Override // public void actionPerformed(ActionEvent e) { // glyphPanel.randomizeGlyph(); // } // }); // // container.add(buttonRandomize); // } private void addButtonPass(Container container) { JButton buttonPass = new JButton("PASS"); buttonPass.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { glyphPanel.passGlyph(); } }); container.add(buttonPass); } private void addButtonFail(Container container) { JButton buttonFail = new JButton("FAIL"); buttonFail.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { glyphPanel.failGlyph(); } }); container.add(buttonFail); } // private void addMutatorButton(Container container) // { // JButton buttonMutate = new JButton("MUTATE"); // // buttonMutate.addActionListener(new ActionListener() { // @Override // public void actionPerformed(ActionEvent e) { // glyphPanel.mutateGlyph(); // } // }); // // container.add(buttonMutate); // } // private void addCrossButton(Container container) // { // JButton buttonCross = new JButton("CROSS"); // // buttonCross.addActionListener(new ActionListener() { // @Override // public void actionPerformed(ActionEvent e) { // glyphPanel.crossGlyph(glyphPanel2); // } // }); // // container.add(buttonCross); // } public static void main(String[] args) { try { String laf = UIManager.getSystemLookAndFeelClassName(); // laf = "javax.swing.plaf.nimbus.NimbusLookAndFeel"; System.out.println(laf); UIManager.setLookAndFeel(laf); } catch (ClassNotFoundException | InstantiationException | 
IllegalAccessException | UnsupportedLookAndFeelException e) { // TODO Auto-generated catch block e.printStackTrace(); } GlyphFrame frame = new GlyphFrame(); SwingUtilities.updateComponentTreeUI(frame); frame.setVisible(true); } }
Stop printing look and feel.
src/dk/itu/jglyph/swing/GlyphFrame.java
Stop printing look and feel.
<ide><path>rc/dk/itu/jglyph/swing/GlyphFrame.java <ide> try { <ide> String laf = UIManager.getSystemLookAndFeelClassName(); <ide> // laf = "javax.swing.plaf.nimbus.NimbusLookAndFeel"; <del> System.out.println(laf); <add>// System.out.println(laf); <ide> UIManager.setLookAndFeel(laf); <ide> <ide> } catch (ClassNotFoundException | InstantiationException | IllegalAccessException
Java
epl-1.0
20f53272a526de192b66af173e0ca035d0b93d82
0
debrief/debrief,pecko/debrief,debrief/debrief,debrief/debrief,theanuradha/debrief,pecko/debrief,debrief/debrief,theanuradha/debrief,alastrina123/debrief,theanuradha/debrief,pecko/debrief,theanuradha/debrief,pecko/debrief,alastrina123/debrief,pecko/debrief,theanuradha/debrief,theanuradha/debrief,pecko/debrief,debrief/debrief,alastrina123/debrief,alastrina123/debrief,pecko/debrief,alastrina123/debrief,alastrina123/debrief,theanuradha/debrief,alastrina123/debrief,debrief/debrief
/** * */ package org.mwc.cmap.plotViewer.actions; import org.mwc.cmap.core.CorePlugin; import org.mwc.cmap.core.operations.DebriefActionWrapper; import MWC.GUI.PlainChart; import MWC.GUI.Tools.Action; import MWC.GenericData.WorldArea; /** * @author ian.mayo * */ public class FitToWindow extends CoreEditorAction { protected void execute() { PlainChart theChart = getChart(); WorldArea oldArea = new WorldArea(theChart.getCanvas().getProjection().getVisibleDataArea()); Action theAction = new MWC.GUI.Tools.Chart.FitToWin.FitToWinAction(theChart, oldArea); // and wrap it DebriefActionWrapper daw = new DebriefActionWrapper(theAction, theChart.getLayers()); // and add it to the clipboard CorePlugin.run(daw); } }
trunk/org.mwc.cmap.plotViewer/src/org/mwc/cmap/plotViewer/actions/FitToWindow.java
/** * */ package org.mwc.cmap.plotViewer.actions; import org.mwc.cmap.core.CorePlugin; import org.mwc.cmap.core.operations.DebriefActionWrapper; import MWC.GUI.PlainChart; import MWC.GUI.Tools.Action; import MWC.GenericData.WorldArea; /** * @author ian.mayo * */ public class FitToWindow extends CoreEditorAction { protected void execute() { PlainChart theChart = getChart(); WorldArea oldArea = new WorldArea(theChart.getCanvas().getProjection().getVisibleDataArea()); Action theAction = new MWC.GUI.Tools.Chart.FitToWin.FitToWinAction(theChart, oldArea); // and wrap it DebriefActionWrapper daw = new DebriefActionWrapper(theAction, null); // and add it to the clipboard CorePlugin.run(daw); } }
Include parent layers object, to ease update git-svn-id: d2601f1668e3cd2de409f5c059006a6eeada0abf@1481 cb33b658-6c9e-41a7-9690-cba343611204
trunk/org.mwc.cmap.plotViewer/src/org/mwc/cmap/plotViewer/actions/FitToWindow.java
Include parent layers object, to ease update
<ide><path>runk/org.mwc.cmap.plotViewer/src/org/mwc/cmap/plotViewer/actions/FitToWindow.java <ide> Action theAction = new MWC.GUI.Tools.Chart.FitToWin.FitToWinAction(theChart, oldArea); <ide> <ide> // and wrap it <del> DebriefActionWrapper daw = new DebriefActionWrapper(theAction, null); <add> DebriefActionWrapper daw = new DebriefActionWrapper(theAction, theChart.getLayers()); <ide> <ide> // and add it to the clipboard <ide> CorePlugin.run(daw);
Java
mit
5fb8c19ee3760a22d2ca05737620ecb57fb9e53c
0
tobsbot/AKGBensheim,P0T4T0x/AKGBensheim,P0T4T0x/AKGBensheim
package de.akg_bensheim.akgbensheim; import android.app.Activity; import android.os.Build; import android.os.Bundle; import android.support.v4.app.Fragment; import android.support.v4.app.LoaderManager; import android.support.v4.content.Loader; import android.support.v4.widget.SwipeRefreshLayout; import android.util.Log; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.webkit.WebView; import android.webkit.WebViewClient; import de.akg_bensheim.akgbensheim.net.ConnectionDetector; import de.akg_bensheim.akgbensheim.net.WebLoader; /** * A simple {@link Fragment} subclass. * Use the {@link SubstituteFragment#newInstance} factory method to * create an instance of this fragment. */ public class SubstituteFragment extends Fragment implements SwipeRefreshLayout.OnRefreshListener, LoaderManager.LoaderCallbacks<WebLoader.Response> { protected static final String URL_FIXED= "http://www.akg-bensheim.de/akgweb2011/content/Vertretung/w/%02d/w00000.htm"; //Key to the number of the week to load private static final String ARG_WEEK = "int:week"; private int week; private WebView webView; private boolean webViewAvailable; private SwipeRefreshLayout swipeRefreshLayout; /** * Use this factory method to create a new instance of * this fragment using the provided parameters. * @param week The week number to load * @return A new instance of fragment SubstituteFragment. */ public static SubstituteFragment newInstance(int week) { SubstituteFragment fragment = new SubstituteFragment(); Bundle args = new Bundle(); args.putInt(ARG_WEEK, week); fragment.setArguments(args); return fragment; } /** * Empty constructor. Use {@link #newInstance(int)} * for initialization. */ public SubstituteFragment() { // Required empty public constructor super(); } @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); if (getArguments() != null) { week = getArguments().getInt(ARG_WEEK); } } @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { if (webView != null) { webView.destroy(); } // Inflate the layout for this fragment swipeRefreshLayout = (SwipeRefreshLayout) inflater.inflate(R.layout.fragment_supply, container, false); swipeRefreshLayout.setColorSchemeResources(R.color.primary, R.color.accent, R.color.primaryDark); swipeRefreshLayout.setOnRefreshListener(this); webView = (WebView) swipeRefreshLayout.findViewById(R.id.webview); webView.setWebViewClient(new WebViewClient(){ //TODO: Add listeners and stuff }); webView.getSettings().setJavaScriptEnabled(false); webView.getSettings().setUseWideViewPort(true); webView.getSettings().setLoadWithOverviewMode(true); webView.getSettings().setSupportZoom(true); webView.getSettings().setBuiltInZoomControls(true); webViewAvailable = true; return swipeRefreshLayout; } @Override public void onAttach(Activity activity) { super.onAttach(activity); } @Override public void onPause() { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) webView.onPause(); else webView.pauseTimers(); super.onPause(); } @Override public void onResume() { if(Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) webView.onResume(); else webView.resumeTimers(); super.onResume(); } @Override public void onDetach() { super.onDetach(); } @Override public void onDestroyView() { webViewAvailable = false; super.onDestroyView(); } @Override public void onDestroy() { if (webView != null) { webView.destroy(); webView = null; } super.onDestroy(); } public WebView 
getWebView() { return webViewAvailable ? webView : null; } @Override public void onRefresh() { if(ConnectionDetector.getInstance(getActivity().getApplicationContext()).allowedToUseConnection("")) { Bundle args = new Bundle(); args.putString("url", URL_FIXED); getLoaderManager().restartLoader(0, args, this); } else { swipeRefreshLayout.setRefreshing(false); } } @Override public Loader<WebLoader.Response> onCreateLoader(int id, Bundle args) { return new WebLoader(getActivity(), args.getString("url")); } @Override public void onLoadFinished(Loader<WebLoader.Response> loader, WebLoader.Response data) { webView.loadData(data.data, "text/html", "utf-8"); if(swipeRefreshLayout.isRefreshing()) swipeRefreshLayout.setRefreshing(false); } @Override public void onLoaderReset(Loader<WebLoader.Response> loader) { Log.d("onLoaderReset", "WebLoader for week " + week + " was reset!"); } }
app/src/main/java/de/akg_bensheim/akgbensheim/SubstituteFragment.java
package de.akg_bensheim.akgbensheim;

import android.app.Activity;
import android.os.Build;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.support.v4.content.Loader;
import android.support.v4.app.LoaderManager;
import android.support.v4.widget.SwipeRefreshLayout;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.webkit.WebView;
import android.webkit.WebViewClient;

import de.akg_bensheim.akgbensheim.net.ConnectionDetector;
import de.akg_bensheim.akgbensheim.net.WebLoader;

/**
 * A simple {@link Fragment} subclass.
 * Use the {@link SubstituteFragment#newInstance} factory method to
 * create an instance of this fragment.
 */
public class SubstituteFragment extends Fragment
        implements SwipeRefreshLayout.OnRefreshListener, LoaderManager.LoaderCallbacks<WebLoader.Response> {

    protected static final String URL_FIXED= "http://www.akg-bensheim.de/akgweb2011/content/Vertretung/w/%02d/w00000.htm";

    //Key to the number of the week to load
    private static final String ARG_WEEK = "int:week";

    private int week;
    private WebView webView;
    private boolean webViewAvailable;
    private SwipeRefreshLayout swipeRefreshLayout;

    /**
     * Use this factory method to create a new instance of
     * this fragment using the provided parameters.
     * @param week The week number to load
     * @return A new instance of fragment SubstituteFragment.
     */
    public static SubstituteFragment newInstance(int week) {
        SubstituteFragment fragment = new SubstituteFragment();
        Bundle args = new Bundle();
        args.putInt(ARG_WEEK, week);
        fragment.setArguments(args);
        return fragment;
    }

    /**
     * Empty constructor. Use {@link #newInstance(int)}
     * for initialization.
     */
    public SubstituteFragment() {
        // Required empty public constructor
        super();
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        if (getArguments() != null) {
            week = getArguments().getInt(ARG_WEEK);
        }
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        if (webView != null) {
            webView.destroy();
        }

        // Inflate the layout for this fragment
        swipeRefreshLayout = (SwipeRefreshLayout) inflater.inflate(R.layout.fragment_supply, container, false);
        swipeRefreshLayout.setOnRefreshListener(this);

        webView = (WebView) swipeRefreshLayout.findViewById(R.id.webview);
        webView.setWebViewClient(new WebViewClient(){

        });
        webView.getSettings().setJavaScriptEnabled(false);
        webView.getSettings().setUseWideViewPort(true);
        webView.getSettings().setLoadWithOverviewMode(true);
        webView.getSettings().setSupportZoom(true);
        webView.getSettings().setBuiltInZoomControls(true);
        webViewAvailable = true;
        return swipeRefreshLayout;
    }

    @Override
    public void onAttach(Activity activity) {
        super.onAttach(activity);
    }

    @Override
    public void onPause() {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB)
            webView.onPause();
        else
            webView.pauseTimers();
        super.onPause();
    }

    @Override
    public void onResume() {
        if(Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB)
            webView.onResume();
        else
            webView.resumeTimers();
        super.onResume();
    }

    @Override
    public void onDetach() {
        super.onDetach();
    }

    @Override
    public void onDestroyView() {
        webViewAvailable = false;
        super.onDestroyView();
    }

    @Override
    public void onDestroy() {
        if (webView != null) {
            webView.destroy();
            webView = null;
        }
        super.onDestroy();
    }

    public WebView getWebView() {
        return webViewAvailable ? webView : null;
    }

    @Override
    public void onRefresh() {
        if(ConnectionDetector.getInstance(getActivity().getApplicationContext()).allowedToUseConnection("")) {
            Bundle args = new Bundle();
            args.putString("url", URL_FIXED);
            getLoaderManager().restartLoader(0, args, this);
        } else {
            swipeRefreshLayout.setRefreshing(false);
        }
    }

    @Override
    public Loader<WebLoader.Response> onCreateLoader(int id, Bundle args) {
        return new WebLoader(getActivity(), args.getString("url"));
    }

    @Override
    public void onLoadFinished(Loader<WebLoader.Response> loader, WebLoader.Response data) {
        webView.loadData(data.data, "text/html", "utf-8");
        if(swipeRefreshLayout.isRefreshing())
            swipeRefreshLayout.setRefreshing(false);
    }

    @Override
    public void onLoaderReset(Loader<WebLoader.Response> loader) {
        Log.d("onLoaderReset", "WebLoader for week " + week + " was reset!");
    }
}
minor spinner fixes
app/src/main/java/de/akg_bensheim/akgbensheim/SubstituteFragment.java
minor spinner fixes
<ide><path>app/src/main/java/de/akg_bensheim/akgbensheim/SubstituteFragment.java
<ide> import android.os.Build;
<ide> import android.os.Bundle;
<ide> import android.support.v4.app.Fragment;
<add>import android.support.v4.app.LoaderManager;
<ide> import android.support.v4.content.Loader;
<del>import android.support.v4.app.LoaderManager;
<ide> import android.support.v4.widget.SwipeRefreshLayout;
<ide> import android.util.Log;
<ide> import android.view.LayoutInflater;
<ide>
<ide>         // Inflate the layout for this fragment
<ide>         swipeRefreshLayout = (SwipeRefreshLayout) inflater.inflate(R.layout.fragment_supply, container, false);
<add>        swipeRefreshLayout.setColorSchemeResources(R.color.primary, R.color.accent, R.color.primaryDark);
<ide>         swipeRefreshLayout.setOnRefreshListener(this);
<ide>
<ide>         webView = (WebView) swipeRefreshLayout.findViewById(R.id.webview);
<ide>         webView.setWebViewClient(new WebViewClient(){
<del>
<add>            //TODO: Add listeners and stuff
<ide>         });
<ide>         webView.getSettings().setJavaScriptEnabled(false);
<ide>         webView.getSettings().setUseWideViewPort(true);
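For context on the record above: the SubstituteFragment Javadoc prescribes the newInstance(int) factory so the requested week number is kept in the fragment's arguments. As a rough, hypothetical usage sketch (the host activity name and the week value are invented here, assuming only the support-v4 library the file already imports), attaching the fragment could look like this:

import android.os.Bundle;
import android.support.v4.app.FragmentActivity;

public class SubstituteHostActivity extends FragmentActivity {
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        if (savedInstanceState == null) {
            // Arbitrary example week; android.R.id.content avoids needing an extra layout resource.
            getSupportFragmentManager()
                    .beginTransaction()
                    .add(android.R.id.content, SubstituteFragment.newInstance(12))
                    .commit();
        }
    }
}

The fragment then drives its own refresh cycle: onRefresh() restarts the loader, and onLoadFinished() feeds the loaded HTML into the WebView and stops the SwipeRefreshLayout spinner, which is the code path the added color-scheme line in the diff decorates.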
Java
mpl-2.0
23b53e04ae3a99b6e6d736229eddbfdf149c8950
0
JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core
/************************************************************************* * * $RCSfile: PropertySet.java,v $ * * $Revision: 1.4 $ * * last change: $Author: jl $ $Date: 2002-05-02 11:02:18 $ * * The Contents of this file are made available subject to the terms of * either of the following licenses * * - GNU Lesser General Public License Version 2.1 * - Sun Industry Standards Source License Version 1.1 * * Sun Microsystems Inc., October, 2000 * * GNU Lesser General Public License Version 2.1 * ============================================= * Copyright 2000 by Sun Microsystems, Inc. * 901 San Antonio Road, Palo Alto, CA 94303, USA * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License version 2.1, as published by the Free Software Foundation. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, * MA 02111-1307 USA * * * Sun Industry Standards Source License Version 1.1 * ================================================= * The contents of this file are subject to the Sun Industry Standards * Source License Version 1.1 (the "License"); You may not use this file * except in compliance with the License. You may obtain a copy of the * License at http://www.openoffice.org/license.html. * * Software provided under this License is provided on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, * WITHOUT LIMITATION, WARRANTIES THAT THE SOFTWARE IS FREE OF DEFECTS, * MERCHANTABLE, FIT FOR A PARTICULAR PURPOSE, OR NON-INFRINGING. * See the License for the specific provisions governing your rights and * obligations concerning the Software. * * The Initial Developer of the Original Code is: Sun Microsystems, Inc. * * Copyright: 2000 by Sun Microsystems, Inc. * * All Rights Reserved. 
* * Contributor(s): _______________________________________ * * ************************************************************************/ package com.sun.star.lib.uno.helper; import com.sun.star.uno.Type; import com.sun.star.lang.EventObject; import com.sun.star.lang.WrappedTargetException; import com.sun.star.uno.TypeClass; import com.sun.star.uno.AnyConverter; import com.sun.star.uno.XInterface; import com.sun.star.uno.Any; import com.sun.star.uno.UnoRuntime; import com.sun.star.beans.XPropertyChangeListener; import com.sun.star.beans.XVetoableChangeListener; import com.sun.star.beans.PropertyChangeEvent; import com.sun.star.beans.XPropertySet; import com.sun.star.beans.Property; import com.sun.star.beans.PropertyAttribute; import com.sun.star.beans.UnknownPropertyException; import com.sun.star.beans.XPropertiesChangeListener; import com.sun.star.beans.XPropertySetInfo; import com.sun.star.beans.XFastPropertySet; import com.sun.star.beans.PropertyVetoException; import com.sun.star.beans.XMultiPropertySet; import java.util.ArrayList; import java.util.List; import java.util.Iterator; import java.util.Collection; import java.util.HashMap; import java.lang.reflect.Field; import com.sun.star.lang.DisposedException; /** This class is an implementation of the interfaces com.sun.star.beans.XPropertySet, * com.sun.star.beans.XFastPropertySet and com.sun.star.beans.XMultiPropertySet. This * class has to be inherited to be used. The values of properties are stored in member * variables of the inheriting class. By overriding the methods * {@link #convertPropertyValue convertPropertyValue}, * {@link #setPropertyValueNoBroadcast setPropertyValueNoBroadcast} and * {@link #getPropertyValue(Property)} one can determine how * property values are stored. * When using the supplied implementations of this class then the member variables which * hold property values have to be declared in the class which inherits last in the inheriting * chain and they have to be public<p> * Properties have to be registered by one of the registerProperty methods. They take among other * arguments an Object named <em>id</em> which has to be a String that represents the name of * the member variable. The registering has to occur in the constructor of the inheriting class. * It is no allowed to add or change properties later on.<p> * Example: * <pre> * public class Foo extends PropertySet * { * protected int intProp; * * public Foo() * { * registerProperty("PropertyA", 0, new Type(int.class), (short)0, "intProp"); * } * } * * </pre> */ public class PropertySet extends ComponentBase implements XPropertySet, XFastPropertySet, XMultiPropertySet { private HashMap _nameToPropertyMap; private HashMap _handleToPropertyMap; private HashMap _propertyToIdMap; private Property[] arProperties; private int lastHandle= 1; protected XPropertySetInfo propertySetInfo; protected MultiTypeInterfaceContainer aBoundLC= new MultiTypeInterfaceContainer(); protected MultiTypeInterfaceContainer aVetoableLC= new MultiTypeInterfaceContainer(); public PropertySet() { super(); initMappings(); } /** Registers a property with this helper class and associates the argument <em>id</em> with it. * <em>id</em> is used to identify the storage of the property value. 
How property values are stored * and retrieved is determined by the methods {@link #convertPropertyValue convertPropertyValue}, * {@link #setPropertyValueNoBroadcast setPropertyValueNoBroadcast} and {@link #getPropertyValue(Property) getPropertyValue} * These methods expect <em>id</em> to be a java.lang.String which represents the name of a member variable * which holds the property value. * Only properties which are registered can be accessed. Registration has to occur during * initialization of the inheriting class (i.e. within the contructor). * @param prop The property to be registered. * @param id Identifies the properties storage. * @see #getPropertyId */ protected void registerProperty(Property prop, Object id) { putProperty(prop); assignPropertyId(prop, id); } /** Registers a property with this helper class and associates the argument id with it. * It does the same as {@link #registerProperty(Property, Object)}. The first four * arguments are used to construct a Property object. * Registration has to occur during * initialization of the inheriting class (i.e. within the contructor) * @param name The property's name (Property.Name). * @param handle The property's handle (Property.Handle). * @param Type The property's type (Property.Type). * @param attributes The property's attributes (Property.Attributes). * @param id Identifies the property's storage. */ protected void registerProperty(String name, int handle, Type type, short attributes, Object id) { Property p= new Property(name, handle, type, attributes); registerProperty(p, id); } /** Registers a property with this class and associates the argument id with it. * It does the same as {@link #registerProperty(Property, Object)}. The first three * arguments are used to construct a Property object. The value for the Property.Handle * is generated and does not have to be specified here. Use this method for registering * a property if you do not care about the Property's handles. * Registration has to occur during * initialization of the inheriting class (i.e. within the contructor). * @param name The property's name (Property.Name). * @param handle The property's handle (Property.Handle). * @param Type The property's type (Property.Type). * @param attributes The property's attributes (Property.Attributes). * @param id Identifies the property's storage. */ protected void registerProperty(String name, Type type, short attributes, Object id) { Property p= new Property(name, lastHandle++, type, attributes); registerProperty(p, id); } /** Registers a property with this class. This method expects that property values * are stored in member variables as is the case if the methods convertPropertyValue, * setPropertyValueNoBroadcast and getPropertyValue(Property) are not overridden. * It is presumed that the type of the member variable * corresponds Property.Type. For example, if the TypeClass of Property.Type is to be * a TypeClass.SHORT then the member must be a short or java.lang.Short. * The handle for the property is generated.<br> * If there is no member with the specified name or if the member has an incompatible type * then a com.sun.star.uno.RuntimeException is thrown. * @param propertyName The name of the property. * @param memberName The name of the member variable that holds the value of the property. * @param attributes The property attributes. 
*/ protected void registerProperty(String propertyName, String memberName, short attributes) { Field propField= null; try { propField= getClass().getDeclaredField(memberName); } catch (NoSuchFieldException e) { throw new com.sun.star.uno.RuntimeException("there is no member variable: " + memberName); } Class cl= propField.getType(); Type t= new Type(cl); if (t.getTypeClass() != TypeClass.UNKNOWN) { Property p= new Property(propertyName, lastHandle++, t, attributes); registerProperty(p,memberName); } else throw new com.sun.star.uno.RuntimeException("the member has an unknown type: " + memberName); } /** Registers a property with this class. * It is presumed that the name of property is equal to the name of the member variable * that holds the property value. * @param propertyName The name of the property and the member variable that holds the property's value. * @param attributes The property attributes. * @see #registerProperty(String, String, short) */ protected void registerProperty(String propertyName, short attributes) { registerProperty(propertyName, propertyName, attributes); } /** Returns the Property object for a given property name or null if that property does * not exists (i.e. it has not been registered). Override this method * if you want to implement your own mapping from property names to Property objects. * Then you also have to override {@link #initMappings}, {@link #getProperties()} and * {@link #putProperty(Property)}. * @param propertyName The name of the property (Property.Name) * @return The Property object with the name <em>propertyName</em>. */ protected Property getProperty(String propertyName) { return (Property) _nameToPropertyMap.get(propertyName); } /** Returns the Property object with a handle (Property.Handle) as specified by the argument * <em>nHandle</em>. The method returns null if there is no such property (i.e. it has not * been registered). Override this method if you want to implement your own mapping from handles * to Property objects. Then you also have to override {@link #initMappings}, {@link #putProperty(Property)}. * @param nHandle The handle of the property (Property.Handle). * @return The Property object with the handle <em>nHandle</em> */ protected Property getPropertyByHandle(int nHandle) { return (Property) _handleToPropertyMap.get(new Integer(nHandle)); } /** Returns an array of all Property objects or an array of length null if there * are no properties. Override this method if you want to implement your own mapping from names * to Property objects. Then you also have to override {@link #initMappings}, {@link #getProperty(String)} and * {@link #putProperty}. * @return Array of all Property objects. */ protected Property[] getProperties() { if (arProperties == null) { Collection values= _nameToPropertyMap.values(); arProperties= (Property[]) values.toArray(new Property[_nameToPropertyMap.size()]); } return arProperties; } /** Stores a Property object so that it can be retrieved subsequently by * {@link #getProperty(String)},{@link #getProperties()},{@link #getPropertyByHandle(int)}. * Override this method if you want to implement your own mapping from handles * to Property objects and names to Property objects. Then you also need to override {@link #initMappings}, * {@link #getProperty(String)},{@link #getProperties()},{@link #getPropertyByHandle(int)}. * @param prop The Property object that is to be stored. 
*/ protected void putProperty(Property prop) { _nameToPropertyMap.put(prop.Name, prop); if (prop.Handle != -1) _handleToPropertyMap.put(new Integer(prop.Handle), prop); } /** Assigns an identifyer object to a Property object so that the identifyer * can be obtained by {@link #getPropertyId getPropertyId} later on. The identifyer * is used to specify a certain storage for the property's value. If you do not * override {@link #setPropertyValueNoBroadcast() setPropertyValueNoBroadcast} or {@link #getPropertyValue(Property)} * then the argument <em>id</em> has to be a java.lang.String that equals the name of * the member variable that holds the Property's value. * Override this method if you want to implement your own mapping from Property objects to ids or * if you need ids of a type other then java.lang.String. * Then you also need to override {@link #initMappings initMappings} and {@link #getPropertyId getPropertyId}. * @param prop The Property object that is being assigned an id. * @param id The object which identifies the storage used for the property's value. * @see #registerProperty(Property, Object) */ protected void assignPropertyId(Property prop, Object id) { if (id instanceof String && ((String) id).equals("") == false) _propertyToIdMap.put(prop, id); } /** Returns the identifyer object for a certain Property. The object must have been * previously assigned to the Property object by {@link #assignPropertyId assignPropertyId}. * Override this method if you want to implement your own mapping from Property objects to ids. * Then you also need to override {@link #initMappings initMappings} and {@link #assignPropertyId assignPropertyId}. * @param prop The property for which the id is to be retrieved. * @return The id object that identifies the storage used for the property's value. * @see #registerProperty(Property, Object) */ protected Object getPropertyId(Property prop) { return _propertyToIdMap.get(prop); } /** Initializes data structures used for mappings of property names to property object, * property handles to property objects and property objects to id objects. * Override this method if you want to implement your own mappings. Then you also need to * override {@link #putProperty putProperty},{@link #getProperty getProperty}, {@link #getPropertyByHandle}, * {@link #assignPropertyId assignPropertyId} and {@link #getPropertyId getPropertyId}. */ protected void initMappings() { _nameToPropertyMap= new HashMap(); _handleToPropertyMap= new HashMap(); _propertyToIdMap= new HashMap(); } /** Makes sure that listeners which are kept in aBoundLC (XPropertyChangeListener) and aVetoableLC * (XVetoableChangeListener) receive a disposing call. Also those listeners are relesased. */ protected void postDisposing() { // Create an event with this as sender EventObject aEvt= new EventObject(this); // inform all listeners to reelease this object aBoundLC.disposeAndClear(aEvt); aVetoableLC.disposeAndClear(aEvt); } //XPropertySet ---------------------------------------------------- synchronized public void addPropertyChangeListener(String str, XPropertyChangeListener xPropertyChangeListener) throws UnknownPropertyException, WrappedTargetException { // only add listeners if you are not disposed if (! bInDispose && ! 
bDisposed) { if (str.length() > 0) { Property prop= getProperty(str); if (prop == null) throw new UnknownPropertyException("Property " + str + " is unknown"); // Add listener for a certain property if ((prop.Attributes & PropertyAttribute.BOUND) > 0) aBoundLC.addInterface(str, xPropertyChangeListener); else //ignore silently return; } else // Add listener for all properties listenerContainer.addInterface(XPropertyChangeListener.class, xPropertyChangeListener); } } //XPropertySet ---------------------------------------------------- synchronized public void addVetoableChangeListener(String str, com.sun.star.beans.XVetoableChangeListener xVetoableChangeListener) throws com.sun.star.beans.UnknownPropertyException, com.sun.star.lang.WrappedTargetException { // only add listeners if you are not disposed if (! bInDispose && ! bDisposed) { if (str.length() > 0) { Property prop= getProperty(str); if (prop == null) throw new UnknownPropertyException("Property " + str + " is unknown"); // Add listener for a certain property if ((prop.Attributes & PropertyAttribute.CONSTRAINED) > 0) aVetoableLC.addInterface(str, xVetoableChangeListener); else //ignore silently return; } else // Add listener for all properties listenerContainer.addInterface(XVetoableChangeListener.class, xVetoableChangeListener); } } //XPropertySet ---------------------------------------------------- public com.sun.star.beans.XPropertySetInfo getPropertySetInfo() { if (propertySetInfo == null) { synchronized (this) { if (propertySetInfo == null) propertySetInfo= new PropertySetInfo(); } } return propertySetInfo; } //XPropertySet ---------------------------------------------------- public Object getPropertyValue(String name) throws UnknownPropertyException, WrappedTargetException { Object ret= null; if (bInDispose || bDisposed) throw new com.sun.star.lang.DisposedException("The component has been disposed already"); Property prop= getProperty(name); if (prop == null) throw new UnknownPropertyException("The property " + name + " is unknown"); if ((prop.Attributes & PropertyAttribute.READONLY) == PropertyAttribute.READONLY) return null; synchronized (this) { ret= getPropertyValue(prop); } return ret; } //XPropertySet ---------------------------------------------------- synchronized public void removePropertyChangeListener(String propName, XPropertyChangeListener listener) throws UnknownPropertyException, WrappedTargetException { // all listeners are automaticly released in a dispose call if (!bInDispose && !bDisposed) { if (propName.length() > 0) { Property prop = getProperty(propName); if (prop == null) throw new UnknownPropertyException("Property " + propName + " is unknown"); aBoundLC.removeInterface(propName, listener); } else listenerContainer.removeInterface(XPropertyChangeListener.class, listener); } } //XPropertySet ---------------------------------------------------- synchronized public void removeVetoableChangeListener(String propName, XVetoableChangeListener listener) throws UnknownPropertyException, WrappedTargetException {// all listeners are automaticly released in a dispose call if (!bInDispose && !bDisposed) { if (propName.length() > 0) { Property prop = getProperty(propName); if (prop == null) throw new UnknownPropertyException("Property " + propName + " is unknown"); aVetoableLC.removeInterface(propName, listener); } else listenerContainer.removeInterface(XVetoableChangeListener.class, listener); } } //XPropertySet ---------------------------------------------------- /** Sets the value of a property. 
* The idl description for this interfaces, stipulates that the argument value is an Any. Since a java.lang.Object * reference has the same meaning as an Any this function accepts * java anys (com.sun.star.uno.Any) and all other appropriate objects as arguments. The value argument can be one * of these: * <ul> * <li>java.lang.Boolean</li> * <li>java.lang.Character</li> * <li>java.lang.Byte</li> * <li>java.lang.Short</li> * <li>java.lang.Integer</li> * <li>java.lang.Long</li> * <li>java.lang.Float</li> * <li>java.lang.Double</li> * <li>java.lang.String</li> * <li>com.sun.star.uno.Type</li> * <li><em>objects which implement UNO interfaces</em></li> * <li><em>arrays which contain elements of the types above</em></li> * <li>com.sun.star.uno.Any containing an instance of one of the above types</li> * </ul> * * Properties can have the attribute com.sun.star.beans.PropertyAttribute.MAYBEVOID, which means that the value * (not the type) can be void. In order to assign a void value to a property one can either pass an Any which * contains a null reference or pass null directly. In bothe cases the null reference is only accepted if * the PropertyAttribute.MAYBEVOID attribute is set for the property. * * Properties which have the attribute MAYBEVOID set (Property.Attributes) can have a void value. The following * considerations presume that the Property has that attribute set. Further, when mentioning an Any's value we * actually refer to the object returned by Any.getObject. * If the argument <em>value</em> is null, or it is an Any whose value is null (but with a valid Type) * then the member variable used for storing the property's value is set to null. * Therefore those properties can only be stored in objects * and primitive types are not allowed (one can use the wrapper classes instead,e.g. java.lang.Byte) . * If a property's value is kept in a member variable of type Any and that reference is still null * then when setPropertyValue is called with * <em>value</em> = null then the member variable is assigned an Any with type void and a null value. * Or if the argument is an Any with a null value then it is assigned to the member variable. * Further, if the variable already * references an Any and setPropertyValue is called with <em>value</em> = null, then the variable is assigned * a new Any with the same type as the previously referenced Any and with a null value. * @param name The name of the property. * @param value The new value of the property. * * */ public void setPropertyValue(String name, Object value) throws UnknownPropertyException, PropertyVetoException, com.sun.star.lang.IllegalArgumentException, WrappedTargetException { Property prop= getProperty(name); if (prop == null) throw new UnknownPropertyException("Property " + name + " is unknown"); setPropertyValue(prop, value); } /** Sets the value of a property. It checks if the property's attributes (READONLY,MAYBEVOID), allow that the * new value can be set. It also causes the notification of listeners. * @param prop The property whose value is to be set. * @param value The new value for the property. 
*/ protected void setPropertyValue(Property prop, Object value) throws UnknownPropertyException, PropertyVetoException, com.sun.star.lang.IllegalArgumentException, WrappedTargetException { if ((prop.Attributes & PropertyAttribute.READONLY) == PropertyAttribute.READONLY) throw new com.sun.star.beans.PropertyVetoException(); // The value may be null only if MAYBEVOID attribute is set boolean bVoidValue= false; if (value instanceof Any) bVoidValue= ((Any) value).getObject() == null; else bVoidValue= value == null; if (bVoidValue && (prop.Attributes & PropertyAttribute.MAYBEVOID) == 0) throw new com.sun.star.lang.IllegalArgumentException("The property must have a value; the MAYBEVOID attribute is not set!"); if (bInDispose || bDisposed) throw new DisposedException("Component is already disposed"); //Check if the argument is allowed boolean bValueOk= false; if (value instanceof Any) bValueOk= checkType(((Any) value).getObject()); else bValueOk= checkType(value); if (! bValueOk) throw new com.sun.star.lang.IllegalArgumentException("No valid UNO type"); boolean bConversionOk= false; Object[] outConvertedVal= new Object[1]; Object[] outOldValue= new Object[1]; synchronized (this) { bConversionOk= convertPropertyValue(prop, outConvertedVal, outOldValue, value); } //The next step following the conversion is to set the new value of the property. Prior to this // the XVetoableChangeListener s have to be notified. if (bConversionOk) { // If the property is CONSTRAINED, then we must notify XVetoableChangeListener. The listener can throw a com.sun.star.lang.beans.PropertyVetoException which // will cause this method to return (the exception is not caught here). fire( new Property[]{prop}, outConvertedVal, outOldValue, true); synchronized (this) { setPropertyValueNoBroadcast(prop, outConvertedVal[0]); } // fire a change event (XPropertyChangeListener, PropertyAttribute.BOUND fire( new Property[]{prop}, outConvertedVal, outOldValue, false); } } /** Converts a value in a way so that its appropriate for storing as a property value, that is * {@link #setPropertyValueNoBroadcast setPropertyValueNoBroadcast} can process the value without any further * conversion. This implementation presumes that * the values are stored in member variables of the furthest inheriting class. For example, * class A inherits this class then members of class A * can hold property values. If there is a class B which inherits A then only members of B can hold * property values. The variables must be public. A property must have been registered (e.g. by * {@link #registerProperty(Property, Object)} in order for this method to work. The identifyer Object argument * used in the registerProperty methods must * be a java.lang.String that is the name of the member variable that holds the property value. * If one opts to store values differently then one may override * this method, as well as {@link #setPropertyValueNoBroadcast setPropertyValueNoBroadcast} and * {@link #getPropertyValue(Property) getPropertyValue(Property)}. * This method is always called as a result of a call to one of the setter methods, such as * {@link #setPropertyValue(String,Object) XPropertySet.setPropertyValue}, * {@link #setFastPropertyValue XFastPropertySet.setFastPropertyValue} * and {@link #setPropertyValues XMultiPropertySet.setPropertyValues}. 
* If this method fails, that is, it returns false or throws an exception, then no listeners are notified and the * property value, that was intended to be changed, remains untouched.<br /> This method does not have to deal with property attributes, such as * PropertyAttribute.READONLY or PropertyAttribute.MAYBEVOID. The processing of these attributes occurs * in the calling methods.<br /> * Only if this method returns successfully further processing, such * as listener notification and finally the modifiction of the property's value, will occur.<br /> * * The actual modification of a property's value is done by {@link #setPropertyValueNoBroadcast setPropertyValueNoBroadcast} * which is called subsequent to convertPropertyValue. *<p> * This method converts values by help of the com.sun.star.uno.AnyConverter which only does a few widening * conversions on integer types and floating point types. For example, there is the property PropA with a Type equivalent * to int.class and the * value of the property is to be stored in a member variable of type int with name intProp. Then setPropertyValue is * called: * <pre> * set.setPropertyValue( "PropA", new Byte( (byte)111)); * </pre> * At some point setPropertyValue will call convertPropertyValue and pass in the Byte object. Since we allow * that Byte values can be used with the property and know that the value is to be stored in intProp (type int) * we convert the Byte object into an Integer object which is then returned in the out-parameter <em>newVal</em>. This * conversion is actually performed by the AnyConverter. Later * the setPropertyValueNoBroadcast is called with that Integer object and the int value can be easily extracted * from the object and be assigned to the member intProp. * <p> * The method handles Any arguments the same as Object arguments. That is, the <em>setVal</em> argument can * be a java.lang.Boolean or a com.sun.star.uno.Any containing a java.lang.Boolean. Likewise, a member * containing a property value can be a com.sun.star.uno.Any or an java.lang.Object. * Then, no conversion is necessary, since they can hold all possible values. However, if * the member is an Object and <em>setVal</em> is an Any then the object contained in the any is assigned to * the member. The extra type information which exists as Type object in the Any will get lost. If this is not * intended then use an Any variable rather then an Object.<br /> * If a member is an Object or Any and the argument <em>setVal</em> is an Object, other than String or array, * then it is presumed to be an UNO object and queried for XInterface. If successful, the out-param <em>newVal</em> * returns the XInterface.<br /> * If a member is an UNO interface, then <em>setVal</em> is queried for this interface and the result is returned. * If <em>setVal</em> is null then <em>newVal</em> will be null too after return. * <p> * If a property value is stored using a primitive type the the out-parameters * <em>curVal</em> and <em>newVal</em> contain the respective wrapper class (e.g.java.lang.Byte, etc.). * curVal is used in calls to the XVetoableChangeListener and XPropertyChangeListener. * * @param property - in-param property for which the data is to be converted. * @param newVal - out-param which contains the converted value on return. * @param curVal - out-param the current value of the property. It is used in calls to the * XVetoableChangeListener and XPropertyChangeListener. * @param setVal - in-param. 
The value that is to be converted so that it matches Property and the internally used * dataformat for that property. * @return true - Conversion was successful. <em>newVal</em> contains a valid value for the property. false - * conversion failed for some reason. * @throws com.sun.star.lang.IllegalArgumentException The value provided is unfit for the property. * @throws com.sun.star.lang.WrappedTargetException - An exception occured during the conversion, that is to be made known * to the caller. */ protected boolean convertPropertyValue(Property property, Object[] newVal, Object[]curVal, Object setVal) throws com.sun.star.lang.IllegalArgumentException, WrappedTargetException { boolean ret= true; try { // get the member name String sMember= (String) getPropertyId(property); if (sMember != null) { // use reflection to obtain the field that holds the property value // Class.getDeclaredFields does not return inherited fields. One could use Class.getFields to // also get inherited fields, but only those which are public. Field propField= getClass().getDeclaredField(sMember); if (propField != null) { curVal[0]= propField.get(this); Class memberClass= propField.getType(); // MAYBEVOID: if setVal == null or it is an Any and getObject returns null, then a void value is to be set // This works only if there are no primitive types. For those we use the respective wrapper classes. // In this implementation, a null reference means void value. boolean bVoidValue= false; boolean bAnyVal= setVal instanceof Any; if (bAnyVal) bVoidValue= ((Any) setVal).getObject() == null; else bVoidValue= setVal == null; if (bVoidValue && memberClass.isPrimitive()) throw new com.sun.star.lang.IllegalArgumentException("The implementation does not support the MAYBEVOID attribute for this property"); Object convObj= null; //The member that keeps the value of the Property is an Any. It can contain all possible //types, therefore a conversion is not necessary. if (memberClass.equals(Any.class)) { if (bAnyVal) //parameter setVal is also an Any and can be used without further processing convObj= setVal; else { // Parameter setVal is not an Any. We need to construct an Any that contains // the argument setVal. // If setVal is an interface implementation then, we cannot constuct the // Any with setVal.getClass(), because the Any.Type._typeClass would be TypeClass.UNKNOWN. // We try to get an XInterface of setVal and set an XInterface type. 
if (setVal instanceof XInterface) { XInterface xint= (XInterface) UnoRuntime.queryInterface(XInterface.class, setVal); if (xint != null) convObj= new Any(new Type(XInterface.class), xint); } // The member is an any, and the past in argument was null reference (MAYBEVOID is set) else if (setVal == null) { // if the any member is still null we create a void any if (curVal[0] == null) convObj= new Any(new Type(), null); else { //otherwise we create an Any with the same type as a value of null; convObj= new Any( ((Any)curVal[0]).getType(), null); } } else convObj= new Any(new Type(setVal.getClass()), setVal); } } else convObj= convert(memberClass, setVal); newVal[0]= convObj; } } } catch (java.lang.NoSuchFieldException e) { throw new WrappedTargetException("Field does not exist", this, e); } catch (java.lang.IllegalAccessException e) { throw new WrappedTargetException("", this ,e); } return ret; } private boolean checkType(Object obj) { if (obj == null || obj instanceof Boolean || obj instanceof Character || obj instanceof Number || obj instanceof String || obj instanceof XInterface || obj instanceof Type || obj.getClass().isArray()) return true; return false; } // Param object can be an Any or other object. If obj is null then the return value is null private Object convert( Class cl, Object obj) throws com.sun.star.lang.IllegalArgumentException { Object retVal= null; //The member that keeps the value of the Property is an Object.Objects are similar to Anys in that they can // hold all types. if (obj == null || (obj instanceof Any && ((Any) obj).getObject() == null)) retVal= null; else if(cl.equals(Object.class)) { if (obj instanceof Any) obj= ((Any) obj).getObject(); retVal= obj; } else if(cl.equals(boolean.class)) retVal= new Boolean(AnyConverter.toBoolean(obj)); else if (cl.equals(char.class)) retVal= new Character(AnyConverter.toChar(obj)); else if (cl.equals(byte.class)) retVal= new Byte(AnyConverter.toByte(obj)); else if (cl.equals(short.class)) retVal= new Short(AnyConverter.toShort(obj)); else if (cl.equals(int.class)) retVal= new Integer(AnyConverter.toInt(obj)); else if (cl.equals(long.class)) retVal= new Long(AnyConverter.toLong(obj)); else if (cl.equals(float.class)) retVal= new Float(AnyConverter.toFloat(obj)); else if (cl.equals(double.class)) retVal= new Double(AnyConverter.toDouble(obj)); else if (cl.equals(String.class)) retVal= AnyConverter.toString(obj); else if (cl.isArray()) retVal= AnyConverter.toArray(obj); else if (cl.equals(Type.class)) retVal= AnyConverter.toType(obj); else if (cl.equals(Boolean.class)) retVal= new Boolean(AnyConverter.toBoolean(obj)); else if (cl.equals(Character.class)) retVal= new Character(AnyConverter.toChar(obj)); else if (cl.equals(Byte.class)) retVal= new Byte(AnyConverter.toByte(obj)); else if (cl.equals(Short.class)) retVal= new Short(AnyConverter.toShort(obj)); else if (cl.equals(Integer.class)) retVal= new Integer(AnyConverter.toInt(obj)); else if (cl.equals(Long.class)) retVal= new Long(AnyConverter.toLong(obj)); else if (cl.equals(Float.class)) retVal= new Float(AnyConverter.toFloat(obj)); else if (cl.equals(Double.class)) retVal= new Double(AnyConverter.toDouble(obj)); else if (XInterface.class.isAssignableFrom(cl)) retVal= AnyConverter.toObject(new Type(cl), obj); else throw new com.sun.star.lang.IllegalArgumentException("Could not convert the argument"); return retVal; } /** Sets the value of a property. 
In this implementation property values are stored in member variables * (see {@link #convertPropertyValue convertPropertyValue} Notification of property listeners * does not occur in this method. By overriding this method one can take full control about how property values * are stored. But then, the {@link #convertPropertyValue convertPropertyValue} and * {@link getPropertyValue(Property)} must be overridden too. * * A Property with the MAYBEVOID attribute set, is stored as null value. Therefore the member variable must be * an Object in order to make use of the property attribute. An exception is Any. The Any variable can be initially null, but * once it is set the reference will not become null again. If the value is to be set to * void then a new Any will be stored * with a valid type but without a value (i.e. Any.getObject returns null). * If a property has the READONLY attribute set, and one of the setter methods, such as setPropertyValue, has been * called, then this method is not going to be called. * @param property the property for which the new value is set * @param value the new value for the property. * @throws com.sun.star.lang.WrappedTargetException An exception, which has to be made known to the caller, * occured during the setting of the value. */ protected void setPropertyValueNoBroadcast(Property property, Object newVal) throws WrappedTargetException { try { // get the member name String sMember= (String) getPropertyId(property); if (sMember != null) { // use reflection to obtain the field that holds the property value // Class.getDeclaredFields does not return inherited fields. One could use Class.getFields to // also get inherited fields, but only those which are public. Field propField= getClass().getDeclaredField(sMember); if (propField != null) propField.set(this, newVal); } } catch(java.lang.Exception e) { throw new WrappedTargetException("PropertySet.setPropertyValueNoBroadcast", this, e); } } /** Retrieves the value of a property. This implementation presumes that the values are stored in member variables * of the furthest inheriting class (see {@link #convertPropertyValue convertPropertyValue}) and that the * variables are public. The property must have * been registered, for example by {@link #registerProperty(Property, Object)}. The identifyer Object argument * must have been a java.lang.String which was the name of the member variable holding the property value. * When properties are to be stored differently one has to override this method as well as * {@link #convertPropertyValue} and {@link #setPropertyValueNoBroadcast}. <br> * If a value is stored in a variable of a primitive type then this method returns an instance of the respective * wrapper class (e.g. java.lang.Boolean). * @param property The property for which the value is to be retrieved. * @return The value of the property. */ protected Object getPropertyValue(Property property) throws com.sun.star.lang.WrappedTargetException { Object ret= null; try { // get the member name String sMember= (String) getPropertyId(property); if (sMember != null) { // use reflection to obtain the field that holds the property value // Class.getDeclaredFields does not return inherited fields. One could use Class.getFields to // also get inherited fields, but only those which are public. 
Field propField= getClass().getDeclaredField(sMember); if (propField != null) ret= propField.get(this); } } catch(java.lang.Exception e) { throw new WrappedTargetException("PropertySet.setPropertyValue_NoBroadcast", this, e); } return ret; } /** * This method fires events to XPropertyChangeListener,XVetoableChangeListener and * XPropertiesChangeListener event sinks. * To distinguish what listeners are to be called the argument <em>bVetoable</em> is to be set to true if * a XVetoableChangeListener is meant. For XPropertyChangeListener and XPropertiesChangeListener * it is to be set to false. * * @param properties Properties wich will be or have been affected. * @param newValues the new values of the properties. * @param oldValues the old values of the properties. * @param bVetoable true means fire to VetoableChangeListener, false means fire to * XPropertyChangedListener and XMultiPropertyChangedListener. */ protected void fire( Property[] properties, Object[] newValues, Object[] oldValues, boolean bVetoable ) throws PropertyVetoException { // Only fire, if one or more properties changed int nNumProps= properties.length; if (nNumProps > 0) { PropertyChangeEvent[] arEvts= new PropertyChangeEvent[nNumProps]; int nAffectedProps= 0; // Loop over all changed properties to fill the event struct for (int i= 0; i < nNumProps; i++) { if ((bVetoable && (properties[i].Attributes & PropertyAttribute.CONSTRAINED) > 0) || (!bVetoable && (properties[i].Attributes & PropertyAttribute.BOUND) > 0)) { arEvts[i]= new PropertyChangeEvent(this, properties[i].Name, false, properties[i].Handle, oldValues[i], newValues[i]); nAffectedProps++; } } // fire the events for all changed properties for (int i= 0; i < nAffectedProps; i++) { // get the listener container for the property name InterfaceContainer lc= null; if (bVetoable) lc= aVetoableLC.getContainer(arEvts[i].PropertyName); else lc= aBoundLC.getContainer(arEvts[i].PropertyName); if (lc != null) { Iterator it= lc.iterator(); while( it.hasNext()) { Object listener= it.next(); if (bVetoable) ((XVetoableChangeListener) listener).vetoableChange(arEvts[i]); else ((XPropertyChangeListener) listener).propertyChange(arEvts[i]); } } // broadcast to all listeners with "" property name if(bVetoable) lc= listenerContainer.getContainer(XVetoableChangeListener.class); else lc= listenerContainer.getContainer(XPropertyChangeListener.class); if(lc != null) { Iterator it= lc.iterator(); while(it.hasNext() ) { Object listener= it.next(); if( bVetoable ) // fire change Events? 
((XVetoableChangeListener) listener).vetoableChange(arEvts[i]); else ((XPropertyChangeListener) listener).propertyChange(arEvts[i]); } } } // fire at XPropertiesChangeListeners // if nAffectedProps == 0 then there are no BOUND properties if (!bVetoable && nAffectedProps > 0) { PropertyChangeEvent[] arReduced= new PropertyChangeEvent[nAffectedProps]; System.arraycopy(arEvts, 0, arReduced, 0, nAffectedProps); InterfaceContainer lc= listenerContainer.getContainer(XPropertiesChangeListener.class); if (lc != null) { Iterator it= lc.iterator(); while (it.hasNext()) { XPropertiesChangeListener listener = (XPropertiesChangeListener) it.next(); // fire the hole event sequence to the XPropertiesChangeListener's listener.propertiesChange( arEvts ); } } } } } // XFastPropertySet-------------------------------------------------------------------------------- public void setFastPropertyValue(int nHandle, Object aValue ) throws UnknownPropertyException, PropertyVetoException, com.sun.star.lang.IllegalArgumentException, WrappedTargetException { Property prop= getPropertyByHandle(nHandle); if (prop == null) throw new UnknownPropertyException(" The property with handle : " + nHandle +" is unknown"); setPropertyValue(prop, aValue); } // XFastPropertySet -------------------------------------------------------------------------------- public Object getFastPropertyValue(int nHandle ) throws UnknownPropertyException, WrappedTargetException { Property prop= getPropertyByHandle(nHandle); if (prop == null) throw new UnknownPropertyException("The property with handle : " + nHandle + " is unknown"); return getPropertyValue(prop); } // XMultiPropertySet ----------------------------------------------------------------------------------- public void addPropertiesChangeListener(String[] propNames, XPropertiesChangeListener listener) { listenerContainer.addInterface(XPropertiesChangeListener.class, listener); } // XMultiPropertySet ----------------------------------------------------------------------------------- public void firePropertiesChangeEvent(String[] propNames, XPropertiesChangeListener listener) { // Build the events. PropertyChangeEvent[] arEvents= new PropertyChangeEvent[propNames.length]; int eventCount= 0; // get a snapshot of the current property values synchronized (this) { for (int i= 0; i < propNames.length; i++) { Property prop= getProperty(propNames[i]); if (prop != null) { Object value= null; try { value= getPropertyValue(prop); } catch(WrappedTargetException e) { continue; } arEvents[eventCount]= new PropertyChangeEvent(this, prop.Name, false, prop.Handle, value, value); eventCount++; } } } // fire events from unsynchronized section so as to prevent deadlocks if (eventCount > 0) { // Reallocate the array of the events if necessary if (arEvents.length != eventCount) { PropertyChangeEvent[] arPropsTmp= new PropertyChangeEvent[eventCount]; System.arraycopy(arEvents, 0, arPropsTmp, 0, eventCount); arEvents= arPropsTmp; } listener.propertiesChange(arEvents); } } // XMultiPropertySet ----------------------------------------------------------------------------------- /** If a value for a property could not be retrieved then the respective element in the returned * array has the value null. 
*/ public Object[] getPropertyValues(String[] propNames) { Object[] arValues= new Object[propNames.length]; synchronized (this) { for (int i= 0; i < propNames.length; i++) { Object value= null; try { value= getPropertyValue(propNames[i]); } catch (Exception e) { } arValues[i]= value; } } return arValues; } // XMultiPropertySet ----------------------------------------------------------------------------------- public void removePropertiesChangeListener(XPropertiesChangeListener xPropertiesChangeListener) { listenerContainer.removeInterface(XPropertiesChangeListener.class, xPropertiesChangeListener); } // XMultiPropertySet ----------------------------------------------------------------------------------- /** If the array of property names containes an unknown property then it will be ignored. */ public void setPropertyValues(String[] propNames, Object[] values) throws PropertyVetoException, com.sun.star.lang.IllegalArgumentException, com.sun.star.lang.WrappedTargetException { for (int i= 0; i < propNames.length; i++) { try { setPropertyValue(propNames[i], values[i]); } catch (UnknownPropertyException e) { continue; } } } private class PropertySetInfo implements XPropertySetInfo { public com.sun.star.beans.Property[] getProperties() { return PropertySet.this.getProperties(); } public com.sun.star.beans.Property getPropertyByName(String name) throws UnknownPropertyException { return getProperty(name); } public boolean hasPropertyByName(String name) { return getProperty(name) != null; } } }
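Since the Javadoc in this record spells out the registration contract (register every property in the constructor, keep its value in a public member named by the id argument, and let the helper reach it via reflection), here is a compact subclass sketch modelled on the record's own Foo example. It uses only methods defined in the file above; the class and property names are invented for illustration:

import com.sun.star.beans.PropertyAttribute;
import com.sun.star.lib.uno.helper.PropertySet;
import com.sun.star.uno.Type;

public class ExamplePropertySet extends PropertySet {
    // Members must be public so the reflection-based default storage can reach them.
    public int intProp;
    public String textProp;

    public ExamplePropertySet() {
        // Properties may only be registered here; adding them later is not allowed.
        registerProperty("PropertyA", 0, new Type(int.class), (short) 0, "intProp");
        registerProperty("Text", new Type(String.class), PropertyAttribute.BOUND, "textProp");
    }
}

Clients would then go through the implemented XPropertySet/XFastPropertySet/XMultiPropertySet interfaces, e.g. setPropertyValue("Text", "hello"), and BOUND properties notify registered XPropertyChangeListeners via the fire(...) method shown above.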
javaunohelper/com/sun/star/lib/uno/helper/PropertySet.java
/************************************************************************* * * $RCSfile: PropertySet.java,v $ * * $Revision: 1.3 $ * * last change: $Author: jl $ $Date: 2002-04-29 08:45:46 $ * * The Contents of this file are made available subject to the terms of * either of the following licenses * * - GNU Lesser General Public License Version 2.1 * - Sun Industry Standards Source License Version 1.1 * * Sun Microsystems Inc., October, 2000 * * GNU Lesser General Public License Version 2.1 * ============================================= * Copyright 2000 by Sun Microsystems, Inc. * 901 San Antonio Road, Palo Alto, CA 94303, USA * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License version 2.1, as published by the Free Software Foundation. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, * MA 02111-1307 USA * * * Sun Industry Standards Source License Version 1.1 * ================================================= * The contents of this file are subject to the Sun Industry Standards * Source License Version 1.1 (the "License"); You may not use this file * except in compliance with the License. You may obtain a copy of the * License at http://www.openoffice.org/license.html. * * Software provided under this License is provided on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, * WITHOUT LIMITATION, WARRANTIES THAT THE SOFTWARE IS FREE OF DEFECTS, * MERCHANTABLE, FIT FOR A PARTICULAR PURPOSE, OR NON-INFRINGING. * See the License for the specific provisions governing your rights and * obligations concerning the Software. * * The Initial Developer of the Original Code is: Sun Microsystems, Inc. * * Copyright: 2000 by Sun Microsystems, Inc. * * All Rights Reserved. 
* * Contributor(s): _______________________________________ * * ************************************************************************/ package com.sun.star.lib.uno.helper; import com.sun.star.uno.Type; import com.sun.star.lang.EventObject; import com.sun.star.lang.WrappedTargetException; import com.sun.star.uno.TypeClass; import com.sun.star.uno.AnyConverter; import com.sun.star.uno.XInterface; import com.sun.star.uno.Any; import com.sun.star.uno.UnoRuntime; import com.sun.star.beans.XPropertyChangeListener; import com.sun.star.beans.XVetoableChangeListener; import com.sun.star.beans.PropertyChangeEvent; import com.sun.star.beans.XPropertySet; import com.sun.star.beans.Property; import com.sun.star.beans.PropertyAttribute; import com.sun.star.beans.UnknownPropertyException; import com.sun.star.beans.XPropertiesChangeListener; import com.sun.star.beans.XPropertySetInfo; import com.sun.star.beans.XFastPropertySet; import com.sun.star.beans.PropertyVetoException; import com.sun.star.beans.XMultiPropertySet; import java.util.ArrayList; import java.util.List; import java.util.Iterator; import java.util.Collection; import java.util.HashMap; import java.lang.reflect.Field; import com.sun.star.lang.DisposedException; /** This class is an implementation of the interfaces com.sun.star.beans.XPropertySet, * com.sun.star.beans.XFastPropertySet and com.sun.star.beans.XMultiPropertySet. This * class has to be inherited to be used. The values of properties are stored in member * variables of the inheriting class. By overriding the methods * {@link #convertPropertyValue convertPropertyValue}, * {@link #setPropertyValueNoBroadcast setPropertyValueNoBroadcast} and * {@link #getPropertyValue(Property)} one can determine how * property values are stored. * When using the supplied implementations of this class then the member variables which * hold property values have to be declared in the class which inherits last in the inheriting * chain and they have to be of public, protected or package access.<p> * Properties have to be registered by one of the registerProperty methods. They take among other * arguments an Object named <em>id</em> which has to be a String that represents the name of * the member variable. The registering has to occur in the constructor of the inheriting class. * It is no allowed to add or change properties later on.<p> * Example: * <pre> * public class Foo extends PropertySet * { * protected int intProp; * * public Foo() * { * registerProperty("PropertyA", 0, new Type(int.class), (short)0, "intProp"); * } * } * * </pre> */ public class PropertySet extends ComponentBase implements XPropertySet, XFastPropertySet, XMultiPropertySet { private HashMap _nameToPropertyMap; private HashMap _handleToPropertyMap; private HashMap _propertyToIdMap; private Property[] arProperties; private int lastHandle= 1; protected XPropertySetInfo propertySetInfo; protected MultiTypeInterfaceContainer aBoundLC= new MultiTypeInterfaceContainer(); protected MultiTypeInterfaceContainer aVetoableLC= new MultiTypeInterfaceContainer(); public PropertySet() { super(); initMappings(); } /** Registers a property with this helper class and associates the argument <em>id</em> with it. * <em>id</em> is used to identify the storage of the property value. 
How property values are stored * and retrieved is determined by the methods {@link #convertPropertyValue convertPropertyValue}, * {@link #setPropertyValueNoBroadcast setPropertyValueNoBroadcast} and {@link #getPropertyValue(Property) getPropertyValue} * These methods expect <em>id</em> to be a java.lang.String which represents the name of a member variable * which holds the property value. * Only properties which are registered can be accessed. Registration has to occur during * initialization of the inheriting class (i.e. within the contructor). * @param prop The property to be registered. * @param id Identifies the properties storage. * @see #getPropertyId */ protected void registerProperty(Property prop, Object id) { putProperty(prop); assignPropertyId(prop, id); } /** Registers a property with this helper class and associates the argument id with it. * It does the same as {@link #registerProperty(Property, Object)}. The first four * arguments are used to construct a Property object. * Registration has to occur during * initialization of the inheriting class (i.e. within the contructor) * @param name The property's name (Property.Name). * @param handle The property's handle (Property.Handle). * @param Type The property's type (Property.Type). * @param attributes The property's attributes (Property.Attributes). * @param id Identifies the property's storage. */ protected void registerProperty(String name, int handle, Type type, short attributes, Object id) { Property p= new Property(name, handle, type, attributes); registerProperty(p, id); } /** Registers a property with this class and associates the argument id with it. * It does the same as {@link #registerProperty(Property, Object)}. The first three * arguments are used to construct a Property object. The value for the Property.Handle * is generated and does not have to be specified here. Use this method for registering * a property if you do not care about the Property's handles. * Registration has to occur during * initialization of the inheriting class (i.e. within the contructor). * @param name The property's name (Property.Name). * @param handle The property's handle (Property.Handle). * @param Type The property's type (Property.Type). * @param attributes The property's attributes (Property.Attributes). * @param id Identifies the property's storage. */ protected void registerProperty(String name, Type type, short attributes, Object id) { Property p= new Property(name, lastHandle++, type, attributes); registerProperty(p, id); } /** Registers a property with this class. This method expects that property values * are stored in member variables as is the case if the methods convertPropertyValue, * setPropertyValueNoBroadcast and getPropertyValue(Property) are not overridden. * It is presumed that the type of the member variable * corresponds Property.Type. For example, if the TypeClass of Property.Type is to be * a TypeClass.SHORT then the member must be a short or java.lang.Short. * The handle for the property is generated.<br> * If there is no member with the specified name or if the member has an incompatible type * then a com.sun.star.uno.RuntimeException is thrown. * @param propertyName The name of the property. * @param memberName The name of the member variable that holds the value of the property. * @param attributes The property attributes. 
*/ protected void registerProperty(String propertyName, String memberName, short attributes) { Field propField= null; try { propField= getClass().getDeclaredField(memberName); } catch (NoSuchFieldException e) { throw new com.sun.star.uno.RuntimeException("there is no member variable: " + memberName); } Class cl= propField.getType(); Type t= new Type(cl); if (t.getTypeClass() != TypeClass.UNKNOWN) { Property p= new Property(propertyName, lastHandle++, t, attributes); registerProperty(p,memberName); } else throw new com.sun.star.uno.RuntimeException("the member has an unknown type: " + memberName); } /** Registers a property with this class. * It is presumed that the name of property is equal to the name of the member variable * that holds the property value. * @param propertyName The name of the property and the member variable that holds the property's value. * @param attributes The property attributes. * @see #registerProperty(String, String, short) */ protected void registerProperty(String propertyName, short attributes) { registerProperty(propertyName, propertyName, attributes); } /** Returns the Property object for a given property name or null if that property does * not exists (i.e. it has not been registered). Override this method * if you want to implement your own mapping from property names to Property objects. * Then you also have to override {@link #initMappings}, {@link #getProperties()} and * {@link #putProperty(Property)}. * @param propertyName The name of the property (Property.Name) * @return The Property object with the name <em>propertyName</em>. */ protected Property getProperty(String propertyName) { return (Property) _nameToPropertyMap.get(propertyName); } /** Returns the Property object with a handle (Property.Handle) as specified by the argument * <em>nHandle</em>. The method returns null if there is no such property (i.e. it has not * been registered). Override this method if you want to implement your own mapping from handles * to Property objects. Then you also have to override {@link #initMappings}, {@link #putProperty(Property)}. * @param nHandle The handle of the property (Property.Handle). * @return The Property object with the handle <em>nHandle</em> */ protected Property getPropertyByHandle(int nHandle) { return (Property) _handleToPropertyMap.get(new Integer(nHandle)); } /** Returns an array of all Property objects or an array of length null if there * are no properties. Override this method if you want to implement your own mapping from names * to Property objects. Then you also have to override {@link #initMappings}, {@link #getProperty(String)} and * {@link #putProperty}. * @return Array of all Property objects. */ protected Property[] getProperties() { if (arProperties == null) { Collection values= _nameToPropertyMap.values(); arProperties= (Property[]) values.toArray(new Property[_nameToPropertyMap.size()]); } return arProperties; } /** Stores a Property object so that it can be retrieved subsequently by * {@link #getProperty(String)},{@link #getProperties()},{@link #getPropertyByHandle(int)}. * Override this method if you want to implement your own mapping from handles * to Property objects and names to Property objects. Then you also need to override {@link #initMappings}, * {@link #getProperty(String)},{@link #getProperties()},{@link #getPropertyByHandle(int)}. * @param prop The Property object that is to be stored. 
*/
    protected void putProperty(Property prop)
    {
        _nameToPropertyMap.put(prop.Name, prop);
        if (prop.Handle != -1)
            _handleToPropertyMap.put(new Integer(prop.Handle), prop);
    }

    /** Assigns an identifier object to a Property object so that the identifier
     * can be obtained by {@link #getPropertyId getPropertyId} later on. The identifier
     * is used to specify a certain storage for the property's value. If you do not
     * override {@link #setPropertyValueNoBroadcast setPropertyValueNoBroadcast} or {@link #getPropertyValue(Property)}
     * then the argument <em>id</em> has to be a java.lang.String that equals the name of
     * the member variable that holds the Property's value.
     * Override this method if you want to implement your own mapping from Property objects to ids or
     * if you need ids of a type other than java.lang.String.
     * Then you also need to override {@link #initMappings initMappings} and {@link #getPropertyId getPropertyId}.
     * @param prop The Property object that is being assigned an id.
     * @param id The object which identifies the storage used for the property's value.
     * @see #registerProperty(Property, Object)
     */
    protected void assignPropertyId(Property prop, Object id)
    {
        if (id instanceof String && ((String) id).equals("") == false)
            _propertyToIdMap.put(prop, id);
    }

    /** Returns the identifier object for a certain Property. The object must have been
     * previously assigned to the Property object by {@link #assignPropertyId assignPropertyId}.
     * Override this method if you want to implement your own mapping from Property objects to ids.
     * Then you also need to override {@link #initMappings initMappings} and {@link #assignPropertyId assignPropertyId}.
     * @param prop The property for which the id is to be retrieved.
     * @return The id object that identifies the storage used for the property's value.
     * @see #registerProperty(Property, Object)
     */
    protected Object getPropertyId(Property prop)
    {
        return _propertyToIdMap.get(prop);
    }

    /** Initializes data structures used for mappings of property names to property objects,
     * property handles to property objects and property objects to id objects.
     * Override this method if you want to implement your own mappings. Then you also need to
     * override {@link #putProperty putProperty},{@link #getProperty getProperty}, {@link #getPropertyByHandle},
     * {@link #assignPropertyId assignPropertyId} and {@link #getPropertyId getPropertyId}.
     */
    protected void initMappings()
    {
        _nameToPropertyMap= new HashMap();
        _handleToPropertyMap= new HashMap();
        _propertyToIdMap= new HashMap();
    }

    /** Makes sure that listeners which are kept in aBoundLC (XPropertyChangeListener) and aVetoableLC
     * (XVetoableChangeListener) receive a disposing call. Also those listeners are released.
     */
    protected void postDisposing()
    {
        // Create an event with this as sender
        EventObject aEvt= new EventObject(this);
        // inform all listeners to release this object
        aBoundLC.disposeAndClear(aEvt);
        aVetoableLC.disposeAndClear(aEvt);
    }

    //XPropertySet ----------------------------------------------------
    synchronized public void addPropertyChangeListener(String str, XPropertyChangeListener xPropertyChangeListener)
        throws UnknownPropertyException, WrappedTargetException
    {
        // only add listeners if you are not disposed
        if (! bInDispose && !
bDisposed) { if (str.length() > 0) { Property prop= getProperty(str); if (prop == null) throw new UnknownPropertyException("Property " + str + " is unknown"); // Add listener for a certain property if ((prop.Attributes & PropertyAttribute.BOUND) > 0) aBoundLC.addInterface(str, xPropertyChangeListener); else //ignore silently return; } else // Add listener for all properties listenerContainer.addInterface(XPropertyChangeListener.class, xPropertyChangeListener); } } //XPropertySet ---------------------------------------------------- synchronized public void addVetoableChangeListener(String str, com.sun.star.beans.XVetoableChangeListener xVetoableChangeListener) throws com.sun.star.beans.UnknownPropertyException, com.sun.star.lang.WrappedTargetException { // only add listeners if you are not disposed if (! bInDispose && ! bDisposed) { if (str.length() > 0) { Property prop= getProperty(str); if (prop == null) throw new UnknownPropertyException("Property " + str + " is unknown"); // Add listener for a certain property if ((prop.Attributes & PropertyAttribute.CONSTRAINED) > 0) aVetoableLC.addInterface(str, xVetoableChangeListener); else //ignore silently return; } else // Add listener for all properties listenerContainer.addInterface(XVetoableChangeListener.class, xVetoableChangeListener); } } //XPropertySet ---------------------------------------------------- public com.sun.star.beans.XPropertySetInfo getPropertySetInfo() { if (propertySetInfo == null) { synchronized (this) { if (propertySetInfo == null) propertySetInfo= new PropertySetInfo(); } } return propertySetInfo; } //XPropertySet ---------------------------------------------------- public Object getPropertyValue(String name) throws UnknownPropertyException, WrappedTargetException { Object ret= null; if (bInDispose || bDisposed) throw new com.sun.star.lang.DisposedException("The component has been disposed already"); Property prop= getProperty(name); if (prop == null) throw new UnknownPropertyException("The property " + name + " is unknown"); if ((prop.Attributes & PropertyAttribute.READONLY) == PropertyAttribute.READONLY) return null; synchronized (this) { ret= getPropertyValue(prop); } return ret; } //XPropertySet ---------------------------------------------------- synchronized public void removePropertyChangeListener(String propName, XPropertyChangeListener listener) throws UnknownPropertyException, WrappedTargetException { // all listeners are automaticly released in a dispose call if (!bInDispose && !bDisposed) { if (propName.length() > 0) { Property prop = getProperty(propName); if (prop == null) throw new UnknownPropertyException("Property " + propName + " is unknown"); aBoundLC.removeInterface(propName, listener); } else listenerContainer.removeInterface(XPropertyChangeListener.class, listener); } } //XPropertySet ---------------------------------------------------- synchronized public void removeVetoableChangeListener(String propName, XVetoableChangeListener listener) throws UnknownPropertyException, WrappedTargetException {// all listeners are automaticly released in a dispose call if (!bInDispose && !bDisposed) { if (propName.length() > 0) { Property prop = getProperty(propName); if (prop == null) throw new UnknownPropertyException("Property " + propName + " is unknown"); aVetoableLC.removeInterface(propName, listener); } else listenerContainer.removeInterface(XVetoableChangeListener.class, listener); } } //XPropertySet ---------------------------------------------------- /** Sets the value of a property. 
* The idl description for this interfaces, stipulates that the argument value is an Any. Since a java.lang.Object * reference has the same meaning as an Any this function accepts * java anys (com.sun.star.uno.Any) and all other appropriate objects as arguments. The value argument can be one * of these: * <ul> * <li>java.lang.Boolean</li> * <li>java.lang.Character</li> * <li>java.lang.Byte</li> * <li>java.lang.Short</li> * <li>java.lang.Integer</li> * <li>java.lang.Long</li> * <li>java.lang.Float</li> * <li>java.lang.Double</li> * <li>java.lang.String</li> * <li>com.sun.star.uno.Type</li> * <li><em>objects which implement UNO interfaces</em></li> * <li><em>arrays which contain elements of the types above</em></li> * <li>com.sun.star.uno.Any containing an instance of one of the above types</li> * </ul> * * Properties can have the attribute com.sun.star.beans.PropertyAttribute.MAYBEVOID, which means that the value * (not the type) can be void. In order to assign a void value to a property one can either pass an Any which * contains a null reference or pass null directly. In bothe cases the null reference is only accepted if * the PropertyAttribute.MAYBEVOID attribute is set for the property. * * Properties which have the attribute MAYBEVOID set (Property.Attributes) can have a void value. The following * considerations presume that the Property has that attribute set. Further, when mentioning an Any's value we * actually refer to the object returned by Any.getObject. * If the argument <em>value</em> is null, or it is an Any whose value is null (but with a valid Type) * then the member variable used for storing the property's value is set to null. * Therefore those properties can only be stored in objects * and primitive types are not allowed (one can use the wrapper classes instead,e.g. java.lang.Byte) . * If a property's value is kept in a member variable of type Any and that reference is still null * then when setPropertyValue is called with * <em>value</em> = null then the member variable is assigned an Any with type void and a null value. * Or if the argument is an Any with a null value then it is assigned to the member variable. * Further, if the variable already * references an Any and setPropertyValue is called with <em>value</em> = null, then the variable is assigned * a new Any with the same type as the previously referenced Any and with a null value. * @param name The name of the property. * @param value The new value of the property. * * */ public void setPropertyValue(String name, Object value) throws UnknownPropertyException, PropertyVetoException, com.sun.star.lang.IllegalArgumentException, WrappedTargetException { Property prop= getProperty(name); if (prop == null) throw new UnknownPropertyException("Property " + name + " is unknown"); setPropertyValue(prop, value); } /** Sets the value of a property. It checks if the property's attributes (READONLY,MAYBEVOID), allow that the * new value can be set. It also causes the notification of listeners. * @param prop The property whose value is to be set. * @param value The new value for the property. 
*/ protected void setPropertyValue(Property prop, Object value) throws UnknownPropertyException, PropertyVetoException, com.sun.star.lang.IllegalArgumentException, WrappedTargetException { if ((prop.Attributes & PropertyAttribute.READONLY) == PropertyAttribute.READONLY) throw new com.sun.star.beans.PropertyVetoException(); // The value may be null only if MAYBEVOID attribute is set boolean bVoidValue= false; if (value instanceof Any) bVoidValue= ((Any) value).getObject() == null; else bVoidValue= value == null; if (bVoidValue && (prop.Attributes & PropertyAttribute.MAYBEVOID) == 0) throw new com.sun.star.lang.IllegalArgumentException("The property must have a value; the MAYBEVOID attribute is not set!"); if (bInDispose || bDisposed) throw new DisposedException("Component is already disposed"); //Check if the argument is allowed boolean bValueOk= false; if (value instanceof Any) bValueOk= checkType(((Any) value).getObject()); else bValueOk= checkType(value); if (! bValueOk) throw new com.sun.star.lang.IllegalArgumentException("No valid UNO type"); boolean bConversionOk= false; Object[] outConvertedVal= new Object[1]; Object[] outOldValue= new Object[1]; synchronized (this) { bConversionOk= convertPropertyValue(prop, outConvertedVal, outOldValue, value); } //The next step following the conversion is to set the new value of the property. Prior to this // the XVetoableChangeListener s have to be notified. if (bConversionOk) { // If the property is CONSTRAINED, then we must notify XVetoableChangeListener. The listener can throw a com.sun.star.lang.beans.PropertyVetoException which // will cause this method to return (the exception is not caught here). fire( new Property[]{prop}, outConvertedVal, outOldValue, true); synchronized (this) { setPropertyValueNoBroadcast(prop, outConvertedVal[0]); } // fire a change event (XPropertyChangeListener, PropertyAttribute.BOUND fire( new Property[]{prop}, outConvertedVal, outOldValue, false); } } /** Converts a value in a way so that its appropriate for storing as a property value, that is * {@link #setPropertyValueNoBroadcast setPropertyValueNoBroadcast} can process the value without any further * conversion. This implementation presumes that * the values are stored in member variables of the furthest inheriting class. For example, * class A inherits this class then members of class A * can hold property values. If there is a class B which inherits A then only members of B can hold * property values. The variables can have * public, protected or package level access. A property must have been registered (e.g. by * {@link #registerProperty(Property, Object)} in order for this method to work. The identifyer Object argument * used in the registerProperty methods must * be a java.lang.String that is the name of the member variable that holds the property value. * If one opts to store values differently then one may override * this method, as well as {@link #setPropertyValueNoBroadcast setPropertyValueNoBroadcast} and * {@link #getPropertyValue(Property) getPropertyValue(Property)}. * This method is always called as a result of a call to one of the setter methods, such as * {@link #setPropertyValue(String,Object) XPropertySet.setPropertyValue}, * {@link #setFastPropertyValue XFastPropertySet.setFastPropertyValue} * and {@link #setPropertyValues XMultiPropertySet.setPropertyValues}. 
* If this method fails, that is, it returns false or throws an exception, then no listeners are notified and the * property value, that was intended to be changed, remains untouched.<br /> This method does not have to deal with property attributes, such as * PropertyAttribute.READONLY or PropertyAttribute.MAYBEVOID. The processing of these attributes occurs * in the calling methods.<br /> * Only if this method returns successfully further processing, such * as listener notification and finally the modifiction of the property's value, will occur.<br /> * * The actual modification of a property's value is done by {@link #setPropertyValueNoBroadcast setPropertyValueNoBroadcast} * which is called subsequent to convertPropertyValue. *<p> * This method converts values by help of the com.sun.star.uno.AnyConverter which only does a few widening * conversions on integer types and floating point types. For example, there is the property PropA with a Type equivalent * to int.class and the * value of the property is to be stored in a member variable of type int with name intProp. Then setPropertyValue is * called: * <pre> * set.setPropertyValue( "PropA", new Byte( (byte)111)); * </pre> * At some point setPropertyValue will call convertPropertyValue and pass in the Byte object. Since we allow * that Byte values can be used with the property and know that the value is to be stored in intProp (type int) * we convert the Byte object into an Integer object which is then returned in the out-parameter <em>newVal</em>. This * conversion is actually performed by the AnyConverter. Later * the setPropertyValueNoBroadcast is called with that Integer object and the int value can be easily extracted * from the object and be assigned to the member intProp. * <p> * The method handles Any arguments the same as Object arguments. That is, the <em>setVal</em> argument can * be a java.lang.Boolean or a com.sun.star.uno.Any containing a java.lang.Boolean. Likewise, a member * containing a property value can be a com.sun.star.uno.Any or an java.lang.Object. * Then, no conversion is necessary, since they can hold all possible values. However, if * the member is an Object and <em>setVal</em> is an Any then the object contained in the any is assigned to * the member. The extra type information which exists as Type object in the Any will get lost. If this is not * intended then use an Any variable rather then an Object.<br /> * If a member is an Object or Any and the argument <em>setVal</em> is an Object, other than String or array, * then it is presumed to be an UNO object and queried for XInterface. If successful, the out-param <em>newVal</em> * returns the XInterface.<br /> * If a member is an UNO interface, then <em>setVal</em> is queried for this interface and the result is returned. * If <em>setVal</em> is null then <em>newVal</em> will be null too after return. * <p> * If a property value is stored using a primitive type the the out-parameters * <em>curVal</em> and <em>newVal</em> contain the respective wrapper class (e.g.java.lang.Byte, etc.). * curVal is used in calls to the XVetoableChangeListener and XPropertyChangeListener. * * @param property - in-param property for which the data is to be converted. * @param newVal - out-param which contains the converted value on return. * @param curVal - out-param the current value of the property. It is used in calls to the * XVetoableChangeListener and XPropertyChangeListener. * @param setVal - in-param. 
The value that is to be converted so that it matches Property and the internally used * dataformat for that property. * @return true - Conversion was successful. <em>newVal</em> contains a valid value for the property. false - * conversion failed for some reason. * @throws com.sun.star.lang.IllegalArgumentException The value provided is unfit for the property. * @throws com.sun.star.lang.WrappedTargetException - An exception occured during the conversion, that is to be made known * to the caller. */ protected boolean convertPropertyValue(Property property, Object[] newVal, Object[]curVal, Object setVal) throws com.sun.star.lang.IllegalArgumentException, WrappedTargetException { boolean ret= true; try { // get the member name String sMember= (String) getPropertyId(property); if (sMember != null) { // use reflection to obtain the field that holds the property value // Class.getDeclaredFields does not return inherited fields. One could use Class.getFields to // also get inherited fields, but only those which are public. Field propField= getClass().getDeclaredField(sMember); if (propField != null) { curVal[0]= propField.get(this); Class memberClass= propField.getType(); // MAYBEVOID: if setVal == null or it is an Any and getObject returns null, then a void value is to be set // This works only if there are no primitive types. For those we use the respective wrapper classes. // In this implementation, a null reference means void value. boolean bVoidValue= false; boolean bAnyVal= setVal instanceof Any; if (bAnyVal) bVoidValue= ((Any) setVal).getObject() == null; else bVoidValue= setVal == null; if (bVoidValue && memberClass.isPrimitive()) throw new com.sun.star.lang.IllegalArgumentException("The implementation does not support the MAYBEVOID attribute for this property"); Object convObj= null; //The member that keeps the value of the Property is an Any. It can contain all possible //types, therefore a conversion is not necessary. if (memberClass.equals(Any.class)) { if (bAnyVal) //parameter setVal is also an Any and can be used without further processing convObj= setVal; else { // Parameter setVal is not an Any. We need to construct an Any that contains // the argument setVal. // If setVal is an interface implementation then, we cannot constuct the // Any with setVal.getClass(), because the Any.Type._typeClass would be TypeClass.UNKNOWN. // We try to get an XInterface of setVal and set an XInterface type. 
if (setVal instanceof XInterface) { XInterface xint= (XInterface) UnoRuntime.queryInterface(XInterface.class, setVal); if (xint != null) convObj= new Any(new Type(XInterface.class), xint); } // The member is an any, and the past in argument was null reference (MAYBEVOID is set) else if (setVal == null) { // if the any member is still null we create a void any if (curVal[0] == null) convObj= new Any(new Type(), null); else { //otherwise we create an Any with the same type as a value of null; convObj= new Any( ((Any)curVal[0]).getType(), null); } } else convObj= new Any(new Type(setVal.getClass()), setVal); } } else convObj= convert(memberClass, setVal); newVal[0]= convObj; } } } catch (java.lang.NoSuchFieldException e) { throw new WrappedTargetException("Field does not exist", this, e); } catch (java.lang.IllegalAccessException e) { throw new WrappedTargetException("", this ,e); } return ret; } private boolean checkType(Object obj) { if (obj == null || obj instanceof Boolean || obj instanceof Character || obj instanceof Number || obj instanceof String || obj instanceof XInterface || obj instanceof Type || obj.getClass().isArray()) return true; return false; } // Param object can be an Any or other object. If obj is null then the return value is null private Object convert( Class cl, Object obj) throws com.sun.star.lang.IllegalArgumentException { Object retVal= null; //The member that keeps the value of the Property is an Object.Objects are similar to Anys in that they can // hold all types. if (obj == null || (obj instanceof Any && ((Any) obj).getObject() == null)) retVal= null; else if(cl.equals(Object.class)) { if (obj instanceof Any) obj= ((Any) obj).getObject(); retVal= obj; } else if(cl.equals(boolean.class)) retVal= new Boolean(AnyConverter.toBoolean(obj)); else if (cl.equals(char.class)) retVal= new Character(AnyConverter.toChar(obj)); else if (cl.equals(byte.class)) retVal= new Byte(AnyConverter.toByte(obj)); else if (cl.equals(short.class)) retVal= new Short(AnyConverter.toShort(obj)); else if (cl.equals(int.class)) retVal= new Integer(AnyConverter.toInt(obj)); else if (cl.equals(long.class)) retVal= new Long(AnyConverter.toLong(obj)); else if (cl.equals(float.class)) retVal= new Float(AnyConverter.toFloat(obj)); else if (cl.equals(double.class)) retVal= new Double(AnyConverter.toDouble(obj)); else if (cl.equals(String.class)) retVal= AnyConverter.toString(obj); else if (cl.isArray()) retVal= AnyConverter.toArray(obj); else if (cl.equals(Type.class)) retVal= AnyConverter.toType(obj); else if (cl.equals(Boolean.class)) retVal= new Boolean(AnyConverter.toBoolean(obj)); else if (cl.equals(Character.class)) retVal= new Character(AnyConverter.toChar(obj)); else if (cl.equals(Byte.class)) retVal= new Byte(AnyConverter.toByte(obj)); else if (cl.equals(Short.class)) retVal= new Short(AnyConverter.toShort(obj)); else if (cl.equals(Integer.class)) retVal= new Integer(AnyConverter.toInt(obj)); else if (cl.equals(Long.class)) retVal= new Long(AnyConverter.toLong(obj)); else if (cl.equals(Float.class)) retVal= new Float(AnyConverter.toFloat(obj)); else if (cl.equals(Double.class)) retVal= new Double(AnyConverter.toDouble(obj)); else if (XInterface.class.isAssignableFrom(cl)) retVal= AnyConverter.toObject(new Type(cl), obj); else throw new com.sun.star.lang.IllegalArgumentException("Could not convert the argument"); return retVal; } /** Sets the value of a property. 
In this implementation property values are stored in member variables * (see {@link #convertPropertyValue convertPropertyValue} Notification of property listeners * does not occur in this method. By overriding this method one can take full control about how property values * are stored. But then, the {@link #convertPropertyValue convertPropertyValue} and * {@link getPropertyValue(Property)} must be overridden too. * * A Property with the MAYBEVOID attribute set, is stored as null value. Therefore the member variable must be * an Object in order to make use of the property attribute. An exception is Any. The Any variable can be initially null, but * once it is set the reference will not become null again. If the value is to be set to * void then a new Any will be stored * with a valid type but without a value (i.e. Any.getObject returns null). * If a property has the READONLY attribute set, and one of the setter methods, such as setPropertyValue, has been * called, then this method is not going to be called. * @param property the property for which the new value is set * @param value the new value for the property. * @throws com.sun.star.lang.WrappedTargetException An exception, which has to be made known to the caller, * occured during the setting of the value. */ protected void setPropertyValueNoBroadcast(Property property, Object newVal) throws WrappedTargetException { try { // get the member name String sMember= (String) getPropertyId(property); if (sMember != null) { // use reflection to obtain the field that holds the property value // Class.getDeclaredFields does not return inherited fields. One could use Class.getFields to // also get inherited fields, but only those which are public. Field propField= getClass().getDeclaredField(sMember); if (propField != null) propField.set(this, newVal); } } catch(java.lang.Exception e) { throw new WrappedTargetException("PropertySet.setPropertyValueNoBroadcast", this, e); } } /** Retrieves the value of a property. This implementation presumes that the values are stored in member variables * of the furthest inheriting class (see {@link #convertPropertyValue convertPropertyValue}) and that the * variables have public, protected or package level access. The property must have * been registered, for example by {@link #registerProperty(Property, Object)}. The identifyer Object argument * must have been a java.lang.String which was the name of the member variable holding the property value. * When properties are to be stored differently one has to override this method as well as * {@link #convertPropertyValue} and {@link #setPropertyValueNoBroadcast}. <br> * If a value is stored in a variable of a primitive type then this method returns an instance of the respective * wrapper class (e.g. java.lang.Boolean). * @param property The property for which the value is to be retrieved. * @return The value of the property. */ protected Object getPropertyValue(Property property) throws com.sun.star.lang.WrappedTargetException { Object ret= null; try { // get the member name String sMember= (String) getPropertyId(property); if (sMember != null) { // use reflection to obtain the field that holds the property value // Class.getDeclaredFields does not return inherited fields. One could use Class.getFields to // also get inherited fields, but only those which are public. 
Field propField= getClass().getDeclaredField(sMember); if (propField != null) ret= propField.get(this); } } catch(java.lang.Exception e) { throw new WrappedTargetException("PropertySet.setPropertyValue_NoBroadcast", this, e); } return ret; } /** * This method fires events to XPropertyChangeListener,XVetoableChangeListener and * XPropertiesChangeListener event sinks. * To distinguish what listeners are to be called the argument <em>bVetoable</em> is to be set to true if * a XVetoableChangeListener is meant. For XPropertyChangeListener and XPropertiesChangeListener * it is to be set to false. * * @param properties Properties wich will be or have been affected. * @param newValues the new values of the properties. * @param oldValues the old values of the properties. * @param bVetoable true means fire to VetoableChangeListener, false means fire to * XPropertyChangedListener and XMultiPropertyChangedListener. */ protected void fire( Property[] properties, Object[] newValues, Object[] oldValues, boolean bVetoable ) throws PropertyVetoException { // Only fire, if one or more properties changed int nNumProps= properties.length; if (nNumProps > 0) { PropertyChangeEvent[] arEvts= new PropertyChangeEvent[nNumProps]; int nAffectedProps= 0; // Loop over all changed properties to fill the event struct for (int i= 0; i < nNumProps; i++) { if ((bVetoable && (properties[i].Attributes & PropertyAttribute.CONSTRAINED) > 0) || (!bVetoable && (properties[i].Attributes & PropertyAttribute.BOUND) > 0)) { arEvts[i]= new PropertyChangeEvent(this, properties[i].Name, false, properties[i].Handle, oldValues[i], newValues[i]); nAffectedProps++; } } // fire the events for all changed properties for (int i= 0; i < nAffectedProps; i++) { // get the listener container for the property name InterfaceContainer lc= null; if (bVetoable) lc= aVetoableLC.getContainer(arEvts[i].PropertyName); else lc= aBoundLC.getContainer(arEvts[i].PropertyName); if (lc != null) { Iterator it= lc.iterator(); while( it.hasNext()) { Object listener= it.next(); if (bVetoable) ((XVetoableChangeListener) listener).vetoableChange(arEvts[i]); else ((XPropertyChangeListener) listener).propertyChange(arEvts[i]); } } // broadcast to all listeners with "" property name if(bVetoable) lc= listenerContainer.getContainer(XVetoableChangeListener.class); else lc= listenerContainer.getContainer(XPropertyChangeListener.class); if(lc != null) { Iterator it= lc.iterator(); while(it.hasNext() ) { Object listener= it.next(); if( bVetoable ) // fire change Events? 
((XVetoableChangeListener) listener).vetoableChange(arEvts[i]); else ((XPropertyChangeListener) listener).propertyChange(arEvts[i]); } } } // fire at XPropertiesChangeListeners // if nAffectedProps == 0 then there are no BOUND properties if (!bVetoable && nAffectedProps > 0) { PropertyChangeEvent[] arReduced= new PropertyChangeEvent[nAffectedProps]; System.arraycopy(arEvts, 0, arReduced, 0, nAffectedProps); InterfaceContainer lc= listenerContainer.getContainer(XPropertiesChangeListener.class); if (lc != null) { Iterator it= lc.iterator(); while (it.hasNext()) { XPropertiesChangeListener listener = (XPropertiesChangeListener) it.next(); // fire the hole event sequence to the XPropertiesChangeListener's listener.propertiesChange( arEvts ); } } } } } // XFastPropertySet-------------------------------------------------------------------------------- public void setFastPropertyValue(int nHandle, Object aValue ) throws UnknownPropertyException, PropertyVetoException, com.sun.star.lang.IllegalArgumentException, WrappedTargetException { Property prop= getPropertyByHandle(nHandle); if (prop == null) throw new UnknownPropertyException(" The property with handle : " + nHandle +" is unknown"); setPropertyValue(prop, aValue); } // XFastPropertySet -------------------------------------------------------------------------------- public Object getFastPropertyValue(int nHandle ) throws UnknownPropertyException, WrappedTargetException { Property prop= getPropertyByHandle(nHandle); if (prop == null) throw new UnknownPropertyException("The property with handle : " + nHandle + " is unknown"); return getPropertyValue(prop); } // XMultiPropertySet ----------------------------------------------------------------------------------- public void addPropertiesChangeListener(String[] propNames, XPropertiesChangeListener listener) { listenerContainer.addInterface(XPropertiesChangeListener.class, listener); } // XMultiPropertySet ----------------------------------------------------------------------------------- public void firePropertiesChangeEvent(String[] propNames, XPropertiesChangeListener listener) { // Build the events. PropertyChangeEvent[] arEvents= new PropertyChangeEvent[propNames.length]; int eventCount= 0; // get a snapshot of the current property values synchronized (this) { for (int i= 0; i < propNames.length; i++) { Property prop= getProperty(propNames[i]); if (prop != null) { Object value= null; try { value= getPropertyValue(prop); } catch(WrappedTargetException e) { continue; } arEvents[eventCount]= new PropertyChangeEvent(this, prop.Name, false, prop.Handle, value, value); eventCount++; } } } // fire events from unsynchronized section so as to prevent deadlocks if (eventCount > 0) { // Reallocate the array of the events if necessary if (arEvents.length != eventCount) { PropertyChangeEvent[] arPropsTmp= new PropertyChangeEvent[eventCount]; System.arraycopy(arEvents, 0, arPropsTmp, 0, eventCount); arEvents= arPropsTmp; } listener.propertiesChange(arEvents); } } // XMultiPropertySet ----------------------------------------------------------------------------------- /** If a value for a property could not be retrieved then the respective element in the returned * array has the value null. 
*/
    public Object[] getPropertyValues(String[] propNames)
    {
        Object[] arValues= new Object[propNames.length];
        synchronized (this)
        {
            for (int i= 0; i < propNames.length; i++)
            {
                Object value= null;
                try
                {
                    value= getPropertyValue(propNames[i]);
                }
                catch (Exception e)
                {
                }
                arValues[i]= value;
            }
        }
        return arValues;
    }

    // XMultiPropertySet -----------------------------------------------------------------------------------
    public void removePropertiesChangeListener(XPropertiesChangeListener xPropertiesChangeListener)
    {
        listenerContainer.removeInterface(XPropertiesChangeListener.class, xPropertiesChangeListener);
    }

    // XMultiPropertySet -----------------------------------------------------------------------------------
    /** If the array of property names contains an unknown property then it will be ignored.
     */
    public void setPropertyValues(String[] propNames, Object[] values)
        throws PropertyVetoException, com.sun.star.lang.IllegalArgumentException, com.sun.star.lang.WrappedTargetException
    {
        for (int i= 0; i < propNames.length; i++)
        {
            try
            {
                setPropertyValue(propNames[i], values[i]);
            }
            catch (UnknownPropertyException e)
            {
                continue;
            }
        }
    }

    private class PropertySetInfo implements XPropertySetInfo
    {
        public com.sun.star.beans.Property[] getProperties()
        {
            return PropertySet.this.getProperties();
        }

        public com.sun.star.beans.Property getPropertyByName(String name) throws UnknownPropertyException
        {
            return getProperty(name);
        }

        public boolean hasPropertyByName(String name)
        {
            return getProperty(name) != null;
        }
    }
}
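/* Editor's illustration, not part of PropertySet.java: the Javadoc above states that property
 * values live in public member variables of the furthest-inheriting class, that registration has
 * to happen in the constructor, and that a Byte argument is widened to an int member by
 * AnyConverter. The sketch below only ties those statements together; the class name
 * SampleComponent, the property name "Count" and the member "count" are assumptions made for
 * the example and do not appear anywhere in the file above.
 */
import com.sun.star.beans.PropertyAttribute;
import com.sun.star.lib.uno.helper.PropertySet;

public class SampleComponent extends PropertySet
{
    // Storage for the "Count" property; it must be reachable by the helper's reflective access.
    public int count;

    public SampleComponent()
    {
        // Registration happens in the constructor; the id is the name of the member variable.
        registerProperty("Count", "count", PropertyAttribute.BOUND);
    }
}

// Usage (checked exceptions omitted), mirroring the conversion described in the Javadoc above:
//   SampleComponent comp = new SampleComponent();
//   comp.setPropertyValue("Count", new Byte((byte) 111)); // widened to the int member
//   Object value = comp.getPropertyValue("Count");        // returned as an Integer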
#97746# docu change
javaunohelper/com/sun/star/lib/uno/helper/PropertySet.java
#97746# docu change
<ide><path>avaunohelper/com/sun/star/lib/uno/helper/PropertySet.java <ide> * <ide> * $RCSfile: PropertySet.java,v $ <ide> * <del> * $Revision: 1.3 $ <del> * <del> * last change: $Author: jl $ $Date: 2002-04-29 08:45:46 $ <add> * $Revision: 1.4 $ <add> * <add> * last change: $Author: jl $ $Date: 2002-05-02 11:02:18 $ <ide> * <ide> * The Contents of this file are made available subject to the terms of <ide> * either of the following licenses <ide> * property values are stored. <ide> * When using the supplied implementations of this class then the member variables which <ide> * hold property values have to be declared in the class which inherits last in the inheriting <del> * chain and they have to be of public, protected or package access.<p> <add> * chain and they have to be public<p> <ide> * Properties have to be registered by one of the registerProperty methods. They take among other <ide> * arguments an Object named <em>id</em> which has to be a String that represents the name of <ide> * the member variable. The registering has to occur in the constructor of the inheriting class. <ide> * the values are stored in member variables of the furthest inheriting class. For example, <ide> * class A inherits this class then members of class A <ide> * can hold property values. If there is a class B which inherits A then only members of B can hold <del> * property values. The variables can have <del> * public, protected or package level access. A property must have been registered (e.g. by <add> * property values. The variables must be public. A property must have been registered (e.g. by <ide> * {@link #registerProperty(Property, Object)} in order for this method to work. The identifyer Object argument <ide> * used in the registerProperty methods must <ide> * be a java.lang.String that is the name of the member variable that holds the property value. <ide> } <ide> /** Retrieves the value of a property. This implementation presumes that the values are stored in member variables <ide> * of the furthest inheriting class (see {@link #convertPropertyValue convertPropertyValue}) and that the <del> * variables have public, protected or package level access. The property must have <add> * variables are public. The property must have <ide> * been registered, for example by {@link #registerProperty(Property, Object)}. The identifyer Object argument <ide> * must have been a java.lang.String which was the name of the member variable holding the property value. <ide> * When properties are to be stored differently one has to override this method as well as
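The hunks above narrow the documented storage requirement from "public, protected or package access" to public members only. As an editor's illustration rather than the commit's stated rationale: convertPropertyValue and getPropertyValue resolve the storage field reflectively and read it from the helper's own package, so a field that is not public in a subclass from another package fails. A minimal sketch of that access path, where the member name "count" is hypothetical:

    // Sketch of the reflective read performed on the furthest-inheriting class. Because
    // PropertySet lives in com.sun.star.lib.uno.helper, a protected or package-private field
    // declared by a subclass in another package triggers an IllegalAccessException here.
    java.lang.reflect.Field propField = getClass().getDeclaredField("count");
    Object current = propField.get(this); // succeeds only when "count" is public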
Java
apache-2.0
3b5b9a98b058b44b666acce06757b82af634bf49
0
ieatbyte/Finaldemos
package com.wh.finaldemos.demos.system.notification; import android.app.NotificationManager; import android.app.PendingIntent; import android.content.Context; import android.content.Intent; import android.graphics.BitmapFactory; import android.os.Bundle; import android.support.v4.app.NotificationCompat; import android.support.v4.app.TaskStackBuilder; import android.widget.Button; import com.wh.finaldemos.BaseDemoActivity; import com.wh.finaldemos.R; import butterknife.BindView; import butterknife.ButterKnife; import butterknife.OnClick; import static android.R.attr.id; public class SendNotificationActivity extends BaseDemoActivity { @BindView(R.id.send_noti) Button sendBut; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_send_notification); ButterKnife.bind(this); } @OnClick(R.id.send_noti) void onSendClick() { sendNotificationWithBigImg(); } private void sendNotification() { Intent resultIntent = new Intent(this, SendNotificationActivity.class); TaskStackBuilder stackBuilder = TaskStackBuilder.create(this); // Adds the back stack stackBuilder.addParentStack(SendNotificationActivity.class); // Adds the Intent to the top of the stack stackBuilder.addNextIntent(resultIntent); // Gets a PendingIntent containing the entire back stack PendingIntent resultPendingIntent = stackBuilder.getPendingIntent(0, PendingIntent.FLAG_CANCEL_CURRENT); NotificationCompat.Builder builder = new NotificationCompat.Builder(this); builder.setSmallIcon(R.drawable.v2_loading) .setContentTitle("My notification") .setContentText("Hello World!") .setContentIntent(resultPendingIntent); NotificationManager mNotificationManager = (NotificationManager) getSystemService(Context.NOTIFICATION_SERVICE); mNotificationManager.notify(id, builder.build()); } private void sendNotificationWithBigImg() { Intent resultIntent = new Intent(this, SendNotificationActivity.class); TaskStackBuilder stackBuilder = TaskStackBuilder.create(this); // Adds the back stack stackBuilder.addParentStack(SendNotificationActivity.class); // Adds the Intent to the top of the stack stackBuilder.addNextIntent(resultIntent); // Gets a PendingIntent containing the entire back stack PendingIntent resultPendingIntent = stackBuilder.getPendingIntent(0, PendingIntent.FLAG_CANCEL_CURRENT); NotificationCompat.Builder builder = new NotificationCompat.Builder(this); NotificationCompat.BigPictureStyle style = new NotificationCompat.BigPictureStyle(); style.setBigContentTitle("big title"); style.setSummaryText("summery text"); style.bigPicture(BitmapFactory.decodeResource(getResources(), R.drawable.lenna)); builder.setSmallIcon(R.drawable.v2_loading).setLargeIcon(BitmapFactory.decodeResource(getResources(), R.drawable.lenna)) .setContentTitle("My notification") .setContentText("Hello World!").setDefaults(NotificationCompat.DEFAULT_ALL).setAutoCancel(true).setPriority(NotificationCompat.PRIORITY_MAX) .setContentIntent(resultPendingIntent).setStyle(style); NotificationManager mNotificationManager = (NotificationManager) getSystemService(Context.NOTIFICATION_SERVICE); mNotificationManager.notify(id, builder.build()); } }
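/* Editor's note, not part of this demo: on Android 8.0 (API 26) and newer a notification is
 * silently dropped unless it is posted to a NotificationChannel. A minimal sketch of that
 * missing piece follows; the channel id "demo_channel" and the helper name ensureChannel() are
 * assumptions for illustration, and the two-argument NotificationCompat.Builder requires
 * support library 26.1.0 or later.
 */
private void ensureChannel() {
    if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.O) {
        // Create (or re-create, which is a no-op) the channel before notifying.
        android.app.NotificationChannel channel = new android.app.NotificationChannel(
                "demo_channel", "Demo notifications", NotificationManager.IMPORTANCE_DEFAULT);
        NotificationManager manager =
                (NotificationManager) getSystemService(Context.NOTIFICATION_SERVICE);
        manager.createNotificationChannel(channel);
    }
}
// The builders above would then be constructed with the channel id:
//   new NotificationCompat.Builder(this, "demo_channel")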
FinalDemos/app/src/main/java/com/wh/finaldemos/demos/system/notification/SendNotificationActivity.java
package com.wh.finaldemos.demos.system.notification; import android.app.NotificationManager; import android.app.PendingIntent; import android.content.Context; import android.content.Intent; import android.os.Bundle; import android.support.v4.app.NotificationCompat; import android.support.v4.app.TaskStackBuilder; import android.widget.Button; import com.wh.finaldemos.BaseDemoActivity; import com.wh.finaldemos.R; import butterknife.BindView; import butterknife.ButterKnife; import butterknife.OnClick; import static android.R.attr.id; public class SendNotificationActivity extends BaseDemoActivity { @BindView(R.id.send_noti) Button sendBut; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_send_notification); ButterKnife.bind(this); } @OnClick(R.id.send_noti) void onSendClick() { sendNotification(); } private void sendNotification() { Intent resultIntent = new Intent(this, SendNotificationActivity.class); TaskStackBuilder stackBuilder = TaskStackBuilder.create(this); // Adds the back stack stackBuilder.addParentStack(SendNotificationActivity.class); // Adds the Intent to the top of the stack stackBuilder.addNextIntent(resultIntent); // Gets a PendingIntent containing the entire back stack PendingIntent resultPendingIntent = stackBuilder.getPendingIntent(0, PendingIntent.FLAG_CANCEL_CURRENT); NotificationCompat.Builder builder = new NotificationCompat.Builder(this); builder.setSmallIcon(R.drawable.v2_loading) .setContentTitle("My notification") .setContentText("Hello World!") .setContentIntent(resultPendingIntent); NotificationManager mNotificationManager = (NotificationManager) getSystemService(Context.NOTIFICATION_SERVICE); mNotificationManager.notify(id, builder.build()); } }
add big picture style notification
FinalDemos/app/src/main/java/com/wh/finaldemos/demos/system/notification/SendNotificationActivity.java
add big picture style notification
<ide><path>inalDemos/app/src/main/java/com/wh/finaldemos/demos/system/notification/SendNotificationActivity.java <ide> import android.app.PendingIntent; <ide> import android.content.Context; <ide> import android.content.Intent; <add>import android.graphics.BitmapFactory; <ide> import android.os.Bundle; <ide> import android.support.v4.app.NotificationCompat; <ide> import android.support.v4.app.TaskStackBuilder; <ide> <ide> @OnClick(R.id.send_noti) <ide> void onSendClick() { <del> sendNotification(); <add> sendNotificationWithBigImg(); <ide> } <ide> <ide> private void sendNotification() { <ide> mNotificationManager.notify(id, builder.build()); <ide> <ide> } <add> <add> private void sendNotificationWithBigImg() { <add> Intent resultIntent = new Intent(this, SendNotificationActivity.class); <add> TaskStackBuilder stackBuilder = TaskStackBuilder.create(this); <add>// Adds the back stack <add> stackBuilder.addParentStack(SendNotificationActivity.class); <add>// Adds the Intent to the top of the stack <add> stackBuilder.addNextIntent(resultIntent); <add>// Gets a PendingIntent containing the entire back stack <add> PendingIntent resultPendingIntent = <add> stackBuilder.getPendingIntent(0, PendingIntent.FLAG_CANCEL_CURRENT); <add> NotificationCompat.Builder builder = new NotificationCompat.Builder(this); <add> NotificationCompat.BigPictureStyle style = new NotificationCompat.BigPictureStyle(); <add> style.setBigContentTitle("big title"); <add> style.setSummaryText("summery text"); <add> style.bigPicture(BitmapFactory.decodeResource(getResources(), R.drawable.lenna)); <add> builder.setSmallIcon(R.drawable.v2_loading).setLargeIcon(BitmapFactory.decodeResource(getResources(), R.drawable.lenna)) <add> .setContentTitle("My notification") <add> .setContentText("Hello World!").setDefaults(NotificationCompat.DEFAULT_ALL).setAutoCancel(true).setPriority(NotificationCompat.PRIORITY_MAX) <add> .setContentIntent(resultPendingIntent).setStyle(style); <add> NotificationManager mNotificationManager = <add> (NotificationManager) getSystemService(Context.NOTIFICATION_SERVICE); <add> mNotificationManager.notify(id, builder.build()); <add> <add> } <ide> }
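A further caveat the commit above predates, offered as a hedged note rather than as part of the change: once an app targets Android 12 (API 31) or newer, every PendingIntent must declare its mutability, otherwise creation throws an IllegalArgumentException. A minimal adjustment to the call used in both notification methods might look like this:

    // FLAG_IMMUTABLE has existed since API 23; combining it with FLAG_CANCEL_CURRENT keeps the
    // demo's existing behaviour while satisfying the Android 12 mutability requirement.
    int flags = PendingIntent.FLAG_CANCEL_CURRENT;
    if (android.os.Build.VERSION.SDK_INT >= 23) {
        flags |= PendingIntent.FLAG_IMMUTABLE;
    }
    PendingIntent resultPendingIntent = stackBuilder.getPendingIntent(0, flags);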
Java
apache-2.0
5616fca71c0a9855ae78ba8aa3d602e7c8adb3c9
0
wso2-extensions/identity-inbound-auth-oauth,wso2-extensions/identity-inbound-auth-oauth
/* * Copyright (c) 2019, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.carbon.identity.oauth2.validators; import org.apache.commons.lang.ArrayUtils; import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.wso2.carbon.identity.application.authentication.framework.model.AuthenticatedUser; import org.wso2.carbon.identity.application.common.IdentityApplicationManagementException; import org.wso2.carbon.identity.application.common.model.ServiceProvider; import org.wso2.carbon.identity.central.log.mgt.utils.LoggerUtils; import org.wso2.carbon.identity.core.util.IdentityUtil; import org.wso2.carbon.identity.oauth.common.OAuthConstants; import org.wso2.carbon.identity.oauth.common.exception.InvalidOAuthClientException; import org.wso2.carbon.identity.oauth.config.OAuthServerConfiguration; import org.wso2.carbon.identity.oauth2.IdentityOAuth2Exception; import org.wso2.carbon.identity.oauth2.authcontext.AuthorizationContextTokenGenerator; import org.wso2.carbon.identity.oauth2.dao.OAuthTokenPersistenceFactory; import org.wso2.carbon.identity.oauth2.dto.OAuth2ClientApplicationDTO; import org.wso2.carbon.identity.oauth2.dto.OAuth2IntrospectionResponseDTO; import org.wso2.carbon.identity.oauth2.dto.OAuth2TokenValidationRequestDTO; import org.wso2.carbon.identity.oauth2.dto.OAuth2TokenValidationResponseDTO; import org.wso2.carbon.identity.oauth2.internal.OAuth2ServiceComponentHolder; import org.wso2.carbon.identity.oauth2.model.AccessTokenDO; import org.wso2.carbon.identity.oauth2.util.OAuth2Util; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.TreeMap; import static org.wso2.carbon.identity.oauth2.util.OAuth2Util.isParsableJWT; /** * Handles the token validation by invoking the proper validation handler by looking at the token * type. 
*/ public class TokenValidationHandler { private static TokenValidationHandler instance = null; AuthorizationContextTokenGenerator tokenGenerator = null; private static final Log log = LogFactory.getLog(TokenValidationHandler.class); private Map<String, OAuth2TokenValidator> tokenValidators = new TreeMap<>(String.CASE_INSENSITIVE_ORDER); private static final String BEARER_TOKEN_TYPE = "Bearer"; private static final String BEARER_TOKEN_TYPE_JWT = "jwt"; private static final String BUILD_FQU_FROM_SP_CONFIG = "OAuth.BuildSubjectIdentifierFromSPConfig"; private static final String ENABLE_JWT_TOKEN_VALIDATION = "OAuth.EnableJWTTokenValidationDuringIntrospection"; private TokenValidationHandler() { tokenValidators.put(DefaultOAuth2TokenValidator.TOKEN_TYPE, new DefaultOAuth2TokenValidator()); tokenValidators.put(RefreshTokenValidator.TOKEN_TYPE, new RefreshTokenValidator()); for (Map.Entry<String, String> entry : OAuthServerConfiguration.getInstance().getTokenValidatorClassNames() .entrySet()) { String className = null; try { String type = entry.getKey(); className = entry.getValue(); Class clazz = Thread.currentThread().getContextClassLoader().loadClass(entry.getValue()); OAuth2TokenValidator tokenValidator = (OAuth2TokenValidator) clazz.newInstance(); tokenValidators.put(type, tokenValidator); } catch (ClassNotFoundException e) { log.error("Class not in build path " + className, e); } catch (InstantiationException e) { log.error("Class initialization error " + className, e); } catch (IllegalAccessException e) { log.error("Class access error " + className, e); } } // setting up the JWT if required if (OAuthServerConfiguration.getInstance().isAuthContextTokGenEnabled()) { try { Class clazz = this.getClass().getClassLoader().loadClass(OAuthServerConfiguration.getInstance() .getTokenGeneratorImplClass()); tokenGenerator = (AuthorizationContextTokenGenerator) clazz.newInstance(); tokenGenerator.init(); if (log.isDebugEnabled()) { log.debug("An instance of " + OAuthServerConfiguration.getInstance().getTokenGeneratorImplClass() + " is created for OAuthServerConfiguration."); } } catch (ClassNotFoundException e) { String errorMsg = "Class not found: " + OAuthServerConfiguration.getInstance().getTokenGeneratorImplClass(); log.error(errorMsg, e); } catch (InstantiationException e) { String errorMsg = "Error while instantiating: " + OAuthServerConfiguration.getInstance().getTokenGeneratorImplClass(); log.error(errorMsg, e); } catch (IllegalAccessException e) { String errorMsg = "Illegal access to: " + OAuthServerConfiguration.getInstance().getTokenGeneratorImplClass(); log.error(errorMsg, e); } catch (IdentityOAuth2Exception e) { String errorMsg = "Error while initializing: " + OAuthServerConfiguration.getInstance().getTokenGeneratorImplClass(); log.error(errorMsg, e); } } } public static TokenValidationHandler getInstance() { if (instance == null) { synchronized (TokenValidationHandler.class) { if (instance == null) { instance = new TokenValidationHandler(); } } } return instance; } public void addTokenValidator(String type, OAuth2TokenValidator handler) { tokenValidators.put(type, handler); } /** * @param requestDTO * @return * @throws IdentityOAuth2Exception */ public OAuth2TokenValidationResponseDTO validate(OAuth2TokenValidationRequestDTO requestDTO) throws IdentityOAuth2Exception { OAuth2ClientApplicationDTO appToken = findOAuthConsumerIfTokenIsValid(requestDTO); return appToken.getAccessTokenValidationResponse(); } /** * this is method is deprecated now. 
any new implementations use buildIntrospectionResponse. * * @param requestDTO * @return * @throws IdentityOAuth2Exception */ @Deprecated public OAuth2ClientApplicationDTO findOAuthConsumerIfTokenIsValid(OAuth2TokenValidationRequestDTO requestDTO) throws IdentityOAuth2Exception { OAuth2ClientApplicationDTO clientApp = new OAuth2ClientApplicationDTO(); OAuth2TokenValidationResponseDTO responseDTO = new OAuth2TokenValidationResponseDTO(); OAuth2TokenValidationMessageContext messageContext = new OAuth2TokenValidationMessageContext(requestDTO, responseDTO); OAuth2TokenValidationRequestDTO.OAuth2AccessToken accessToken = requestDTO.getAccessToken(); OAuth2TokenValidator tokenValidator = null; AccessTokenDO accessTokenDO = null; try { tokenValidator = findAccessTokenValidator(accessToken); } catch (IllegalArgumentException e) { // access token not provided. return buildClientAppErrorResponse(e.getMessage()); } try { accessTokenDO = OAuth2Util.findAccessToken(requestDTO.getAccessToken().getIdentifier(), false); } catch (IllegalArgumentException e) { // Access token not found in the system. return buildClientAppErrorResponse(e.getMessage()); } if (hasAccessTokenExpired(accessTokenDO)) { return buildClientAppErrorResponse("Access token expired"); } // Set the token expiration time responseDTO.setExpiryTime(getAccessTokenExpirationTime(accessTokenDO)); // Adding the AccessTokenDO as a context property for further use messageContext.addProperty(OAuthConstants.ACCESS_TOKEN_DO, accessTokenDO); if (!tokenValidator.validateAccessDelegation(messageContext)) { return buildClientAppErrorResponse("Invalid access delegation"); } if (!tokenValidator.validateScope(messageContext)) { return buildClientAppErrorResponse("Scope validation failed at app level"); } if (!tokenValidator.validateAccessToken(messageContext)) { return buildClientAppErrorResponse("OAuth2 access token validation failed"); } responseDTO.setAuthorizedUser(getAuthzUser(accessTokenDO)); responseDTO.setScope(accessTokenDO.getScope()); responseDTO.setValid(true); responseDTO.setTokenBinding(accessTokenDO.getTokenBinding()); if (tokenGenerator != null) { tokenGenerator.generateToken(messageContext); if (log.isDebugEnabled()) { log.debug(tokenGenerator.getClass().getName() + " generated token set to response"); } } clientApp.setAccessTokenValidationResponse(responseDTO); clientApp.setConsumerKey(accessTokenDO.getConsumerKey()); return clientApp; } /** * returns back the introspection response, which is compatible with RFC 7662. * * @param validationRequest * @return * @throws IdentityOAuth2Exception */ public OAuth2IntrospectionResponseDTO buildIntrospectionResponse(OAuth2TokenValidationRequestDTO validationRequest) throws IdentityOAuth2Exception { OAuth2TokenValidationResponseDTO responseDTO = new OAuth2TokenValidationResponseDTO(); OAuth2IntrospectionResponseDTO introResp = new OAuth2IntrospectionResponseDTO(); OAuth2TokenValidationMessageContext messageContext = new OAuth2TokenValidationMessageContext(validationRequest, responseDTO); OAuth2TokenValidationRequestDTO.OAuth2AccessToken oAuth2Token = validationRequest.getAccessToken(); // To hold the applicable validators list from all the available validators. This list will be prioritized if we // have a token_type_hint. List<OAuth2TokenValidator> applicableValidators = new ArrayList<>(); boolean isJWTTokenValidation = isJWTTokenValidation(oAuth2Token.getIdentifier()); // If we have a token type hint, we have to prioritize our list. 
if (oAuth2Token.getTokenType() != null) { if (tokenValidators.get(oAuth2Token.getTokenType()) != null) { // Ignore bearer token validators if the token is JWT. if (!isSkipValidatorForJWT(tokenValidators.get(oAuth2Token.getTokenType()), isJWTTokenValidation)) { applicableValidators.add(tokenValidators.get(oAuth2Token.getTokenType())); } } } // Add the rest of the validators. for (Map.Entry<String, OAuth2TokenValidator> oAuth2TokenValidator : tokenValidators.entrySet()) { // Ignore if we added this already. if (StringUtils.equals(oAuth2TokenValidator.getKey(), oAuth2Token.getTokenType())) { continue; } // Ignore bearer token validators if the token is JWT. if (isSkipValidatorForJWT(oAuth2TokenValidator.getValue(), isJWTTokenValidation)) { continue; } if (oAuth2TokenValidator.getValue() != null) { applicableValidators.add(oAuth2TokenValidator.getValue()); } } // Catch the latest exception and throw it if there aren't any active tokens. Exception exception = null; for (OAuth2TokenValidator tokenValidator : applicableValidators) { try { if (tokenValidator.validateAccessToken(messageContext)) { // We have to specially handle the access token and refresh token for further validations. if (tokenValidator instanceof DefaultOAuth2TokenValidator) { introResp = validateAccessToken(messageContext, validationRequest, tokenValidator); } else if (tokenValidator instanceof RefreshTokenValidator) { introResp = validateRefreshToken(messageContext, validationRequest, tokenValidator); } if (log.isDebugEnabled()) { log.debug("Introspecting token of the application:" + introResp.getClientId() + " using the" + " token validator " + tokenValidator.getClass().getName()); } // If there aren't any errors from the above special validations. if (introResp.isActive()) { if (log.isDebugEnabled()) { log.debug("Introspecting token is active for the application:" + introResp.getClientId()); } introResp.setTokenType(tokenValidator.getTokenType()); break; } } } catch (Exception ex) { exception = ex; } } // If there aren't any active tokens, then there should be an error or exception. If no error or exception // as well, that means this token is not active. So show the generic error. if (!introResp.isActive()) { if (introResp.getError() != null) { LoggerUtils.triggerDiagnosticLogEvent(OAuthConstants.LogConstants.OAUTH_INBOUND_SERVICE, null, OAuthConstants.LogConstants.FAILED, introResp.getError(), "validate-token", null); return introResp; } else if (exception != null) { LoggerUtils.triggerDiagnosticLogEvent(OAuthConstants.LogConstants.OAUTH_INBOUND_SERVICE, null, OAuthConstants.LogConstants.FAILED, "System error occurred.", "validate-token", null); throw new IdentityOAuth2Exception("Error occurred while validating token.", exception); } else { return buildIntrospectionErrorResponse("Token validation failed"); } } else { LoggerUtils.triggerDiagnosticLogEvent(OAuthConstants.LogConstants.OAUTH_INBOUND_SERVICE, null, OAuthConstants.LogConstants.SUCCESS, "Token is successfully validated.", "validate-token", null); } if (introResp.getUsername() != null) { responseDTO.setAuthorizedUser(introResp.getUsername()); } if (tokenGenerator != null && validationRequest.getRequiredClaimURIs() != null) { // add user attributes to the introspection response. 
tokenGenerator.generateToken(messageContext); if (log.isDebugEnabled()) { log.debug(tokenGenerator.getClass().getName() + " generated token set to response"); } if (responseDTO.getAuthorizationContextToken() != null) { introResp.setUserContext(responseDTO.getAuthorizationContextToken().getTokenString()); } } introResp.getProperties().put(OAuth2Util.OAUTH2_VALIDATION_MESSAGE_CONTEXT, messageContext); return introResp; } private OAuth2IntrospectionResponseDTO validateRefreshToken(OAuth2TokenValidationMessageContext messageContext, OAuth2TokenValidationRequestDTO validationRequest, OAuth2TokenValidator tokenValidator) throws IdentityOAuth2Exception { OAuth2IntrospectionResponseDTO introResp = new OAuth2IntrospectionResponseDTO(); AccessTokenDO refreshTokenDataDO; try { refreshTokenDataDO = findRefreshToken(validationRequest.getAccessToken().getIdentifier()); } catch (IllegalArgumentException e) { // Refresh token not found in the system. LoggerUtils.triggerDiagnosticLogEvent(OAuthConstants.LogConstants.OAUTH_INBOUND_SERVICE, null, OAuthConstants.LogConstants.FAILED, "Provided token is not a valid refresh token.", "validate-refresh-token", null); return buildIntrospectionErrorResponse(e.getMessage()); } if (refreshTokenDataDO == null || hasRefreshTokenExpired(refreshTokenDataDO)) { if (refreshTokenDataDO == null) { LoggerUtils.triggerDiagnosticLogEvent(OAuthConstants.LogConstants.OAUTH_INBOUND_SERVICE, null, OAuthConstants.LogConstants.FAILED, "Provided token is not a valid refresh token.", "validate-refresh-token", null); } else if (hasRefreshTokenExpired(refreshTokenDataDO)) { LoggerUtils.triggerDiagnosticLogEvent(OAuthConstants.LogConstants.OAUTH_INBOUND_SERVICE, null, OAuthConstants.LogConstants.FAILED, "Token is expired.", "validate-refresh-token", null); } // Token is not active. we do not need to worry about other details. introResp.setActive(false); return introResp; } // should be in seconds introResp.setExp((refreshTokenDataDO.getRefreshTokenValidityPeriodInMillis() + refreshTokenDataDO.getRefreshTokenIssuedTime().getTime()) / 1000); // should be in seconds introResp.setIat(refreshTokenDataDO.getRefreshTokenIssuedTime().getTime() / 1000); // Not before time will be the same as issued time. introResp.setNbf(refreshTokenDataDO.getRefreshTokenIssuedTime().getTime() / 1000); // Token scopes. introResp.setScope(OAuth2Util.buildScopeString((refreshTokenDataDO.getScope()))); // Set user-name. introResp.setUsername(getAuthzUser(refreshTokenDataDO)); // Add client id. introResp.setClientId(refreshTokenDataDO.getConsumerKey()); // Adding the AccessTokenDO as a context property for further use. messageContext.addProperty("RefreshTokenDO", refreshTokenDataDO); // Add authenticated user object since username attribute may not have the domain appended if the // subject identifier is built based in the SP config. introResp.setAuthorizedUser(refreshTokenDataDO.getAuthzUser()); // Validate access delegation. if (!tokenValidator.validateAccessDelegation(messageContext)) { // This is redundant. But for sake of readability. introResp.setActive(false); return buildIntrospectionErrorResponse("Invalid access delegation"); } // Validate scopes. if (!tokenValidator.validateScope(messageContext)) { // This is redundant. But for sake of readability. introResp.setActive(false); return buildIntrospectionErrorResponse("Scope validation failed"); } // All set. mark the token active. 
introResp.setActive(true); return introResp; } private OAuth2IntrospectionResponseDTO validateAccessToken(OAuth2TokenValidationMessageContext messageContext, OAuth2TokenValidationRequestDTO validationRequest, OAuth2TokenValidator tokenValidator) throws IdentityOAuth2Exception { OAuth2IntrospectionResponseDTO introResp = new OAuth2IntrospectionResponseDTO(); AccessTokenDO accessTokenDO = null; List<String> requestedAllowedScopes = new ArrayList<>(); if (messageContext.getProperty(OAuth2Util.REMOTE_ACCESS_TOKEN) != null && "true".equalsIgnoreCase((String) messageContext.getProperty(OAuth2Util.REMOTE_ACCESS_TOKEN))) { // this can be a self-issued JWT or any access token issued by a trusted OAuth authorization server. // should be in seconds if (messageContext.getProperty(OAuth2Util.EXP) != null) { introResp.setExp(Long.parseLong((String) messageContext.getProperty(OAuth2Util.EXP))); } // should be in seconds if (messageContext.getProperty(OAuth2Util.IAT) != null) { introResp.setIat(Long.parseLong((String) messageContext.getProperty(OAuth2Util.IAT))); } // token scopes - space delimited if (messageContext.getProperty(OAuth2Util.SCOPE) != null) { introResp.setScope((String) messageContext.getProperty(OAuth2Util.SCOPE)); } // set user-name if (messageContext.getProperty(OAuth2Util.USERNAME) != null) { introResp.setUsername((String) messageContext.getProperty(OAuth2Util.USERNAME)); } // set client-id if (messageContext.getProperty(OAuth2Util.CLIENT_ID) != null) { introResp.setClientId((String) messageContext.getProperty(OAuth2Util.CLIENT_ID)); } } else { try { accessTokenDO = OAuth2Util.findAccessToken(validationRequest.getAccessToken().getIdentifier(), false); List<String> allowedScopes = OAuthServerConfiguration.getInstance().getAllowedScopes(); String[] requestedScopes = accessTokenDO.getScope(); List<String> scopesToBeValidated = new ArrayList<>(); if (requestedScopes != null) { for (String scope : requestedScopes) { if (OAuth2Util.isAllowedScope(allowedScopes, scope)) { requestedAllowedScopes.add(scope); } else { scopesToBeValidated.add(scope); } } accessTokenDO.setScope(scopesToBeValidated.toArray(new String[0])); } } catch (IllegalArgumentException e) { // access token not found in the system. LoggerUtils.triggerDiagnosticLogEvent(OAuthConstants.LogConstants.OAUTH_INBOUND_SERVICE, null, OAuthConstants.LogConstants.FAILED, "Provided token is not a valid access token.", "validate-access-token", null); return buildIntrospectionErrorResponse(e.getMessage()); } if (hasAccessTokenExpired(accessTokenDO)) { LoggerUtils.triggerDiagnosticLogEvent(OAuthConstants.LogConstants.OAUTH_INBOUND_SERVICE, null, OAuthConstants.LogConstants.FAILED, "Access token is expired.", "validate-access-token", null); // token is not active. we do not need to worry about other details. introResp.setActive(false); return introResp; } // should be in seconds if (accessTokenDO.getValidityPeriodInMillis() < 0) { introResp.setExp(Long.MAX_VALUE); } else { if (accessTokenDO.getValidityPeriodInMillis() + accessTokenDO.getIssuedTime().getTime() < 0) { // When the access token have a long validity period (eg: 9223372036854775000), the calculated // expiry time will be a negative value. The reason is that, max value of long data type of Java is // "9223372036854775807". So, when the addition of the validity period and the issued time exceeds // this max value, it will result in a negative value. In those instances, we set the expiry time as // the max value of long data type. 
introResp.setExp(Long.MAX_VALUE); } else { introResp.setExp( (accessTokenDO.getValidityPeriodInMillis() + accessTokenDO.getIssuedTime().getTime()) / 1000); } } // should be in seconds introResp.setIat(accessTokenDO.getIssuedTime().getTime() / 1000); // Not before time will be the same as issued time. introResp.setNbf(accessTokenDO.getIssuedTime().getTime() / 1000); // token scopes introResp.setScope(OAuth2Util.buildScopeString((accessTokenDO.getScope()))); // set user-name introResp.setUsername(getAuthzUser(accessTokenDO)); // add client id introResp.setClientId(accessTokenDO.getConsumerKey()); // Set token binding info. if (accessTokenDO.getTokenBinding() != null) { introResp.setBindingType(accessTokenDO.getTokenBinding().getBindingType()); introResp.setBindingReference(accessTokenDO.getTokenBinding().getBindingReference()); } // add authorized user type if (accessTokenDO.getTokenType() != null) { introResp.setAut(accessTokenDO.getTokenType()); } // adding the AccessTokenDO as a context property for further use messageContext.addProperty("AccessTokenDO", accessTokenDO); // Add authenticated user object since username attribute may not have the domain appended if the // subject identifier is built based in the SP config. introResp.setAuthorizedUser(accessTokenDO.getAuthzUser()); } if (messageContext.getProperty(OAuth2Util.JWT_ACCESS_TOKEN) != null && "true".equalsIgnoreCase((String) messageContext.getProperty(OAuth2Util.JWT_ACCESS_TOKEN))) { // attributes only related JWT access tokens. if (messageContext.getProperty(OAuth2Util.SUB) != null) { introResp.setSub((String) messageContext.getProperty(OAuth2Util.SUB)); } if (messageContext.getProperty(OAuth2Util.ISS) != null) { introResp.setIss((String) messageContext.getProperty(OAuth2Util.ISS)); } if (messageContext.getProperty(OAuth2Util.AUD) != null) { introResp.setAud((String) messageContext.getProperty(OAuth2Util.AUD)); } if (messageContext.getProperty(OAuth2Util.JTI) != null) { introResp.setJti((String) messageContext.getProperty(OAuth2Util.JTI)); } // set the token not to be used before time in seconds if (messageContext.getProperty(OAuth2Util.NBF) != null) { introResp.setNbf(Long.parseLong((String) messageContext.getProperty(OAuth2Util.NBF))); } } // Validate access delegation. if (!tokenValidator.validateAccessDelegation(messageContext)) { // This is redundant. But sake of readability. LoggerUtils.triggerDiagnosticLogEvent(OAuthConstants.LogConstants.OAUTH_INBOUND_SERVICE, null, OAuthConstants.LogConstants.FAILED, "Invalid access delegation.", "validate-access-token", null); introResp.setActive(false); return buildIntrospectionErrorResponse("Invalid access delegation"); } // Validate scopes at app level. if (!tokenValidator.validateScope(messageContext)) { // This is redundant. But sake of readability. LoggerUtils.triggerDiagnosticLogEvent(OAuthConstants.LogConstants.OAUTH_INBOUND_SERVICE, null, OAuthConstants.LogConstants.FAILED, "Scope validation failed at application level.", "validate-access-token", null); introResp.setActive(false); if (log.isDebugEnabled()) { log.debug("Scope validation has failed at app level."); } return buildIntrospectionErrorResponse("Scope validation failed"); } // Add requested allowed scopes to the message context. addAllowedScopes(messageContext, requestedAllowedScopes.toArray(new String[0])); // Add requested allowed scopes to introResp and to the accessTokenDO. if (accessTokenDO != null) { addAllowedScopes(introResp, accessTokenDO, requestedAllowedScopes.toArray(new String[0])); } // All set. 
mark the token active. introResp.setActive(true); return introResp; } private String getAuthzUser(AccessTokenDO accessTokenDO) throws IdentityOAuth2Exception { AuthenticatedUser user = accessTokenDO.getAuthzUser(); if (user.isFederatedUser()) { return user.getAuthenticatedSubjectIdentifier(); } String consumerKey = accessTokenDO.getConsumerKey(); try { boolean buildSubjectIdentifierFromSPConfig = Boolean.parseBoolean(IdentityUtil.getProperty (BUILD_FQU_FROM_SP_CONFIG)); if (buildSubjectIdentifierFromSPConfig) { ServiceProvider serviceProvider = getServiceProvider(consumerKey); boolean useTenantDomainInLocalSubjectIdentifier = serviceProvider .getLocalAndOutBoundAuthenticationConfig().isUseTenantDomainInLocalSubjectIdentifier(); boolean useUserStoreDomainInLocalSubjectIdentifier = serviceProvider .getLocalAndOutBoundAuthenticationConfig().isUseUserstoreDomainInLocalSubjectIdentifier(); return user.getUsernameAsSubjectIdentifier(useUserStoreDomainInLocalSubjectIdentifier, useTenantDomainInLocalSubjectIdentifier); } else { return user.toFullQualifiedUsername(); } } catch (IdentityApplicationManagementException | InvalidOAuthClientException e) { throw new IdentityOAuth2Exception("Error occurred while retrieving OAuth2 application data for client id:" + consumerKey, e); } } private ServiceProvider getServiceProvider(String consumerKey) throws IdentityApplicationManagementException, IdentityOAuth2Exception, InvalidOAuthClientException { String spTenantDomain = OAuth2Util.getTenantDomainOfOauthApp(consumerKey); return OAuth2ServiceComponentHolder.getApplicationMgtService().getServiceProviderByClientId(consumerKey, OAuthConstants.Scope.OAUTH2, spTenantDomain); } /** * @param errorMessage * @return */ private OAuth2ClientApplicationDTO buildClientAppErrorResponse(String errorMessage) { OAuth2TokenValidationResponseDTO responseDTO = new OAuth2TokenValidationResponseDTO(); OAuth2ClientApplicationDTO clientApp = new OAuth2ClientApplicationDTO(); if (log.isDebugEnabled()) { log.debug(errorMessage); } responseDTO.setValid(false); responseDTO.setErrorMsg(errorMessage); clientApp.setAccessTokenValidationResponse(responseDTO); return clientApp; } /** * @param errorMessage * @return */ private OAuth2IntrospectionResponseDTO buildIntrospectionErrorResponse(String errorMessage) { OAuth2IntrospectionResponseDTO response = new OAuth2IntrospectionResponseDTO(); if (log.isDebugEnabled()) { log.debug(errorMessage); } response.setActive(false); response.setError(errorMessage); return response; } /** * @param accessToken * @return * @throws IdentityOAuth2Exception */ private OAuth2TokenValidator findAccessTokenValidator(OAuth2TokenValidationRequestDTO.OAuth2AccessToken accessToken) throws IdentityOAuth2Exception { // incomplete token validation request if (accessToken == null) { throw new IllegalArgumentException("Access token is not present in the validation request"); } String accessTokenIdentifier = accessToken.getIdentifier(); // incomplete token validation request if (accessTokenIdentifier == null) { throw new IllegalArgumentException("Access token identifier is not present in the validation request"); } OAuth2TokenValidator tokenValidator; if (isJWTTokenValidation(accessToken.getIdentifier())) { /* If the token is a self-contained JWT based access token and the config EnableJWTTokenValidationDuringIntrospection is set to true then the jwt token validator is selected. In the default pack TokenValidator type 'jwt' is 'org.wso2.carbon.identity.oauth2.validators.OAuth2JWTTokenValidator'. 
*/ tokenValidator = tokenValidators.get(BEARER_TOKEN_TYPE_JWT); } else { tokenValidator = tokenValidators.get(accessToken.getTokenType()); } // There is no token validator for the provided token type. if (tokenValidator == null) { throw new IllegalArgumentException("Unsupported access token type: " + accessToken.getTokenType()); } return tokenValidator; } /** * @param accessTokenDO * @return */ private long getAccessTokenExpirationTime(AccessTokenDO accessTokenDO) { long expiryTime = OAuth2Util.getAccessTokenExpireMillis(accessTokenDO, false); if (OAuthConstants.UserType.APPLICATION_USER.equals(accessTokenDO.getTokenType()) && OAuthServerConfiguration.getInstance().getUserAccessTokenValidityPeriodInSeconds() < 0) { return Long.MAX_VALUE; } else if (OAuthConstants.UserType.APPLICATION.equals(accessTokenDO.getTokenType()) && OAuthServerConfiguration.getInstance().getApplicationAccessTokenValidityPeriodInSeconds() < 0) { return Long.MAX_VALUE; } else if (expiryTime < 0) { return Long.MAX_VALUE; } return expiryTime / 1000; } /** * @param accessTokenDO * @return * @throws IdentityOAuth2Exception */ private boolean hasAccessTokenExpired(AccessTokenDO accessTokenDO) { // check whether the grant is expired if (accessTokenDO.getValidityPeriod() < 0) { if (log.isDebugEnabled()) { log.debug("Access Token has infinite lifetime"); } } else { if (OAuth2Util.getAccessTokenExpireMillis(accessTokenDO, true) == 0) { if (log.isDebugEnabled()) { log.debug("Access Token has expired"); } return true; } } return false; } private boolean hasRefreshTokenExpired(AccessTokenDO accessTokenDO) { if (accessTokenDO.getRefreshTokenValidityPeriodInMillis() < 0) { if (log.isDebugEnabled()) { log.debug("Access Token has infinite lifetime"); } } else { if (OAuth2Util.getRefreshTokenExpireTimeMillis(accessTokenDO) == 0) { if (log.isDebugEnabled()) { log.debug("Access Token has expired"); } return true; } } return false; } private AccessTokenDO findRefreshToken(String refreshToken) throws IdentityOAuth2Exception { return OAuthTokenPersistenceFactory.getInstance().getTokenManagementDAO().getRefreshToken(refreshToken); } private boolean isJWTTokenValidation(String tokenIdentifier) { return Boolean.parseBoolean(IdentityUtil.getProperty(ENABLE_JWT_TOKEN_VALIDATION)) && isParsableJWT( tokenIdentifier); } private boolean isSkipValidatorForJWT(OAuth2TokenValidator tokenValidator, boolean isJWTTokenValidation) { return isJWTTokenValidation && BEARER_TOKEN_TYPE.equals(tokenValidator.getTokenType()); } private void addAllowedScopes(OAuth2TokenValidationMessageContext oAuth2TokenValidationMessageContext, String[] allowedScopes) { String[] scopes = oAuth2TokenValidationMessageContext.getResponseDTO().getScope(); String[] scopesToReturn = (String[]) ArrayUtils.addAll(scopes, allowedScopes); oAuth2TokenValidationMessageContext.getResponseDTO().setScope(scopesToReturn); } private void addAllowedScopes(OAuth2IntrospectionResponseDTO introResp, AccessTokenDO accessTokenDO, String[] allowedScopes) { String[] scopes = accessTokenDO.getScope(); String[] scopesToReturn = (String[]) ArrayUtils.addAll(scopes, allowedScopes); accessTokenDO.setScope(scopesToReturn); introResp.setScope(OAuth2Util.buildScopeString((scopesToReturn))); } }
components/org.wso2.carbon.identity.oauth/src/main/java/org/wso2/carbon/identity/oauth2/validators/TokenValidationHandler.java
/* * Copyright (c) 2019, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.carbon.identity.oauth2.validators; import org.apache.commons.lang.ArrayUtils; import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.wso2.carbon.identity.application.authentication.framework.model.AuthenticatedUser; import org.wso2.carbon.identity.application.common.IdentityApplicationManagementException; import org.wso2.carbon.identity.application.common.model.ServiceProvider; import org.wso2.carbon.identity.central.log.mgt.utils.LoggerUtils; import org.wso2.carbon.identity.core.util.IdentityUtil; import org.wso2.carbon.identity.oauth.common.OAuthConstants; import org.wso2.carbon.identity.oauth.common.exception.InvalidOAuthClientException; import org.wso2.carbon.identity.oauth.config.OAuthServerConfiguration; import org.wso2.carbon.identity.oauth2.IdentityOAuth2Exception; import org.wso2.carbon.identity.oauth2.authcontext.AuthorizationContextTokenGenerator; import org.wso2.carbon.identity.oauth2.dao.OAuthTokenPersistenceFactory; import org.wso2.carbon.identity.oauth2.dto.OAuth2ClientApplicationDTO; import org.wso2.carbon.identity.oauth2.dto.OAuth2IntrospectionResponseDTO; import org.wso2.carbon.identity.oauth2.dto.OAuth2TokenValidationRequestDTO; import org.wso2.carbon.identity.oauth2.dto.OAuth2TokenValidationResponseDTO; import org.wso2.carbon.identity.oauth2.internal.OAuth2ServiceComponentHolder; import org.wso2.carbon.identity.oauth2.model.AccessTokenDO; import org.wso2.carbon.identity.oauth2.util.OAuth2Util; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.TreeMap; import static org.wso2.carbon.identity.oauth2.util.OAuth2Util.isParsableJWT; /** * Handles the token validation by invoking the proper validation handler by looking at the token * type. 
*/ public class TokenValidationHandler { private static TokenValidationHandler instance = null; AuthorizationContextTokenGenerator tokenGenerator = null; private static final Log log = LogFactory.getLog(TokenValidationHandler.class); private Map<String, OAuth2TokenValidator> tokenValidators = new TreeMap<>(String.CASE_INSENSITIVE_ORDER); private static final String BEARER_TOKEN_TYPE = "Bearer"; private static final String BEARER_TOKEN_TYPE_JWT = "jwt"; private static final String BUILD_FQU_FROM_SP_CONFIG = "OAuth.BuildSubjectIdentifierFromSPConfig"; private static final String ENABLE_JWT_TOKEN_VALIDATION = "OAuth.EnableJWTTokenValidationDuringIntrospection"; private TokenValidationHandler() { tokenValidators.put(DefaultOAuth2TokenValidator.TOKEN_TYPE, new DefaultOAuth2TokenValidator()); tokenValidators.put(RefreshTokenValidator.TOKEN_TYPE, new RefreshTokenValidator()); for (Map.Entry<String, String> entry : OAuthServerConfiguration.getInstance().getTokenValidatorClassNames() .entrySet()) { String className = null; try { String type = entry.getKey(); className = entry.getValue(); Class clazz = Thread.currentThread().getContextClassLoader().loadClass(entry.getValue()); OAuth2TokenValidator tokenValidator = (OAuth2TokenValidator) clazz.newInstance(); tokenValidators.put(type, tokenValidator); } catch (ClassNotFoundException e) { log.error("Class not in build path " + className, e); } catch (InstantiationException e) { log.error("Class initialization error " + className, e); } catch (IllegalAccessException e) { log.error("Class access error " + className, e); } } // setting up the JWT if required if (OAuthServerConfiguration.getInstance().isAuthContextTokGenEnabled()) { try { Class clazz = this.getClass().getClassLoader().loadClass(OAuthServerConfiguration.getInstance() .getTokenGeneratorImplClass()); tokenGenerator = (AuthorizationContextTokenGenerator) clazz.newInstance(); tokenGenerator.init(); if (log.isDebugEnabled()) { log.debug("An instance of " + OAuthServerConfiguration.getInstance().getTokenGeneratorImplClass() + " is created for OAuthServerConfiguration."); } } catch (ClassNotFoundException e) { String errorMsg = "Class not found: " + OAuthServerConfiguration.getInstance().getTokenGeneratorImplClass(); log.error(errorMsg, e); } catch (InstantiationException e) { String errorMsg = "Error while instantiating: " + OAuthServerConfiguration.getInstance().getTokenGeneratorImplClass(); log.error(errorMsg, e); } catch (IllegalAccessException e) { String errorMsg = "Illegal access to: " + OAuthServerConfiguration.getInstance().getTokenGeneratorImplClass(); log.error(errorMsg, e); } catch (IdentityOAuth2Exception e) { String errorMsg = "Error while initializing: " + OAuthServerConfiguration.getInstance().getTokenGeneratorImplClass(); log.error(errorMsg, e); } } } public static TokenValidationHandler getInstance() { if (instance == null) { synchronized (TokenValidationHandler.class) { if (instance == null) { instance = new TokenValidationHandler(); } } } return instance; } public void addTokenValidator(String type, OAuth2TokenValidator handler) { tokenValidators.put(type, handler); } /** * @param requestDTO * @return * @throws IdentityOAuth2Exception */ public OAuth2TokenValidationResponseDTO validate(OAuth2TokenValidationRequestDTO requestDTO) throws IdentityOAuth2Exception { OAuth2ClientApplicationDTO appToken = findOAuthConsumerIfTokenIsValid(requestDTO); return appToken.getAccessTokenValidationResponse(); } /** * this is method is deprecated now. 
any new implementations use buildIntrospectionResponse. * * @param requestDTO * @return * @throws IdentityOAuth2Exception */ @Deprecated public OAuth2ClientApplicationDTO findOAuthConsumerIfTokenIsValid(OAuth2TokenValidationRequestDTO requestDTO) throws IdentityOAuth2Exception { OAuth2ClientApplicationDTO clientApp = new OAuth2ClientApplicationDTO(); OAuth2TokenValidationResponseDTO responseDTO = new OAuth2TokenValidationResponseDTO(); OAuth2TokenValidationMessageContext messageContext = new OAuth2TokenValidationMessageContext(requestDTO, responseDTO); OAuth2TokenValidationRequestDTO.OAuth2AccessToken accessToken = requestDTO.getAccessToken(); OAuth2TokenValidator tokenValidator = null; AccessTokenDO accessTokenDO = null; try { tokenValidator = findAccessTokenValidator(accessToken); } catch (IllegalArgumentException e) { // access token not provided. return buildClientAppErrorResponse(e.getMessage()); } try { accessTokenDO = OAuth2Util.findAccessToken(requestDTO.getAccessToken().getIdentifier(), false); } catch (IllegalArgumentException e) { // Access token not found in the system. return buildClientAppErrorResponse(e.getMessage()); } if (hasAccessTokenExpired(accessTokenDO)) { return buildClientAppErrorResponse("Access token expired"); } // Set the token expiration time responseDTO.setExpiryTime(getAccessTokenExpirationTime(accessTokenDO)); // Adding the AccessTokenDO as a context property for further use messageContext.addProperty(OAuthConstants.ACCESS_TOKEN_DO, accessTokenDO); if (!tokenValidator.validateAccessDelegation(messageContext)) { return buildClientAppErrorResponse("Invalid access delegation"); } if (!tokenValidator.validateScope(messageContext)) { return buildClientAppErrorResponse("Scope validation failed at app level"); } if (!tokenValidator.validateAccessToken(messageContext)) { return buildClientAppErrorResponse("OAuth2 access token validation failed"); } responseDTO.setAuthorizedUser(getAuthzUser(accessTokenDO)); responseDTO.setScope(accessTokenDO.getScope()); responseDTO.setValid(true); responseDTO.setTokenBinding(accessTokenDO.getTokenBinding()); if (tokenGenerator != null) { tokenGenerator.generateToken(messageContext); if (log.isDebugEnabled()) { log.debug(tokenGenerator.getClass().getName() + " generated token set to response"); } } clientApp.setAccessTokenValidationResponse(responseDTO); clientApp.setConsumerKey(accessTokenDO.getConsumerKey()); return clientApp; } /** * returns back the introspection response, which is compatible with RFC 7662. * * @param validationRequest * @return * @throws IdentityOAuth2Exception */ public OAuth2IntrospectionResponseDTO buildIntrospectionResponse(OAuth2TokenValidationRequestDTO validationRequest) throws IdentityOAuth2Exception { OAuth2TokenValidationResponseDTO responseDTO = new OAuth2TokenValidationResponseDTO(); OAuth2IntrospectionResponseDTO introResp = new OAuth2IntrospectionResponseDTO(); OAuth2TokenValidationMessageContext messageContext = new OAuth2TokenValidationMessageContext(validationRequest, responseDTO); OAuth2TokenValidationRequestDTO.OAuth2AccessToken oAuth2Token = validationRequest.getAccessToken(); // To hold the applicable validators list from all the available validators. This list will be prioritized if we // have a token_type_hint. List<OAuth2TokenValidator> applicableValidators = new ArrayList<>(); boolean isJWTTokenValidation = isJWTTokenValidation(oAuth2Token.getIdentifier()); // If we have a token type hint, we have to prioritize our list. 
if (oAuth2Token.getTokenType() != null) { if (tokenValidators.get(oAuth2Token.getTokenType()) != null) { // Ignore bearer token validators if the token is JWT. if (!isSkipValidatorForJWT(tokenValidators.get(oAuth2Token.getTokenType()), isJWTTokenValidation)) { applicableValidators.add(tokenValidators.get(oAuth2Token.getTokenType())); } } } // Add the rest of the validators. for (Map.Entry<String, OAuth2TokenValidator> oAuth2TokenValidator : tokenValidators.entrySet()) { // Ignore if we added this already. if (StringUtils.equals(oAuth2TokenValidator.getKey(), oAuth2Token.getTokenType())) { continue; } // Ignore bearer token validators if the token is JWT. if (isSkipValidatorForJWT(oAuth2TokenValidator.getValue(), isJWTTokenValidation)) { continue; } if (oAuth2TokenValidator.getValue() != null) { applicableValidators.add(oAuth2TokenValidator.getValue()); } } // Catch the latest exception and throw it if there aren't any active tokens. Exception exception = null; for (OAuth2TokenValidator tokenValidator : applicableValidators) { try { if (tokenValidator.validateAccessToken(messageContext)) { // We have to specially handle the access token and refresh token for further validations. if (tokenValidator instanceof DefaultOAuth2TokenValidator) { introResp = validateAccessToken(messageContext, validationRequest, tokenValidator); } else if (tokenValidator instanceof RefreshTokenValidator) { introResp = validateRefreshToken(messageContext, validationRequest, tokenValidator); } if (log.isDebugEnabled()) { log.debug("Introspecting token of the application:" + introResp.getClientId() + " using the" + " token validator " + tokenValidator.getClass().getName()); } // If there aren't any errors from the above special validations. if (introResp.isActive()) { if (log.isDebugEnabled()) { log.debug("Introspecting token is active for the application:" + introResp.getClientId()); } introResp.setTokenType(tokenValidator.getTokenType()); break; } } } catch (Exception ex) { exception = ex; } } // If there aren't any active tokens, then there should be an error or exception. If no error or exception // as well, that means this token is not active. So show the generic error. if (!introResp.isActive()) { if (introResp.getError() != null) { LoggerUtils.triggerDiagnosticLogEvent(OAuthConstants.LogConstants.OAUTH_INBOUND_SERVICE, null, OAuthConstants.LogConstants.FAILED, introResp.getError(), "validate-token", null); return introResp; } else if (exception != null) { LoggerUtils.triggerDiagnosticLogEvent(OAuthConstants.LogConstants.OAUTH_INBOUND_SERVICE, null, OAuthConstants.LogConstants.FAILED, "System error occurred.", "validate-token", null); throw new IdentityOAuth2Exception("Error occurred while validating token.", exception); } else { return buildIntrospectionErrorResponse("Token validation failed"); } } else { LoggerUtils.triggerDiagnosticLogEvent(OAuthConstants.LogConstants.OAUTH_INBOUND_SERVICE, null, OAuthConstants.LogConstants.SUCCESS, "Token is successfully validated.", "validate-token", null); } if (introResp.getUsername() != null) { responseDTO.setAuthorizedUser(introResp.getUsername()); } if (tokenGenerator != null && validationRequest.getRequiredClaimURIs() != null) { // add user attributes to the introspection response. 
tokenGenerator.generateToken(messageContext); if (log.isDebugEnabled()) { log.debug(tokenGenerator.getClass().getName() + " generated token set to response"); } if (responseDTO.getAuthorizationContextToken() != null) { introResp.setUserContext(responseDTO.getAuthorizationContextToken().getTokenString()); } } introResp.getProperties().put(OAuth2Util.OAUTH2_VALIDATION_MESSAGE_CONTEXT, messageContext); return introResp; } private OAuth2IntrospectionResponseDTO validateRefreshToken(OAuth2TokenValidationMessageContext messageContext, OAuth2TokenValidationRequestDTO validationRequest, OAuth2TokenValidator tokenValidator) throws IdentityOAuth2Exception { OAuth2IntrospectionResponseDTO introResp = new OAuth2IntrospectionResponseDTO(); AccessTokenDO refreshTokenDataDO; try { refreshTokenDataDO = findRefreshToken(validationRequest.getAccessToken().getIdentifier()); } catch (IllegalArgumentException e) { // Refresh token not found in the system. LoggerUtils.triggerDiagnosticLogEvent(OAuthConstants.LogConstants.OAUTH_INBOUND_SERVICE, null, OAuthConstants.LogConstants.FAILED, "Provided token is not a valid refresh token.", "validate-refresh-token", null); return buildIntrospectionErrorResponse(e.getMessage()); } if (refreshTokenDataDO == null || hasRefreshTokenExpired(refreshTokenDataDO)) { if (refreshTokenDataDO == null) { LoggerUtils.triggerDiagnosticLogEvent(OAuthConstants.LogConstants.OAUTH_INBOUND_SERVICE, null, OAuthConstants.LogConstants.FAILED, "Provided token is not a valid refresh token.", "validate-refresh-token", null); } else if (hasRefreshTokenExpired(refreshTokenDataDO)) { LoggerUtils.triggerDiagnosticLogEvent(OAuthConstants.LogConstants.OAUTH_INBOUND_SERVICE, null, OAuthConstants.LogConstants.FAILED, "Token is expired.", "validate-refresh-token", null); } // Token is not active. we do not need to worry about other details. introResp.setActive(false); return introResp; } // should be in seconds introResp.setExp((refreshTokenDataDO.getRefreshTokenValidityPeriodInMillis() + refreshTokenDataDO.getRefreshTokenIssuedTime().getTime()) / 1000); // should be in seconds introResp.setIat(refreshTokenDataDO.getRefreshTokenIssuedTime().getTime() / 1000); // Not before time will be the same as issued time. introResp.setNbf(refreshTokenDataDO.getRefreshTokenIssuedTime().getTime() / 1000); // Token scopes. introResp.setScope(OAuth2Util.buildScopeString((refreshTokenDataDO.getScope()))); // Set user-name. introResp.setUsername(getAuthzUser(refreshTokenDataDO)); // Add client id. introResp.setClientId(refreshTokenDataDO.getConsumerKey()); // Adding the AccessTokenDO as a context property for further use. messageContext.addProperty("RefreshTokenDO", refreshTokenDataDO); // Add authenticated user object since username attribute may not have the domain appended if the // subject identifier is built based in the SP config. introResp.setAuthorizedUser(refreshTokenDataDO.getAuthzUser()); // Validate access delegation. if (!tokenValidator.validateAccessDelegation(messageContext)) { // This is redundant. But for sake of readability. introResp.setActive(false); return buildIntrospectionErrorResponse("Invalid access delegation"); } // Validate scopes. if (!tokenValidator.validateScope(messageContext)) { // This is redundant. But for sake of readability. introResp.setActive(false); return buildIntrospectionErrorResponse("Scope validation failed"); } // All set. mark the token active. 
introResp.setActive(true); return introResp; } private OAuth2IntrospectionResponseDTO validateAccessToken(OAuth2TokenValidationMessageContext messageContext, OAuth2TokenValidationRequestDTO validationRequest, OAuth2TokenValidator tokenValidator) throws IdentityOAuth2Exception { OAuth2IntrospectionResponseDTO introResp = new OAuth2IntrospectionResponseDTO(); AccessTokenDO accessTokenDO; List<String> requestedAllowedScopes = new ArrayList<>(); if (messageContext.getProperty(OAuth2Util.REMOTE_ACCESS_TOKEN) != null && "true".equalsIgnoreCase((String) messageContext.getProperty(OAuth2Util.REMOTE_ACCESS_TOKEN))) { // this can be a self-issued JWT or any access token issued by a trusted OAuth authorization server. // should be in seconds if (messageContext.getProperty(OAuth2Util.EXP) != null) { introResp.setExp(Long.parseLong((String) messageContext.getProperty(OAuth2Util.EXP))); } // should be in seconds if (messageContext.getProperty(OAuth2Util.IAT) != null) { introResp.setIat(Long.parseLong((String) messageContext.getProperty(OAuth2Util.IAT))); } // token scopes - space delimited if (messageContext.getProperty(OAuth2Util.SCOPE) != null) { introResp.setScope((String) messageContext.getProperty(OAuth2Util.SCOPE)); } // set user-name if (messageContext.getProperty(OAuth2Util.USERNAME) != null) { introResp.setUsername((String) messageContext.getProperty(OAuth2Util.USERNAME)); } // set client-id if (messageContext.getProperty(OAuth2Util.CLIENT_ID) != null) { introResp.setClientId((String) messageContext.getProperty(OAuth2Util.CLIENT_ID)); } } else { try { accessTokenDO = OAuth2Util.findAccessToken(validationRequest.getAccessToken().getIdentifier(), false); List<String> allowedScopes = OAuthServerConfiguration.getInstance().getAllowedScopes(); String[] requestedScopes = accessTokenDO.getScope(); List<String> scopesToBeValidated = new ArrayList<>(); if (requestedScopes != null) { for (String scope : requestedScopes) { if (OAuth2Util.isAllowedScope(allowedScopes, scope)) { requestedAllowedScopes.add(scope); } else { scopesToBeValidated.add(scope); } } accessTokenDO.setScope(scopesToBeValidated.toArray(new String[0])); } } catch (IllegalArgumentException e) { // access token not found in the system. LoggerUtils.triggerDiagnosticLogEvent(OAuthConstants.LogConstants.OAUTH_INBOUND_SERVICE, null, OAuthConstants.LogConstants.FAILED, "Provided token is not a valid access token.", "validate-access-token", null); return buildIntrospectionErrorResponse(e.getMessage()); } if (hasAccessTokenExpired(accessTokenDO)) { LoggerUtils.triggerDiagnosticLogEvent(OAuthConstants.LogConstants.OAUTH_INBOUND_SERVICE, null, OAuthConstants.LogConstants.FAILED, "Access token is expired.", "validate-access-token", null); // token is not active. we do not need to worry about other details. introResp.setActive(false); return introResp; } // should be in seconds if (accessTokenDO.getValidityPeriodInMillis() < 0) { introResp.setExp(Long.MAX_VALUE); } else { if (accessTokenDO.getValidityPeriodInMillis() + accessTokenDO.getIssuedTime().getTime() < 0) { // When the access token have a long validity period (eg: 9223372036854775000), the calculated // expiry time will be a negative value. The reason is that, max value of long data type of Java is // "9223372036854775807". So, when the addition of the validity period and the issued time exceeds // this max value, it will result in a negative value. In those instances, we set the expiry time as // the max value of long data type. 
introResp.setExp(Long.MAX_VALUE); } else { introResp.setExp( (accessTokenDO.getValidityPeriodInMillis() + accessTokenDO.getIssuedTime().getTime()) / 1000); } } // should be in seconds introResp.setIat(accessTokenDO.getIssuedTime().getTime() / 1000); // Not before time will be the same as issued time. introResp.setNbf(accessTokenDO.getIssuedTime().getTime() / 1000); // token scopes introResp.setScope(OAuth2Util.buildScopeString((accessTokenDO.getScope()))); // set user-name introResp.setUsername(getAuthzUser(accessTokenDO)); // add client id introResp.setClientId(accessTokenDO.getConsumerKey()); // Set token binding info. if (accessTokenDO.getTokenBinding() != null) { introResp.setBindingType(accessTokenDO.getTokenBinding().getBindingType()); introResp.setBindingReference(accessTokenDO.getTokenBinding().getBindingReference()); } // add authorized user type if (accessTokenDO.getTokenType() != null) { introResp.setAut(accessTokenDO.getTokenType()); } // adding the AccessTokenDO as a context property for further use messageContext.addProperty("AccessTokenDO", accessTokenDO); // Add authenticated user object since username attribute may not have the domain appended if the // subject identifier is built based in the SP config. introResp.setAuthorizedUser(accessTokenDO.getAuthzUser()); } if (messageContext.getProperty(OAuth2Util.JWT_ACCESS_TOKEN) != null && "true".equalsIgnoreCase((String) messageContext.getProperty(OAuth2Util.JWT_ACCESS_TOKEN))) { // attributes only related JWT access tokens. if (messageContext.getProperty(OAuth2Util.SUB) != null) { introResp.setSub((String) messageContext.getProperty(OAuth2Util.SUB)); } if (messageContext.getProperty(OAuth2Util.ISS) != null) { introResp.setIss((String) messageContext.getProperty(OAuth2Util.ISS)); } if (messageContext.getProperty(OAuth2Util.AUD) != null) { introResp.setAud((String) messageContext.getProperty(OAuth2Util.AUD)); } if (messageContext.getProperty(OAuth2Util.JTI) != null) { introResp.setJti((String) messageContext.getProperty(OAuth2Util.JTI)); } // set the token not to be used before time in seconds if (messageContext.getProperty(OAuth2Util.NBF) != null) { introResp.setNbf(Long.parseLong((String) messageContext.getProperty(OAuth2Util.NBF))); } } // Validate access delegation. if (!tokenValidator.validateAccessDelegation(messageContext)) { // This is redundant. But sake of readability. LoggerUtils.triggerDiagnosticLogEvent(OAuthConstants.LogConstants.OAUTH_INBOUND_SERVICE, null, OAuthConstants.LogConstants.FAILED, "Invalid access delegation.", "validate-access-token", null); introResp.setActive(false); return buildIntrospectionErrorResponse("Invalid access delegation"); } // Validate scopes at app level. if (!tokenValidator.validateScope(messageContext)) { // This is redundant. But sake of readability. LoggerUtils.triggerDiagnosticLogEvent(OAuthConstants.LogConstants.OAUTH_INBOUND_SERVICE, null, OAuthConstants.LogConstants.FAILED, "Scope validation failed at application level.", "validate-access-token", null); introResp.setActive(false); if (log.isDebugEnabled()) { log.debug("Scope validation has failed at app level."); } return buildIntrospectionErrorResponse("Scope validation failed"); } addAllowedScopes(messageContext, requestedAllowedScopes.toArray(new String[0])); // All set. mark the token active. 
introResp.setActive(true); return introResp; } private String getAuthzUser(AccessTokenDO accessTokenDO) throws IdentityOAuth2Exception { AuthenticatedUser user = accessTokenDO.getAuthzUser(); if (user.isFederatedUser()) { return user.getAuthenticatedSubjectIdentifier(); } String consumerKey = accessTokenDO.getConsumerKey(); try { boolean buildSubjectIdentifierFromSPConfig = Boolean.parseBoolean(IdentityUtil.getProperty (BUILD_FQU_FROM_SP_CONFIG)); if (buildSubjectIdentifierFromSPConfig) { ServiceProvider serviceProvider = getServiceProvider(consumerKey); boolean useTenantDomainInLocalSubjectIdentifier = serviceProvider .getLocalAndOutBoundAuthenticationConfig().isUseTenantDomainInLocalSubjectIdentifier(); boolean useUserStoreDomainInLocalSubjectIdentifier = serviceProvider .getLocalAndOutBoundAuthenticationConfig().isUseUserstoreDomainInLocalSubjectIdentifier(); return user.getUsernameAsSubjectIdentifier(useUserStoreDomainInLocalSubjectIdentifier, useTenantDomainInLocalSubjectIdentifier); } else { return user.toFullQualifiedUsername(); } } catch (IdentityApplicationManagementException | InvalidOAuthClientException e) { throw new IdentityOAuth2Exception("Error occurred while retrieving OAuth2 application data for client id:" + consumerKey, e); } } private ServiceProvider getServiceProvider(String consumerKey) throws IdentityApplicationManagementException, IdentityOAuth2Exception, InvalidOAuthClientException { String spTenantDomain = OAuth2Util.getTenantDomainOfOauthApp(consumerKey); return OAuth2ServiceComponentHolder.getApplicationMgtService().getServiceProviderByClientId(consumerKey, OAuthConstants.Scope.OAUTH2, spTenantDomain); } /** * @param errorMessage * @return */ private OAuth2ClientApplicationDTO buildClientAppErrorResponse(String errorMessage) { OAuth2TokenValidationResponseDTO responseDTO = new OAuth2TokenValidationResponseDTO(); OAuth2ClientApplicationDTO clientApp = new OAuth2ClientApplicationDTO(); if (log.isDebugEnabled()) { log.debug(errorMessage); } responseDTO.setValid(false); responseDTO.setErrorMsg(errorMessage); clientApp.setAccessTokenValidationResponse(responseDTO); return clientApp; } /** * @param errorMessage * @return */ private OAuth2IntrospectionResponseDTO buildIntrospectionErrorResponse(String errorMessage) { OAuth2IntrospectionResponseDTO response = new OAuth2IntrospectionResponseDTO(); if (log.isDebugEnabled()) { log.debug(errorMessage); } response.setActive(false); response.setError(errorMessage); return response; } /** * @param accessToken * @return * @throws IdentityOAuth2Exception */ private OAuth2TokenValidator findAccessTokenValidator(OAuth2TokenValidationRequestDTO.OAuth2AccessToken accessToken) throws IdentityOAuth2Exception { // incomplete token validation request if (accessToken == null) { throw new IllegalArgumentException("Access token is not present in the validation request"); } String accessTokenIdentifier = accessToken.getIdentifier(); // incomplete token validation request if (accessTokenIdentifier == null) { throw new IllegalArgumentException("Access token identifier is not present in the validation request"); } OAuth2TokenValidator tokenValidator; if (isJWTTokenValidation(accessToken.getIdentifier())) { /* If the token is a self-contained JWT based access token and the config EnableJWTTokenValidationDuringIntrospection is set to true then the jwt token validator is selected. In the default pack TokenValidator type 'jwt' is 'org.wso2.carbon.identity.oauth2.validators.OAuth2JWTTokenValidator'. 
*/ tokenValidator = tokenValidators.get(BEARER_TOKEN_TYPE_JWT); } else { tokenValidator = tokenValidators.get(accessToken.getTokenType()); } // There is no token validator for the provided token type. if (tokenValidator == null) { throw new IllegalArgumentException("Unsupported access token type: " + accessToken.getTokenType()); } return tokenValidator; } /** * @param accessTokenDO * @return */ private long getAccessTokenExpirationTime(AccessTokenDO accessTokenDO) { long expiryTime = OAuth2Util.getAccessTokenExpireMillis(accessTokenDO, false); if (OAuthConstants.UserType.APPLICATION_USER.equals(accessTokenDO.getTokenType()) && OAuthServerConfiguration.getInstance().getUserAccessTokenValidityPeriodInSeconds() < 0) { return Long.MAX_VALUE; } else if (OAuthConstants.UserType.APPLICATION.equals(accessTokenDO.getTokenType()) && OAuthServerConfiguration.getInstance().getApplicationAccessTokenValidityPeriodInSeconds() < 0) { return Long.MAX_VALUE; } else if (expiryTime < 0) { return Long.MAX_VALUE; } return expiryTime / 1000; } /** * @param accessTokenDO * @return * @throws IdentityOAuth2Exception */ private boolean hasAccessTokenExpired(AccessTokenDO accessTokenDO) { // check whether the grant is expired if (accessTokenDO.getValidityPeriod() < 0) { if (log.isDebugEnabled()) { log.debug("Access Token has infinite lifetime"); } } else { if (OAuth2Util.getAccessTokenExpireMillis(accessTokenDO, true) == 0) { if (log.isDebugEnabled()) { log.debug("Access Token has expired"); } return true; } } return false; } private boolean hasRefreshTokenExpired(AccessTokenDO accessTokenDO) { if (accessTokenDO.getRefreshTokenValidityPeriodInMillis() < 0) { if (log.isDebugEnabled()) { log.debug("Access Token has infinite lifetime"); } } else { if (OAuth2Util.getRefreshTokenExpireTimeMillis(accessTokenDO) == 0) { if (log.isDebugEnabled()) { log.debug("Access Token has expired"); } return true; } } return false; } private AccessTokenDO findRefreshToken(String refreshToken) throws IdentityOAuth2Exception { return OAuthTokenPersistenceFactory.getInstance().getTokenManagementDAO().getRefreshToken(refreshToken); } private boolean isJWTTokenValidation(String tokenIdentifier) { return Boolean.parseBoolean(IdentityUtil.getProperty(ENABLE_JWT_TOKEN_VALIDATION)) && isParsableJWT( tokenIdentifier); } private boolean isSkipValidatorForJWT(OAuth2TokenValidator tokenValidator, boolean isJWTTokenValidation) { return isJWTTokenValidation && BEARER_TOKEN_TYPE.equals(tokenValidator.getTokenType()); } private void addAllowedScopes(OAuth2TokenValidationMessageContext oAuth2TokenValidationMessageContext, String[] allowedScopes) { String[] scopes = oAuth2TokenValidationMessageContext.getResponseDTO().getScope(); String[] scopesToReturn = (String[]) ArrayUtils.addAll(scopes, allowedScopes); oAuth2TokenValidationMessageContext.getResponseDTO().setScope(scopesToReturn); } }
Add allowed scopes to introspect response after scope validation
components/org.wso2.carbon.identity.oauth/src/main/java/org/wso2/carbon/identity/oauth2/validators/TokenValidationHandler.java
Add allowed scopes to introspect response after scope validation
<ide><path>components/org.wso2.carbon.identity.oauth/src/main/java/org/wso2/carbon/identity/oauth2/validators/TokenValidationHandler.java <ide> throws IdentityOAuth2Exception { <ide> <ide> OAuth2IntrospectionResponseDTO introResp = new OAuth2IntrospectionResponseDTO(); <del> AccessTokenDO accessTokenDO; <add> AccessTokenDO accessTokenDO = null; <ide> List<String> requestedAllowedScopes = new ArrayList<>(); <ide> <ide> if (messageContext.getProperty(OAuth2Util.REMOTE_ACCESS_TOKEN) != null <ide> return buildIntrospectionErrorResponse("Scope validation failed"); <ide> } <ide> <add> // Add requested allowed scopes to the message context. <ide> addAllowedScopes(messageContext, requestedAllowedScopes.toArray(new String[0])); <add> <add> // Add requested allowed scopes to introResp and to the accessTokenDO. <add> if (accessTokenDO != null) { <add> addAllowedScopes(introResp, accessTokenDO, requestedAllowedScopes.toArray(new String[0])); <add> } <add> <ide> // All set. mark the token active. <ide> introResp.setActive(true); <ide> return introResp; <ide> String[] scopesToReturn = (String[]) ArrayUtils.addAll(scopes, allowedScopes); <ide> oAuth2TokenValidationMessageContext.getResponseDTO().setScope(scopesToReturn); <ide> } <add> <add> private void addAllowedScopes(OAuth2IntrospectionResponseDTO introResp, AccessTokenDO accessTokenDO, <add> String[] allowedScopes) { <add> <add> String[] scopes = accessTokenDO.getScope(); <add> String[] scopesToReturn = (String[]) ArrayUtils.addAll(scopes, allowedScopes); <add> accessTokenDO.setScope(scopesToReturn); <add> introResp.setScope(OAuth2Util.buildScopeString((scopesToReturn))); <add> } <ide> }
JavaScript
mit
204cc9ea4803b04ff026341282f1a398d966307c
0
tirejs/tire
var document = window.document , _tire = window.tire , _$ = window.$ , idExp = /^#([\w\-]*)$/ , classExp = /^\.([\w\-]*)$/ , tagNameExp = /^[\w\-]+$/ , tagExp = /^<([\w:]+)/ , slice = [].slice , noop = function () {} , table = document.createElement('table') , tableRow = document.createElement('tr') , containers = { 'thead': table, 'tbody': table, 'tfoot': table, 'tr': document.createElement('tbody'), 'td': tableRow, 'th': tableRow, '*': document.createElement('div') }; // Array Remove - By John Resig (MIT Licensed) Array.remove = function(array, from, to) { var rest = array.slice((to || from) + 1 || array.length); array.length = from < 0 ? array.length + from : from; return array.push.apply(array, rest); }; // If slice is not available we provide a backup try { slice.call(document.documentElement.childNodes, 0)[0].nodeType; } catch(e) { slice = function (i) { i = i || 0; var elem, results = []; for (; (elem = this[i]); i++) { results.push(elem); } return results; }; } var tire = function (selector, context) { return new tire.fn.find(selector, context); }; tire.fn = tire.prototype = { /** * Default length is zero */ length: 0, /** * Extend `tire.fn` * * @param {Object} o */ extend: function (o) { for (var k in o) { this[k] = o[k]; } }, /** * Find elements by selector * * @param {String|Object|Function|Array} selector * @param {Object} context * * @return {Object} */ find: function (selector, context) { var elms = [], attrs; if (!selector) { return this; } if (tire.isFunction(selector)) { return tire.ready(selector); } if (selector.nodeType) { this.selector = ''; this.context = selector; return this.set([selector]); } if (selector.length === 1 && selector[0].nodeType) { this.selector = this.context = selector[0]; return this.set(selector); } context = this.context ? this.context : (context || document); if (tire.isPlainObject(context)) { attrs = context; context = document; } if (tire.isString(selector)) { this.selector = selector; if (idExp.test(selector) && context.nodeType === context.DOCUMENT_NODE) { elms = (elms = context.getElementById(selector.substr(1))) ? [elms] : []; } else if (context.nodeType !== 1 && context.nodeType !== 9) { elms = []; } else if (tagExp.test(selector)) { var name = tagExp.exec(selector)[1], tmp; if (!containers.hasOwnProperty(name)) name = '*'; tmp = containers[name]; tmp.innerHTML = selector; this.each.call(slice.call(tmp.childNodes), function () { elms.push(this); }); } else { elms = slice.call( classExp.test(selector) && context.getElementsByClassName !== undefined ? context.getElementsByClassName(selector.substr(1)) : tagNameExp.test(selector) ? 
context.getElementsByTagName(selector) : context.querySelectorAll(selector) ); } } else if (selector.nodeName || selector === window) { elms = [selector]; } else if (tire.isArray(selector)) { elms = selector; } if (selector.selector !== undefined) { this.selector = selector.selector; this.context = selector.context; } else if (this.context === undefined) { if (elms[0] !== undefined && !tire.isString(elms[0])) { this.context = elms[0]; } else { this.context = document; } } return this.set(elms).each(function () { return attrs && tire(this).attr(attrs); }); }, /** * Fetch property from elements * * @param {String} prop * @return {Array} */ pluck: function (prop) { var result = []; this.each(function () { if (this[prop]) result.push(this[prop]); }); return result; }, /** * Run callback for each element in the collection * * @param {Function} callback * @return {Object} */ each: function(target, callback) { var i, key; if (typeof target === 'function') { callback = target; target = this; } if (target === this || target instanceof Array) { for (i = 0; i < target.length; ++i) { if (callback.call(target[i], i, target[i]) === false) break; } } else { if (target instanceof tire) { return tire.each(slice.call(target), callback); } else { for (key in target) { if (target.hasOwnProperty(key) && callback.call(target[key], key, target[key]) === false) break; } } } return target; }, /** * Set elements to tire object before returning `this` * * @param {Array} elements * @return {Object} */ set: function (elements) { // Introduce a fresh `tire` set to prevent context from being overridden var i = 0, newSet = tire(); newSet.selector = this.selector; newSet.context = this.context; for (; i < elements.length; i++) { newSet[i] = elements[i]; } newSet.length = i; return newSet; } }; /** * Extend `tire` with arguments, if the arguments length is one the extend target is `tire` */ tire.extend = function () { var target = arguments[0] || {}; if (typeof target !== 'object' && typeof target !== 'function') { target = {}; } if (arguments.length === 1) target = this; tire.fn.each(slice.call(arguments), function (index, value) { for (var key in value) { if (target[key] !== value[key]) target[key] = value[key]; } }); return target; }; tire.fn.find.prototype = tire.fn; tire.extend({ // The current version of tire being used version: '{{version}}', // We sould be able to use each outside each: tire.fn.each, /** * Trim string * * @param {String} str * @return {String} */ trim: function (str) { return str == null ? '' : str.trim ? str.trim() : ('' + str).replace(/^\s+|\s+$/g, ''); }, /** * Check to see if a DOM element is a descendant of another DOM element. * * @param {Object} parent * @param {Object} node * * @return {Boolean} */ contains: function (parent, node) { return parent.contains ? 
parent != node && parent.contains(node) : !!(parent.compareDocumentPosition(node) & 16); }, /** * Check if the element matches the selector * * @param {Object} element * @param {String} selector * @return {Boolean} */ matches: function (element, selector) { if (!element || element.nodeType !== 1) return false; // Trying to use matchesSelector if it is available var matchesSelector = element.webkitMatchesSelector || element.mozMatchesSelector || element.oMatchesSelector || element.matchesSelector; if (matchesSelector) { return matchesSelector.call(element, selector); } // querySelectorAll fallback if (document.querySelectorAll !== undefined) { var nodes = element.parentNode.querySelectorAll(selector); for (var i = 0; i < nodes.length; i++) { if (nodes[i] === element) return true; } } return false; }, /** * Check if the `obj` is a function * * @param {Object} obj * @return {Boolean} */ isFunction: function (obj) { return typeof obj === 'function'; }, /** * Check if the `obj` is a array * * @param {Object} obj * @return {Boolean} */ isArray: function (obj) { return obj instanceof Array; }, /** * Check if the `obj` is a string * * @param {Object} obj * @return {Boolean} */ isString: function (obj) { return typeof obj === 'string'; }, /** * Check if the `obj` is a number * * @param {Object} obj * @return {Boolean} */ isNumeric: function (obj) { return typeof obj === 'number'; }, /** * Check if the `obj` is a object * * @param {Object} obj * @return {Boolean} */ isObject: function (obj) { return obj instanceof Object && !this.isArray(obj) && !this.isFunction(obj) && !this.isWindow(obj); }, /** * Check if `obj` is a plain object * * @param {Object} obj * @return {Boolean} */ isPlainObject: function (obj) { if (!obj || !this.isObject(obj) || this.isWindow(obj) || obj.nodeType) { return false; } else if (obj.__proto__ === Object.prototype) { return true; } else { var key; for (key in obj) {} return key === undefined || {}.hasOwnProperty.call(obj, key); } }, /** * Check if `obj` is a `window` object */ isWindow: function (obj) { return obj !== null && obj !== undefined && (obj === obj.window || 'setInterval' in obj); }, /** * Parse JSON string to object. * * @param {String} str * @return {Object|null} */ parseJSON: function (str) { if (!this.isString(str) || !str) { return null; } str = this.trim(str); if (window.JSON && window.JSON.parse) { return window.JSON.parse(str); } // Solution to fix JSON parse support for older browser. Not so nice but it works. try { return (new Function('return ' + str))(); } catch (e) { return null; } }, /** * Calling .noConflict will restore the window.$` to its previous value. * * @param {Boolean} name Restore `tire` to it's previous value. * @return {Object} */ noConflict: function (name) { if (name) { window.tire = _tire; } window.$ = _$; return tire; } });
src/core.js
var document = window.document , _tire = window.tire , _$ = window.$ , idExp = /^#([\w\-]*)$/ , classExp = /^\.([\w\-]*)$/ , tagNameExp = /^[\w\-]+$/ , tagExp = /^<([\w:]+)/ , slice = [].slice , noop = function () {} , table = document.createElement('table') , tableRow = document.createElement('tr') , containers = { 'thead': table, 'tbody': table, 'tfoot': table, 'tr': document.createElement('tbody'), 'td': tableRow, 'th': tableRow, '*': document.createElement('div') }; // Array Remove - By John Resig (MIT Licensed) Array.remove = function(array, from, to) { var rest = array.slice((to || from) + 1 || array.length); array.length = from < 0 ? array.length + from : from; return array.push.apply(array, rest); }; // If slice is not available we provide a backup try { slice.call(document.documentElement.childNodes, 0)[0].nodeType; } catch(e) { slice = function (i) { i = i || 0; var elem, results = []; for (; (elem = this[i]); i++) { results.push(elem); } return results; }; } var tire = function (selector, context) { return new tire.fn.find(selector, context); }; tire.fn = tire.prototype = { /** * Default length is zero */ length: 0, /** * Extend `tire.fn` * * @param {Object} o */ extend: function (o) { for (var k in o) { this[k] = o[k]; } }, /** * Find elements by selector * * @param {String|Object|Function|Array} selector * @param {Object} context * * @return {Object} */ find: function (selector, context) { var elms = [], attrs; if (!selector) { return this; } if (tire.isFunction(selector)) { return tire.ready(selector); } if (selector.nodeType) { this.selector = ''; this.context = selector; return this.set([selector]); } if (selector.length === 1 && selector[0].nodeType) { this.selector = this.context = selector[0]; return this.set(selector); } context = this.context ? this.context : (context || document); if (tire.isPlainObject(context)) { attrs = context; context = document; } if (tire.isString(selector)) { this.selector = selector; if (idExp.test(selector) && context.nodeType === context.DOCUMENT_NODE) { elms = (elms = context.getElementById(selector.substr(1))) ? [elms] : []; } else if (context.nodeType !== 1 && context.nodeType !== 9) { elms = []; } else if (tagExp.test(selector)) { var name = tagExp.exec(selector)[1], tmp; if (!containers.hasOwnProperty(name)) name = '*'; tmp = containers[name]; tmp.innerHTML = selector; this.each.call(slice.call(tmp.childNodes), function () { elms.push(this); }); } else { elms = slice.call( classExp.test(selector) && context.getElementsByClassName !== undefined ? context.getElementsByClassName(selector.substr(1)) : tagNameExp.test(selector) ? 
context.getElementsByTagName(selector) : context.querySelectorAll(selector) ); } } else if (selector.nodeName || selector === window) { elms = [selector]; } else if (tire.isArray(selector)) { elms = selector; } if (selector.selector !== undefined) { this.selector = selector.selector; this.context = selector.context; } else if (this.context === undefined) { if (elms[0] !== undefined && !tire.isString(elms[0])) { this.context = elms[0]; } else { this.context = document; } } return this.set(elms).each(function () { return attrs && tire(this).attr(attrs); }); }, /** * Fetch property from elements * * @param {String} prop * @return {Array} */ pluck: function (prop) { var result = []; this.each(function () { if (this[prop]) result.push(this[prop]); }); return result; }, /** * Run callback for each element in the collection * * @param {Function} callback * @return {Object} */ each: function(target, callback) { var i, key; if (typeof target === 'function') { callback = target; target = this; } if (target === this || target instanceof Array) { for (i = 0; i < target.length; ++i) { if (callback.call(target[i], i, target[i]) === false) break; } } else { for (key in target) { if (target.hasOwnProperty(key) && callback.call(target[key], key, target[key]) === false) break; } } return target; }, /** * Set elements to tire object before returning `this` * * @param {Array} elements * @return {Object} */ set: function (elements) { // Introduce a fresh `tire` set to prevent context from being overridden var i = 0, newSet = tire(); newSet.selector = this.selector; newSet.context = this.context; for (; i < elements.length; i++) { newSet[i] = elements[i]; } newSet.length = i; return newSet; } }; /** * Extend `tire` with arguments, if the arguments length is one the extend target is `tire` */ tire.extend = function () { var target = arguments[0] || {}; if (typeof target !== 'object' && typeof target !== 'function') { target = {}; } if (arguments.length === 1) target = this; tire.fn.each(slice.call(arguments), function (index, value) { for (var key in value) { if (target[key] !== value[key]) target[key] = value[key]; } }); return target; }; tire.fn.find.prototype = tire.fn; tire.extend({ // The current version of tire being used version: '{{version}}', // We sould be able to use each outside each: tire.fn.each, /** * Trim string * * @param {String} str * @return {String} */ trim: function (str) { return str == null ? '' : str.trim ? str.trim() : ('' + str).replace(/^\s+|\s+$/g, ''); }, /** * Check to see if a DOM element is a descendant of another DOM element. * * @param {Object} parent * @param {Object} node * * @return {Boolean} */ contains: function (parent, node) { return parent.contains ? 
parent != node && parent.contains(node) : !!(parent.compareDocumentPosition(node) & 16); }, /** * Check if the element matches the selector * * @param {Object} element * @param {String} selector * @return {Boolean} */ matches: function (element, selector) { if (!element || element.nodeType !== 1) return false; // Trying to use matchesSelector if it is available var matchesSelector = element.webkitMatchesSelector || element.mozMatchesSelector || element.oMatchesSelector || element.matchesSelector; if (matchesSelector) { return matchesSelector.call(element, selector); } // querySelectorAll fallback if (document.querySelectorAll !== undefined) { var nodes = element.parentNode.querySelectorAll(selector); for (var i = 0; i < nodes.length; i++) { if (nodes[i] === element) return true; } } return false; }, /** * Check if the `obj` is a function * * @param {Object} obj * @return {Boolean} */ isFunction: function (obj) { return typeof obj === 'function'; }, /** * Check if the `obj` is a array * * @param {Object} obj * @return {Boolean} */ isArray: function (obj) { return obj instanceof Array; }, /** * Check if the `obj` is a string * * @param {Object} obj * @return {Boolean} */ isString: function (obj) { return typeof obj === 'string'; }, /** * Check if the `obj` is a number * * @param {Object} obj * @return {Boolean} */ isNumeric: function (obj) { return typeof obj === 'number'; }, /** * Check if the `obj` is a object * * @param {Object} obj * @return {Boolean} */ isObject: function (obj) { return obj instanceof Object && !this.isArray(obj) && !this.isFunction(obj) && !this.isWindow(obj); }, /** * Check if `obj` is a plain object * * @param {Object} obj * @return {Boolean} */ isPlainObject: function (obj) { if (!obj || !this.isObject(obj) || this.isWindow(obj) || obj.nodeType) { return false; } else if (obj.__proto__ === Object.prototype) { return true; } else { var key; for (key in obj) {} return key === undefined || {}.hasOwnProperty.call(obj, key); } }, /** * Check if `obj` is a `window` object */ isWindow: function (obj) { return obj !== null && obj !== undefined && (obj === obj.window || 'setInterval' in obj); }, /** * Parse JSON string to object. * * @param {String} str * @return {Object|null} */ parseJSON: function (str) { if (!this.isString(str) || !str) { return null; } str = this.trim(str); if (window.JSON && window.JSON.parse) { return window.JSON.parse(str); } // Solution to fix JSON parse support for older browser. Not so nice but it works. try { return (new Function('return ' + str))(); } catch (e) { return null; } }, /** * Calling .noConflict will restore the window.$` to its previous value. * * @param {Boolean} name Restore `tire` to it's previous value. * @return {Object} */ noConflict: function (name) { if (name) { window.tire = _tire; } window.$ = _$; return tire; } });
Fixed problem with $.each(tireObject) that took more than just the elements.
src/core.js
Fixed problem with $.each(tireObject) that took more than just the elements.
<ide><path>rc/core.js <ide> if (callback.call(target[i], i, target[i]) === false) break; <ide> } <ide> } else { <del> for (key in target) { <del> if (target.hasOwnProperty(key) && callback.call(target[key], key, target[key]) === false) break; <add> if (target instanceof tire) { <add> return tire.each(slice.call(target), callback); <add> } else { <add> for (key in target) { <add> if (target.hasOwnProperty(key) && callback.call(target[key], key, target[key]) === false) break; <add> } <ide> } <ide> } <ide>
Java
mit
70cd22b3670ea1b3378d65a0d32b4c2c5b28589a
0
mcflugen/wmt-rest,mcflugen/wmt-rest
/** * <License> */ package edu.colorado.csdms.wmt.client.ui; import com.google.gwt.core.shared.GWT; import com.google.gwt.user.client.Window; import com.google.gwt.user.client.ui.FlexTable; import com.google.gwt.user.client.ui.HTML; import com.google.gwt.user.client.ui.HasHorizontalAlignment; import edu.colorado.csdms.wmt.client.control.DataManager; import edu.colorado.csdms.wmt.client.data.ParameterJSO; import edu.colorado.csdms.wmt.client.ui.widgets.ViewInputFilesPanel; /** * Builds a table of parameters for a single WMT model component. The value of * the parameter is editable. * * @author Mark Piper ([email protected]) */ public class ParameterTable extends FlexTable { public DataManager data; private String componentId; // the id of the displayed component private ViewInputFilesPanel viewFilesPanel; /** * Initializes a table of parameters for a single WMT model component. The * table is empty until {@link #loadTable()} is called. * * @param data the DataManager instance for the WMT session */ public ParameterTable(DataManager data) { this.data = data; this.setWidth("100%"); } /** * A worker that displays an informational message in the ParameterTable. */ public void showInfoMessage() { HTML infoMessage = new HTML("Select a model component to view and edit its parameters"); infoMessage.setStyleName("wmt-ParameterTableMessage"); this.setWidget(0, 0, infoMessage); } /** * A worker that loads the ParameterTable with parameter values for the * selected model component. Displays a {@link ViewInputFilesPanel} at the * bottom of the table. * * @param the id of the component whose parameters are to be displayed */ public void loadTable(String componentId) { // The component whose parameters are to be displayed. this.setComponentId(componentId); // Return if the selected component doesn't have parameters. if (data.getModelComponent(componentId).getParameters() == null) { this.clearTable(); Window.alert("No parameters defined for this component."); return; } // Set the component name on the tab holding the ParameterTable. data.getPerspective().setParameterPanelTitle(componentId); // Build the parameter table. Integer nParameters = data.getModelComponent(componentId).getParameters().length(); Integer parameterIndex = 0; for (int i = 0; i < nParameters; i++) { ParameterJSO parameter = data.getModelComponent(componentId).getParameters().get(i); this.setWidget(parameterIndex, 0, new DescriptionCell(parameter)); if (parameter.getKey().matches("separator")) { this.getFlexCellFormatter().setColSpan(parameterIndex, 0, 2); this.getFlexCellFormatter().setStyleName(parameterIndex, 0, "wmt-ParameterSeparator"); } else { this.setWidget(parameterIndex, 1, new ValueCell(parameter)); this.getFlexCellFormatter().setStyleName(parameterIndex, 0, "wmt-ParameterDescription"); } parameterIndex++; } // Append links to view input files. viewFilesPanel = new ViewInputFilesPanel(data, componentId); this.setWidget(parameterIndex, 0, viewFilesPanel); this.getFlexCellFormatter().setColSpan(parameterIndex, 0, 2); this.getFlexCellFormatter().setHorizontalAlignment(parameterIndex, 0, HasHorizontalAlignment.ALIGN_CENTER); } /** * Stores the modified value of a parameter of a model component in the WMT * {@link DataManager}. 
* * @param parameter the ParameterJSO object for the parameter being modified * @param newValue the new parameter value, a String */ public void setValue(ParameterJSO parameter, String newValue) { String key = parameter.getKey(); String previousValue = data.getModelComponent(componentId).getParameter(key).getValue() .getDefault(); GWT.log(componentId + ": " + key + ": " + newValue); // Don't update state when tabbing between fields or moving within field. // XXX Would be better to handle this further upstream. if (!newValue.matches(previousValue)) { data.getModelComponent(componentId).getParameter(key).getValue() .setDefault(newValue); data.modelIsSaved(false); data.getPerspective().setModelPanelTitle(); } } /** * Deletes the contents of the ParameterTable and resets the tab title to * "Parameters". Unsets the selectedComponent in the DataManager. */ public void clearTable() { data.setSelectedComponent(null); // should also be in ControlCell#delete? this.setComponentId(null); data.getPerspective().setParameterPanelTitle(null); this.removeAllRows(); this.clear(true); } /** * Returns the id of the model component (a String) whose parameters are * displayed in the ParameterTable. */ public String getComponentId() { return componentId; } /** * Stores the id of the model component (a String) whose parameters are * displayed in the ParameterTable. * * @param componentId a component id, a String */ public void setComponentId(String componentId) { this.componentId = componentId; } }
gui/src/edu/colorado/csdms/wmt/client/ui/ParameterTable.java
/** * <License> */ package edu.colorado.csdms.wmt.client.ui; import com.google.gwt.core.shared.GWT; import com.google.gwt.user.client.Window; import com.google.gwt.user.client.ui.FlexTable; import com.google.gwt.user.client.ui.HTML; import com.google.gwt.user.client.ui.HasHorizontalAlignment; import edu.colorado.csdms.wmt.client.control.DataManager; import edu.colorado.csdms.wmt.client.data.ParameterJSO; import edu.colorado.csdms.wmt.client.ui.widgets.ViewInputFilesPanel; /** * Builds a table of parameters for a single WMT model component. The value of * the parameter is editable. * * @author Mark Piper ([email protected]) */ public class ParameterTable extends FlexTable { public DataManager data; private String componentId; // the id of the displayed component private ViewInputFilesPanel viewFilesPanel; /** * Initializes a table of parameters for a single WMT model component. The * table is empty until {@link #loadTable()} is called. * * @param data the DataManager instance for the WMT session */ public ParameterTable(DataManager data) { this.data = data; this.setWidth("100%"); } /** * A worker that displays an informational message in the ParameterTable. */ public void showInfoMessage() { HTML infoMessage = new HTML("Select a model component to view and edit its parameters"); infoMessage.setStyleName("wmt-ParameterTableMessage"); this.setWidget(0, 0, infoMessage); } @Deprecated public void loadTable() { // TODO Does nothing. To be deleted. } /** * A worker that loads the ParameterTable with parameter values for the * selected model component. Displays a {@link ViewInputFilesPanel} at the * bottom of the table. * * @param the id of the component whose parameters are to be displayed */ public void loadTable(String componentId) { // The component whose parameters are to be displayed. this.setComponentId(componentId); // Return if the selected component doesn't have parameters. if (data.getModelComponent(componentId).getParameters() == null) { this.clearTable(); Window.alert("No parameters defined for this component."); return; } // Set the component name on the tab holding the ParameterTable. data.getPerspective().setParameterPanelTitle(componentId); // Build the parameter table. Integer nParameters = data.getModelComponent(componentId).getParameters().length(); Integer parameterIndex = 0; for (int i = 0; i < nParameters; i++) { ParameterJSO parameter = data.getModelComponent(componentId).getParameters().get(i); this.setWidget(parameterIndex, 0, new DescriptionCell(parameter)); if (parameter.getKey().matches("separator")) { this.getFlexCellFormatter().setColSpan(parameterIndex, 0, 2); this.getFlexCellFormatter().setStyleName(parameterIndex, 0, "wmt-ParameterSeparator"); } else { this.setWidget(parameterIndex, 1, new ValueCell(parameter)); this.getFlexCellFormatter().setStyleName(parameterIndex, 0, "wmt-ParameterDescription"); } parameterIndex++; } // Append links to view input files. viewFilesPanel = new ViewInputFilesPanel(data, componentId); this.setWidget(parameterIndex, 0, viewFilesPanel); this.getFlexCellFormatter().setColSpan(parameterIndex, 0, 2); this.getFlexCellFormatter().setHorizontalAlignment(parameterIndex, 0, HasHorizontalAlignment.ALIGN_CENTER); } /** * Stores the modified value of a parameter of a model component in the WMT * {@link DataManager}. 
* * @param parameter the ParameterJSO object for the parameter being modified * @param newValue the new parameter value, a String */ public void setValue(ParameterJSO parameter, String newValue) { String key = parameter.getKey(); String previousValue = data.getModelComponent(componentId).getParameter(key).getValue() .getDefault(); GWT.log(componentId + ": " + key + ": " + newValue); // Don't update state when tabbing between fields or moving within field. // XXX Would be better to handle this further upstream. if (!newValue.matches(previousValue)) { data.getModelComponent(componentId).getParameter(key).getValue() .setDefault(newValue); data.modelIsSaved(false); data.getPerspective().setModelPanelTitle(); } } /** * Deletes the contents of the ParameterTable and resets the tab title to * "Parameters". Unsets the selectedComponent in the DataManager. */ public void clearTable() { data.setSelectedComponent(null); // should also be in ControlCell#delete? this.setComponentId(null); data.getPerspective().setParameterPanelTitle(null); this.removeAllRows(); this.clear(true); } /** * Returns the id of the model component (a String) whose parameters are * displayed in the ParameterTable. */ public String getComponentId() { return componentId; } /** * Stores the id of the model component (a String) whose parameters are * displayed in the ParameterTable. * * @param componentId a component id, a String */ public void setComponentId(String componentId) { this.componentId = componentId; } }
Remove deprecated ParameterTable#loadTable method
gui/src/edu/colorado/csdms/wmt/client/ui/ParameterTable.java
Remove deprecated ParameterTable#loadTable method
<ide><path>ui/src/edu/colorado/csdms/wmt/client/ui/ParameterTable.java <ide> new HTML("Select a model component to view and edit its parameters"); <ide> infoMessage.setStyleName("wmt-ParameterTableMessage"); <ide> this.setWidget(0, 0, infoMessage); <del> } <del> <del> @Deprecated <del> public void loadTable() { <del> // TODO Does nothing. To be deleted. <ide> } <ide> <ide> /**
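For illustration, a minimal self-contained Java sketch of the calling convention that remains after this clean-up: loadTable must always be given an explicit component id now that the deprecated no-argument overload is gone. The class name and the component id below are made-up stand-ins; the real ParameterTable depends on GWT widgets and the WMT DataManager and cannot run on its own.

// Simplified stand-in for the GWT ParameterTable, showing only the surviving entry point.
public class ParameterTableSketch {

    // After the removal, loading always requires an explicit component id.
    public void loadTable(String componentId) {
        if (componentId == null || componentId.isEmpty()) {
            throw new IllegalArgumentException("componentId must be provided");
        }
        System.out.println("Loading parameters for component: " + componentId);
    }

    public static void main(String[] args) {
        // "example-component" is a placeholder value, not a real WMT component id.
        new ParameterTableSketch().loadTable("example-component");
    }
}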
Java
apache-2.0
b5e20e88929a546aabb2ee968be847f16bf3cb91
0
allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community
/* * Copyright 2000-2017 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.codeInsight.template.macro; import com.intellij.codeInsight.CodeInsightBundle; import com.intellij.codeInsight.lookup.LookupElement; import com.intellij.codeInsight.lookup.LookupElementBuilder; import com.intellij.codeInsight.template.*; import com.intellij.lang.Language; import com.intellij.openapi.editor.Editor; import com.intellij.openapi.util.text.StringUtil; import com.intellij.psi.util.PsiUtilBase; import com.intellij.util.containers.ContainerUtil; import groovy.lang.Binding; import groovy.lang.GroovyShell; import groovy.lang.Script; import org.jetbrains.annotations.NotNull; import java.io.File; import java.util.Arrays; import java.util.Collection; /** * @author Maxim.Mossienko */ public class GroovyScriptMacro extends Macro { @Override public String getName() { return "groovyScript"; } @Override public String getPresentableName() { return CodeInsightBundle.message("macro.groovy.script"); } @Override public Result calculateResult(@NotNull Expression[] params, ExpressionContext context) { if (params.length == 0) return null; Object o = runIt(params, context); if (o instanceof Collection && !((Collection)o).isEmpty()) { return new TextResult(toNormalizedString(((Collection)o).iterator().next())); } if (o instanceof Object[] && ((Object[])o).length > 0) { return new TextResult(toNormalizedString(((Object[])o)[0])); } if (o != null) return new TextResult(toNormalizedString(o)); return null; } private static Object runIt(Expression[] params, ExpressionContext context) { Editor editor = context.getEditor(); Language language = editor == null ? null : PsiUtilBase.getLanguageInEditor(editor, context.getProject()); try { Result result = params[0].calculateResult(context); if (result == null) return null; String text = result.toString(); GroovyShell shell = new GroovyShell(language == null ? null : language.getClass().getClassLoader()); File possibleFile = new File(text); Script script = possibleFile.exists() ? shell.parse(possibleFile) : shell.parse(text); Binding binding = new Binding(); for(int i = 1; i < params.length; ++i) { Result paramResult = params[i].calculateResult(context); Object value = null; if (paramResult instanceof ListResult) { value = ContainerUtil.map2List(((ListResult)paramResult).getComponents(), result1 -> result1.toString()); } else if (paramResult != null) { value = paramResult.toString(); } binding.setVariable("_"+i, value); } binding.setVariable("_editor", editor); script.setBinding(binding); return script.run(); } catch (Exception | Error e) { return StringUtil.convertLineSeparators(e.getLocalizedMessage()); } } @Override public Result calculateQuickResult(@NotNull Expression[] params, ExpressionContext context) { return calculateResult(params, context); } @Override public LookupElement[] calculateLookupItems(@NotNull Expression[] params, ExpressionContext context) { Object o = runIt(params, context); Collection collection = o instanceof Collection ? 
(Collection)o : o instanceof Object[] ? Arrays.asList((Object[])o) : ContainerUtil.createMaybeSingletonList(o); return ContainerUtil.map2Array(collection, LookupElement.class, item -> LookupElementBuilder.create(toNormalizedString(item))); } private static String toNormalizedString(Object o) { return StringUtil.convertLineSeparators(o.toString()); } }
platform/lang-impl/src/com/intellij/codeInsight/template/macro/GroovyScriptMacro.java
/* * Copyright 2000-2017 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.codeInsight.template.macro; import com.intellij.codeInsight.CodeInsightBundle; import com.intellij.codeInsight.lookup.LookupElement; import com.intellij.codeInsight.lookup.LookupElementBuilder; import com.intellij.codeInsight.template.*; import com.intellij.openapi.util.text.StringUtil; import com.intellij.util.containers.ContainerUtil; import groovy.lang.Binding; import groovy.lang.GroovyShell; import groovy.lang.Script; import org.jetbrains.annotations.NotNull; import java.io.File; import java.util.Arrays; import java.util.Collection; /** * @author Maxim.Mossienko */ public class GroovyScriptMacro extends Macro { @Override public String getName() { return "groovyScript"; } @Override public String getPresentableName() { return CodeInsightBundle.message("macro.groovy.script"); } @Override public Result calculateResult(@NotNull Expression[] params, ExpressionContext context) { if (params.length == 0) return null; Object o = runIt(params, context); if (o instanceof Collection && !((Collection)o).isEmpty()) { return new TextResult(toNormalizedString(((Collection)o).iterator().next())); } if (o instanceof Object[] && ((Object[])o).length > 0) { return new TextResult(toNormalizedString(((Object[])o)[0])); } if (o != null) return new TextResult(toNormalizedString(o)); return null; } private static Object runIt(Expression[] params, ExpressionContext context) { try { Result result = params[0].calculateResult(context); if (result == null) return null; String text = result.toString(); GroovyShell shell = new GroovyShell(); File possibleFile = new File(text); Script script = possibleFile.exists() ? shell.parse(possibleFile) : shell.parse(text); Binding binding = new Binding(); for(int i = 1; i < params.length; ++i) { Result paramResult = params[i].calculateResult(context); Object value = null; if (paramResult instanceof ListResult) { value = ContainerUtil.map2List(((ListResult)paramResult).getComponents(), result1 -> result1.toString()); } else if (paramResult != null) { value = paramResult.toString(); } binding.setVariable("_"+i, value); } binding.setVariable("_editor", context.getEditor()); script.setBinding(binding); return script.run(); } catch (Exception | Error e) { return StringUtil.convertLineSeparators(e.getLocalizedMessage()); } } @Override public Result calculateQuickResult(@NotNull Expression[] params, ExpressionContext context) { return calculateResult(params, context); } @Override public LookupElement[] calculateLookupItems(@NotNull Expression[] params, ExpressionContext context) { Object o = runIt(params, context); Collection collection = o instanceof Collection ? (Collection)o : o instanceof Object[] ? 
Arrays.asList((Object[])o) : ContainerUtil.createMaybeSingletonList(o); return ContainerUtil.map2Array(collection, LookupElement.class, item -> LookupElementBuilder.create(toNormalizedString(item))); } private static String toNormalizedString(Object o) { return StringUtil.convertLineSeparators(o.toString()); } }
let groovyScript in live templates use the plugin of the language under caret (IDEA-218938) GitOrigin-RevId: c89810fc3bbe0b5cbeaa1241b5dfecee98579b66
platform/lang-impl/src/com/intellij/codeInsight/template/macro/GroovyScriptMacro.java
let groovyScript in live templates use the plugin of the language under caret (IDEA-218938)
<ide><path>latform/lang-impl/src/com/intellij/codeInsight/template/macro/GroovyScriptMacro.java <ide> import com.intellij.codeInsight.lookup.LookupElement; <ide> import com.intellij.codeInsight.lookup.LookupElementBuilder; <ide> import com.intellij.codeInsight.template.*; <add>import com.intellij.lang.Language; <add>import com.intellij.openapi.editor.Editor; <ide> import com.intellij.openapi.util.text.StringUtil; <add>import com.intellij.psi.util.PsiUtilBase; <ide> import com.intellij.util.containers.ContainerUtil; <ide> import groovy.lang.Binding; <ide> import groovy.lang.GroovyShell; <ide> } <ide> <ide> private static Object runIt(Expression[] params, ExpressionContext context) { <add> Editor editor = context.getEditor(); <add> Language language = editor == null ? null : PsiUtilBase.getLanguageInEditor(editor, context.getProject()); <add> <ide> try { <ide> Result result = params[0].calculateResult(context); <ide> if (result == null) return null; <ide> <ide> String text = result.toString(); <del> GroovyShell shell = new GroovyShell(); <add> GroovyShell shell = new GroovyShell(language == null ? null : language.getClass().getClassLoader()); <ide> File possibleFile = new File(text); <ide> Script script = possibleFile.exists() ? shell.parse(possibleFile) : shell.parse(text); <ide> Binding binding = new Binding(); <ide> binding.setVariable("_"+i, value); <ide> } <ide> <del> binding.setVariable("_editor", context.getEditor()); <add> binding.setVariable("_editor", editor); <ide> <ide> script.setBinding(binding); <ide>
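A minimal sketch of the core of this change, assuming Groovy is on the classpath: GroovyShell accepts a parent ClassLoader, so scripts evaluated through it can resolve classes visible to that loader. The loader below is this sketch class's own loader, standing in for the language plugin's loader that the macro obtains via PsiUtilBase.getLanguageInEditor(...); everything else is plain Groovy API.

import groovy.lang.Binding;
import groovy.lang.GroovyShell;
import groovy.lang.Script;

public class GroovyShellClassLoaderSketch {
    public static void main(String[] args) {
        // Stand-in for the language plugin's class loader that the macro now passes in;
        // in the IDE it comes from the Language of the file under the caret.
        ClassLoader pluginLoader = GroovyShellClassLoaderSketch.class.getClassLoader();

        // Passing a parent loader lets the evaluated script resolve classes that are
        // visible to that loader, not only to Groovy's own loader.
        GroovyShell shell = new GroovyShell(pluginLoader);

        // Same binding convention as the macro: positional parameters become _1, _2, ...
        Binding binding = new Binding();
        binding.setVariable("_1", "world");

        Script script = shell.parse("\"hello, \" + _1");
        script.setBinding(binding);
        System.out.println(script.run()); // prints: hello, world
    }
}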
Java
apache-2.0
2dda057d88aac1d461730ba364849e2155a32d9a
0
RedRoma/aroma-data-operations,AromaTech/banana-data-operations,RedRoma/aroma-data-operations,RedRoma/banana-data-operations
/* * Copyright 2016 RedRoma, Inc.. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package tech.aroma.data; import java.util.Set; import org.apache.thrift.TException; import sir.wellington.alchemy.collections.sets.Sets; import tech.aroma.thrift.User; import tech.aroma.thrift.channels.MobileDevice; import tech.aroma.thrift.exceptions.InvalidArgumentException; import tech.sirwellington.alchemy.annotations.arguments.NonEmpty; import tech.sirwellington.alchemy.annotations.arguments.Required; import tech.sirwellington.alchemy.annotations.concurrency.ThreadSafe; import tech.sirwellington.alchemy.annotations.designs.patterns.StrategyPattern; import static tech.aroma.data.assertions.RequestAssertions.validMobileDevice; import static tech.aroma.data.assertions.RequestAssertions.validUserId; import static tech.sirwellington.alchemy.annotations.designs.patterns.StrategyPattern.Role.INTERFACE; import static tech.sirwellington.alchemy.arguments.Arguments.checkThat; /** * This Repository is responsible for storing information about Users, and their Preferences. * * <pre> * * Examples: * + Mobile Devices (used to send Push Notifications) * + Personal Reactions (applied to messages that hit their Inboxes) * + User Activity * </pre> * * @author SirWellington */ @StrategyPattern(role = INTERFACE) @ThreadSafe public interface UserPreferencesRepository { default boolean containsMobileDevice(String userId, MobileDevice mobileDevice) throws TException { checkThat(userId) .throwing(InvalidArgumentException.class) .is(validUserId()); checkThat(mobileDevice) .throwing(InvalidArgumentException.class) .is(validMobileDevice()); Set<MobileDevice> devices = getMobileDevices(userId); if (Sets.isEmpty(devices)) { return false; } return devices.contains(mobileDevice); } void saveMobileDevice(@NonEmpty String userId, @Required MobileDevice mobileDevice) throws TException; /** * This Operation overwrites all existing Mobile Devices registered for the specified User. * * @param userId The {@linkplain User#userId User ID} to save and associate the devices with. * @param mobileDevices The Set of Mobile Devices belonging to the user. * @throws TException */ void saveMobileDevices(@NonEmpty String userId, @Required Set<MobileDevice> mobileDevices) throws TException; /** * Get the Set of all Mobile Devices registered to a {@link User}. * * @param userId The {@linkplain User#userId User ID} of the User. * @return * @throws TException */ Set<MobileDevice> getMobileDevices(@NonEmpty String userId) throws TException; /** * Disassociates and removes a device associated with a particular user. * * @param userId The User ID the Device is associated with. * @param mobileDevice The Device to remove. * @throws TException */ void deleteMobileDevice(@NonEmpty String userId, @Required MobileDevice mobileDevice) throws TException; /** * Removes an Disassociates all devices registered to the specified {@Link User}. * * @param userId * @throws TException */ void deleteAllMobileDevices(@NonEmpty String userId) throws TException; }
src/main/java/tech/aroma/data/UserPreferencesRepository.java
/* * Copyright 2016 RedRoma, Inc.. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package tech.aroma.data; import java.util.Set; import org.apache.thrift.TException; import sir.wellington.alchemy.collections.sets.Sets; import tech.aroma.thrift.User; import tech.aroma.thrift.channels.MobileDevice; import tech.aroma.thrift.exceptions.InvalidArgumentException; import tech.sirwellington.alchemy.annotations.arguments.NonEmpty; import tech.sirwellington.alchemy.annotations.arguments.Required; import tech.sirwellington.alchemy.annotations.concurrency.ThreadSafe; import tech.sirwellington.alchemy.annotations.designs.patterns.StrategyPattern; import static tech.aroma.data.assertions.RequestAssertions.validMobileDevice; import static tech.aroma.data.assertions.RequestAssertions.validUserId; import static tech.sirwellington.alchemy.annotations.designs.patterns.StrategyPattern.Role.INTERFACE; import static tech.sirwellington.alchemy.arguments.Arguments.checkThat; /** * This Repository is responsible for storing information about Users, and their Preferences. * * <pre> * * For Example: * + Mobile Device (used to send Push Notifications) * + Personal Reactions (applied to messages that hit their Inboxes) * + User Activity * </pre> * * @author SirWellington */ @StrategyPattern(role = INTERFACE) @ThreadSafe public interface UserPreferencesRepository { default boolean containsMobileDevice(String userId, MobileDevice mobileDevice) throws TException { checkThat(userId) .throwing(InvalidArgumentException.class) .is(validUserId()); checkThat(mobileDevice) .throwing(InvalidArgumentException.class) .is(validMobileDevice()); Set<MobileDevice> devices = getMobileDevices(userId); if (Sets.isEmpty(devices)) { return false; } return devices.contains(mobileDevice); } void saveMobileDevice(@NonEmpty String userId, @Required MobileDevice mobileDevice) throws TException; /** * This Operation overwrites all existing Mobile Devices registered for the specified User. * * @param userId The {@linkplain User#userId User ID} to save and associate the devices with. * @param mobileDevices The Set of Mobile Devices belonging to the user. * @throws TException */ void saveMobileDevices(@NonEmpty String userId, @Required Set<MobileDevice> mobileDevices) throws TException; /** * Get the Set of all Mobile Devices registered to a {@link User}. * * @param userId The {@linkplain User#userId User ID} of the User. * @return * @throws TException */ Set<MobileDevice> getMobileDevices(@NonEmpty String userId) throws TException; /** * Disassociates and removes a device associated with a particular user. * * @param userId The User ID the Device is associated with. * @param mobileDevice The Device to remove. * @throws TException */ void deleteMobileDevice(@NonEmpty String userId, @Required MobileDevice mobileDevice) throws TException; /** * Removes an Disassociates all devices registered to the specified {@Link User}. * * @param userId * @throws TException */ void deleteAllMobileDevices(@NonEmpty String userId) throws TException; }
UserPreferencesRepository: Addressing #8 comments
src/main/java/tech/aroma/data/UserPreferencesRepository.java
UserPreferencesRepository: Addressing #8 comments
<ide><path>rc/main/java/tech/aroma/data/UserPreferencesRepository.java <ide> * <ide> * <pre> <ide> * <del> * For Example: <del> * + Mobile Device (used to send Push Notifications) <add> * Examples: <add> * + Mobile Devices (used to send Push Notifications) <ide> * + Personal Reactions (applied to messages that hit their Inboxes) <ide> * + User Activity <ide> * </pre>
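For readers without the Aroma Thrift types at hand, a self-contained sketch of the pattern this interface relies on: a default membership check layered on an abstract getter. MobileDevice and TException are replaced with plain strings, the validation assertions are omitted, and all names below are hypothetical, so this shows the shape of the contract rather than the real repository.

import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;

// Hypothetical, simplified analogue of the repository interface above.
interface DevicePreferences {
    Set<String> getMobileDevices(String userId);

    // Default membership check built on the abstract getter, mirroring
    // the idea behind UserPreferencesRepository#containsMobileDevice.
    default boolean containsMobileDevice(String userId, String device) {
        Set<String> devices = getMobileDevices(userId);
        return devices != null && devices.contains(device);
    }
}

public class InMemoryDevicePreferences implements DevicePreferences {
    private final Map<String, Set<String>> store = new ConcurrentHashMap<>();

    public void saveMobileDevice(String userId, String device) {
        store.computeIfAbsent(userId, id -> ConcurrentHashMap.newKeySet()).add(device);
    }

    @Override
    public Set<String> getMobileDevices(String userId) {
        return store.getOrDefault(userId, Set.of());
    }

    public static void main(String[] args) {
        InMemoryDevicePreferences prefs = new InMemoryDevicePreferences();
        prefs.saveMobileDevice("user-1", "pixel-7");
        System.out.println(prefs.containsMobileDevice("user-1", "pixel-7")); // true
        System.out.println(prefs.containsMobileDevice("user-1", "iphone"));  // false
    }
}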
Java
apache-2.0
9e271001bc26b9b449c1c8e17e79f01d9bdc678f
0
GlenRSmith/elasticsearch,GlenRSmith/elasticsearch,GlenRSmith/elasticsearch,GlenRSmith/elasticsearch,GlenRSmith/elasticsearch
/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ package org.elasticsearch.xpack.searchablesnapshots.cache.blob; import org.apache.lucene.store.AlreadyClosedException; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.Client; import org.elasticsearch.client.OriginSettingClient; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.cluster.routing.allocation.decider.AllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.Decision; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.IndexingStats; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.plugins.ClusterPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.reindex.ReindexPlugin; import org.elasticsearch.snapshots.SearchableSnapshotsSettings; import org.elasticsearch.snapshots.SnapshotId; import org.elasticsearch.test.InternalTestCluster; import org.elasticsearch.xpack.cluster.routing.allocation.DataTierAllocationDecider; import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.core.searchablesnapshots.MountSearchableSnapshotRequest.Storage; import org.elasticsearch.xpack.core.searchablesnapshots.SearchableSnapshotShardStats; import org.elasticsearch.xpack.searchablesnapshots.BaseFrozenSearchableSnapshotsIntegTestCase; import org.elasticsearch.xpack.searchablesnapshots.SearchableSnapshots; import org.elasticsearch.xpack.searchablesnapshots.action.SearchableSnapshotsStatsAction; import org.elasticsearch.xpack.searchablesnapshots.action.SearchableSnapshotsStatsRequest; import org.elasticsearch.xpack.searchablesnapshots.cache.full.CacheService; import org.elasticsearch.xpack.searchablesnapshots.cache.shared.FrozenCacheService; import org.junit.AfterClass; import org.junit.BeforeClass; import java.nio.file.Path; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.Locale; import java.util.concurrent.TimeUnit; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.xpack.searchablesnapshots.SearchableSnapshots.SNAPSHOT_BLOB_CACHE_INDEX; import static 
org.elasticsearch.xpack.searchablesnapshots.cache.shared.SharedBytes.pageAligned; import static org.elasticsearch.xpack.searchablesnapshots.store.SearchableSnapshotDirectory.unwrapDirectory; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; public class SearchableSnapshotsBlobStoreCacheIntegTests extends BaseFrozenSearchableSnapshotsIntegTestCase { private static Settings cacheSettings = null; private static ByteSizeValue blobCacheMaxLength = null; @BeforeClass public static void setUpCacheSettings() { blobCacheMaxLength = pageAligned(new ByteSizeValue(randomLongBetween(64L, 128L), ByteSizeUnit.KB)); final Settings.Builder builder = Settings.builder(); // Align ranges to match the blob cache max length builder.put(CacheService.SNAPSHOT_CACHE_RANGE_SIZE_SETTING.getKey(), blobCacheMaxLength); builder.put(CacheService.SNAPSHOT_CACHE_RECOVERY_RANGE_SIZE_SETTING.getKey(), blobCacheMaxLength); // Frozen (shared cache) cache should be large enough to not cause direct reads builder.put(FrozenCacheService.SNAPSHOT_CACHE_SIZE_SETTING.getKey(), ByteSizeValue.ofMb(128)); // Align ranges to match the blob cache max length builder.put(FrozenCacheService.SNAPSHOT_CACHE_REGION_SIZE_SETTING.getKey(), blobCacheMaxLength); builder.put(FrozenCacheService.SHARED_CACHE_RANGE_SIZE_SETTING.getKey(), blobCacheMaxLength); builder.put(FrozenCacheService.FROZEN_CACHE_RECOVERY_RANGE_SIZE_SETTING.getKey(), blobCacheMaxLength); cacheSettings = builder.build(); } @AfterClass public static void tearDownCacheSettings() { blobCacheMaxLength = null; cacheSettings = null; } @Override protected Collection<Class<? extends Plugin>> nodePlugins() { final List<Class<? extends Plugin>> plugins = new ArrayList<>(super.nodePlugins()); plugins.add(WaitForSnapshotBlobCacheShardsActivePlugin.class); plugins.add(ReindexPlugin.class); return plugins; } @Override protected int numberOfReplicas() { return 0; } @Override protected int numberOfShards() { return 1; } @Override protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { return Settings.builder().put(super.nodeSettings(nodeOrdinal, otherSettings)).put(cacheSettings).build(); } public void testBlobStoreCache() throws Exception { final String indexName = randomAlphaOfLength(10).toLowerCase(Locale.ROOT); createIndex(indexName); final NumShards numberOfShards = getNumShards(indexName); final int numberOfDocs = scaledRandomIntBetween(10, 20_000); logger.info("--> indexing [{}] documents in [{}]", numberOfDocs, indexName); final List<IndexRequestBuilder> indexRequestBuilders = new ArrayList<>(); for (int i = numberOfDocs; i > 0; i--) { XContentBuilder builder = XContentFactory.smileBuilder(); builder.startObject().field("text", randomRealisticUnicodeOfCodepointLengthBetween(5, 50)).field("num", i).endObject(); indexRequestBuilders.add(client().prepareIndex(indexName).setSource(builder)); } indexRandom(true, true, true, indexRequestBuilders); if (randomBoolean()) { logger.info("--> force-merging index before snapshotting"); final ForceMergeResponse forceMergeResponse = client().admin() .indices() .prepareForceMerge(indexName) .setMaxNumSegments(1) .get(); assertThat(forceMergeResponse.getSuccessfulShards(), equalTo(numberOfShards.totalNumShards)); assertThat(forceMergeResponse.getFailedShards(), equalTo(0)); } final String repositoryName = randomAlphaOfLength(10).toLowerCase(Locale.ROOT); final Path repositoryLocation = randomRepoPath(); createRepository(repositoryName, "fs", repositoryLocation); final SnapshotId snapshot = 
createSnapshot(repositoryName, "test-snapshot", List.of(indexName)).snapshotId(); assertAcked(client().admin().indices().prepareDelete(indexName)); expectThrows( IndexNotFoundException.class, ".snapshot-blob-cache system index should not be created yet", () -> systemClient().admin().indices().prepareGetIndex().addIndices(SNAPSHOT_BLOB_CACHE_INDEX).get() ); final Storage storage1 = randomFrom(Storage.values()); logger.info( "--> mount snapshot [{}] as an index for the first time [storage={}, max length={}]", snapshot, storage1, blobCacheMaxLength.getStringRep() ); final String restoredIndex = "restored-" + randomAlphaOfLength(10).toLowerCase(Locale.ROOT); mountSnapshot( repositoryName, snapshot.getName(), indexName, restoredIndex, Settings.builder() .put(SearchableSnapshots.SNAPSHOT_CACHE_ENABLED_SETTING.getKey(), true) .put(SearchableSnapshots.SNAPSHOT_CACHE_PREWARM_ENABLED_SETTING.getKey(), false) .put(SearchableSnapshots.SNAPSHOT_BLOB_CACHE_METADATA_FILES_MAX_LENGTH, blobCacheMaxLength) .build(), storage1 ); ensureGreen(restoredIndex); assertRecoveryStats(restoredIndex, false); assertExecutorIsIdle(SearchableSnapshots.CACHE_FETCH_ASYNC_THREAD_POOL_NAME); waitForBlobCacheFillsToComplete(); for (final SearchableSnapshotShardStats shardStats : client().execute( SearchableSnapshotsStatsAction.INSTANCE, new SearchableSnapshotsStatsRequest() ).actionGet().getStats()) { for (final SearchableSnapshotShardStats.CacheIndexInputStats indexInputStats : shardStats.getStats()) { assertThat(Strings.toString(indexInputStats), indexInputStats.getBlobStoreBytesRequested().getCount(), greaterThan(0L)); } } logger.info("--> verifying cached documents in system index [{}]", SNAPSHOT_BLOB_CACHE_INDEX); ensureYellow(SNAPSHOT_BLOB_CACHE_INDEX); refreshSystemIndex(); logger.info("--> verifying system index [{}] data tiers preference", SNAPSHOT_BLOB_CACHE_INDEX); assertThat( systemClient().admin() .indices() .prepareGetSettings(SNAPSHOT_BLOB_CACHE_INDEX) .get() .getSetting(SNAPSHOT_BLOB_CACHE_INDEX, DataTierAllocationDecider.TIER_PREFERENCE), equalTo("data_content,data_hot") ); refreshSystemIndex(); final long numberOfCachedBlobs = systemClient().prepareSearch(SNAPSHOT_BLOB_CACHE_INDEX) .setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN) .get() .getHits() .getTotalHits().value; IndexingStats indexingStats = systemClient().admin() .indices() .prepareStats(SNAPSHOT_BLOB_CACHE_INDEX) .setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN) .clear() .setIndexing(true) .get() .getTotal() .getIndexing(); final long numberOfCacheWrites = indexingStats != null ? 
indexingStats.getTotal().getIndexCount() : 0L; logger.info("--> verifying number of documents in index [{}]", restoredIndex); assertHitCount(client().prepareSearch(restoredIndex).setSize(0).setTrackTotalHits(true).get(), numberOfDocs); for (IndicesService indicesService : internalCluster().getDataNodeInstances(IndicesService.class)) { for (IndexService indexService : indicesService) { if (indexService.index().getName().equals(restoredIndex)) { for (IndexShard indexShard : indexService) { try { unwrapDirectory(indexShard.store().directory()).clearStats(); } catch (AlreadyClosedException ignore) { // ok to ignore these } } } } } final Storage storage2 = randomFrom(Storage.values()); logger.info("--> mount snapshot [{}] as an index for the second time [storage={}]", snapshot, storage2); final String restoredAgainIndex = "restored-" + randomAlphaOfLength(10).toLowerCase(Locale.ROOT); mountSnapshot( repositoryName, snapshot.getName(), indexName, restoredAgainIndex, Settings.builder() .put(SearchableSnapshots.SNAPSHOT_CACHE_ENABLED_SETTING.getKey(), true) .put(SearchableSnapshots.SNAPSHOT_CACHE_PREWARM_ENABLED_SETTING.getKey(), false) .put(SearchableSnapshots.SNAPSHOT_BLOB_CACHE_METADATA_FILES_MAX_LENGTH, blobCacheMaxLength) .build(), storage2 ); ensureGreen(restoredAgainIndex); assertRecoveryStats(restoredAgainIndex, false); assertExecutorIsIdle(SearchableSnapshots.CACHE_FETCH_ASYNC_THREAD_POOL_NAME); waitForBlobCacheFillsToComplete(); logger.info("--> verifying shards of [{}] were started without using the blob store more than necessary", restoredAgainIndex); checkNoBlobStoreAccess(); logger.info("--> verifying number of documents in index [{}]", restoredAgainIndex); assertHitCount(client().prepareSearch(restoredAgainIndex).setSize(0).setTrackTotalHits(true).get(), numberOfDocs); logger.info("--> verifying that no extra cached blobs were indexed [{}]", SNAPSHOT_BLOB_CACHE_INDEX); refreshSystemIndex(); assertHitCount( systemClient().prepareSearch(SNAPSHOT_BLOB_CACHE_INDEX).setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN).setSize(0).get(), numberOfCachedBlobs ); indexingStats = systemClient().admin() .indices() .prepareStats(SNAPSHOT_BLOB_CACHE_INDEX) .setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN) .clear() .setIndexing(true) .get() .getTotal() .getIndexing(); assertThat(indexingStats != null ? 
indexingStats.getTotal().getIndexCount() : 0L, equalTo(numberOfCacheWrites)); logger.info("--> restarting cluster"); internalCluster().fullRestart(new InternalTestCluster.RestartCallback() { @Override public Settings onNodeStopped(String nodeName) throws Exception { return Settings.builder() .put(super.onNodeStopped(nodeName)) .put(WaitForSnapshotBlobCacheShardsActivePlugin.ENABLED.getKey(), true) .build(); } }); ensureGreen(restoredAgainIndex); assertRecoveryStats(restoredAgainIndex, false); assertExecutorIsIdle(SearchableSnapshots.CACHE_FETCH_ASYNC_THREAD_POOL_NAME); waitForBlobCacheFillsToComplete(); logger.info("--> shards of [{}] should start without downloading bytes from the blob store", restoredAgainIndex); checkNoBlobStoreAccess(); logger.info("--> verifying that no cached blobs were indexed in system index [{}] after restart", SNAPSHOT_BLOB_CACHE_INDEX); assertHitCount( systemClient().prepareSearch(SNAPSHOT_BLOB_CACHE_INDEX).setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN).setSize(0).get(), numberOfCachedBlobs ); indexingStats = systemClient().admin() .indices() .prepareStats(SNAPSHOT_BLOB_CACHE_INDEX) .setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN) .clear() .setIndexing(true) .get() .getTotal() .getIndexing(); assertThat(indexingStats != null ? indexingStats.getTotal().getIndexCount() : 0L, equalTo(0L)); logger.info("--> verifying number of documents in index [{}]", restoredAgainIndex); assertHitCount(client().prepareSearch(restoredAgainIndex).setSize(0).setTrackTotalHits(true).get(), numberOfDocs); logger.info("--> deleting indices, maintenance service should clean up [{}] docs in system index", numberOfCachedBlobs); assertAcked(client().admin().indices().prepareDelete("restored-*")); assertBusy(() -> { refreshSystemIndex(); assertHitCount( systemClient().prepareSearch(SNAPSHOT_BLOB_CACHE_INDEX) .setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN) .setSize(0) .get(), 0L ); }, 30L, TimeUnit.SECONDS); } private void checkNoBlobStoreAccess() { for (final SearchableSnapshotShardStats shardStats : client().execute( SearchableSnapshotsStatsAction.INSTANCE, new SearchableSnapshotsStatsRequest() ).actionGet().getStats()) { for (final SearchableSnapshotShardStats.CacheIndexInputStats indexInputStats : shardStats.getStats()) { assertThat(Strings.toString(indexInputStats), indexInputStats.getBlobStoreBytesRequested().getCount(), equalTo(0L)); } } } /** * @return a {@link Client} that can be used to query the blob store cache system index */ private Client systemClient() { return new OriginSettingClient(client(), ClientHelper.SEARCHABLE_SNAPSHOTS_ORIGIN); } private void refreshSystemIndex() { try { final RefreshResponse refreshResponse = systemClient().admin().indices().prepareRefresh(SNAPSHOT_BLOB_CACHE_INDEX).get(); assertThat(refreshResponse.getSuccessfulShards(), greaterThan(0)); assertThat(refreshResponse.getFailedShards(), equalTo(0)); } catch (IndexNotFoundException indexNotFoundException) { throw new AssertionError("unexpected", indexNotFoundException); } } /** * This plugin declares an {@link AllocationDecider} that forces searchable snapshot shards to be allocated after * the primary shards of the snapshot blob cache index are started. This way we can ensure that searchable snapshot * shards can use the snapshot blob cache index after the cluster restarted. 
*/ public static class WaitForSnapshotBlobCacheShardsActivePlugin extends Plugin implements ClusterPlugin { public static Setting<Boolean> ENABLED = Setting.boolSetting( "wait_for_snapshot_blob_cache_shards_active.enabled", false, Setting.Property.NodeScope ); @Override public List<Setting<?>> getSettings() { return List.of(ENABLED); } @Override public Collection<AllocationDecider> createAllocationDeciders(Settings settings, ClusterSettings clusterSettings) { if (ENABLED.get(settings) == false) { return List.of(); } final String name = "wait_for_snapshot_blob_cache_shards_active"; return List.of(new AllocationDecider() { @Override public Decision canAllocate(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { return canAllocate(shardRouting, allocation); } @Override public Decision canAllocate(ShardRouting shardRouting, RoutingAllocation allocation) { final IndexMetadata indexMetadata = allocation.metadata().index(shardRouting.index()); if (SearchableSnapshotsSettings.isSearchableSnapshotStore(indexMetadata.getSettings()) == false) { return allocation.decision(Decision.YES, name, "index is not a searchable snapshot shard - can allocate"); } if (allocation.metadata().hasIndex(SNAPSHOT_BLOB_CACHE_INDEX) == false) { return allocation.decision(Decision.YES, name, SNAPSHOT_BLOB_CACHE_INDEX + " is not created yet"); } if (allocation.routingTable().hasIndex(SNAPSHOT_BLOB_CACHE_INDEX) == false) { return allocation.decision(Decision.THROTTLE, name, SNAPSHOT_BLOB_CACHE_INDEX + " is not active yet"); } final IndexRoutingTable indexRoutingTable = allocation.routingTable().index(SNAPSHOT_BLOB_CACHE_INDEX); if (indexRoutingTable.allPrimaryShardsActive() == false) { return allocation.decision(Decision.THROTTLE, name, SNAPSHOT_BLOB_CACHE_INDEX + " is not active yet"); } return allocation.decision(Decision.YES, name, "primary shard for this replica is already active"); } }); } } }
x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/SearchableSnapshotsBlobStoreCacheIntegTests.java
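Comparing the new and old test contents in this record, the visible change is an explicit 30-second budget on the final assertBusy call, together with the java.util.concurrent.TimeUnit import. A rough stand-alone sketch of that polling-with-timeout idea, not the actual ESTestCase helper (which also retries on AssertionError), could look like this:

import java.util.concurrent.TimeUnit;
import java.util.function.BooleanSupplier;

// Stand-alone illustration of waiting for a condition with an explicit timeout,
// loosely analogous to assertBusy(..., 30L, TimeUnit.SECONDS) in the test above.
public class BusyWaitSketch {
    static void waitUntil(BooleanSupplier condition, long timeout, TimeUnit unit) throws InterruptedException {
        long deadline = System.nanoTime() + unit.toNanos(timeout);
        while (System.nanoTime() < deadline) {
            if (condition.getAsBoolean()) {
                return;
            }
            Thread.sleep(100L); // fixed poll interval for this sketch
        }
        throw new AssertionError("condition not met within " + timeout + " " + unit);
    }

    public static void main(String[] args) throws InterruptedException {
        long start = System.currentTimeMillis();
        // Condition becomes true after about one second, well within the 30-second budget.
        waitUntil(() -> System.currentTimeMillis() - start > 1_000L, 30L, TimeUnit.SECONDS);
        System.out.println("condition met");
    }
}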
/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ package org.elasticsearch.xpack.searchablesnapshots.cache.blob; import org.apache.lucene.store.AlreadyClosedException; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.Client; import org.elasticsearch.client.OriginSettingClient; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.cluster.routing.allocation.decider.AllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.Decision; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.IndexingStats; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.plugins.ClusterPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.reindex.ReindexPlugin; import org.elasticsearch.snapshots.SearchableSnapshotsSettings; import org.elasticsearch.snapshots.SnapshotId; import org.elasticsearch.test.InternalTestCluster; import org.elasticsearch.xpack.cluster.routing.allocation.DataTierAllocationDecider; import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.core.searchablesnapshots.MountSearchableSnapshotRequest.Storage; import org.elasticsearch.xpack.core.searchablesnapshots.SearchableSnapshotShardStats; import org.elasticsearch.xpack.searchablesnapshots.BaseFrozenSearchableSnapshotsIntegTestCase; import org.elasticsearch.xpack.searchablesnapshots.SearchableSnapshots; import org.elasticsearch.xpack.searchablesnapshots.action.SearchableSnapshotsStatsAction; import org.elasticsearch.xpack.searchablesnapshots.action.SearchableSnapshotsStatsRequest; import org.elasticsearch.xpack.searchablesnapshots.cache.full.CacheService; import org.elasticsearch.xpack.searchablesnapshots.cache.shared.FrozenCacheService; import org.junit.AfterClass; import org.junit.BeforeClass; import java.nio.file.Path; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.Locale; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.xpack.searchablesnapshots.SearchableSnapshots.SNAPSHOT_BLOB_CACHE_INDEX; import static org.elasticsearch.xpack.searchablesnapshots.cache.shared.SharedBytes.pageAligned; import static 
org.elasticsearch.xpack.searchablesnapshots.store.SearchableSnapshotDirectory.unwrapDirectory; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; public class SearchableSnapshotsBlobStoreCacheIntegTests extends BaseFrozenSearchableSnapshotsIntegTestCase { private static Settings cacheSettings = null; private static ByteSizeValue blobCacheMaxLength = null; @BeforeClass public static void setUpCacheSettings() { blobCacheMaxLength = pageAligned(new ByteSizeValue(randomLongBetween(64L, 128L), ByteSizeUnit.KB)); final Settings.Builder builder = Settings.builder(); // Align ranges to match the blob cache max length builder.put(CacheService.SNAPSHOT_CACHE_RANGE_SIZE_SETTING.getKey(), blobCacheMaxLength); builder.put(CacheService.SNAPSHOT_CACHE_RECOVERY_RANGE_SIZE_SETTING.getKey(), blobCacheMaxLength); // Frozen (shared cache) cache should be large enough to not cause direct reads builder.put(FrozenCacheService.SNAPSHOT_CACHE_SIZE_SETTING.getKey(), ByteSizeValue.ofMb(128)); // Align ranges to match the blob cache max length builder.put(FrozenCacheService.SNAPSHOT_CACHE_REGION_SIZE_SETTING.getKey(), blobCacheMaxLength); builder.put(FrozenCacheService.SHARED_CACHE_RANGE_SIZE_SETTING.getKey(), blobCacheMaxLength); builder.put(FrozenCacheService.FROZEN_CACHE_RECOVERY_RANGE_SIZE_SETTING.getKey(), blobCacheMaxLength); cacheSettings = builder.build(); } @AfterClass public static void tearDownCacheSettings() { blobCacheMaxLength = null; cacheSettings = null; } @Override protected Collection<Class<? extends Plugin>> nodePlugins() { final List<Class<? extends Plugin>> plugins = new ArrayList<>(super.nodePlugins()); plugins.add(WaitForSnapshotBlobCacheShardsActivePlugin.class); plugins.add(ReindexPlugin.class); return plugins; } @Override protected int numberOfReplicas() { return 0; } @Override protected int numberOfShards() { return 1; } @Override protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { return Settings.builder().put(super.nodeSettings(nodeOrdinal, otherSettings)).put(cacheSettings).build(); } public void testBlobStoreCache() throws Exception { final String indexName = randomAlphaOfLength(10).toLowerCase(Locale.ROOT); createIndex(indexName); final NumShards numberOfShards = getNumShards(indexName); final int numberOfDocs = scaledRandomIntBetween(10, 20_000); logger.info("--> indexing [{}] documents in [{}]", numberOfDocs, indexName); final List<IndexRequestBuilder> indexRequestBuilders = new ArrayList<>(); for (int i = numberOfDocs; i > 0; i--) { XContentBuilder builder = XContentFactory.smileBuilder(); builder.startObject().field("text", randomRealisticUnicodeOfCodepointLengthBetween(5, 50)).field("num", i).endObject(); indexRequestBuilders.add(client().prepareIndex(indexName).setSource(builder)); } indexRandom(true, true, true, indexRequestBuilders); if (randomBoolean()) { logger.info("--> force-merging index before snapshotting"); final ForceMergeResponse forceMergeResponse = client().admin() .indices() .prepareForceMerge(indexName) .setMaxNumSegments(1) .get(); assertThat(forceMergeResponse.getSuccessfulShards(), equalTo(numberOfShards.totalNumShards)); assertThat(forceMergeResponse.getFailedShards(), equalTo(0)); } final String repositoryName = randomAlphaOfLength(10).toLowerCase(Locale.ROOT); final Path repositoryLocation = randomRepoPath(); createRepository(repositoryName, "fs", repositoryLocation); final SnapshotId snapshot = createSnapshot(repositoryName, "test-snapshot", List.of(indexName)).snapshotId(); 
assertAcked(client().admin().indices().prepareDelete(indexName)); expectThrows( IndexNotFoundException.class, ".snapshot-blob-cache system index should not be created yet", () -> systemClient().admin().indices().prepareGetIndex().addIndices(SNAPSHOT_BLOB_CACHE_INDEX).get() ); final Storage storage1 = randomFrom(Storage.values()); logger.info( "--> mount snapshot [{}] as an index for the first time [storage={}, max length={}]", snapshot, storage1, blobCacheMaxLength.getStringRep() ); final String restoredIndex = "restored-" + randomAlphaOfLength(10).toLowerCase(Locale.ROOT); mountSnapshot( repositoryName, snapshot.getName(), indexName, restoredIndex, Settings.builder() .put(SearchableSnapshots.SNAPSHOT_CACHE_ENABLED_SETTING.getKey(), true) .put(SearchableSnapshots.SNAPSHOT_CACHE_PREWARM_ENABLED_SETTING.getKey(), false) .put(SearchableSnapshots.SNAPSHOT_BLOB_CACHE_METADATA_FILES_MAX_LENGTH, blobCacheMaxLength) .build(), storage1 ); ensureGreen(restoredIndex); assertRecoveryStats(restoredIndex, false); assertExecutorIsIdle(SearchableSnapshots.CACHE_FETCH_ASYNC_THREAD_POOL_NAME); waitForBlobCacheFillsToComplete(); for (final SearchableSnapshotShardStats shardStats : client().execute( SearchableSnapshotsStatsAction.INSTANCE, new SearchableSnapshotsStatsRequest() ).actionGet().getStats()) { for (final SearchableSnapshotShardStats.CacheIndexInputStats indexInputStats : shardStats.getStats()) { assertThat(Strings.toString(indexInputStats), indexInputStats.getBlobStoreBytesRequested().getCount(), greaterThan(0L)); } } logger.info("--> verifying cached documents in system index [{}]", SNAPSHOT_BLOB_CACHE_INDEX); ensureYellow(SNAPSHOT_BLOB_CACHE_INDEX); refreshSystemIndex(); logger.info("--> verifying system index [{}] data tiers preference", SNAPSHOT_BLOB_CACHE_INDEX); assertThat( systemClient().admin() .indices() .prepareGetSettings(SNAPSHOT_BLOB_CACHE_INDEX) .get() .getSetting(SNAPSHOT_BLOB_CACHE_INDEX, DataTierAllocationDecider.TIER_PREFERENCE), equalTo("data_content,data_hot") ); refreshSystemIndex(); final long numberOfCachedBlobs = systemClient().prepareSearch(SNAPSHOT_BLOB_CACHE_INDEX) .setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN) .get() .getHits() .getTotalHits().value; IndexingStats indexingStats = systemClient().admin() .indices() .prepareStats(SNAPSHOT_BLOB_CACHE_INDEX) .setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN) .clear() .setIndexing(true) .get() .getTotal() .getIndexing(); final long numberOfCacheWrites = indexingStats != null ? 
indexingStats.getTotal().getIndexCount() : 0L; logger.info("--> verifying number of documents in index [{}]", restoredIndex); assertHitCount(client().prepareSearch(restoredIndex).setSize(0).setTrackTotalHits(true).get(), numberOfDocs); for (IndicesService indicesService : internalCluster().getDataNodeInstances(IndicesService.class)) { for (IndexService indexService : indicesService) { if (indexService.index().getName().equals(restoredIndex)) { for (IndexShard indexShard : indexService) { try { unwrapDirectory(indexShard.store().directory()).clearStats(); } catch (AlreadyClosedException ignore) { // ok to ignore these } } } } } final Storage storage2 = randomFrom(Storage.values()); logger.info("--> mount snapshot [{}] as an index for the second time [storage={}]", snapshot, storage2); final String restoredAgainIndex = "restored-" + randomAlphaOfLength(10).toLowerCase(Locale.ROOT); mountSnapshot( repositoryName, snapshot.getName(), indexName, restoredAgainIndex, Settings.builder() .put(SearchableSnapshots.SNAPSHOT_CACHE_ENABLED_SETTING.getKey(), true) .put(SearchableSnapshots.SNAPSHOT_CACHE_PREWARM_ENABLED_SETTING.getKey(), false) .put(SearchableSnapshots.SNAPSHOT_BLOB_CACHE_METADATA_FILES_MAX_LENGTH, blobCacheMaxLength) .build(), storage2 ); ensureGreen(restoredAgainIndex); assertRecoveryStats(restoredAgainIndex, false); assertExecutorIsIdle(SearchableSnapshots.CACHE_FETCH_ASYNC_THREAD_POOL_NAME); waitForBlobCacheFillsToComplete(); logger.info("--> verifying shards of [{}] were started without using the blob store more than necessary", restoredAgainIndex); checkNoBlobStoreAccess(); logger.info("--> verifying number of documents in index [{}]", restoredAgainIndex); assertHitCount(client().prepareSearch(restoredAgainIndex).setSize(0).setTrackTotalHits(true).get(), numberOfDocs); logger.info("--> verifying that no extra cached blobs were indexed [{}]", SNAPSHOT_BLOB_CACHE_INDEX); refreshSystemIndex(); assertHitCount( systemClient().prepareSearch(SNAPSHOT_BLOB_CACHE_INDEX).setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN).setSize(0).get(), numberOfCachedBlobs ); indexingStats = systemClient().admin() .indices() .prepareStats(SNAPSHOT_BLOB_CACHE_INDEX) .setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN) .clear() .setIndexing(true) .get() .getTotal() .getIndexing(); assertThat(indexingStats != null ? 
indexingStats.getTotal().getIndexCount() : 0L, equalTo(numberOfCacheWrites)); logger.info("--> restarting cluster"); internalCluster().fullRestart(new InternalTestCluster.RestartCallback() { @Override public Settings onNodeStopped(String nodeName) throws Exception { return Settings.builder() .put(super.onNodeStopped(nodeName)) .put(WaitForSnapshotBlobCacheShardsActivePlugin.ENABLED.getKey(), true) .build(); } }); ensureGreen(restoredAgainIndex); assertRecoveryStats(restoredAgainIndex, false); assertExecutorIsIdle(SearchableSnapshots.CACHE_FETCH_ASYNC_THREAD_POOL_NAME); waitForBlobCacheFillsToComplete(); logger.info("--> shards of [{}] should start without downloading bytes from the blob store", restoredAgainIndex); checkNoBlobStoreAccess(); logger.info("--> verifying that no cached blobs were indexed in system index [{}] after restart", SNAPSHOT_BLOB_CACHE_INDEX); assertHitCount( systemClient().prepareSearch(SNAPSHOT_BLOB_CACHE_INDEX).setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN).setSize(0).get(), numberOfCachedBlobs ); indexingStats = systemClient().admin() .indices() .prepareStats(SNAPSHOT_BLOB_CACHE_INDEX) .setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN) .clear() .setIndexing(true) .get() .getTotal() .getIndexing(); assertThat(indexingStats != null ? indexingStats.getTotal().getIndexCount() : 0L, equalTo(0L)); logger.info("--> verifying number of documents in index [{}]", restoredAgainIndex); assertHitCount(client().prepareSearch(restoredAgainIndex).setSize(0).setTrackTotalHits(true).get(), numberOfDocs); logger.info("--> deleting indices, maintenance service should clean up snapshot blob cache index"); assertAcked(client().admin().indices().prepareDelete("restored-*")); assertBusy(() -> { refreshSystemIndex(); assertHitCount( systemClient().prepareSearch(SNAPSHOT_BLOB_CACHE_INDEX) .setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN) .setSize(0) .get(), 0L ); }); } private void checkNoBlobStoreAccess() { for (final SearchableSnapshotShardStats shardStats : client().execute( SearchableSnapshotsStatsAction.INSTANCE, new SearchableSnapshotsStatsRequest() ).actionGet().getStats()) { for (final SearchableSnapshotShardStats.CacheIndexInputStats indexInputStats : shardStats.getStats()) { assertThat(Strings.toString(indexInputStats), indexInputStats.getBlobStoreBytesRequested().getCount(), equalTo(0L)); } } } /** * @return a {@link Client} that can be used to query the blob store cache system index */ private Client systemClient() { return new OriginSettingClient(client(), ClientHelper.SEARCHABLE_SNAPSHOTS_ORIGIN); } private void refreshSystemIndex() { try { final RefreshResponse refreshResponse = systemClient().admin().indices().prepareRefresh(SNAPSHOT_BLOB_CACHE_INDEX).get(); assertThat(refreshResponse.getSuccessfulShards(), greaterThan(0)); assertThat(refreshResponse.getFailedShards(), equalTo(0)); } catch (IndexNotFoundException indexNotFoundException) { throw new AssertionError("unexpected", indexNotFoundException); } } /** * This plugin declares an {@link AllocationDecider} that forces searchable snapshot shards to be allocated after * the primary shards of the snapshot blob cache index are started. This way we can ensure that searchable snapshot * shards can use the snapshot blob cache index after the cluster restarted. 
*/ public static class WaitForSnapshotBlobCacheShardsActivePlugin extends Plugin implements ClusterPlugin { public static Setting<Boolean> ENABLED = Setting.boolSetting( "wait_for_snapshot_blob_cache_shards_active.enabled", false, Setting.Property.NodeScope ); @Override public List<Setting<?>> getSettings() { return List.of(ENABLED); } @Override public Collection<AllocationDecider> createAllocationDeciders(Settings settings, ClusterSettings clusterSettings) { if (ENABLED.get(settings) == false) { return List.of(); } final String name = "wait_for_snapshot_blob_cache_shards_active"; return List.of(new AllocationDecider() { @Override public Decision canAllocate(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { return canAllocate(shardRouting, allocation); } @Override public Decision canAllocate(ShardRouting shardRouting, RoutingAllocation allocation) { final IndexMetadata indexMetadata = allocation.metadata().index(shardRouting.index()); if (SearchableSnapshotsSettings.isSearchableSnapshotStore(indexMetadata.getSettings()) == false) { return allocation.decision(Decision.YES, name, "index is not a searchable snapshot shard - can allocate"); } if (allocation.metadata().hasIndex(SNAPSHOT_BLOB_CACHE_INDEX) == false) { return allocation.decision(Decision.YES, name, SNAPSHOT_BLOB_CACHE_INDEX + " is not created yet"); } if (allocation.routingTable().hasIndex(SNAPSHOT_BLOB_CACHE_INDEX) == false) { return allocation.decision(Decision.THROTTLE, name, SNAPSHOT_BLOB_CACHE_INDEX + " is not active yet"); } final IndexRoutingTable indexRoutingTable = allocation.routingTable().index(SNAPSHOT_BLOB_CACHE_INDEX); if (indexRoutingTable.allPrimaryShardsActive() == false) { return allocation.decision(Decision.THROTTLE, name, SNAPSHOT_BLOB_CACHE_INDEX + " is not active yet"); } return allocation.decision(Decision.YES, name, "primary shard for this replica is already active"); } }); } } }
Fix SearchableSnapshotsBlobStoreCacheIntegTests.testBlobStoreCache (#78616) This test often fails on CI because it does not wait for the data node that hosts the .snapshot-blob-cache primary shard to process the cluster state update and trigger the clean-up. Closes #78512
x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/SearchableSnapshotsBlobStoreCacheIntegTests.java
Fix SearchableSnapshotsBlobStoreCacheIntegTests.testBlobStoreCache (#78616)
<ide><path>-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/SearchableSnapshotsBlobStoreCacheIntegTests.java <ide> import java.util.Collection; <ide> import java.util.List; <ide> import java.util.Locale; <add>import java.util.concurrent.TimeUnit; <ide> <ide> import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; <ide> import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; <ide> logger.info("--> verifying number of documents in index [{}]", restoredAgainIndex); <ide> assertHitCount(client().prepareSearch(restoredAgainIndex).setSize(0).setTrackTotalHits(true).get(), numberOfDocs); <ide> <del> logger.info("--> deleting indices, maintenance service should clean up snapshot blob cache index"); <add> logger.info("--> deleting indices, maintenance service should clean up [{}] docs in system index", numberOfCachedBlobs); <ide> assertAcked(client().admin().indices().prepareDelete("restored-*")); <add> <ide> assertBusy(() -> { <ide> refreshSystemIndex(); <ide> assertHitCount( <ide> .get(), <ide> 0L <ide> ); <del> }); <add> }, 30L, TimeUnit.SECONDS); <ide> } <ide> <ide> private void checkNoBlobStoreAccess() {
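The fix above boils down to a wait-and-retry assertion: the clean-up of the .snapshot-blob-cache index happens asynchronously on the data node, so the test has to poll until the index is empty, with an explicit 30-second ceiling, rather than check once. A rough sketch of that polling pattern follows, written in JavaScript to match the rest of this dump rather than the test's Java; the names waitUntil and countCachedBlobs are illustrative and not part of Elasticsearch.

// Poll `check` until it stops throwing, or give up after `timeoutMs`.
// This mirrors what assertBusy(..., 30, TimeUnit.SECONDS) does in the diff above.
function waitUntil(check, timeoutMs, intervalMs) {
  var deadline = Date.now() + timeoutMs;
  return new Promise(function (resolve, reject) {
    (function attempt() {
      try {
        resolve(check()); // the assertion passed, so the clean-up has happened
      } catch (err) {
        if (Date.now() >= deadline) {
          reject(err); // still failing after the timeout: surface the last error
        } else {
          setTimeout(attempt, intervalMs); // otherwise try again shortly
        }
      }
    })();
  });
}

// Usage sketch: wait up to 30 seconds, checking every 500ms, for the cache index to
// be empty. countCachedBlobs is a hypothetical stand-in for the refresh + search the
// real test performs against .snapshot-blob-cache.
// waitUntil(function () {
//   if (countCachedBlobs() !== 0) { throw new Error('cache index not cleaned up yet'); }
// }, 30000, 500);

The important design choice is the same as in assertBusy: a failure before the deadline only schedules another attempt, and only the final failure is reported.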
JavaScript
agpl-3.0
955d649821f74afba8489f2046915de144d94671
0
hawkrives/gobbldygook,hawkrives/gobbldygook,hawkrives/gobbldygook
var readJson = require('./readJson') var Promise = require('bluebird') var _ = require('lodash') var add = require('./add') var buildDeptNum = require('./deptNum').buildDeptNum function storeCourses(item) { return new Promise(function(resolve, reject) { console.log(item.meta.path, 'called storeCourses') _.map(item.data.courses, function(course) { course.sourcePath = item.meta.path course.deptnum = buildDeptNum(course.depts.join('/') + ' ' + course.num + course.sect) console.log(item.meta.path, 'is processing courses') }) console.log(item.meta.path, 'has processed courses') window.server.courses .add.apply(window.server, item.data.courses) .done(function() {console.log(item.meta.path, 'has stored courses'); resolve(item)}) }) } function storeArea(item) { return new Promise(function(resolve, reject) { console.log(item.meta.path, 'called storeArea') item.data.info.sourcePath = item.meta.path window.server.areas .add(item.data.info) .done(function() {resolve(item)}) }) } function storeItem(item) { return new Promise(function(resolve, reject) { if (item.type === 'courses') { return storeCourses(item) } else if (item.type === 'areas') { return storeArea(item) } }) } function deleteItems(type, path, key) { console.log('deleting ' + type + ' from ' + path) var itemsToDelete = window.server[type] .query('sourcePath') .only(path) .execute() .done() itemsToDelete.then(function(items) { var keysToDelete = _.pluck(items, key) var numberToDelete = _.size(keysToDelete) if (numberToDelete) { _.each(keysToDelete, function(key) { window.server[type] .remove(key) .done() }) } console.log(numberToDelete + ' ' + type + ' have been removed from ' + path) }) return new Promise(function(resolve, reject) { itemsToDelete.then(function() { resolve() }) }) } function cleanPriorData(item) { return new Promise(function(resolve, reject) { var path = item.meta.path var hash = item.meta.hash // get rid of old items var deleteItemsPromise; if (item.type === 'courses') { deleteItemsPromise = deleteItems('courses', path, 'clbid') } else if (item.type === 'areas') { deleteItemsPromise = deleteItems('areas', path, 'sourcePath') } else { deleteItemsPromise = Promise.reject(new Error('Unknown item type ' + item.type)) } deleteItemsPromise.then(function() { console.log('deleteItemsPromise is done') localStorage.removeItem(path) resolve(item) }) }) } function cacheItemHash(item) { console.log(item.meta.path + ' called cacheItemHash') localStorage.setItem(item.meta.path, item.meta.hash) return Promise.resolve(item) } function updateDatabase(itemType, infoFromServer) { infoFromServer.path = '/data/' + itemType + '/' + infoFromServer.path var oldHash = localStorage.getItem(infoFromServer.path) var newHash = infoFromServer.hash var itemPath = infoFromServer.path return new Promise(function(resolve, reject) { if (newHash !== oldHash) { console.log('need to add ' + itemPath) readJson(itemPath) .then(function(data) { return { data: data, meta: infoFromServer, type: itemType } }) .then(cleanPriorData) .then(storeItem) .then(cacheItemHash) .catch(function(err) { reject(err.stack) }) .done(function() { console.log('added ' + itemPath + ' (' + _.size(item) + ' ' + itemType + ')') resolve(true) }) } else { console.log('skipped ' + itemPath) resolve(false) } }) } function loadDataFiles(infoFile) { console.log('load data files', infoFile) var progress = 0; var notification = new Cortex({ message: 'Updating ' + infoFile.type, hasProgress: true, progressValue: progress, maxProgressValue: 
_.chain(infoFile.info).mapValues(_.size).reduce(add).value(), ident: infoFile.type }) window.notifications.push(notification.val()) return Promise.all(_.map(infoFile.info, function(files) { var allFilesLoadedPromise = Promise.all(_.map(files, function(file) { var dbUpdatedPromise = updateDatabase(infoFile.type, file) dbUpdatedPromise.then(function() { notification.progressValue.set(progress += 1) }) return dbUpdatedPromise })) allFilesLoadedPromise.then(function() { setTimeout(notification.remove, 200) }) return allFilesLoadedPromise })) } function loadInfoFile(url) { console.log('loading', url) return readJson(url) .then(loadDataFiles) } function loadData() { var infoFiles = [ '/data/areas/info.json', '/data/courses/info.json', ] return new Promise(function(resolve, reject) { Promise.all(_.map(infoFiles, loadInfoFile)).then(resolve) }) } module.exports = loadData
client/helpers/loadData.js
var readJson = require('./readJson') var Promise = require('bluebird') var _ = require('lodash') var buildDeptNum = require('./deptNum') // get info.json // for each list in file['info'], // check ['hash'] against the db's hash // if it matches, return. // else, request the file from ['path'], // delete the pervious data. // and store the new data in the database function storeItem(item) { if (item.type === 'courses') { _.map(item.data.courses, function(course) { course.sourcePath = item.meta.path course.deptnum = buildDeptNum(course.depts.join('/') + ' ' + course.num + course.sect) }) window.server.courses .add.apply(window.server, item.data.courses) .done(function() { console.log(_.size(item.data.courses) + ' courses have been added') }) } else if (item.type === 'areas') { item.data.info.sourcePath = item.meta.path window.server.areas .add(item.data.info) .done(function(results) { console.log('an area has been added') }) } } function deleteItems(type, path, key) { console.log('deleting ' + type + ' from ' + path) var itemsToDelete = window.server[type] .query('sourcePath') .only(path) .execute() .done() itemsToDelete.then(function(items) { var keysToDelete = _.pluck(items, key) var numberToDelete = _.size(keysToDelete) if (numberToDelete) { _.each(keysToDelete, function(key) { window.server[type] .remove(key) .done() }) } console.log(numberToDelete + ' ' + type + ' have been removed') }) return new Promise(function(resolve, reject) { itemsToDelete.then(resolve) }) } function cleanPriorData(item) { var path = item.meta.path var hash = item.meta.hash // get rid of old items var deleteItemsPromise; if (item.type === 'areas') { deleteItemsPromise = deleteItems('areas', path, 'sourcePath') } else if (item.type === 'courses') { deleteItemsPromise = deleteItems('courses', path, 'clbid') } else { deleteItemsPromise = Promise.reject(new Error('Unknown item type' + item.type)) } deleteItemsPromise.then(function() { localStorage.removeItem(path) }) return deleteItemsPromise } function updateDatabase(itemType, infoFromServer) { infoFromServer.path = '/data/' + itemType + '/' + infoFromServer.path var oldHash = localStorage.getItem(infoFromServer.path) var newHash = infoFromServer.hash var itemPath = infoFromServer.path return new Promise(function(resolve, reject) { if (newHash !== oldHash) { console.log('adding ' + itemPath) readJson(itemPath) .then(function(data) { console.log('read ' + itemPath) var item = { data: data, meta: infoFromServer, type: itemType } cleanPriorData(item).then(function() { storeItem(item) }).then(function() { localStorage.setItem(item.meta.path, item.meta.hash) }) }) .catch(function(err) { reject(err.stack) }) .done(function() { console.log('added ' + itemPath) resolve() }) } else { console.log('skipped ' + itemPath) resolve() } }) } function loadDataFiles(infoFile) { console.log('load data files', infoFile) return Promise.all(_.map(infoFile.info, function(files) { return Promise.all(_.map(files, function(file) { return updateDatabase(infoFile.type, file) })) })) } function loadInfoFile(url) { console.log('loading', url) return readJson(url) .then(loadDataFiles) } function loadData() { var infoFiles = [ '/data/areas/info.json', '/data/courses/info.json', ] return new Promise(function(resolve, reject) { Promise.all(_.map(infoFiles, loadInfoFile)).then(resolve) }) } module.exports = loadData
break loadData badly
client/helpers/loadData.js
break loadData badly
<ide><path>lient/helpers/loadData.js <ide> var readJson = require('./readJson') <ide> var Promise = require('bluebird') <ide> var _ = require('lodash') <add>var add = require('./add') <ide> <del>var buildDeptNum = require('./deptNum') <add>var buildDeptNum = require('./deptNum').buildDeptNum <ide> <del>// get info.json <del>// for each list in file['info'], <del> // check ['hash'] against the db's hash <del> // if it matches, return. <del> // else, request the file from ['path'], <del> // delete the pervious data. <del> // and store the new data in the database <del> <del>function storeItem(item) { <del> if (item.type === 'courses') { <add>function storeCourses(item) { <add> return new Promise(function(resolve, reject) { <add> console.log(item.meta.path, 'called storeCourses') <ide> _.map(item.data.courses, function(course) { <ide> course.sourcePath = item.meta.path <ide> course.deptnum = buildDeptNum(course.depts.join('/') + ' ' + course.num + course.sect) <add> console.log(item.meta.path, 'is processing courses') <ide> }) <add> console.log(item.meta.path, 'has processed courses') <ide> window.server.courses <ide> .add.apply(window.server, item.data.courses) <del> .done(function() { <del> console.log(_.size(item.data.courses) + ' courses have been added') <del> }) <del> } <add> .done(function() {console.log(item.meta.path, 'has stored courses'); resolve(item)}) <add> }) <add>} <ide> <del> else if (item.type === 'areas') { <add>function storeArea(item) { <add> return new Promise(function(resolve, reject) { <add> console.log(item.meta.path, 'called storeArea') <ide> item.data.info.sourcePath = item.meta.path <ide> window.server.areas <ide> .add(item.data.info) <del> .done(function(results) { <del> console.log('an area has been added') <del> }) <del> } <add> .done(function() {resolve(item)}) <add> }) <add>} <add> <add>function storeItem(item) { <add> return new Promise(function(resolve, reject) { <add> if (item.type === 'courses') { <add> return storeCourses(item) <add> } else if (item.type === 'areas') { <add> return storeArea(item) <add> } <add> }) <ide> } <ide> <ide> function deleteItems(type, path, key) { <ide> .done() <ide> }) <ide> } <del> console.log(numberToDelete + ' ' + type + ' have been removed') <add> console.log(numberToDelete + ' ' + type + ' have been removed from ' + path) <ide> }) <ide> <ide> return new Promise(function(resolve, reject) { <del> itemsToDelete.then(resolve) <add> itemsToDelete.then(function() { <add> resolve() <add> }) <ide> }) <ide> } <ide> <ide> function cleanPriorData(item) { <del> var path = item.meta.path <del> var hash = item.meta.hash <add> return new Promise(function(resolve, reject) { <add> var path = item.meta.path <add> var hash = item.meta.hash <ide> <del> // get rid of old items <del> var deleteItemsPromise; <del> if (item.type === 'areas') { <del> deleteItemsPromise = deleteItems('areas', path, 'sourcePath') <del> } else if (item.type === 'courses') { <del> deleteItemsPromise = deleteItems('courses', path, 'clbid') <del> } else { <del> deleteItemsPromise = Promise.reject(new Error('Unknown item type' + item.type)) <del> } <add> // get rid of old items <add> var deleteItemsPromise; <add> if (item.type === 'courses') { <add> deleteItemsPromise = deleteItems('courses', path, 'clbid') <add> } else if (item.type === 'areas') { <add> deleteItemsPromise = deleteItems('areas', path, 'sourcePath') <add> } else { <add> deleteItemsPromise = Promise.reject(new Error('Unknown item type ' + item.type)) <add> } <ide> <del> deleteItemsPromise.then(function() { 
<del> localStorage.removeItem(path) <add> deleteItemsPromise.then(function() { <add> console.log('deleteItemsPromise is done') <add> localStorage.removeItem(path) <add> resolve(item) <add> }) <ide> }) <add>} <ide> <del> return deleteItemsPromise <add>function cacheItemHash(item) { <add> console.log(item.meta.path + ' called cacheItemHash') <add> localStorage.setItem(item.meta.path, item.meta.hash) <add> return Promise.resolve(item) <ide> } <ide> <ide> function updateDatabase(itemType, infoFromServer) { <ide> <ide> return new Promise(function(resolve, reject) { <ide> if (newHash !== oldHash) { <del> console.log('adding ' + itemPath) <add> console.log('need to add ' + itemPath) <ide> readJson(itemPath) <ide> .then(function(data) { <del> console.log('read ' + itemPath) <del> var item = { <add> return { <ide> data: data, <ide> meta: infoFromServer, <ide> type: itemType <ide> } <del> cleanPriorData(item).then(function() { <del> storeItem(item) <del> }).then(function() { <del> localStorage.setItem(item.meta.path, item.meta.hash) <del> }) <ide> }) <add> .then(cleanPriorData) <add> .then(storeItem) <add> .then(cacheItemHash) <ide> .catch(function(err) { <ide> reject(err.stack) <ide> }) <ide> .done(function() { <del> console.log('added ' + itemPath) <del> resolve() <add> console.log('added ' + itemPath + ' (' + _.size(item) + ' ' + itemType + ')') <add> resolve(true) <ide> }) <ide> } else { <ide> console.log('skipped ' + itemPath) <del> resolve() <add> resolve(false) <ide> } <ide> }) <ide> } <ide> <ide> function loadDataFiles(infoFile) { <ide> console.log('load data files', infoFile) <add> <add> var progress = 0; <add> var notification = new Cortex({ <add> message: 'Updating ' + infoFile.type, <add> hasProgress: true, <add> progressValue: progress, <add> maxProgressValue: _.chain(infoFile.info).mapValues(_.size).reduce(add).value(), <add> ident: infoFile.type <add> }) <add> window.notifications.push(notification.val()) <add> <ide> return Promise.all(_.map(infoFile.info, function(files) { <del> return Promise.all(_.map(files, function(file) { <del> return updateDatabase(infoFile.type, file) <add> var allFilesLoadedPromise = Promise.all(_.map(files, function(file) { <add> var dbUpdatedPromise = updateDatabase(infoFile.type, file) <add> dbUpdatedPromise.then(function() { <add> notification.progressValue.set(progress += 1) <add> }) <add> return dbUpdatedPromise <ide> })) <add> <add> allFilesLoadedPromise.then(function() { <add> setTimeout(notification.remove, 200) <add> }) <add> <add> return allFilesLoadedPromise <ide> })) <ide> } <ide>
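The file touched by this commit is built around a simple cache-invalidation loop, spelled out in the header comment of the original version: read info.json, compare each file's hash against the one stored locally, and only re-download, re-store, and re-remember files whose hash changed. A minimal sketch of that flow, where fetchJson and replaceInDatabase are hypothetical stand-ins for the app's readJson and IndexedDB plumbing rather than its real API:

// Re-fetch a data file only when its hash differs from the locally cached one.
// fetchJson and replaceInDatabase are hypothetical stand-ins, not this app's real API.
function updateIfChanged(path, newHash) {
  var oldHash = localStorage.getItem(path);
  if (newHash === oldHash) {
    // Nothing changed on the server, so skip the download entirely.
    return Promise.resolve(false);
  }
  return fetchJson(path)
    .then(function (data) {
      // Remove the stale rows for this path before inserting the fresh ones.
      return replaceInDatabase(path, data);
    })
    .then(function () {
      // Remember the new hash only after the data is safely stored, so a failed
      // update is retried on the next load instead of being silently skipped.
      localStorage.setItem(path, newHash);
      return true;
    });
}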
JavaScript
agpl-3.0
13a23bd9273af3ccffa8e92998d22e3f61ad03c1
0
unicef/rhizome,SeedScientific/polio,SeedScientific/polio,SeedScientific/polio,unicef/rhizome,unicef/polio,SeedScientific/polio,unicef/rhizome,unicef/polio,unicef/polio,unicef/rhizome,unicef/polio,SeedScientific/polio
'use strict'; var _ = require('lodash'); var d3 = require('d3'); var moment = require('moment'); var api = require('data/api'); module.exports = { replace: true, template: require('./choropleth.html'), paramAttributes: [ 'data-indicator', 'data-range' ], mixins: [ require('./mixin/resize') ], partials: { 'loading-overlay': require('./partial/loading-overlay.html') }, data: function () { return { error : false, loading : false, region : null, indicator: null, campaign : null, geo : null, border : null, range : [0, 1] }; }, ready: function () { if (typeof this.range === 'string') { this.range = JSON.parse(this.range); } }, computed: { mappedRegions: function () { if (!this.region) { return null; } return [this.region.id].concat(_(this.geo.features) .pluck('properties') .pluck('region_id') .value()); }, features: function () { var features = []; if (this.geo && !_.isEmpty(this.geo.features)) { features = this.geo.features; } else if (this.border) { features = [this.border]; } return features; }, boundingBox: function () { function lat(d) { return d[1]; } function lng(d) { return d[0]; } if (this.features.length < 1) { return [[0,0], [0,0]]; } var coordinates = _(this.features).map(function (f) { return _.flatten(f.geometry.coordinates); }) .flatten() .value(); var left = d3.min(coordinates, lng); var right = d3.max(coordinates, lng); var bottom = d3.min(coordinates, lat); var top = d3.max(coordinates, lat); return [[left, top], [right, bottom]]; }, center: function () { var w = this.width || 0; var h = this.height || 0; return [w/2, h/2]; }, centroid: function () { var box = this.boundingBox; var lat = box[1][1] + ((box[0][1] - box[1][1]) / 2); var lng = box[0][0] + ((box[1][0] - box[0][0]) / 2); return [lng, lat]; } }, methods: { draw: function () { var self = this; var svg = d3.select(this.$el).select('.geography'); var bounds = this.boundingBox; var width = this.width || 0; var height = this.height || 0; var projection = d3.geo.conicEqualArea() .parallels([bounds[1][1], bounds[0][1]]) .rotate([-this.centroid[0], 0]) // Rotate the globe so that the country is centered horizontally .center([0, this.centroid[1]]) // Set the center of the projection so that the polygon is moved vertically into the center of the viewport .translate([width / 2, height / 2]) // Translate to the center of the viewport .scale(1); var geopath = d3.geo.path().projection(projection); var features = this.features; // Calculate the scale required to fit the map within the SVG view box. 
var b = [projection(bounds[0]), projection(bounds[1])]; var s = 1 / Math.max((b[1][0] - b[0][0]) / width, (b[1][1] - b[0][1]) / height); projection.scale(s); var path = svg.selectAll('.region') .data(features, function (d) { return d.properties.region_id; }); path.enter().append('path') .on('click', function (d) { self.$dispatch('region-selected', d.properties.name); }) .on('mousemove', function (d) { var evt = d3.event; // Do not show a tooltip if we have no name if (!d.properties.name) { return; } self.$dispatch('tooltip-show', { el : this, position: { x: evt.pageX, y: evt.pageY, }, data: { text: d.properties.name } }); }) .on('mouseout', function () { self.$dispatch('tooltip-hide', { el: this }); }); var indicator = this.indicator; var quantize = d3.scale.quantize() .domain(this.range) .range([1, 2, 3, 4, 5, 6]); path.attr({ 'd': geopath, 'class': function (d) { if (!(d.properties.hasOwnProperty('indicators') && d.properties.indicators[indicator])) { return 'region'; } return 'region clickable q-' + quantize(d.properties.indicators[indicator]); } }); path.exit().remove(); }, loadData: function () { this.loading = true; if (!this.campaign || !this.geo || !this.indicator) { return; } var self = this; api.datapoints({ campaign_end : moment(this.campaign.end_date).format('YYYY-MM-DD'), campaign_start: moment(this.campaign.start_date).format('YYYY-MM-DD'), indicator__in : [this.indicator], region__in : this.mappedRegions }).then(function (data) { var index = _.indexBy(data.objects, 'region'); var features = self.features; for (var i = features.length - 1; i >= 0; i--) { var f = features[i].properties; var d = index[f.region_id]; var indicators = (d && d.indicators) || []; f.indicators = {}; for (var j = indicators.length - 1; j >= 0; j--) { var indicator = indicators[j]; f.indicators[indicator.indicator] = indicator.value; } } self.draw(); self.loading = false; }, this.onError); }, loadFeatures: function () { if (!this.region) { return; } var self = this; this.loading = true; Promise.all([api.geo({ parent_region__in: [this.region.id] }), api.geo({ region__in: [this.region.id] })]).then(function (data) { self.geo = data[0].objects; self.border = data[1].objects.features[0]; // FIXME: Can't filter regions by ID, so we have to fetch all of them // and just pick out the ones we want. api.regions().then(function (data) { var regions = _.indexBy(data.objects, 'id'); self.geo.features.forEach(function (feature) { feature.properties.name = regions[feature.properties.region_id].name; }); }); self.draw(); }, this.onError); }, onError: function (err) { console.err('choropleth', err); this.loading = false; this.error = true; } }, watch: { 'region' : 'loadFeatures', 'campaign' : 'loadData', 'geo' : 'loadData', 'indicator' : 'loadData', 'invalidate-display': 'draw', 'width' : 'draw', 'height' : 'draw' } };
ui/js/component/chart/choropleth.js
'use strict'; var _ = require('lodash'); var d3 = require('d3'); var moment = require('moment'); var api = require('data/api'); module.exports = { replace: true, template: require('./choropleth.html'), paramAttributes: [ 'data-indicator', 'data-range' ], mixins: [ require('./mixin/resize') ], partials: { 'loading-overlay': require('./partial/loading-overlay.html') }, data: function () { return { error : false, loading : false, region : null, indicator: null, campaign : null, geo : null, border : null, range : [0, 1] }; }, ready: function () { if (typeof this.range === 'string') { this.range = JSON.parse(this.range); } }, computed: { mappedRegions: function () { if (!this.region) { return null; } return _(this.geo.features) .pluck('properties') .pluck('region_id') .value(); }, features: function () { var features = []; if (this.geo && !_.isEmpty(this.geo.features)) { features = this.geo.features; } else if (this.border) { features = [this.border]; } return features; }, boundingBox: function () { function lat(d) { return d[1]; } function lng(d) { return d[0]; } if (this.features.length < 1) { return [[0,0], [0,0]]; } var coordinates = _(this.features).map(function (f) { return _.flatten(f.geometry.coordinates); }) .flatten() .value(); var left = d3.min(coordinates, lng); var right = d3.max(coordinates, lng); var bottom = d3.min(coordinates, lat); var top = d3.max(coordinates, lat); return [[left, top], [right, bottom]]; }, center: function () { var w = this.width || 0; var h = this.height || 0; return [w/2, h/2]; }, centroid: function () { var box = this.boundingBox; var lat = box[1][1] + ((box[0][1] - box[1][1]) / 2); var lng = box[0][0] + ((box[1][0] - box[0][0]) / 2); return [lng, lat]; } }, methods: { draw: function () { var self = this; var svg = d3.select(this.$el).select('.geography'); var bounds = this.boundingBox; var width = this.width || 0; var height = this.height || 0; var projection = d3.geo.conicEqualArea() .parallels([bounds[1][1], bounds[0][1]]) .rotate([-this.centroid[0], 0]) // Rotate the globe so that the country is centered horizontally .center([0, this.centroid[1]]) // Set the center of the projection so that the polygon is moved vertically into the center of the viewport .translate([width / 2, height / 2]) // Translate to the center of the viewport .scale(1); var geopath = d3.geo.path().projection(projection); var features = this.features; // Calculate the scale required to fit the map within the SVG view box. 
var b = [projection(bounds[0]), projection(bounds[1])]; var s = 1 / Math.max((b[1][0] - b[0][0]) / width, (b[1][1] - b[0][1]) / height); projection.scale(s); var path = svg.selectAll('.region') .data(features, function (d) { return d.properties.region_id; }); path.enter().append('path') .on('click', function (d) { self.$dispatch('region-selected', d.properties.name); }) .on('mousemove', function (d) { var evt = d3.event; // Do not show a tooltip if we have no name if (!d.properties.name) { return; } self.$dispatch('tooltip-show', { el : this, position: { x: evt.pageX, y: evt.pageY, }, data: { text: d.properties.name } }); }) .on('mouseout', function () { self.$dispatch('tooltip-hide', { el: this }); }); var indicator = this.indicator; var quantize = d3.scale.quantize() .domain(this.range) .range([1, 2, 3, 4, 5, 6]); path.attr({ 'd': geopath, 'class': function (d) { if (!(d.properties.hasOwnProperty('indicators') && d.properties.indicators[indicator])) { return 'region'; } return 'region clickable q-' + quantize(d.properties.indicators[indicator]); } }); path.exit().remove(); }, loadData: function () { this.loading = true; if (!this.campaign || !this.geo || !this.indicator) { return; } var self = this; api.datapoints({ campaign_end : moment(this.campaign.end_date).format('YYYY-MM-DD'), campaign_start: moment(this.campaign.start_date).format('YYYY-MM-DD'), indicator__in : [this.indicator], region__in : this.mappedRegions }).then(function (data) { var index = _.indexBy(data.objects, 'region'); var features = self.features; for (var i = features.length - 1; i >= 0; i--) { var f = features[i].properties; var d = index[f.region_id]; var indicators = (d && d.indicators) || []; f.indicators = {}; for (var j = indicators.length - 1; j >= 0; j--) { var indicator = indicators[j]; f.indicators[indicator.indicator] = indicator.value; } } self.draw(); self.loading = false; }, this.onError); }, loadFeatures: function () { if (!this.region) { return; } var self = this; this.loading = true; Promise.all([api.geo({ parent_region__in: [this.region.id] }), api.geo({ region__in: [this.region.id] })]).then(function (data) { self.geo = data[0].objects; self.border = data[1].objects.features[0]; // FIXME: Can't filter regions by ID, so we have to fetch all of them // and just pick out the ones we want. api.regions().then(function (data) { var regions = _.indexBy(data.objects, 'id'); self.geo.features.forEach(function (feature) { feature.properties.name = regions[feature.properties.region_id].name; }); }); self.draw(); }, this.onError); }, onError: function (err) { console.err('choropleth', err); this.loading = false; this.error = true; } }, watch: { 'region' : 'loadFeatures', 'campaign' : 'loadData', 'geo' : 'loadData', 'indicator' : 'loadData', 'invalidate-display': 'draw', 'width' : 'draw', 'height' : 'draw' } };
Include the parent region in mappedRegions Including the parent region ID along with the child regions ensures that the parent region will be colored even if there are no child regions to display.
ui/js/component/chart/choropleth.js
Include the parent region in mappedRegions
<ide><path>ui/js/component/chart/choropleth.js <ide> return null; <ide> } <ide> <del> return _(this.geo.features) <add> return [this.region.id].concat(_(this.geo.features) <ide> .pluck('properties') <ide> .pluck('region_id') <del> .value(); <add> .value()); <ide> }, <ide> <ide> features: function () {
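The one-line change in this diff is easier to see with concrete values: the datapoints request is keyed by region IDs, so if only the child features are listed the parent polygon never receives a value and stays uncolored. A stripped-down illustration with made-up IDs in place of the component's lodash chain:

// Region IDs the choropleth asks the datapoints API about (made-up values).
var parentId = 42;              // the region currently being viewed
var childIds = [101, 102, 103]; // region_id of each child feature in the GeoJSON

// Before the change: only the children are queried, so the parent shape has no data.
var regionsBefore = childIds;

// After the change: the parent is prepended, so its own datapoints are fetched too.
var regionsAfter = [parentId].concat(childIds);

console.log(regionsAfter); // [42, 101, 102, 103]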