lang
stringclasses 2
values | license
stringclasses 13
values | stderr
stringlengths 0
343
| commit
stringlengths 40
40
| returncode
int64 0
128
| repos
stringlengths 6
87.7k
| new_contents
stringlengths 0
6.23M
| new_file
stringlengths 3
311
| old_contents
stringlengths 0
6.23M
| message
stringlengths 6
9.1k
| old_file
stringlengths 3
311
| subject
stringlengths 0
4k
| git_diff
stringlengths 0
6.31M
|
---|---|---|---|---|---|---|---|---|---|---|---|---|
JavaScript | mit | 067de245d5a27cd78438b7485b3f964680ea0e9e | 0 | NRGI/rp-org-frontend,NRGI/rp-org-frontend | ////////////////
//LINK SCHEMA///
////////////////
'use strict';
var mongoose = require('mongoose');
require('mongoose-html-2').loadType(mongoose);
var linkSchema, Link,
deepPopulate = require('mongoose-deep-populate')(mongoose),
Schema = mongoose.Schema,
ObjectId = Schema.Types.ObjectId,
//MixedSchema = Schema.Types.Mixed,
entity_enu = {
values: 'commodity company company_group concession contract project production transfer'.split(' '),
//values: ' project '.split(' '),
message: 'Validator failed for `{PATH}` with value `{VALUE}`. Please select company, concession, contract, country, project, production or company group.'
};
linkSchema = new Schema({
commodity: {type: ObjectId, ref: 'Commodity'},
company: {type: ObjectId, ref: 'Company'},
company_group: {type: ObjectId, ref: 'CompanyGroup'},
concession: {type: ObjectId, ref: 'Concession'},
contract: {type: ObjectId, ref: 'Contract'},
project: {type: ObjectId, ref: 'Project'},
transfer: {type: ObjectId, ref: 'Transfer'},
production: {type: ObjectId, ref: 'Production'},
source: {type: ObjectId, ref: 'Source'},
entities: [{ //linked entity
type: String,
required:'{PATH} is required!',
enum: entity_enu}]
////company group specific
//company_group_start_date: Date,
//company_group_end_date: Date,
////licensee specific
//ownership_stake: Number
});
//linkSchema.plugin(mongooseHistory, hst_options);
linkSchema.plugin(deepPopulate);
//linkSchema.index({ commodity: 1, company: 1, company_group: 1, contract: 1, concession: 1, project: 1, transfer: 1, production: 1}, { unique: true });
Link = mongoose.model('Link', linkSchema);
function createDefaultLinks() {
Link.find({}).exec(function(err, links) {
if(links.length === 0) {
Link.create({_id:'56a8e342b9a34fbb07013c5f',company_group:'56a14d8ee47b92f110ce9a57',company:'56a13a758f224f670e6a376e',source:'56747e060e8cc07115200ee5',entities:['company','company_group']});
Link.create({_id:'56b923ea7a800a0d19c5f872',company_group:'56a14d8ee47b92f110ce9a58',company:'56a13a758f224f670e6a376e',source:'56747e060e8cc07115200ee5',entities:['company','company_group']});
Link.create({_id:'56b92074a836fc16182ed7de',project:'56a930f41b5482a31231ef42',company:'56a13a758f224f670e6a376e',source:'56747e060e8cc07115200ee6',entities:['company','project']});
Link.create({_id:'56b92074a836fc16182ed7e0',project:'56a930f41b5482a31231ef43',company:'56a13a758f224f670e6a376e',source:'56747e060e8cc07115200ee6',entities:['company','project']});
Link.create({_id:'56b92074a836fc16182ed7df',project:'56a930f41b5482a31231ef44',company:'56a13a758f224f670e6a376a',source:'56747e060e8cc07115200ee6',entities:['company','project']});
Link.create({_id:'56a8def185d9580a07c58280',commodity:'56a13e9942c8bef50ec2e9e8',company:'56a13a758f224f670e6a376e',source:'56747e060e8cc07115200ee4',entities:['company','commodity']});
Link.create({_id:'56a8def185d9580a07c58281',commodity:'56a13e9942c8bef50ec2e9eb',company:'56a13a758f224f670e6a376e',source:'56747e060e8cc07115200ee4',entities:['company','commodity']});
Link.create({_id:'56a8def666d9580a07c58281',commodity:'56a13e9942c8bef50ec2e9eb',company:'56a13a758f224f670e6a376e',source:'56747e060e8cc07115200ee4',entities:['company','commodity']});
Link.create({_id:'56a8dfbfee9e493007085bce',commodity:'56a13e9942c8bef50ec2e9e8',company:'56a13a758f224f670e6a376a',source:'56747e060e8cc07115200ee4',entities:['company','commodity']});
Link.create({_id:'56a8e070121b00500792c2eb',commodity:'56a13e9942c8bef50ec2e9e8',company:'56a13a758f224f670e6a376c',source:'56747e060e8cc07115200ee4',entities:['company','commodity']});
Link.create({_id:'56a8e66f405f534508e8586f',contract:'56a2eb4345d114c30439ec20',company:'56a13a758f224f670e6a376a',source:'56747e060e8cc07115200ee4',entities:['company','contract']});
Link.create({_id:'56a8e000000f534508e8586f',contract:'56a2eb4345d114c30439ec20',company:'56a13a758f224f670e6a376e',source:'56747e060e8cc07115200ee6',entities:['company','contract']});
Link.create({_id:'56a8e4acf77930f50708881e',concession:'56a2b8236e585b7316655794',company:'56a13a758f224f670e6a376e',source:'56747e060e8cc07115200ee6',entities:['company','concession']});
Link.create({_id:'56a8e5320fa7dd0d0817beff',concession:'56a2b8236e585b7316655794',company:'56a13a758f224f670e6a376a',source:'56747e060e8cc07115200ee5',entities:['company','concession']});
Link.create({_id:'56a8e777777730f50708881e',concession:'56a2b8236e585b7316655794',project:'56a930f41b5482a31231ef43',source:'56747e060e8cc07115200ee5',entities:['project','concession']});
Link.create({_id:'56a8e5344445dd0d0817beff',concession:'56a2b8236e585b731665579d',project:'56a930f41b5482a31231ef44',source:'56747e060e8cc07115200ee5',entities:['project','concession']});
Link.create({_id:'56a8e5355555dd0d0817beff',concession:'56a2b8236e585b7316655794',project:'56a930f41b5482a31231ef42',source:'56747e060e8cc07115200ee5',entities:['project','concession']});
Link.create({_id:'56a8e6658jfh534508e8586f',contract:'56a2eb4345d114c30439ec20',concession:'56a2b8236e585b7316655794',source:'56747e060e8cc07115200ee6',entities:['concession','contract']});
Link.create({_id:'56a8e6574555534508e8586f',contract:'56a2eb4345d114c30439ec22',concession:'56a2b8236e585b7316655794',source:'56747e060e8cc07115200ee6',entities:['concession','contract']});
Link.create({_id:'56a8e049487f534508e8586f',contract:'56a2eb4345d114c30439ec20',concession:'56a2b8236e585b731665579d',source:'56747e060e8cc07115200ee6',entities:['concession','contract']});
Link.create({_id:'56a8e044009f534508e8586f',contract:'56a2eb4345d114c30439ec22',concession:'56a2b8236e585b731665579d',source:'56747e060e8cc07115200ee6',entities:['concession','contract']});
Link.create({_id:'56b9207ffffffc16182ed7de',project:'56a930f41b5482a31231ef42',contract:'56a2eb4345d114c30439ec20',source:'56747e060e8cc07115200ee6',entities:['contract','project']});
Link.create({_id:'56b920dddddddc16182ed7e0',project:'56a930f41b5482a31231ef43',contract:'56a2eb4345d114c30439ec20',source:'56747e060e8cc07115200ee6',entities:['contract','project']});
Link.create({_id:'56b92hhhhhhhhc16182ed7df',project:'56a930f41b5482a31231ef44',contract:'56a2eb4345d114c30439ec21',source:'56747e060e8cc07115200ee6',entities:['contract','project']});
Link.create({_id:'56be54f9d7bff9921c93c985',project:'56a930f41b5482a31231ef42',production:'56be54f9d7bff9921c93c985',entities:['project','production']});
Link.create({_id:'56be54f9d7bff9921c93c986',project:'56a930f41b5482a31231ef43',production:'56be54f9d7bff9921c93c986',entities:['project','production']});
Link.create({_id:'56be54f9d7bff9921c93c987',project:'56a930f41b5482a31231ef44',production:'56be54f9d7bff9921c93c987',entities:['project','production']});
Link.create({_id:'56be54f9d7bff9921c93c988',project:'56a930f41b5482a31231ef44',production:'56be54f9d7bff9921c93c988',entities:['project','production']});
//transfer links
//country
//companies
//project
Link.create({_id:'56b92074a4jhd786182ed7de',project:'56a930f41b5482a31231ef42',concession:'56a2b8236e585b7316655794',source:'56747e060e8cc07115200ee6',entities:['concession','project']});
Link.create({_id:'56b92074a4449996182ed7e0',project:'56a930f41b5482a31231ef43',concession:'56a2b8236e585b7316655794',source:'56747e060e8cc07115200ee6',entities:['concession','project']});
Link.create({_id:'56b92074a855kkk6182ed7df',project:'56a930f41b5482a31231ef44',concession:'56a2b8236e585b7316655794',source:'56747e060e8cc07115200ee6',entities:['concession','project']});
Link.create({_id:'56a8de666666580a07c58280',commodity:'56a13e9942c8bef50ec2e9e8',project:'56a930f41b5482a31231ef42',source:'56747e060e8cc07115200ee6',entities:['project','commodity']});
Link.create({_id:'56a8def16666580a07c58281',commodity:'56a13e9942c8bef50ec2e9eb',project:'56a930f41b5482a31231ef43',source:'56747e060e8cc07115200ee6',entities:['project','commodity']});
//transfer links
Link.create({transfer:'56be54f9d7bff9921c93c985',company:'56a13a758f224f670e6a376e',source:'56747e060e8cc07115200ee3',entities:['company','transfer']});
Link.create({transfer:'56be54f9d7bff9921c93c98a',company:'56a13a758f224f670e6a376e',source:'56747e060e8cc07115200ee3',entities:['company','transfer']});
Link.create({transfer:'56be54f9d7bff9921c93c988',company:'56a13a758f224f670e6a376e',source:'56747e060e8cc07115200ee3',entities:['company','transfer']});
Link.create({transfer:'56be54f9d7bff9921c93c986',company:'56a13a758f224f670e6a376e',source:'56747e060e8cc07115200ee3',entities:['company','transfer']});
Link.create({transfer:'56be54f9d7bff9dd1c93c985',project:'56a930f41b5482a31231ef42',source:'56747e060e8cc07115200ee3',entities:['project','transfer']});
Link.create({transfer:'56be54f9d7bff9921c93c98a',project:'56a930f41b5482a31231ef42',source:'56747e060e8cc07115200ee3',entities:['project','transfer']});
Link.create({transfer:'56be54f9d7bff9921c93c988',project:'56a930f41b5482a31231ef42',source:'56747e060e8cc07115200ee3',entities:['project','transfer']});
Link.create({transfer:'56be54f9d7bff9921c93c986',project:'56a930f41b5482a31231ef42',source:'56747e060e8cc07115200ee3',entities:['project','transfer']});
//production links
Link.create({production:'56be54f9d7bff9921c93c985',project:'56a930f41b5482a31231ef42',source:'56747e060e8cc07115200ee3',entities:['project','production']});
Link.create({production:'56be54f9d7bff9921c93c986',project:'56a930f41b5482a31231ef42',source:'56747e060e8cc07115200ee3',entities:['project','production']});
Link.create({production:'56be54f9d7bff9921c93c987',project:'56a930f41b5482a31231ef42',source:'56747e060e8cc07115200ee3',entities:['project','production']});
Link.create({production:'56be54f9d7bff9921c93c988',project:'56a930f41b5482a31231ef42',source:'56747e060e8cc07115200ee3',entities:['project','production']});
Link.create({production:'56be54f9d7bff9921c93c989',project:'56a930f41b5482a31231ef42',source:'56747e060e8cc07115200ee3',entities:['project','production']});
Link.create({production:'56be54f9d7bff9921c93c990',project:'56a930f41b5482a31231ef42',source:'56747e060e8cc07115200ee3',entities:['project','production']});
//country
//companies
//project
//Link.create({_id:'56a8e778c052957008a847a7',concession:'56a2b8236e585b7316655794',commodity:'56a13e9942c8bef50ec2e9f1',source:'56747e060e8cc07115200ee6',entities:['concession','commodity']});
//Link.create({_id:'56a8e778c052957008a847a8',concession:'56a2b8236e585b7316655794',commodity:'56a13e9942c8bef50ec2e9eb',source:'56747e060e8cc07115200ee6',entities:['concession','commodity']});
//Link.create({_id:'56a8e834bd760b92085829de',concession:'56a2b8236e585b731665579d',commodity:'56a13e9942c8bef50ec2e9f1',source:'56747e060e8cc07115200ee5',entities:['commodity','concession']});
//Link.create({_id:'56a8e834bd760b92085829df',concession:'56a2b8236e585b731665579d',commodity:'56a13e9942c8bef50ec2e9eb',source:'56747e060e8cc07115200ee5',entities:['commodity','concession']});
//Link.create({_id:'56a8e91f514d14b5080599e0',concession:'56a2b8236e585b7316655794',contract:'56a2eb4345d114c30439ec20',source:'56747e060e8cc07115200ee6',entities:['concession','contract']});
//Link.create({_id:'56a8e9408c2925be086967b6',concession:'56a2b8236e585b731665579d',contract:'56a2eb4345d114c30439ec22',source:'56747e060e8cc07115200ee5',entities:['concession','contract']});
//
//Link.create({company_group:'',company:'',source:'',entities:['company','company_group']});
//Link.create({company_group:'',company:'',source:'',entities:['company','company_group']});
//Link.create({company_group:'',company:'',source:'',entities:['company','company_group']});
//Link.create({company_group:'',company:'',source:'',entities:['company','company_group']});
//
console.log('Links created...');
}
});
};
exports.createDefaultLinks = createDefaultLinks; | server/models/Links.js | ////////////////
//LINK SCHEMA///
////////////////
'use strict';
var mongoose = require('mongoose');
require('mongoose-html-2').loadType(mongoose);
var linkSchema, Link,
deepPopulate = require('mongoose-deep-populate')(mongoose),
Schema = mongoose.Schema,
ObjectId = Schema.Types.ObjectId,
//MixedSchema = Schema.Types.Mixed,
entity_enu = {
values: 'commodity company company_group concession contract project production transfer'.split(' '),
//values: ' project '.split(' '),
message: 'Validator failed for `{PATH}` with value `{VALUE}`. Please select company, concession, contract, country, project, production or company group.'
};
linkSchema = new Schema({
commodity: {type: ObjectId, ref: 'Commodity'},
company: {type: ObjectId, ref: 'Company'},
company_group: {type: ObjectId, ref: 'CompanyGroup'},
concession: {type: ObjectId, ref: 'Concession'},
contract: {type: ObjectId, ref: 'Contract'},
project: {type: ObjectId, ref: 'Project'},
transfer: {type: ObjectId, ref: 'Transfer'},
production: {type: ObjectId, ref: 'Production'},
source: {type: ObjectId, ref: 'Source'},
production: {type: ObjectId, ref: 'Production'},
entities: [{ //linked entity
type: String,
required:'{PATH} is required!',
enum: entity_enu}]
////company group specific
//company_group_start_date: Date,
//company_group_end_date: Date,
////licensee specific
//ownership_stake: Number
});
//linkSchema.plugin(mongooseHistory, hst_options);
linkSchema.plugin(deepPopulate);
//linkSchema.index({ commodity: 1, company: 1, company_group: 1, contract: 1, concession: 1, project: 1, transfer: 1, production: 1}, { unique: true });
Link = mongoose.model('Link', linkSchema);
function createDefaultLinks() {
Link.find({}).exec(function(err, links) {
if(links.length === 0) {
Link.create({_id:'56a8e342b9a34fbb07013c5f',company_group:'56a14d8ee47b92f110ce9a57',company:'56a13a758f224f670e6a376e',source:'56747e060e8cc07115200ee5',entities:['company','company_group']});
Link.create({_id:'56b923ea7a800a0d19c5f872',company_group:'56a14d8ee47b92f110ce9a58',company:'56a13a758f224f670e6a376e',source:'56747e060e8cc07115200ee5',entities:['company','company_group']});
Link.create({_id:'56b92074a836fc16182ed7de',project:'56a930f41b5482a31231ef42',company:'56a13a758f224f670e6a376e',source:'56747e060e8cc07115200ee6',entities:['company','project']});
Link.create({_id:'56b92074a836fc16182ed7e0',project:'56a930f41b5482a31231ef43',company:'56a13a758f224f670e6a376e',source:'56747e060e8cc07115200ee6',entities:['company','project']});
Link.create({_id:'56b92074a836fc16182ed7df',project:'56a930f41b5482a31231ef44',company:'56a13a758f224f670e6a376a',source:'56747e060e8cc07115200ee6',entities:['company','project']});
Link.create({_id:'56a8def185d9580a07c58280',commodity:'56a13e9942c8bef50ec2e9e8',company:'56a13a758f224f670e6a376e',source:'56747e060e8cc07115200ee4',entities:['company','commodity']});
Link.create({_id:'56a8def185d9580a07c58281',commodity:'56a13e9942c8bef50ec2e9eb',company:'56a13a758f224f670e6a376e',source:'56747e060e8cc07115200ee4',entities:['company','commodity']});
Link.create({_id:'56a8def666d9580a07c58281',commodity:'56a13e9942c8bef50ec2e9eb',company:'56a13a758f224f670e6a376e',source:'56747e060e8cc07115200ee4',entities:['company','commodity']});
Link.create({_id:'56a8dfbfee9e493007085bce',commodity:'56a13e9942c8bef50ec2e9e8',company:'56a13a758f224f670e6a376a',source:'56747e060e8cc07115200ee4',entities:['company','commodity']});
Link.create({_id:'56a8e070121b00500792c2eb',commodity:'56a13e9942c8bef50ec2e9e8',company:'56a13a758f224f670e6a376c',source:'56747e060e8cc07115200ee4',entities:['company','commodity']});
Link.create({_id:'56a8e66f405f534508e8586f',contract:'56a2eb4345d114c30439ec20',company:'56a13a758f224f670e6a376a',source:'56747e060e8cc07115200ee4',entities:['company','contract']});
Link.create({_id:'56a8e000000f534508e8586f',contract:'56a2eb4345d114c30439ec20',company:'56a13a758f224f670e6a376e',source:'56747e060e8cc07115200ee6',entities:['company','contract']});
Link.create({_id:'56a8e4acf77930f50708881e',concession:'56a2b8236e585b7316655794',company:'56a13a758f224f670e6a376e',source:'56747e060e8cc07115200ee6',entities:['company','concession']});
Link.create({_id:'56a8e5320fa7dd0d0817beff',concession:'56a2b8236e585b7316655794',company:'56a13a758f224f670e6a376a',source:'56747e060e8cc07115200ee5',entities:['company','concession']});
Link.create({_id:'56a8e777777730f50708881e',concession:'56a2b8236e585b7316655794',project:'56a930f41b5482a31231ef43',source:'56747e060e8cc07115200ee5',entities:['project','concession']});
Link.create({_id:'56a8e5344445dd0d0817beff',concession:'56a2b8236e585b731665579d',project:'56a930f41b5482a31231ef44',source:'56747e060e8cc07115200ee5',entities:['project','concession']});
Link.create({_id:'56a8e5355555dd0d0817beff',concession:'56a2b8236e585b7316655794',project:'56a930f41b5482a31231ef42',source:'56747e060e8cc07115200ee5',entities:['project','concession']});
Link.create({_id:'56a8e6658jfh534508e8586f',contract:'56a2eb4345d114c30439ec20',concession:'56a2b8236e585b7316655794',source:'56747e060e8cc07115200ee6',entities:['concession','contract']});
Link.create({_id:'56a8e6574555534508e8586f',contract:'56a2eb4345d114c30439ec22',concession:'56a2b8236e585b7316655794',source:'56747e060e8cc07115200ee6',entities:['concession','contract']});
Link.create({_id:'56a8e049487f534508e8586f',contract:'56a2eb4345d114c30439ec20',concession:'56a2b8236e585b731665579d',source:'56747e060e8cc07115200ee6',entities:['concession','contract']});
Link.create({_id:'56a8e044009f534508e8586f',contract:'56a2eb4345d114c30439ec22',concession:'56a2b8236e585b731665579d',source:'56747e060e8cc07115200ee6',entities:['concession','contract']});
Link.create({_id:'56b9207ffffffc16182ed7de',project:'56a930f41b5482a31231ef42',contract:'56a2eb4345d114c30439ec20',source:'56747e060e8cc07115200ee6',entities:['contract','project']});
Link.create({_id:'56b920dddddddc16182ed7e0',project:'56a930f41b5482a31231ef43',contract:'56a2eb4345d114c30439ec20',source:'56747e060e8cc07115200ee6',entities:['contract','project']});
Link.create({_id:'56b92hhhhhhhhc16182ed7df',project:'56a930f41b5482a31231ef44',contract:'56a2eb4345d114c30439ec21',source:'56747e060e8cc07115200ee6',entities:['contract','project']});
Link.create({_id:'56be54f9d7bff9921c93c985',project:'56a930f41b5482a31231ef42',production:'56be54f9d7bff9921c93c985',entities:['project','production']});
Link.create({_id:'56be54f9d7bff9921c93c986',project:'56a930f41b5482a31231ef43',production:'56be54f9d7bff9921c93c986',entities:['project','production']});
Link.create({_id:'56be54f9d7bff9921c93c987',project:'56a930f41b5482a31231ef44',production:'56be54f9d7bff9921c93c987',entities:['project','production']});
Link.create({_id:'56be54f9d7bff9921c93c988',project:'56a930f41b5482a31231ef44',production:'56be54f9d7bff9921c93c988',entities:['project','production']});
//transfer links
//country
//companies
//project
Link.create({_id:'56b92074a4jhd786182ed7de',project:'56a930f41b5482a31231ef42',concession:'56a2b8236e585b7316655794',source:'56747e060e8cc07115200ee6',entities:['concession','project']});
Link.create({_id:'56b92074a4449996182ed7e0',project:'56a930f41b5482a31231ef43',concession:'56a2b8236e585b7316655794',source:'56747e060e8cc07115200ee6',entities:['concession','project']});
Link.create({_id:'56b92074a855kkk6182ed7df',project:'56a930f41b5482a31231ef44',concession:'56a2b8236e585b7316655794',source:'56747e060e8cc07115200ee6',entities:['concession','project']});
Link.create({_id:'56a8de666666580a07c58280',commodity:'56a13e9942c8bef50ec2e9e8',project:'56a930f41b5482a31231ef42',source:'56747e060e8cc07115200ee6',entities:['project','commodity']});
Link.create({_id:'56a8def16666580a07c58281',commodity:'56a13e9942c8bef50ec2e9eb',project:'56a930f41b5482a31231ef43',source:'56747e060e8cc07115200ee6',entities:['project','commodity']});
//transfer links
Link.create({transfer:'56be54f9d7bff9921c93c985',company:'56a13a758f224f670e6a376e',source:'56747e060e8cc07115200ee3',entities:['company','transfer']});
Link.create({transfer:'56be54f9d7bff9921c93c98a',company:'56a13a758f224f670e6a376e',source:'56747e060e8cc07115200ee3',entities:['company','transfer']});
Link.create({transfer:'56be54f9d7bff9921c93c988',company:'56a13a758f224f670e6a376e',source:'56747e060e8cc07115200ee3',entities:['company','transfer']});
Link.create({transfer:'56be54f9d7bff9921c93c986',company:'56a13a758f224f670e6a376e',source:'56747e060e8cc07115200ee3',entities:['company','transfer']});
Link.create({transfer:'56be54f9d7bff9dd1c93c985',project:'56a930f41b5482a31231ef42',source:'56747e060e8cc07115200ee3',entities:['project','transfer']});
Link.create({transfer:'56be54f9d7bff9921c93c98a',project:'56a930f41b5482a31231ef42',source:'56747e060e8cc07115200ee3',entities:['project','transfer']});
Link.create({transfer:'56be54f9d7bff9921c93c988',project:'56a930f41b5482a31231ef42',source:'56747e060e8cc07115200ee3',entities:['project','transfer']});
Link.create({transfer:'56be54f9d7bff9921c93c986',project:'56a930f41b5482a31231ef42',source:'56747e060e8cc07115200ee3',entities:['project','transfer']});
//production links
Link.create({production:'56be54f9d7bff9921c93c985',project:'56a930f41b5482a31231ef42',source:'56747e060e8cc07115200ee3',entities:['project','production']});
Link.create({production:'56be54f9d7bff9921c93c986',project:'56a930f41b5482a31231ef42',source:'56747e060e8cc07115200ee3',entities:['project','production']});
Link.create({production:'56be54f9d7bff9921c93c987',project:'56a930f41b5482a31231ef42',source:'56747e060e8cc07115200ee3',entities:['project','production']});
Link.create({production:'56be54f9d7bff9921c93c988',project:'56a930f41b5482a31231ef42',source:'56747e060e8cc07115200ee3',entities:['project','production']});
Link.create({production:'56be54f9d7bff9921c93c989',project:'56a930f41b5482a31231ef42',source:'56747e060e8cc07115200ee3',entities:['project','production']});
Link.create({production:'56be54f9d7bff9921c93c990',project:'56a930f41b5482a31231ef42',source:'56747e060e8cc07115200ee3',entities:['project','production']});
//country
//companies
//project
//Link.create({_id:'56a8e778c052957008a847a7',concession:'56a2b8236e585b7316655794',commodity:'56a13e9942c8bef50ec2e9f1',source:'56747e060e8cc07115200ee6',entities:['concession','commodity']});
//Link.create({_id:'56a8e778c052957008a847a8',concession:'56a2b8236e585b7316655794',commodity:'56a13e9942c8bef50ec2e9eb',source:'56747e060e8cc07115200ee6',entities:['concession','commodity']});
//Link.create({_id:'56a8e834bd760b92085829de',concession:'56a2b8236e585b731665579d',commodity:'56a13e9942c8bef50ec2e9f1',source:'56747e060e8cc07115200ee5',entities:['commodity','concession']});
//Link.create({_id:'56a8e834bd760b92085829df',concession:'56a2b8236e585b731665579d',commodity:'56a13e9942c8bef50ec2e9eb',source:'56747e060e8cc07115200ee5',entities:['commodity','concession']});
//Link.create({_id:'56a8e91f514d14b5080599e0',concession:'56a2b8236e585b7316655794',contract:'56a2eb4345d114c30439ec20',source:'56747e060e8cc07115200ee6',entities:['concession','contract']});
//Link.create({_id:'56a8e9408c2925be086967b6',concession:'56a2b8236e585b731665579d',contract:'56a2eb4345d114c30439ec22',source:'56747e060e8cc07115200ee5',entities:['concession','contract']});
//
//Link.create({company_group:'',company:'',source:'',entities:['company','company_group']});
//Link.create({company_group:'',company:'',source:'',entities:['company','company_group']});
//Link.create({company_group:'',company:'',source:'',entities:['company','company_group']});
//Link.create({company_group:'',company:'',source:'',entities:['company','company_group']});
//
console.log('Links created...');
}
});
};
exports.createDefaultLinks = createDefaultLinks; | SAVEPOINT
| server/models/Links.js | SAVEPOINT | <ide><path>erver/models/Links.js
<ide> transfer: {type: ObjectId, ref: 'Transfer'},
<ide> production: {type: ObjectId, ref: 'Production'},
<ide> source: {type: ObjectId, ref: 'Source'},
<del> production: {type: ObjectId, ref: 'Production'},
<ide> entities: [{ //linked entity
<ide> type: String,
<ide> required:'{PATH} is required!', |
|
Java | apache-2.0 | 2815e1689f1fbe4d813eef731f1d115777a1ad9a | 0 | MaxRau/CoffeeMud,oriontribunal/CoffeeMud,Tycheo/coffeemud,oriontribunal/CoffeeMud,sfunk1x/CoffeeMud,MaxRau/CoffeeMud,oriontribunal/CoffeeMud,Tycheo/coffeemud,sfunk1x/CoffeeMud,sfunk1x/CoffeeMud,MaxRau/CoffeeMud,bozimmerman/CoffeeMud,bozimmerman/CoffeeMud,MaxRau/CoffeeMud,bozimmerman/CoffeeMud,Tycheo/coffeemud,oriontribunal/CoffeeMud,bozimmerman/CoffeeMud,Tycheo/coffeemud,sfunk1x/CoffeeMud | package com.planet_ink.coffee_mud.Abilities.Spells;
import com.planet_ink.coffee_mud.interfaces.*;
import com.planet_ink.coffee_mud.common.*;
import com.planet_ink.coffee_mud.utils.*;
import java.util.*;
public class Spell_Frailty extends Spell
{
public String ID() { return "Spell_Frailty"; }
public String name(){return "Frailty";}
public String displayText(){return "(Frailty)";}
public int quality(){return MALICIOUS;};
protected int canAffectCode(){return CAN_MOBS;}
public Environmental newInstance(){ return new Spell_Frailty();}
public int classificationCode(){ return Ability.SPELL|Ability.DOMAIN_ENCHANTMENT;}
public boolean okAffect(Environmental myHost, Affect affect)
{
if(!super.okAffect(myHost,affect))
return false;
if((affected==null)||(!(affected instanceof MOB)))
return true;
MOB mob=(MOB)affected;
if((affect.amITarget(mob))&&(Util.bset(affect.targetCode(),Affect.MASK_HURT)))
{
int recovery=(int)Math.round(Util.div((affect.targetCode()-Affect.MASK_HURT),2.0));
SaucerSupport.adjustDamageMessage(affect,recovery);
}
return true;
}
public void unInvoke()
{
// undo the affects of this spell
if((affected==null)||(!(affected instanceof MOB)))
return;
MOB mob=(MOB)affected;
super.unInvoke();
if(canBeUninvoked())
{
if(mob.location()!=null)
mob.location().show(mob,null,Affect.MSG_OK_VISUAL,"<S-NAME> seem(s) less frail.");
}
}
public boolean invoke(MOB mob, Vector commands, Environmental givenTarget, boolean auto)
{
MOB target=this.getTarget(mob,commands,givenTarget);
if(target==null) return false;
// the invoke method for spells receives as
// parameters the invoker, and the REMAINING
// command line parameters, divided into words,
// and added as String objects to a vector.
if(!super.invoke(mob,commands,givenTarget,auto))
return false;
boolean success=profficiencyCheck(mob.envStats().level()-target.envStats().level(),auto);
if(success)
{
// it worked, so build a copy of this ability,
// and add it to the affects list of the
// affected MOB. Then tell everyone else
// what happened.
invoker=mob;
FullMsg msg=new FullMsg(mob,target,this,affectType(auto),auto?"":"^S<S-NAME> incant(s) to <T-NAMESELF>.^?");
if(mob.location().okAffect(mob,msg))
{
mob.location().send(mob,msg);
if(!msg.wasModified())
{
mob.location().show(target,null,Affect.MSG_OK_VISUAL,"<S-NAME> seem(s) frail!");
maliciousAffect(mob,target,10,-1);
}
}
}
else
return maliciousFizzle(mob,target,"<S-NAME> incant(s) to <T-NAMESELF>, but the spell fades.");
// return whether it worked
return success;
}
}
| com/planet_ink/coffee_mud/Abilities/Spells/Spell_Frailty.java | package com.planet_ink.coffee_mud.Abilities.Spells;
import com.planet_ink.coffee_mud.interfaces.*;
import com.planet_ink.coffee_mud.common.*;
import com.planet_ink.coffee_mud.utils.*;
import java.util.*;
public class Spell_Frailty extends Spell
{
public String ID() { return "Spell_Frailty"; }
public String name(){return "Frailty";}
public String displayText(){return "(Frailty)";}
public int quality(){return MALICIOUS;};
protected int canAffectCode(){return CAN_MOBS;}
public Environmental newInstance(){ return new Spell_Frailty();}
public int classificationCode(){ return Ability.SPELL|Ability.DOMAIN_ENCHANTMENT;}
public boolean okAffect(Environmental myHost, Affect affect)
{
if(!super.okAffect(myHost,affect))
return false;
if((affected==null)||(!(affected instanceof MOB)))
return true;
MOB mob=(MOB)affected;
if((affect.amITarget(mob))&&(Util.bset(affect.targetCode(),Affect.MASK_HURT)))
{
int recovery=(int)Math.round(Util.mul((affect.targetCode()-Affect.MASK_HURT),2.0));
SaucerSupport.adjustDamageMessage(affect,recovery);
}
return true;
}
public void unInvoke()
{
// undo the affects of this spell
if((affected==null)||(!(affected instanceof MOB)))
return;
MOB mob=(MOB)affected;
super.unInvoke();
if(canBeUninvoked())
{
if(mob.location()!=null)
mob.location().show(mob,null,Affect.MSG_OK_VISUAL,"<S-NAME> seem(s) less frail.");
}
}
public boolean invoke(MOB mob, Vector commands, Environmental givenTarget, boolean auto)
{
MOB target=this.getTarget(mob,commands,givenTarget);
if(target==null) return false;
// the invoke method for spells receives as
// parameters the invoker, and the REMAINING
// command line parameters, divided into words,
// and added as String objects to a vector.
if(!super.invoke(mob,commands,givenTarget,auto))
return false;
boolean success=profficiencyCheck(mob.envStats().level()-target.envStats().level(),auto);
if(success)
{
// it worked, so build a copy of this ability,
// and add it to the affects list of the
// affected MOB. Then tell everyone else
// what happened.
invoker=mob;
FullMsg msg=new FullMsg(mob,target,this,affectType(auto),auto?"":"^S<S-NAME> incant(s) to <T-NAMESELF>.^?");
if(mob.location().okAffect(mob,msg))
{
mob.location().send(mob,msg);
if(!msg.wasModified())
{
mob.location().show(target,null,Affect.MSG_OK_VISUAL,"<S-NAME> seem(s) frail!");
maliciousAffect(mob,target,10,-1);
}
}
}
else
return maliciousFizzle(mob,target,"<S-NAME> incant(s) to <T-NAMESELF>, but the spell fades.");
// return whether it worked
return success;
}
}
|
git-svn-id: svn://192.168.1.10/public/CoffeeMud@3299 0d6f1817-ed0e-0410-87c9-987e46238f29
| com/planet_ink/coffee_mud/Abilities/Spells/Spell_Frailty.java | <ide><path>om/planet_ink/coffee_mud/Abilities/Spells/Spell_Frailty.java
<ide> MOB mob=(MOB)affected;
<ide> if((affect.amITarget(mob))&&(Util.bset(affect.targetCode(),Affect.MASK_HURT)))
<ide> {
<del> int recovery=(int)Math.round(Util.mul((affect.targetCode()-Affect.MASK_HURT),2.0));
<add> int recovery=(int)Math.round(Util.div((affect.targetCode()-Affect.MASK_HURT),2.0));
<ide> SaucerSupport.adjustDamageMessage(affect,recovery);
<ide> }
<ide> return true; |
||
Java | apache-2.0 | e59323550b0f57d18b2d9292bf62ab6032555aaa | 0 | vaadin/teleport | package com.drone;
import static com.drone.ReaderUtils.getUInt32;
import java.io.IOException;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.InetAddress;
import java.net.SocketException;
import java.net.UnknownHostException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.Arrays;
import java.util.StringTokenizer;
import java.util.concurrent.PriorityBlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import org.slf4j.Logger;
import org.springframework.context.Lifecycle;
import org.springframework.core.task.TaskExecutor;
import org.springframework.util.Assert;
import org.springframework.util.StringUtils;
import com.drone.command.ConfigCommand;
import com.drone.command.ControlCommand;
import com.drone.command.DroneCommand;
import com.drone.command.ResetWatchdogCommand;
public class DroneTemplate2 implements Lifecycle {
public static final String DEFAULT_IP = "192.168.1.1";
public static final int COMMAND_PORT = 5556;
public static final int NAV_PORT = 5554;
private volatile boolean running = false;
private final PriorityBlockingQueue<DroneCommand> commands = new PriorityBlockingQueue<>(
100, (l, r) -> l.getOrder() - r.getOrder());
private InetAddress address = fromIP(DEFAULT_IP);
private Logger logger = org.slf4j.LoggerFactory.getLogger(getClass());
private final TaskExecutor taskExecutor;
private final Object controlAckLock = new Object();
private final AtomicInteger sequenceNumber = new AtomicInteger(1);
private final AtomicBoolean controlAck = new AtomicBoolean();
public static interface ExceptionFriendlyRunnable {
void run() throws Exception;
}
public DroneTemplate2(TaskExecutor taskExecutor) {
this.taskExecutor = taskExecutor;
}
@Override
public void start() {
this.running = true;
try {
init(this.address, COMMAND_PORT, NAV_PORT);
} catch (Throwable throwable) {
logger.warn("Ooops!", throwable);
}
}
@Override
public void stop() {
this.running = false;
}
@Override
public boolean isRunning() {
return this.running;
}
private DroneState readDroneState(DatagramSocket datagramSocket,
int maxPacketSize) throws IOException {
DatagramPacket packet = new DatagramPacket(new byte[maxPacketSize],
maxPacketSize);
datagramSocket.receive(packet);
ByteBuffer buffer = ByteBuffer.wrap(packet.getData(), 0,
packet.getLength());
buffer.order(ByteOrder.LITTLE_ENDIAN);
int header = buffer.getInt();
int state = buffer.getInt();
long sequence = getUInt32(buffer);
int vision = buffer.getInt();
DroneState droneState = new DroneState(sequence, state, vision);
logger.info("droneState: " + droneState);
if (droneState.isNavDataDemoOnly()) {
while (buffer.position() < buffer.limit()) {
int tag = buffer.getShort() & 0xFFFF;
int payloadSize = (buffer.getShort() & 0xFFFF) - 4;
ByteBuffer optionData = buffer.slice().order(
ByteOrder.LITTLE_ENDIAN);
payloadSize = Math.min(payloadSize, optionData.remaining());
optionData.limit(payloadSize);
parseOption(tag, optionData);
buffer.position(buffer.position() + payloadSize);
}
}
return droneState;
}
private void parseOption(int tag, ByteBuffer optionData) {
if (tag == 0) {
int parsedTag = optionData.getShort();
int parsedSize = optionData.getShort();
int battery = optionData.getInt();
float theta = optionData.getFloat();
float phi = optionData.getFloat();
float psi = optionData.getFloat();
int altitude = optionData.getInt();
System.out.println("tag: " + parsedTag + " size: " + parsedSize
+ " battery: " + battery + " theta: " + theta + " phi: "
+ phi + " psi: " + psi + " altitude: " + altitude);
}
}
private void enqueueCommand(DroneCommand command) {
commands.add(command);
}
private void bootstrapNavData(DroneState droneState) throws IOException {
if (droneState.isNavDataBootstrap()) {
controlAck.set(droneState.isControlReceived()); // todo what's this
// buy us? Why?
enqueueCommand(new ConfigCommand("general:navdata_demo", true));
enqueueCommand(new ControlCommand(ControlCommand.ControlType.ACK, 0));
}
}
private void navDataLoop(InetAddress address, DatagramSocket navDataSocket,
int navPort, int maxPacketSize) throws Exception {
navDataSocket.setSoTimeout(3000);
// tickle!
navDataSocket.send(new DatagramPacket(new byte[] { 0x01, 0x00, 0x00,
0x00 }, 4, address, navPort));
// get the deon
DroneState droneState = readDroneState(navDataSocket, maxPacketSize);
bootstrapNavData(droneState);
while (isRunning()) {
droneState = readDroneState(navDataSocket, maxPacketSize);
}
}
private void commandLoop(InetAddress address, DatagramSocket socket,
int commandPort) throws Exception {
DroneCommand ack = new ControlCommand(ControlCommand.ControlType.ACK, 0);
DroneCommand keepAlive = new ResetWatchdogCommand();
DroneCommand command = null, stickyCommand = null;
long lastRecordedTime = 0;
while (isRunning()) {
long deltaTime = 0;
deltaTime = stickyCommand == null ? 40 : System.currentTimeMillis()
- lastRecordedTime;
command = commands.poll(deltaTime, TimeUnit.MILLISECONDS);
if (command == null) {
if (stickyCommand == null) {
command = keepAlive;
} else {
command = stickyCommand;
lastRecordedTime = System.currentTimeMillis();
}
} else {
if (command.isRepeated()) {
// sticky commands replace previous sticky
stickyCommand = command;
lastRecordedTime = System.currentTimeMillis();
} else /* if (command.clearSticky()) */{
// only some commands can clear sticky commands
stickyCommand = null;
}
}
DroneCommand gross = command;
if (gross.needControlAck()) {
waitForControlAckToBe(false,
() -> sendCommand(address, socket, gross, commandPort));
waitForControlAckToBe(true,
() -> sendCommand(address, socket, gross, commandPort));
} else {
sendCommand(address, socket, gross, commandPort);
}
}
}
private void sendCommand(InetAddress address,
DatagramSocket datagramSocket, DroneCommand droneCommand,
int commandPort) throws IOException {
Assert.notNull(droneCommand, "the droneCommand must not be null");
byte[] commandBytes = droneCommand.buildCommand(
this.sequenceNumber.getAndIncrement()).getBytes();
datagramSocket.send(new DatagramPacket(commandBytes,
commandBytes.length, address, commandPort));
}
private void waitForControlAckToBe(boolean b,
ExceptionFriendlyRunnable andThen) throws InterruptedException {
if (controlAck.get() != b) {
boolean tried = false;
synchronized (controlAckLock) {
while (!tried && controlAck.get() != b) {
controlAckLock.wait(50);
tried = true;
}
}
if (tried && controlAck.get() != b) {
throw new RuntimeException("control ack timeout "
+ String.valueOf(b));
}
try {
andThen.run();
} catch (Exception e) {
throw new RuntimeException(e);
}
}
}
private void init(InetAddress address, int commandPort, int navPort)
throws Throwable {
int timeout = 3000;
DatagramSocket commandSocket = new DatagramSocket(commandPort);
DatagramSocket navSocket = new DatagramSocket(navPort);
Arrays.asList(commandSocket, navSocket).forEach(s -> {
try {
s.setSoTimeout(timeout);
} catch (SocketException e) {
e.printStackTrace();
}
});
this.taskExecutor.execute(() -> {
try {
commandLoop(address, commandSocket, commandPort);
} catch (Exception e) {
throw new RuntimeException(e);
}
});
this.taskExecutor.execute(() -> {
try {
navDataLoop(address, navSocket, navPort, 2048);
} catch (Exception e) {
throw new RuntimeException(e);
}
});
}
private static InetAddress fromIP(String ip) {
ip = StringUtils.hasText(ip) ? ip : DEFAULT_IP;
StringTokenizer st = new StringTokenizer(ip, ".");
byte[] ipBytes = new byte[4];
for (int i = 0; i < 4; i++) {
ipBytes[i] = (byte) Integer.parseInt(st.nextToken());
}
try {
return InetAddress.getByAddress(ipBytes);
} catch (UnknownHostException e) {
throw new RuntimeException(e);
}
}
}
| backend/src/main/java/com/drone/DroneTemplate2.java | package com.drone;
import com.drone.command.ConfigCommand;
import com.drone.command.ControlCommand;
import com.drone.command.DroneCommand;
import com.drone.command.ResetWatchdogCommand;
import org.slf4j.Logger;
import org.springframework.context.Lifecycle;
import org.springframework.core.task.TaskExecutor;
import org.springframework.util.Assert;
import org.springframework.util.StringUtils;
import java.io.IOException;
import java.net.*;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.Arrays;
import java.util.StringTokenizer;
import java.util.concurrent.PriorityBlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import static com.drone.ReaderUtils.getUInt32;
public class DroneTemplate2 implements Lifecycle {
public static final String DEFAULT_IP = "192.168.1.1";
public static final int COMMAND_PORT = 5556;
public static final int NAV_PORT = 5554;
private volatile boolean running = false;
private final PriorityBlockingQueue<DroneCommand> commands = new PriorityBlockingQueue<>(100, (l, r) -> l.getOrder() - r.getOrder());
private InetAddress address = fromIP(DEFAULT_IP);
private Logger logger = org.slf4j.LoggerFactory.getLogger(getClass());
private final TaskExecutor taskExecutor;
private final Object controlAckLock = new Object();
private final AtomicInteger sequenceNumber = new AtomicInteger(1);
private final AtomicBoolean controlAck = new AtomicBoolean();
public static interface ExceptionFriendlyRunnable {
void run() throws Exception;
}
public DroneTemplate2(TaskExecutor taskExecutor) {
this.taskExecutor = taskExecutor;
}
@Override
public void start() {
this.running = true;
try {
init(this.address, COMMAND_PORT, NAV_PORT);
} catch (Throwable throwable) {
logger.warn("Ooops!", throwable);
}
}
@Override
public void stop() {
this.running = false;
}
@Override
public boolean isRunning() {
return this.running;
}
private DroneState readDroneState(DatagramSocket datagramSocket,
int maxPacketSize) throws IOException {
DatagramPacket packet = new DatagramPacket(new byte[maxPacketSize], maxPacketSize);
datagramSocket.receive(packet);
ByteBuffer buffer = ByteBuffer.wrap(packet.getData(), 0, packet.getLength());
buffer.order(ByteOrder.LITTLE_ENDIAN);
int header = buffer.getInt();
int state = buffer.getInt();
long sequence = getUInt32(buffer);
int vision = buffer.getInt();
DroneState droneState = new DroneState(sequence, state, vision);
logger.info("droneState: " + droneState);
return droneState;
}
private void enqueueCommand(DroneCommand command) {
commands.add(command);
}
private void bootstrapNavData(DroneState droneState) throws IOException {
if (droneState.isNavDataBootstrap()) {
controlAck.set(droneState.isControlReceived()); // todo what's this buy us? Why?
enqueueCommand(new ConfigCommand("general:navdata_demo", true));
enqueueCommand(new ControlCommand(ControlCommand.ControlType.ACK, 0));
}
}
private void navDataLoop(InetAddress address, DatagramSocket navDataSocket, int navPort, int maxPacketSize) throws Exception {
navDataSocket.setSoTimeout(3000);
// tickle!
navDataSocket.send(new DatagramPacket(new byte[]{0x01, 0x00, 0x00, 0x00}, 4, address, navPort));
// get the deon
DroneState droneState = readDroneState(navDataSocket, maxPacketSize);
bootstrapNavData(droneState);
while (isRunning()) {
droneState = readDroneState(navDataSocket, maxPacketSize);
}
}
private int buildOptionMask (boolean reset, int ... tags) {
int mask = 0 ;
int newmask = 0;
for (int n = 0; n < tags.length; n++) {
newmask |= 1 << tags[n];
}
if (reset) {
mask &= ~newmask;
} else {
mask |= newmask;
}
// maskChanged = true;
return mask ;
}
private void commandLoop(InetAddress address, DatagramSocket socket, int commandPort) throws Exception {
DroneCommand ack = new ControlCommand(ControlCommand.ControlType.ACK, 0);
DroneCommand keepAlive = new ResetWatchdogCommand();
DroneCommand command = null, stickyCommand = null;
long lastRecordedTime = 0;
while (isRunning()) {
long deltaTime = 0;
deltaTime = stickyCommand == null ? 40 : System.currentTimeMillis() - lastRecordedTime;
command = commands.poll(deltaTime, TimeUnit.MILLISECONDS);
if (command == null) {
if (stickyCommand == null) {
command = keepAlive;
} else {
command = stickyCommand;
lastRecordedTime = System.currentTimeMillis();
}
} else {
if (command.isRepeated()) {
// sticky commands replace previous sticky
stickyCommand = command;
lastRecordedTime = System.currentTimeMillis();
} else /*if (command.clearSticky()) */ {
// only some commands can clear sticky commands
stickyCommand = null;
}
}
DroneCommand gross = command;
if (gross.needControlAck()) {
waitForControlAckToBe(false, () -> sendCommand(address, socket, gross, commandPort));
waitForControlAckToBe(true, () -> sendCommand(address, socket, gross, commandPort));
} else {
sendCommand(address, socket, gross, commandPort);
}
}
}
private void sendCommand(InetAddress address, DatagramSocket datagramSocket, DroneCommand droneCommand, int commandPort) throws IOException {
Assert.notNull(droneCommand, "the droneCommand must not be null");
byte[] commandBytes = droneCommand.buildCommand(this.sequenceNumber.getAndIncrement()).getBytes();
datagramSocket.send(new DatagramPacket(commandBytes, commandBytes.length, address, commandPort));
}
private void waitForControlAckToBe(boolean b, ExceptionFriendlyRunnable andThen) throws InterruptedException {
if (controlAck.get() != b) {
boolean tried = false;
synchronized (controlAckLock) {
while (!tried && controlAck.get() != b) {
controlAckLock.wait(50);
tried = true;
}
}
if (tried && controlAck.get() != b) {
throw new RuntimeException("control ack timeout " + String.valueOf(b));
}
try {
andThen.run();
} catch (Exception e) {
throw new RuntimeException(e);
}
}
}
private void init(InetAddress address, int commandPort, int navPort) throws Throwable {
int timeout = 3000;
DatagramSocket commandSocket = new DatagramSocket(commandPort);
DatagramSocket navSocket = new DatagramSocket(navPort);
Arrays.asList(commandSocket, navSocket).forEach(s -> {
try {
s.setSoTimeout(timeout);
} catch (SocketException e) {
e.printStackTrace();
}
});
this.taskExecutor.execute(() -> {
try {
commandLoop(address, commandSocket, commandPort);
} catch (Exception e) {
throw new RuntimeException(e);
}
});
this.taskExecutor.execute(() -> {
try {
navDataLoop(address, navSocket, navPort, 2048);
} catch (Exception e) {
throw new RuntimeException(e);
}
});
}
private static InetAddress fromIP(String ip) {
ip = StringUtils.hasText(ip) ? ip : DEFAULT_IP;
StringTokenizer st = new StringTokenizer(ip, ".");
byte[] ipBytes = new byte[4];
for (int i = 0; i < 4; i++) {
ipBytes[i] = (byte) Integer.parseInt(st.nextToken());
}
try {
return InetAddress.getByAddress(ipBytes);
} catch (UnknownHostException e) {
throw new RuntimeException(e);
}
}
} | reformat
| backend/src/main/java/com/drone/DroneTemplate2.java | reformat | <ide><path>ackend/src/main/java/com/drone/DroneTemplate2.java
<ide> package com.drone;
<ide>
<del>import com.drone.command.ConfigCommand;
<del>import com.drone.command.ControlCommand;
<del>import com.drone.command.DroneCommand;
<del>import com.drone.command.ResetWatchdogCommand;
<del>import org.slf4j.Logger;
<del>import org.springframework.context.Lifecycle;
<del>import org.springframework.core.task.TaskExecutor;
<del>import org.springframework.util.Assert;
<del>import org.springframework.util.StringUtils;
<add>import static com.drone.ReaderUtils.getUInt32;
<ide>
<ide> import java.io.IOException;
<del>import java.net.*;
<add>import java.net.DatagramPacket;
<add>import java.net.DatagramSocket;
<add>import java.net.InetAddress;
<add>import java.net.SocketException;
<add>import java.net.UnknownHostException;
<ide> import java.nio.ByteBuffer;
<ide> import java.nio.ByteOrder;
<ide> import java.util.Arrays;
<ide> import java.util.concurrent.atomic.AtomicBoolean;
<ide> import java.util.concurrent.atomic.AtomicInteger;
<ide>
<del>import static com.drone.ReaderUtils.getUInt32;
<add>import org.slf4j.Logger;
<add>import org.springframework.context.Lifecycle;
<add>import org.springframework.core.task.TaskExecutor;
<add>import org.springframework.util.Assert;
<add>import org.springframework.util.StringUtils;
<add>
<add>import com.drone.command.ConfigCommand;
<add>import com.drone.command.ControlCommand;
<add>import com.drone.command.DroneCommand;
<add>import com.drone.command.ResetWatchdogCommand;
<ide>
<ide> public class DroneTemplate2 implements Lifecycle {
<del>
<ide>
<ide> public static final String DEFAULT_IP = "192.168.1.1";
<ide>
<ide> public static final int NAV_PORT = 5554;
<ide>
<ide> private volatile boolean running = false;
<del> private final PriorityBlockingQueue<DroneCommand> commands = new PriorityBlockingQueue<>(100, (l, r) -> l.getOrder() - r.getOrder());
<add> private final PriorityBlockingQueue<DroneCommand> commands = new PriorityBlockingQueue<>(
<add> 100, (l, r) -> l.getOrder() - r.getOrder());
<ide> private InetAddress address = fromIP(DEFAULT_IP);
<ide> private Logger logger = org.slf4j.LoggerFactory.getLogger(getClass());
<ide> private final TaskExecutor taskExecutor;
<ide> return this.running;
<ide> }
<ide>
<del>
<ide> private DroneState readDroneState(DatagramSocket datagramSocket,
<ide> int maxPacketSize) throws IOException {
<ide>
<del> DatagramPacket packet = new DatagramPacket(new byte[maxPacketSize], maxPacketSize);
<add> DatagramPacket packet = new DatagramPacket(new byte[maxPacketSize],
<add> maxPacketSize);
<ide> datagramSocket.receive(packet);
<ide>
<del> ByteBuffer buffer = ByteBuffer.wrap(packet.getData(), 0, packet.getLength());
<add> ByteBuffer buffer = ByteBuffer.wrap(packet.getData(), 0,
<add> packet.getLength());
<ide> buffer.order(ByteOrder.LITTLE_ENDIAN);
<ide>
<ide> int header = buffer.getInt();
<ide> int vision = buffer.getInt();
<ide> DroneState droneState = new DroneState(sequence, state, vision);
<ide> logger.info("droneState: " + droneState);
<add>
<add> if (droneState.isNavDataDemoOnly()) {
<add> while (buffer.position() < buffer.limit()) {
<add> int tag = buffer.getShort() & 0xFFFF;
<add> int payloadSize = (buffer.getShort() & 0xFFFF) - 4;
<add> ByteBuffer optionData = buffer.slice().order(
<add> ByteOrder.LITTLE_ENDIAN);
<add> payloadSize = Math.min(payloadSize, optionData.remaining());
<add> optionData.limit(payloadSize);
<add> parseOption(tag, optionData);
<add> buffer.position(buffer.position() + payloadSize);
<add> }
<add> }
<add>
<ide> return droneState;
<add> }
<add>
<add> private void parseOption(int tag, ByteBuffer optionData) {
<add> if (tag == 0) {
<add> int parsedTag = optionData.getShort();
<add> int parsedSize = optionData.getShort();
<add> int battery = optionData.getInt();
<add> float theta = optionData.getFloat();
<add> float phi = optionData.getFloat();
<add> float psi = optionData.getFloat();
<add> int altitude = optionData.getInt();
<add>
<add> System.out.println("tag: " + parsedTag + " size: " + parsedSize
<add> + " battery: " + battery + " theta: " + theta + " phi: "
<add> + phi + " psi: " + psi + " altitude: " + altitude);
<add>
<add> }
<ide> }
<ide>
<ide> private void enqueueCommand(DroneCommand command) {
<ide>
<ide> private void bootstrapNavData(DroneState droneState) throws IOException {
<ide> if (droneState.isNavDataBootstrap()) {
<del> controlAck.set(droneState.isControlReceived()); // todo what's this buy us? Why?
<add> controlAck.set(droneState.isControlReceived()); // todo what's this
<add> // buy us? Why?
<ide> enqueueCommand(new ConfigCommand("general:navdata_demo", true));
<ide> enqueueCommand(new ControlCommand(ControlCommand.ControlType.ACK, 0));
<ide> }
<ide> }
<ide>
<del> private void navDataLoop(InetAddress address, DatagramSocket navDataSocket, int navPort, int maxPacketSize) throws Exception {
<add> private void navDataLoop(InetAddress address, DatagramSocket navDataSocket,
<add> int navPort, int maxPacketSize) throws Exception {
<ide> navDataSocket.setSoTimeout(3000);
<ide>
<ide> // tickle!
<del> navDataSocket.send(new DatagramPacket(new byte[]{0x01, 0x00, 0x00, 0x00}, 4, address, navPort));
<add> navDataSocket.send(new DatagramPacket(new byte[] { 0x01, 0x00, 0x00,
<add> 0x00 }, 4, address, navPort));
<ide>
<ide> // get the deon
<ide> DroneState droneState = readDroneState(navDataSocket, maxPacketSize);
<ide> }
<ide> }
<ide>
<del>
<del> private int buildOptionMask (boolean reset, int ... tags) {
<del> int mask = 0 ;
<del> int newmask = 0;
<del> for (int n = 0; n < tags.length; n++) {
<del> newmask |= 1 << tags[n];
<del> }
<del> if (reset) {
<del> mask &= ~newmask;
<del> } else {
<del> mask |= newmask;
<del> }
<del> // maskChanged = true;
<del> return mask ;
<del> }
<del>
<del> private void commandLoop(InetAddress address, DatagramSocket socket, int commandPort) throws Exception {
<add> private void commandLoop(InetAddress address, DatagramSocket socket,
<add> int commandPort) throws Exception {
<ide> DroneCommand ack = new ControlCommand(ControlCommand.ControlType.ACK, 0);
<ide> DroneCommand keepAlive = new ResetWatchdogCommand();
<ide> DroneCommand command = null, stickyCommand = null;
<ide> long lastRecordedTime = 0;
<ide> while (isRunning()) {
<ide> long deltaTime = 0;
<del> deltaTime = stickyCommand == null ? 40 : System.currentTimeMillis() - lastRecordedTime;
<add> deltaTime = stickyCommand == null ? 40 : System.currentTimeMillis()
<add> - lastRecordedTime;
<ide> command = commands.poll(deltaTime, TimeUnit.MILLISECONDS);
<ide> if (command == null) {
<ide> if (stickyCommand == null) {
<ide> // sticky commands replace previous sticky
<ide> stickyCommand = command;
<ide> lastRecordedTime = System.currentTimeMillis();
<del> } else /*if (command.clearSticky()) */ {
<add> } else /* if (command.clearSticky()) */{
<ide> // only some commands can clear sticky commands
<ide> stickyCommand = null;
<ide> }
<ide>
<ide> DroneCommand gross = command;
<ide> if (gross.needControlAck()) {
<del> waitForControlAckToBe(false, () -> sendCommand(address, socket, gross, commandPort));
<del> waitForControlAckToBe(true, () -> sendCommand(address, socket, gross, commandPort));
<add> waitForControlAckToBe(false,
<add> () -> sendCommand(address, socket, gross, commandPort));
<add> waitForControlAckToBe(true,
<add> () -> sendCommand(address, socket, gross, commandPort));
<ide> } else {
<ide> sendCommand(address, socket, gross, commandPort);
<ide> }
<ide> }
<ide> }
<ide>
<del> private void sendCommand(InetAddress address, DatagramSocket datagramSocket, DroneCommand droneCommand, int commandPort) throws IOException {
<add> private void sendCommand(InetAddress address,
<add> DatagramSocket datagramSocket, DroneCommand droneCommand,
<add> int commandPort) throws IOException {
<ide> Assert.notNull(droneCommand, "the droneCommand must not be null");
<del> byte[] commandBytes = droneCommand.buildCommand(this.sequenceNumber.getAndIncrement()).getBytes();
<del> datagramSocket.send(new DatagramPacket(commandBytes, commandBytes.length, address, commandPort));
<del> }
<del>
<del> private void waitForControlAckToBe(boolean b, ExceptionFriendlyRunnable andThen) throws InterruptedException {
<add> byte[] commandBytes = droneCommand.buildCommand(
<add> this.sequenceNumber.getAndIncrement()).getBytes();
<add> datagramSocket.send(new DatagramPacket(commandBytes,
<add> commandBytes.length, address, commandPort));
<add> }
<add>
<add> private void waitForControlAckToBe(boolean b,
<add> ExceptionFriendlyRunnable andThen) throws InterruptedException {
<ide> if (controlAck.get() != b) {
<ide> boolean tried = false;
<ide> synchronized (controlAckLock) {
<ide> }
<ide> }
<ide> if (tried && controlAck.get() != b) {
<del> throw new RuntimeException("control ack timeout " + String.valueOf(b));
<add> throw new RuntimeException("control ack timeout "
<add> + String.valueOf(b));
<ide> }
<ide>
<ide> try {
<ide> }
<ide> }
<ide>
<del> private void init(InetAddress address, int commandPort, int navPort) throws Throwable {
<add> private void init(InetAddress address, int commandPort, int navPort)
<add> throws Throwable {
<ide>
<ide> int timeout = 3000;
<ide>
<ide> throw new RuntimeException(e);
<ide> }
<ide> });
<del> }
<del>
<add>
<add> }
<ide>
<ide> private static InetAddress fromIP(String ip) {
<ide> ip = StringUtils.hasText(ip) ? ip : DEFAULT_IP; |
|
JavaScript | apache-2.0 | e9937cc5892d89ec74c9a6e5bd106f39de1fe1ad | 0 | emepyc/tnt,tntvis/tnt | var epeek = function() {
"use strict";
// Default species and genome location
// TODO: Encapsulate this information in an object
var gene; // undefined
var ensGene; // undefined
var species = "human";
var chr = 7;
var fromPos = 139424940;
var toPos = 141784100;
var chr_length; // undefined
// Prefixes to use the REST API.
// These are modified in the localREST setter
// TODO: Encapsulate this information in an object
var prefix = "http://beta.rest.ensembl.org";
var prefix_region = prefix + "/feature/region/";
var prefix_ensgene = prefix + "/lookup/";
var prefix_gene = prefix + "/xrefs/symbol/";
var prefix_homologues = prefix + "/homology/id/";
var prefix_chr_info = prefix + "/assembly/info/";
// The REST response in general view
var genes = [];
// Display elements options that can be overridden by setters
// (so they are exposed in the API)
// TODO: Encapsulate this information in an object
var min_width = 300;
var width = 600;
var height = 150;
var bgColor = d3.rgb('#DDDDDD'); //#F8FBEF
var fgColor = d3.rgb('#000000');
var highlightColor = d3.rgb('#000000');
var drag_allowed = true;
var curr_ease = d3.ease("cubic-in-out");
var fivePrimeEdge;
var threePrimeEdge;
// Display elements (not directly exposed in the API)
// TODO: Encapsulate this information in an object
var svg_g;
var pane;
var xScale;
var zoomEventHandler
var xAxis;
var refresh;
var dur = 500;
// Closure to layout the genes in the view
// var genes_layout = epeek_genes().height(height);
// The id of the div element the plug-in connects to
// undefined by default
var div_id;
/** The returned closure
@alias ePeek
@namespace
@example
// Typically, the plug-in is used as follows:
var gB = epeek().width(920); // other methods can be included here
var gBTheme = epeek_theme(); // other methods can be included here
gBTheme(gB, document.getElementById('DOM_element_id');
*/
var gBrowser = function(div) {
div_id = d3.select(div).attr("id");
gBrowser.genes_layout.height(height); //genes_layout;
// The original div is classed with the ePeek class
d3.select(div)
.classed("ePeek", true);
// The Browser div
var browserDiv = d3.select(div);
var locRow = browserDiv
.append("div")
.attr("class", "ePeek_locRow");
var groupDiv = browserDiv
.append("div")
.attr("class", "ePeek_groupDiv");
// The SVG
svg_g = groupDiv
.append("svg")
.attr("class", "ePeek_svg")
.attr("width", width)
.attr("height", height)
.style("background-color", bgColor)
.append("g")
.attr("transform", "translate(0,20)")
.append("g")
.attr("class", "ePeek_g");
// The Zooming/Panning Pane
pane = svg_g
.append("rect")
.attr("class", "ePeek_pane")
.attr("id", "ePeek_" + div_id + "_pane")
.attr("width", width)
.attr("height", height)
.style("fill", fgColor);
var tooWide_text = svg_g
.append("text")
.attr("class", "ePeek_wideOK_text")
.attr("id", "ePeek_" + div_id + "_tooWide")
.attr("fill", bgColor)
.text("Region too wide");
// TODO: I don't know if this is the best way (and portable) way
// of centering the text in the text
var bb = tooWide_text[0][0].getBBox();
tooWide_text
.attr("x", ~~(width/2 - bb.width/2))
.attr("y", ~~(height/2 - bb.height/2));
// The locRow
locRow
.append("span")
.text("Current location: ");
locRow
.append("span")
.attr("id", "ePeek_" + div_id + "_species")
.text(species);
locRow
.append("span")
.text(" (");
locRow
.append("span")
.attr("id", "ePeek_" + div_id + "_chr")
.text(chr);
locRow
.append("span")
.text(":");
locRow
.append("span")
.attr("id", "ePeek_" + div_id + "_from")
.text(fromPos);
locRow
.append("span")
.text("-");
locRow
.append("span")
.attr("id", "ePeek_" + div_id + "_to")
.text(toPos);
locRow
.append("span")
.text(")");
};
/** <strong>startOnOrigin</strong> decides if the plug-in needs to be started based on a
gene or genome location. If centered in a gene, it tries to fetch the genomic
coordinates associated with the given gene.
*/
gBrowser.startOnOrigin = function () {
if (gBrowser.gene() !== undefined) {
gBrowser.get_gene(gBrowser.gene());
} else {
gBrowser.start();
}
return;
};
/** <strong>start</strong> plots the genome browser and starts listening for mouse events on it.
It always uses genomic coordinates, so if the coordinates associated with a given gene
needs to be taken into account, {@link ePeek.startOnOrigin} has to be used instead.
*/
gBrowser.start = function () {
var url = get_url();
console.log("URL: ");
console.log(url);
// We get the chromosome length, this is done async, but I assume that
// the response will be available on time in the interactive process
getChrLength();
d3.json(url, function (error, resp) {
console.log("RESP: ");
console.log(resp);
genes = resp;
plot();
update_layout();
});
};
var plot = function () {
xScale = d3.scale.linear()
.domain([fromPos, toPos])
.range([0, width]);
gBrowser.genes_layout(genes, xScale);
xAxis = d3.svg.axis()
.scale(xScale)
.orient("top");
// zoom
if (drag_allowed) {
zoomEventHandler = d3.behavior.zoom();
pane.call( zoomEventHandler.x(xScale).on("zoom", zoom) );
}
};
var update_layout = function () {
var newdata = gBrowser.genes_layout.genes();
var g_genes = svg_g.selectAll(".ePeek_gs")
.data(newdata, function (d) {
return d.ID
});
g_genes.selectAll(".ePeek_gene")
// The data needs to be re-joint for all the sub-elements?
.data(newdata, function (d) {return d.ID})
.transition()
.duration(500)
.attr("y", function (d) {
return gBrowser.genes_layout.gene_slot().slot_height * d.slot;
})
.attr("height", gBrowser.genes_layout.gene_slot().gene_height)
g_genes.selectAll(".ePeek_name")
// The data needs to be re-joint for all the sub-elements?
.data(newdata, function (d) {return d.ID})
.transition()
.duration(500)
// .each(function() {console.log(d3.select(this).style('font-size'))})
.attr("y", function (d) {
return (gBrowser.genes_layout.gene_slot().slot_height * d.slot) + 25
})
.text(function (d) {
if (gBrowser.genes_layout.gene_slot().show_label) {
return d.external_name;
} else {
return "";
}
});
g_genes
.enter()
.append("g")
.attr("class", "ePeek_gs")
.call(plot_gene)
g_genes.exit().remove();
g_genes.on("click", function (d) {gBrowser.gene_info_callback(d)});
update();
};
var plot_gene = function (new_gene) {
new_gene
.append("rect")
.attr("class", "ePeek_gene")
.attr("x", function (d) {
return (xScale(d.start));
})
.attr("y", function (d) {
console.log("D.SLOT:" + d.slot);
console.log("SLOT_HEIGHT:" + gBrowser.genes_layout.gene_slot().slot_height);
return gBrowser.genes_layout.gene_slot().slot_height * d.slot;
})
.attr("width", function (d) {
return (xScale(d.end) - xScale(d.start));
})
.attr("height", gBrowser.genes_layout.gene_slot().gene_height) // This has to be dynamic now
.attr("fill", bgColor)
.transition().duration(dur).attr("fill", function (d) {
return fgColor;
});
new_gene
.append("text")
.attr("class", "ePeek_name")
.attr("x", function (d) {
return (xScale(d.start));
})
.attr("y", function (d) {
return (gBrowser.genes_layout.gene_slot().slot_height * d.slot) + 25 // TODO: This 25 is artificial. It is supposed to give enough room for the label
// i.e. the font vertical size is less than 25.
// Maybe it would be better to have a fixed font-size at least?
})
.attr("fill", bgColor)
.text(function (d) {
if (gBrowser.genes_layout.gene_slot().show_label) {
return d.external_name
} else {
return ""
}
})
.style ("font-weight", function (d) {
return "normal";
})
.transition().duration(dur).attr("fill", function (d) {
return fgColor;
});
};
var update = function () {
svg_g.call(xAxis);
var g_genes = svg_g.selectAll(".ePeek_gs");
g_genes.select(".ePeek_gene")
.attr("x", function (d) {
return (xScale(d.start))
})
.attr("width", function (d) {
return (xScale(d.end) - xScale(d.start))
});
g_genes.select(".ePeek_name")
.attr("x", function (d) {
return (xScale(d.start));
});
// loc_row
var xScale_domain = xScale.domain();
d3.select("#ePeek_" + div_id + "_species")
.text(species); // Only if cross-species is allowed! This can only change if Jumped from searchBox or ortholog selection
d3.select("#ePeek_" + div_id + "_chr")
.text(chr);
d3.select("#ePeek_" + div_id + "_from")
.text(~~xScale_domain[0]);
d3.select("#ePeek_" + div_id + "_to")
.text(~~xScale_domain[1]);
};
/** <strong>zoom</strong> zooms in or out
*/
gBrowser.zoom = function (factor) {
move(factor, 0);
};
var move = function (factor, direction) {
var oldDomain = xScale.domain();
var span = oldDomain[1] - oldDomain[0];
var offset = (span * factor) - span;
var newDomain;
switch (direction) {
case -1 :
newDomain = [(~~oldDomain[0] - offset), ~~(oldDomain[1] - offset)];
break;
case 1 :
newDomain = [(~~oldDomain[0] + offset), ~~(oldDomain[1] - offset)];
break;
case 0 :
newDomain = [oldDomain[0] - ~~(offset/2), oldDomain[1] + (~~offset/2)];
}
var interpolator = d3.interpolateNumber(oldDomain[0], newDomain[0]);
var ease = gBrowser.ease();
var x = 0;
d3.timer(function(d) {
var curr_start = interpolator(ease(x));
var curr_end;
switch (direction) {
case -1 :
curr_end = curr_start + span;
break;
case 1 :
curr_end = curr_start + span;
break;
case 0 :
curr_end = oldDomain[1] + oldDomain[0] - curr_start;
break;
}
var currDomain = [curr_start, curr_end];
xScale.domain(currDomain);
zoom(xScale);
x+=0.02;
return x>1;
});
};
gBrowser.right = function (factor) {
// It doesn't make sense factors < 1 for left/right moves
if (factor > 0) {
move(factor, 1);
}
};
gBrowser.left = function (factor) {
// It doesn't make sense factors < 1 for left/right moves
if (factor > 0) {
move(factor, -1);
}
};
var zoom = function (new_xScale) {
if (new_xScale !== undefined) {
zoomEventHandler.x(new_xScale);
}
// This fixes before-the-beginning panning
var currDomain = xScale.domain();
if (currDomain[0] < 0) {
if (fivePrimeEdge === undefined) {
fivePrimeEdge = currDomain[1]
}
currDomain = ([0, fivePrimeEdge]);
xScale.domain(currDomain);
zoomEventHandler.x(xScale);
// var bestEnd = currDomain[1] < 1000 ? 1000 : ~~currDomain[1];
// xScale.domain([0,bestEnd]);
} else {
fivePrimeEdge = undefined;
}
// This fixes pass-the-ending panning
if (currDomain[1] > chr_length) {
if (threePrimeEdge === undefined) {
threePrimeEdge = currDomain[0];
}
// var bestStart = currDomain[0] > (chr_length - 1000) ? (chr_length - 1000) : ~~currDomain[0] < 0 ? 0 : ~~currDomain[0];
// xScale.domain([bestStart,chr_length]);
xScale.domain([threePrimeEdge, chr_length]);
zoomEventHandler.x(xScale);
} else {
threePrimeEdge = undefined;
}
window.clearTimeout(refresh);
refresh = window.setTimeout(function(){
var currDomain = xScale.domain();
gBrowser.from(~~currDomain[0]);
gBrowser.to(~~currDomain[1]);
console.log("New Pos:" + fromPos + "-" + toPos);
var url = get_url();
console.log(url);
d3.json(url, function (error, resp) {
if (error !== null) {
d3.select("#ePeek_" + div_id + "_pane")
.classed("ePeek_dark_pane", true);
d3.select("#ePeek_" + div_id + "_tooWide")
.classed("ePeek_tooWide_text", true)
.moveToFront();
} else {
d3.select("#ePeek_" + div_id + "_pane")
.classed("ePeek_dark_pane", false);
d3.select("#ePeek_" + div_id + "_tooWide")
.classed("ePeek_tooWide_text", false)
gBrowser.genes_layout(resp, xScale);
update_layout();
}
});
}, 300); //
update();
};
var change_gene_color = function (genes, color) {
genes.select(".ePeek_gene")
.transition()
.duration(500)
.attr("fill", color);
genes.select(".ePeek_name")
.transition()
.duration(500)
.style("fill", color);
};
/** <strong>unhighlight_gene</strong> removes highlight from genes previously highlighted with {@link gBrowser.highlight_gene}.
Its argument is a callback that is called on every gene. The callback receives the information for that gene as its argument. On each gene, the callback is expected to return true or false. The gene or set of genes evaluated to true will be unhighlighted.
@param {Callback} callback A closure that evaluates all the genes.
@returns {ePeek} The original object allowing method chaining
*/
gBrowser.unhighlight_gene = function (cbak) {
// TODO: This will select all genes *accross gBs*. Should we include the div_id in the class to avoid that?
var genes_to_unhighlight = d3.selectAll(".ePeek_gs")
.filter(function(d){return cbak(d)});
change_gene_color(genes_to_unhighlight, fgColor)
return gBrowser;
}
/** <strong>highlight_gene</strong> highlights a gene based on the {@link gBrowser.highlight_color} specified.
Its argument is a callback that is called on every gene. The callback receives the information for that gene as its argument. On each gene, the callback is expected to return true or false. The gene or set of genes evaluated to true will be highlighted.
Note that this method doesn't "un-highlight" any gene previously highlighted, so make sure you call {@link gBrowser.unhighlight_gene} if needed.
@param {Callback} callback A closure that evaluates all the genes.
@returns {ePeek} The original object allowing method chaining
*/
gBrowser.highlight_gene = function (cbak) {
// TODO: This will select all genes *accross gBs*. Should we include the div_id in the class to avoid that?
var genes_to_highlight = d3.selectAll(".ePeek_gs")
.filter(function(d){return cbak(d)});
change_gene_color(genes_to_highlight, highlightColor);
return gBrowser;
};
// public methods (API)
/** <strong>resize</strong> takes a new width (in pixels) for the genome view and resizes it accordingly. It can be used to resize the view lively. For example it is used by the mobile theme to respond to orientation changes in the device
@param {Number} width New width (in pixels)
*/
gBrowser.resize = function (w) {
// Resize the svg
d3.select(".ePeek_svg").attr("width", w);
// Resize the zooming/panning pane
d3.select("#ePeek_" + div_id + "_pane").attr("width", w);
// Set the new width
gBrowser.width(w);
// Replot
plot();
update();
};
/** <strong>get_gene</strong> retrieves the gene from the remote (REST) server
returning the Ensembl genes associated with the given external name
If the response of the REST server is defined, the {@link ePeek.ensGenes_callback} callback is invoked passing the found Ensembl genes as its argument.
If the response of the REST server is undefined, the plug-in is started with the current
genomic coordinates
@param {string} gene_name The gene external name to look up
*/
gBrowser.get_gene = function (gene_name) {
var url = prefix_gene + species + "/" + gene_name + ".json?object=gene";
console.log("URL: " + url);
d3.json(url, function(error, resp) {
resp = resp.filter(function(d) {
return !d.id.indexOf("ENS"); // To avoid LRG genes (maybe the REST service doesn't return those now?
});
console.log("RESP:");
console.log(resp);
if (resp[0] !== undefined) {
set_ensGene(resp[0].id);
gBrowser.ensGenes_callback(resp);
gBrowser.ensGene_lookup(resp[0].id);
} else {
gBrowser.start();
}
});
};
///*********************////
/// DATA RETRIEVERS ////
///*********************////
/** <strong>homologues</strong> looks for homologues of the given gene.
Once the homologues are retrieved, the {@link ePeek.homologues_callback} callback is invoked passing the array of homologues as its argument.
@param {string} ensGene The id of the gene to look for homologues
*/
gBrowser.homologues = function(ensGene) {
var url = prefix_homologues + ensGene + ".json?format=condensed;sequence=none;type=all";
console.log(url);
d3.json(url, function (error, resp) {
console.log("HOMOLOGUES RESP: ");
console.log(resp);
if (resp !== undefined) { // / Needed because if there are not homologues we get an error -- We are trying to change this in the REST API
gBrowser.homologues_callback(resp.data[0].homologies);
}
});
}
/** <strong>ensGene_lookup</strong> retrieves the coordinates of a Ensembl gene from
the remote server. The plug-in is then initialized via the {@link ePeek.start} method.
@param {string} gene_name The id of the Ensembl gene to look for
*/
gBrowser.ensGene_lookup = function (gene_name) {
gBrowser.homologues(gene_name);
var url = prefix_ensgene + gene_name + ".json?format=full";
console.log("lookup url:");
console.log(url);
d3.json(url, function(error, resp) {
console.log("RESP:");
console.log(resp);
gBrowser
.species(resp.species)
.chr(resp.seq_region_name)
.from(resp.start)
.to(resp.end);
gBrowser.start();
});
};
var getChrLength = function () {
var url = prefix_chr_info + species + "/" + chr;
d3.json(url, function(error, resp) {
chr_length = resp.length;
});
};
///***********************////
/// Setters & Getters ////
///***********************////
var set_ensGene = function (ens) {
if (!arguments.length) {
return ensGene;
}
ensGene = ens;
return gBrowser;
};
/** <strong>species</strong> gets/sets the species used in the REST queries.
See for example {@link ePeek.get_gene}.
If no argument is provided, returns the current species.
Common names are allowed (human, chimp, gorilla, mouse, etc...)
Binary scientific names are also allowed with and without underscores (for example "mus_musculus" or "mus musculus")
Case is ignored.
@param {String} [species] The new species
@returns {ePeek} The original object allowing method chaining
*/
gBrowser.species = function (sp) {
if (!arguments.length) {
return species;
}
species = sp;
return gBrowser;
};
/** <strong>chr</strong> gets/sets the chr used in the next genome coordinates-based query.
If no argument is provided, returns the current chr or the default one if no one has been set before.
Strictly speaking, the arguments expects a seq_region_name, i.e. "scaffolds", etc are also considered chromosomes.
This value is used by {@link ePeek.start} to set the genomic coordinates in the plug-in view
@param {String} [chr] The new chr
@returns {ePeek} The original object allowing method chaining
*/
gBrowser.chr = function (c) {
if (!arguments.length) {
return chr;
}
chr = c;
return gBrowser;
};
/** <strong>from</strong> gets/sets the start coordinate for the next genome coordinates-based query.
If no argument is provided, returns the current start coordinate or the default one if none has been set before.
This value is used by {@link ePeek.start} to set the genomic coordinates in the plug-in view
@param {Number} [coordinte] The new start coordinate. Commas or dots are not allowed (32,341,674 or 32.341.674)
@returns {ePeek} The original object allowing method chaining
*/
gBrowser.from = function (n) {
// TODO: Allow commas and dots in numbers? eg: 32,341,674 or 32.341.674
if (!arguments.length) {
return fromPos;
}
fromPos = n;
return gBrowser;
};
/** <strong>to</strong> gets/sets the end coordinate for the next genome coordinates-based query.
If no argument is provided, returns the current end coordinate or the default one if none has been set before.
This value is used by {@link ePeek.start} to set the genomic coordinates in the plug-in view
@param {Number} [coordinate] The new end coordinate. Commas or dots are not allowed (32,341,674 or 32.341.674)
@returns {ePeek} The original object allowing method chaining
*/
gBrowser.to = function (n) {
// TODO: Allow commas and dots in numbers? eg: 32,341,674 or 32.341.674
if (!arguments.length) {
return toPos;
}
toPos = n;
return gBrowser;
};
/** <strong>gene</strong> sets the gene name for the next gene-based location.
External gene names (BRCA2) and ensembl gene identifiers (ENSG00000139618) are both allowed.
Gene-based locations have higher preference over coordinates-based locations.
@example
// Will show the correct location even if the gene name is spelled wrong
// or is not recognized by Ensembl
gB.species("human").chr(13).from(35009587).to(35214822).gene("LINC00457");
@param {String} [name] The name of the gene
@returns {ePeek} The original object allowing method chaining
*/
gBrowser.gene = function(g) {
if (!arguments.length) {
return gene;
}
gene = g;
return gBrowser;
};
/** <strong>height</strong> gets/sets the height of the plug-in.
If no argument is provided, returns the current height.
The argument should be only the number of pixels (without any suffix like "px")
@param {Number} [height] The new height (in pixels)
@returns {ePeek} The original object allowing method chaining
*/
gBrowser.height = function (h) {
// TODO: Allow suffixes like "1000px"?
// TODO: Test wrong formats
if (!arguments.length) {
return height;
}
height = h;
return gBrowser;
};
/** <strong>width</strong> gets/sets the width (in pixels) of the plug-in.
If no argument is provided, returns the current height.
The argument should be only the number of pixels (without any suffix like "px")
To re-set the width lively use the {@link ePeek.resize} method.
@param {Number} [width] The new width (in pixels)
@returns {ePeek} The original object allowing method chaining
*/
gBrowser.width = function (w) {
// TODO: Allow suffixes like "1000px"?
// TODO: Test wrong formats
if (!arguments.length) {
return width;
}
// At least min-width
if (w < min_width) {
w = min_width
}
width = w;
return gBrowser;
};
/** <strong>background_color</strong> gets/sets the background color for the view.
If no argument is provided, returns the current background color.
The argument should be a valid hexadecimal number (including the "#" prefix)
The color is internally converted to a {@link https://github.com/mbostock/d3/wiki/Colors#wiki-d3_rgb|d3.rgb} format
@param {String} [color] The new color in hexadecimal format (including the leading "#")
@returns {ePeek} The original object allowing method chaining
*/
gBrowser.background_color = function (hex) {
if (!arguments.length) {
return bgColor;
}
bgColor = d3.rgb(hex);
return gBrowser;
};
/** <strong>foreground_color</strong> gets/sets the foreground color for the view.
If no argument is provided, returns the current foreground color.
The argument should be a valid hexadecimal number (including the "#" prefix)
The color is internally converted to a {@link https://github.com/mbostock/d3/wiki/Colors#wiki-d3_rgb|d3.rgb} format
@param {String} [color] The new color in hexadecimal format (including the leading "#")
@returns {ePeek} The original object allowing method chaining
*/
gBrowser.foreground_color = function (hex) {
if (!arguments.length) {
return fgColor;
}
fgColor = d3.rgb(hex);
return gBrowser;
};
/** <strong>highlight_color</strong> gets/sets the color to be used for a highlighted gene in the view.
If no argument is provided, returns the current color for highlighting.
The argument should be a valid hexadecimal number (including the "#" prefix)
The color is internally converted to a {@link https://github.com/mbostock/d3/wiki/Colors#wiki-d3_rgb|d3.rgb} format
@param {String} [color] The new color in hexadecimal format (including the leading "#")
@returns {ePeek} The original object allowing method chaining
*/
gBrowser.highlight_color = function (hex) {
// TODO: Make highlight_color optional, defaulting to either the foreground color or a .darker() version of it
if (!arguments.length) {
return highlightColor;
}
highlightColor = d3.rgb(hex);
return gBrowser;
}
/** <strong>localREST</strong> points the queries to a local REST service to debug.
This method should be removed in "production"
*/
gBrowser.localREST = function() {
prefix = "http://127.0.0.1:3000";
prefix_region = prefix + "/feature/region/";
prefix_ensgene = prefix + "/lookup/id/";
prefix_gene = prefix + "/xrefs/symbol/";
prefix_homologues = prefix + "/homology/id/";
return gBrowser;
};
gBrowser.ease = function(e) {
if (!arguments.length) {
return curr_ease;
}
curr_ease = d3.ease(e);
return gBrowser;
};
gBrowser.allow_drag = function(b) {
if (!arguments.length) {
return drag_allowed;
}
drag_allowed = b;
return gBrowser;
};
///*********************////
/// UTILITY METHODS ////
///*********************////
// Private methods
var get_url = function () {
var url = prefix_region + species + "/" + chr + ":" + fromPos + "-" + toPos + ".json?feature=gene";
return url;
};
/** <strong>split_homologues</strong> split an array of homologues into an object containing an array of orthologues (under the 'orthologues' field)
and an array of paralogues (under the 'paralogues' field)
@param {Array} [homologues] The array containing homologues objects
@returns {Object} An object containing an array of orthologues and an array of paralogues
*/
gBrowser.split_homologues = function (homologues) {
var orthoPatt = /ortholog/;
var paraPatt = /paralog/;
var orthologues = homologues.filter(function(d){return d.type.match(orthoPatt)});
var paralogues = homologues.filter(function(d){return d.type.match(paraPatt)});
return {'orthologues' : orthologues,
'paralogues' : paralogues};
};
// Default callbacks
/** <strong>genes_layout</strong> specifies how to layout the genes.
The default layout avoid clashing between genes, hiding the labels when there are too many elements to display, etc...
The layout closure has to have several methods in its API.
@param {Callback} [callback] The callback to call to get/set the gene layout
*/
gBrowser.genes_layout = epeek_genes(); //genes_layout;
/** <strong>gene_info_callback</strong> is a callback called when a gene is selected.
It should be used to respond to mouse clicks on the genes or their names (labels).
Its default behaviour is to do nothing.
This function can be overwritten by a theme to display the gene information
in, for example, a custom way and/or place.
@param {Object} object A literal object containing the following fields:
<ul>
<li>external_name => External name of the gene</li>
<li>ID => Ensembl ID of the gene</li>
<li>description => A short description of the gene</li>
<li>logic_name => The source of the gene</li>
<li>feature_type => This is always set to gene</li>
<li>seq_region_name => The chromosome or region name the gene is located</li>
<li>start => The start coordinate in the seq_region_name</li>
<li>end => The end coordinate in the seq_region_name</li>
<li>strand => The strand in the seq_region_name</li>
</ul>
*/
gBrowser.gene_info_callback = function() {};
/** <strong>ensGenes_callback</strong> is a callback called every time a gene is searched in the
REST server in the {@link ePeek.get_gene} method.
Its default behaviour is to do nothing.
This method can be used by a theme to run some arbitrary code when a gene is found in the REST
server.
@param {Array} genes An array of genes found in the last gene-based search. Each gene is an object having the following fields:
<ul>
<li>id => The Ensembl gene id associated with the gene</li>
<li>type => This should be "gene"
</ul>
*/
gBrowser.ensGenes_callback = function() {};
/** <strong>homologues_callback</strong> is a callback called every time the homologues (orthologues + paralogues) of a gene
are found in the REST server in the {@link ePeek.homologues} method.
Its default behaviour is to do nothing.
This method can be used by a theme to do run some arbitrary code when homologues are found for this gene.
@param {Array} homologies An array of object literals representing homologies having the following fields:
<ul>
<li>id => The Ensembl Gene ID of the homolog</li>
<li>protein_id => The Ensembl Protein ID of the homolog</li>
<li>species => The species name of the homolog</li>
<li>subtype => The subtype of the homology relantionship</li>
<li>type => The type of homology</li>
</ul>
*/
gBrowser.homologues_callback = function() {};
return gBrowser;
};
// The collision detector
// The collision detector: assigns each gene a horizontal "slot" so that
// gene boxes and their labels never visually overlap. Falls back to a
// compact, label-less ("collapsed") geometry when too many slots would be
// needed for the available height.
// Fixes over the previous version: typo'd internal name
// (collition_detector -> collision_detector), unused OUTER: label, unused
// genes_to_place alias and missing statement semicolons.
var epeek_genes = function() {
    "use strict";

    var height = 150;   // View height in px (settable via .height())
    var genes = [];     // Genes placed in the previous layout round
    var xScale;         // Scale mapping genomic coordinates to pixels
    var max_slots;      // Max number of expanded slots fitting in `height`

    // Slot geometries: "expanded" shows labels, "collapsed" is used when
    // more slots are needed than fit in the view
    var slot_types = {
        'expanded' : {
            slot_height : 30,
            gene_height : 10,
            show_label : true
        },
        'collapsed' : {
            slot_height : 5,
            gene_height : 3,
            show_label : false
        }
    };
    var current_slot_type = 'expanded';

    // Main entry point: assigns a .slot to every gene in new_genes
    var genes_layout = function (new_genes, scale) {
        // Make sure every gene has a (possibly empty) name: label widths
        // are derived from external_name during collision detection
        for (var i = 0; i < new_genes.length; i++) {
            if (new_genes[i].external_name === null) {
                new_genes[i].external_name = "";
            }
        }

        max_slots = ~~(height / slot_types.expanded.slot_height) - 1;

        if (scale !== undefined) {
            genes_layout.scale(scale);
        }

        // Genes kept from the previous round keep their slot (no jumping)
        slot_keeper(new_genes, genes);

        var needed_slots = collision_detector(new_genes);
        if (needed_slots > max_slots) {
            current_slot_type = 'collapsed';
            shrink_slots(height, needed_slots);
        } else {
            current_slot_type = 'expanded';
        }

        genes = new_genes;
    };

    /** Returns the genes placed in the last layout round */
    genes_layout.genes = function () {
        return genes;
    };

    /** Returns the geometry (slot_height/gene_height/show_label) in use */
    genes_layout.gene_slot = function () {
        return slot_types[current_slot_type];
    };

    /** Gets/sets the view height (px) used to compute max_slots */
    genes_layout.height = function (h) {
        if (!arguments.length) {
            return height;
        }
        height = h;
        return genes_layout;
    };

    /** Gets/sets the coordinates-to-pixels scale */
    genes_layout.scale = function (x) {
        if (!arguments.length) {
            return xScale;
        }
        xScale = x;
        return genes_layout;
    };

    // Assigns the lowest non-clashing slot to every gene.
    // Returns the number of slots needed (highest used slot + 1).
    var collision_detector = function (genes) {
        var genes_placed = [];
        var needed_slots = 0;

        // Start from the highest pre-assigned slot still within bounds
        for (var i = 0; i < genes.length; i++) {
            if (genes[i].slot > needed_slots && genes[i].slot < max_slots) {
                needed_slots = genes[i].slot;
            }
        }

        for (var i = 0; i < genes.length; i++) {
            var genes_by_slot = sort_genes_by_slot(genes_placed);
            var this_gene = genes[i];

            // Try to honour the slot kept from the previous round
            if (this_gene.slot !== undefined && this_gene.slot < max_slots) {
                if (slot_has_space(this_gene, genes_by_slot[this_gene.slot])) {
                    genes_placed.push(this_gene);
                    continue;
                }
            }

            // Otherwise scan slots bottom-up for the first with room
            var slot = 0;
            while (true) {
                if (slot_has_space(this_gene, genes_by_slot[slot])) {
                    this_gene.slot = slot;
                    genes_placed.push(this_gene);
                    if (slot > needed_slots) {
                        needed_slots = slot;
                    }
                    break;
                }
                slot++;
            }
        }
        return needed_slots + 1;
    };

    // True if query_gene (gene body or its label, whichever is wider) does
    // not clash horizontally with any gene already placed in this slot
    var slot_has_space = function (query_gene, genes_in_this_slot) {
        if (genes_in_this_slot === undefined) {
            return true;
        }
        for (var j = 0; j < genes_in_this_slot.length; j++) {
            var subj_gene = genes_in_this_slot[j];
            if (query_gene.ID === subj_gene.ID) {
                continue;
            }
            // TODO: It may be better to have a fixed font size; 8px/char
            // is a rough estimate of the label width
            var y_label_end = subj_gene.external_name.length * 8 + xScale(subj_gene.start);
            var y1 = xScale(subj_gene.start);
            var y2 = xScale(subj_gene.end) > y_label_end ? xScale(subj_gene.end) : y_label_end;

            var x_label_end = query_gene.external_name.length * 8 + xScale(query_gene.start);
            var x1 = xScale(query_gene.start);
            var x2 = xScale(query_gene.end) > x_label_end ? xScale(query_gene.end) : x_label_end;

            if ( ((x1 < y1) && (x2 > y1)) ||
                 ((x1 > y1) && (x1 < y2)) ) {
                return false;
            }
        }
        return true;
    };

    // Copies the slot of genes that were present in the previous round so
    // that they don't jump around between updates
    var slot_keeper = function (genes, prev_genes) {
        var prev_genes_slots = genes2slots(prev_genes);
        for (var i = 0; i < genes.length; i++) {
            if (prev_genes_slots[genes[i].ID] !== undefined) {
                genes[i].slot = prev_genes_slots[genes[i].ID];
            }
        }
    };

    // Builds an {ID : slot} lookup from an array of genes
    var genes2slots = function (genes_array) {
        var hash = {};
        for (var i = 0; i < genes_array.length; i++) {
            var gene = genes_array[i];
            hash[gene.ID] = gene.slot;
        }
        return hash;
    };

    // Placeholder: the collapsed slot height is fixed for now
    var shrink_slots = function (height, needed_slots) {
        // slot_types.collapsed.slot_height = ~~(height/needed_slots);
        return;
    };

    // Groups genes into an array indexed by slot number
    var sort_genes_by_slot = function (genes) {
        var slots = [];
        for (var i = 0; i < genes.length; i++) {
            if (slots[genes[i].slot] === undefined) {
                slots[genes[i].slot] = [];
            }
            slots[genes[i].slot].push(genes[i]);
        }
        return slots;
    };

    return genes_layout;
};
// Small d3 helper: re-appends each selected node to its parent so it is
// painted last, i.e. on top of its siblings (SVG has no z-index).
d3.selection.prototype.moveToFront = function() {
    return this.each(function() {
        this.parentNode.appendChild(this);
    });
};
var epeek = function() {
"use strict";
// Default species and genome location
// TODO: Encapsulate this information in an object
var gene; // undefined
var ensGene; // undefined
var species = "human";
var chr = 7;
var fromPos = 139424940;
var toPos = 141784100;
var chr_length; // undefined
// Prefixes to use the REST API.
// These are modified in the localREST setter
// TODO: Encapsulate this information in an object
var prefix = "http://beta.rest.ensembl.org";
var prefix_region = prefix + "/feature/region/";
var prefix_ensgene = prefix + "/lookup/";
var prefix_gene = prefix + "/xrefs/symbol/";
var prefix_homologues = prefix + "/homology/id/";
var prefix_chr_info = prefix + "/assembly/info/";
// The REST response in general view
var genes = [];
// Display elements options that can be overridden by setters
// (so they are exposed in the API)
// TODO: Encapsulate this information in an object
var min_width = 300;
var width = 600;
var height = 150;
var bgColor = d3.rgb('#DDDDDD'); //#F8FBEF
var fgColor = d3.rgb('#000000');
var highlightColor = d3.rgb('#000000');
var drag_allowed = true;
var curr_ease = d3.ease("cubic-in-out");
var fivePrimeEdge;
var threePrimeEdge;
// Display elements (not directly exposed in the API)
// TODO: Encapsulate this information in an object
var svg_g;
var pane;
var xScale;
var zoomEventHandler
var xAxis;
var refresh;
var dur = 500;
// Closure to layout the genes in the view
// var genes_layout = epeek_genes().height(height);
// The id of the div element the plug-in connects to
// undefined by default
var div_id;
/** The returned closure
@alias ePeek
@namespace
@example
// Typically, the plug-in is used as follows:
var gB = epeek().width(920); // other methods can be included here
var gBTheme = epeek_theme(); // other methods can be included here
gBTheme(gB, document.getElementById('DOM_element_id');
*/
var gBrowser = function(div) {
div_id = d3.select(div).attr("id");
gBrowser.genes_layout.height(height); //genes_layout;
// The original div is classed with the ePeek class
d3.select(div)
.classed("ePeek", true);
// The Browser div
var browserDiv = d3.select(div);
var locRow = browserDiv
.append("div")
.attr("class", "ePeek_locRow");
var groupDiv = browserDiv
.append("div")
.attr("class", "ePeek_groupDiv");
// The SVG
svg_g = groupDiv
.append("svg")
.attr("class", "ePeek_svg")
.attr("width", width)
.attr("height", height)
.style("background-color", bgColor)
.append("g")
.attr("transform", "translate(0,20)")
.append("g")
.attr("class", "ePeek_g");
// The Zooming/Panning Pane
pane = svg_g
.append("rect")
.attr("class", "ePeek_pane")
.attr("id", "ePeek_" + div_id + "_pane")
.attr("width", width)
.attr("height", height)
.style("fill", fgColor);
var tooWide_text = svg_g
.append("text")
.attr("class", "ePeek_wideOK_text")
.attr("id", "ePeek_" + div_id + "_tooWide")
.attr("fill", bgColor)
.text("Region too wide");
// TODO: I don't know if this is the best way (and portable) way
// of centering the text in the text
var bb = tooWide_text[0][0].getBBox();
tooWide_text
.attr("x", ~~(width/2 - bb.width/2))
.attr("y", ~~(height/2 - bb.height/2));
// The locRow
locRow
.append("span")
.text("Current location: ");
locRow
.append("span")
.attr("id", "ePeek_" + div_id + "_species")
.text(species);
locRow
.append("span")
.text(" (");
locRow
.append("span")
.attr("id", "ePeek_" + div_id + "_chr")
.text(chr);
locRow
.append("span")
.text(":");
locRow
.append("span")
.attr("id", "ePeek_" + div_id + "_from")
.text(fromPos);
locRow
.append("span")
.text("-");
locRow
.append("span")
.attr("id", "ePeek_" + div_id + "_to")
.text(toPos);
locRow
.append("span")
.text(")");
};
/** <strong>startOnOrigin</strong> decides if the plug-in needs to be started based on a
gene or genome location. If centered in a gene, it tries to fetch the genomic
coordinates associated with the given gene.
*/
gBrowser.startOnOrigin = function () {
if (gBrowser.gene() !== undefined) {
gBrowser.get_gene(gBrowser.gene());
} else {
gBrowser.start();
}
return;
};
/** <strong>start</strong> plots the genome browser and starts listening for mouse events on it.
It always uses genomic coordinates, so if the coordinates associated with a given gene
needs to be taken into account, {@link ePeek.startOnOrigin} has to be used instead.
*/
gBrowser.start = function () {
var url = get_url();
console.log("URL: ");
console.log(url);
// We get the chromosome length, this is done async, but I assume that
// the response will be available on time in the interactive process
getChrLength();
d3.json(url, function (error, resp) {
console.log("RESP: ");
console.log(resp);
genes = resp;
plot();
update_layout();
});
};
var plot = function () {
xScale = d3.scale.linear()
.domain([fromPos, toPos])
.range([0, width]);
gBrowser.genes_layout(genes, xScale);
xAxis = d3.svg.axis()
.scale(xScale)
.orient("top");
// zoom
if (drag_allowed) {
zoomEventHandler = d3.behavior.zoom();
pane.call( zoomEventHandler.x(xScale).on("zoom", zoom) );
}
};
var update_layout = function () {
var newdata = gBrowser.genes_layout.genes();
var g_genes = svg_g.selectAll(".ePeek_gs")
.data(newdata, function (d) {
return d.ID
});
g_genes.selectAll(".ePeek_gene")
// The data needs to be re-joint for all the sub-elements?
.data(newdata, function (d) {return d.ID})
.transition()
.duration(500)
.attr("y", function (d) {
return gBrowser.genes_layout.gene_slot().slot_height * d.slot;
})
.attr("height", gBrowser.genes_layout.gene_slot().gene_height)
g_genes.selectAll(".ePeek_name")
// The data needs to be re-joint for all the sub-elements?
.data(newdata, function (d) {return d.ID})
.transition()
.duration(500)
// .each(function() {console.log(d3.select(this).style('font-size'))})
.attr("y", function (d) {
return (gBrowser.genes_layout.gene_slot().slot_height * d.slot) + 25
})
.text(function (d) {
if (gBrowser.genes_layout.gene_slot().show_label) {
return d.external_name;
} else {
return "";
}
});
g_genes
.enter()
.append("g")
.attr("class", "ePeek_gs")
.call(plot_gene)
g_genes.exit().remove();
g_genes.on("click", function (d) {gBrowser.gene_info_callback(d)});
update();
};
var plot_gene = function (new_gene) {
new_gene
.append("rect")
.attr("class", "ePeek_gene")
.attr("x", function (d) {
return (xScale(d.start));
})
.attr("y", function (d) {
console.log("D.SLOT:" + d.slot);
console.log("SLOT_HEIGHT:" + gBrowser.genes_layout.gene_slot().slot_height);
return gBrowser.genes_layout.gene_slot().slot_height * d.slot;
})
.attr("width", function (d) {
return (xScale(d.end) - xScale(d.start));
})
.attr("height", gBrowser.genes_layout.gene_slot().gene_height) // This has to be dynamic now
.attr("fill", bgColor)
.transition().duration(dur).attr("fill", function (d) {
return fgColor;
});
new_gene
.append("text")
.attr("class", "ePeek_name")
.attr("x", function (d) {
return (xScale(d.start));
})
.attr("y", function (d) {
return (gBrowser.genes_layout.gene_slot().slot_height * d.slot) + 25 // TODO: This 25 is artificial. It is supposed to give enough room for the label
// i.e. the font vertical size is less than 25.
// Maybe it would be better to have a fixed font-size at least?
})
.attr("fill", bgColor)
.text(function (d) {
if (gBrowser.genes_layout.gene_slot().show_label) {
return d.external_name
} else {
return ""
}
})
.style ("font-weight", function (d) {
return "normal";
})
.transition().duration(dur).attr("fill", function (d) {
return fgColor;
});
};
var update = function () {
svg_g.call(xAxis);
var g_genes = svg_g.selectAll(".ePeek_gs");
g_genes.select(".ePeek_gene")
.attr("x", function (d) {
return (xScale(d.start))
})
.attr("width", function (d) {
return (xScale(d.end) - xScale(d.start))
});
g_genes.select(".ePeek_name")
.attr("x", function (d) {
return (xScale(d.start));
});
// loc_row
var xScale_domain = xScale.domain();
d3.select("#ePeek_" + div_id + "_species")
.text(species); // Only if cross-species is allowed! This can only change if Jumped from searchBox or ortholog selection
d3.select("#ePeek_" + div_id + "_chr")
.text(chr);
d3.select("#ePeek_" + div_id + "_from")
.text(~~xScale_domain[0]);
d3.select("#ePeek_" + div_id + "_to")
.text(~~xScale_domain[1]);
};
/** <strong>zoom</strong> zooms in or out
*/
gBrowser.zoom = function (factor) {
move(factor, 0);
};
var move = function (factor, direction) {
var oldDomain = xScale.domain();
var span = oldDomain[1] - oldDomain[0];
var offset = (span * factor) - span;
var newDomain;
switch (direction) {
case -1 :
newDomain = [(~~oldDomain[0] - offset), ~~(oldDomain[1] - offset)];
break;
case 1 :
newDomain = [(~~oldDomain[0] + offset), ~~(oldDomain[1] - offset)];
break;
case 0 :
newDomain = [oldDomain[0] - ~~(offset/2), oldDomain[1] + (~~offset/2)];
}
var interpolator = d3.interpolateNumber(oldDomain[0], newDomain[0]);
var ease = gBrowser.ease();
var x = 0;
d3.timer(function(d) {
var curr_start = interpolator(ease(x));
var curr_end;
switch (direction) {
case -1 :
curr_end = curr_start + span;
break;
case 1 :
curr_end = curr_start + span;
break;
case 0 :
curr_end = oldDomain[1] + oldDomain[0] - curr_start;
break;
}
var currDomain = [curr_start, curr_end];
xScale.domain(currDomain);
zoom();
x+=0.02;
return x>1;
});
};
gBrowser.right = function (factor) {
// It doesn't make sense factors < 1 for left/right moves
if (factor > 0) {
move(factor, 1);
}
};
gBrowser.left = function (factor) {
// It doesn't make sense factors < 1 for left/right moves
if (factor > 0) {
move(factor, -1);
}
};
// Clamps the current domain to the chromosome boundaries, then schedules a
// (debounced) re-fetch of the genes in view and redraws.
// fivePrimeEdge / threePrimeEdge remember where the view was when it first
// ran past an end of the chromosome, so repeated pan events don't creep.
var zoom = function () {
    // This fixes before-the-beginning panning
    var currDomain = xScale.domain();
    if (currDomain[0] < 0) {
        // First time past the 5' end: freeze the right edge of the view
        if (fivePrimeEdge === undefined) {
            fivePrimeEdge = currDomain[1]
        }
        currDomain = ([0, fivePrimeEdge]);
        xScale.domain(currDomain);
        // Keep the d3 zoom behaviour in sync with the clamped scale
        zoomEventHandler.x(xScale);
        // var bestEnd = currDomain[1] < 1000 ? 1000 : ~~currDomain[1];
        // xScale.domain([0,bestEnd]);
    } else {
        fivePrimeEdge = undefined;
    }
    // This fixes pass-the-ending panning
    if (currDomain[1] > chr_length) {
        // First time past the 3' end: freeze the left edge of the view
        if (threePrimeEdge === undefined) {
            threePrimeEdge = currDomain[0];
        }
        // var bestStart = currDomain[0] > (chr_length - 1000) ? (chr_length - 1000) : ~~currDomain[0] < 0 ? 0 : ~~currDomain[0];
        // xScale.domain([bestStart,chr_length]);
        xScale.domain([threePrimeEdge, chr_length]);
        zoomEventHandler.x(xScale);
    } else {
        threePrimeEdge = undefined;
    }
    // Debounce the REST request: only fetch once panning/zooming pauses
    // for 300ms, cancelling any previously scheduled fetch.
    window.clearTimeout(refresh);
    refresh = window.setTimeout(function(){
        var currDomain = xScale.domain();
        gBrowser.from(~~currDomain[0]);
        gBrowser.to(~~currDomain[1]);
        console.log("New Pos:" + fromPos + "-" + toPos);
        var url = get_url();
        console.log(url);
        d3.json(url, function (error, resp) {
            if (error !== null) {
                // Request failed (e.g. region too wide): darken the pane
                // and show the "too wide" message on top.
                d3.select("#ePeek_" + div_id + "_pane")
                    .classed("ePeek_dark_pane", true);
                d3.select("#ePeek_" + div_id + "_tooWide")
                    .classed("ePeek_tooWide_text", true)
                    .moveToFront();
            } else {
                // Success: clear any error styling and lay out the new genes
                d3.select("#ePeek_" + div_id + "_pane")
                    .classed("ePeek_dark_pane", false);
                d3.select("#ePeek_" + div_id + "_tooWide")
                    .classed("ePeek_tooWide_text", false)
                gBrowser.genes_layout(resp, xScale);
                update_layout();
            }
        });
    }, 300); //
    // Redraw immediately with the genes we already have
    update();
};
// Fades both the gene glyph (its "fill" attribute) and its text label
// (its "fill" style) to the given color.
var change_gene_color = function (genes, color) {
    var fade_ms = 500; // transition duration
    genes.select(".ePeek_gene").transition().duration(fade_ms).attr("fill", color);
    genes.select(".ePeek_name").transition().duration(fade_ms).style("fill", color);
};
/** <strong>unhighlight_gene</strong> removes the highlight from genes previously highlighted with {@link gBrowser.highlight_gene}.
    The given callback is invoked once per gene with that gene's data and must return true or false;
    genes evaluating to true are painted back with the foreground color.
    @param {Callback} callback A closure that evaluates all the genes.
    @returns {ePeek} The original object allowing method chaining
*/
gBrowser.unhighlight_gene = function (cbak) {
    // TODO: This selects genes across *every* gBrowser on the page. Including
    // div_id in the class name would scope the selection to this instance.
    var matches = d3.selectAll(".ePeek_gs").filter(function (d) {
        return cbak(d);
    });
    change_gene_color(matches, fgColor);
    return gBrowser;
};
/** <strong>highlight_gene</strong> paints genes with the current {@link gBrowser.highlight_color}.
    The given callback is invoked once per gene with that gene's data and must return true or false;
    genes evaluating to true are highlighted.
    Note that previously highlighted genes are not reset; call
    {@link gBrowser.unhighlight_gene} first if needed.
    @param {Callback} callback A closure that evaluates all the genes.
    @returns {ePeek} The original object allowing method chaining
*/
gBrowser.highlight_gene = function (cbak) {
    // TODO: This selects genes across *every* gBrowser on the page. Including
    // div_id in the class name would scope the selection to this instance.
    var matches = d3.selectAll(".ePeek_gs").filter(function (d) {
        return cbak(d);
    });
    change_gene_color(matches, highlightColor);
    return gBrowser;
};
// public methods (API)

/** <strong>resize</strong> changes the width (in pixels) of the genome view on
    the fly and redraws it. It is used, for example, by the mobile theme to
    respond to orientation changes in the device.
    @param {Number} width New width (in pixels)
*/
gBrowser.resize = function (w) {
    d3.select(".ePeek_svg").attr("width", w);                  // the svg itself
    d3.select("#ePeek_" + div_id + "_pane").attr("width", w);  // the zoom/pan pane
    gBrowser.width(w); // remember the new width (clamped to the minimum)
    // Replot with the new geometry
    plot();
    update();
};
/** <strong>get_gene</strong> retrieves from the remote (REST) server the Ensembl
    genes associated with the given external name.
    If the lookup succeeds, the {@link ePeek.ensGenes_callback} callback is invoked
    with the found Ensembl genes and the view is relocated on the first of them.
    If the lookup fails (server error, undefined response or no matching gene),
    the plug-in is started with the current genomic coordinates.
    @param {string} gene_name The gene external name to look up
*/
gBrowser.get_gene = function (gene_name) {
    var url = prefix_gene + species + "/" + gene_name + ".json?object=gene";
    console.log("URL: " + url);
    d3.json(url, function(error, resp) {
        // Fix: on a failed request resp is undefined and the filter below
        // used to throw a TypeError. Fall back to the current coordinates
        // instead (same behaviour as "gene not found").
        if (error !== null || resp === undefined) {
            gBrowser.start();
            return;
        }
        resp = resp.filter(function(d) {
            // keep only ids starting with "ENS", i.e. avoid LRG genes
            // (maybe the REST service doesn't return those now?)
            return !d.id.indexOf("ENS");
        });
        console.log("RESP:");
        console.log(resp);
        if (resp[0] !== undefined) {
            set_ensGene(resp[0].id);
            gBrowser.ensGenes_callback(resp);
            gBrowser.ensGene_lookup(resp[0].id);
        } else {
            gBrowser.start();
        }
    });
};
///*********************////
///  DATA RETRIEVERS   ////
///*********************////

/** <strong>homologues</strong> looks for homologues of the given gene.
    Once retrieved, the {@link ePeek.homologues_callback} callback is invoked
    with the array of homologues as its argument.
    @param {string} ensGene The id of the gene to look for homologues
*/
gBrowser.homologues = function (ensGene) {
    var url = prefix_homologues + ensGene + ".json?format=condensed;sequence=none;type=all";
    console.log(url);
    d3.json(url, function (error, resp) {
        console.log("HOMOLOGUES RESP: ");
        console.log(resp);
        // When there are no homologues the service currently answers with an
        // error (undefined resp) -- this is being changed in the REST API.
        if (resp === undefined) {
            return;
        }
        gBrowser.homologues_callback(resp.data[0].homologies);
    });
};
/** <strong>ensGene_lookup</strong> retrieves the coordinates of an Ensembl gene
    from the remote server and then (re)initializes the plug-in on them via the
    {@link ePeek.start} method. Homologues are fetched along the way.
    @param {string} gene_name The id of the Ensembl gene to look for
*/
gBrowser.ensGene_lookup = function (gene_name) {
    // Kick off the homologues retrieval as well
    gBrowser.homologues(gene_name);
    var url = prefix_ensgene + gene_name + ".json?format=full";
    console.log("lookup url:");
    console.log(url);
    d3.json(url, function (error, resp) {
        console.log("RESP:");
        console.log(resp);
        // Point the view at the gene's coordinates and start
        gBrowser.species(resp.species);
        gBrowser.chr(resp.seq_region_name);
        gBrowser.from(resp.start);
        gBrowser.to(resp.end);
        gBrowser.start();
    });
};
// Asks the REST service for the length of the current chromosome and caches
// it in chr_length (used to clamp panning at the 3' end).
var getChrLength = function () {
    d3.json(prefix_chr_info + species + "/" + chr, function (error, resp) {
        chr_length = resp.length;
    });
};
///***********************////
///  Setters & Getters  ////
///***********************////

// Gets/sets the Ensembl gene id the view is currently centred on.
var set_ensGene = function (ens) {
    if (arguments.length === 0) {
        return ensGene;   // getter
    }
    ensGene = ens;        // setter
    return gBrowser;      // chainable
};
/** <strong>species</strong> gets/sets the species used in the REST queries
    (see for example {@link ePeek.get_gene}).
    With no argument, returns the current species.
    Common names (human, chimp, gorilla, mouse, ...) and binary scientific names
    with or without underscores ("mus_musculus", "mus musculus") are accepted;
    case is ignored.
    @param {String} [species] The new species
    @returns {ePeek} The original object allowing method chaining
*/
gBrowser.species = function (sp) {
    if (arguments.length === 0) {
        return species;   // getter
    }
    species = sp;         // setter
    return gBrowser;      // chainable
};
/** <strong>chr</strong> gets/sets the chromosome used in the next genome
    coordinates-based query.
    With no argument, returns the current chr (or the default if none was set).
    Strictly speaking the argument is a seq_region_name, i.e. scaffolds etc. are
    also considered chromosomes. Used by {@link ePeek.start} to set the genomic
    coordinates in the plug-in view.
    @param {String} [chr] The new chr
    @returns {ePeek} The original object allowing method chaining
*/
gBrowser.chr = function (c) {
    if (arguments.length === 0) {
        return chr;       // getter
    }
    chr = c;              // setter
    return gBrowser;      // chainable
};
/** <strong>from</strong> gets/sets the start coordinate for the next genome
    coordinates-based query.
    With no argument, returns the current start coordinate (or the default if
    none was set). Used by {@link ePeek.start} to set the genomic coordinates.
    @param {Number} [coordinate] The new start coordinate. Commas or dots are not allowed (32,341,674 or 32.341.674)
    @returns {ePeek} The original object allowing method chaining
*/
gBrowser.from = function (n) {
    // TODO: Allow commas and dots in numbers? eg: 32,341,674 or 32.341.674
    if (arguments.length === 0) {
        return fromPos;   // getter
    }
    fromPos = n;          // setter
    return gBrowser;      // chainable
};
/** <strong>to</strong> gets/sets the end coordinate for the next genome
    coordinates-based query.
    With no argument, returns the current end coordinate (or the default if
    none was set). Used by {@link ePeek.start} to set the genomic coordinates.
    @param {Number} [coordinate] The new end coordinate. Commas or dots are not allowed (32,341,674 or 32.341.674)
    @returns {ePeek} The original object allowing method chaining
*/
gBrowser.to = function (n) {
    // TODO: Allow commas and dots in numbers? eg: 32,341,674 or 32.341.674
    if (arguments.length === 0) {
        return toPos;     // getter
    }
    toPos = n;            // setter
    return gBrowser;      // chainable
};
/** <strong>gene</strong> gets/sets the gene name for the next gene-based location.
    External gene names (BRCA2) and Ensembl gene identifiers (ENSG00000139618)
    are both allowed. Gene-based locations take precedence over
    coordinates-based locations.
    @example
    // Will show the correct location even if the gene name is spelled wrong
    // or is not recognized by Ensembl
    gB.species("human").chr(13).from(35009587).to(35214822).gene("LINC00457");
    @param {String} [name] The name of the gene
    @returns {ePeek} The original object allowing method chaining
*/
gBrowser.gene = function (g) {
    if (arguments.length === 0) {
        return gene;      // getter
    }
    gene = g;             // setter
    return gBrowser;      // chainable
};
/** <strong>height</strong> gets/sets the height of the plug-in.
    With no argument, returns the current height.
    The argument should be the bare number of pixels (no "px" suffix).
    @param {Number} [height] The new height (in pixels)
    @returns {ePeek} The original object allowing method chaining
*/
gBrowser.height = function (h) {
    // TODO: Allow suffixes like "1000px"?
    // TODO: Test wrong formats
    if (arguments.length === 0) {
        return height;    // getter
    }
    height = h;           // setter
    return gBrowser;      // chainable
};
/** <strong>width</strong> gets/sets the width (in pixels) of the plug-in.
    With no argument, returns the current width.
    The argument should be the bare number of pixels (no "px" suffix); values
    below the minimum width are clamped to it.
    To re-set the width lively use the {@link ePeek.resize} method.
    @param {Number} [width] The new width (in pixels)
    @returns {ePeek} The original object allowing method chaining
*/
gBrowser.width = function (w) {
    // TODO: Allow suffixes like "1000px"?
    // TODO: Test wrong formats
    if (arguments.length === 0) {
        return width;     // getter
    }
    // never narrower than min_width
    width = (w < min_width) ? min_width : w;
    return gBrowser;      // chainable
};
/** <strong>background_color</strong> gets/sets the background color for the view.
    With no argument, returns the current background color.
    The argument should be a valid hexadecimal color (including the "#" prefix);
    it is stored internally as a {@link https://github.com/mbostock/d3/wiki/Colors#wiki-d3_rgb|d3.rgb}.
    @param {String} [color] The new color in hexadecimal format (including the leading "#")
    @returns {ePeek} The original object allowing method chaining
*/
gBrowser.background_color = function (hex) {
    if (arguments.length === 0) {
        return bgColor;          // getter
    }
    bgColor = d3.rgb(hex);       // setter (normalized to d3.rgb)
    return gBrowser;             // chainable
};
/** <strong>foreground_color</strong> gets/sets the foreground color for the view.
    With no argument, returns the current foreground color.
    The argument should be a valid hexadecimal color (including the "#" prefix);
    it is stored internally as a {@link https://github.com/mbostock/d3/wiki/Colors#wiki-d3_rgb|d3.rgb}.
    @param {String} [color] The new color in hexadecimal format (including the leading "#")
    @returns {ePeek} The original object allowing method chaining
*/
gBrowser.foreground_color = function (hex) {
    if (arguments.length === 0) {
        return fgColor;          // getter
    }
    fgColor = d3.rgb(hex);       // setter (normalized to d3.rgb)
    return gBrowser;             // chainable
};
/** <strong>highlight_color</strong> gets/sets the color used for highlighted genes.
    With no argument, returns the current highlight color.
    The argument should be a valid hexadecimal color (including the "#" prefix);
    it is stored internally as a {@link https://github.com/mbostock/d3/wiki/Colors#wiki-d3_rgb|d3.rgb}.
    @param {String} [color] The new color in hexadecimal format (including the leading "#")
    @returns {ePeek} The original object allowing method chaining
*/
gBrowser.highlight_color = function (hex) {
    // TODO: Make highlight_color optional, defaulting to either the foreground
    // color or a .darker() version of it
    if (arguments.length === 0) {
        return highlightColor;        // getter
    }
    highlightColor = d3.rgb(hex);     // setter (normalized to d3.rgb)
    return gBrowser;                  // chainable
};
/** <strong>localREST</strong> points all queries to a local REST service for
    debugging. This method should be removed in "production".
    @returns {ePeek} The original object allowing method chaining
*/
gBrowser.localREST = function () {
    prefix = "http://127.0.0.1:3000";
    // rebuild every endpoint on top of the local prefix
    prefix_region     = prefix + "/feature/region/";
    prefix_ensgene    = prefix + "/lookup/id/";
    prefix_gene       = prefix + "/xrefs/symbol/";
    prefix_homologues = prefix + "/homology/id/";
    return gBrowser;
};
// Gets/sets the easing function used for animated moves (name is resolved
// through d3.ease).
gBrowser.ease = function (e) {
    if (arguments.length === 0) {
        return curr_ease;       // getter
    }
    curr_ease = d3.ease(e);     // setter
    return gBrowser;            // chainable
};
// Gets/sets whether mouse-based panning/zooming is enabled.
gBrowser.allow_drag = function (b) {
    if (arguments.length === 0) {
        return drag_allowed;    // getter
    }
    drag_allowed = b;           // setter
    return gBrowser;            // chainable
};
///*********************////
///  UTILITY METHODS  ////
///*********************////

// Private methods

// Builds the region query for the current view:
// <prefix_region><species>/<chr>:<from>-<to>.json?feature=gene
var get_url = function () {
    return prefix_region + species + "/" + chr + ":" + fromPos + "-" + toPos + ".json?feature=gene";
};
/** <strong>split_homologues</strong> splits an array of homologues into an
    object with an array of orthologues (under the 'orthologues' field) and an
    array of paralogues (under the 'paralogues' field).
    @param {Array} [homologues] The array containing homologues objects
    @returns {Object} An object containing an array of orthologues and an array of paralogues
*/
gBrowser.split_homologues = function (homologues) {
    var is_ortho = function (d) { return d.type.match(/ortholog/); };
    var is_para  = function (d) { return d.type.match(/paralog/); };
    return {
        'orthologues' : homologues.filter(is_ortho),
        'paralogues'  : homologues.filter(is_para)
    };
};
// Default callbacks

/** <strong>genes_layout</strong> specifies how to layout the genes.
    The default layout avoids clashing between genes, hiding the labels when there are too many elements to display, etc...
    The layout closure has to have several methods in its API.
    @param {Callback} [callback] The callback to call to get/set the gene layout
*/
gBrowser.genes_layout = epeek_genes(); // default collision-detecting layout (defined below)

/** <strong>gene_info_callback</strong> is a callback called when a gene is selected.
    It should be used to respond to mouse clicks on the genes or their names (labels).
    Its default behaviour is to do nothing.
    This function can be overwritten by a theme to display the gene information
    in, for example, a custom way and/or place.
    @param {Object} object A literal object containing the following fields:
    <ul>
    <li>external_name  => External name of the gene</li>
    <li>ID             => Ensembl ID of the gene</li>
    <li>description    => A short description of the gene</li>
    <li>logic_name     => The source of the gene</li>
    <li>feature_type   => This is always set to gene</li>
    <li>seq_region_name => The chromosome or region name the gene is located</li>
    <li>start          => The start coordinate in the seq_region_name</li>
    <li>end            => The end coordinate in the seq_region_name</li>
    <li>strand         => The strand in the seq_region_name</li>
    </ul>
*/
gBrowser.gene_info_callback = function() {};

/** <strong>ensGenes_callback</strong> is a callback called every time a gene is searched in the
    REST server in the {@link ePeek.get_gene} method.
    Its default behaviour is to do nothing.
    This method can be used by a theme to run some arbitrary code when a gene is found in the REST
    server.
    @param {Array} genes An array of genes found in the last gene-based search. Each gene is an object having the following fields:
    <ul>
    <li>id   => The Ensembl gene id associated with the gene</li>
    <li>type => This should be "gene"</li>
    </ul>
*/
gBrowser.ensGenes_callback = function() {};

/** <strong>homologues_callback</strong> is a callback called every time the homologues (orthologues + paralogues) of a gene
    are found in the REST server in the {@link ePeek.homologues} method.
    Its default behaviour is to do nothing.
    This method can be used by a theme to run some arbitrary code when homologues are found for this gene.
    @param {Array} homologies An array of object literals representing homologies having the following fields:
    <ul>
    <li>id         => The Ensembl Gene ID of the homolog</li>
    <li>protein_id => The Ensembl Protein ID of the homolog</li>
    <li>species    => The species name of the homolog</li>
    <li>subtype    => The subtype of the homology relationship</li>
    <li>type       => The type of homology</li>
    </ul>
*/
gBrowser.homologues_callback = function() {};
return gBrowser;
};
// The collision detector: assigns genes to horizontal "slots" so that their
// boxes and labels never overlap within a slot.
var epeek_genes = function () {
    "use strict";

    var height = 150;   // vertical space available for the slots (px)
    var genes = [];     // genes from the previous layout round (their slots are re-used)
    var xScale;         // genomic position -> px
    var max_slots;      // how many expanded slots fit in `height`

    // Two display modes: roomy labelled slots, or thin unlabelled bars.
    var slot_types = {
        'expanded' : {
            slot_height : 30,
            gene_height : 10,
            show_label : true
        },
        'collapsed' : {
            slot_height : 5,
            gene_height : 3,
            show_label : false
        }
    };
    var current_slot_type = 'expanded';

    // Main entry point: lays out new_genes (optionally updating the scale),
    // deciding between expanded and collapsed display.
    var genes_layout = function (new_genes, scale) {
        // Genes without a name get an empty string so the label-width
        // computations below never dereference null.
        for (var i = 0; i < new_genes.length; i++) {
            if (new_genes[i].external_name === null) {
                new_genes[i].external_name = "";
            }
        }

        max_slots = ~~(height / slot_types.expanded.slot_height) - 1;

        if (scale !== undefined) {
            genes_layout.scale(scale);
        }

        keep_prev_slots(new_genes, genes);
        var needed_slots = collision_detector(new_genes);
        if (needed_slots > max_slots) {
            current_slot_type = 'collapsed';
            shrink_slots(height, needed_slots);
        } else {
            current_slot_type = 'expanded';
        }
        genes = new_genes;
    };

    // The genes laid out in the last round.
    genes_layout.genes = function () {
        return genes;
    };

    // Geometry of the current slot type (expanded or collapsed).
    genes_layout.gene_slot = function () {
        return slot_types[current_slot_type];
    };

    // Gets/sets the available height (px).
    genes_layout.height = function (h) {
        if (arguments.length === 0) {
            return height;
        }
        height = h;
        return genes_layout;
    };

    // Gets/sets the position scale.
    genes_layout.scale = function (x) {
        if (arguments.length === 0) {
            return xScale;
        }
        xScale = x;
        return genes_layout;
    };

    // Assigns a slot to every gene (keeping remembered slots when still free)
    // and returns how many slots are needed in total.
    var collision_detector = function (genes) {
        var placed = [];
        var needed_slots = 0;
        for (var i = 0; i < genes.length; i++) {
            if (genes[i].slot > needed_slots && genes[i].slot < max_slots) {
                needed_slots = genes[i].slot;
            }
        }
        for (var i = 0; i < genes.length; i++) {
            var by_slot = sort_genes_by_slot(placed);
            var curr = genes[i];
            // A slot remembered from a previous round is kept when still free.
            if (curr.slot !== undefined && curr.slot < max_slots &&
                slot_has_space(curr, by_slot[curr.slot])) {
                placed.push(curr);
                continue;
            }
            // Otherwise find the first slot with room.
            var slot = 0;
            while (!slot_has_space(curr, by_slot[slot])) {
                slot++;
            }
            curr.slot = slot;
            placed.push(curr);
            if (slot > needed_slots) {
                needed_slots = slot;
            }
        }
        return needed_slots + 1;
    };

    // True when query_gene fits in the slot currently holding occupants.
    // A gene's horizontal extent is its drawn box or its label, whichever
    // reaches further (8px per label character -- TODO: a fixed font size
    // might be better).
    var slot_has_space = function (query_gene, occupants) {
        if (occupants === undefined) {
            return true;
        }
        for (var j = 0; j < occupants.length; j++) {
            var other = occupants[j];
            if (query_gene.ID === other.ID) {
                continue; // a gene never collides with itself
            }
            var o_label_end = other.external_name.length * 8 + xScale(other.start);
            var o1 = xScale(other.start);
            var o2 = xScale(other.end) > o_label_end ? xScale(other.end) : o_label_end;
            var q_label_end = query_gene.external_name.length * 8 + xScale(query_gene.start);
            var q1 = xScale(query_gene.start);
            var q2 = xScale(query_gene.end) > q_label_end ? xScale(query_gene.end) : q_label_end;
            if (((q1 < o1) && (q2 > o1)) ||
                ((q1 > o1) && (q1 < o2))) {
                return false;
            }
        }
        return true;
    };

    // Copies the slot of every gene seen in the previous round onto the new
    // gene objects (matched by ID) for visual stability.
    var keep_prev_slots = function (curr_genes, prev_genes) {
        var prev_slot_of = genes2slots(prev_genes);
        for (var i = 0; i < curr_genes.length; i++) {
            if (prev_slot_of[curr_genes[i].ID] !== undefined) {
                curr_genes[i].slot = prev_slot_of[curr_genes[i].ID];
            }
        }
    };

    // ID -> slot lookup table.
    var genes2slots = function (genes_array) {
        var slot_of = {};
        for (var i = 0; i < genes_array.length; i++) {
            slot_of[genes_array[i].ID] = genes_array[i].slot;
        }
        return slot_of;
    };

    // Collapsed slots keep their fixed height for now.
    // (dynamic alternative: slot_types.collapsed.slot_height = ~~(height/needed_slots))
    var shrink_slots = function (height, needed_slots) {
        return;
    };

    // Groups the given genes into an array indexed by slot number.
    var sort_genes_by_slot = function (genes) {
        var slots = [];
        for (var i = 0; i < genes.length; i++) {
            if (slots[genes[i].slot] === undefined) {
                slots[genes[i].slot] = [];
            }
            slots[genes[i].slot].push(genes[i]);
        }
        return slots;
    };

    return genes_layout;
}
// Re-appends each node in the selection to its parent so that it is painted
// on top of its siblings (SVG has no z-index). Extends the d3 selection
// prototype, so it is available on every selection once this file is loaded.
d3.selection.prototype.moveToFront = function() {
    return this.each(function() {
        this.parentNode.appendChild(this);
    });
};
| bugfix: working with mixed button-based and mouse-based panning/zooming
| lib/ePeek.js | bugfix: working with mixed button-based and mouse-based panning/zooming | <ide><path>ib/ePeek.js
<ide>
<ide> var currDomain = [curr_start, curr_end];
<ide> xScale.domain(currDomain);
<del> zoom();
<add> zoom(xScale);
<ide> x+=0.02;
<ide> return x>1;
<ide> });
<ide> }
<ide> };
<ide>
<del> var zoom = function () {
<add> var zoom = function (new_xScale) {
<add> if (new_xScale !== undefined) {
<add> zoomEventHandler.x(new_xScale);
<add> }
<ide> // This fixes before-the-beginning panning
<ide> var currDomain = xScale.domain();
<ide> if (currDomain[0] < 0) { |
|
Java | apache-2.0 | b1accb4e42e528696dbc6be47e7f3d38df5636bc | 0 | ewestfal/rice,rojlarge/rice-kc,shahess/rice,rojlarge/rice-kc,shahess/rice,jwillia/kc-rice1,gathreya/rice-kc,kuali/kc-rice,bsmith83/rice-1,bhutchinson/rice,bsmith83/rice-1,bhutchinson/rice,cniesen/rice,cniesen/rice,jwillia/kc-rice1,bhutchinson/rice,geothomasp/kualico-rice-kc,UniversityOfHawaiiORS/rice,smith750/rice,bhutchinson/rice,UniversityOfHawaiiORS/rice,ewestfal/rice-svn2git-test,gathreya/rice-kc,cniesen/rice,ewestfal/rice,sonamuthu/rice-1,gathreya/rice-kc,geothomasp/kualico-rice-kc,cniesen/rice,geothomasp/kualico-rice-kc,kuali/kc-rice,UniversityOfHawaiiORS/rice,ewestfal/rice,ewestfal/rice-svn2git-test,rojlarge/rice-kc,bsmith83/rice-1,shahess/rice,gathreya/rice-kc,smith750/rice,gathreya/rice-kc,jwillia/kc-rice1,shahess/rice,cniesen/rice,sonamuthu/rice-1,bhutchinson/rice,smith750/rice,geothomasp/kualico-rice-kc,ewestfal/rice-svn2git-test,rojlarge/rice-kc,UniversityOfHawaiiORS/rice,kuali/kc-rice,sonamuthu/rice-1,ewestfal/rice,jwillia/kc-rice1,geothomasp/kualico-rice-kc,shahess/rice,rojlarge/rice-kc,ewestfal/rice-svn2git-test,kuali/kc-rice,UniversityOfHawaiiORS/rice,ewestfal/rice,sonamuthu/rice-1,smith750/rice,smith750/rice,jwillia/kc-rice1,bsmith83/rice-1,kuali/kc-rice | /*
* Copyright 2007 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 1.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl1.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.rice.kew.engine.node;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.MDC;
import org.kuali.rice.kew.actionitem.ActionItem;
import org.kuali.rice.kew.actionrequest.ActionRequestValue;
import org.kuali.rice.kew.doctype.bo.DocumentType;
import org.kuali.rice.kew.engine.RouteContext;
import org.kuali.rice.kew.engine.RouteHelper;
import org.kuali.rice.kew.exception.ResourceUnavailableException;
import org.kuali.rice.kew.exception.RouteManagerException;
import org.kuali.rice.kew.exception.WorkflowException;
import org.kuali.rice.kew.role.RoleRouteModule;
import org.kuali.rice.kew.routeheader.DocumentRouteHeaderValue;
import org.kuali.rice.kew.routemodule.RouteModule;
import org.kuali.rice.kew.service.KEWServiceLocator;
import org.kuali.rice.kew.util.ClassDumper;
import org.kuali.rice.kew.util.KEWConstants;
import org.kuali.rice.kew.util.PerformanceLogger;
import org.kuali.rice.kew.util.Utilities;
import org.kuali.rice.kim.bo.impl.KimAttributes;
import org.kuali.rice.kim.bo.role.dto.KimResponsibilityInfo;
import org.kuali.rice.kim.service.KIMServiceLocator;
import org.kuali.rice.kns.util.KNSConstants;
/**
 * A node implementation which provides integration with KIM Roles for routing.
 * Essentially extends RequestsNode and provides a custom RouteModule
 * implementation ({@link RoleRouteModule}) plus responsibility-aware request
 * activation.
 *
 * @author Kuali Rice Team ([email protected])
 *
 */
public class RoleNode extends RequestsNode {

	private static final org.apache.log4j.Logger LOG = org.apache.log4j.Logger
			.getLogger( RoleNode.class );

	@Override
	protected RouteModule getRouteModule(RouteContext context) throws Exception {
		return new RoleRouteModule();
	}

	/**
	 * Generates the action requests for the node and, unless policy errors are
	 * suppressed, enforces that (1) KIM responsibilities flagged as "required"
	 * actually produced at least one request and (2) final-approval constraints
	 * hold.
	 *
	 * @see org.kuali.rice.kew.engine.node.RequestsNode#processCustom(org.kuali.rice.kew.engine.RouteContext, org.kuali.rice.kew.engine.RouteHelper)
	 */
	@Override
	protected boolean processCustom(RouteContext routeContext, RouteHelper routeHelper) throws Exception {
		DocumentRouteHeaderValue document = routeContext.getDocument();
		RouteNodeInstance nodeInstance = routeContext.getNodeInstance();
		RouteNode node = nodeInstance.getRouteNode();
		// while no routable actions are activated and there are more
		// routeLevels to process
		if ( nodeInstance.isInitial() ) {
			if ( LOG.isDebugEnabled() ) {
				LOG.debug( "RouteHeader info inside routing loop\n"
						+ ClassDumper.dumpFields( routeContext.getDocument() ) );
				LOG.debug( "Looking for new actionRequests - routeLevel: "
						+ node.getRouteNodeName() );
			}
			boolean suppressPolicyErrors = isSupressingPolicyErrors( routeContext );
			List<ActionRequestValue> requests = getNewActionRequests( routeContext );
			// for mandatory routes, requests must be generated
			if ( requests.isEmpty() && !suppressPolicyErrors) {
				KimResponsibilityInfo resp = getFirstResponsibilityWithMandatoryRouteFlag( document, node );
				if ( resp != null ) {
					throw new RouteManagerException( "No requests generated for KIM Responsibility-based mandatory route.\n" +
							"Document Id: " + document.getRouteHeaderId() + "\n" +
							"DocumentType: " + document.getDocumentType().getName() + "\n" +
							"Route Node: " + node.getRouteNodeName() + "\n" +
							"Responsibility: " + resp,
							routeContext );
				}
			}
			// determine if we have any approve requests for FinalApprover
			// checks
			if ( !suppressPolicyErrors ) {
				verifyFinalApprovalRequest( document, requests, nodeInstance, routeContext );
			}
		}
		return true; // to indicate custom processing performed
	}

	/**
	 * Checks for any mandatory route responsibilities for the given document type and node.
	 *
	 * Walks up the document type hierarchy and stops at the first level which
	 * defines any responsibility for this node, since responsibilities at that
	 * level override any parent responsibilities.
	 *
	 * @return the first responsibility whose detail flags it as required, or
	 *         null when none exists (or the lookup fails)
	 */
	protected KimResponsibilityInfo getFirstResponsibilityWithMandatoryRouteFlag( DocumentRouteHeaderValue document, RouteNode node ) {
		Map<String,String> searchCriteria = new HashMap<String,String>();
		searchCriteria.put("template.namespaceCode", KNSConstants.KUALI_RICE_WORKFLOW_NAMESPACE);
		searchCriteria.put("template.name", KEWConstants.DEFAULT_RESPONSIBILITY_TEMPLATE_NAME);
		searchCriteria.put("active", "Y");
		DocumentType docType = document.getDocumentType();
		while ( docType != null ) {
			// FIX: use the document type at the current level of the hierarchy
			// (previously this used document.getDocumentType().getName() on every
			// iteration, so walking up the hierarchy repeated the same query)
			searchCriteria.put("detailCriteria", getDetailCriteriaString( docType.getName(), node.getRouteNodeName() ) );
			try {
				List<? extends KimResponsibilityInfo> responsibilities = KIMServiceLocator.getResponsibilityService().lookupResponsibilityInfo( searchCriteria, false );
				// once we find a responsibility, stop, since this overrides any parent
				// responsibilities for this node
				if ( !responsibilities.isEmpty() ) {
					// if any has required=true - return it
					for ( KimResponsibilityInfo resp : responsibilities ) {
						if ( Boolean.parseBoolean( resp.getDetails().get( KimAttributes.REQUIRED ) ) ) {
							return resp;
						}
					}
					return null;
				}
			} catch ( Exception ex ) {
				LOG.error( "Problem looking up responsibilities to check mandatory route. Criteria: " + searchCriteria, ex );
				return null;
			}
			docType = docType.getParentDocType();
		}
		return null;
	}

	/**
	 * Builds the detail criteria string used to match responsibilities against
	 * a document type / route node pair.
	 */
	protected String getDetailCriteriaString( String documentTypeName, String routeNodeName ) {
		return KimAttributes.DOCUMENT_TYPE_NAME+"="+documentTypeName
				+ ","
				+ KimAttributes.ROUTE_NODE_NAME+"="+routeNodeName
				;
	}

	/**
	 * Activates the action requests that are pending at this routelevel of the
	 * document. The requests are processed by priority and then request ID. It
	 * is implicit in the access that the requests are activated according to
	 * the route level above all.
	 * <p>
	 * FYI and acknowledgment requests do not cause the processing to stop. Only
	 * action requests for approval or completion cause the processing to stop
	 * and then only for route level with a serialized activation policy. Only
	 * requests at the current document's current route level are activated.
	 * Inactive requests at a lower level cause a routing exception.
	 * <p>
	 * Exception routing and adhoc routing are processed slightly differently.
	 *
	 * @return True if the any approval actions were activated.
	 * @throws ResourceUnavailableException
	 * @throws WorkflowException
	 */
	public boolean activateRequests(RouteContext context, DocumentRouteHeaderValue document,
			RouteNodeInstance nodeInstance) throws WorkflowException {
		MDC.put( "docID", document.getRouteHeaderId() );
		PerformanceLogger performanceLogger = new PerformanceLogger( document.getRouteHeaderId() );
		List<ActionItem> generatedActionItems = new ArrayList<ActionItem>();
		List<ActionRequestValue> requests = new ArrayList<ActionRequestValue>();
		if ( context.isSimulation() ) {
			// in simulation the pending requests live on the in-memory document,
			// not in the database
			for ( ActionRequestValue ar : context.getDocument().getActionRequests() ) {
				// logic check below duplicates behavior of the
				// ActionRequestService.findPendingRootRequestsByDocIdAtRouteNode(routeHeaderId,
				// routeNodeInstanceId) method
				if ( ar.getCurrentIndicator()
						&& (KEWConstants.ACTION_REQUEST_INITIALIZED.equals( ar.getStatus() ) || KEWConstants.ACTION_REQUEST_ACTIVATED
								.equals( ar.getStatus() ))
						&& ar.getNodeInstance().getRouteNodeInstanceId().equals(
								nodeInstance.getRouteNodeInstanceId() )
						&& ar.getParentActionRequest() == null ) {
					requests.add( ar );
				}
			}
			requests.addAll( context.getEngineState().getGeneratedRequests() );
		} else {
			requests = KEWServiceLocator.getActionRequestService()
					.findPendingRootRequestsByDocIdAtRouteNode( document.getRouteHeaderId(),
							nodeInstance.getRouteNodeInstanceId() );
		}
		if ( LOG.isDebugEnabled() ) {
			LOG.debug( "Pending Root Requests " + requests.size() );
		}
		boolean requestActivated = activateRequestsCustom( context, requests, generatedActionItems,
				document, nodeInstance );
		// now let's send notifications, since this code needs to be able to
		// activate each request individually, we need
		// to collection all action items and then notify after all have been
		// generated
		if ( !context.isSimulation() ) {
			KEWServiceLocator.getNotificationService().notify( generatedActionItems );
		}
		performanceLogger.log( "Time to activate requests." );
		return requestActivated;
	}

	/**
	 * Activates the given pending requests, honoring the node's activation type.
	 * Approve/complete requests are only activated while they belong to the
	 * same "activation group" (responsibility description) and priority as the
	 * first approve/complete request encountered; FYI/ACK requests are always
	 * activated.
	 *
	 * @return true if any approve or complete request was activated
	 */
	protected boolean activateRequestsCustom(RouteContext context,
			List<ActionRequestValue> requests, List<ActionItem> generatedActionItems,
			DocumentRouteHeaderValue document, RouteNodeInstance nodeInstance)
			throws WorkflowException {
		// FIXME: won't this undo any ordering from the role type service?
		Collections.sort( requests, new Utilities.PrioritySorter() );
		String activationType = nodeInstance.getRouteNode().getActivationType();
		boolean isParallel = KEWConstants.ROUTE_LEVEL_PARALLEL.equals( activationType );
		boolean requestActivated = false;
		String groupToActivate = null;
		Integer priorityToActivate = null;
		for ( ActionRequestValue request : requests ) {
			// serialized activation: stop once something is activated (unless a
			// simulation explicitly asked for full activation)
			if ( requestActivated
					&& !isParallel
					&& (!context.isSimulation() || !context.getActivationContext()
							.isActivateRequests()) ) {
				break;
			}
			if ( request.getParentActionRequest() != null || request.getNodeInstance() == null ) {
				// 1. disregard request if it's not a top-level request
				// 2. disregard request if it's a "future" request and hasn't
				// been attached to a node instance yet
				continue;
			}
			if ( request.isApproveOrCompleteRequest() ) {
				// lock onto the group/priority of the first approve request seen
				if ( priorityToActivate == null ) {
					priorityToActivate = request.getPriority();
				}
				if ( groupToActivate == null ) {
					groupToActivate = request.getResponsibilityDesc();
				}
				// check that the given request is found in the current group to activate
				// check priority and grouping from the request (stored in the responsibility description)
				if ( StringUtils.equals( groupToActivate, request.getResponsibilityDesc() )
						&& (
							(priorityToActivate != null && request.getPriority() != null && priorityToActivate.equals(request.getPriority()))
							|| (priorityToActivate == null && request.getPriority() == null)
							)
						) {
					// if the request is already active, don't re-activate but
					// note that an approve request is live
					if ( request.isActive() ) {
						requestActivated = requestActivated || request.isApproveOrCompleteRequest();
						continue;
					}
					logProcessingMessage( request );
					if ( LOG.isDebugEnabled() ) {
						LOG.debug( "Activating request: " + request );
					}
					requestActivated = activateRequest( context, request, nodeInstance,
							generatedActionItems )
							|| requestActivated;
				}
			} else {
				// FYI / ACK requests never block and are always activated
				logProcessingMessage( request );
				if ( LOG.isDebugEnabled() ) {
					LOG.debug( "Activating request: " + request );
				}
				requestActivated = activateRequest( context, request, nodeInstance,
						generatedActionItems )
						|| requestActivated;
			}
		}
		return requestActivated;
	}
}
| impl/src/main/java/org/kuali/rice/kew/engine/node/RoleNode.java | /*
* Copyright 2007 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 1.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl1.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.rice.kew.engine.node;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.MDC;
import org.kuali.rice.kew.actionitem.ActionItem;
import org.kuali.rice.kew.actionrequest.ActionRequestValue;
import org.kuali.rice.kew.doctype.bo.DocumentType;
import org.kuali.rice.kew.engine.RouteContext;
import org.kuali.rice.kew.engine.RouteHelper;
import org.kuali.rice.kew.exception.ResourceUnavailableException;
import org.kuali.rice.kew.exception.RouteManagerException;
import org.kuali.rice.kew.exception.WorkflowException;
import org.kuali.rice.kew.role.RoleRouteModule;
import org.kuali.rice.kew.routeheader.DocumentRouteHeaderValue;
import org.kuali.rice.kew.routemodule.RouteModule;
import org.kuali.rice.kew.service.KEWServiceLocator;
import org.kuali.rice.kew.util.ClassDumper;
import org.kuali.rice.kew.util.KEWConstants;
import org.kuali.rice.kew.util.PerformanceLogger;
import org.kuali.rice.kew.util.Utilities;
import org.kuali.rice.kim.bo.impl.KimAttributes;
import org.kuali.rice.kim.bo.role.dto.KimResponsibilityInfo;
import org.kuali.rice.kim.service.KIMServiceLocator;
import org.kuali.rice.kns.util.KNSConstants;
/**
* A node implementation which provides integration with KIM Roles for routing.
* Essentially extends RequestsNode and provides a custom RouteModule
* implementation.
*
* @author Kuali Rice Team ([email protected])
*
*/
public class RoleNode extends RequestsNode {
private static final org.apache.log4j.Logger LOG = org.apache.log4j.Logger
.getLogger( RoleNode.class );
@Override
protected RouteModule getRouteModule(RouteContext context) throws Exception {
return new RoleRouteModule();
}
/**
* @see org.kuali.rice.kew.engine.node.RequestsNode#processCustom(org.kuali.rice.kew.engine.RouteContext, org.kuali.rice.kew.engine.RouteHelper)
*/
@Override
protected boolean processCustom(RouteContext routeContext, RouteHelper routeHelper) throws Exception {
DocumentRouteHeaderValue document = routeContext.getDocument();
RouteNodeInstance nodeInstance = routeContext.getNodeInstance();
RouteNode node = nodeInstance.getRouteNode();
// while no routable actions are activated and there are more
// routeLevels to process
if ( nodeInstance.isInitial() ) {
if ( LOG.isDebugEnabled() ) {
LOG.debug( "RouteHeader info inside routing loop\n"
+ ClassDumper.dumpFields( routeContext.getDocument() ) );
LOG.debug( "Looking for new actionRequests - routeLevel: "
+ node.getRouteNodeName() );
}
boolean suppressPolicyErrors = isSupressingPolicyErrors( routeContext );
List<ActionRequestValue> requests = getNewActionRequests( routeContext );
// Debugging code to force an empty action request
// if ( document.getDocumentType().getName().equals( "SACC" ) ) {
// LOG.fatal( "DEBUGGING CODE IN PLACE - SACC DOCUMENT ACTION REQUESTS CLEARED" );
// requests.clear();
// }
// for mandatory routes, requests must be generated
if ( requests.isEmpty() && !suppressPolicyErrors) {
KimResponsibilityInfo resp = getFirstResponsibilityWithMandatoryRouteFlag( document, node );
if ( resp != null ) {
throw new RouteManagerException( "No requests generated for KIM Responsibility-based mandatory route.\n" +
"Document Id: " + document.getRouteHeaderId() + "\n" +
"DocumentType: " + document.getDocumentType().getName() + "\n" +
"Route Node: " + node.getRouteNodeName() + "\n" +
"Responsibility: " + resp,
routeContext );
}
}
// determine if we have any approve requests for FinalApprover
// checks
if ( !suppressPolicyErrors ) {
verifyFinalApprovalRequest( document, requests, nodeInstance, routeContext );
}
}
return true; // to indicate custom processing performed
}
/**
* Checks for any mandatory route responsibilities for the given document type and node.
*
* Stops once it finds a responsibility for the document and node.
*/
protected KimResponsibilityInfo getFirstResponsibilityWithMandatoryRouteFlag( DocumentRouteHeaderValue document, RouteNode node ) {
// iterate over the document hierarchy
// gather responsibilities - merge based on route level
//Map<String,Boolean>
Map<String,String> searchCriteria = new HashMap<String,String>();
searchCriteria.put("template.namespaceCode", KNSConstants.KUALI_RICE_WORKFLOW_NAMESPACE);
searchCriteria.put("template.name", KEWConstants.DEFAULT_RESPONSIBILITY_TEMPLATE_NAME);
searchCriteria.put("active", "Y");
DocumentType docType = document.getDocumentType();
while ( docType != null ) {
searchCriteria.put("detailCriteria", getDetailCriteriaString( document.getDocumentType().getName(), node.getRouteNodeName() ) );
try {
List<? extends KimResponsibilityInfo> responsibilities = KIMServiceLocator.getResponsibilityService().lookupResponsibilityInfo( searchCriteria, false );
// once we find a responsibility, stop, since this overrides any parent
// responsibilities for this node
if ( !responsibilities.isEmpty() ) {
// if any has required=true - return true
for ( KimResponsibilityInfo resp : responsibilities ) {
if ( Boolean.parseBoolean( resp.getDetails().get( KimAttributes.REQUIRED ) ) ) {
return resp;
}
}
return null;
}
} catch ( Exception ex ) {
LOG.error( "Problem looking up responsibilities to check mandatory route. Criteria: " +searchCriteria, ex );
return null;
}
docType = docType.getParentDocType();
}
return null;
}
protected String getDetailCriteriaString( String documentTypeName, String routeNodeName ) {
return KimAttributes.DOCUMENT_TYPE_NAME+"="+documentTypeName
+ ","
+ KimAttributes.ROUTE_NODE_NAME+"="+routeNodeName
// + ","
// + KimAttributes.REQUIRED+"=true"
;
}
/**
* Activates the action requests that are pending at this routelevel of the
* document. The requests are processed by priority and then request ID. It
* is implicit in the access that the requests are activated according to
* the route level above all.
* <p>
* FYI and acknowledgment requests do not cause the processing to stop. Only
* action requests for approval or completion cause the processing to stop
* and then only for route level with a serialized activation policy. Only
* requests at the current document's current route level are activated.
* Inactive requests at a lower level cause a routing exception.
* <p>
* Exception routing and adhoc routing are processed slightly differently.
*
* @return True if the any approval actions were activated.
* @throws ResourceUnavailableException
* @throws WorkflowException
*/
public boolean activateRequests(RouteContext context, DocumentRouteHeaderValue document,
RouteNodeInstance nodeInstance) throws WorkflowException {
MDC.put( "docID", document.getRouteHeaderId() );
PerformanceLogger performanceLogger = new PerformanceLogger( document.getRouteHeaderId() );
List<ActionItem> generatedActionItems = new ArrayList<ActionItem>();
List<ActionRequestValue> requests = new ArrayList<ActionRequestValue>();
if ( context.isSimulation() ) {
for ( ActionRequestValue ar : context.getDocument().getActionRequests() ) {
// logic check below duplicates behavior of the
// ActionRequestService.findPendingRootRequestsByDocIdAtRouteNode(routeHeaderId,
// routeNodeInstanceId) method
if ( ar.getCurrentIndicator()
&& (KEWConstants.ACTION_REQUEST_INITIALIZED.equals( ar.getStatus() ) || KEWConstants.ACTION_REQUEST_ACTIVATED
.equals( ar.getStatus() ))
&& ar.getNodeInstance().getRouteNodeInstanceId().equals(
nodeInstance.getRouteNodeInstanceId() )
&& ar.getParentActionRequest() == null ) {
requests.add( ar );
}
}
requests.addAll( context.getEngineState().getGeneratedRequests() );
} else {
requests = KEWServiceLocator.getActionRequestService()
.findPendingRootRequestsByDocIdAtRouteNode( document.getRouteHeaderId(),
nodeInstance.getRouteNodeInstanceId() );
}
if ( LOG.isDebugEnabled() ) {
LOG.debug( "Pending Root Requests " + requests.size() );
}
boolean requestActivated = activateRequestsCustom( context, requests, generatedActionItems,
document, nodeInstance );
// now let's send notifications, since this code needs to be able to
// activate each request individually, we need
// to collection all action items and then notify after all have been
// generated
if ( !context.isSimulation() ) {
KEWServiceLocator.getNotificationService().notify( generatedActionItems );
}
performanceLogger.log( "Time to activate requests." );
return requestActivated;
}
protected boolean activateRequestsCustom(RouteContext context,
List<ActionRequestValue> requests, List<ActionItem> generatedActionItems,
DocumentRouteHeaderValue document, RouteNodeInstance nodeInstance)
throws WorkflowException {
// FIXME: won't this undo any ordering from the role type service?
Collections.sort( requests, new Utilities.PrioritySorter() );
String activationType = nodeInstance.getRouteNode().getActivationType();
boolean isParallel = KEWConstants.ROUTE_LEVEL_PARALLEL.equals( activationType );
boolean requestActivated = false;
String groupToActivate = null;
for ( ActionRequestValue request : requests ) {
if ( requestActivated
&& !isParallel
&& (!context.isSimulation() || !context.getActivationContext()
.isActivateRequests()) ) {
break;
}
if ( request.getParentActionRequest() != null || request.getNodeInstance() == null ) {
// 1. disregard request if it's not a top-level request
// 2. disregard request if it's a "future" request and hasn't
// been attached to a node instance yet
continue;
}
if ( request.isApproveOrCompleteRequest() ) {
if ( groupToActivate == null ) {
groupToActivate = request.getResponsibilityDesc();
}
if ( StringUtils.equals( groupToActivate, request.getResponsibilityDesc() ) ) {
if ( request.isActive() ) {
requestActivated = requestActivated || request.isApproveOrCompleteRequest();
continue;
}
logProcessingMessage( request );
if ( LOG.isDebugEnabled() ) {
LOG.debug( "Activating request: " + request );
}
requestActivated = activateRequest( context, request, nodeInstance,
generatedActionItems )
|| requestActivated;
}
} else {
logProcessingMessage( request );
if ( LOG.isDebugEnabled() ) {
LOG.debug( "Activating request: " + request );
}
requestActivated = activateRequest( context, request, nodeInstance,
generatedActionItems )
|| requestActivated;
}
}
return requestActivated;
}
}
| KFSMI-3445 : added priority to parallel routing grouping so that only one level would be routed to at the same time
| impl/src/main/java/org/kuali/rice/kew/engine/node/RoleNode.java | KFSMI-3445 : added priority to parallel routing grouping so that only one level would be routed to at the same time | <ide><path>mpl/src/main/java/org/kuali/rice/kew/engine/node/RoleNode.java
<ide> boolean isParallel = KEWConstants.ROUTE_LEVEL_PARALLEL.equals( activationType );
<ide> boolean requestActivated = false;
<ide> String groupToActivate = null;
<add> Integer priorityToActivate = null;
<ide> for ( ActionRequestValue request : requests ) {
<ide> if ( requestActivated
<ide> && !isParallel
<ide> continue;
<ide> }
<ide> if ( request.isApproveOrCompleteRequest() ) {
<add> if ( priorityToActivate == null ) {
<add> priorityToActivate = request.getPriority();
<add> }
<ide> if ( groupToActivate == null ) {
<ide> groupToActivate = request.getResponsibilityDesc();
<ide> }
<del> if ( StringUtils.equals( groupToActivate, request.getResponsibilityDesc() ) ) {
<add> // check that the given request is found in the current group to activate
<add> // check priority and grouping from the request (stored in the responsibility description)
<add> if ( StringUtils.equals( groupToActivate, request.getResponsibilityDesc() )
<add> && (
<add> (priorityToActivate != null && request.getPriority() != null && priorityToActivate.equals(request.getPriority()))
<add> || (priorityToActivate == null && request.getPriority() == null)
<add> )
<add> ) {
<ide> if ( request.isActive() ) {
<ide> requestActivated = requestActivated || request.isApproveOrCompleteRequest();
<ide> continue; |
|
Java | apache-2.0 | 6f293f15845248d6d14436d5e957d71c23cd3cd3 | 0 | ppavlidis/Gemma,ppavlidis/Gemma,ppavlidis/Gemma,ppavlidis/Gemma,ppavlidis/Gemma,ppavlidis/Gemma,ppavlidis/Gemma | /*
* The Gemma project
*
* Copyright (c) 2008 University of British Columbia
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package ubic.gemma.web.controller.expression.experiment;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Queue;
import java.util.Random;
import java.util.Set;
import javax.servlet.http.HttpServletRequest;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.time.StopWatch;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.servlet.ModelAndView;
import ubic.basecode.math.DescriptiveWithMissing;
import ubic.gemma.analysis.expression.diff.DiffExpressionSelectedFactorCommand;
import ubic.gemma.analysis.expression.diff.DifferentialExpressionValueObject;
import ubic.gemma.analysis.expression.diff.GeneDifferentialExpressionService;
import ubic.gemma.analysis.preprocess.svd.SVDService;
import ubic.gemma.analysis.service.ExpressionDataFileService;
import ubic.gemma.expression.experiment.service.ExpressionExperimentService;
import ubic.gemma.genome.gene.service.GeneService;
import ubic.gemma.model.analysis.expression.diff.DifferentialExpressionAnalysisResult;
import ubic.gemma.model.analysis.expression.diff.DifferentialExpressionResultService;
import ubic.gemma.model.analysis.expression.diff.ExpressionAnalysisResultSet;
import ubic.gemma.model.analysis.expression.pca.ProbeLoading;
import ubic.gemma.model.association.coexpression.Probe2ProbeCoexpressionService;
import ubic.gemma.model.common.description.Characteristic;
import ubic.gemma.model.expression.bioAssay.BioAssay;
import ubic.gemma.model.expression.bioAssay.BioAssayValueObject;
import ubic.gemma.model.expression.bioAssayData.DesignElementDataVector;
import ubic.gemma.model.expression.bioAssayData.DesignElementDataVectorService;
import ubic.gemma.model.expression.bioAssayData.DoubleVectorValueObject;
import ubic.gemma.model.expression.bioAssayData.ProcessedExpressionDataVectorService;
import ubic.gemma.model.expression.designElement.CompositeSequence;
import ubic.gemma.model.expression.designElement.CompositeSequenceService;
import ubic.gemma.model.expression.experiment.BioAssaySet;
import ubic.gemma.model.expression.experiment.ExperimentalFactor;
import ubic.gemma.model.expression.experiment.ExperimentalFactorValueObject;
import ubic.gemma.model.expression.experiment.ExpressionExperiment;
import ubic.gemma.model.expression.experiment.ExpressionExperimentSubSetService;
import ubic.gemma.model.expression.experiment.ExpressionExperimentValueObject;
import ubic.gemma.model.expression.experiment.FactorType;
import ubic.gemma.model.expression.experiment.FactorValue;
import ubic.gemma.model.expression.experiment.FactorValueService;
import ubic.gemma.model.genome.Gene;
import ubic.gemma.model.genome.gene.GeneValueObject;
import ubic.gemma.util.EntityUtils;
import ubic.gemma.visualization.ExperimentalDesignVisualizationService;
import ubic.gemma.web.controller.visualization.ExpressionProfileDataObject;
import ubic.gemma.web.controller.visualization.VisualizationValueObject;
import ubic.gemma.web.view.TextView;
import cern.colt.list.DoubleArrayList;
/**
* Exposes methods for accessing underlying Design Element Data Vectors. eg: ajax methods for visualization
*
* @author kelsey
* @version $Id$
*/
@Controller
@RequestMapping("/dedv")
public class DEDVController {
protected static Log log = LogFactory.getLog( DEDVController.class.getName() );
private static final double DEFAULT_THRESHOLD = 0.05;
private static final int MAX_RESULTS_TO_RETURN = 150;
private static final int SAMPLE_SIZE = 20; // Number of dedvs to return if no genes given
/**
* Assign colour lists (queues actually) to factors. The idea is that every factor value will get a colour assigned
* from its factor's list.
*
* @param factorNames using names here because multiple experimentalFactors can have the same name (want them
* collapsed for legend)
* @return
*/
private static Map<String, Queue<String>> createFactorNameToColoursMap( Collection<String> factorNames ) {
// colours for conditions/factor values bar chart
Map<String, Queue<String>> factorColoursMap = new HashMap<String, Queue<String>>();
String[] blues = { "#85c6ff", "#6b90ff", "#105bfe", "#005589", "#0090e9", "#0400fe", "#008998", "#3e3c90",
"#020090", "#105bfe" }; // 10
String[] purples = { "#d19bff", "#a30064", "#7d00ea", "#893984", "#f05eb8", "#9c00d0", "#b66ccf", "#e7008f",
"#670089", "#bf00b2", "#890080", "#8865a6", "#3f0076" }; // 13
String[] redYellows = { "#ffd78d", "#d85d00", "#b40101", "#944343", "#ff6d48", "#d36b62", "#ff8001", "#c74f34",
"#d89561", "#f8bc2e" }; // 10
String[] greens = { "#98da95", "#82b998", "#257e21", "#36b52f", "#38b990", "#a9da5f", "#4cfe42", "#73c000",
"#0fa345", "#99fe01", "#508500" }; // 10
String[][] colourArrs = { blues, greens, purples, redYellows };
int j = 0;
for ( String factorName : factorNames ) {
if ( !factorColoursMap.containsKey( factorName ) ) {
factorColoursMap.put( factorName, new LinkedList<String>() );
}
if ( j < colourArrs.length ) {
for ( int i = 0; i < colourArrs[j].length; i++ ) {
factorColoursMap.get( factorName ).add( colourArrs[j][i] ); // array to queue
}
}
j++;
}
return factorColoursMap;
}
@Autowired
private CompositeSequenceService compositeSequenceService;
@Autowired
private DesignElementDataVectorService designElementDataVectorService;
@Autowired
private DifferentialExpressionResultService differentialExpressionResultService;
@Autowired
private ExperimentalDesignVisualizationService experimentalDesignVisualizationService;
@Autowired
private ExpressionExperimentService expressionExperimentService;
@Autowired
private SVDService svdService;
@Autowired
private GeneDifferentialExpressionService geneDifferentialExpressionService;
@Autowired
private GeneService geneService;
@Autowired
private FactorValueService factorValueService;
@Autowired
private Probe2ProbeCoexpressionService probe2ProbeCoexpressionService;
@Autowired
private ProcessedExpressionDataVectorService processedExpressionDataVectorService;
@Autowired
private ExpressionExperimentSubSetService expressionExperimentSubSetService;
/**
* Given a collection of expression experiment Ids and a geneId returns a map of DEDV value objects to a collection
* of genes. The EE info is in the value object. FIXME handle subsets.
*/
public Map<ExpressionExperimentValueObject, Map<Long, Collection<DoubleVectorValueObject>>> getDEDV(
Collection<Long> eeIds, Collection<Long> geneIds ) {
StopWatch watch = new StopWatch();
watch.start();
Collection<ExpressionExperiment> ees = expressionExperimentService.loadMultiple( eeIds );
if ( ees == null || ees.isEmpty() ) return null;
Collection<DoubleVectorValueObject> dedvMap;
if ( geneIds == null || geneIds.isEmpty() ) {
dedvMap = processedExpressionDataVectorService.getProcessedDataArrays( ees.iterator().next(), 50 );
} else {
dedvMap = processedExpressionDataVectorService.getProcessedDataArrays( ees, geneIds );
}
/*
* Don't reorganize them -- the headings will be wrong.
*/
Map<Long, LinkedHashMap<BioAssay, LinkedHashMap<ExperimentalFactor, Double>>> layouts = null;
//
// layouts= experimentalDesignVisualizationService .sortVectorDataByDesign( dedvMap );
watch.stop();
Long time = watch.getTime();
if ( time > 1000 ) {
log.info( "Retrieved " + dedvMap.size() + " DEDVs from " + eeIds.size() + " EEs in " + time + " ms." );
}
return makeVectorMap( dedvMap, layouts );
}
/**
* AJAX exposed method
*
* @param eeIds
* @return
*/
public VisualizationValueObject[] getDEDVForCoexpressionVisualization( Collection<Long> eeIds, Long queryGeneId,
Long coexpressedGeneId ) {
StopWatch watch = new StopWatch();
watch.start();
Collection<ExpressionExperiment> ees = expressionExperimentService.loadMultiple( eeIds );
if ( ees == null || ees.isEmpty() ) return new VisualizationValueObject[0];
Gene queryGene = geneService.load( queryGeneId );
Gene coexpressedGene = geneService.load( coexpressedGeneId );
List<Long> genes = new ArrayList<Long>();
genes.add( queryGeneId );
genes.add( coexpressedGeneId );
if ( genes.isEmpty() ) return new VisualizationValueObject[0];
Collection<DoubleVectorValueObject> dedvs = processedExpressionDataVectorService.getProcessedDataArrays( ees,
genes );
Map<Long, LinkedHashMap<BioAssayValueObject, LinkedHashMap<ExperimentalFactor, Double>>> layouts = null;
layouts = experimentalDesignVisualizationService.sortVectorDataByDesign( dedvs );
// layouts = experimentalDesignVisualizationService.sortLayoutSamplesByFactor( layouts );
watch.stop();
Long time = watch.getTime();
if ( dedvs.size() == 0 ) {
log.warn( "No expression profiles (DEDVs) were available for the experiments: " + eeIds + " and genes(s) "
+ queryGene.getOfficialSymbol() + ", " + coexpressedGene.getOfficialSymbol() );
return new VisualizationValueObject[0];
}
if ( time > 1000 ) {
log.info( "Retrieved " + dedvs.size() + " DEDVs for " + eeIds.size() + " EEs and " + genes.size()
+ " genes in " + time + " ms." );
}
Map<Long, Collection<Long>> validatedProbes = getProbeLinkValidation( ees, queryGene, coexpressedGene, dedvs );
return makeVisCollection( dedvs, genes, validatedProbes, layouts );
}
/**
* AJAX exposed method
*
* @param eeIds FIXME accommodate ExpressionExperimentSubSets. Currently we pass in the "source experiment" so we
* don't get the slice.
* @param geneIds (could be just one)
* @param threshold for 'significance'
* @param factorMap Collection of DiffExpressionSelectedFactorCommand showing which factors to use.
* @return
*/
public VisualizationValueObject[] getDEDVForDiffExVisualization( Collection<Long> eeIds, Collection<Long> geneIds,
Double threshold, Collection<DiffExpressionSelectedFactorCommand> factorMap ) {
if ( eeIds.isEmpty() || geneIds.isEmpty() ) return null;
StopWatch watch = new StopWatch();
watch.start();
Collection<? extends BioAssaySet> ees = expressionExperimentService.loadMultiple( eeIds );
if ( ees == null || ees.isEmpty() ) return null;
Collection<Gene> genes = geneService.loadMultiple( geneIds );
if ( genes == null || genes.isEmpty() ) return null;
Collection<DoubleVectorValueObject> dedvs = processedExpressionDataVectorService.getProcessedDataArrays( ees,
geneIds );
watch.stop();
Long time = watch.getTime();
log.info( "Retrieved " + dedvs.size() + " DEDVs for " + eeIds.size() + " EEs and " + geneIds.size()
+ " genes in " + time + " ms." );
watch = new StopWatch();
watch.start();
Map<Long, LinkedHashMap<BioAssayValueObject, LinkedHashMap<ExperimentalFactor, Double>>> layouts = null;
layouts = experimentalDesignVisualizationService.sortVectorDataByDesign( dedvs );
time = watch.getTime();
if ( time > 100 ) {
log.info( "Ran sortVectorDataByDesign on " + dedvs.size() + " DEDVs for 1 EE" + " in " + time
+ " ms (times <100ms not reported)." );
}
// layouts = experimentalDesignVisualizationService.sortLayoutSamplesByFactor( layouts ); // required? yes, see
// GSE11859
time = watch.getTime();
if ( time > 100 ) {
log.info( "Ran sortLayoutSamplesByFactor on " + layouts.size() + " layouts" + " in " + time
+ " ms (times <100ms not reported)." );
}
watch = new StopWatch();
watch.start();
Map<Long, Collection<DifferentialExpressionValueObject>> validatedProbes = getProbeDiffExValidation( genes,
threshold, factorMap );
watch.stop();
time = watch.getTime();
log.info( "Retrieved " + validatedProbes.size() + " valid probes in " + time + " ms." );
return makeDiffVisCollection( dedvs, new ArrayList<Long>( geneIds ), validatedProbes, layouts );
}
/**
* AJAX exposed method Batch factor value analyses are filtered out
*
* @param eeId
* @param geneId
* @param threshold (diff expression threshold)
* @param isSubset Set to true if the ID is for an EE subset.
* @return
*/
public VisualizationValueObject[] getDEDVForDiffExVisualizationByExperiment( Long eeId, Long geneId,
Double threshold, Boolean isSubset ) {
StopWatch watch = new StopWatch();
watch.start();
BioAssaySet ee = null;
if ( isSubset ) {
ee = expressionExperimentSubSetService.load( eeId );
} else {
ee = expressionExperimentService.load( eeId );
}
if ( ee == null ) return new VisualizationValueObject[] {}; // access denied, etc.
if ( threshold == null ) {
log.warn( "Threshold was null, using default" );
threshold = DEFAULT_THRESHOLD;
}
Collection<DoubleVectorValueObject> dedvs;
Gene g = geneService.load( geneId );
if ( g == null ) return null;
Collection<Long> genes = new ArrayList<Long>();
genes.add( geneId );
Collection<BioAssaySet> ees = new ArrayList<BioAssaySet>();
ees.add( ee );
dedvs = processedExpressionDataVectorService.getProcessedDataArrays( ees, genes );
Long time = watch.getTime();
watch.reset();
watch.start();
if ( time > 100 ) {
log.info( "Retrieved " + dedvs.size() + " DEDVs for " + ee.getId() + " and " + "one gene in " + time
+ " ms (times <100ms not reported)." );
}
Map<Long, LinkedHashMap<BioAssayValueObject, LinkedHashMap<ExperimentalFactor, Double>>> layouts = null;
layouts = experimentalDesignVisualizationService.sortVectorDataByDesign( dedvs );
time = watch.getTime();
watch.reset();
watch.start();
if ( time > 100 ) {
log.info( "Ran sortVectorDataByDesign on " + dedvs.size() + " DEDVs for 1 EE" + " in " + time
+ " ms (times <100ms not reported)." );
}
// layouts = experimentalDesignVisualizationService.sortLayoutSamplesByFactor( layouts ); // required? yes, see
// GSE11859
time = watch.getTime();
watch.reset();
watch.start();
if ( time > 100 ) {
log.info( "Ran sortLayoutSamplesByFactor on " + layouts.size() + " layouts" + " in " + time
+ " ms (times <100ms not reported)." );
}
Map<Long, Collection<DifferentialExpressionValueObject>> validatedProbes = new HashMap<Long, Collection<DifferentialExpressionValueObject>>();
validatedProbes.put( ee.getId(),
geneDifferentialExpressionService.getDifferentialExpression( g, ees, threshold, null ) );
watch.stop();
time = watch.getTime();
if ( time > 100 ) {
log.info( "Retrieved " + validatedProbes.size() + " valid probes in " + time + " ms." );
}
return makeDiffVisCollection( dedvs, new ArrayList<Long>( genes ), validatedProbes, layouts );
}
/**
* AJAX exposed method
*
* @param resultSetId The resultset we're specifically interested. Note that this is what is used to choose the
* vectors, since it could be a subset of an experiment.
* @param threshold for 'significance'
* @return collection of visualization value objects
*/
public VisualizationValueObject[] getDEDVForDiffExVisualizationByThreshold( Long resultSetId, Double givenThreshold ) {
if ( resultSetId == null ) {
throw new IllegalArgumentException( "ResultsetId cannot be null" );
}
double threshold = DEFAULT_THRESHOLD;
if ( givenThreshold != null ) {
threshold = givenThreshold;
log.debug( "Threshold specified not using default value: " + givenThreshold );
}
List<DoubleVectorValueObject> dedvs = getDiffExVectors( resultSetId, threshold, 50 );
Map<Long, LinkedHashMap<BioAssayValueObject, LinkedHashMap<ExperimentalFactor, Double>>> layouts = experimentalDesignVisualizationService
.sortVectorDataByDesign( dedvs );
// layouts = experimentalDesignVisualizationService.sortLayoutSamplesByFactor( layouts );
return makeVisCollection( dedvs, null, null, layouts );
}
/**
* AJAX
*
* @param eeId
* @param component
* @param count
* @return
*/
public VisualizationValueObject[] getDEDVForPcaVisualization( Long eeId, int component, int count ) {
StopWatch watch = new StopWatch();
watch.start();
Map<ProbeLoading, DoubleVectorValueObject> topLoadedVectors = this.svdService.getTopLoadedVectors( eeId,
component, count );
if ( topLoadedVectors == null ) return null;
Map<Long, LinkedHashMap<BioAssayValueObject, LinkedHashMap<ExperimentalFactor, Double>>> layouts = null;
Collection<DoubleVectorValueObject> values = topLoadedVectors.values();
layouts = experimentalDesignVisualizationService.sortVectorDataByDesign( values );
return makeVisCollection( values, null, null, layouts );
}
/**
 * AJAX exposed method. Retrieve processed expression vectors for the given experiments (optionally restricted to
 * genes) and package them for front-end visualization.
 *
 * @param eeIds ids of expression experiments to show
 * @param geneIds ids of genes to restrict to; may be null/empty, in which case a sample of vectors from the FIRST
 *        experiment only is returned. Capped at MAX_RESULTS_TO_RETURN genes.
 * @return one VisualizationValueObject per experiment, or null if no experiments could be loaded
 */
public VisualizationValueObject[] getDEDVForVisualization( Collection<Long> eeIds, Collection<Long> geneIds ) {
    StopWatch watch = new StopWatch();
    watch.start();

    Collection<ExpressionExperiment> ees = expressionExperimentService.loadMultiple( eeIds );
    if ( ees == null || ees.isEmpty() ) return null;

    Collection<DoubleVectorValueObject> dedvs;
    if ( geneIds == null || geneIds.isEmpty() ) {
        // No genes given: just show a sample of vectors from one experiment.
        dedvs = processedExpressionDataVectorService.getProcessedDataArrays( ees.iterator().next(), SAMPLE_SIZE );
    } else {
        // Trim over-large gene lists to keep the response manageable.
        if ( geneIds.size() > MAX_RESULTS_TO_RETURN ) {
            log.warn( geneIds.size() + " genes for visualization. Too many. Only using first "
                    + MAX_RESULTS_TO_RETURN + " genes. " );
            List<Long> reducedGeneIds = new ArrayList<Long>( geneIds );
            geneIds = reducedGeneIds.subList( 0, MAX_RESULTS_TO_RETURN );
        }

        dedvs = processedExpressionDataVectorService.getProcessedDataArrays( ees, geneIds );
    }

    // watch.stop();
    Long time = watch.getTime();
    watch.reset();
    watch.start();

    // Only log timings that are slow enough to be interesting.
    if ( time > 100 ) {
        log.info( "Retrieved " + dedvs.size() + " DEDVs for " + eeIds.size() + " EEs"
                + ( geneIds == null ? " sample" : " for " + geneIds.size() + " genes " ) + " in " + time
                + " ms (times <100ms not reported)." );
    }

    Map<Long, LinkedHashMap<BioAssayValueObject, LinkedHashMap<ExperimentalFactor, Double>>> layouts = null;

    layouts = experimentalDesignVisualizationService.sortVectorDataByDesign( dedvs );

    time = watch.getTime();
    watch.reset();
    watch.start();
    if ( time > 100 ) {
        log.info( "Ran sortVectorDataByDesign on " + dedvs.size() + " DEDVs for " + eeIds.size() + " EEs" + " in "
                + time + " ms (times <100ms not reported)." );
    }

    // layouts = experimentalDesignVisualizationService.sortLayoutSamplesByFactor( layouts ); // required?
    watch.stop();
    time = watch.getTime();

    if ( time > 100 ) {
        log.info( "Ran sortLayoutSamplesByFactor on " + layouts.size() + " layouts" + " in " + time
                + " ms (times <100ms not reported)." );
    }
    return makeVisCollection( dedvs, geneIds, null, layouts );
}
/**
 * AJAX exposed method. Like getDEDVForVisualization, but the vectors are selected by probe (composite sequence)
 * rather than by gene.
 *
 * @param eeIds ids of expression experiments to show
 * @param probeIds ids of probes (composite sequences) to restrict to
 * @return one VisualizationValueObject per experiment, or null if either argument is empty or nothing could be
 *         loaded
 */
public VisualizationValueObject[] getDEDVForVisualizationByProbe( Collection<Long> eeIds, Collection<Long> probeIds ) {
    if ( eeIds.isEmpty() || probeIds.isEmpty() ) return null;

    StopWatch watch = new StopWatch();
    watch.start();

    Collection<ExpressionExperiment> ees = expressionExperimentService.loadMultiple( eeIds );
    if ( ees == null || ees.isEmpty() ) return null;

    Collection<CompositeSequence> probes = this.compositeSequenceService.loadMultiple( probeIds );
    if ( probes == null || probes.isEmpty() ) return null;

    Collection<DoubleVectorValueObject> dedvs = processedExpressionDataVectorService.getProcessedDataArraysByProbe(
            ees, probes );

    Map<Long, LinkedHashMap<BioAssayValueObject, LinkedHashMap<ExperimentalFactor, Double>>> layouts = experimentalDesignVisualizationService
            .sortVectorDataByDesign( dedvs );

    watch.stop();
    Long time = watch.getTime();

    log.info( "Retrieved " + dedvs.size() + " DEDVs for " + eeIds.size() + " EEs and " + probeIds.size()
            + " genes in " + time + " ms." );

    return makeVisCollection( dedvs, null, null, layouts );
}
/**
 * Load raw vectors by id and convert them to standardized expression profiles (mean 0, variance 1, via
 * DescriptiveWithMissing.standardize). Vectors whose conversion fails with IllegalArgumentException are skipped
 * with a warning — NOTE(review): an invalid id presumably makes load() return null and the value-object
 * constructor throw; confirm before relying on that.
 *
 * @param dedvIds ids of design element data vectors to fetch
 * @return standardized expression profiles, in iteration order of the ids; invalid ids are silently dropped
 */
public Collection<ExpressionProfileDataObject> getVectorData( Collection<Long> dedvIds ) {
    List<ExpressionProfileDataObject> result = new ArrayList<ExpressionProfileDataObject>();

    for ( Long id : dedvIds ) {
        DesignElementDataVector vector = this.designElementDataVectorService.load( id );
        try {
            DoubleVectorValueObject dvvo = new DoubleVectorValueObject( vector );
            ExpressionProfileDataObject epdo = new ExpressionProfileDataObject( dvvo );

            // standardize in place and write the result back into the profile
            DoubleArrayList doubleArrayList = new cern.colt.list.DoubleArrayList( epdo.getData() );
            DescriptiveWithMissing.standardize( doubleArrayList );
            epdo.setData( doubleArrayList.elements() );

            result.add( epdo );
        } catch ( IllegalArgumentException iae ) {
            log.warn( iae );
        }
    }

    // TODO fill in gene; normalize and clip if desired.; watch for invalid ids.
    return result;
}
/**
 * Parse a comma-delimited string into {@link Long} ids, logging and skipping malformed entries.
 *
 * @param idString comma-separated ids; may be null
 * @return the parsed ids, possibly empty, never null
 */
protected Collection<Long> extractIds( String idString ) {
    Collection<Long> parsed = new ArrayList<Long>();
    if ( idString == null ) {
        return parsed;
    }
    for ( String token : idString.split( "," ) ) {
        try {
            parsed.add( Long.parseLong( token.trim() ) );
        } catch ( NumberFormatException e ) {
            log.warn( "invalid id " + token );
        }
    }
    return parsed;
}
/*
 * Handle case of text export of the results. Supports three modes, chosen from request parameters: PCA
 * top-loaded vectors ("pca" + "component"), differential expression vectors ("rs" result set + "thresh"), or a
 * generic experiment+gene listing.
 *
 * @see org.springframework.web.servlet.mvc.AbstractFormController#handleRequestInternal(javax.servlet.http.
 * HttpServletRequest, javax.servlet.http.HttpServletResponse) Called by /Gemma/dedv/downloadDEDV.html
 */
@RequestMapping("/downloadDEDV.html")
protected ModelAndView handleRequestInternal( HttpServletRequest request ) throws Exception {
    StopWatch watch = new StopWatch();
    watch.start();

    // both id lists are optional in the request
    Collection<Long> geneIds = extractIds( request.getParameter( "g" ) ); // might not be any
    Collection<Long> eeIds = extractIds( request.getParameter( "ee" ) ); // might not be there

    ModelAndView mav = new ModelAndView( new TextView() );
    if ( eeIds == null || eeIds.isEmpty() ) {
        mav.addObject( "text", "Input empty for finding DEDVs: " + geneIds + " and " + eeIds );
        return mav;
    }

    String threshSt = request.getParameter( "thresh" );
    String resultSetIdSt = request.getParameter( "rs" );

    // default threshold; also doubles as the 'count' in the PCA mode below
    Double thresh = 100.0;
    if ( StringUtils.isNotBlank( threshSt ) ) {
        try {
            thresh = Double.parseDouble( threshSt );
        } catch ( NumberFormatException e ) {
            throw new RuntimeException( "Threshold was not a valid value: " + threshSt );
        }
    }

    Map<ExpressionExperimentValueObject, Map<Long, Collection<DoubleVectorValueObject>>> result = null;

    // PCA mode: export the vectors most loaded on the requested component, for the first experiment only
    if ( request.getParameter( "pca" ) != null ) {
        int component = Integer.parseInt( request.getParameter( "component" ) );
        Long eeId = eeIds.iterator().next();

        Map<ProbeLoading, DoubleVectorValueObject> topLoadedVectors = this.svdService.getTopLoadedVectors( eeId,
                component, thresh.intValue() );

        if ( topLoadedVectors == null ) return null;

        mav.addObject( "text", format4File( topLoadedVectors.values() ) );
        return mav;
    }

    /*
     * The following should be set if we're viewing diff. ex results.
     */

    Long resultSetId = null;
    if ( StringUtils.isNumeric( resultSetIdSt ) ) {
        resultSetId = Long.parseLong( resultSetIdSt );
    }

    if ( thresh != null && resultSetId != null ) {
        /*
         * Diff ex case.
         */
        Long eeId = eeIds.iterator().next();

        Collection<DoubleVectorValueObject> diffExVectors = getDiffExVectors( resultSetId, thresh, 50 );

        if ( diffExVectors == null || diffExVectors.isEmpty() ) {
            mav.addObject( "text", "No results" );
            return mav;
        }

        /*
         * Organize the vectors in the same way expected by the ee+gene type of request.
         */
        ExpressionExperimentValueObject ee = expressionExperimentService.loadValueObject( eeId );

        result = new HashMap<ExpressionExperimentValueObject, Map<Long, Collection<DoubleVectorValueObject>>>();
        Map<Long, Collection<DoubleVectorValueObject>> gmap = new HashMap<Long, Collection<DoubleVectorValueObject>>();

        for ( DoubleVectorValueObject dv : diffExVectors ) {
            for ( Long g : dv.getGenes() ) {
                if ( !gmap.containsKey( g ) ) {
                    gmap.put( g, new HashSet<DoubleVectorValueObject>() );
                }
                gmap.get( g ).add( dv );
            }
        }

        result.put( ee, gmap );

    } else {
        // Generic listing.
        result = getDEDV( eeIds, geneIds );
    }

    if ( result == null || result.isEmpty() ) {
        mav.addObject( "text", "No results" );
        return mav;
    }

    mav.addObject( "text", format4File( result ) );
    watch.stop();
    Long time = watch.getTime();

    if ( time > 100 ) {
        log.info( "Retrieved and Formated" + result.keySet().size() + " DEDVs for eeIDs: " + eeIds
                + " and GeneIds: "

                + geneIds + " in : " + time + " ms." );
    }
    return mav;
}
/**
 * Render a collection of vectors as tab-delimited text for download.
 *
 * @param vectors data vectors to export; the header row is derived from the first vector
 * @return tab-delimited text: a disclaimer preamble, one header, then one row per vector (gene symbols and names
 *         joined with '|', probe name, then the data values to three decimal places)
 */
private String format4File( Collection<DoubleVectorValueObject> vectors ) {
    StringBuffer converted = new StringBuffer();
    converted.append( "# Generated by Gemma\n# " + new Date() + "\n" );
    converted.append( ExpressionDataFileService.DISCLAIMER + "#\n" );
    boolean didHeader = false;

    Map<Long, GeneValueObject> gmap = getGeneValueObjectsUsed( vectors );

    for ( DoubleVectorValueObject vec : vectors ) {
        if ( !didHeader ) {
            converted.append( makeHeader( vec ) );
            didHeader = true;
        }

        List<String> geneSymbols = new ArrayList<String>();
        List<String> geneNames = new ArrayList<String>();

        for ( Long g : vec.getGenes() ) {
            GeneValueObject gene = gmap.get( g );
            assert gene != null;
            geneSymbols.add( gene.getOfficialSymbol() );
            geneNames.add( gene.getOfficialName() );
        }

        converted.append( StringUtils.join( geneSymbols, "|" ) + "\t" + StringUtils.join( geneNames, "|" ) + "\t" );
        converted.append( vec.getDesignElement().getName() + "\t" );

        // FIX: was 'vec.getData() != null || vec.getData().length != 0', which NPE'd on null data and,
        // for an empty array, wrongly deleted the tab after the probe name instead of a data tab.
        if ( vec.getData() != null && vec.getData().length != 0 ) {
            for ( double data : vec.getData() ) {
                converted.append( String.format( "%.3f", data ) + "\t" );
            }
            converted.deleteCharAt( converted.length() - 1 ); // remove the trailing tab // FIXME just joind
        }
        converted.append( "\n" );
    }

    return converted.toString();
}
/**
 * Converts the given map into a tab delimited String, grouped by experiment then by gene.
 *
 * @param result map of experiment -> (gene id -> vectors) to export
 * @return tab-delimited text: a disclaimer preamble, then per experiment a header and one row per vector (gene
 *         symbol, gene name, probe name, data values to three decimal places)
 */
private String format4File(
        Map<ExpressionExperimentValueObject, Map<Long, Collection<DoubleVectorValueObject>>> result ) {
    StringBuffer converted = new StringBuffer();
    Map<Long, GeneValueObject> genes = new HashMap<Long, GeneValueObject>(); // Saves us from loading genes
    // unnecessarily
    converted.append( "# Generated by Gemma\n# " + new Date() + "\n" );
    converted.append( ExpressionDataFileService.DISCLAIMER + "#\n" );
    for ( ExpressionExperimentValueObject ee : result.keySet() ) {

        boolean didHeaderForEe = false;

        Collection<Long> geneIds = result.get( ee ).keySet();
        for ( Long geneId : geneIds ) {
            // cache gene value objects across experiments
            GeneValueObject gene;
            if ( genes.containsKey( geneId ) ) {
                gene = genes.get( geneId );
            } else {
                gene = geneService.loadValueObject( geneId );
                genes.put( geneId, gene );
            }
            String geneName = gene.getOfficialSymbol();

            Collection<DoubleVectorValueObject> vecs = result.get( ee ).get( geneId );

            for ( DoubleVectorValueObject dedv : vecs ) {

                if ( !didHeaderForEe ) {
                    converted.append( makeHeader( dedv ) );
                    didHeaderForEe = true;
                }

                converted.append( geneName + "\t" + gene.getOfficialName() + "\t" );
                converted.append( dedv.getDesignElement().getName() + "\t" );

                // FIX: was 'dedv.getData() != null || dedv.getData().length != 0', which NPE'd on null
                // data and, for an empty array, deleted the tab after the probe name.
                if ( dedv.getData() != null && dedv.getData().length != 0 ) {
                    for ( double data : dedv.getData() ) {
                        converted.append( String.format( "%.3f", data ) + "\t" );
                    }
                    converted.deleteCharAt( converted.length() - 1 ); // remove the trailing tab
                }
                converted.append( "\n" );
            }
        }
        converted.append( "\n" );

    }
    converted.append( "\r\n" );

    return converted.toString();
}
/**
 * Fetch the expression vectors for the probes that meet a significance threshold in a differential expression
 * result set, sorted by ascending p-value (nulls first).
 *
 * @param resultSetId id of the differential expression analysis result set
 * @param threshold p-value threshold for including a probe's results
 * @param minNumberOfResults floor on how many results to return even if fewer meet the threshold
 * @return vectors with their p-values filled in, sorted by p-value; null if the result set does not exist
 */
private List<DoubleVectorValueObject> getDiffExVectors( Long resultSetId, Double threshold, int minNumberOfResults ) {

    StopWatch watch = new StopWatch();
    watch.start();
    ExpressionAnalysisResultSet ar = differentialExpressionResultService.loadAnalysisResultSet( resultSetId );
    if ( ar == null ) {
        log.warn( "No diff ex result set with ID=" + resultSetId );
        return null;
    }
    differentialExpressionResultService.thawLite( ar );

    if ( watch.getTime() > 200 ) {
        log.info( "Thaw result set: " + watch.getTime() );
    }

    BioAssaySet analyzedSet = ar.getAnalysis().getExperimentAnalyzed();
    Collection<BioAssaySet> ees = new ArrayList<BioAssaySet>();
    ees.add( analyzedSet );

    List<DifferentialExpressionAnalysisResult> ee2probeResults = differentialExpressionResultService
            .findInResultSet( ar, threshold, MAX_RESULTS_TO_RETURN, minNumberOfResults );

    Collection<CompositeSequence> probes = new HashSet<CompositeSequence>();
    // Map<CompositeSequenceId, pValue>
    // using id instead of entity for map key because want to use a value object for retrieval later
    Map<Long, Double> pvalues = new HashMap<Long, Double>();
    for ( DifferentialExpressionAnalysisResult par : ee2probeResults ) {
        probes.add( par.getProbe() );
        pvalues.put( par.getProbe().getId(), par.getPvalue() );
    }

    watch.reset();
    watch.start();
    List<DoubleVectorValueObject> dedvs = new ArrayList<DoubleVectorValueObject>(
            processedExpressionDataVectorService.getProcessedDataArraysByProbe( ees, probes ) );
    watch.stop();
    if ( watch.getTime() > 1000 ) {
        log.info( "Fetch " + dedvs.size() + " DEDVs for " + probes.size() + " genes in " + watch.getTime()
                + " ms. (result set=" + ar.getId() + ")" );
    }

    /*
     * Resort by p-value, nulls first.
     */
    for ( DoubleVectorValueObject v : dedvs ) {
        v.setPvalue( pvalues.get( v.getDesignElement().getId() ) );
    }

    Collections.sort( dedvs, new Comparator<DoubleVectorValueObject>() {
        @Override
        public int compare( DoubleVectorValueObject o1, DoubleVectorValueObject o2 ) {
            // FIX: the old comparator returned -1 whenever o1's p-value was null, even if o2's was null
            // too, violating the Comparator contract (compare(a,b) == -compare(b,a)) and risking a
            // "Comparison method violates its general contract" from the sort.
            if ( o1.getPvalue() == null && o2.getPvalue() == null ) return 0;
            if ( o1.getPvalue() == null ) return -1;
            if ( o2.getPvalue() == null ) return 1;
            return o1.getPvalue().compareTo( o2.getPvalue() );
        }
    } );

    return dedvs;
}
/**
 * Collect the distinct experimental factor names that appear in an experiment's layout, preserving the order in
 * which they are first encountered.
 *
 * @param eeLayouts layout map of bioassay -> (factor -> value)
 * @return ordered set of factor names; null factors are skipped
 */
private LinkedHashSet<String> getFactorNames(
        LinkedHashMap<BioAssayValueObject, LinkedHashMap<ExperimentalFactor, Double>> eeLayouts ) {
    LinkedHashSet<String> factorNames = new LinkedHashSet<String>(); // need uniqueness & order
    for ( LinkedHashMap<ExperimentalFactor, Double> factorMap : eeLayouts.values() ) {
        for ( ExperimentalFactor factor : factorMap.keySet() ) {
            if ( factor == null ) {
                continue;
            }
            factorNames.add( factor.getName() );
        }
    }
    return factorNames;
}
/**
 * Build a human-readable display string for a factor value, preferring its characteristics and falling back to the
 * raw value.
 *
 * @param facVal factor value to render; must not be null
 * @return comma-separated characteristic values, or the raw value when there are no characteristics, or a
 *         placeholder message when nothing at all could be rendered
 */
private String getFactorValueDisplayString( FactorValue facVal ) {
    StringBuffer facValsStrBuff = new StringBuffer();

    Collection<Characteristic> characteristics = facVal.getCharacteristics();
    if ( characteristics == null || characteristics.isEmpty() ) {
        facValsStrBuff.append( facVal.getValue() + ", " );
    } else {
        // FIX: the old code iterated getCharacteristics() unconditionally, throwing a
        // NullPointerException whenever the collection was null.
        for ( Characteristic characteristic : characteristics ) {
            facValsStrBuff.append( characteristic.getValue() + ", " );
        }
    }

    // trim the trailing ", "
    if ( facValsStrBuff.length() > 0 ) {
        facValsStrBuff.delete( facValsStrBuff.length() - 2, facValsStrBuff.length() );
    }
    if ( facValsStrBuff.length() == 0 ) {
        // was Math.round( facVal.getId() ): a needless (and for huge ids lossy) trip through double
        facValsStrBuff.append( "FactorValue id:" + facVal.getId() + " was not null but no value was found." );
    }

    return facValsStrBuff.toString();
}
/**
 * Load gene value objects for the given ids, returning them in the same order as the input; ids that could not be
 * loaded are silently dropped.
 *
 * @param genes gene ids, in the order the results should appear
 * @return gene value objects in input order, omitting any that were not found
 */
private List<GeneValueObject> getGeneValueObjectList( List<Long> genes ) {
    Collection<GeneValueObject> loaded = geneService.loadValueObjects( genes );
    Map<Long, GeneValueObject> byId = EntityUtils.getIdMap( loaded );

    List<GeneValueObject> ordered = new ArrayList<GeneValueObject>();
    for ( Long geneId : genes ) {
        GeneValueObject vo = byId.get( geneId );
        if ( vo == null ) {
            continue; // id not found; skip it
        }
        ordered.add( vo );
    }
    return ordered;
}
/**
 * Load value objects for every gene referenced by the given vectors.
 *
 * @param vectors data vectors whose gene associations to collect; null vectors or null gene lists are skipped
 * @return map of gene id -> gene value object for all genes used
 */
private Map<Long, GeneValueObject> getGeneValueObjectsUsed( Collection<DoubleVectorValueObject> vectors ) {
    Set<Long> geneIds = new HashSet<Long>();
    for ( DoubleVectorValueObject vector : vectors ) {
        if ( vector == null || vector.getGenes() == null ) {
            continue;
        }
        geneIds.addAll( vector.getGenes() );
    }
    return EntityUtils.getIdMap( geneService.loadValueObjects( geneIds ) );
}
/**
 * This is probably no longer being really used?
 *
 * Collect, per experiment, the differential expression results for the given genes that meet the threshold AND
 * involve one of the selected factors.
 *
 * @param genes genes to check for differential expression
 * @param threshold significance threshold (corrected p-value)
 * @param factorMap Collection of DiffExpressionSelectedFactorCommand showing which factors to use; required
 * @return map of experiment id -> diff ex results passing both the threshold and the factor filter
 * @throws IllegalArgumentException if factorMap is null
 */
private Map<Long, Collection<DifferentialExpressionValueObject>> getProbeDiffExValidation( Collection<Gene> genes,
        Double threshold, Collection<DiffExpressionSelectedFactorCommand> factorMap ) {

    if ( factorMap == null )
        throw new IllegalArgumentException( "Factor information is missing, please make sure factors are selected." );

    Map<Long, Collection<DifferentialExpressionValueObject>> validatedProbes = new HashMap<Long, Collection<DifferentialExpressionValueObject>>();

    Collection<Long> wantedFactors = new HashSet<Long>();
    for ( DiffExpressionSelectedFactorCommand factor : factorMap ) {
        wantedFactors.add( factor.getEfId() );
    }

    for ( Gene gene : genes ) {
        Collection<DifferentialExpressionValueObject> differentialExpression = geneDifferentialExpressionService
                .getDifferentialExpression( gene, threshold, factorMap );

        for ( DifferentialExpressionValueObject diffVo : differentialExpression ) {
            assert diffVo.getCorrP() <= threshold;
            Long eeId = diffVo.getExpressionExperiment().getId();

            if ( !validatedProbes.containsKey( eeId ) ) {
                validatedProbes.put( eeId, new HashSet<DifferentialExpressionValueObject>() );
            }

            // keep only results involving one of the user-selected factors
            Collection<ExperimentalFactorValueObject> factors = diffVo.getExperimentalFactors();

            for ( ExperimentalFactorValueObject fac : factors ) {
                if ( wantedFactors.contains( fac.getId() ) ) {
                    validatedProbes.get( eeId ).add( diffVo );
                }
            }
        }
    }
    return validatedProbes;
}
/**
 * Identify which probes were 'responsible' for the coexpression links.
 *
 * @param ees experiments in which to validate the link
 * @param queryGene the query gene of the link
 * @param coexpressedGene the coexpressed gene of the link
 * @param dedvs vectors for both genes across the experiments; each vector's gene list decides which side of the
 *        link it belongs to
 * @return map of EEID -> collection ProbeIDs which underlie the stored coexpression links.
 */
private Map<Long, Collection<Long>> getProbeLinkValidation( Collection<ExpressionExperiment> ees, Gene queryGene,
        Gene coexpressedGene, Collection<DoubleVectorValueObject> dedvs ) {
    StopWatch watch = new StopWatch();
    watch.start();
    Map<Long, Collection<Long>> coexpressedEE2ProbeIds = new HashMap<Long, Collection<Long>>();
    Map<Long, Collection<Long>> queryEE2ProbeIds = new HashMap<Long, Collection<Long>>();

    /*
     * Get the probes for the vectors, organize by ee.
     */
    for ( DoubleVectorValueObject dedv : dedvs ) {
        ExpressionExperimentValueObject ee = dedv.getExpressionExperiment();
        if ( dedv.getGenes().contains( queryGene.getId() ) ) {
            if ( !queryEE2ProbeIds.containsKey( ee.getId() ) ) {
                queryEE2ProbeIds.put( ee.getId(), new HashSet<Long>() );
            }
            queryEE2ProbeIds.get( ee.getId() ).add( dedv.getDesignElement().getId() );
        } else if ( dedv.getGenes().contains( coexpressedGene.getId() ) ) {
            if ( !coexpressedEE2ProbeIds.containsKey( ee.getId() ) ) {
                coexpressedEE2ProbeIds.put( ee.getId(), new HashSet<Long>() );
            }
            coexpressedEE2ProbeIds.get( ee.getId() ).add( dedv.getDesignElement().getId() );
        } else {
            // vector matches neither gene; indicates inconsistent input
            log.error( "Dedv doesn't belong to coexpressed or query gene. QueryGene= " + queryGene
                    + "CoexpressedGene= " + coexpressedGene + "DEDV " + dedv.getId() + " has genes: "
                    + dedv.getGenes() );
        }
    }

    Map<Long, Collection<Long>> validatedProbes = new HashMap<Long, Collection<Long>>();
    for ( ExpressionExperiment ee : ees ) {
        Collection<Long> queryProbeIds = queryEE2ProbeIds.get( ee.getId() );
        Collection<Long> coexpressedProbeIds = coexpressedEE2ProbeIds.get( ee.getId() );

        // experiments without probes on both sides of the link cannot be validated; skip with a warning
        if ( queryProbeIds == null || queryProbeIds.isEmpty() ) {
            log.warn( "Unexpectedly no probes for " + queryGene + " in " + ee );
            continue;
        }

        if ( coexpressedProbeIds == null || coexpressedProbeIds.isEmpty() ) {
            log.warn( "Unexpectedly no probes for " + coexpressedGene + " in " + ee );
            continue;
        }

        /*
         * Note: this does a probe-level query
         */
        Collection<Long> probesInLinks = this.probe2ProbeCoexpressionService.getCoexpressedProbes( queryProbeIds,
                coexpressedProbeIds, ee, queryGene.getTaxon().getCommonName() );

        if ( probesInLinks.isEmpty() ) {
            log.warn( "Unexpectedly no probes for link between " + queryGene + " -and- " + coexpressedGene + " in "
                    + ee );
        }

        validatedProbes.put( ee.getId(), probesInLinks );
    }

    watch.stop();
    Long time = watch.getTime();

    if ( time > 1000 ) {
        log.info( "Validation of probes for " + ees.size() + " experiments in " + time + "ms." );
    }
    return validatedProbes;
}
/**
 * Generate a random colour string of the form "#X0Y0Z0" where X, Y and Z are single hex digits.
 *
 * @param random the source of randomness; exactly three nextInt(16) draws are consumed
 * @return a CSS-style hex colour string
 */
private String getRandomColour( Random random ) {
    StringBuilder colour = new StringBuilder( "#" );
    for ( int channel = 0; channel < 3; channel++ ) {
        colour.append( Integer.toHexString( random.nextInt( 16 ) ) ).append( "0" );
    }
    return colour.toString();
}
/**
 * Get the names we'll use for the columns of the vectors, preferring the ordering from the experimental design
 * layout when one exists for the vector's experiment.
 *
 * @param vectors vectors to derive sample names for
 * @param vvo the visualization object to receive the names (only set when at least one name was found)
 * @param layouts layouts keyed by experiment id; may be null
 */
private void getSampleNames( Collection<DoubleVectorValueObject> vectors, VisualizationValueObject vvo,
        Map<Long, LinkedHashMap<BioAssayValueObject, LinkedHashMap<ExperimentalFactor, Double>>> layouts ) {

    for ( DoubleVectorValueObject vector : vectors ) {
        Long eeId = vector.getExpressionExperiment().getId();
        List<String> names = new ArrayList<String>();

        if ( layouts != null && layouts.get( eeId ) != null ) {
            // layout available: take names in layout (design) order
            for ( BioAssayValueObject assay : layouts.get( eeId ).keySet() ) {
                names.add( assay.getName() ); // fIXME
            }
        } else {
            // no layout: fall back to the vector's own bioassay order
            names = getSampleNames( vector );
        }

        if ( !names.isEmpty() ) {
            log.debug( names.size() + " sample names!" );
            vvo.setSampleNames( names );
        }
    }
}
/**
 * Extract the bioassay names of a single vector, in the vector's own bioassay order.
 *
 * @param dedv vector whose bioassays to name
 * @return bioassay names in order
 */
private List<String> getSampleNames( DoubleVectorValueObject dedv ) {
    List<String> names = new ArrayList<String>();
    for ( BioAssayValueObject assay : dedv.getBioAssays() ) {
        names.add( assay.getName() );
    }
    return names;
}
/**
 * Takes the DEDVs and put them in point objects and normalize the values. returns a map of eeid to visValueObject.
 * Currently removes multiple hits for same gene. Tries to pick best DEDV. Organizes the experiments from lowest to
 * higest p-value
 *
 * @param dedvs vectors to display
 * @param genes ids of the genes involved, used to look up gene value objects
 * @param validatedProbes (bad name) map of experiment id -> diff ex results; used to find each experiment's
 *        minimum p-value for sorting
 * @param layouts experimental design layouts keyed by experiment id; may be null
 * @return one VisualizationValueObject per experiment, ordered by ascending minimum p-value
 */
private VisualizationValueObject[] makeDiffVisCollection( Collection<DoubleVectorValueObject> dedvs,
        List<Long> genes, Map<Long, Collection<DifferentialExpressionValueObject>> validatedProbes,
        Map<Long, LinkedHashMap<BioAssayValueObject, LinkedHashMap<ExperimentalFactor, Double>>> layouts ) {

    StopWatch watch = new StopWatch();
    watch.start();

    Map<Long, Collection<DoubleVectorValueObject>> vvoMap = new HashMap<Long, Collection<DoubleVectorValueObject>>();

    Map<Long, ExpressionExperimentValueObject> eeMap = new HashMap<Long, ExpressionExperimentValueObject>();

    // Organize by expression experiment
    for ( DoubleVectorValueObject dvvo : dedvs ) {
        ExpressionExperimentValueObject ee = dvvo.getExpressionExperiment();
        eeMap.put( ee.getId(), ee );
        if ( !vvoMap.containsKey( ee.getId() ) ) {
            vvoMap.put( ee.getId(), new HashSet<DoubleVectorValueObject>() );
        }
        vvoMap.get( ee.getId() ).add( dvvo );
    }

    // small sortable pairing of an experiment with its minimum p-value
    class EE2PValue implements Comparable<EE2PValue> {
        Long EEId;
        double pValue;

        public EE2PValue() {
            super();
        }

        public EE2PValue( Long eeid, double pValue ) {
            this();
            this.EEId = eeid;
            this.pValue = pValue;
        }

        @Override
        public int compareTo( EE2PValue o ) {
            // FIX: both branches used to test 'this.pValue > o.getPValue()', so -1 was unreachable and
            // the ordering was inconsistent; Double.compare gives the intended ascending order.
            return Double.compare( this.pValue, o.getPValue() );
        }

        public Long getEEId() {
            return EEId;
        }

        public double getPValue() {
            return pValue;
        }
    }

    List<EE2PValue> sortedEE = new ArrayList<EE2PValue>();

    // Need to sort the expression experiments by lowest p-value
    for ( Long eeId : vvoMap.keySet() ) {
        Collection<DifferentialExpressionValueObject> devos = validatedProbes.get( eeId );
        double minP = 1;
        if ( devos != null && !devos.isEmpty() ) {
            for ( DifferentialExpressionValueObject devo : devos ) {
                if ( minP > devo.getP() ) {
                    minP = devo.getP();
                }
            }
        }
        sortedEE.add( new EE2PValue( eeId, minP ) );
    }

    Collections.sort( sortedEE );

    VisualizationValueObject[] result = new VisualizationValueObject[vvoMap.keySet().size()];

    List<GeneValueObject> geneValueObjects = getGeneValueObjectList( genes );

    // Create collection of visualizationValueObject for flotr on js side
    int i = 0;
    for ( EE2PValue ee2P : sortedEE ) {

        VisualizationValueObject vvo = new VisualizationValueObject( vvoMap.get( ee2P.getEEId() ),
                geneValueObjects, ee2P.getPValue(), validatedProbes.get( ee2P.getEEId() ) );

        getSampleNames( vvoMap.get( ee2P.getEEId() ), vvo, layouts );

        if ( layouts != null && !layouts.isEmpty() && layouts.containsKey( ee2P.getEEId() ) ) {
            LinkedHashMap<BioAssayValueObject, LinkedHashMap<ExperimentalFactor, Double>> layout = layouts
                    .get( ee2P.getEEId() );
            this.prepareFactorsForFrontEndDisplay( vvo, layout );
        }

        /*
         * Set up the experimental designinfo so we can show it above the graph.
         */
        if ( layouts != null ) {
            ExpressionExperimentValueObject ee = eeMap.get( ee2P.getEEId() );
            log.debug( "setup experimental design layout profiles for " + ee );
            vvo.setUpFactorProfiles( layouts.get( ee.getId() ) );
        }

        result[i] = vvo;
        i++;
    }

    Long time = watch.getTime();
    if ( time > 1000 ) {
        log.info( "Created vis value objects in: " + time );
    }

    return result;
}
/**
 * Build the download-file header for a vector: a comment line identifying the experiment followed by the
 * tab-separated bioassay names. Reorganized vectors get a fixed column header instead, since their bioassay order
 * no longer matches.
 *
 * @param dedv vector to derive the header from
 * @return header text, terminated with a newline
 */
private String makeHeader( DoubleVectorValueObject dedv ) {

    if ( dedv.isReorganized() ) {
        /*
         * FIXME we should output the names in the 'right' order.
         */
        return "Gene Symbol\tGene Name\tProbe\n";
    }

    ExpressionExperimentValueObject ee = dedv.getExpressionExperiment();
    StringBuilder header = new StringBuilder( "# " + ee.getShortName() + " : " + ee.getName() + "\n" );

    List<String> assayNames = new ArrayList<String>();
    for ( BioAssayValueObject ba : dedv.getBioAssays() ) {
        assayNames.add( ba.getName() );
    }
    if ( !assayNames.isEmpty() ) {
        header.append( StringUtils.join( assayNames, "\t" ) );
        header.append( "\n" );
    }
    return header.toString();
}
/**
 * Group vectors by experiment, then by gene. Note that an experiment still gets an (empty) entry even when all of
 * its vectors lack gene associations.
 *
 * @param newResults vectors to organize
 * @param layouts currently unused (FIXME use the layouts)
 * @return map of experiment -> (gene id -> vectors for that gene)
 */
private Map<ExpressionExperimentValueObject, Map<Long, Collection<DoubleVectorValueObject>>> makeVectorMap(
        Collection<DoubleVectorValueObject> newResults,
        Map<Long, LinkedHashMap<BioAssay, LinkedHashMap<ExperimentalFactor, Double>>> layouts ) {

    // FIXME use the layouts.

    Map<ExpressionExperimentValueObject, Map<Long, Collection<DoubleVectorValueObject>>> byExperiment = new HashMap<ExpressionExperimentValueObject, Map<Long, Collection<DoubleVectorValueObject>>>();

    for ( DoubleVectorValueObject vector : newResults ) {
        ExpressionExperimentValueObject experiment = vector.getExpressionExperiment();

        // register the experiment even before we know whether this vector has genes
        Map<Long, Collection<DoubleVectorValueObject>> byGene = byExperiment.get( experiment );
        if ( byGene == null ) {
            byGene = new HashMap<Long, Collection<DoubleVectorValueObject>>();
            byExperiment.put( experiment, byGene );
        }

        if ( vector.getGenes() == null || vector.getGenes().isEmpty() ) {
            continue;
        }

        for ( Long geneId : vector.getGenes() ) {
            Collection<DoubleVectorValueObject> vectorsForGene = byGene.get( geneId );
            if ( vectorsForGene == null ) {
                vectorsForGene = new HashSet<DoubleVectorValueObject>();
                byGene.put( geneId, vectorsForGene );
            }
            vectorsForGene.add( vector );
        }
    }
    return byExperiment;
}
/**
 * Takes the DEDVs and put them in point objects and normalize the values. returns a map of eeid to visValueObject.
 * Currently removes multiple hits for same gene. Tries to pick best DEDV.
 *
 * @param dedvs vectors to display; if null/empty a single-element array holding null is returned
 * @param genes gene ids to display; if null/empty, the genes are derived from the vectors themselves
 * @param validatedProbes map of experiment id -> probe ids that underlie validated links; may be null
 * @param layouts experimental design layouts keyed by experiment id; may be null
 * @return one VisualizationValueObject per experiment
 */
private VisualizationValueObject[] makeVisCollection( Collection<DoubleVectorValueObject> dedvs,
        Collection<Long> genes, Map<Long, Collection<Long>> validatedProbes,
        Map<Long, LinkedHashMap<BioAssayValueObject, LinkedHashMap<ExperimentalFactor, Double>>> layouts ) {

    Map<Long, List<DoubleVectorValueObject>> vvoMap = new HashMap<Long, List<DoubleVectorValueObject>>();
    // Organize by expression experiment
    if ( dedvs == null || dedvs.isEmpty() ) return new VisualizationValueObject[1];

    for ( DoubleVectorValueObject dvvo : dedvs ) {
        ExpressionExperimentValueObject ee = dvvo.getExpressionExperiment();
        if ( !vvoMap.containsKey( ee.getId() ) ) {
            vvoMap.put( ee.getId(), new ArrayList<DoubleVectorValueObject>() );
        }
        vvoMap.get( ee.getId() ).add( dvvo );
    }

    List<GeneValueObject> geneValueObjects = new ArrayList<GeneValueObject>();
    if ( genes == null || genes.isEmpty() ) {
        // derive the gene list from the vectors when the caller did not supply one
        geneValueObjects = new ArrayList<GeneValueObject>( getGeneValueObjectsUsed( dedvs ).values() );
    } else {
        geneValueObjects = getGeneValueObjectList( new ArrayList<Long>( genes ) );
    }

    StopWatch timer = new StopWatch();
    timer.start();
    VisualizationValueObject[] result = new VisualizationValueObject[vvoMap.keySet().size()];
    // Create collection of visualizationValueObject for flotr on js side
    int i = 0;
    for ( Long ee : vvoMap.keySet() ) {

        Collection<Long> validatedProbeList = null;
        if ( validatedProbes != null ) {
            validatedProbeList = validatedProbes.get( ee );
        }
        Collection<DoubleVectorValueObject> vectors = vvoMap.get( ee );

        VisualizationValueObject vvo = new VisualizationValueObject( vectors, geneValueObjects, validatedProbeList );

        if ( vectors.size() > 0 ) {
            getSampleNames( vectors, vvo, layouts );
            if ( vectors.size() > 0 && layouts != null && !layouts.isEmpty() && layouts.containsKey( ee ) ) {
                // Set up the experimental designinfo so we can show it above the graph.
                LinkedHashMap<BioAssayValueObject, LinkedHashMap<ExperimentalFactor, Double>> layout = layouts
                        .get( ee );
                this.prepareFactorsForFrontEndDisplay( vvo, layout );
            }
        }

        /*
         * Set up the experimental design info so we can show it above the graph.
         */
        if ( layouts != null && layouts.get( ee ) != null ) {
            vvo.setUpFactorProfiles( layouts.get( ee ) );
        }

        result[i] = vvo;
        i++;
    }

    long time = timer.getTime();
    if ( time > 1000 ) {
        log.info( "Created vis value objects in: " + time );
    }

    return result;
}
/**
 * Prepare vvo for display on front end. Uses factors and factor values from layouts.
 *
 * Builds (a) a per-factor legend of value -> colour and (b) a per-sample list of factor -> [value, colour] pairs,
 * assigning colours from the predefined palette first and random colours once the palette is exhausted.
 *
 * @param vvo Note: This will be modified! It will be updated with the factorNames and factorValuesToNames
 * @param eeLayouts layout of bioassay -> (factor -> value-or-factor-value-id); if null/empty the vvo's factor
 *        fields are cleared
 */
private void prepareFactorsForFrontEndDisplay( VisualizationValueObject vvo,
        LinkedHashMap<BioAssayValueObject, LinkedHashMap<ExperimentalFactor, Double>> eeLayouts ) {

    if ( eeLayouts == null || eeLayouts.isEmpty() ) {
        log.warn( "No layouts, bail" );
        vvo.setFactorNames( null );
        vvo.setFactorValuesToNames( null );
        return;
    }

    LinkedHashSet<String> factorNames = getFactorNames( eeLayouts );

    // colours for conditions/factor values bar chart
    Map<String, Queue<String>> factorColoursMap = createFactorNameToColoursMap( factorNames );
    String missingValueColour = "#DCDCDC";

    Random random = new Random();

    LinkedHashMap<String, LinkedHashMap<String, String>> factorToValueNames = new LinkedHashMap<String, LinkedHashMap<String, String>>();
    // list of maps with entries: key = factorName, value=array of factor values
    // 1 entry per sample
    ArrayList<LinkedHashMap<String, String[]>> factorValueMaps = new ArrayList<LinkedHashMap<String, String[]>>();

    Collection<String> factorsMissingValues = new HashSet<String>();

    Map<Long, FactorValue> fvs = new HashMap<Long, FactorValue>(); // avoid loading repeatedly.
    Collection<ExperimentalFactor> seenFactors = new HashSet<ExperimentalFactor>();

    for ( BioAssayValueObject ba : eeLayouts.keySet() ) {

        // double should be the factorValue id, defined in
        // ubic.gemma.visualization.ExperimentalDesignVisualizationService.getExperimentalDesignLayout(ExpressionExperiment,
        // BioAssayDimension)
        LinkedHashMap<ExperimentalFactor, Double> factorMap = eeLayouts.get( ba );

        LinkedHashMap<String, String[]> factorNamesToValueColourPairs = new LinkedHashMap<String, String[]>(
                factorNames.size() );

        // this is defensive, should only come into play when there's something messed up with the data.
        // for every factor, add a missing-value entry (guards against missing data messing up the layout)
        for ( String facName : factorNames ) {
            String[] facValAndColour = new String[] { "No value", missingValueColour };
            factorNamesToValueColourPairs.put( facName, facValAndColour );
        }

        ExperimentalFactor factor;
        Double valueOrId;

        // for each experimental factor, store the name and value
        for ( Entry<ExperimentalFactor, Double> pair : factorMap.entrySet() ) {
            factor = pair.getKey();
            valueOrId = pair.getValue();

            if ( factor == null ) continue;
            /*
             * the double is only a double because it is meant to hold measurements when the factor is continuous if
             * the factor is categorical, the double value is set to the value's id see
             * ubic.gemma.visualization.ExperimentalDesignVisualizationService.getExperimentalDesignLayout(
             * ExpressionExperiment, BioAssayDimension)
             */
            if ( valueOrId == null || factor.getType() == null
                    || ( factor.getType().equals( FactorType.CATEGORICAL ) && factor.getFactorValues().isEmpty() ) ) {
                factorsMissingValues.add( factor.getName() );
                continue;
            }

            // pre-map this factor's values by id so we can resolve ids without a service call per sample
            if ( !seenFactors.contains( factor ) && factor.getType().equals( FactorType.CATEGORICAL ) ) {
                for ( FactorValue fv : factor.getFactorValues() ) {
                    fvs.put( fv.getId(), fv );
                }
            }

            String facValsStr;
            if ( factor.getType() == FactorType.CONTINUOUS ) {
                log.debug( "Experiment has continuous factor." );
                facValsStr = valueOrId.toString();
            } else {
                Long id = new Long( Math.round( valueOrId ) );
                FactorValue facVal = fvs.get( id );
                if ( facVal == null ) {
                    // probably already have it already ... so this is just in case.
                    facVal = factorValueService.load( id );
                    log.warn( "Surprisingly have a factorvalue not already accounted for by premapping: " + facVal );
                }

                if ( facVal == null ) {
                    log.warn( "Failed to load factorValue with id = " + valueOrId
                            + ". Load returned null. Experiment is: " + vvo.getEevo().toString() );
                    factorsMissingValues.add( factor.getName() );
                    continue;
                }

                fvs.put( facVal.getId(), facVal );
                facValsStr = getFactorValueDisplayString( facVal );
            }

            if ( !factorToValueNames.containsKey( factor.getName() ) ) {
                factorToValueNames.put( factor.getName(), new LinkedHashMap<String, String>() );
            }
            // assign colour if unassigned or fetch it if already assigned
            String colourString = "";
            if ( !factorToValueNames.get( factor.getName() ).containsKey( facValsStr ) ) {
                if ( factorColoursMap.containsKey( factor.getName() ) ) {
                    colourString = factorColoursMap.get( factor.getName() ).poll();
                }
                // FIX: was 'colourString == ""', a reference comparison that fails for any non-interned
                // empty string; isEmpty() checks the content.
                if ( colourString == null || colourString.isEmpty() ) { // ran out of predefined colours
                    colourString = getRandomColour( random );
                }
                factorToValueNames.get( factor.getName() ).put( facValsStr, colourString );
            } else {
                colourString = factorToValueNames.get( factor.getName() ).get( facValsStr );
            }
            String[] facValAndColour = new String[] { facValsStr, colourString };

            factorNamesToValueColourPairs.put( factor.getName(), facValAndColour );

        }
        factorValueMaps.add( factorNamesToValueColourPairs );
    }

    // add missing value entries here so they show up at the end of the legend's value lists
    if ( !factorsMissingValues.isEmpty() ) {
        for ( String factorName : factorsMissingValues ) {
            if ( !factorToValueNames.containsKey( factorName ) ) {
                factorToValueNames.put( factorName, new LinkedHashMap<String, String>() );
            }
            factorToValueNames.get( factorName ).put( "No value", missingValueColour );
        }
    }

    vvo.setFactorNames( factorToValueNames ); // this is summary of values & colours by factor, used for legend
    vvo.setFactorValuesToNames( factorValueMaps ); // this is list of maps for each sample
}
} | gemma-web/src/main/java/ubic/gemma/web/controller/expression/experiment/DEDVController.java | /*
* The Gemma project
*
* Copyright (c) 2008 University of British Columbia
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package ubic.gemma.web.controller.expression.experiment;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Queue;
import java.util.Random;
import java.util.Set;
import javax.servlet.http.HttpServletRequest;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.time.StopWatch;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.servlet.ModelAndView;
import ubic.basecode.math.DescriptiveWithMissing;
import ubic.gemma.analysis.expression.diff.DiffExpressionSelectedFactorCommand;
import ubic.gemma.analysis.expression.diff.DifferentialExpressionValueObject;
import ubic.gemma.analysis.expression.diff.GeneDifferentialExpressionService;
import ubic.gemma.analysis.preprocess.svd.SVDService;
import ubic.gemma.analysis.service.ExpressionDataFileService;
import ubic.gemma.expression.experiment.service.ExpressionExperimentService;
import ubic.gemma.genome.gene.service.GeneService;
import ubic.gemma.model.analysis.expression.diff.DifferentialExpressionAnalysisResult;
import ubic.gemma.model.analysis.expression.diff.DifferentialExpressionResultService;
import ubic.gemma.model.analysis.expression.diff.ExpressionAnalysisResultSet;
import ubic.gemma.model.analysis.expression.pca.ProbeLoading;
import ubic.gemma.model.association.coexpression.Probe2ProbeCoexpressionService;
import ubic.gemma.model.common.description.Characteristic;
import ubic.gemma.model.expression.bioAssay.BioAssay;
import ubic.gemma.model.expression.bioAssay.BioAssayValueObject;
import ubic.gemma.model.expression.bioAssayData.DesignElementDataVector;
import ubic.gemma.model.expression.bioAssayData.DesignElementDataVectorService;
import ubic.gemma.model.expression.bioAssayData.DoubleVectorValueObject;
import ubic.gemma.model.expression.bioAssayData.ProcessedExpressionDataVectorService;
import ubic.gemma.model.expression.designElement.CompositeSequence;
import ubic.gemma.model.expression.designElement.CompositeSequenceService;
import ubic.gemma.model.expression.experiment.BioAssaySet;
import ubic.gemma.model.expression.experiment.ExperimentalFactor;
import ubic.gemma.model.expression.experiment.ExperimentalFactorValueObject;
import ubic.gemma.model.expression.experiment.ExpressionExperiment;
import ubic.gemma.model.expression.experiment.ExpressionExperimentSubSetService;
import ubic.gemma.model.expression.experiment.ExpressionExperimentValueObject;
import ubic.gemma.model.expression.experiment.FactorType;
import ubic.gemma.model.expression.experiment.FactorValue;
import ubic.gemma.model.expression.experiment.FactorValueService;
import ubic.gemma.model.genome.Gene;
import ubic.gemma.model.genome.gene.GeneValueObject;
import ubic.gemma.util.EntityUtils;
import ubic.gemma.visualization.ExperimentalDesignVisualizationService;
import ubic.gemma.web.controller.visualization.ExpressionProfileDataObject;
import ubic.gemma.web.controller.visualization.VisualizationValueObject;
import ubic.gemma.web.view.TextView;
import cern.colt.list.DoubleArrayList;
/**
 * Exposes methods for accessing underlying Design Element Data Vectors. eg: ajax methods for visualization
 *
 * @author kelsey
 * @version $Id$
 */
@Controller
@RequestMapping("/dedv")
public class DEDVController {
    protected static Log log = LogFactory.getLog( DEDVController.class.getName() );
    // P-value cutoff used for differential expression when the caller does not supply a threshold.
    private static final double DEFAULT_THRESHOLD = 0.05;
    // Upper bound on genes / probes / diff-ex results handled per request, to keep responses manageable.
    private static final int MAX_RESULTS_TO_RETURN = 150;
    private static final int SAMPLE_SIZE = 20; // Number of dedvs to return if no genes given
/**
 * Assign colour lists (queues actually) to factors. The idea is that every factor value will get a colour assigned
 * from its factor's list. Only the first four distinct factor names get a predefined palette; any further factors
 * get an empty queue (callers fall back to random colours when a queue is exhausted).
 *
 * @param factorNames using names here because multiple experimentalFactors can have the same name (want them
 *        collapsed for legend)
 * @return map of factor name to a queue of colour hex strings
 */
private static Map<String, Queue<String>> createFactorNameToColoursMap( Collection<String> factorNames ) {
    // predefined palettes for conditions/factor values bar chart
    String[] blues = { "#85c6ff", "#6b90ff", "#105bfe", "#005589", "#0090e9", "#0400fe", "#008998", "#3e3c90",
            "#020090", "#105bfe" }; // 10
    String[] purples = { "#d19bff", "#a30064", "#7d00ea", "#893984", "#f05eb8", "#9c00d0", "#b66ccf", "#e7008f",
            "#670089", "#bf00b2", "#890080", "#8865a6", "#3f0076" }; // 13
    String[] redYellows = { "#ffd78d", "#d85d00", "#b40101", "#944343", "#ff6d48", "#d36b62", "#ff8001", "#c74f34",
            "#d89561", "#f8bc2e" }; // 10
    String[] greens = { "#98da95", "#82b998", "#257e21", "#36b52f", "#38b990", "#a9da5f", "#4cfe42", "#73c000",
            "#0fa345", "#99fe01", "#508500" }; // 10
    String[][] palettes = { blues, greens, purples, redYellows };

    Map<String, Queue<String>> factorColoursMap = new HashMap<String, Queue<String>>();
    int paletteIndex = 0;
    for ( String factorName : factorNames ) {
        if ( !factorColoursMap.containsKey( factorName ) ) {
            factorColoursMap.put( factorName, new LinkedList<String>() );
        }
        // hand the next palette to this factor's queue; beyond the last palette, the queue stays empty
        if ( paletteIndex < palettes.length ) {
            for ( String colour : palettes[paletteIndex] ) {
                factorColoursMap.get( factorName ).add( colour );
            }
        }
        paletteIndex++;
    }
    return factorColoursMap;
}
/*
 * Service-layer collaborators injected by Spring; used by the AJAX endpoints below.
 */
@Autowired
private CompositeSequenceService compositeSequenceService;
@Autowired
private DesignElementDataVectorService designElementDataVectorService;
@Autowired
private DifferentialExpressionResultService differentialExpressionResultService;
@Autowired
private ExperimentalDesignVisualizationService experimentalDesignVisualizationService;
@Autowired
private ExpressionExperimentService expressionExperimentService;
@Autowired
private SVDService svdService;
@Autowired
private GeneDifferentialExpressionService geneDifferentialExpressionService;
@Autowired
private GeneService geneService;
@Autowired
private FactorValueService factorValueService;
@Autowired
private Probe2ProbeCoexpressionService probe2ProbeCoexpressionService;
@Autowired
private ProcessedExpressionDataVectorService processedExpressionDataVectorService;
@Autowired
private ExpressionExperimentSubSetService expressionExperimentSubSetService;
/**
 * Given a collection of expression experiment Ids and a geneId returns a map of DEDV value objects to a collection
 * of genes. The EE info is in the value object. FIXME handle subsets.
 *
 * @param eeIds ids of the experiments to fetch vectors from
 * @param geneIds genes to restrict to; when null/empty, a sample of up to 50 vectors from the first experiment is
 *        returned instead
 * @return map of experiment VO to (gene id -> vectors), or null when no experiments could be loaded
 */
public Map<ExpressionExperimentValueObject, Map<Long, Collection<DoubleVectorValueObject>>> getDEDV(
        Collection<Long> eeIds, Collection<Long> geneIds ) {
    StopWatch watch = new StopWatch();
    watch.start();
    Collection<ExpressionExperiment> ees = expressionExperimentService.loadMultiple( eeIds );
    if ( ees == null || ees.isEmpty() ) return null;
    Collection<DoubleVectorValueObject> dedvMap;
    if ( geneIds == null || geneIds.isEmpty() ) {
        // no genes requested: sample up to 50 vectors from the first experiment only
        dedvMap = processedExpressionDataVectorService.getProcessedDataArrays( ees.iterator().next(), 50 );
    } else {
        dedvMap = processedExpressionDataVectorService.getProcessedDataArrays( ees, geneIds );
    }
    /*
     * Don't reorganize them -- the headings will be wrong.
     */
    Map<Long, LinkedHashMap<BioAssay, LinkedHashMap<ExperimentalFactor, Double>>> layouts = null;
    //
    // layouts= experimentalDesignVisualizationService .sortVectorDataByDesign( dedvMap );
    watch.stop();
    Long time = watch.getTime();
    if ( time > 1000 ) {
        log.info( "Retrieved " + dedvMap.size() + " DEDVs from " + eeIds.size() + " EEs in " + time + " ms." );
    }
    return makeVectorMap( dedvMap, layouts );
}
/**
 * AJAX exposed method: fetch the expression vectors for a query gene and one coexpressed gene across the given
 * experiments, for the coexpression visualization.
 *
 * @param eeIds ids of the experiments to draw vectors from
 * @param queryGeneId id of the query gene
 * @param coexpressedGeneId id of the coexpressed gene
 * @return visualization value objects; empty array when no experiments or no vectors are available
 */
public VisualizationValueObject[] getDEDVForCoexpressionVisualization( Collection<Long> eeIds, Long queryGeneId,
        Long coexpressedGeneId ) {
    StopWatch watch = new StopWatch();
    watch.start();
    Collection<ExpressionExperiment> ees = expressionExperimentService.loadMultiple( eeIds );
    if ( ees == null || ees.isEmpty() ) return new VisualizationValueObject[0];
    Gene queryGene = geneService.load( queryGeneId );
    Gene coexpressedGene = geneService.load( coexpressedGeneId );
    List<Long> genes = new ArrayList<Long>();
    genes.add( queryGeneId );
    genes.add( coexpressedGeneId );
    if ( genes.isEmpty() ) return new VisualizationValueObject[0];
    Collection<DoubleVectorValueObject> dedvs = processedExpressionDataVectorService.getProcessedDataArrays( ees,
            genes );
    // order samples by experimental design, then group them by factor so conditions line up in the display
    Map<Long, LinkedHashMap<BioAssayValueObject, LinkedHashMap<ExperimentalFactor, Double>>> layouts = null;
    layouts = experimentalDesignVisualizationService.sortVectorDataByDesign( dedvs );
    layouts = experimentalDesignVisualizationService.sortLayoutSamplesByFactor( layouts );
    watch.stop();
    Long time = watch.getTime();
    if ( dedvs.size() == 0 ) {
        log.warn( "No expression profiles (DEDVs) were available for the experiments: " + eeIds + " and genes(s) "
                + queryGene.getOfficialSymbol() + ", " + coexpressedGene.getOfficialSymbol() );
        return new VisualizationValueObject[0];
    }
    if ( time > 1000 ) {
        log.info( "Retrieved " + dedvs.size() + " DEDVs for " + eeIds.size() + " EEs and " + genes.size()
                + " genes in " + time + " ms." );
    }
    // mark which probes have validated coexpression links between the two genes
    Map<Long, Collection<Long>> validatedProbes = getProbeLinkValidation( ees, queryGene, coexpressedGene, dedvs );
    return makeVisCollection( dedvs, genes, validatedProbes, layouts );
}
/**
 * AJAX exposed method: fetch expression vectors for the given genes/experiments and pair them with validated
 * differential expression results for visualization.
 *
 * @param eeIds FIXME accommodate ExpressionExperimentSubSets. Currently we pass in the "source experiment" so we
 *        don't get the slice.
 * @param geneIds (could be just one)
 * @param threshold for 'significance'
 * @param factorMap Collection of DiffExpressionSelectedFactorCommand showing which factors to use.
 * @return visualization value objects, or null when inputs are empty or nothing could be loaded
 */
public VisualizationValueObject[] getDEDVForDiffExVisualization( Collection<Long> eeIds, Collection<Long> geneIds,
        Double threshold, Collection<DiffExpressionSelectedFactorCommand> factorMap ) {
    if ( eeIds.isEmpty() || geneIds.isEmpty() ) return null;
    StopWatch watch = new StopWatch();
    watch.start();
    Collection<? extends BioAssaySet> ees = expressionExperimentService.loadMultiple( eeIds );
    if ( ees == null || ees.isEmpty() ) return null;
    Collection<Gene> genes = geneService.loadMultiple( geneIds );
    if ( genes == null || genes.isEmpty() ) return null;
    Collection<DoubleVectorValueObject> dedvs = processedExpressionDataVectorService.getProcessedDataArrays( ees,
            geneIds );
    watch.stop();
    Long time = watch.getTime();
    log.info( "Retrieved " + dedvs.size() + " DEDVs for " + eeIds.size() + " EEs and " + geneIds.size()
            + " genes in " + time + " ms." );
    watch = new StopWatch();
    watch.start();
    // arrange samples according to the experimental design
    Map<Long, LinkedHashMap<BioAssayValueObject, LinkedHashMap<ExperimentalFactor, Double>>> layouts = null;
    layouts = experimentalDesignVisualizationService.sortVectorDataByDesign( dedvs );
    time = watch.getTime();
    if ( time > 100 ) {
        log.info( "Ran sortVectorDataByDesign on " + dedvs.size() + " DEDVs for 1 EE" + " in " + time
                + " ms (times <100ms not reported)." );
    }
    layouts = experimentalDesignVisualizationService.sortLayoutSamplesByFactor( layouts ); // required? yes, see
    // GSE11859
    time = watch.getTime();
    if ( time > 100 ) {
        log.info( "Ran sortLayoutSamplesByFactor on " + layouts.size() + " layouts" + " in " + time
                + " ms (times <100ms not reported)." );
    }
    watch = new StopWatch();
    watch.start();
    // collect per-experiment diff-ex results passing the threshold for the chosen factors
    Map<Long, Collection<DifferentialExpressionValueObject>> validatedProbes = getProbeDiffExValidation( genes,
            threshold, factorMap );
    watch.stop();
    time = watch.getTime();
    log.info( "Retrieved " + validatedProbes.size() + " valid probes in " + time + " ms." );
    return makeDiffVisCollection( dedvs, new ArrayList<Long>( geneIds ), validatedProbes, layouts );
}
/**
 * AJAX exposed method. Batch factor value analyses are filtered out.
 *
 * @param eeId id of the experiment (or of the subset, when isSubset is true)
 * @param geneId the single gene whose vectors are wanted
 * @param threshold (diff expression threshold); when null the default is used
 * @param isSubset Set to true if the ID is for an EE subset.
 * @return visualization value objects; empty array when the experiment could not be loaded, null when the gene
 *         could not be loaded
 */
public VisualizationValueObject[] getDEDVForDiffExVisualizationByExperiment( Long eeId, Long geneId,
        Double threshold, Boolean isSubset ) {
    StopWatch watch = new StopWatch();
    watch.start();
    // the id refers to either an experiment or an experiment subset, depending on the flag
    BioAssaySet ee = null;
    if ( isSubset ) {
        ee = expressionExperimentSubSetService.load( eeId );
    } else {
        ee = expressionExperimentService.load( eeId );
    }
    if ( ee == null ) return new VisualizationValueObject[] {}; // access denied, etc.
    if ( threshold == null ) {
        log.warn( "Threshold was null, using default" );
        threshold = DEFAULT_THRESHOLD;
    }
    Collection<DoubleVectorValueObject> dedvs;
    Gene g = geneService.load( geneId );
    if ( g == null ) return null;
    Collection<Long> genes = new ArrayList<Long>();
    genes.add( geneId );
    Collection<BioAssaySet> ees = new ArrayList<BioAssaySet>();
    ees.add( ee );
    dedvs = processedExpressionDataVectorService.getProcessedDataArrays( ees, genes );
    Long time = watch.getTime();
    watch.reset();
    watch.start();
    if ( time > 100 ) {
        log.info( "Retrieved " + dedvs.size() + " DEDVs for " + ee.getId() + " and " + "one gene in " + time
                + " ms (times <100ms not reported)." );
    }
    // arrange samples according to the experimental design
    Map<Long, LinkedHashMap<BioAssayValueObject, LinkedHashMap<ExperimentalFactor, Double>>> layouts = null;
    layouts = experimentalDesignVisualizationService.sortVectorDataByDesign( dedvs );
    time = watch.getTime();
    watch.reset();
    watch.start();
    if ( time > 100 ) {
        log.info( "Ran sortVectorDataByDesign on " + dedvs.size() + " DEDVs for 1 EE" + " in " + time
                + " ms (times <100ms not reported)." );
    }
    layouts = experimentalDesignVisualizationService.sortLayoutSamplesByFactor( layouts ); // required? yes, see
    // GSE11859
    time = watch.getTime();
    watch.reset();
    watch.start();
    if ( time > 100 ) {
        log.info( "Ran sortLayoutSamplesByFactor on " + layouts.size() + " layouts" + " in " + time
                + " ms (times <100ms not reported)." );
    }
    // single-entry map: experiment id -> diff-ex results for the gene at the given threshold
    Map<Long, Collection<DifferentialExpressionValueObject>> validatedProbes = new HashMap<Long, Collection<DifferentialExpressionValueObject>>();
    validatedProbes.put( ee.getId(),
            geneDifferentialExpressionService.getDifferentialExpression( g, ees, threshold, null ) );
    watch.stop();
    time = watch.getTime();
    if ( time > 100 ) {
        log.info( "Retrieved " + validatedProbes.size() + " valid probes in " + time + " ms." );
    }
    return makeDiffVisCollection( dedvs, new ArrayList<Long>( genes ), validatedProbes, layouts );
}
/**
 * AJAX exposed method: fetch the vectors that are differentially expressed in the given result set at the given
 * threshold, laid out by experimental design for visualization.
 *
 * @param resultSetId The resultset we're specifically interested. Note that this is what is used to choose the
 *        vectors, since it could be a subset of an experiment.
 * @param givenThreshold significance threshold; the default is used when null
 * @return collection of visualization value objects
 */
public VisualizationValueObject[] getDEDVForDiffExVisualizationByThreshold( Long resultSetId, Double givenThreshold ) {
    if ( resultSetId == null ) {
        throw new IllegalArgumentException( "ResultsetId cannot be null" );
    }

    double threshold;
    if ( givenThreshold == null ) {
        threshold = DEFAULT_THRESHOLD;
    } else {
        threshold = givenThreshold;
        log.debug( "Threshold specified not using default value: " + givenThreshold );
    }

    List<DoubleVectorValueObject> diffExVectors = getDiffExVectors( resultSetId, threshold, 50 );

    // arrange samples by design, then group them by factor so conditions line up in the display
    Map<Long, LinkedHashMap<BioAssayValueObject, LinkedHashMap<ExperimentalFactor, Double>>> layouts = experimentalDesignVisualizationService
            .sortVectorDataByDesign( diffExVectors );
    layouts = experimentalDesignVisualizationService.sortLayoutSamplesByFactor( layouts );

    return makeVisCollection( diffExVectors, null, null, layouts );
}
/**
 * AJAX exposed method: fetch the vectors most heavily loaded on the given principal component, laid out by
 * experimental design for visualization.
 *
 * @param eeId id of the expression experiment
 * @param component which principal component (1-based, as supplied by the client)
 * @param count how many top-loaded vectors to retrieve
 * @return visualization value objects, or null when no loadings are available
 */
public VisualizationValueObject[] getDEDVForPcaVisualization( Long eeId, int component, int count ) {
    // Note: a StopWatch was previously started here but never read; removed as dead code.
    Map<ProbeLoading, DoubleVectorValueObject> topLoadedVectors = this.svdService.getTopLoadedVectors( eeId,
            component, count );
    if ( topLoadedVectors == null ) return null;
    Map<Long, LinkedHashMap<BioAssayValueObject, LinkedHashMap<ExperimentalFactor, Double>>> layouts = null;
    Collection<DoubleVectorValueObject> values = topLoadedVectors.values();
    layouts = experimentalDesignVisualizationService.sortVectorDataByDesign( values );
    return makeVisCollection( values, null, null, layouts );
}
/**
 * AJAX exposed method: fetch expression vectors for the given experiments (optionally restricted to genes) laid
 * out by experimental design for visualization.
 *
 * @param eeIds ids of the experiments to fetch vectors from
 * @param geneIds genes of interest; when null/empty a sample of vectors is taken from the first experiment; when
 *        larger than the cap, only the first MAX_RESULTS_TO_RETURN are used
 * @return visualization value objects, or null when no experiments could be loaded
 */
public VisualizationValueObject[] getDEDVForVisualization( Collection<Long> eeIds, Collection<Long> geneIds ) {
    StopWatch watch = new StopWatch();
    watch.start();
    Collection<ExpressionExperiment> ees = expressionExperimentService.loadMultiple( eeIds );
    if ( ees == null || ees.isEmpty() ) return null;
    Collection<DoubleVectorValueObject> dedvs;
    if ( geneIds == null || geneIds.isEmpty() ) {
        // no genes given: just sample SAMPLE_SIZE vectors from the first experiment
        dedvs = processedExpressionDataVectorService.getProcessedDataArrays( ees.iterator().next(), SAMPLE_SIZE );
    } else {
        if ( geneIds.size() > MAX_RESULTS_TO_RETURN ) {
            log.warn( geneIds.size() + " genes for visualization. Too many. Only using first "
                    + MAX_RESULTS_TO_RETURN + " genes. " );
            List<Long> reducedGeneIds = new ArrayList<Long>( geneIds );
            geneIds = reducedGeneIds.subList( 0, MAX_RESULTS_TO_RETURN );
        }
        dedvs = processedExpressionDataVectorService.getProcessedDataArrays( ees, geneIds );
    }
    // watch.stop();
    Long time = watch.getTime();
    watch.reset();
    watch.start();
    if ( time > 100 ) {
        log.info( "Retrieved " + dedvs.size() + " DEDVs for " + eeIds.size() + " EEs"
                + ( geneIds == null ? " sample" : " for " + geneIds.size() + " genes " ) + " in " + time
                + " ms (times <100ms not reported)." );
    }
    // arrange samples according to the experimental design
    Map<Long, LinkedHashMap<BioAssayValueObject, LinkedHashMap<ExperimentalFactor, Double>>> layouts = null;
    layouts = experimentalDesignVisualizationService.sortVectorDataByDesign( dedvs );
    time = watch.getTime();
    watch.reset();
    watch.start();
    if ( time > 100 ) {
        log.info( "Ran sortVectorDataByDesign on " + dedvs.size() + " DEDVs for " + eeIds.size() + " EEs" + " in "
                + time + " ms (times <100ms not reported)." );
    }
    layouts = experimentalDesignVisualizationService.sortLayoutSamplesByFactor( layouts ); // required?
    watch.stop();
    time = watch.getTime();
    if ( time > 100 ) {
        log.info( "Ran sortLayoutSamplesByFactor on " + layouts.size() + " layouts" + " in " + time
                + " ms (times <100ms not reported)." );
    }
    return makeVisCollection( dedvs, geneIds, null, layouts );
}
/**
 * AJAX exposed method: fetch expression vectors for specific probes (composite sequences) in the given
 * experiments, laid out by experimental design for visualization.
 *
 * @param eeIds ids of the experiments to fetch vectors from
 * @param probeIds ids of the probes (composite sequences) of interest
 * @return visualization value objects, or null when inputs are empty or nothing could be loaded
 */
public VisualizationValueObject[] getDEDVForVisualizationByProbe( Collection<Long> eeIds, Collection<Long> probeIds ) {
    if ( eeIds.isEmpty() || probeIds.isEmpty() ) return null;
    StopWatch watch = new StopWatch();
    watch.start();
    Collection<ExpressionExperiment> ees = expressionExperimentService.loadMultiple( eeIds );
    if ( ees == null || ees.isEmpty() ) return null;
    Collection<CompositeSequence> probes = this.compositeSequenceService.loadMultiple( probeIds );
    if ( probes == null || probes.isEmpty() ) return null;
    Collection<DoubleVectorValueObject> dedvs = processedExpressionDataVectorService.getProcessedDataArraysByProbe(
            ees, probes );
    // arrange samples according to the experimental design
    Map<Long, LinkedHashMap<BioAssayValueObject, LinkedHashMap<ExperimentalFactor, Double>>> layouts = experimentalDesignVisualizationService
            .sortVectorDataByDesign( dedvs );
    watch.stop();
    Long time = watch.getTime();
    log.info( "Retrieved " + dedvs.size() + " DEDVs for " + eeIds.size() + " EEs and " + probeIds.size()
            + " genes in " + time + " ms." );
    return makeVisCollection( dedvs, null, null, layouts );
}
/**
 * Load specific data vectors by id and return standardized expression profile data objects. Vectors that cannot be
 * converted (IllegalArgumentException) are logged and skipped.
 *
 * @param dedvIds ids of the design element data vectors to load
 * @return expression profile data objects with standardized data
 */
public Collection<ExpressionProfileDataObject> getVectorData( Collection<Long> dedvIds ) {
    List<ExpressionProfileDataObject> result = new ArrayList<ExpressionProfileDataObject>();
    for ( Long id : dedvIds ) {
        DesignElementDataVector vector = this.designElementDataVectorService.load( id );
        try {
            DoubleVectorValueObject dvvo = new DoubleVectorValueObject( vector );
            ExpressionProfileDataObject epdo = new ExpressionProfileDataObject( dvvo );
            // standardize mutates the list in place; the result is read back via elements()
            DoubleArrayList doubleArrayList = new cern.colt.list.DoubleArrayList( epdo.getData() );
            DescriptiveWithMissing.standardize( doubleArrayList );
            epdo.setData( doubleArrayList.elements() );
            result.add( epdo );
        } catch ( IllegalArgumentException iae ) {
            log.warn( iae );
        }
    }
    // TODO fill in gene; normalize and clip if desired.; watch for invalid ids.
    return result;
}
/**
 * Returns a collection of {@link Long} ids parsed from a comma-delimited string. Tokens that do not parse as longs
 * are logged and skipped.
 *
 * @param idString comma-delimited ids, may be null
 * @return ids parsed from the string; empty when the input is null or contains no valid ids
 */
protected Collection<Long> extractIds( String idString ) {
    Collection<Long> ids = new ArrayList<Long>();
    if ( idString == null ) {
        return ids;
    }
    for ( String token : idString.split( "," ) ) {
        try {
            ids.add( Long.parseLong( token.trim() ) );
        } catch ( NumberFormatException e ) {
            log.warn( "invalid id " + token );
        }
    }
    return ids;
}
/*
 * Handle case of text export of the results. Depending on request parameters this serves: (1) top PCA-loaded
 * vectors ("pca" + "component"), (2) diff-ex vectors for a result set ("rs" + "thresh"), or (3) a generic listing
 * by experiment/gene ids ("ee" + "g").
 *
 * @see org.springframework.web.servlet.mvc.AbstractFormController#handleRequestInternal(javax.servlet.http.
 * HttpServletRequest, javax.servlet.http.HttpServletResponse) Called by /Gemma/dedv/downloadDEDV.html
 */
@RequestMapping("/downloadDEDV.html")
protected ModelAndView handleRequestInternal( HttpServletRequest request ) throws Exception {
    StopWatch watch = new StopWatch();
    watch.start();
    Collection<Long> geneIds = extractIds( request.getParameter( "g" ) ); // might not be any
    Collection<Long> eeIds = extractIds( request.getParameter( "ee" ) ); // might not be there
    ModelAndView mav = new ModelAndView( new TextView() );
    if ( eeIds == null || eeIds.isEmpty() ) {
        mav.addObject( "text", "Input empty for finding DEDVs: " + geneIds + " and " + eeIds );
        return mav;
    }
    String threshSt = request.getParameter( "thresh" );
    String resultSetIdSt = request.getParameter( "rs" );
    // "thresh" doubles as the p-value threshold (diff-ex case) and the vector count (PCA case)
    Double thresh = 100.0;
    if ( StringUtils.isNotBlank( threshSt ) ) {
        try {
            thresh = Double.parseDouble( threshSt );
        } catch ( NumberFormatException e ) {
            throw new RuntimeException( "Threshold was not a valid value: " + threshSt );
        }
    }
    Map<ExpressionExperimentValueObject, Map<Long, Collection<DoubleVectorValueObject>>> result = null;
    if ( request.getParameter( "pca" ) != null ) {
        // PCA case: export the top-loaded vectors for the requested component
        int component = Integer.parseInt( request.getParameter( "component" ) );
        Long eeId = eeIds.iterator().next();
        Map<ProbeLoading, DoubleVectorValueObject> topLoadedVectors = this.svdService.getTopLoadedVectors( eeId,
                component, thresh.intValue() );
        if ( topLoadedVectors == null ) return null;
        mav.addObject( "text", format4File( topLoadedVectors.values() ) );
        return mav;
    }
    /*
     * The following should be set if we're viewing diff. ex results.
     */
    Long resultSetId = null;
    if ( StringUtils.isNumeric( resultSetIdSt ) ) {
        resultSetId = Long.parseLong( resultSetIdSt );
    }
    if ( thresh != null && resultSetId != null ) {
        /*
         * Diff ex case.
         */
        Long eeId = eeIds.iterator().next();
        Collection<DoubleVectorValueObject> diffExVectors = getDiffExVectors( resultSetId, thresh, 50 );
        if ( diffExVectors == null || diffExVectors.isEmpty() ) {
            mav.addObject( "text", "No results" );
            return mav;
        }
        /*
         * Organize the vectors in the same way expected by the ee+gene type of request.
         */
        ExpressionExperimentValueObject ee = expressionExperimentService.loadValueObject( eeId );
        result = new HashMap<ExpressionExperimentValueObject, Map<Long, Collection<DoubleVectorValueObject>>>();
        Map<Long, Collection<DoubleVectorValueObject>> gmap = new HashMap<Long, Collection<DoubleVectorValueObject>>();
        for ( DoubleVectorValueObject dv : diffExVectors ) {
            for ( Long g : dv.getGenes() ) {
                if ( !gmap.containsKey( g ) ) {
                    gmap.put( g, new HashSet<DoubleVectorValueObject>() );
                }
                gmap.get( g ).add( dv );
            }
        }
        result.put( ee, gmap );
    } else {
        // Generic listing.
        result = getDEDV( eeIds, geneIds );
    }
    if ( result == null || result.isEmpty() ) {
        mav.addObject( "text", "No results" );
        return mav;
    }
    mav.addObject( "text", format4File( result ) );
    watch.stop();
    Long time = watch.getTime();
    if ( time > 100 ) {
        log.info( "Retrieved and Formated" + result.keySet().size() + " DEDVs for eeIDs: " + eeIds
                + " and GeneIds: "
                + geneIds + " in : " + time + " ms." );
    }
    return mav;
}
/**
 * Format a collection of expression data vectors as tab-delimited text for download. One row per vector: gene
 * symbols, gene names, probe name, then the data values to three decimal places.
 *
 * @param vectors vectors to render
 * @return tab-delimited string including the Gemma header and the standard disclaimer
 */
private String format4File( Collection<DoubleVectorValueObject> vectors ) {
    StringBuffer converted = new StringBuffer();
    converted.append( "# Generated by Gemma\n# " + new Date() + "\n" );
    converted.append( ExpressionDataFileService.DISCLAIMER + "#\n" );
    boolean didHeader = false;
    Map<Long, GeneValueObject> gmap = getGeneValueObjectsUsed( vectors );
    for ( DoubleVectorValueObject vec : vectors ) {
        if ( !didHeader ) {
            converted.append( makeHeader( vec ) );
            didHeader = true;
        }
        List<String> geneSymbols = new ArrayList<String>();
        List<String> geneNames = new ArrayList<String>();
        for ( Long g : vec.getGenes() ) {
            GeneValueObject gene = gmap.get( g );
            assert gene != null;
            geneSymbols.add( gene.getOfficialSymbol() );
            geneNames.add( gene.getOfficialName() );
        }
        converted.append( StringUtils.join( geneSymbols, "|" ) + "\t" + StringUtils.join( geneNames, "|" ) + "\t" );
        converted.append( vec.getDesignElement().getName() + "\t" );
        // FIX: was 'getData() != null || getData().length != 0', which throws NPE when getData() is null.
        if ( vec.getData() != null && vec.getData().length != 0 ) {
            for ( double data : vec.getData() ) {
                converted.append( String.format( "%.3f", data ) + "\t" );
            }
            converted.deleteCharAt( converted.length() - 1 ); // remove the trailing tab // FIXME just joind
        }
        converted.append( "\n" );
    }
    return converted.toString();
}
/**
 * Converts the given map into a tab delimited String. Rows are grouped by experiment; within an experiment, one
 * header is written, then one row per (gene, vector) pair: gene symbol, gene name, probe name, data values.
 *
 * @param result map of experiment VO to (gene id -> vectors), as produced by getDEDV
 * @return tab-delimited string including the Gemma header and the standard disclaimer
 */
private String format4File(
        Map<ExpressionExperimentValueObject, Map<Long, Collection<DoubleVectorValueObject>>> result ) {
    StringBuffer converted = new StringBuffer();
    Map<Long, GeneValueObject> genes = new HashMap<Long, GeneValueObject>(); // Saves us from loading genes
    // unnecessarily
    converted.append( "# Generated by Gemma\n# " + new Date() + "\n" );
    converted.append( ExpressionDataFileService.DISCLAIMER + "#\n" );
    for ( ExpressionExperimentValueObject ee : result.keySet() ) {
        boolean didHeaderForEe = false;
        Collection<Long> geneIds = result.get( ee ).keySet();
        for ( Long geneId : geneIds ) {
            GeneValueObject gene;
            if ( genes.containsKey( geneId ) ) {
                gene = genes.get( geneId );
            } else {
                gene = geneService.loadValueObject( geneId );
                genes.put( geneId, gene );
            }
            String geneName = gene.getOfficialSymbol();
            Collection<DoubleVectorValueObject> vecs = result.get( ee ).get( geneId );
            for ( DoubleVectorValueObject dedv : vecs ) {
                if ( !didHeaderForEe ) {
                    converted.append( makeHeader( dedv ) );
                    didHeaderForEe = true;
                }
                converted.append( geneName + "\t" + gene.getOfficialName() + "\t" );
                converted.append( dedv.getDesignElement().getName() + "\t" );
                // FIX: was 'getData() != null || getData().length != 0', which throws NPE when getData() is null.
                if ( dedv.getData() != null && dedv.getData().length != 0 ) {
                    for ( double data : dedv.getData() ) {
                        converted.append( String.format( "%.3f", data ) + "\t" );
                    }
                    converted.deleteCharAt( converted.length() - 1 ); // remove the trailing tab
                }
                converted.append( "\n" );
            }
        }
        converted.append( "\n" );
    }
    converted.append( "\r\n" );
    return converted.toString();
}
/**
 * Fetch the expression vectors for probes passing the threshold in the given diff-ex result set, sorted by
 * ascending p-value.
 *
 * @param resultSetId id of the differential expression analysis result set
 * @param threshold p-value cutoff for including probes
 * @param minNumberOfResults minimum number of results to return even if fewer pass the threshold
 * @return vectors with their pvalue fields populated, sorted by p-value; null if the result set does not exist
 */
private List<DoubleVectorValueObject> getDiffExVectors( Long resultSetId, Double threshold, int minNumberOfResults ) {
    StopWatch watch = new StopWatch();
    watch.start();
    ExpressionAnalysisResultSet ar = differentialExpressionResultService.loadAnalysisResultSet( resultSetId );
    if ( ar == null ) {
        log.warn( "No diff ex result set with ID=" + resultSetId );
        return null;
    }
    differentialExpressionResultService.thawLite( ar );
    if ( watch.getTime() > 200 ) {
        log.info( "Thaw result set: " + watch.getTime() );
    }
    BioAssaySet analyzedSet = ar.getAnalysis().getExperimentAnalyzed();
    Collection<BioAssaySet> ees = new ArrayList<BioAssaySet>();
    ees.add( analyzedSet );
    List<DifferentialExpressionAnalysisResult> ee2probeResults = differentialExpressionResultService
            .findInResultSet( ar, threshold, MAX_RESULTS_TO_RETURN, minNumberOfResults );
    Collection<CompositeSequence> probes = new HashSet<CompositeSequence>();
    // Map<CompositeSequenceId, pValue>
    // using id instead of entity for map key because want to use a value object for retrieval later
    Map<Long, Double> pvalues = new HashMap<Long, Double>();
    for ( DifferentialExpressionAnalysisResult par : ee2probeResults ) {
        probes.add( par.getProbe() );
        pvalues.put( par.getProbe().getId(), par.getPvalue() );
    }
    watch.reset();
    watch.start();
    List<DoubleVectorValueObject> dedvs = new ArrayList<DoubleVectorValueObject>(
            processedExpressionDataVectorService.getProcessedDataArraysByProbe( ees, probes ) );
    watch.stop();
    if ( watch.getTime() > 1000 ) {
        log.info( "Fetch " + dedvs.size() + " DEDVs for " + probes.size() + " genes in " + watch.getTime()
                + " ms. (result set=" + ar.getId() + ")" );
    }
    /*
     * Resort by attaching each probe's p-value to its vector and ordering ascending (nulls sort first).
     */
    for ( DoubleVectorValueObject v : dedvs ) {
        v.setPvalue( pvalues.get( v.getDesignElement().getId() ) );
    }
    Collections.sort( dedvs, new Comparator<DoubleVectorValueObject>() {
        @Override
        public int compare( DoubleVectorValueObject o1, DoubleVectorValueObject o2 ) {
            if ( o1.getPvalue() == null ) return -1;
            if ( o2.getPvalue() == null ) return 1;
            return o1.getPvalue().compareTo( o2.getPvalue() );
        }
    } );
    return dedvs;
}
/**
 * Collect the distinct experimental factor names appearing in the layouts of an experiment, preserving first-seen
 * order (needed for a stable legend).
 *
 * @param eeLayouts per-sample layout: bioassay -> (factor -> value)
 * @return ordered set of factor names
 */
private LinkedHashSet<String> getFactorNames(
        LinkedHashMap<BioAssayValueObject, LinkedHashMap<ExperimentalFactor, Double>> eeLayouts ) {
    LinkedHashSet<String> factorNames = new LinkedHashSet<String>(); // need uniqueness & order
    for ( LinkedHashMap<ExperimentalFactor, Double> factorMap : eeLayouts.values() ) {
        for ( ExperimentalFactor factor : factorMap.keySet() ) {
            if ( factor != null ) {
                factorNames.add( factor.getName() );
            }
        }
    }
    return factorNames;
}
/**
 * Build a human-readable display string for a factor value: its characteristics' values joined by ", ", or the raw
 * value when there are no characteristics, with a fallback message when neither yields text.
 *
 * @param facVal the factor value to describe (must not be null)
 * @return comma-separated display string, never empty
 */
private String getFactorValueDisplayString( FactorValue facVal ) {
    StringBuffer facValsStrBuff = new StringBuffer();
    // FIX: previously getCharacteristics() was null-checked but then iterated unconditionally, causing an NPE
    // whenever it was null; now the loop only runs for a non-empty collection.
    Collection<Characteristic> characteristics = facVal.getCharacteristics();
    if ( characteristics == null || characteristics.isEmpty() ) {
        facValsStrBuff.append( facVal.getValue() + ", " );
    } else {
        for ( Characteristic characteristic : characteristics ) {
            facValsStrBuff.append( characteristic.getValue() + ", " );
        }
    }
    if ( facValsStrBuff.length() > 0 ) {
        // strip the trailing ", "
        facValsStrBuff.delete( facValsStrBuff.length() - 2, facValsStrBuff.length() );
    }
    if ( facValsStrBuff.length() == 0 ) {
        facValsStrBuff.append( "FactorValue id:" + Math.round( facVal.getId() )
                + " was not null but no value was found." );
    }
    return facValsStrBuff.toString();
}
/**
 * Load gene value objects for the given ids, returned in the same order as the input
 * list; ids that cannot be resolved are silently skipped.
 *
 * @param genes gene ids, in the desired output order
 * @return value objects for the resolvable ids, input order preserved
 */
private List<GeneValueObject> getGeneValueObjectList( List<Long> genes ) {
    Map<Long, GeneValueObject> byId = EntityUtils.getIdMap( geneService.loadValueObjects( genes ) );

    List<GeneValueObject> ordered = new ArrayList<GeneValueObject>();
    for ( Long geneId : genes ) {
        if ( byId.containsKey( geneId ) ) {
            ordered.add( byId.get( geneId ) );
        }
    }
    return ordered;
}
/**
 * Determine which genes the given data vectors refer to, and load value objects for
 * all of them.
 *
 * @param vectors data vectors; entries that are null or carry no gene info are ignored
 * @return map of gene id -> gene value object
 */
private Map<Long, GeneValueObject> getGeneValueObjectsUsed( Collection<DoubleVectorValueObject> vectors ) {
    Set<Long> usedGeneIds = new HashSet<Long>();
    for ( DoubleVectorValueObject vector : vectors ) {
        if ( vector == null || vector.getGenes() == null ) {
            continue;
        }
        usedGeneIds.addAll( vector.getGenes() );
    }
    return EntityUtils.getIdMap( geneService.loadValueObjects( usedGeneIds ) );
}
/**
 * This is probably no longer being really used?
 *
 * Collect, per experiment, the differential expression results for the given genes that
 * meet the threshold and involve one of the user-selected factors.
 *
 * @param genes genes to check
 * @param threshold corrected p-value threshold
 * @param factorMap selected factors; must not be null
 * @return map of experiment id -> matching differential expression value objects
 */
private Map<Long, Collection<DifferentialExpressionValueObject>> getProbeDiffExValidation( Collection<Gene> genes,
        Double threshold, Collection<DiffExpressionSelectedFactorCommand> factorMap ) {
    if ( factorMap == null )
        throw new IllegalArgumentException( "Factor information is missing, please make sure factors are selected." );

    Map<Long, Collection<DifferentialExpressionValueObject>> resultsByExperiment = new HashMap<Long, Collection<DifferentialExpressionValueObject>>();

    // The factor ids the user actually selected.
    Collection<Long> selectedFactorIds = new HashSet<Long>();
    for ( DiffExpressionSelectedFactorCommand factor : factorMap ) {
        selectedFactorIds.add( factor.getEfId() );
    }

    for ( Gene gene : genes ) {
        Collection<DifferentialExpressionValueObject> differentialExpression = geneDifferentialExpressionService
                .getDifferentialExpression( gene, threshold, factorMap );

        for ( DifferentialExpressionValueObject result : differentialExpression ) {
            assert result.getCorrP() <= threshold;
            Long eeId = result.getExpressionExperiment().getId();

            if ( !resultsByExperiment.containsKey( eeId ) ) {
                resultsByExperiment.put( eeId, new HashSet<DifferentialExpressionValueObject>() );
            }

            // Keep the result only if it involves one of the selected factors.
            for ( ExperimentalFactorValueObject fac : result.getExperimentalFactors() ) {
                if ( selectedFactorIds.contains( fac.getId() ) ) {
                    resultsByExperiment.get( eeId ).add( result );
                }
            }
        }
    }
    return resultsByExperiment;
}
/**
 * Identify which probes were 'responsible' for the coexpression links.
 *
 * @param ees experiments to check
 * @param queryGene the query gene of the link
 * @param coexpressedGene the coexpressed gene of the link
 * @param dedvs data vectors; each is expected to map to one of the two genes
 * @return map of EEID -> collection ProbeIDs which underlie the stored coexpression links.
 */
private Map<Long, Collection<Long>> getProbeLinkValidation( Collection<ExpressionExperiment> ees, Gene queryGene,
        Gene coexpressedGene, Collection<DoubleVectorValueObject> dedvs ) {
    StopWatch watch = new StopWatch();
    watch.start();
    // Probe ids per experiment, separately for each side of the link.
    Map<Long, Collection<Long>> coexpressedEE2ProbeIds = new HashMap<Long, Collection<Long>>();
    Map<Long, Collection<Long>> queryEE2ProbeIds = new HashMap<Long, Collection<Long>>();
    /*
     * Get the probes for the vectors, organize by ee.
     */
    for ( DoubleVectorValueObject dedv : dedvs ) {
        ExpressionExperimentValueObject ee = dedv.getExpressionExperiment();
        if ( dedv.getGenes().contains( queryGene.getId() ) ) {
            if ( !queryEE2ProbeIds.containsKey( ee.getId() ) ) {
                queryEE2ProbeIds.put( ee.getId(), new HashSet<Long>() );
            }
            queryEE2ProbeIds.get( ee.getId() ).add( dedv.getDesignElement().getId() );
        } else if ( dedv.getGenes().contains( coexpressedGene.getId() ) ) {
            if ( !coexpressedEE2ProbeIds.containsKey( ee.getId() ) ) {
                coexpressedEE2ProbeIds.put( ee.getId(), new HashSet<Long>() );
            }
            coexpressedEE2ProbeIds.get( ee.getId() ).add( dedv.getDesignElement().getId() );
        } else {
            // Vector maps to neither gene: data inconsistency, log and skip.
            log.error( "Dedv doesn't belong to coexpressed or query gene. QueryGene= " + queryGene
                    + "CoexpressedGene= " + coexpressedGene + "DEDV " + dedv.getId() + " has genes: "
                    + dedv.getGenes() );
        }
    }
    Map<Long, Collection<Long>> validatedProbes = new HashMap<Long, Collection<Long>>();
    for ( ExpressionExperiment ee : ees ) {
        Collection<Long> queryProbeIds = queryEE2ProbeIds.get( ee.getId() );
        Collection<Long> coexpressedProbeIds = coexpressedEE2ProbeIds.get( ee.getId() );
        // Both sides of the link must have probes in this experiment, otherwise skip it.
        if ( queryProbeIds == null || queryProbeIds.isEmpty() ) {
            log.warn( "Unexpectedly no probes for " + queryGene + " in " + ee );
            continue;
        }
        if ( coexpressedProbeIds == null || coexpressedProbeIds.isEmpty() ) {
            log.warn( "Unexpectedly no probes for " + coexpressedGene + " in " + ee );
            continue;
        }
        /*
         * Note: this does a probe-level query
         */
        Collection<Long> probesInLinks = this.probe2ProbeCoexpressionService.getCoexpressedProbes( queryProbeIds,
                coexpressedProbeIds, ee, queryGene.getTaxon().getCommonName() );
        if ( probesInLinks.isEmpty() ) {
            log.warn( "Unexpectedly no probes for link between " + queryGene + " -and- " + coexpressedGene + " in "
                    + ee );
        }
        // Note: an empty result is still recorded for the experiment.
        validatedProbes.put( ee.getId(), probesInLinks );
    }
    watch.stop();
    Long time = watch.getTime();
    if ( time > 1000 ) {
        log.info( "Validation of probes for " + ees.size() + " experiments in " + time + "ms." );
    }
    return validatedProbes;
}
/**
 * Generate a random colour of the form "#X0Y0Z0": one random hex digit per RGB channel,
 * each followed by a literal zero, giving a palette of 4096 possible colours.
 *
 * @param random source of randomness
 * @return colour string such as "#a040f0"
 */
private String getRandomColour( Random random ) {
    StringBuilder colour = new StringBuilder( "#" );
    for ( int channel = 0; channel < 3; channel++ ) {
        colour.append( Integer.toHexString( random.nextInt( 16 ) ) ).append( '0' );
    }
    return colour.toString();
}
/**
 * Get the names we'll use for the columns of the vectors: taken from the layout when one
 * is available for the experiment, otherwise from the vector's own bioassays.
 *
 * @param vectors data vectors to label
 * @param vvo target visualization object; its sample names are set when any were found
 * @param layouts layouts keyed by experiment id; may be null
 */
private void getSampleNames( Collection<DoubleVectorValueObject> vectors, VisualizationValueObject vvo,
        Map<Long, LinkedHashMap<BioAssayValueObject, LinkedHashMap<ExperimentalFactor, Double>>> layouts ) {

    for ( DoubleVectorValueObject vector : vectors ) {
        Long eeId = vector.getExpressionExperiment().getId();
        List<String> sampleNames;

        if ( layouts != null && layouts.get( eeId ) != null ) {
            // Use the layout's ordering of bioassays.
            sampleNames = new ArrayList<String>();
            for ( BioAssayValueObject assay : layouts.get( eeId ).keySet() ) {
                sampleNames.add( assay.getName() ); // fIXME
            }
        } else {
            // No layout: fall back to the vector's own bioassay order.
            sampleNames = getSampleNames( vector );
        }

        if ( !sampleNames.isEmpty() ) {
            log.debug( sampleNames.size() + " sample names!" );
            vvo.setSampleNames( sampleNames );
        }
    }
}
/**
 * List the sample (bioassay) names for a single data vector, in the vector's own order.
 *
 * @param dedv data vector
 * @return bioassay names
 */
private List<String> getSampleNames( DoubleVectorValueObject dedv ) {
    List<String> names = new ArrayList<String>();
    for ( BioAssayValueObject assay : dedv.getBioAssays() ) {
        names.add( assay.getName() );
    }
    return names;
}
/**
 * Takes the DEDVs and put them in point objects and normalize the values. returns a map of eeid to visValueObject.
 * Currently removes multiple hits for same gene. Tries to pick best DEDV. Organizes the experiments from lowest to
 * highest p-value.
 *
 * @param dedvs data vectors to display
 * @param genes gene ids, in display order
 * @param validatedProbes map of experiment id -> diff-ex results backing the vectors (bad name)
 * @param layouts experimental design layouts keyed by experiment id; may be null
 * @return one VisualizationValueObject per experiment, sorted by the experiment's best p-value
 */
private VisualizationValueObject[] makeDiffVisCollection( Collection<DoubleVectorValueObject> dedvs,
        List<Long> genes, Map<Long, Collection<DifferentialExpressionValueObject>> validatedProbes,
        Map<Long, LinkedHashMap<BioAssayValueObject, LinkedHashMap<ExperimentalFactor, Double>>> layouts ) {

    StopWatch watch = new StopWatch();
    watch.start();

    Map<Long, Collection<DoubleVectorValueObject>> vvoMap = new HashMap<Long, Collection<DoubleVectorValueObject>>();
    Map<Long, ExpressionExperimentValueObject> eeMap = new HashMap<Long, ExpressionExperimentValueObject>();

    // Organize by expression experiment
    for ( DoubleVectorValueObject dvvo : dedvs ) {
        ExpressionExperimentValueObject ee = dvvo.getExpressionExperiment();
        eeMap.put( ee.getId(), ee );
        if ( !vvoMap.containsKey( ee.getId() ) ) {
            vvoMap.put( ee.getId(), new HashSet<DoubleVectorValueObject>() );
        }
        vvoMap.get( ee.getId() ).add( dvvo );
    }

    // Pairs an experiment id with its best (smallest) p-value, for sorting.
    class EE2PValue implements Comparable<EE2PValue> {
        Long EEId;
        double pValue;

        public EE2PValue() {
            super();
        }

        public EE2PValue( Long eeid, double pValue ) {
            this();
            this.EEId = eeid;
            this.pValue = pValue;
        }

        @Override
        public int compareTo( EE2PValue o ) {
            // BUG FIX: both branches previously tested 'this.pValue > o.getPValue()',
            // so the comparator could never return -1 and sorting was broken.
            return Double.compare( this.pValue, o.getPValue() );
        }

        public Long getEEId() {
            return EEId;
        }

        public double getPValue() {
            return pValue;
        }
    }

    List<EE2PValue> sortedEE = new ArrayList<EE2PValue>();

    // Need to sort the expression experiments by lowest p-value
    for ( Long eeId : vvoMap.keySet() ) {
        Collection<DifferentialExpressionValueObject> devos = validatedProbes.get( eeId );
        double minP = 1;

        if ( devos != null && !devos.isEmpty() ) {
            for ( DifferentialExpressionValueObject devo : devos ) {
                if ( minP > devo.getP() ) {
                    minP = devo.getP();
                }
            }
        }
        sortedEE.add( new EE2PValue( eeId, minP ) );
    }

    Collections.sort( sortedEE );

    VisualizationValueObject[] result = new VisualizationValueObject[vvoMap.keySet().size()];

    List<GeneValueObject> geneValueObjects = getGeneValueObjectList( genes );

    // Create collection of visualizationValueObject for flotr on js side
    int i = 0;
    for ( EE2PValue ee2P : sortedEE ) {

        VisualizationValueObject vvo = new VisualizationValueObject( vvoMap.get( ee2P.getEEId() ),
                geneValueObjects, ee2P.getPValue(), validatedProbes.get( ee2P.getEEId() ) );

        getSampleNames( vvoMap.get( ee2P.getEEId() ), vvo, layouts );

        if ( layouts != null && !layouts.isEmpty() && layouts.containsKey( ee2P.getEEId() ) ) {
            LinkedHashMap<BioAssayValueObject, LinkedHashMap<ExperimentalFactor, Double>> layout = layouts
                    .get( ee2P.getEEId() );
            this.prepareFactorsForFrontEndDisplay( vvo, layout );
        }

        /*
         * Set up the experimental designinfo so we can show it above the graph.
         */
        if ( layouts != null ) {
            ExpressionExperimentValueObject ee = eeMap.get( ee2P.getEEId() );
            log.debug( "setup experimental design layout profiles for " + ee );
            vvo.setUpFactorProfiles( layouts.get( ee.getId() ) );
        }

        result[i] = vvo;
        i++;
    }

    Long time = watch.getTime();
    if ( time > 1000 ) {
        log.info( "Created vis value objects in: " + time );
    }

    return result;
}
/**
 * Build the tab-separated header line for a text export of the given vector: the
 * experiment's name as a comment line followed by the bioassay names, or the fixed
 * gene/probe header when the vector has been reorganized.
 *
 * @param dedv data vector to describe
 * @return header text, terminated with a newline
 */
private String makeHeader( DoubleVectorValueObject dedv ) {

    if ( dedv.isReorganized() ) {
        /*
         * FIXME we should output the names in the 'right' order.
         */
        return "Gene Symbol\tGene Name\tProbe\n";
    }

    ExpressionExperimentValueObject ee = dedv.getExpressionExperiment();
    StringBuilder header = new StringBuilder();
    header.append( "# " ).append( ee.getShortName() ).append( " : " ).append( ee.getName() ).append( "\n" );

    for ( BioAssayValueObject assay : dedv.getBioAssays() ) {
        header.append( assay.getName() ).append( "\t" );
    }

    // Replace the trailing tab with the line terminator.
    header.deleteCharAt( header.length() - 1 );
    header.append( "\n" );

    return header.toString();
}
/**
 * Group vectors by experiment and, within each experiment, by the genes they map to.
 * Vectors with no gene information still create an (empty) entry for their experiment.
 *
 * @param newResults vectors to organize
 * @param layouts currently unused
 * @return experiment -> (gene id -> vectors mapping to that gene)
 */
private Map<ExpressionExperimentValueObject, Map<Long, Collection<DoubleVectorValueObject>>> makeVectorMap(
        Collection<DoubleVectorValueObject> newResults,
        Map<Long, LinkedHashMap<BioAssay, LinkedHashMap<ExperimentalFactor, Double>>> layouts ) {

    // FIXME use the layouts.

    Map<ExpressionExperimentValueObject, Map<Long, Collection<DoubleVectorValueObject>>> result = new HashMap<ExpressionExperimentValueObject, Map<Long, Collection<DoubleVectorValueObject>>>();

    for ( DoubleVectorValueObject vector : newResults ) {
        ExpressionExperimentValueObject experiment = vector.getExpressionExperiment();

        if ( !result.containsKey( experiment ) ) {
            result.put( experiment, new HashMap<Long, Collection<DoubleVectorValueObject>>() );
        }
        Map<Long, Collection<DoubleVectorValueObject>> byGene = result.get( experiment );

        if ( vector.getGenes() == null || vector.getGenes().isEmpty() ) {
            continue;
        }
        for ( Long geneId : vector.getGenes() ) {
            if ( !byGene.containsKey( geneId ) ) {
                byGene.put( geneId, new HashSet<DoubleVectorValueObject>() );
            }
            byGene.get( geneId ).add( vector );
        }
    }

    return result;
}
/**
 * Takes the DEDVs and put them in point objects and normalize the values. returns a map of eeid to visValueObject.
 * Currently removes multiple hits for same gene. Tries to pick best DEDV.
 *
 * @param dedvs data vectors to display
 * @param genes gene ids of interest; when null or empty, the genes are derived from the vectors
 * @param validatedProbes map of experiment id -> probe ids underlying the links; may be null
 * @param layouts experimental design layouts keyed by experiment id; may be null
 * @return one VisualizationValueObject per experiment (a single-slot array when there are no vectors)
 */
private VisualizationValueObject[] makeVisCollection( Collection<DoubleVectorValueObject> dedvs,
        Collection<Long> genes, Map<Long, Collection<Long>> validatedProbes,
        Map<Long, LinkedHashMap<BioAssayValueObject, LinkedHashMap<ExperimentalFactor, Double>>> layouts ) {

    Map<Long, List<DoubleVectorValueObject>> vvoMap = new HashMap<Long, List<DoubleVectorValueObject>>();
    // Organize by expression experiment
    if ( dedvs == null || dedvs.isEmpty() ) return new VisualizationValueObject[1];

    for ( DoubleVectorValueObject dvvo : dedvs ) {
        ExpressionExperimentValueObject ee = dvvo.getExpressionExperiment();
        if ( !vvoMap.containsKey( ee.getId() ) ) {
            vvoMap.put( ee.getId(), new ArrayList<DoubleVectorValueObject>() );
        }
        vvoMap.get( ee.getId() ).add( dvvo );
    }

    // FIX: was initialized to a throwaway empty list before being reassigned;
    // declare once and assign in each branch instead.
    List<GeneValueObject> geneValueObjects;
    if ( genes == null || genes.isEmpty() ) {
        // No genes specified: display whichever genes the vectors map to.
        geneValueObjects = new ArrayList<GeneValueObject>( getGeneValueObjectsUsed( dedvs ).values() );
    } else {
        geneValueObjects = getGeneValueObjectList( new ArrayList<Long>( genes ) );
    }

    StopWatch timer = new StopWatch();
    timer.start();

    VisualizationValueObject[] result = new VisualizationValueObject[vvoMap.keySet().size()];
    // Create collection of visualizationValueObject for flotr on js side
    int i = 0;
    for ( Long ee : vvoMap.keySet() ) {

        Collection<Long> validatedProbeList = null;
        if ( validatedProbes != null ) {
            validatedProbeList = validatedProbes.get( ee );
        }
        Collection<DoubleVectorValueObject> vectors = vvoMap.get( ee );

        VisualizationValueObject vvo = new VisualizationValueObject( vectors, geneValueObjects, validatedProbeList );

        if ( vectors.size() > 0 ) {
            getSampleNames( vectors, vvo, layouts );
            // FIX: dropped a redundant repeated 'vectors.size() > 0' check here.
            if ( layouts != null && !layouts.isEmpty() && layouts.containsKey( ee ) ) {
                // Set up the experimental designinfo so we can show it above the graph.
                LinkedHashMap<BioAssayValueObject, LinkedHashMap<ExperimentalFactor, Double>> layout = layouts
                        .get( ee );
                this.prepareFactorsForFrontEndDisplay( vvo, layout );
            }
        }

        /*
         * Set up the experimental design info so we can show it above the graph.
         */
        if ( layouts != null && layouts.get( ee ) != null ) {
            vvo.setUpFactorProfiles( layouts.get( ee ) );
        }

        result[i] = vvo;
        i++;
    }

    long time = timer.getTime();
    if ( time > 1000 ) {
        log.info( "Created vis value objects in: " + time );
    }

    return result;
}
/**
 * Prepare vvo for display on front end. Uses factors and factor values from layouts.
 *
 * @param vvo Note: This will be modified! It will be updated with the factorNames and factorValuesToNames
 * @param eeLayouts layout: bioassay -> (experimental factor -> measurement, or factor value id for
 *        categorical factors)
 */
private void prepareFactorsForFrontEndDisplay( VisualizationValueObject vvo,
        LinkedHashMap<BioAssayValueObject, LinkedHashMap<ExperimentalFactor, Double>> eeLayouts ) {

    if ( eeLayouts == null || eeLayouts.isEmpty() ) {
        log.warn( "No layouts, bail" );
        vvo.setFactorNames( null );
        vvo.setFactorValuesToNames( null );
        return;
    }

    LinkedHashSet<String> factorNames = getFactorNames( eeLayouts );

    // colours for conditions/factor values bar chart
    Map<String, Queue<String>> factorColoursMap = createFactorNameToColoursMap( factorNames );
    String missingValueColour = "#DCDCDC";

    Random random = new Random();

    LinkedHashMap<String, LinkedHashMap<String, String>> factorToValueNames = new LinkedHashMap<String, LinkedHashMap<String, String>>();
    // list of maps with entries: key = factorName, value=array of factor values
    // 1 entry per sample
    ArrayList<LinkedHashMap<String, String[]>> factorValueMaps = new ArrayList<LinkedHashMap<String, String[]>>();

    Collection<String> factorsMissingValues = new HashSet<String>();

    Map<Long, FactorValue> fvs = new HashMap<Long, FactorValue>(); // avoid loading repeatedly.
    Collection<ExperimentalFactor> seenFactors = new HashSet<ExperimentalFactor>();

    for ( BioAssayValueObject ba : eeLayouts.keySet() ) {
        // double should be the factorValue id, defined in
        // ubic.gemma.visualization.ExperimentalDesignVisualizationService.getExperimentalDesignLayout(ExpressionExperiment,
        // BioAssayDimension)
        LinkedHashMap<ExperimentalFactor, Double> factorMap = eeLayouts.get( ba );

        LinkedHashMap<String, String[]> factorNamesToValueColourPairs = new LinkedHashMap<String, String[]>(
                factorNames.size() );

        // this is defensive, should only come into play when there's something messed up with the data.
        // for every factor, add a missing-value entry (guards against missing data messing up the layout)
        for ( String facName : factorNames ) {
            String[] facValAndColour = new String[] { "No value", missingValueColour };
            factorNamesToValueColourPairs.put( facName, facValAndColour );
        }

        ExperimentalFactor factor;
        Double valueOrId;

        // for each experimental factor, store the name and value
        for ( Entry<ExperimentalFactor, Double> pair : factorMap.entrySet() ) {
            factor = pair.getKey();
            valueOrId = pair.getValue();

            if ( factor == null ) continue;
            /*
             * the double is only a double because it is meant to hold measurements when the factor is continuous if
             * the factor is categorical, the double value is set to the value's id see
             * ubic.gemma.visualization.ExperimentalDesignVisualizationService.getExperimentalDesignLayout(
             * ExpressionExperiment, BioAssayDimension)
             */
            if ( valueOrId == null || factor.getType() == null
                    || ( factor.getType().equals( FactorType.CATEGORICAL ) && factor.getFactorValues().isEmpty() ) ) {
                factorsMissingValues.add( factor.getName() );
                continue;
            }

            // Pre-map all factor values for categorical factors so we don't look them up per sample.
            if ( !seenFactors.contains( factor ) && factor.getType().equals( FactorType.CATEGORICAL ) ) {
                for ( FactorValue fv : factor.getFactorValues() ) {
                    fvs.put( fv.getId(), fv );
                }
                // FIX: the factor was never recorded as seen, so this premapping re-ran
                // for every single sample.
                seenFactors.add( factor );
            }

            String facValsStr;
            if ( factor.getType() == FactorType.CONTINUOUS ) {
                log.debug( "Experiment has continuous factor." );
                facValsStr = valueOrId.toString();
            } else {
                // FIX: Long.valueOf instead of the deprecated 'new Long(...)'.
                Long id = Long.valueOf( Math.round( valueOrId ) );
                FactorValue facVal = fvs.get( id );
                if ( facVal == null ) {
                    // probably already have it already ... so this is just in case.
                    facVal = factorValueService.load( id );
                    log.warn( "Surprisingly have a factorvalue not already accounted for by premapping: " + facVal );
                }

                if ( facVal == null ) {
                    log.warn( "Failed to load factorValue with id = " + valueOrId
                            + ". Load returned null. Experiment is: " + vvo.getEevo().toString() );
                    factorsMissingValues.add( factor.getName() );
                    continue;
                }

                fvs.put( facVal.getId(), facVal );
                facValsStr = getFactorValueDisplayString( facVal );
            }

            if ( !factorToValueNames.containsKey( factor.getName() ) ) {
                factorToValueNames.put( factor.getName(), new LinkedHashMap<String, String>() );
            }

            // assign colour if unassigned or fetch it if already assigned
            String colourString = "";
            if ( !factorToValueNames.get( factor.getName() ).containsKey( facValsStr ) ) {
                if ( factorColoursMap.containsKey( factor.getName() ) ) {
                    colourString = factorColoursMap.get( factor.getName() ).poll();
                }
                // FIX: compare string contents with isEmpty(); 'colourString == ""' is a
                // reference comparison that only worked for the interned "" literal.
                if ( colourString == null || colourString.isEmpty() ) { // ran out of predefined colours
                    colourString = getRandomColour( random );
                }
                factorToValueNames.get( factor.getName() ).put( facValsStr, colourString );
            } else {
                colourString = factorToValueNames.get( factor.getName() ).get( facValsStr );
            }

            String[] facValAndColour = new String[] { facValsStr, colourString };
            factorNamesToValueColourPairs.put( factor.getName(), facValAndColour );
        }

        factorValueMaps.add( factorNamesToValueColourPairs );
    }

    // add missing value entries here so they show up at the end of the legend's value lists
    if ( !factorsMissingValues.isEmpty() ) {
        for ( String factorName : factorsMissingValues ) {
            if ( !factorToValueNames.containsKey( factorName ) ) {
                factorToValueNames.put( factorName, new LinkedHashMap<String, String>() );
            }
            factorToValueNames.get( factorName ).put( "No value", missingValueColour );
        }
    }

    vvo.setFactorNames( factorToValueNames ); // this is summary of values & colours by factor, used for legend
    vvo.setFactorValuesToNames( factorValueMaps ); // this is list of maps for each sample
}
} | fix for 3554 - temporary.
| gemma-web/src/main/java/ubic/gemma/web/controller/expression/experiment/DEDVController.java | fix for 3554 - temporary. | <ide><path>emma-web/src/main/java/ubic/gemma/web/controller/expression/experiment/DEDVController.java
<ide>
<ide> layouts = experimentalDesignVisualizationService.sortVectorDataByDesign( dedvs );
<ide>
<del> layouts = experimentalDesignVisualizationService.sortLayoutSamplesByFactor( layouts );
<add> // layouts = experimentalDesignVisualizationService.sortLayoutSamplesByFactor( layouts );
<ide> watch.stop();
<ide> Long time = watch.getTime();
<ide>
<ide> + " ms (times <100ms not reported)." );
<ide> }
<ide>
<del> layouts = experimentalDesignVisualizationService.sortLayoutSamplesByFactor( layouts ); // required? yes, see
<del> // GSE11859
<add> // layouts = experimentalDesignVisualizationService.sortLayoutSamplesByFactor( layouts ); // required? yes, see
<add> // GSE11859
<ide>
<ide> time = watch.getTime();
<ide> if ( time > 100 ) {
<ide> + " ms (times <100ms not reported)." );
<ide> }
<ide>
<del> layouts = experimentalDesignVisualizationService.sortLayoutSamplesByFactor( layouts ); // required? yes, see
<del> // GSE11859
<add> // layouts = experimentalDesignVisualizationService.sortLayoutSamplesByFactor( layouts ); // required? yes, see
<add> // GSE11859
<ide>
<ide> time = watch.getTime();
<ide> watch.reset();
<ide> Map<Long, LinkedHashMap<BioAssayValueObject, LinkedHashMap<ExperimentalFactor, Double>>> layouts = experimentalDesignVisualizationService
<ide> .sortVectorDataByDesign( dedvs );
<ide>
<del> layouts = experimentalDesignVisualizationService.sortLayoutSamplesByFactor( layouts );
<add> // layouts = experimentalDesignVisualizationService.sortLayoutSamplesByFactor( layouts );
<ide>
<ide> return makeVisCollection( dedvs, null, null, layouts );
<ide>
<ide> log.info( "Ran sortVectorDataByDesign on " + dedvs.size() + " DEDVs for " + eeIds.size() + " EEs" + " in "
<ide> + time + " ms (times <100ms not reported)." );
<ide> }
<del> layouts = experimentalDesignVisualizationService.sortLayoutSamplesByFactor( layouts ); // required?
<add> // layouts = experimentalDesignVisualizationService.sortLayoutSamplesByFactor( layouts ); // required?
<ide>
<ide> watch.stop();
<ide> time = watch.getTime(); |
|
Java | mit | 02f073424a1920b7e6b6f5a7ac26a996fc9ca204 | 0 | yukung/playground,yukung/playground,yukung/playground,yukung/playground | package org.yukung.sandbox.http;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.ServerSocket;
import java.net.Socket;
/**
* @author yukung
*/
/**
 * Minimal single-shot HTTP server: accepts one connection on port 8080, prints the
 * request head and, when a Content-Length header is present, the message body.
 */
public class Main {

    public static void main(String[] args) throws IOException {
        System.out.println("start >>>");

        try (
                ServerSocket ss = new ServerSocket(8080);
                Socket socket = ss.accept();
                BufferedReader br = new BufferedReader(new InputStreamReader(socket.getInputStream(), "UTF-8"));
        ) {
            String line = br.readLine();
            StringBuilder header = new StringBuilder();
            int contentLength = 0;

            // The request head ends at the first empty line; remember Content-Length
            // so we know how much body to read afterwards.
            while (line != null && !line.isEmpty()) {
                if (line.startsWith("Content-Length")) {
                    contentLength = Integer.parseInt(line.split(":")[1].trim());
                }
                header.append(line + "\n");
                line = br.readLine();
            }

            String body = null;

            if (0 < contentLength) {
                char[] c = new char[contentLength];
                // FIX: Reader.read may return fewer chars than requested; loop until the
                // whole body is read (or the stream ends early), and only convert the
                // chars actually read instead of the NUL-padded full buffer.
                int read = 0;
                while (read < contentLength) {
                    int n = br.read(c, read, contentLength - read);
                    if (n == -1) {
                        break;
                    }
                    read += n;
                }
                body = new String(c, 0, read);
            }

            System.out.println(header);
            System.out.println(body);
        }

        System.out.println("<<< end");
    }
}
| another-simple-server/src/main/java/org/yukung/sandbox/http/Main.java | package org.yukung.sandbox.http;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.ServerSocket;
import java.net.Socket;
/**
* @author yukung
*/
// Minimal single-shot HTTP server: accepts one connection on port 8080, echoes the
// request head to stdout, then exits. The message body (if any) is not read.
public class Main {

    public static void main(String[] args) throws IOException {
        System.out.println("start >>>");

        // try-with-resources: server socket, client socket and reader are all closed
        // automatically, even on error.
        try (
                ServerSocket ss = new ServerSocket(8080);
                Socket socket = ss.accept();
                BufferedReader br = new BufferedReader(new InputStreamReader(socket.getInputStream(), "UTF-8"));
        ) {
            String line = br.readLine();
            StringBuilder header = new StringBuilder();

            // The request head ends at the first empty line; readLine strips the CRLF.
            while (line != null && !line.isEmpty()) {
                header.append(line + "\n");
                line = br.readLine();
            }

            System.out.println(header);
        }

        System.out.println("<<< end");
    }
}
| Acquire the message body.
| another-simple-server/src/main/java/org/yukung/sandbox/http/Main.java | Acquire the message body. | <ide><path>nother-simple-server/src/main/java/org/yukung/sandbox/http/Main.java
<ide> ) {
<ide> String line = br.readLine();
<ide> StringBuilder header = new StringBuilder();
<add> int contentLength = 0;
<ide>
<ide> while (line != null && !line.isEmpty()) {
<add> if (line.startsWith("Content-Length")) {
<add> contentLength = Integer.parseInt(line.split(":")[1].trim());
<add> }
<ide> header.append(line + "\n");
<ide> line = br.readLine();
<ide> }
<ide>
<add> String body = null;
<add>
<add> if (0 < contentLength) {
<add> char[] c = new char[contentLength];
<add> br.read(c);
<add> body = new String(c);
<add> }
<add>
<ide> System.out.println(header);
<add> System.out.println(body);
<ide> }
<ide>
<ide> System.out.println("<<< end"); |
|
Java | apache-2.0 | 2cf7cbc9b02585a3ffba38b3b3578753469921f3 | 0 | darranl/directory-server,lucastheisen/apache-directory-server,lucastheisen/apache-directory-server,apache/directory-server,drankye/directory-server,darranl/directory-server,drankye/directory-server,apache/directory-server | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.directory.server.ldap.handlers.controls;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.directory.server.core.filtering.EntryFilteringCursor;
import org.apache.directory.server.ldap.LdapSession;
import org.apache.directory.shared.asn1.ber.tlv.Value;
import org.apache.directory.shared.ldap.constants.SchemaConstants;
import org.apache.directory.shared.ldap.exception.LdapException;
import org.apache.directory.shared.ldap.message.internal.InternalSearchRequest;
import org.apache.directory.shared.ldap.schema.AttributeType;
import org.apache.directory.shared.ldap.schema.SchemaManager;
import org.apache.directory.shared.ldap.util.StringTools;
/**
* The structure which stores the informations relative to the pagedSearch control.
* They are associated to a cookie, stored into the session and associated to an
* instance of this class.
*
* @author <a href="mailto:[email protected]">Apache Directory Project</a>
* @version $Rev: $
*/
public class PagedSearchContext
{
    /** The previous search request */
    private InternalSearchRequest previousSearchRequest;

    /** The current position in the cursor */
    private int currentPosition;

    /** The cookie key */
    private byte[] cookie;

    /** The integer value for the cookie */
    private AtomicInteger cookieValue;

    /** The associated cursor for the current search request */
    private EntryFilteringCursor cursor;


    /**
     * Creates a new instance of this class, storing the SearchRequest into it.
     *
     * @param searchRequest the search request this context tracks
     */
    public PagedSearchContext( InternalSearchRequest searchRequest )
    {
        previousSearchRequest = searchRequest;
        currentPosition = 0;

        // We compute a key for this cookie. It combines the search request
        // and some time seed, in order to avoid possible collisions, as
        // a user may send more than one PagedSearch on the same session.
        cookieValue = new AtomicInteger( searchRequest.getMessageId() << 16 );
        cookie = Value.getBytes( cookieValue.get() );
    }


    /**
     * @return The current cookie's key, as stored in the session and sent back to the
     *         client. Use {@link #getNewCookie()} to generate a fresh one.
     */
    public byte[] getCookie()
    {
        return cookie;
    }


    /**
     * @return The integer value backing the current cookie
     */
    public int getCookieValue()
    {
        return cookieValue.get();
    }


    /**
     * Compute a new cookie, if the previous one already exists. This
     * is unlikely, as we are based on some time seed, but just in case,
     * this method will generate a new one.
     * @return The new cookie
     */
    public byte[] getNewCookie()
    {
        cookie = Value.getBytes( cookieValue.incrementAndGet() );

        return cookie;
    }


    /**
     * Build a set of OIDs from the list of attributes we have in the search request.
     * Attributes that are not found in the schema are dropped, except for the special
     * attributes '*', '+' and '1.1' which are kept verbatim.
     */
    private Set<String> buildAttributeSet( InternalSearchRequest request, LdapSession session,
        SchemaManager schemaManager )
    {
        Set<String> requestSet = new HashSet<String>();

        // Build the set of attributeType from the attributes
        for ( String attribute:request.getAttributes() )
        {
            try
            {
                AttributeType at = schemaManager.lookupAttributeTypeRegistry( attribute );
                requestSet.add( at.getOid() );
            }
            catch ( LdapException le )
            {
                // Deal with special attributes : '*', '+' and '1.1'
                if ( attribute.equals( SchemaConstants.ALL_OPERATIONAL_ATTRIBUTES ) ||
                    attribute.equals( SchemaConstants.ALL_USER_ATTRIBUTES ) ||
                    attribute.equals( SchemaConstants.NO_ATTRIBUTE ) )
                {
                    requestSet.add( attribute );
                }

                // Otherwise, don't add the attribute to the set
            }
        }

        return requestSet;
    }


    /**
     * Compare the previous search request and the new one, and return
     * true if they are equal. We compare every field but the MessageID.
     *
     * Note: the filter itself is NOT compared (see the comment at the end of the
     * method), so two requests differing only in their filter are considered equal.
     *
     * @param request The new SearchRequest
     * @param session The session the requests belong to (used to access the schema)
     * @return true if both request are equal.
     */
    public boolean hasSameRequest( InternalSearchRequest request, LdapSession session )
    {
        // Compares the scope
        if ( request.getScope() != previousSearchRequest.getScope() )
        {
            return false;
        }

        // Compares the sizeLimit
        if ( request.getSizeLimit() != previousSearchRequest.getSizeLimit() )
        {
            return false;
        }

        // Compares the timeLimit
        if ( request.getTimeLimit() != previousSearchRequest.getTimeLimit() )
        {
            return false;
        }

        // Compares the TypesOnly
        if ( request.getTypesOnly() != previousSearchRequest.getTypesOnly() )
        {
            return false;
        }

        // Compares the deref aliases mode
        if ( request.getDerefAliases() != previousSearchRequest.getDerefAliases() )
        {
            return false;
        }

        SchemaManager schemaManager =
            session.getLdapServer().getDirectoryService().getSchemaManager();

        // Compares the attributes
        if ( request.getAttributes() == null )
        {
            if ( previousSearchRequest.getAttributes() != null )
            {
                return false;
            }
        }
        else
        {
            if ( previousSearchRequest.getAttributes() == null )
            {
                return false;
            }
            else
            {
                // We have to normalize the attributes in order to compare them
                if ( request.getAttributes().size() != previousSearchRequest.getAttributes().size() )
                {
                    return false;
                }

                // Build the set of attributeType from both requests
                Set<String> requestSet = buildAttributeSet( request, session, schemaManager );
                Set<String> previousRequestSet = buildAttributeSet( previousSearchRequest, session, schemaManager );

                // Check that both sets have the same size again after having converted
                // the attributes to OID
                if ( requestSet.size() != previousRequestSet.size() )
                {
                    return false;
                }

                // Same size and every element of one set removed from the other: equal sets.
                for ( String attribute:requestSet )
                {
                    previousRequestSet.remove( attribute );
                }

                // The other set must be empty
                if ( !previousRequestSet.isEmpty() )
                {
                    return false;
                }
            }
        }

        // Compare the baseDN
        try
        {
            request.getBase().normalize( schemaManager.getNormalizerMapping() );

            if ( !previousSearchRequest.getBase().isNormalized() )
            {
                previousSearchRequest.getBase().normalize( schemaManager.getNormalizerMapping() );
            }

            if ( !request.getBase().equals( previousSearchRequest.getBase() ) )
            {
                return false;
            }
        }
        catch ( LdapException le )
        {
            return false;
        }

        // Compare the filters
        // Here, we assume the user hasn't changed the filter's order or content,
        // as the filter is not normalized. This is a real problem, as the normalization
        // phase is done in the interceptor chain, which is a bad decision wrt what we
        // do here.
        return true; //request.getFilter().equals( previousSearchRequest.getFilter() );
    }


    /**
     * @return The current position in the cursor. This value is updated
     * after each successful search request.
     */
    public int getCurrentPosition()
    {
        return currentPosition;
    }


    /**
     * Set the new current position, incrementing it with the
     * number of returned entries.
     *
     * @param returnedEntries The number of returned entries
     */
    public void incrementCurrentPosition( int returnedEntries )
    {
        this.currentPosition += returnedEntries;
    }


    /**
     * @return The previous search request
     */
    public InternalSearchRequest getPreviousSearchRequest()
    {
        return previousSearchRequest;
    }


    /**
     * @return The associated cursor
     */
    public EntryFilteringCursor getCursor()
    {
        return cursor;
    }


    /**
     * Set the new cursor for this search request
     * @param cursor The associated cursor
     */
    public void setCursor( EntryFilteringCursor cursor )
    {
        this.cursor = cursor;
    }


    /**
     * @see Object#toString()
     */
    public String toString()
    {
        StringBuilder sb = new StringBuilder();

        sb.append( "PagedSearch context : <" );
        sb.append( StringTools.dumpBytes( cookie ) );
        sb.append( ", " );
        sb.append( currentPosition );
        sb.append( ">" );

        return sb.toString();
    }
}
| protocol-ldap/src/main/java/org/apache/directory/server/ldap/handlers/controls/PagedSearchContext.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.directory.server.ldap.handlers.controls;
import java.util.HashSet;
import java.util.Set;
import org.apache.directory.server.core.filtering.EntryFilteringCursor;
import org.apache.directory.server.ldap.LdapSession;
import org.apache.directory.shared.asn1.ber.tlv.Value;
import org.apache.directory.shared.ldap.constants.SchemaConstants;
import org.apache.directory.shared.ldap.exception.LdapException;
import org.apache.directory.shared.ldap.message.internal.InternalSearchRequest;
import org.apache.directory.shared.ldap.schema.AttributeType;
import org.apache.directory.shared.ldap.schema.SchemaManager;
import org.apache.directory.shared.ldap.util.StringTools;
/**
* The structure which stores the informations relative to the pagedSearch control.
* They are associated to a cookie, stored into the session and associated to an
* instance of this class.
*
* @author <a href="mailto:[email protected]">Apache Directory Project</a>
* @version $Rev: $
*/
public class PagedSearchContext
{
/** The previous search request */
private InternalSearchRequest previousSearchRequest;
/** The current position in the cursor */
private int currentPosition;
/** The cookie key */
private byte[] cookie;
/** The integer value for the cookie */
private int cookieValue;
/** The associated cursor for the current search request */
private EntryFilteringCursor cursor;
/**
* Creates a new instance of this class, storing the Searchrequest into it.
*/
public PagedSearchContext( InternalSearchRequest searchRequest )
{
previousSearchRequest = searchRequest;
currentPosition = 0;
// We compute a key for this cookie. It combines the search request
// and some time seed, in order to avoid possible collisions, as
// a user may send more than one PagedSearch on the same session.
cookieValue = (int)(System.nanoTime()*17) + searchRequest.getMessageId();
cookie = Value.getBytes( cookieValue );
}
/**
* Compute a new key for this cookie, based on the current searchRequest
* hashCode and the current position. This value will be stored into the
* session, and will permit the retrieval of this instance.
*
* @return The new cookie's key
*/
public byte[] getCookie()
{
return cookie;
}
public int getCookieValue()
{
return cookieValue;
}
/**
* Compute a new cookie, if the previous one already exists. This
* is unlikely, as we are based on some time seed, but just in case,
* this method will generate a new one.
* @return The new cookie
*/
public byte[] getNewCookie()
{
cookieValue = cookieValue + (int)(System.nanoTime()*17);
cookie = Value.getBytes( cookieValue );
return cookie;
}
/**
* Build a set of OIDs from the list of attributes we have in the search request
*/
private Set<String> buildAttributeSet( InternalSearchRequest request, LdapSession session,
SchemaManager schemaManager )
{
Set<String> requestSet = new HashSet<String>();
// Build the set of attributeType from the attributes
for ( String attribute:request.getAttributes() )
{
try
{
AttributeType at = schemaManager.lookupAttributeTypeRegistry( attribute );
requestSet.add( at.getOid() );
}
catch ( LdapException le )
{
// Deal with special attributes : '*', '+' and '1.1'
if ( attribute.equals( SchemaConstants.ALL_OPERATIONAL_ATTRIBUTES ) ||
attribute.equals( SchemaConstants.ALL_USER_ATTRIBUTES ) ||
attribute.equals( SchemaConstants.NO_ATTRIBUTE ) )
{
requestSet.add( attribute );
}
// Otherwise, don't add the attribute to the set
}
}
return requestSet;
}
/**
* Compare the previous search request and the new one, and return
* true if they are equal. We compare every field but the MessageID.
*
* @param request The new SearchRequest
* @return true if both request are equal.
*/
public boolean hasSameRequest( InternalSearchRequest request, LdapSession session )
{
// Compares the scope
if ( request.getScope() != previousSearchRequest.getScope() )
{
return false;
}
// Compares the sizeLimit
if ( request.getSizeLimit() != previousSearchRequest.getSizeLimit() )
{
return false;
}
// Compares the timeLimit
if ( request.getTimeLimit() != previousSearchRequest.getTimeLimit() )
{
return false;
}
// Compares the TypesOnly
if ( request.getTypesOnly() != previousSearchRequest.getTypesOnly() )
{
return false;
}
// Compares the deref aliases mode
if ( request.getDerefAliases() != previousSearchRequest.getDerefAliases() )
{
return false;
}
SchemaManager schemaManager =
session.getLdapServer().getDirectoryService().getSchemaManager();
// Compares the attributes
if ( request.getAttributes() == null )
{
if ( previousSearchRequest.getAttributes() != null )
{
return false;
}
}
else
{
if ( previousSearchRequest.getAttributes() == null )
{
return false;
}
else
{
// We have to normalize the attributes in order to compare them
if ( request.getAttributes().size() != previousSearchRequest.getAttributes().size() )
{
return false;
}
// Build the set of attributeType from both requests
Set<String> requestSet = buildAttributeSet( request, session, schemaManager );
Set<String> previousRequestSet = buildAttributeSet( previousSearchRequest, session, schemaManager );
// Check that both sets have the same size again after having converted
// the attributes to OID
if ( requestSet.size() != previousRequestSet.size() )
{
return false;
}
for ( String attribute:requestSet )
{
previousRequestSet.remove( attribute );
}
// The other set must be empty
if ( !previousRequestSet.isEmpty() )
{
return false;
}
}
}
// Compare the baseDN
try
{
request.getBase().normalize( schemaManager.getNormalizerMapping() );
if ( !previousSearchRequest.getBase().isNormalized() )
{
previousSearchRequest.getBase().normalize( schemaManager.getNormalizerMapping() );
}
if ( !request.getBase().equals( previousSearchRequest.getBase() ) )
{
return false;
}
}
catch ( LdapException le )
{
return false;
}
// Compare the filters
// Here, we assume the user hasn't changed the filter's order or content,
// as the filter is not normalized. This is a real problem, as the normalization
// phase is done in the interceptor chain, which is a bad decision wrt what we
// do here.
return true; //request.getFilter().equals( previousSearchRequest.getFilter() );
}
/**
* @return The current position in the cursor. This value is updated
* after each successful search request.
*/
public int getCurrentPosition()
{
return currentPosition;
}
/**
* Set the new current position, incrementing it with the
* number of returned entries.
*
* @param returnedEntries The number of returned entries
*/
public void incrementCurrentPosition( int returnedEntries )
{
this.currentPosition += returnedEntries;
}
/**
* @return The previous search request
*/
public InternalSearchRequest getPreviousSearchRequest()
{
return previousSearchRequest;
}
/**
* @return The associated cursor
*/
public EntryFilteringCursor getCursor()
{
return cursor;
}
/**
* Set the new cursor for this search request
* @param cursor The associated cursor
*/
public void setCursor( EntryFilteringCursor cursor )
{
this.cursor = cursor;
}
/**
* @see Object#toString()
*/
public String toString()
{
StringBuilder sb = new StringBuilder();
sb.append( "PagedSearch context : <" );
sb.append( StringTools.dumpBytes( cookie ) );
sb.append( ", " );
sb.append( currentPosition );
sb.append( ">" );
return sb.toString();
}
}
| o Used an AtomicInteger for the cookie
o Compute the cookie to avoid collisions
git-svn-id: 90776817adfbd895fc5cfa90f675377e0a62e745@944944 13f79535-47bb-0310-9956-ffa450edef68
| protocol-ldap/src/main/java/org/apache/directory/server/ldap/handlers/controls/PagedSearchContext.java | o Used an AtomicInteger for the cookie o Compute the cookie to avoid collisions | <ide><path>rotocol-ldap/src/main/java/org/apache/directory/server/ldap/handlers/controls/PagedSearchContext.java
<ide>
<ide> import java.util.HashSet;
<ide> import java.util.Set;
<add>import java.util.concurrent.atomic.AtomicInteger;
<ide>
<ide> import org.apache.directory.server.core.filtering.EntryFilteringCursor;
<ide> import org.apache.directory.server.ldap.LdapSession;
<ide> private byte[] cookie;
<ide>
<ide> /** The integer value for the cookie */
<del> private int cookieValue;
<add> private AtomicInteger cookieValue;
<ide>
<ide> /** The associated cursor for the current search request */
<ide> private EntryFilteringCursor cursor;
<ide>
<ide> /**
<del> * Creates a new instance of this class, storing the Searchrequest into it.
<add> * Creates a new instance of this class, storing the SearchRequest into it.
<ide> */
<ide> public PagedSearchContext( InternalSearchRequest searchRequest )
<ide> {
<ide> // We compute a key for this cookie. It combines the search request
<ide> // and some time seed, in order to avoid possible collisions, as
<ide> // a user may send more than one PagedSearch on the same session.
<del> cookieValue = (int)(System.nanoTime()*17) + searchRequest.getMessageId();
<del>
<del> cookie = Value.getBytes( cookieValue );
<add> cookieValue = new AtomicInteger( searchRequest.getMessageId() << 16 );
<add>
<add> cookie = Value.getBytes( cookieValue.get() );
<ide> }
<ide>
<ide>
<ide>
<ide> public int getCookieValue()
<ide> {
<del> return cookieValue;
<add> return cookieValue.get();
<ide> }
<ide>
<ide>
<ide> */
<ide> public byte[] getNewCookie()
<ide> {
<del> cookieValue = cookieValue + (int)(System.nanoTime()*17);
<del> cookie = Value.getBytes( cookieValue );
<add> cookie = Value.getBytes( cookieValue.incrementAndGet() );
<ide>
<ide> return cookie;
<ide> } |
|
Java | agpl-3.0 | b1c8c4eaabf5b24bdde810622259ad9de23407a1 | 0 | dmontag/neo4j-enterprise,dmontag/neo4j-enterprise | /**
* Copyright (c) 2002-2011 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.backup;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.fail;
import static org.neo4j.helpers.collection.MapUtil.stringMap;
import static org.neo4j.kernel.Config.ENABLE_ONLINE_BACKUP;
import java.io.File;
import org.apache.commons.io.FileUtils;
import org.junit.Before;
import org.junit.Test;
import org.neo4j.com.ComException;
import org.neo4j.graphdb.DynamicRelationshipType;
import org.neo4j.graphdb.GraphDatabaseService;
import org.neo4j.graphdb.Node;
import org.neo4j.graphdb.Transaction;
import org.neo4j.graphdb.TransactionFailureException;
import org.neo4j.graphdb.index.Index;
import org.neo4j.index.impl.lucene.LuceneDataSource;
import org.neo4j.kernel.AbstractGraphDatabase;
import org.neo4j.kernel.Config;
import org.neo4j.kernel.EmbeddedGraphDatabase;
import org.neo4j.kernel.impl.transaction.xaframework.XaDataSource;
import org.neo4j.test.DbRepresentation;
import org.neo4j.test.subprocess.SubProcess;
public class TestBackup
{
private final String serverPath = "target/var/serverdb";
private final String otherServerPath = serverPath + "2";
private final String backupPath = "target/var/backuedup-serverdb";
@Before
public void before() throws Exception
{
FileUtils.deleteDirectory( new File( serverPath ) );
FileUtils.deleteDirectory( new File( otherServerPath ) );
FileUtils.deleteDirectory( new File( backupPath ) );
}
// TODO MP: What happens if the server database keeps growing, virtually making the files endless?
@Test
public void makeSureFullFailsWhenDbExists() throws Exception
{
createInitialDataSet( serverPath );
ServerInterface server = startServer( serverPath );
OnlineBackup backup = OnlineBackup.from( "localhost" );
createInitialDataSet( backupPath );
try
{
backup.full( backupPath );
fail( "Shouldn't be able to do full backup into existing db" );
}
catch ( Exception e )
{
// good
}
shutdownServer( server );
}
@Test
public void makeSureIncrementalFailsWhenNoDb() throws Exception
{
createInitialDataSet( serverPath );
ServerInterface server = startServer( serverPath );
OnlineBackup backup = OnlineBackup.from( "localhost" );
try
{
backup.incremental( backupPath );
fail( "Shouldn't be able to do incremental backup into non-existing db" );
}
catch ( Exception e )
{
// Good
}
shutdownServer( server );
}
@Test
public void fullThenIncremental() throws Exception
{
DbRepresentation initialDataSetRepresentation = createInitialDataSet( serverPath );
ServerInterface server = startServer( serverPath );
OnlineBackup backup = OnlineBackup.from( "localhost" );
backup.full( backupPath );
assertEquals( initialDataSetRepresentation, DbRepresentation.of( backupPath ) );
shutdownServer( server );
DbRepresentation furtherRepresentation = addMoreData( serverPath );
server = startServer( serverPath );
backup.incremental( backupPath );
assertEquals( furtherRepresentation, DbRepresentation.of( backupPath ) );
shutdownServer( server );
}
@Test
public void makeSureStoreIdIsEnforced() throws Exception
{
// Create data set X on server A
DbRepresentation initialDataSetRepresentation = createInitialDataSet( serverPath );
ServerInterface server = startServer( serverPath );
// Grab initial backup from server A
OnlineBackup backup = OnlineBackup.from( "localhost" );
backup.full( backupPath );
assertEquals( initialDataSetRepresentation, DbRepresentation.of( backupPath ) );
shutdownServer( server );
// Create data set X+Y on server B
createInitialDataSet( otherServerPath );
addMoreData( otherServerPath );
server = startServer( otherServerPath );
// Try to grab incremental backup from server B.
// Data should be OK, but store id check should prevent that.
try
{
backup.incremental( backupPath );
fail( "Shouldn't work" );
}
catch ( ComException e )
{ // Good
}
shutdownServer( server );
// Just make sure incremental backup can be received properly from
// server A, even after a failed attempt from server B
DbRepresentation furtherRepresentation = addMoreData( serverPath );
server = startServer( serverPath );
backup.incremental( backupPath );
assertEquals( furtherRepresentation, DbRepresentation.of( backupPath ) );
shutdownServer( server );
}
private ServerInterface startServer( String path ) throws Exception
{
/*
ServerProcess server = new ServerProcess();
try
{
server.startup( Pair.of( path, "true" ) );
}
catch ( Throwable e )
{
// TODO Auto-generated catch block
throw new RuntimeException( e );
}
*/
ServerInterface server = new EmbeddedServer( path, "true" );
server.awaitStarted();
return server;
}
private void shutdownServer( ServerInterface server ) throws Exception
{
server.shutdown();
Thread.sleep( 1000 );
}
private DbRepresentation addMoreData( String path )
{
GraphDatabaseService db = startGraphDatabase( path );
Transaction tx = db.beginTx();
Node node = db.createNode();
node.setProperty( "backup", "Is great" );
db.getReferenceNode().createRelationshipTo( node,
DynamicRelationshipType.withName( "LOVES" ) );
tx.success();
tx.finish();
DbRepresentation result = DbRepresentation.of( db );
db.shutdown();
return result;
}
private GraphDatabaseService startGraphDatabase( String path )
{
return new EmbeddedGraphDatabase( path, stringMap(
Config.KEEP_LOGICAL_LOGS, "true" ) );
}
private DbRepresentation createInitialDataSet( String path )
{
GraphDatabaseService db = startGraphDatabase( path );
Transaction tx = db.beginTx();
Node node = db.createNode();
node.setProperty( "myKey", "myValue" );
Index<Node> nodeIndex = db.index().forNodes( "db-index" );
nodeIndex.add( node, "myKey", "myValue" );
db.getReferenceNode().createRelationshipTo( node,
DynamicRelationshipType.withName( "KNOWS" ) );
tx.success();
tx.finish();
DbRepresentation result = DbRepresentation.of( db );
db.shutdown();
return result;
}
@Test
public void multipleIncrementals() throws Exception
{
GraphDatabaseService db = null;
try
{
db = new EmbeddedGraphDatabase( serverPath,
stringMap( ENABLE_ONLINE_BACKUP, "true" ) );
Transaction tx = db.beginTx();
Index<Node> index = db.index().forNodes( "yo" );
index.add( db.createNode(), "justTo", "commitATx" );
tx.success();
tx.finish();
OnlineBackup backup = OnlineBackup.from( "localhost" );
backup.full( backupPath );
long lastCommittedTxForLucene = getLastCommittedTx( backupPath );
for ( int i = 0; i < 5; i++ )
{
tx = db.beginTx();
Node node = db.createNode();
index.add( node, "key", "value" + i );
tx.success();
tx.finish();
backup.incremental( backupPath );
assertEquals( lastCommittedTxForLucene + i + 1,
getLastCommittedTx( backupPath ) );
}
}
finally
{
if ( db != null )
{
db.shutdown();
}
}
}
@Test
// @Ignore
public void backupIndexWithNoCommits() throws Exception
{
GraphDatabaseService db = null;
try
{
db = new EmbeddedGraphDatabase( serverPath,
stringMap( ENABLE_ONLINE_BACKUP, "true" ) );
db.index().forNodes( "created-no-commits" );
OnlineBackup backup = OnlineBackup.from( "localhost" );
backup.full( backupPath );
}
finally
{
if ( db != null )
{
db.shutdown();
}
}
}
private long getLastCommittedTx( String path )
{
GraphDatabaseService db = new EmbeddedGraphDatabase( path );
try
{
XaDataSource ds = ((AbstractGraphDatabase)db).getConfig().getTxModule().getXaDataSourceManager().getXaDataSource(
LuceneDataSource.DEFAULT_NAME );
return ds.getLastCommittedTxId();
}
finally
{
db.shutdown();
}
}
@Test
public void shouldRetainFileLocksAfterFullBackupOnLiveDatabase() throws Exception
{
GraphDatabaseService db = new EmbeddedGraphDatabase( serverPath, stringMap( ENABLE_ONLINE_BACKUP, "true" ) );
try
{
assertStoreIsLocked( serverPath );
OnlineBackup.from( "localhost" ).full( backupPath );
assertStoreIsLocked( serverPath );
}
finally
{
db.shutdown();
}
}
private static void assertStoreIsLocked( String path )
{
try
{
new EmbeddedGraphDatabase( path ).shutdown();
fail( "Could start up database in same process, store not locked" );
}
catch ( TransactionFailureException ex )
{
// expected
}
StartupChecker proc = new LockProcess().start( path );
try
{
assertFalse( "Could start up database in subprocess, store is not locked", proc.startupOk() );
}
finally
{
SubProcess.stop( proc );
}
}
public interface StartupChecker
{
boolean startupOk();
}
@SuppressWarnings( "serial" )
private static class LockProcess extends SubProcess<StartupChecker, String> implements StartupChecker
{
private volatile Object state;
@Override
public boolean startupOk()
{
Object result;
do
{
result = state;
}
while ( result == null );
return !( state instanceof Exception );
}
@Override
protected void startup( String path ) throws Throwable
{
GraphDatabaseService db;
try
{
db = new EmbeddedGraphDatabase( path );
}
catch ( TransactionFailureException ex )
{
state = ex;
return;
}
state = new Object();
db.shutdown();
}
}
}
| backup/src/test/java/org/neo4j/backup/TestBackup.java | /**
* Copyright (c) 2002-2011 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.backup;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.fail;
import static org.neo4j.helpers.collection.MapUtil.stringMap;
import static org.neo4j.kernel.Config.ENABLE_ONLINE_BACKUP;
import java.io.File;
import org.apache.commons.io.FileUtils;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.neo4j.com.ComException;
import org.neo4j.graphdb.DynamicRelationshipType;
import org.neo4j.graphdb.GraphDatabaseService;
import org.neo4j.graphdb.Node;
import org.neo4j.graphdb.Transaction;
import org.neo4j.graphdb.TransactionFailureException;
import org.neo4j.graphdb.index.Index;
import org.neo4j.index.impl.lucene.LuceneDataSource;
import org.neo4j.kernel.AbstractGraphDatabase;
import org.neo4j.kernel.Config;
import org.neo4j.kernel.EmbeddedGraphDatabase;
import org.neo4j.kernel.impl.transaction.xaframework.XaDataSource;
import org.neo4j.test.DbRepresentation;
import org.neo4j.test.subprocess.SubProcess;
public class TestBackup
{
private final String serverPath = "target/var/serverdb";
private final String otherServerPath = serverPath + "2";
private final String backupPath = "target/var/backuedup-serverdb";
@Before
public void before() throws Exception
{
FileUtils.deleteDirectory( new File( serverPath ) );
FileUtils.deleteDirectory( new File( otherServerPath ) );
FileUtils.deleteDirectory( new File( backupPath ) );
}
// TODO MP: What happens if the server database keeps growing, virtually making the files endless?
@Test
public void makeSureFullFailsWhenDbExists() throws Exception
{
createInitialDataSet( serverPath );
ServerInterface server = startServer( serverPath );
OnlineBackup backup = OnlineBackup.from( "localhost" );
createInitialDataSet( backupPath );
try
{
backup.full( backupPath );
fail( "Shouldn't be able to do full backup into existing db" );
}
catch ( Exception e )
{
// good
}
shutdownServer( server );
}
@Test
public void makeSureIncrementalFailsWhenNoDb() throws Exception
{
createInitialDataSet( serverPath );
ServerInterface server = startServer( serverPath );
OnlineBackup backup = OnlineBackup.from( "localhost" );
try
{
backup.incremental( backupPath );
fail( "Shouldn't be able to do incremental backup into non-existing db" );
}
catch ( Exception e )
{
// Good
}
shutdownServer( server );
}
@Test
public void fullThenIncremental() throws Exception
{
DbRepresentation initialDataSetRepresentation = createInitialDataSet( serverPath );
ServerInterface server = startServer( serverPath );
OnlineBackup backup = OnlineBackup.from( "localhost" );
backup.full( backupPath );
assertEquals( initialDataSetRepresentation, DbRepresentation.of( backupPath ) );
shutdownServer( server );
DbRepresentation furtherRepresentation = addMoreData( serverPath );
server = startServer( serverPath );
backup.incremental( backupPath );
assertEquals( furtherRepresentation, DbRepresentation.of( backupPath ) );
shutdownServer( server );
}
@Test
public void makeSureStoreIdIsEnforced() throws Exception
{
// Create data set X on server A
DbRepresentation initialDataSetRepresentation = createInitialDataSet( serverPath );
ServerInterface server = startServer( serverPath );
// Grab initial backup from server A
OnlineBackup backup = OnlineBackup.from( "localhost" );
backup.full( backupPath );
assertEquals( initialDataSetRepresentation, DbRepresentation.of( backupPath ) );
shutdownServer( server );
// Create data set X+Y on server B
createInitialDataSet( otherServerPath );
addMoreData( otherServerPath );
server = startServer( otherServerPath );
// Try to grab incremental backup from server B.
// Data should be OK, but store id check should prevent that.
try
{
backup.incremental( backupPath );
fail( "Shouldn't work" );
}
catch ( ComException e )
{ // Good
}
shutdownServer( server );
// Just make sure incremental backup can be received properly from
// server A, even after a failed attempt from server B
DbRepresentation furtherRepresentation = addMoreData( serverPath );
server = startServer( serverPath );
backup.incremental( backupPath );
assertEquals( furtherRepresentation, DbRepresentation.of( backupPath ) );
shutdownServer( server );
}
private ServerInterface startServer( String path ) throws Exception
{
/*
ServerProcess server = new ServerProcess();
try
{
server.startup( Pair.of( path, "true" ) );
}
catch ( Throwable e )
{
// TODO Auto-generated catch block
throw new RuntimeException( e );
}
*/
ServerInterface server = new EmbeddedServer( path, "true" );
server.awaitStarted();
return server;
}
private void shutdownServer( ServerInterface server ) throws Exception
{
server.shutdown();
Thread.sleep( 1000 );
}
private DbRepresentation addMoreData( String path )
{
GraphDatabaseService db = startGraphDatabase( path );
Transaction tx = db.beginTx();
Node node = db.createNode();
node.setProperty( "backup", "Is great" );
db.getReferenceNode().createRelationshipTo( node,
DynamicRelationshipType.withName( "LOVES" ) );
tx.success();
tx.finish();
DbRepresentation result = DbRepresentation.of( db );
db.shutdown();
return result;
}
private GraphDatabaseService startGraphDatabase( String path )
{
return new EmbeddedGraphDatabase( path, stringMap(
Config.KEEP_LOGICAL_LOGS, "true" ) );
}
private DbRepresentation createInitialDataSet( String path )
{
GraphDatabaseService db = startGraphDatabase( path );
Transaction tx = db.beginTx();
Node node = db.createNode();
node.setProperty( "myKey", "myValue" );
Index<Node> nodeIndex = db.index().forNodes( "db-index" );
nodeIndex.add( node, "myKey", "myValue" );
db.getReferenceNode().createRelationshipTo( node,
DynamicRelationshipType.withName( "KNOWS" ) );
tx.success();
tx.finish();
DbRepresentation result = DbRepresentation.of( db );
db.shutdown();
return result;
}
@Test
public void multipleIncrementals() throws Exception
{
GraphDatabaseService db = null;
try
{
db = new EmbeddedGraphDatabase( serverPath,
stringMap( ENABLE_ONLINE_BACKUP, "true" ) );
Transaction tx = db.beginTx();
Index<Node> index = db.index().forNodes( "yo" );
index.add( db.createNode(), "justTo", "commitATx" );
tx.success();
tx.finish();
OnlineBackup backup = OnlineBackup.from( "localhost" );
backup.full( backupPath );
long lastCommittedTxForLucene = getLastCommittedTx( backupPath );
for ( int i = 0; i < 5; i++ )
{
tx = db.beginTx();
Node node = db.createNode();
index.add( node, "key", "value" + i );
tx.success();
tx.finish();
backup.incremental( backupPath );
assertEquals( lastCommittedTxForLucene + i + 1,
getLastCommittedTx( backupPath ) );
}
}
finally
{
if ( db != null )
{
db.shutdown();
}
}
}
@Test
@Ignore
public void backupIndexWithNoCommits() throws Exception
{
GraphDatabaseService db = null;
try
{
db = new EmbeddedGraphDatabase( serverPath,
stringMap( ENABLE_ONLINE_BACKUP, "true" ) );
db.index().forNodes( "created-no-commits" );
OnlineBackup backup = OnlineBackup.from( "localhost" );
backup.full( backupPath );
}
finally
{
if ( db != null )
{
db.shutdown();
}
}
}
private long getLastCommittedTx( String path )
{
GraphDatabaseService db = new EmbeddedGraphDatabase( path );
try
{
XaDataSource ds = ((AbstractGraphDatabase)db).getConfig().getTxModule().getXaDataSourceManager().getXaDataSource(
LuceneDataSource.DEFAULT_NAME );
return ds.getLastCommittedTxId();
}
finally
{
db.shutdown();
}
}
@Test
public void shouldRetainFileLocksAfterFullBackupOnLiveDatabase() throws Exception
{
GraphDatabaseService db = new EmbeddedGraphDatabase( serverPath, stringMap( ENABLE_ONLINE_BACKUP, "true" ) );
try
{
assertStoreIsLocked( serverPath );
OnlineBackup.from( "localhost" ).full( backupPath );
assertStoreIsLocked( serverPath );
}
finally
{
db.shutdown();
}
}
private static void assertStoreIsLocked( String path )
{
try
{
new EmbeddedGraphDatabase( path ).shutdown();
fail( "Could start up database in same process, store not locked" );
}
catch ( TransactionFailureException ex )
{
// expected
}
StartupChecker proc = new LockProcess().start( path );
try
{
assertFalse( "Could start up database in subprocess, store is not locked", proc.startupOk() );
}
finally
{
SubProcess.stop( proc );
}
}
public interface StartupChecker
{
boolean startupOk();
}
@SuppressWarnings( "serial" )
private static class LockProcess extends SubProcess<StartupChecker, String> implements StartupChecker
{
private volatile Object state;
@Override
public boolean startupOk()
{
Object result;
do
{
result = state;
}
while ( result == null );
return !( state instanceof Exception );
}
@Override
protected void startup( String path ) throws Throwable
{
GraphDatabaseService db;
try
{
db = new EmbeddedGraphDatabase( path );
}
catch ( TransactionFailureException ex )
{
state = ex;
return;
}
state = new Object();
db.shutdown();
}
}
}
| Enabled ignored test
| backup/src/test/java/org/neo4j/backup/TestBackup.java | Enabled ignored test | <ide><path>ackup/src/test/java/org/neo4j/backup/TestBackup.java
<ide>
<ide> import org.apache.commons.io.FileUtils;
<ide> import org.junit.Before;
<del>import org.junit.Ignore;
<ide> import org.junit.Test;
<ide> import org.neo4j.com.ComException;
<ide> import org.neo4j.graphdb.DynamicRelationshipType;
<ide> }
<ide>
<ide> @Test
<del> @Ignore
<add> // @Ignore
<ide> public void backupIndexWithNoCommits() throws Exception
<ide> {
<ide> GraphDatabaseService db = null; |
|
Java | agpl-3.0 | c68ea3e9ab97e2c8e5897a32ecca55d5ec1b8c86 | 0 | otavanopisto/kunta-api-server,Metatavu/kunta-api-server,otavanopisto/kunta-api-server,Metatavu/kunta-api-server,Metatavu/kunta-api-server | package fi.otavanopisto.kuntaapi.server.rest;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.time.OffsetDateTime;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.ejb.Stateful;
import javax.enterprise.context.RequestScoped;
import javax.enterprise.inject.Instance;
import javax.inject.Inject;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.Request;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status;
import org.apache.commons.lang3.EnumUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.math.NumberUtils;
import fi.otavanopisto.kuntaapi.server.controllers.BannerController;
import fi.otavanopisto.kuntaapi.server.controllers.HttpCacheController;
import fi.otavanopisto.kuntaapi.server.controllers.MenuController;
import fi.otavanopisto.kuntaapi.server.controllers.OrganizationController;
import fi.otavanopisto.kuntaapi.server.controllers.PageController;
import fi.otavanopisto.kuntaapi.server.controllers.TileController;
import fi.otavanopisto.kuntaapi.server.id.AttachmentId;
import fi.otavanopisto.kuntaapi.server.id.BannerId;
import fi.otavanopisto.kuntaapi.server.id.EventId;
import fi.otavanopisto.kuntaapi.server.id.JobId;
import fi.otavanopisto.kuntaapi.server.id.MenuId;
import fi.otavanopisto.kuntaapi.server.id.MenuItemId;
import fi.otavanopisto.kuntaapi.server.id.NewsArticleId;
import fi.otavanopisto.kuntaapi.server.id.OrganizationId;
import fi.otavanopisto.kuntaapi.server.id.OrganizationServiceId;
import fi.otavanopisto.kuntaapi.server.id.PageId;
import fi.otavanopisto.kuntaapi.server.id.TileId;
import fi.otavanopisto.kuntaapi.server.integrations.AttachmentData;
import fi.otavanopisto.kuntaapi.server.integrations.EventProvider;
import fi.otavanopisto.kuntaapi.server.integrations.FileId;
import fi.otavanopisto.kuntaapi.server.integrations.JobProvider;
import fi.otavanopisto.kuntaapi.server.integrations.JobProvider.JobOrder;
import fi.otavanopisto.kuntaapi.server.integrations.JobProvider.JobOrderDirection;
import fi.otavanopisto.kuntaapi.server.integrations.KuntaApiConsts;
import fi.otavanopisto.kuntaapi.server.integrations.NewsProvider;
import fi.otavanopisto.kuntaapi.server.integrations.OrganizationServiceProvider;
import fi.otavanopisto.kuntaapi.server.rest.model.Attachment;
import fi.otavanopisto.kuntaapi.server.rest.model.Banner;
import fi.otavanopisto.kuntaapi.server.rest.model.Event;
import fi.otavanopisto.kuntaapi.server.rest.model.Job;
import fi.otavanopisto.kuntaapi.server.rest.model.LocalizedValue;
import fi.otavanopisto.kuntaapi.server.rest.model.Menu;
import fi.otavanopisto.kuntaapi.server.rest.model.MenuItem;
import fi.otavanopisto.kuntaapi.server.rest.model.NewsArticle;
import fi.otavanopisto.kuntaapi.server.rest.model.Organization;
import fi.otavanopisto.kuntaapi.server.rest.model.OrganizationService;
import fi.otavanopisto.kuntaapi.server.rest.model.OrganizationSetting;
import fi.otavanopisto.kuntaapi.server.rest.model.Page;
import fi.otavanopisto.kuntaapi.server.rest.model.Tile;
import fi.otavanopisto.kuntaapi.server.system.OrganizationSettingProvider;
/**
* REST Service implementation
*
* @author Antti Leppä
* @author Heikki Kurhinen
*/
@RequestScoped
@Stateful
@SuppressWarnings ("squid:S3306")
public class OrganizationsApiImpl extends OrganizationsApi {
private static final String INVALID_SETTING_ID = "Invalid setting id";
private static final String MAX_RESULTS_MUST_BY_A_POSITIVE_INTEGER = "maxResults must by a positive integer";
private static final String FIRST_RESULT_MUST_BY_A_POSITIVE_INTEGER = "firstResult must by a positive integer";
private static final String NOT_FOUND = "Not Found";
private static final String NOT_IMPLEMENTED = "Not implemented";
private static final String INTERNAL_SERVER_ERROR = "Internal Server Error";
private static final String FAILED_TO_STREAM_IMAGE_TO_CLIENT = "Failed to stream image to client";
@Inject
private Logger logger;
@Inject
private OrganizationSettingProvider organizationSettingProvider;
@Inject
private OrganizationController organizationController;
@Inject
private PageController pageController;
@Inject
private MenuController menuController;
@Inject
private BannerController bannerController;
@Inject
private TileController tileController;
@Inject
private HttpCacheController httpCacheController;
@Inject
private Instance<OrganizationServiceProvider> organizationServiceProviders;
@Inject
private Instance<EventProvider> eventProviders;
@Inject
private Instance<NewsProvider> newsProviders;
@Inject
private Instance<JobProvider> jobProviders;
@Override
public Response listOrganizations(String businessName, String businessCode, String search, Long firstResult, Long maxResults, @Context Request request) {
List<Organization> organizations;
if (search != null) {
organizations = organizationController.searchOrganizations(search, businessName, businessCode, firstResult, maxResults);
} else {
organizations = organizationController.listOrganizations(businessName, businessCode, firstResult, maxResults);
}
List<String> ids = httpCacheController.getEntityIds(organizations);
Response notModified = httpCacheController.getNotModified(request, ids);
if (notModified != null) {
return notModified;
}
return httpCacheController.sendModified(organizations, ids);
}
@Override
public Response findOrganization(String organizationIdParam, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
if (organizationId == null) {
return createNotFound(NOT_FOUND);
}
Response notModified = httpCacheController.getNotModified(request, organizationId);
if (notModified != null) {
return notModified;
}
Organization organization = organizationController.findOrganization(organizationId);
if (organization != null) {
return httpCacheController.sendModified(organization, organization.getId());
}
return createNotFound(NOT_FOUND);
}
@Override
public Response createOrganizationService(String organizationId, OrganizationService body, @Context Request request) {
return createNotImplemented(NOT_IMPLEMENTED);
}
@Override
public Response findOrganizationService(String organizationIdParam, String organizationServiceIdParam, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
OrganizationServiceId organizationServiceId = toOrganizationServiceId(organizationServiceIdParam);
for (OrganizationServiceProvider organizationServiceProvider : getOrganizationServiceProviders()) {
OrganizationService organizationService = organizationServiceProvider.findOrganizationService(organizationId, organizationServiceId);
if (organizationService != null) {
return Response.ok(organizationService)
.build();
}
}
return Response
.status(Status.NOT_FOUND)
.build();
}
@Override
public Response listOrganizationOrganizationServices(String organizationIdParam, Long firstResult, Long maxResults, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
if (organizationId == null) {
return Response.status(Status.BAD_REQUEST)
.entity("Organization parameter is mandatory")
.build();
}
Response validationResponse = validateListLimitParams(firstResult, maxResults);
if (validationResponse != null) {
return validationResponse;
}
List<OrganizationService> result = new ArrayList<>();
for (OrganizationServiceProvider organizationServiceProvider : getOrganizationServiceProviders()) {
result.addAll(organizationServiceProvider.listOrganizationServices(organizationId));
}
int resultCount = result.size();
int firstIndex = firstResult == null ? 0 : Math.min(firstResult.intValue(), resultCount);
int toIndex = maxResults == null ? resultCount : Math.min(firstIndex + maxResults.intValue(), resultCount);
return Response.ok(result.subList(firstIndex, toIndex))
.build();
}
@Override
public Response updateOrganizationService(String organizationId, String organizationServiceId,
OrganizationService body, @Context Request request) {
return createNotImplemented(NOT_IMPLEMENTED);
}
@Override
public Response findOrganizationEvent(String organizationIdParam, String eventIdParam, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
EventId eventId = toEventId(eventIdParam);
for (EventProvider eventProvider : getEventProviders()) {
Event event = eventProvider.findOrganizationEvent(organizationId, eventId);
if (event != null) {
return Response.ok(event)
.build();
}
}
return Response.status(Status.NOT_FOUND)
.build();
}
@Override
public Response findOrganizationEventImage(String organizationIdParam, String eventIdParam, String imageIdParam, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
EventId eventId = toEventId(eventIdParam);
AttachmentId attachmentId = new AttachmentId(KuntaApiConsts.IDENTIFIER_NAME, imageIdParam);
for (EventProvider eventProvider : getEventProviders()) {
Attachment attachment = eventProvider.findEventImage(organizationId, eventId, attachmentId);
if (attachment != null) {
return Response.ok(attachment)
.build();
}
}
return Response.status(Status.NOT_FOUND)
.build();
}
@Override
public Response getOrganizationEventImageData(String organizationIdParam, String eventIdParam, String imageIdParam, Integer size, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
EventId eventId = toEventId(eventIdParam);
AttachmentId attachmentId = new AttachmentId(KuntaApiConsts.IDENTIFIER_NAME, imageIdParam);
for (EventProvider eventProvider : getEventProviders()) {
AttachmentData attachmentData = eventProvider.getEventImageData(organizationId, eventId, attachmentId, size);
if (attachmentData != null) {
try (InputStream stream = new ByteArrayInputStream(attachmentData.getData())) {
return Response.ok(stream, attachmentData.getType())
.build();
} catch (IOException e) {
logger.log(Level.SEVERE, FAILED_TO_STREAM_IMAGE_TO_CLIENT, e);
return Response.status(Status.INTERNAL_SERVER_ERROR)
.entity(INTERNAL_SERVER_ERROR)
.build();
}
}
}
return Response.status(Status.NOT_FOUND)
.build();
}
@Override
public Response listOrganizationEventImages(String organizationIdParam, String eventIdParam, @Context Request request) {
List<Attachment> result = new ArrayList<>();
OrganizationId organizationId = toOrganizationId(organizationIdParam);
EventId eventId = toEventId(eventIdParam);
for (EventProvider eventProvider : getEventProviders()) {
result.addAll(eventProvider.listEventImages(organizationId, eventId));
}
return Response.ok(result)
.build();
}
@Override
public Response listOrganizationEvents(String organizationIdParam,
String startBefore, String startAfter,
String endBefore, String endAfter,
Integer firstResult, Integer maxResults,
String orderBy, String orderDir, @Context Request request) {
EventProvider.EventOrder order = EventProvider.EventOrder.START_DATE;
EventProvider.EventOrderDirection orderDirection = EventProvider.EventOrderDirection.DESCENDING;
if (StringUtils.isNotBlank(orderBy)) {
order = EnumUtils.getEnum(EventProvider.EventOrder.class, orderBy);
if (order == null) {
return Response.status(Status.BAD_REQUEST)
.entity(String.format("Invalid event order %s", orderBy))
.build();
}
}
if (StringUtils.isNotBlank(orderDir)) {
orderDirection = EnumUtils.getEnum(EventProvider.EventOrderDirection.class, orderDir);
if (orderDirection == null) {
return Response.status(Status.BAD_REQUEST)
.entity(String.format("Invalid event order direction %s", orderDir))
.build();
}
}
OrganizationId organizationId = toOrganizationId(organizationIdParam);
List<Event> result = new ArrayList<>();
for (EventProvider eventProvider : getEventProviders()) {
result.addAll(eventProvider.listOrganizationEvents(organizationId, getDateTime(startBefore), getDateTime(startAfter), getDateTime(endBefore), getDateTime(endAfter), order, orderDirection, firstResult, maxResults));
}
return Response.ok(result)
.build();
}
/* News */
@Override
public Response listOrganizationNews(String organizationIdParam, String publishedBefore, String publishedAfter,
Integer firstResult, Integer maxResults, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
List<NewsArticle> result = new ArrayList<>();
for (NewsProvider newsProvider : getNewsProviders()) {
result.addAll(newsProvider.listOrganizationNews(organizationId, getDateTime(publishedBefore), getDateTime(publishedAfter), firstResult, maxResults));
}
return Response.ok(result)
.build();
}
@Override
public Response findOrganizationNewsArticle(String organizationIdParam, String newsArticleIdParam, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
NewsArticleId newsArticleId = toNewsArticleId(newsArticleIdParam);
for (NewsProvider newsProvider : getNewsProviders()) {
NewsArticle newsArticle = newsProvider.findOrganizationNewsArticle(organizationId, newsArticleId);
if (newsArticle != null) {
return Response.ok(newsArticle)
.build();
}
}
return Response.status(Status.NOT_FOUND)
.build();
}
@Override
public Response findOrganizationNewsArticleImage(String organizationIdParam, String newsArticleIdParam, String imageIdParam, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
NewsArticleId newsArticleId = toNewsArticleId(newsArticleIdParam);
AttachmentId attachmentId = toAttachmentId(imageIdParam);
for (NewsProvider newsProvider : getNewsProviders()) {
Attachment attachment = newsProvider.findNewsArticleImage(organizationId, newsArticleId, attachmentId);
if (attachment != null) {
return Response.ok(attachment)
.build();
}
}
return Response.status(Status.NOT_FOUND)
.build();
}
@Override
public Response getOrganizationNewsArticleImageData(String organizationIdParam, String newsArticleIdParam, String imageIdParam, Integer size, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
NewsArticleId newsArticleId = toNewsArticleId(newsArticleIdParam);
AttachmentId attachmentId = toAttachmentId(imageIdParam);
for (NewsProvider newsProvider : getNewsProviders()) {
AttachmentData attachmentData = newsProvider.getNewsArticleImageData(organizationId, newsArticleId, attachmentId, size);
if (attachmentData != null) {
try (InputStream stream = new ByteArrayInputStream(attachmentData.getData())) {
return Response.ok(stream, attachmentData.getType())
.build();
} catch (IOException e) {
logger.log(Level.SEVERE, FAILED_TO_STREAM_IMAGE_TO_CLIENT, e);
return Response.status(Status.INTERNAL_SERVER_ERROR)
.entity(INTERNAL_SERVER_ERROR)
.build();
}
}
}
return Response.status(Status.NOT_FOUND)
.build();
}
@Override
public Response listOrganizationNewsArticleImages(String organizationIdParam, String newsArticleIdParam, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
NewsArticleId newsArticleId = toNewsArticleId(newsArticleIdParam);
List<Attachment> result = new ArrayList<>();
for (NewsProvider newsProvider : getNewsProviders()) {
result.addAll(newsProvider.listNewsArticleImages(organizationId, newsArticleId));
}
return Response.ok(result)
.build();
}
/* Banners */
@Override
public Response listOrganizationBanners(String organizationIdParam, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
List<Banner> result = bannerController.listBanners(organizationId);
List<String> ids = httpCacheController.getEntityIds(result);
Response notModified = httpCacheController.getNotModified(request, ids);
if (notModified != null) {
return notModified;
}
return httpCacheController.sendModified(result, ids);
}
@Override
public Response findOrganizationBanner(String organizationIdParam, String bannerIdParam, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
BannerId bannerId = toBannerId(bannerIdParam);
Response notModified = httpCacheController.getNotModified(request, bannerId);
if (notModified != null) {
return notModified;
}
Banner banner = bannerController.findBanner(organizationId, bannerId);
if (banner != null) {
return httpCacheController.sendModified(banner, banner.getId());
}
return createNotFound(NOT_FOUND);
}
@Override
public Response listOrganizationBannerImages(String organizationIdParam, String bannerIdParam, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
BannerId bannerId = toBannerId(bannerIdParam);
List<Attachment> result = bannerController.listBannerImages(organizationId, bannerId);
List<String> ids = httpCacheController.getEntityIds(result);
Response notModified = httpCacheController.getNotModified(request, ids);
if (notModified != null) {
return notModified;
}
return httpCacheController.sendModified(result, ids);
}
@Override
public Response findOrganizationBannerImage(String organizationIdParam, String bannerIdParam, String imageIdParam, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
BannerId bannerId = toBannerId(bannerIdParam);
AttachmentId attachmentId = toAttachmentId(imageIdParam);
Response notModified = httpCacheController.getNotModified(request, attachmentId);
if (notModified != null) {
return notModified;
}
Attachment attachment = bannerController.findBannerImage(organizationId, bannerId, attachmentId);
if (attachment != null) {
return httpCacheController.sendModified(attachment, attachment.getId());
}
return createNotFound(NOT_FOUND);
}
@Override
public Response getOrganizationBannerImageData(String organizationIdParam, String bannerIdParam, String imageIdParam, Integer size, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
BannerId bannerId = toBannerId(bannerIdParam);
AttachmentId attachmentId = toAttachmentId(imageIdParam);
Response notModified = httpCacheController.getNotModified(request, attachmentId);
if (notModified != null) {
return notModified;
}
AttachmentData attachmentData = bannerController.getBannerImageData(organizationId, bannerId, attachmentId, size);
if (attachmentData != null) {
return httpCacheController.streamModified(attachmentData.getData(), attachmentData.getType(), attachmentId);
}
return Response.status(Status.NOT_FOUND)
.build();
}
/* Tiles */
@Override
public Response listOrganizationTiles(String organizationIdParam, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
List<Tile> result = tileController.listTiles(organizationId);
List<String> ids = httpCacheController.getEntityIds(result);
Response notModified = httpCacheController.getNotModified(request, ids);
if (notModified != null) {
return notModified;
}
return httpCacheController.sendModified(result, ids);
}
@Override
public Response findOrganizationTile(String organizationIdParam, String tileIdParam, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
TileId tileId = toTileId(tileIdParam);
Response notModified = httpCacheController.getNotModified(request, tileId);
if (notModified != null) {
return notModified;
}
Tile tile = tileController.findTile(organizationId, tileId);
if (tile != null) {
return httpCacheController.sendModified(tile, tile.getId());
}
return createNotFound(NOT_FOUND);
}
@Override
public Response listOrganizationTileImages(String organizationIdParam, String tileIdParam, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
TileId tileId = toTileId(tileIdParam);
List<Attachment> result = tileController.listTileImages(organizationId, tileId);
List<String> ids = httpCacheController.getEntityIds(result);
Response notModified = httpCacheController.getNotModified(request, ids);
if (notModified != null) {
return notModified;
}
return httpCacheController.sendModified(result, ids);
}
@Override
public Response findOrganizationTileImage(String organizationIdParam, String tileIdParam, String imageIdParam, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
TileId tileId = toTileId(tileIdParam);
AttachmentId attachmentId = toAttachmentId(imageIdParam);
Response notModified = httpCacheController.getNotModified(request, attachmentId);
if (notModified != null) {
return notModified;
}
Attachment attachment = tileController.findTileImage(organizationId, tileId, attachmentId);
if (attachment != null) {
return httpCacheController.sendModified(attachment, attachment.getId());
}
return createNotFound(NOT_FOUND);
}
@Override
public Response getOrganizationTileImageData(String organizationIdParam, String tileIdParam, String imageIdParam, Integer size, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
TileId tileId = toTileId(tileIdParam);
AttachmentId attachmentId = toAttachmentId(imageIdParam);
Response notModified = httpCacheController.getNotModified(request, attachmentId);
if (notModified != null) {
return notModified;
}
AttachmentData attachmentData = tileController.getTileImageData(organizationId, tileId, attachmentId, size);
if (attachmentData != null) {
return httpCacheController.streamModified(attachmentData.getData(), attachmentData.getType(), attachmentId);
}
return Response.status(Status.NOT_FOUND)
.build();
}
@Override
@SuppressWarnings("squid:MethodCyclomaticComplexity")
public Response createOrganizationSetting(String organizationIdParam, OrganizationSetting setting, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
if (organizationId == null) {
return createNotFound(NOT_FOUND);
}
if (StringUtils.isBlank(setting.getKey())) {
return createBadRequest("Key is required");
}
if (StringUtils.isBlank(setting.getValue())) {
return createBadRequest("Value is required");
}
List<OrganizationSetting> organizationSettings = organizationSettingProvider.listOrganizationSettings(organizationId, setting.getKey());
if (!organizationSettings.isEmpty()) {
return createBadRequest("Setting already exists");
}
OrganizationSetting organizationSetting = organizationSettingProvider.createOrganizationSetting(organizationId, setting.getKey(), setting.getValue());
if (organizationSetting == null) {
return createInternalServerError(INTERNAL_SERVER_ERROR);
}
return Response.ok()
.entity(organizationSetting)
.build();
}
@Override
public Response listOrganizationSettings(String organizationIdParam, String key, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
if (organizationId == null) {
return createNotFound(NOT_FOUND);
}
List<OrganizationSetting> result = organizationSettingProvider.listOrganizationSettings(organizationId, key);
return Response.ok()
.entity(result)
.build();
}
@Override
public Response findOrganizationSetting(String organizationIdParam, String settingIdParam, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
if (organizationId == null) {
return createNotFound(NOT_FOUND);
}
if (!StringUtils.isNumeric(settingIdParam)) {
return createBadRequest(INVALID_SETTING_ID);
}
Long settingId = NumberUtils.createLong(settingIdParam);
OrganizationSetting organizationSetting = organizationSettingProvider.findOrganizationSetting(organizationId, settingId);
if (organizationSetting == null) {
return createNotFound(NOT_FOUND);
}
return Response.ok()
.entity(organizationSetting)
.build();
}
@Override
@SuppressWarnings ("squid:MethodCyclomaticComplexity")
public Response updateOrganizationSetting(String organizationIdParam, String settingIdParam, OrganizationSetting setting, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
if (organizationId == null) {
return createNotFound(NOT_FOUND);
}
if (StringUtils.isBlank(setting.getKey())) {
return createBadRequest("Key is required");
}
if (StringUtils.isBlank(setting.getValue())) {
return createBadRequest("Value is required");
}
if (!StringUtils.isNumeric(settingIdParam)) {
return createBadRequest(INVALID_SETTING_ID);
}
Long settingId = NumberUtils.createLong(settingIdParam);
OrganizationSetting organizationSetting = organizationSettingProvider.findOrganizationSetting(organizationId, settingId);
if (organizationSetting == null) {
return createNotFound(NOT_FOUND);
}
if (!StringUtils.equals(organizationSetting.getKey(), setting.getKey())) {
return createBadRequest("Cannot update setting key");
}
OrganizationSetting updatedOrganizationSetting = organizationSettingProvider.updateOrganizationSetting(settingId, setting.getValue());
if (updatedOrganizationSetting == null) {
return createNotFound(NOT_FOUND);
}
return Response.ok()
.entity(updatedOrganizationSetting)
.build();
}
@Override
public Response deleteOrganizationSetting(String organizationIdParam, String settingIdParam, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
if (organizationId == null) {
return createNotFound(NOT_FOUND);
}
if (!StringUtils.isNumeric(settingIdParam)) {
return createBadRequest(INVALID_SETTING_ID);
}
Long settingId = NumberUtils.createLong(settingIdParam);
OrganizationSetting organizationSetting = organizationSettingProvider.findOrganizationSetting(organizationId, settingId);
if (organizationSetting == null) {
return createNotFound(NOT_FOUND);
}
organizationSettingProvider.deleteOrganizationSetting(settingId);
return Response.noContent()
.build();
}
/* Pages */
@Override
public Response listOrganizationPages(String organizationIdParam, String parentIdParam, String path, String search, Long firstResult, Long maxResults, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
if (organizationId == null) {
return createNotFound(NOT_FOUND);
}
if (search != null && (parentIdParam != null || path != null)) {
return createNotImplemented("Search parameter can not be combined with path or parentId parameters");
}
boolean onlyRootPages = StringUtils.equals("ROOT", parentIdParam);
PageId parentId = onlyRootPages ? null : toPageId(parentIdParam);
List<Page> result = listOrganizationPages(organizationId, onlyRootPages, parentId, path, search, firstResult, maxResults);
List<String> ids = httpCacheController.getEntityIds(result);
Response notModified = httpCacheController.getNotModified(request, ids);
if (notModified != null) {
return notModified;
}
return httpCacheController.sendModified(result, ids);
}
@Override
public Response findOrganizationPage(String organizationIdParam, String pageIdParam, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
if (organizationId == null) {
return createNotFound(NOT_FOUND);
}
PageId pageId = toPageId(pageIdParam);
if (pageId == null) {
return createNotFound(NOT_FOUND);
}
Response notModified = httpCacheController.getNotModified(request, pageId);
if (notModified != null) {
return notModified;
}
Page page = pageController.findPage(organizationId, pageId);
if (page != null) {
return httpCacheController.sendModified(page, page.getId());
}
return createNotFound(NOT_FOUND);
}
@Override
public Response findOrganizationPageContent(String organizationIdParam, String pageIdParam, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
if (organizationId == null) {
return createNotFound(NOT_FOUND);
}
PageId pageId = toPageId(pageIdParam);
if (pageId == null) {
return createNotFound(NOT_FOUND);
}
Response notModified = httpCacheController.getNotModified(request, pageId);
if (notModified != null) {
return notModified;
}
List<LocalizedValue> pageContents = pageController.getPageContents(organizationId, pageId);
if (pageContents != null) {
return httpCacheController.sendModified(pageContents, pageId.getId());
}
return createNotFound(NOT_FOUND);
}
@Override
public Response listOrganizationPageImages(String organizationIdParam, String pageIdParam, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
PageId pageId = toPageId(pageIdParam);
List<Attachment> result = pageController.listPageImages(organizationId, pageId);
List<String> ids = httpCacheController.getEntityIds(result);
Response notModified = httpCacheController.getNotModified(request, ids);
if (notModified != null) {
return notModified;
}
return httpCacheController.sendModified(result, ids);
}
@Override
public Response findOrganizationPageImage(String organizationIdParam, String pageIdParam, String imageIdParam, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
PageId pageId = toPageId(pageIdParam);
AttachmentId attachmentId = toAttachmentId(imageIdParam);
Response notModified = httpCacheController.getNotModified(request, attachmentId);
if (notModified != null) {
return notModified;
}
Attachment attachment = pageController.findPageImage(organizationId, pageId, attachmentId);
if (attachment != null) {
return httpCacheController.sendModified(attachment, attachment.getId());
}
return Response.status(Status.NOT_FOUND)
.build();
}
@Override
public Response getOrganizationPageImageData(String organizationIdParam, String pageIdParam, String imageIdParam, Integer size, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
PageId pageId = toPageId(pageIdParam);
AttachmentId attachmentId = toAttachmentId(imageIdParam);
Response notModified = httpCacheController.getNotModified(request, attachmentId);
if (notModified != null) {
return notModified;
}
AttachmentData attachmentData = pageController.getPageAttachmentData(organizationId, pageId, attachmentId, size);
if (attachmentData != null) {
return httpCacheController.streamModified(attachmentData.getData(), attachmentData.getType(), attachmentId);
}
return Response.status(Status.NOT_FOUND)
.build();
}
/* Menus */
@Override
public Response listOrganizationMenus(String organizationIdParam, String slug, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
if (organizationId == null) {
return createNotFound(NOT_FOUND);
}
List<Menu> result = menuController.listMenus(slug, organizationId);
List<String> ids = httpCacheController.getEntityIds(result);
Response notModified = httpCacheController.getNotModified(request, ids);
if (notModified != null) {
return notModified;
}
return httpCacheController.sendModified(result, ids);
}
@Override
public Response findOrganizationMenu(String organizationIdParam, String menuIdParam, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
if (organizationId == null) {
return createNotFound(NOT_FOUND);
}
MenuId menuId = toMenuId(menuIdParam);
if (menuId == null) {
return createNotFound(NOT_FOUND);
}
Response notModified = httpCacheController.getNotModified(request, menuId);
if (notModified != null) {
return notModified;
}
Menu menu = menuController.findMenu(organizationId, menuId);
if (menu != null) {
return httpCacheController.sendModified(menu, menu.getId());
}
return createNotFound(NOT_FOUND);
}
/* Menu Items */
@Override
public Response listOrganizationMenuItems(String organizationIdParam, String menuIdParam, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
if (organizationId == null) {
return createNotFound(NOT_FOUND);
}
MenuId menuId = toMenuId(menuIdParam);
if (menuId == null) {
return createNotFound(NOT_FOUND);
}
List<MenuItem> result = menuController.listMenuItems(organizationId, menuId);
List<String> ids = httpCacheController.getEntityIds(result);
Response notModified = httpCacheController.getNotModified(request, ids);
if (notModified != null) {
return notModified;
}
return httpCacheController.sendModified(result, ids);
}
@Override
public Response findOrganizationMenuItem(String organizationIdParam, String menuIdParam, String menuItemIdParam, @Context Request request) {
  OrganizationId organizationId = toOrganizationId(organizationIdParam);
  MenuId menuId = toMenuId(menuIdParam);
  MenuItemId menuItemId = toMenuItemId(menuItemIdParam);
  // All three path segments must be valid ids; otherwise the resource cannot exist.
  if (organizationId == null || menuId == null || menuItemId == null) {
    return createNotFound(NOT_FOUND);
  }
  // Delegate to the id-typed overload that handles caching and the actual lookup.
  return findOrganizationMenuItem(organizationId, menuId, menuItemId, request);
}
private Response findOrganizationMenuItem(OrganizationId organizationId, MenuId menuId, MenuItemId menuItemId, Request request) {
  // Short-circuit with a 304 when the client cache is up to date.
  Response cached = httpCacheController.getNotModified(request, menuItemId);
  if (cached != null) {
    return cached;
  }

  MenuItem menuItem = menuController.findMenuItem(organizationId, menuId, menuItemId);
  return menuItem == null ? createNotFound(NOT_FOUND) : httpCacheController.sendModified(menuItem, menuItem.getId());
}
/* Files */
@Override
public Response listOrganizationFiles(String organizationId, String pageId, @Context Request request) {
  // Stub: organization file listing is not implemented yet; always responds 501.
  return createNotImplemented(NOT_IMPLEMENTED);
}
@Override
public Response findOrganizationFile(String organizationId, String fileId, @Context Request request) {
  // Stub: file lookup is not implemented yet; always responds 501.
  return createNotImplemented(NOT_IMPLEMENTED);
}
@Override
public Response getOrganizationFileData(String organizationId, String fileId, @Context Request request) {
  // Stub: file content streaming is not implemented yet; always responds 501.
  return createNotImplemented(NOT_IMPLEMENTED);
}
/* Jobs */
@Override
public Response findOrganizationJob(String organizationIdParam, String jobIdParam, @Context Request request) {
  OrganizationId organizationId = toOrganizationId(organizationIdParam);
  JobId jobId = toJobId(jobIdParam);
  // Invalid ids cannot address an existing job.
  if (organizationId == null || jobId == null) {
    return createNotFound(NOT_FOUND);
  }

  // Ask each registered job provider in turn; the first hit wins.
  for (JobProvider jobProvider : getJobProviders()) {
    Job job = jobProvider.findOrganizationJob(organizationId, jobId);
    if (job != null) {
      return Response.ok(job).build();
    }
  }

  return createNotFound(NOT_FOUND);
}
@Override
public Response listOrganizationJobs(String organizationIdParam, String sortBy, String sortDir, @Context Request request) {
  OrganizationId organizationId = toOrganizationId(organizationIdParam);
  if (organizationId == null) {
    return createNotFound(NOT_FOUND);
  }

  // Validate the optional sorting parameters before doing any provider work
  // (the original allocated the result list even when validation failed).
  JobOrder order = null;
  if (StringUtils.isNotBlank(sortBy)) {
    order = EnumUtils.getEnum(JobOrder.class, sortBy);
    if (order == null) {
      return createBadRequest("Invalid value for sortBy");
    }
  }

  JobOrderDirection orderDirection = null;
  if (StringUtils.isNotBlank(sortDir)) {
    orderDirection = EnumUtils.getEnum(JobOrderDirection.class, sortDir);
    if (orderDirection == null) {
      return createBadRequest("Invalid value for sortDir");
    }
  }

  // Merge jobs from every provider, then apply the requested ordering.
  List<Job> result = new ArrayList<>();
  for (JobProvider jobProvider : getJobProviders()) {
    result.addAll(jobProvider.listOrganizationJobs(organizationId));
  }

  return Response.ok(sortJobs(result, order, orderDirection))
    .build();
}
/* Announcements */
@Override
public Response findOrganizationAnnouncement(String organizationId, String announcementId, @Context Request request) {
  // Announcements are not implemented yet. Returning null from a JAX-RS resource
  // method is invalid, so respond with an explicit 501 like the other stubs.
  return createNotImplemented(NOT_IMPLEMENTED);
}
@Override
public Response listOrganizationAnnouncements(String organizationId, Integer firstResult, Integer maxResults,
    String sortBy, String sortDir, @Context Request request) {
  // Announcements are not implemented yet. Returning null from a JAX-RS resource
  // method is invalid, so respond with an explicit 501 like the other stubs.
  return createNotImplemented(NOT_IMPLEMENTED);
}
private List<Page> listOrganizationPages(OrganizationId organizationId, boolean onlyRootPages, PageId parentId, String path, String search, Long firstResult, Long maxResults) {
  // A free-text search bypasses the hierarchical (path/parent) listing entirely.
  if (search == null) {
    return pageController.listPages(organizationId, path, onlyRootPages, parentId, firstResult, maxResults);
  }
  return pageController.searchPages(organizationId, search, firstResult, maxResults);
}
private List<Job> sortJobs(List<Job> jobs, JobOrder order, JobOrderDirection orderDirection) {
  // No ordering requested: hand back the caller's list untouched.
  if (order == null) {
    return jobs;
  }

  // Sort a copy so the caller's list is never mutated. Descending is the
  // effective default unless ASCENDING was explicitly requested.
  boolean ascending = orderDirection == JobOrderDirection.ASCENDING;
  List<Job> sorted = new ArrayList<>(jobs);
  switch (order) {
    case PUBLICATION_END:
      Collections.sort(sorted, (Job a, Job b) -> ascending
          ? a.getPublicationEnd().compareTo(b.getPublicationEnd())
          : b.getPublicationEnd().compareTo(a.getPublicationEnd()));
      break;
    case PUBLICATION_START:
      Collections.sort(sorted, (Job a, Job b) -> ascending
          ? a.getPublicationStart().compareTo(b.getPublicationStart())
          : b.getPublicationStart().compareTo(a.getPublicationStart()));
      break;
    default:
      // Unknown orders leave the copy in provider order.
  }
  return sorted;
}
private Response validateListLimitParams(Long firstResult, Long maxResults) {
  // Null means "not specified"; negative paging values are rejected with 400.
  // Returns null when both parameters are acceptable.
  if (firstResult != null && firstResult.longValue() < 0) {
    return createBadRequest(FIRST_RESULT_MUST_BY_A_POSITIVE_INTEGER);
  }
  if (maxResults != null && maxResults.longValue() < 0) {
    return createBadRequest(MAX_RESULTS_MUST_BY_A_POSITIVE_INTEGER);
  }
  return null;
}
private BannerId toBannerId(String id) {
  // Blank input maps to null rather than an id wrapping an empty string.
  if (StringUtils.isBlank(id)) {
    return null;
  }
  return new BannerId(KuntaApiConsts.IDENTIFIER_NAME, id);
}
private TileId toTileId(String id) {
  // Blank input maps to null rather than an id wrapping an empty string.
  if (StringUtils.isBlank(id)) {
    return null;
  }
  return new TileId(KuntaApiConsts.IDENTIFIER_NAME, id);
}
private NewsArticleId toNewsArticleId(String id) {
  // Blank input maps to null rather than an id wrapping an empty string.
  if (StringUtils.isBlank(id)) {
    return null;
  }
  return new NewsArticleId(KuntaApiConsts.IDENTIFIER_NAME, id);
}
private OrganizationId toOrganizationId(String id) {
  // Blank input maps to null rather than an id wrapping an empty string.
  if (StringUtils.isBlank(id)) {
    return null;
  }
  return new OrganizationId(KuntaApiConsts.IDENTIFIER_NAME, id);
}
private OrganizationServiceId toOrganizationServiceId(String id) {
  // Blank input maps to null rather than an id wrapping an empty string.
  if (StringUtils.isBlank(id)) {
    return null;
  }
  return new OrganizationServiceId(KuntaApiConsts.IDENTIFIER_NAME, id);
}
private EventId toEventId(String id) {
  // Blank input maps to null rather than an id wrapping an empty string.
  if (StringUtils.isBlank(id)) {
    return null;
  }
  return new EventId(KuntaApiConsts.IDENTIFIER_NAME, id);
}
private AttachmentId toAttachmentId(String id) {
  // Blank input maps to null rather than an id wrapping an empty string.
  if (StringUtils.isBlank(id)) {
    return null;
  }
  return new AttachmentId(KuntaApiConsts.IDENTIFIER_NAME, id);
}
// Currently unreferenced; kept for the file endpoints once they are implemented.
@SuppressWarnings("unused")
private FileId toFileId(String id) {
  // Blank input maps to null rather than an id wrapping an empty string.
  if (StringUtils.isBlank(id)) {
    return null;
  }
  return new FileId(KuntaApiConsts.IDENTIFIER_NAME, id);
}
private PageId toPageId(String id) {
  // Blank input maps to null rather than an id wrapping an empty string.
  if (StringUtils.isBlank(id)) {
    return null;
  }
  return new PageId(KuntaApiConsts.IDENTIFIER_NAME, id);
}
private MenuId toMenuId(String id) {
  // Blank input maps to null rather than an id wrapping an empty string.
  if (StringUtils.isBlank(id)) {
    return null;
  }
  return new MenuId(KuntaApiConsts.IDENTIFIER_NAME, id);
}
private MenuItemId toMenuItemId(String id) {
  // Blank input maps to null rather than an id wrapping an empty string.
  if (StringUtils.isBlank(id)) {
    return null;
  }
  return new MenuItemId(KuntaApiConsts.IDENTIFIER_NAME, id);
}
private JobId toJobId(String id) {
  // Blank input maps to null rather than an id wrapping an empty string.
  if (StringUtils.isBlank(id)) {
    return null;
  }
  return new JobId(KuntaApiConsts.IDENTIFIER_NAME, id);
}
private OffsetDateTime getDateTime(String timeString) {
  // Blank input means "no filter"; otherwise the value must parse as
  // ISO-8601 with an offset (OffsetDateTime.parse throws on bad input).
  if (StringUtils.isBlank(timeString)) {
    return null;
  }
  return OffsetDateTime.parse(timeString);
}
/**
 * Snapshots the CDI-managed organization service providers into an
 * unmodifiable list. Uses for-each over the Instance (which is Iterable)
 * instead of a manual Iterator loop, matching the iteration style used
 * elsewhere in this class.
 */
private List<OrganizationServiceProvider> getOrganizationServiceProviders() {
  List<OrganizationServiceProvider> result = new ArrayList<>();
  for (OrganizationServiceProvider organizationServiceProvider : organizationServiceProviders) {
    result.add(organizationServiceProvider);
  }
  return Collections.unmodifiableList(result);
}
/**
 * Snapshots the CDI-managed event providers into an unmodifiable list.
 * Uses for-each over the Instance (which is Iterable) instead of a manual
 * Iterator loop.
 */
private List<EventProvider> getEventProviders() {
  List<EventProvider> result = new ArrayList<>();
  for (EventProvider eventProvider : eventProviders) {
    result.add(eventProvider);
  }
  return Collections.unmodifiableList(result);
}
/**
 * Snapshots the CDI-managed news providers into an unmodifiable list.
 * Uses for-each over the Instance (which is Iterable) instead of a manual
 * Iterator loop.
 */
private List<NewsProvider> getNewsProviders() {
  List<NewsProvider> result = new ArrayList<>();
  for (NewsProvider newsProvider : newsProviders) {
    result.add(newsProvider);
  }
  return Collections.unmodifiableList(result);
}
/**
 * Snapshots the CDI-managed job providers into an unmodifiable list.
 * Uses for-each over the Instance (which is Iterable) instead of a manual
 * Iterator loop.
 */
private List<JobProvider> getJobProviders() {
  List<JobProvider> result = new ArrayList<>();
  for (JobProvider jobProvider : jobProviders) {
    result.add(jobProvider);
  }
  return Collections.unmodifiableList(result);
}
} | src/main/java/fi/otavanopisto/kuntaapi/server/rest/OrganizationsApiImpl.java | package fi.otavanopisto.kuntaapi.server.rest;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.time.OffsetDateTime;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.ejb.Stateful;
import javax.enterprise.context.RequestScoped;
import javax.enterprise.inject.Instance;
import javax.inject.Inject;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.Request;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status;
import org.apache.commons.lang3.EnumUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.math.NumberUtils;
import fi.otavanopisto.kuntaapi.server.controllers.BannerController;
import fi.otavanopisto.kuntaapi.server.controllers.HttpCacheController;
import fi.otavanopisto.kuntaapi.server.controllers.MenuController;
import fi.otavanopisto.kuntaapi.server.controllers.OrganizationController;
import fi.otavanopisto.kuntaapi.server.controllers.PageController;
import fi.otavanopisto.kuntaapi.server.controllers.TileController;
import fi.otavanopisto.kuntaapi.server.id.AttachmentId;
import fi.otavanopisto.kuntaapi.server.id.BannerId;
import fi.otavanopisto.kuntaapi.server.id.EventId;
import fi.otavanopisto.kuntaapi.server.id.JobId;
import fi.otavanopisto.kuntaapi.server.id.MenuId;
import fi.otavanopisto.kuntaapi.server.id.MenuItemId;
import fi.otavanopisto.kuntaapi.server.id.NewsArticleId;
import fi.otavanopisto.kuntaapi.server.id.OrganizationId;
import fi.otavanopisto.kuntaapi.server.id.OrganizationServiceId;
import fi.otavanopisto.kuntaapi.server.id.PageId;
import fi.otavanopisto.kuntaapi.server.id.TileId;
import fi.otavanopisto.kuntaapi.server.integrations.AttachmentData;
import fi.otavanopisto.kuntaapi.server.integrations.EventProvider;
import fi.otavanopisto.kuntaapi.server.integrations.FileId;
import fi.otavanopisto.kuntaapi.server.integrations.JobProvider;
import fi.otavanopisto.kuntaapi.server.integrations.JobProvider.JobOrder;
import fi.otavanopisto.kuntaapi.server.integrations.JobProvider.JobOrderDirection;
import fi.otavanopisto.kuntaapi.server.integrations.KuntaApiConsts;
import fi.otavanopisto.kuntaapi.server.integrations.NewsProvider;
import fi.otavanopisto.kuntaapi.server.integrations.OrganizationServiceProvider;
import fi.otavanopisto.kuntaapi.server.rest.model.Attachment;
import fi.otavanopisto.kuntaapi.server.rest.model.Banner;
import fi.otavanopisto.kuntaapi.server.rest.model.Event;
import fi.otavanopisto.kuntaapi.server.rest.model.Job;
import fi.otavanopisto.kuntaapi.server.rest.model.LocalizedValue;
import fi.otavanopisto.kuntaapi.server.rest.model.Menu;
import fi.otavanopisto.kuntaapi.server.rest.model.MenuItem;
import fi.otavanopisto.kuntaapi.server.rest.model.NewsArticle;
import fi.otavanopisto.kuntaapi.server.rest.model.Organization;
import fi.otavanopisto.kuntaapi.server.rest.model.OrganizationService;
import fi.otavanopisto.kuntaapi.server.rest.model.OrganizationSetting;
import fi.otavanopisto.kuntaapi.server.rest.model.Page;
import fi.otavanopisto.kuntaapi.server.rest.model.Tile;
import fi.otavanopisto.kuntaapi.server.system.OrganizationSettingProvider;
/**
* REST Service implementation
*
* @author Antti Leppä
* @author Heikki Kurhinen
*/
@RequestScoped
@Stateful
@SuppressWarnings ("squid:S3306")
public class OrganizationsApiImpl extends OrganizationsApi {
private static final String INVALID_SETTING_ID = "Invalid setting id";
private static final String MAX_RESULTS_MUST_BY_A_POSITIVE_INTEGER = "maxResults must by a positive integer";
private static final String FIRST_RESULT_MUST_BY_A_POSITIVE_INTEGER = "firstResult must by a positive integer";
private static final String NOT_FOUND = "Not Found";
private static final String NOT_IMPLEMENTED = "Not implemented";
private static final String INTERNAL_SERVER_ERROR = "Internal Server Error";
private static final String FAILED_TO_STREAM_IMAGE_TO_CLIENT = "Failed to stream image to client";
@Inject
private Logger logger;
@Inject
private OrganizationSettingProvider organizationSettingProvider;
@Inject
private OrganizationController organizationController;
@Inject
private PageController pageController;
@Inject
private MenuController menuController;
@Inject
private BannerController bannerController;
@Inject
private TileController tileController;
@Inject
private HttpCacheController httpCacheController;
@Inject
private Instance<OrganizationServiceProvider> organizationServiceProviders;
@Inject
private Instance<EventProvider> eventProviders;
@Inject
private Instance<NewsProvider> newsProviders;
@Inject
private Instance<JobProvider> jobProviders;
@Override
public Response listOrganizations(String businessName, String businessCode, String search, Long firstResult, Long maxResults, @Context Request request) {
List<Organization> organizations;
if (search != null) {
organizations = organizationController.searchOrganizations(search, businessName, businessCode, firstResult, maxResults);
} else {
organizations = organizationController.listOrganizations(businessName, businessCode, firstResult, maxResults);
}
List<String> ids = httpCacheController.getEntityIds(organizations);
Response notModified = httpCacheController.getNotModified(request, ids);
if (notModified != null) {
return notModified;
}
return httpCacheController.sendModified(organizations, ids);
}
@Override
public Response findOrganization(String organizationIdParam, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
if (organizationId == null) {
return createNotFound(NOT_FOUND);
}
Response notModified = httpCacheController.getNotModified(request, organizationId);
if (notModified != null) {
return notModified;
}
Organization organization = organizationController.findOrganization(organizationId);
if (organization != null) {
return httpCacheController.sendModified(organization, organization.getId());
}
return createNotFound(NOT_FOUND);
}
@Override
public Response createOrganizationService(String organizationId, OrganizationService body, @Context Request request) {
return createNotImplemented(NOT_IMPLEMENTED);
}
@Override
public Response findOrganizationService(String organizationIdParam, String organizationServiceIdParam, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
OrganizationServiceId organizationServiceId = toOrganizationServiceId(organizationServiceIdParam);
for (OrganizationServiceProvider organizationServiceProvider : getOrganizationServiceProviders()) {
OrganizationService organizationService = organizationServiceProvider.findOrganizationService(organizationId, organizationServiceId);
if (organizationService != null) {
return Response.ok(organizationService)
.build();
}
}
return Response
.status(Status.NOT_FOUND)
.build();
}
@Override
public Response listOrganizationOrganizationServices(String organizationIdParam, Long firstResult, Long maxResults, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
if (organizationId == null) {
return Response.status(Status.BAD_REQUEST)
.entity("Organization parameter is mandatory")
.build();
}
Response validationResponse = validateListLimitParams(firstResult, maxResults);
if (validationResponse != null) {
return validationResponse;
}
List<OrganizationService> result = new ArrayList<>();
for (OrganizationServiceProvider organizationServiceProvider : getOrganizationServiceProviders()) {
result.addAll(organizationServiceProvider.listOrganizationServices(organizationId));
}
int resultCount = result.size();
int firstIndex = firstResult == null ? 0 : Math.min(firstResult.intValue(), resultCount);
int toIndex = maxResults == null ? resultCount : Math.min(firstIndex + maxResults.intValue(), resultCount);
return Response.ok(result.subList(firstIndex, toIndex))
.build();
}
@Override
public Response updateOrganizationService(String organizationId, String organizationServiceId,
OrganizationService body, @Context Request request) {
return createNotImplemented(NOT_IMPLEMENTED);
}
@Override
public Response findOrganizationEvent(String organizationIdParam, String eventIdParam, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
EventId eventId = toEventId(eventIdParam);
for (EventProvider eventProvider : getEventProviders()) {
Event event = eventProvider.findOrganizationEvent(organizationId, eventId);
if (event != null) {
return Response.ok(event)
.build();
}
}
return Response.status(Status.NOT_FOUND)
.build();
}
@Override
public Response findOrganizationEventImage(String organizationIdParam, String eventIdParam, String imageIdParam, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
EventId eventId = toEventId(eventIdParam);
AttachmentId attachmentId = new AttachmentId(KuntaApiConsts.IDENTIFIER_NAME, imageIdParam);
for (EventProvider eventProvider : getEventProviders()) {
Attachment attachment = eventProvider.findEventImage(organizationId, eventId, attachmentId);
if (attachment != null) {
return Response.ok(attachment)
.build();
}
}
return Response.status(Status.NOT_FOUND)
.build();
}
@Override
public Response getOrganizationEventImageData(String organizationIdParam, String eventIdParam, String imageIdParam, Integer size, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
EventId eventId = toEventId(eventIdParam);
AttachmentId attachmentId = new AttachmentId(KuntaApiConsts.IDENTIFIER_NAME, imageIdParam);
for (EventProvider eventProvider : getEventProviders()) {
AttachmentData attachmentData = eventProvider.getEventImageData(organizationId, eventId, attachmentId, size);
if (attachmentData != null) {
try (InputStream stream = new ByteArrayInputStream(attachmentData.getData())) {
return Response.ok(stream, attachmentData.getType())
.build();
} catch (IOException e) {
logger.log(Level.SEVERE, FAILED_TO_STREAM_IMAGE_TO_CLIENT, e);
return Response.status(Status.INTERNAL_SERVER_ERROR)
.entity(INTERNAL_SERVER_ERROR)
.build();
}
}
}
return Response.status(Status.NOT_FOUND)
.build();
}
@Override
public Response listOrganizationEventImages(String organizationIdParam, String eventIdParam, @Context Request request) {
List<Attachment> result = new ArrayList<>();
OrganizationId organizationId = toOrganizationId(organizationIdParam);
EventId eventId = toEventId(eventIdParam);
for (EventProvider eventProvider : getEventProviders()) {
result.addAll(eventProvider.listEventImages(organizationId, eventId));
}
return Response.ok(result)
.build();
}
@Override
public Response listOrganizationEvents(String organizationIdParam,
String startBefore, String startAfter,
String endBefore, String endAfter,
Integer firstResult, Integer maxResults,
String orderBy, String orderDir, @Context Request request) {
EventProvider.EventOrder order = EventProvider.EventOrder.START_DATE;
EventProvider.EventOrderDirection orderDirection = EventProvider.EventOrderDirection.DESCENDING;
if (StringUtils.isNotBlank(orderBy)) {
order = EnumUtils.getEnum(EventProvider.EventOrder.class, orderBy);
if (order == null) {
return Response.status(Status.BAD_REQUEST)
.entity(String.format("Invalid event order %s", orderBy))
.build();
}
}
if (StringUtils.isNotBlank(orderDir)) {
orderDirection = EnumUtils.getEnum(EventProvider.EventOrderDirection.class, orderDir);
if (orderDirection == null) {
return Response.status(Status.BAD_REQUEST)
.entity(String.format("Invalid event order direction %s", orderDir))
.build();
}
}
OrganizationId organizationId = toOrganizationId(organizationIdParam);
List<Event> result = new ArrayList<>();
for (EventProvider eventProvider : getEventProviders()) {
result.addAll(eventProvider.listOrganizationEvents(organizationId, getDateTime(startBefore), getDateTime(startAfter), getDateTime(endBefore), getDateTime(endAfter), order, orderDirection, firstResult, maxResults));
}
return Response.ok(result)
.build();
}
/* News */
@Override
public Response listOrganizationNews(String organizationIdParam, String publishedBefore, String publishedAfter,
Integer firstResult, Integer maxResults, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
List<NewsArticle> result = new ArrayList<>();
for (NewsProvider newsProvider : getNewsProviders()) {
result.addAll(newsProvider.listOrganizationNews(organizationId, getDateTime(publishedBefore), getDateTime(publishedAfter), firstResult, maxResults));
}
return Response.ok(result)
.build();
}
@Override
public Response findOrganizationNewsArticle(String organizationIdParam, String newsArticleIdParam, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
NewsArticleId newsArticleId = toNewsArticleId(newsArticleIdParam);
for (NewsProvider newsProvider : getNewsProviders()) {
NewsArticle newsArticle = newsProvider.findOrganizationNewsArticle(organizationId, newsArticleId);
if (newsArticle != null) {
return Response.ok(newsArticle)
.build();
}
}
return Response.status(Status.NOT_FOUND)
.build();
}
@Override
public Response findOrganizationNewsArticleImage(String organizationIdParam, String newsArticleIdParam, String imageIdParam, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
NewsArticleId newsArticleId = toNewsArticleId(newsArticleIdParam);
AttachmentId attachmentId = toAttachmentId(imageIdParam);
for (NewsProvider newsProvider : getNewsProviders()) {
Attachment attachment = newsProvider.findNewsArticleImage(organizationId, newsArticleId, attachmentId);
if (attachment != null) {
return Response.ok(attachment)
.build();
}
}
return Response.status(Status.NOT_FOUND)
.build();
}
@Override
public Response getOrganizationNewsArticleImageData(String organizationIdParam, String newsArticleIdParam, String imageIdParam, Integer size, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
NewsArticleId newsArticleId = toNewsArticleId(newsArticleIdParam);
AttachmentId attachmentId = toAttachmentId(imageIdParam);
for (NewsProvider newsProvider : getNewsProviders()) {
AttachmentData attachmentData = newsProvider.getNewsArticleImageData(organizationId, newsArticleId, attachmentId, size);
if (attachmentData != null) {
try (InputStream stream = new ByteArrayInputStream(attachmentData.getData())) {
return Response.ok(stream, attachmentData.getType())
.build();
} catch (IOException e) {
logger.log(Level.SEVERE, FAILED_TO_STREAM_IMAGE_TO_CLIENT, e);
return Response.status(Status.INTERNAL_SERVER_ERROR)
.entity(INTERNAL_SERVER_ERROR)
.build();
}
}
}
return Response.status(Status.NOT_FOUND)
.build();
}
@Override
public Response listOrganizationNewsArticleImages(String organizationIdParam, String newsArticleIdParam, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
NewsArticleId newsArticleId = toNewsArticleId(newsArticleIdParam);
List<Attachment> result = new ArrayList<>();
for (NewsProvider newsProvider : getNewsProviders()) {
result.addAll(newsProvider.listNewsArticleImages(organizationId, newsArticleId));
}
return Response.ok(result)
.build();
}
/* Banners */
@Override
public Response listOrganizationBanners(String organizationIdParam, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
List<Banner> result = bannerController.listBanners(organizationId);
List<String> ids = httpCacheController.getEntityIds(result);
Response notModified = httpCacheController.getNotModified(request, ids);
if (notModified != null) {
return notModified;
}
return httpCacheController.sendModified(result, ids);
}
@Override
public Response findOrganizationBanner(String organizationIdParam, String bannerIdParam, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
BannerId bannerId = toBannerId(bannerIdParam);
Response notModified = httpCacheController.getNotModified(request, bannerId);
if (notModified != null) {
return notModified;
}
Banner banner = bannerController.findBanner(organizationId, bannerId);
if (banner != null) {
return httpCacheController.sendModified(banner, banner.getId());
}
return createNotFound(NOT_FOUND);
}
@Override
public Response listOrganizationBannerImages(String organizationIdParam, String bannerIdParam, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
BannerId bannerId = toBannerId(bannerIdParam);
List<Attachment> result = bannerController.listBannerImages(organizationId, bannerId);
List<String> ids = httpCacheController.getEntityIds(result);
Response notModified = httpCacheController.getNotModified(request, ids);
if (notModified != null) {
return notModified;
}
return httpCacheController.sendModified(result, ids);
}
@Override
public Response findOrganizationBannerImage(String organizationIdParam, String bannerIdParam, String imageIdParam, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
BannerId bannerId = toBannerId(bannerIdParam);
AttachmentId attachmentId = toAttachmentId(imageIdParam);
Response notModified = httpCacheController.getNotModified(request, attachmentId);
if (notModified != null) {
return notModified;
}
Attachment attachment = bannerController.findBannerImage(organizationId, bannerId, attachmentId);
if (attachment != null) {
return httpCacheController.sendModified(attachment, attachment.getId());
}
return createNotFound(NOT_FOUND);
}
@Override
public Response getOrganizationBannerImageData(String organizationIdParam, String bannerIdParam, String imageIdParam, Integer size, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
BannerId bannerId = toBannerId(bannerIdParam);
AttachmentId attachmentId = toAttachmentId(imageIdParam);
Response notModified = httpCacheController.getNotModified(request, attachmentId);
if (notModified != null) {
return notModified;
}
AttachmentData attachmentData = bannerController.getBannerImageData(organizationId, bannerId, attachmentId, size);
if (attachmentData != null) {
return httpCacheController.streamModified(attachmentData.getData(), attachmentData.getType(), attachmentId);
}
return Response.status(Status.NOT_FOUND)
.build();
}
/* Tiles */
@Override
public Response listOrganizationTiles(String organizationIdParam, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
List<Tile> result = tileController.listTiles(organizationId);
List<String> ids = httpCacheController.getEntityIds(result);
Response notModified = httpCacheController.getNotModified(request, ids);
if (notModified != null) {
return notModified;
}
return httpCacheController.sendModified(result, ids);
}
@Override
public Response findOrganizationTile(String organizationIdParam, String tileIdParam, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
TileId tileId = toTileId(tileIdParam);
Response notModified = httpCacheController.getNotModified(request, tileId);
if (notModified != null) {
return notModified;
}
Tile tile = tileController.findTile(organizationId, tileId);
if (tile != null) {
return httpCacheController.sendModified(tile, tile.getId());
}
return createNotFound(NOT_FOUND);
}
@Override
public Response listOrganizationTileImages(String organizationIdParam, String tileIdParam, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
TileId tileId = toTileId(tileIdParam);
List<Attachment> result = tileController.listTileImages(organizationId, tileId);
List<String> ids = httpCacheController.getEntityIds(result);
Response notModified = httpCacheController.getNotModified(request, ids);
if (notModified != null) {
return notModified;
}
return httpCacheController.sendModified(result, ids);
}
@Override
public Response findOrganizationTileImage(String organizationIdParam, String tileIdParam, String imageIdParam, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
TileId tileId = toTileId(tileIdParam);
AttachmentId attachmentId = toAttachmentId(imageIdParam);
Response notModified = httpCacheController.getNotModified(request, tileId);
if (notModified != null) {
return notModified;
}
Attachment attachment = tileController.findTileImage(organizationId, tileId, attachmentId);
if (attachment != null) {
return httpCacheController.sendModified(attachment, attachment.getId());
}
return createNotFound(NOT_FOUND);
}
@Override
public Response getOrganizationTileImageData(String organizationIdParam, String tileIdParam, String imageIdParam, Integer size, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
TileId tileId = toTileId(tileIdParam);
AttachmentId attachmentId = toAttachmentId(imageIdParam);
Response notModified = httpCacheController.getNotModified(request, attachmentId);
if (notModified != null) {
return notModified;
}
AttachmentData attachmentData = tileController.getTileImageData(organizationId, tileId, attachmentId, size);
if (attachmentData != null) {
return httpCacheController.streamModified(attachmentData.getData(), attachmentData.getType(), attachmentId);
}
return Response.status(Status.NOT_FOUND)
.build();
}
@Override
@SuppressWarnings("squid:MethodCyclomaticComplexity")
public Response createOrganizationSetting(String organizationIdParam, OrganizationSetting setting, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
if (organizationId == null) {
return createNotFound(NOT_FOUND);
}
if (StringUtils.isBlank(setting.getKey())) {
return createBadRequest("Key is required");
}
if (StringUtils.isBlank(setting.getValue())) {
return createBadRequest("Value is required");
}
List<OrganizationSetting> organizationSettings = organizationSettingProvider.listOrganizationSettings(organizationId, setting.getKey());
if (!organizationSettings.isEmpty()) {
return createBadRequest("Setting already exists");
}
OrganizationSetting organizationSetting = organizationSettingProvider.createOrganizationSetting(organizationId, setting.getKey(), setting.getValue());
if (organizationSetting == null) {
return createInternalServerError(INTERNAL_SERVER_ERROR);
}
return Response.ok()
.entity(organizationSetting)
.build();
}
@Override
public Response listOrganizationSettings(String organizationIdParam, String key, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
if (organizationId == null) {
return createNotFound(NOT_FOUND);
}
List<OrganizationSetting> result = organizationSettingProvider.listOrganizationSettings(organizationId, key);
return Response.ok()
.entity(result)
.build();
}
@Override
public Response findOrganizationSetting(String organizationIdParam, String settingIdParam, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
if (organizationId == null) {
return createNotFound(NOT_FOUND);
}
if (!StringUtils.isNumeric(settingIdParam)) {
return createBadRequest(INVALID_SETTING_ID);
}
Long settingId = NumberUtils.createLong(settingIdParam);
OrganizationSetting organizationSetting = organizationSettingProvider.findOrganizationSetting(organizationId, settingId);
if (organizationSetting == null) {
return createNotFound(NOT_FOUND);
}
return Response.ok()
.entity(organizationSetting)
.build();
}
@Override
@SuppressWarnings ("squid:MethodCyclomaticComplexity")
public Response updateOrganizationSetting(String organizationIdParam, String settingIdParam, OrganizationSetting setting, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
if (organizationId == null) {
return createNotFound(NOT_FOUND);
}
if (StringUtils.isBlank(setting.getKey())) {
return createBadRequest("Key is required");
}
if (StringUtils.isBlank(setting.getValue())) {
return createBadRequest("Value is required");
}
if (!StringUtils.isNumeric(settingIdParam)) {
return createBadRequest(INVALID_SETTING_ID);
}
Long settingId = NumberUtils.createLong(settingIdParam);
OrganizationSetting organizationSetting = organizationSettingProvider.findOrganizationSetting(organizationId, settingId);
if (organizationSetting == null) {
return createNotFound(NOT_FOUND);
}
if (!StringUtils.equals(organizationSetting.getKey(), setting.getKey())) {
return createBadRequest("Cannot update setting key");
}
OrganizationSetting updatedOrganizationSetting = organizationSettingProvider.updateOrganizationSetting(settingId, setting.getValue());
if (updatedOrganizationSetting == null) {
return createNotFound(NOT_FOUND);
}
return Response.ok()
.entity(updatedOrganizationSetting)
.build();
}
@Override
public Response deleteOrganizationSetting(String organizationIdParam, String settingIdParam, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
if (organizationId == null) {
return createNotFound(NOT_FOUND);
}
if (!StringUtils.isNumeric(settingIdParam)) {
return createBadRequest(INVALID_SETTING_ID);
}
Long settingId = NumberUtils.createLong(settingIdParam);
OrganizationSetting organizationSetting = organizationSettingProvider.findOrganizationSetting(organizationId, settingId);
if (organizationSetting == null) {
return createNotFound(NOT_FOUND);
}
organizationSettingProvider.deleteOrganizationSetting(settingId);
return Response.noContent()
.build();
}
/* Pages */
@Override
public Response listOrganizationPages(String organizationIdParam, String parentIdParam, String path, String search, Long firstResult, Long maxResults, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
if (organizationId == null) {
return createNotFound(NOT_FOUND);
}
if (search != null && (parentIdParam != null || path != null)) {
return createNotImplemented("Search parameter can not be combined with path or parentId parameters");
}
boolean onlyRootPages = StringUtils.equals("ROOT", parentIdParam);
PageId parentId = onlyRootPages ? null : toPageId(parentIdParam);
List<Page> result = listOrganizationPages(organizationId, onlyRootPages, parentId, path, search, firstResult, maxResults);
List<String> ids = httpCacheController.getEntityIds(result);
Response notModified = httpCacheController.getNotModified(request, ids);
if (notModified != null) {
return notModified;
}
return httpCacheController.sendModified(result, ids);
}
@Override
public Response findOrganizationPage(String organizationIdParam, String pageIdParam, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
if (organizationId == null) {
return createNotFound(NOT_FOUND);
}
PageId pageId = toPageId(pageIdParam);
if (pageId == null) {
return createNotFound(NOT_FOUND);
}
Response notModified = httpCacheController.getNotModified(request, pageId);
if (notModified != null) {
return notModified;
}
Page page = pageController.findPage(organizationId, pageId);
if (page != null) {
return httpCacheController.sendModified(page, page.getId());
}
return createNotFound(NOT_FOUND);
}
@Override
public Response findOrganizationPageContent(String organizationIdParam, String pageIdParam, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
if (organizationId == null) {
return createNotFound(NOT_FOUND);
}
PageId pageId = toPageId(pageIdParam);
if (pageId == null) {
return createNotFound(NOT_FOUND);
}
Response notModified = httpCacheController.getNotModified(request, pageId);
if (notModified != null) {
return notModified;
}
List<LocalizedValue> pageContents = pageController.getPageContents(organizationId, pageId);
if (pageContents != null) {
return httpCacheController.sendModified(pageContents, pageId.getId());
}
return createNotFound(NOT_FOUND);
}
@Override
public Response listOrganizationPageImages(String organizationIdParam, String pageIdParam, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
PageId pageId = toPageId(pageIdParam);
List<Attachment> result = pageController.listPageImages(organizationId, pageId);
List<String> ids = httpCacheController.getEntityIds(result);
Response notModified = httpCacheController.getNotModified(request, ids);
if (notModified != null) {
return notModified;
}
return httpCacheController.sendModified(result, ids);
}
@Override
public Response findOrganizationPageImage(String organizationIdParam, String pageIdParam, String imageIdParam, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
PageId pageId = toPageId(pageIdParam);
AttachmentId attachmentId = toAttachmentId(imageIdParam);
Response notModified = httpCacheController.getNotModified(request, attachmentId);
if (notModified != null) {
return notModified;
}
Attachment attachment = pageController.findPageImage(organizationId, pageId, attachmentId);
if (attachment != null) {
return httpCacheController.sendModified(attachment, attachment.getId());
}
return Response.status(Status.NOT_FOUND)
.build();
}
@Override
public Response getOrganizationPageImageData(String organizationIdParam, String pageIdParam, String imageIdParam, Integer size, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
PageId pageId = toPageId(pageIdParam);
AttachmentId attachmentId = toAttachmentId(imageIdParam);
Response notModified = httpCacheController.getNotModified(request, attachmentId);
if (notModified != null) {
return notModified;
}
AttachmentData attachmentData = pageController.getPageAttachmentData(organizationId, pageId, attachmentId, size);
if (attachmentData != null) {
return httpCacheController.streamModified(attachmentData.getData(), attachmentData.getType(), attachmentId);
}
return Response.status(Status.NOT_FOUND)
.build();
}
/* Menus */
@Override
public Response listOrganizationMenus(String organizationIdParam, String slug, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
if (organizationId == null) {
return createNotFound(NOT_FOUND);
}
List<Menu> result = menuController.listMenus(slug, organizationId);
List<String> ids = httpCacheController.getEntityIds(result);
Response notModified = httpCacheController.getNotModified(request, ids);
if (notModified != null) {
return notModified;
}
return httpCacheController.sendModified(result, ids);
}
@Override
public Response findOrganizationMenu(String organizationIdParam, String menuIdParam, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
if (organizationId == null) {
return createNotFound(NOT_FOUND);
}
MenuId menuId = toMenuId(menuIdParam);
if (menuId == null) {
return createNotFound(NOT_FOUND);
}
Response notModified = httpCacheController.getNotModified(request, menuId);
if (notModified != null) {
return notModified;
}
Menu menu = menuController.findMenu(organizationId, menuId);
if (menu != null) {
return httpCacheController.sendModified(menu, menu.getId());
}
return createNotFound(NOT_FOUND);
}
/* Menu Items */
@Override
public Response listOrganizationMenuItems(String organizationIdParam, String menuIdParam, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
if (organizationId == null) {
return createNotFound(NOT_FOUND);
}
MenuId menuId = toMenuId(menuIdParam);
if (menuId == null) {
return createNotFound(NOT_FOUND);
}
List<MenuItem> result = menuController.listMenuItems(organizationId, menuId);
List<String> ids = httpCacheController.getEntityIds(result);
Response notModified = httpCacheController.getNotModified(request, ids);
if (notModified != null) {
return notModified;
}
return httpCacheController.sendModified(result, ids);
}
@Override
public Response findOrganizationMenuItem(String organizationIdParam, String menuIdParam, String menuItemIdParam, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
if (organizationId == null) {
return createNotFound(NOT_FOUND);
}
MenuId menuId = toMenuId(menuIdParam);
if (menuId == null) {
return createNotFound(NOT_FOUND);
}
MenuItemId menuItemId = toMenuItemId(menuItemIdParam);
if (menuItemId == null) {
return createNotFound(NOT_FOUND);
}
return findOrganizationMenuItem(organizationId, menuId, menuItemId, request);
}
private Response findOrganizationMenuItem(OrganizationId organizationId, MenuId menuId, MenuItemId menuItemId, Request request) {
Response notModified = httpCacheController.getNotModified(request, menuItemId);
if (notModified != null) {
return notModified;
}
MenuItem menuItem = menuController.findMenuItem(organizationId, menuId, menuItemId);
if (menuItem != null) {
return httpCacheController.sendModified(menuItem, menuItem.getId());
}
return createNotFound(NOT_FOUND);
}
/* Files */
@Override
public Response listOrganizationFiles(String organizationId, String pageId, @Context Request request) {
return createNotImplemented(NOT_IMPLEMENTED);
}
@Override
public Response findOrganizationFile(String organizationId, String fileId, @Context Request request) {
return createNotImplemented(NOT_IMPLEMENTED);
}
@Override
public Response getOrganizationFileData(String organizationId, String fileId, @Context Request request) {
return createNotImplemented(NOT_IMPLEMENTED);
}
/* Jobs */
@Override
public Response findOrganizationJob(String organizationIdParam, String jobIdParam, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
if (organizationId == null) {
return createNotFound(NOT_FOUND);
}
JobId jobId = toJobId(jobIdParam);
if (jobId == null) {
return createNotFound(NOT_FOUND);
}
for (JobProvider jobProvider : getJobProviders()) {
Job job = jobProvider.findOrganizationJob(organizationId, jobId);
if (job != null) {
return Response.ok(job).build();
}
}
return createNotFound(NOT_FOUND);
}
@Override
public Response listOrganizationJobs(String organizationIdParam, String sortBy, String sortDir, @Context Request request) {
OrganizationId organizationId = toOrganizationId(organizationIdParam);
if (organizationId == null) {
return createNotFound(NOT_FOUND);
}
List<Job> result = new ArrayList<>();
JobOrder order = null;
JobOrderDirection orderDirection = null;
if (StringUtils.isNotBlank(sortBy)) {
order = EnumUtils.getEnum(JobProvider.JobOrder.class, sortBy);
if (order == null) {
return createBadRequest("Invalid value for sortBy");
}
}
if (StringUtils.isNotBlank(sortDir)) {
orderDirection = EnumUtils.getEnum(JobOrderDirection.class, sortDir);
if (orderDirection == null) {
return createBadRequest("Invalid value for sortDir");
}
}
for (JobProvider jobProvider : getJobProviders()) {
result.addAll(jobProvider.listOrganizationJobs(organizationId));
}
return Response.ok(sortJobs(result, order, orderDirection))
.build();
}
/* Announcements */
@Override
public Response findOrganizationAnnouncement(String organizationId, String announcementId, @Context Request request) {
return null;
}
@Override
public Response listOrganizationAnnouncements(String organizationId, Integer firstResult, Integer maxResults,
String sortBy, String sortDir, @Context Request request) {
return null;
}
private List<Page> listOrganizationPages(OrganizationId organizationId, boolean onlyRootPages, PageId parentId, String path, String search, Long firstResult, Long maxResults) {
if (search != null) {
return pageController.searchPages(organizationId, search, firstResult, maxResults);
} else {
return pageController.listPages(organizationId, path, onlyRootPages, parentId, firstResult, maxResults);
}
}
private List<Job> sortJobs(List<Job> jobs, JobOrder order, JobOrderDirection orderDirection) {
if (order == null) {
return jobs;
}
List<Job> sorted = new ArrayList<>(jobs);
switch (order) {
case PUBLICATION_END:
Collections.sort(sorted, (Job o1, Job o2)
-> orderDirection != JobOrderDirection.ASCENDING
? o2.getPublicationEnd().compareTo(o1.getPublicationEnd())
: o1.getPublicationEnd().compareTo(o2.getPublicationEnd()));
break;
case PUBLICATION_START:
Collections.sort(sorted, (Job o1, Job o2)
-> orderDirection != JobOrderDirection.ASCENDING
? o2.getPublicationStart().compareTo(o1.getPublicationStart())
: o1.getPublicationStart().compareTo(o2.getPublicationStart()));
break;
default:
}
return sorted;
}
private Response validateListLimitParams(Long firstResult, Long maxResults) {
if (firstResult != null && firstResult < 0) {
return createBadRequest(FIRST_RESULT_MUST_BY_A_POSITIVE_INTEGER);
}
if (maxResults != null && maxResults < 0) {
return createBadRequest(MAX_RESULTS_MUST_BY_A_POSITIVE_INTEGER);
}
return null;
}
private BannerId toBannerId(String id) {
if (StringUtils.isNotBlank(id)) {
return new BannerId(KuntaApiConsts.IDENTIFIER_NAME, id);
}
return null;
}
private TileId toTileId(String id) {
if (StringUtils.isNotBlank(id)) {
return new TileId(KuntaApiConsts.IDENTIFIER_NAME, id);
}
return null;
}
private NewsArticleId toNewsArticleId(String id) {
if (StringUtils.isNotBlank(id)) {
return new NewsArticleId(KuntaApiConsts.IDENTIFIER_NAME, id);
}
return null;
}
private OrganizationId toOrganizationId(String id) {
if (StringUtils.isNotBlank(id)) {
return new OrganizationId(KuntaApiConsts.IDENTIFIER_NAME, id);
}
return null;
}
private OrganizationServiceId toOrganizationServiceId(String id) {
if (StringUtils.isNotBlank(id)) {
return new OrganizationServiceId(KuntaApiConsts.IDENTIFIER_NAME, id);
}
return null;
}
private EventId toEventId(String id) {
if (StringUtils.isNotBlank(id)) {
return new EventId(KuntaApiConsts.IDENTIFIER_NAME, id);
}
return null;
}
private AttachmentId toAttachmentId(String id) {
if (StringUtils.isNotBlank(id)) {
return new AttachmentId(KuntaApiConsts.IDENTIFIER_NAME, id);
}
return null;
}
@SuppressWarnings("unused")
private FileId toFileId(String id) {
if (StringUtils.isNotBlank(id)) {
return new FileId(KuntaApiConsts.IDENTIFIER_NAME, id);
}
return null;
}
private PageId toPageId(String id) {
if (StringUtils.isNotBlank(id)) {
return new PageId(KuntaApiConsts.IDENTIFIER_NAME, id);
}
return null;
}
private MenuId toMenuId(String id) {
if (StringUtils.isNotBlank(id)) {
return new MenuId(KuntaApiConsts.IDENTIFIER_NAME, id);
}
return null;
}
private MenuItemId toMenuItemId(String id) {
if (StringUtils.isNotBlank(id)) {
return new MenuItemId(KuntaApiConsts.IDENTIFIER_NAME, id);
}
return null;
}
private JobId toJobId(String id) {
if (StringUtils.isNotBlank(id)) {
return new JobId(KuntaApiConsts.IDENTIFIER_NAME, id);
}
return null;
}
private OffsetDateTime getDateTime(String timeString) {
if (StringUtils.isNotBlank(timeString)) {
return OffsetDateTime.parse(timeString);
}
return null;
}
private List<OrganizationServiceProvider> getOrganizationServiceProviders() {
List<OrganizationServiceProvider> result = new ArrayList<>();
Iterator<OrganizationServiceProvider> iterator = organizationServiceProviders.iterator();
while (iterator.hasNext()) {
result.add(iterator.next());
}
return Collections.unmodifiableList(result);
}
private List<EventProvider> getEventProviders() {
List<EventProvider> result = new ArrayList<>();
Iterator<EventProvider> iterator = eventProviders.iterator();
while (iterator.hasNext()) {
result.add(iterator.next());
}
return Collections.unmodifiableList(result);
}
private List<NewsProvider> getNewsProviders() {
List<NewsProvider> result = new ArrayList<>();
Iterator<NewsProvider> iterator = newsProviders.iterator();
while (iterator.hasNext()) {
result.add(iterator.next());
}
return Collections.unmodifiableList(result);
}
private List<JobProvider> getJobProviders() {
List<JobProvider> result = new ArrayList<>();
Iterator<JobProvider> iterator = jobProviders.iterator();
while (iterator.hasNext()) {
result.add(iterator.next());
}
return Collections.unmodifiableList(result);
}
} | Fixed issue that prevented http cache on tile image find endpoint | src/main/java/fi/otavanopisto/kuntaapi/server/rest/OrganizationsApiImpl.java | Fixed issue that prevented http cache on tile image find endpoint | <ide><path>rc/main/java/fi/otavanopisto/kuntaapi/server/rest/OrganizationsApiImpl.java
<ide> TileId tileId = toTileId(tileIdParam);
<ide> AttachmentId attachmentId = toAttachmentId(imageIdParam);
<ide>
<del> Response notModified = httpCacheController.getNotModified(request, tileId);
<add> Response notModified = httpCacheController.getNotModified(request, attachmentId);
<ide> if (notModified != null) {
<ide> return notModified;
<ide> } |
|
JavaScript | mit | c1899d9d8ed7971036e65a07576b3f98518f1fd1 | 0 | greyhwndz/hammer.js,mcanthony/hammer.js,runspired/hammer.js,meobyte/hammer.js,DominikMayrhofer/hammer.js,Anlim/hammer.js,r14r-work/fork_javascript_hammer.js,greyhwndz/hammer.js,DominikMayrhofer/hammer.js,longseespace/hammer.js,r14r-work/fork_javascript_hammer.js,angeliaz/hammer.js,pengfeiWang/hammer.js,fashionsun/hammer.js,naver/hammer.js,heamon7/hammer.js,longseespace/hammer.js,hugofys/hammer.js,kevinmel2000/hammer.js,zhn4/hammer.js,naver/hammer.js,kevinmel2000/hammer.js,yuhualingfeng/hammer.js,r14r/fork_javascript_hammer.js,kevin0307/hammer.js,welheor1/hammer.js,XM-Right/hammer.js,heamon7/hammer.js,meobyte/hammer.js,dieface/hammer.js,stringworld/hammer.js,yuhualingfeng/hammer.js,GerHobbelt/hammer.js,XM-Right/hammer.js,EightMedia/hammer.js,welheor1/hammer.js,kevin0307/hammer.js,hitesh97/hammer.js,pengfeiWang/hammer.js,succ1984/hammer.js,fashionsun/hammer.js,hugofys/hammer.js,stringworld/hammer.js,hitesh97/hammer.js,zhn4/hammer.js,mcanthony/hammer.js,r14r/fork_javascript_hammer.js,runspired/hammer.js,EightMedia/hammer.js,hammerjs/hammer.js,succ1984/hammer.js,hammerjs/hammer.js,Anlim/hammer.js,dieface/hammer.js,GerHobbelt/hammer.js | /*
* Hammer.JS
* version 0.6.2
* author: Eight Media
* https://github.com/EightMedia/hammer.js
* Licensed under the MIT license.
*/
function Hammer(element, options, undefined)
{
var self = this;
var defaults = {
// prevent the default event or not... might be buggy when false
prevent_default : false,
css_hacks : true,
swipe : true,
swipe_time : 200, // ms
swipe_min_distance : 20, // pixels
drag : true,
drag_vertical : true,
drag_horizontal : true,
// minimum distance before the drag event starts
drag_min_distance : 20, // pixels
// pinch zoom and rotation
transform : true,
scale_treshold : 0.1,
rotation_treshold : 15, // degrees
tap : true,
tap_double : true,
tap_max_interval : 300,
tap_max_distance : 10,
tap_double_distance: 20,
hold : true,
hold_timeout : 500
};
options = mergeObject(defaults, options);
// some css hacks
(function() {
if(!options.css_hacks) {
return false;
}
var vendors = ['webkit','moz','ms','o',''];
var css_props = {
"userSelect": "none",
"touchCallout": "none",
"userDrag": "none",
"tapHighlightColor": "rgba(0,0,0,0)"
};
var prop = '';
for(var i = 0; i < vendors.length; i++) {
for(var p in css_props) {
prop = p;
if(vendors[i]) {
prop = vendors[i] + prop.substring(0, 1).toUpperCase() + prop.substring(1);
}
element.style[ prop ] = css_props[p];
}
}
})();
// holds the distance that has been moved
var _distance = 0;
// holds the exact angle that has been moved
var _angle = 0;
// holds the direction that has been moved
var _direction = 0;
// holds position movement for sliding
var _pos = { };
// how many fingers are on the screen
var _fingers = 0;
var _first = false;
var _gesture = null;
var _prev_gesture = null;
var _touch_start_time = null;
var _prev_tap_pos = {x: 0, y: 0};
var _prev_tap_end_time = null;
var _hold_timer = null;
var _offset = {};
// keep track of the mouse status
var _mousedown = false;
var _event_start;
var _event_move;
var _event_end;
var _has_touch = ('ontouchstart' in window);
/**
* option setter/getter
* @param string key
* @param mixed value
* @return mixed value
*/
this.option = function(key, val) {
if(val != undefined) {
options[key] = val;
}
return options[key];
};
/**
* angle to direction define
* @param float angle
* @return string direction
*/
this.getDirectionFromAngle = function( angle ) {
var directions = {
down: angle >= 45 && angle < 135, //90
left: angle >= 135 || angle <= -135, //180
up: angle < -45 && angle > -135, //270
right: angle >= -45 && angle <= 45 //0
};
var direction, key;
for(key in directions){
if(directions[key]){
direction = key;
break;
}
}
return direction;
};
/**
* destory events
* @return void
*/
this.destroy = function() {
if(_has_touch) {
removeEvent(element, "touchstart touchmove touchend touchcancel", handleEvents);
}
// for non-touch
else {
removeEvent(element, "mouseup mousedown mousemove", handleEvents);
removeEvent(element, "mouseout", handleMouseOut);
}
};
/**
* count the number of fingers in the event
* when no fingers are detected, one finger is returned (mouse pointer)
* @param event
* @return int fingers
*/
function countFingers( event )
{
// there is a bug on android (until v4?) that touches is always 1,
// so no multitouch is supported, e.g. no, zoom and rotation...
return event.touches ? event.touches.length : 1;
}
/**
* get the x and y positions from the event object
* @param event
* @return array [{ x: int, y: int }]
*/
function getXYfromEvent( event )
{
event = event || window.event;
// no touches, use the event pageX and pageY
if(!_has_touch) {
var doc = document,
body = doc.body;
return [{
x: event.pageX || event.clientX + ( doc && doc.scrollLeft || body && body.scrollLeft || 0 ) - ( doc && doc.clientLeft || body && doc.clientLeft || 0 ),
y: event.pageY || event.clientY + ( doc && doc.scrollTop || body && body.scrollTop || 0 ) - ( doc && doc.clientTop || body && doc.clientTop || 0 )
}];
}
// multitouch, return array with positions
else {
var pos = [], src;
for(var t=0, len=event.touches.length; t<len; t++) {
src = event.touches[t];
pos.push({ x: src.pageX, y: src.pageY });
}
return pos;
}
}
/**
* calculate the angle between two points
* @param object pos1 { x: int, y: int }
* @param object pos2 { x: int, y: int }
*/
function getAngle( pos1, pos2 )
{
return Math.atan2(pos2.y - pos1.y, pos2.x - pos1.x) * 180 / Math.PI;
}
/**
* calculate the distance between two points
* @param object pos1 { x: int, y: int }
* @param object pos2 { x: int, y: int }
*/
function getDistance( pos1, pos2 )
{
var x = pos2.x - pos1.x, y = pos2.y - pos1.y;
return Math.sqrt((x * x) + (y * y));
}
/**
* calculate the scale size between two fingers
* @param object pos_start
* @param object pos_move
* @return float scale
*/
function calculateScale(pos_start, pos_move)
{
if(pos_start.length == 2 && pos_move.length == 2) {
var start_distance = getDistance(pos_start[0], pos_start[1]);
var end_distance = getDistance(pos_move[0], pos_move[1]);
return end_distance / start_distance;
}
return 0;
}
/**
* calculate the rotation degrees between two fingers
* @param object pos_start
* @param object pos_move
* @return float rotation
*/
function calculateRotation(pos_start, pos_move)
{
if(pos_start.length == 2 && pos_move.length == 2) {
var start_rotation = getAngle(pos_start[1], pos_start[0]);
var end_rotation = getAngle(pos_move[1], pos_move[0]);
return end_rotation - start_rotation;
}
return 0;
}
/**
* trigger an event/callback by name with params
* @param string name
* @param array params
*/
function triggerEvent( eventName, params )
{
// return touches object
params.touches = getXYfromEvent(params.originalEvent);
params.type = eventName;
// trigger callback
if(isFunction(self["on"+ eventName])) {
self["on"+ eventName].call(self, params);
}
}
/**
* cancel event
* @param object event
* @return void
*/
function cancelEvent(event)
{
event = event || window.event;
if(event.preventDefault){
event.preventDefault();
event.stopPropagation();
}else{
event.returnValue = false;
event.cancelBubble = true;
}
}
/**
* reset the internal vars to the start values
*/
function reset()
{
_pos = {};
_first = false;
_fingers = 0;
_distance = 0;
_angle = 0;
_gesture = null;
}
var gestures = {
// hold gesture
// fired on touchstart
hold : function(event)
{
// only when one finger is on the screen
if(options.hold) {
_gesture = 'hold';
clearTimeout(_hold_timer);
_hold_timer = setTimeout(function() {
if(_gesture == 'hold') {
triggerEvent("hold", {
originalEvent : event,
position : _pos.start
});
}
}, options.hold_timeout);
}
},
// swipe gesture
// fired on touchend
swipe : function(event)
{
if(!_pos.move) {
return;
}
// get the distance we moved
var _distance_x = _pos.move[0].x - _pos.start[0].x;
var _distance_y = _pos.move[0].y - _pos.start[0].y;
_distance = Math.sqrt(_distance_x*_distance_x + _distance_y*_distance_y);
// compare the kind of gesture by time
var now = new Date().getTime();
var touch_time = now - _touch_start_time;
if(options.swipe && (options.swipe_time > touch_time) && (_distance > options.swipe_min_distance)) {
// calculate the angle
_angle = getAngle(_pos.start[0], _pos.move[0]);
_direction = self.getDirectionFromAngle(_angle);
_gesture = 'swipe';
var position = { x: _pos.move[0].x - _offset.left,
y: _pos.move[0].y - _offset.top };
var event_obj = {
originalEvent : event,
position : position,
direction : _direction,
distance : _distance,
distanceX : _distance_x,
distanceY : _distance_y,
angle : _angle
};
// normal slide event
triggerEvent("swipe", event_obj);
}
},
// drag gesture
// fired on mousemove
drag : function(event)
{
// get the distance we moved
var _distance_x = _pos.move[0].x - _pos.start[0].x;
var _distance_y = _pos.move[0].y - _pos.start[0].y;
_distance = Math.sqrt(_distance_x * _distance_x + _distance_y * _distance_y);
// drag
// minimal movement required
if(options.drag && (_distance > options.drag_min_distance) || _gesture == 'drag') {
// calculate the angle
_angle = getAngle(_pos.start[0], _pos.move[0]);
_direction = self.getDirectionFromAngle(_angle);
// check the movement and stop if we go in the wrong direction
var is_vertical = (_direction == 'up' || _direction == 'down');
if(((is_vertical && !options.drag_vertical) || (!is_vertical && !options.drag_horizontal))
&& (_distance > options.drag_min_distance)) {
return;
}
_gesture = 'drag';
var position = { x: _pos.move[0].x - _offset.left,
y: _pos.move[0].y - _offset.top };
var event_obj = {
originalEvent : event,
position : position,
direction : _direction,
distance : _distance,
distanceX : _distance_x,
distanceY : _distance_y,
angle : _angle
};
// on the first time trigger the start event
if(_first) {
triggerEvent("dragstart", event_obj);
_first = false;
}
// normal slide event
triggerEvent("drag", event_obj);
cancelEvent(event);
}
},
// transform gesture
// fired on touchmove
transform : function(event)
{
if(options.transform) {
if(countFingers(event) != 2) {
return false;
}
var rotation = calculateRotation(_pos.start, _pos.move);
var scale = calculateScale(_pos.start, _pos.move);
if(_gesture != 'drag' &&
(_gesture == 'transform' || Math.abs(1-scale) > options.scale_treshold || Math.abs(rotation) > options.rotation_treshold)) {
_gesture = 'transform';
_pos.center = { x: ((_pos.move[0].x + _pos.move[1].x) / 2) - _offset.left,
y: ((_pos.move[0].y + _pos.move[1].y) / 2) - _offset.top };
var event_obj = {
originalEvent : event,
position : _pos.center,
scale : scale,
rotation : rotation
};
// on the first time trigger the start event
if(_first) {
triggerEvent("transformstart", event_obj);
_first = false;
}
triggerEvent("transform", event_obj);
cancelEvent(event);
return true;
}
}
return false;
},
// tap and double tap gesture
// fired on touchend
tap : function(event)
{
// compare the kind of gesture by time
var now = new Date().getTime();
var touch_time = now - _touch_start_time;
// dont fire when hold is fired
if(options.hold && !(options.hold && options.hold_timeout > touch_time)) {
return;
}
// when previous event was tap and the tap was max_interval ms ago
var is_double_tap = (function(){
if (_prev_tap_pos &&
options.tap_double &&
_prev_gesture == 'tap' &&
(_touch_start_time - _prev_tap_end_time) < options.tap_max_interval)
{
var x_distance = Math.abs(_prev_tap_pos[0].x - _pos.start[0].x);
var y_distance = Math.abs(_prev_tap_pos[0].y - _pos.start[0].y);
return (_prev_tap_pos && _pos.start && Math.max(x_distance, y_distance) < options.tap_double_distance);
}
return false;
})();
if(is_double_tap) {
_gesture = 'double_tap';
_prev_tap_end_time = null;
triggerEvent("doubletap", {
originalEvent : event,
position : _pos.start
});
cancelEvent(event);
}
// single tap is single touch
else {
var x_distance = (_pos.move) ? Math.abs(_pos.move[0].x - _pos.start[0].x) : 0;
var y_distance = (_pos.move) ? Math.abs(_pos.move[0].y - _pos.start[0].y) : 0;
_distance = Math.max(x_distance, y_distance);
if(_distance < options.tap_max_distance) {
_gesture = 'tap';
_prev_tap_end_time = now;
_prev_tap_pos = _pos.start;
if(options.tap) {
triggerEvent("tap", {
originalEvent : event,
position : _pos.start
});
cancelEvent(event);
}
}
}
}
};
function handleEvents(event)
{
switch(event.type)
{
case 'mousedown':
case 'touchstart':
_pos.start = getXYfromEvent(event);
_touch_start_time = new Date().getTime();
_fingers = countFingers(event);
_first = true;
_event_start = event;
// borrowed from jquery offset https://github.com/jquery/jquery/blob/master/src/offset.js
var box = element.getBoundingClientRect();
var clientTop = element.clientTop || document.body.clientTop || 0;
var clientLeft = element.clientLeft || document.body.clientLeft || 0;
var scrollTop = window.pageYOffset || element.scrollTop || document.body.scrollTop;
var scrollLeft = window.pageXOffset || element.scrollLeft || document.body.scrollLeft;
_offset = {
top: box.top + scrollTop - clientTop,
left: box.left + scrollLeft - clientLeft
};
_mousedown = true;
// hold gesture
gestures.hold(event);
if(options.prevent_default) {
cancelEvent(event);
}
break;
case 'mousemove':
case 'touchmove':
if(!_mousedown) {
return false;
}
_event_move = event;
_pos.move = getXYfromEvent(event);
if(!gestures.transform(event)) {
gestures.drag(event);
}
break;
case 'mouseup':
case 'mouseout':
case 'touchcancel':
case 'touchend':
if(!_mousedown || (_gesture != 'transform' && event.touches && event.touches.length > 0)) {
return false;
}
_mousedown = false;
_event_end = event;
// swipe gesture
gestures.swipe(event);
// drag gesture
// dragstart is triggered, so dragend is possible
if(_gesture == 'drag') {
triggerEvent("dragend", {
originalEvent : event,
direction : _direction,
distance : _distance,
angle : _angle
});
}
// transform
// transformstart is triggered, so transformed is possible
else if(_gesture == 'transform') {
triggerEvent("transformend", {
originalEvent : event,
position : _pos.center,
scale : calculateScale(_pos.start, _pos.move),
rotation : calculateRotation(_pos.start, _pos.move)
});
}
else {
gestures.tap(_event_start);
}
_prev_gesture = _gesture;
// trigger release event.
// "release" by default doesn't return the co-ords where your
// finger was released. "position" will return "the last touched co-ords"
triggerEvent("release", {
originalEvent : event,
gesture : _gesture,
position : _pos.move || _pos.start
});
// reset vars
reset();
break;
}
}
function handleMouseOut(event) {
if(!isInsideHammer(element, event.relatedTarget)) {
handleEvents(event);
}
}
// bind events for touch devices
// except for windows phone 7.5, it doesnt support touch events..!
if(_has_touch) {
addEvent(element, "touchstart touchmove touchend touchcancel", handleEvents);
}
// for non-touch
else {
addEvent(element, "mouseup mousedown mousemove", handleEvents);
addEvent(element, "mouseout", handleMouseOut);
}
/**
* find if element is (inside) given parent element
* @param object element
* @param object parent
* @return bool inside
*/
function isInsideHammer(parent, child) {
// get related target for IE
if(!child && window.event && window.event.toElement){
child = window.event.toElement;
}
if(parent === child){
return true;
}
// loop over parentNodes of child until we find hammer element
if(child){
var node = child.parentNode;
while(node !== null){
if(node === parent){
return true;
};
node = node.parentNode;
}
}
return false;
}
/**
* merge 2 objects into a new object
* @param object obj1
* @param object obj2
* @return object merged object
*/
function mergeObject(obj1, obj2) {
var output = {};
if(!obj2) {
return obj1;
}
for (var prop in obj1) {
if (prop in obj2) {
output[prop] = obj2[prop];
} else {
output[prop] = obj1[prop];
}
}
return output;
}
/**
* check if object is a function
* @param object obj
* @return bool is function
*/
function isFunction( obj ){
return Object.prototype.toString.call( obj ) == "[object Function]";
}
/**
* attach event
* @param node element
* @param string types
* @param object callback
*/
function addEvent(element, types, callback) {
types = types.split(" ");
for(var t= 0,len=types.length; t<len; t++) {
if(element.addEventListener){
element.addEventListener(types[t], callback, false);
}
else if(document.attachEvent){
element.attachEvent("on"+ types[t], callback);
}
}
}
/**
* detach event
* @param node element
* @param string types
* @param object callback
*/
function removeEvent(element, types, callback) {
types = types.split(" ");
for(var t= 0,len=types.length; t<len; t++) {
if(element.removeEventListener){
element.removeEventListener(types[t], callback, false);
}
else if(document.detachEvent){
element.detachEvent("on"+ types[t], callback);
}
}
}
} | hammer.js | /*
* Hammer.JS
* version 0.6.2
* author: Eight Media
* https://github.com/EightMedia/hammer.js
* Licensed under the MIT license.
*/
function Hammer(element, options, undefined)
{
var self = this;
var defaults = {
// prevent the default event or not... might be buggy when false
prevent_default : false,
css_hacks : true,
swipe : true,
swipe_time : 200, // ms
swipe_min_distance : 20, // pixels
drag : true,
drag_vertical : true,
drag_horizontal : true,
// minimum distance before the drag event starts
drag_min_distance : 20, // pixels
// pinch zoom and rotation
transform : true,
scale_treshold : 0.1,
rotation_treshold : 15, // degrees
tap : true,
tap_double : true,
tap_max_interval : 300,
tap_max_distance : 10,
tap_double_distance: 20,
hold : true,
hold_timeout : 500
};
options = mergeObject(defaults, options);
// some css hacks
(function() {
if(!options.css_hacks) {
return false;
}
var vendors = ['webkit','moz','ms','o',''];
var css_props = {
"userSelect": "none",
"touchCallout": "none",
"userDrag": "none",
"tapHighlightColor": "rgba(0,0,0,0)"
};
var prop = '';
for(var i = 0; i < vendors.length; i++) {
for(var p in css_props) {
prop = p;
if(vendors[i]) {
prop = vendors[i] + prop.substring(0, 1).toUpperCase() + prop.substring(1);
}
element.style[ prop ] = css_props[p];
}
}
})();
// holds the distance that has been moved
var _distance = 0;
// holds the exact angle that has been moved
var _angle = 0;
// holds the diraction that has been moved
var _direction = 0;
// holds position movement for sliding
var _pos = { };
// how many fingers are on the screen
var _fingers = 0;
var _first = false;
var _gesture = null;
var _prev_gesture = null;
var _touch_start_time = null;
var _prev_tap_pos = {x: 0, y: 0};
var _prev_tap_end_time = null;
var _hold_timer = null;
var _offset = {};
// keep track of the mouse status
var _mousedown = false;
var _event_start;
var _event_move;
var _event_end;
var _has_touch = ('ontouchstart' in window);
/**
* option setter/getter
* @param string key
* @param mixed value
* @return mixed value
*/
this.option = function(key, val) {
if(val != undefined) {
options[key] = val;
}
return options[key];
};
/**
* angle to direction define
* @param float angle
* @return string direction
*/
this.getDirectionFromAngle = function( angle ) {
var directions = {
down: angle >= 45 && angle < 135, //90
left: angle >= 135 || angle <= -135, //180
up: angle < -45 && angle > -135, //270
right: angle >= -45 && angle <= 45 //0
};
var direction, key;
for(key in directions){
if(directions[key]){
direction = key;
break;
}
}
return direction;
};
/**
* destory events
* @return void
*/
this.destroy = function() {
if(_has_touch) {
removeEvent(element, "touchstart touchmove touchend touchcancel", handleEvents);
}
// for non-touch
else {
removeEvent(element, "mouseup mousedown mousemove", handleEvents);
removeEvent(element, "mouseout", handleMouseOut);
}
};
/**
* count the number of fingers in the event
* when no fingers are detected, one finger is returned (mouse pointer)
* @param event
* @return int fingers
*/
function countFingers( event )
{
// there is a bug on android (until v4?) that touches is always 1,
// so no multitouch is supported, e.g. no, zoom and rotation...
return event.touches ? event.touches.length : 1;
}
/**
* get the x and y positions from the event object
* @param event
* @return array [{ x: int, y: int }]
*/
function getXYfromEvent( event )
{
event = event || window.event;
// no touches, use the event pageX and pageY
if(!_has_touch) {
var doc = document,
body = doc.body;
return [{
x: event.pageX || event.clientX + ( doc && doc.scrollLeft || body && body.scrollLeft || 0 ) - ( doc && doc.clientLeft || body && doc.clientLeft || 0 ),
y: event.pageY || event.clientY + ( doc && doc.scrollTop || body && body.scrollTop || 0 ) - ( doc && doc.clientTop || body && doc.clientTop || 0 )
}];
}
// multitouch, return array with positions
else {
var pos = [], src;
for(var t=0, len=event.touches.length; t<len; t++) {
src = event.touches[t];
pos.push({ x: src.pageX, y: src.pageY });
}
return pos;
}
}
/**
* calculate the angle between two points
* @param object pos1 { x: int, y: int }
* @param object pos2 { x: int, y: int }
*/
function getAngle( pos1, pos2 )
{
return Math.atan2(pos2.y - pos1.y, pos2.x - pos1.x) * 180 / Math.PI;
}
/**
* calculate the distance between two points
* @param object pos1 { x: int, y: int }
* @param object pos2 { x: int, y: int }
*/
function getDistance( pos1, pos2 )
{
var x = pos2.x - pos1.x, y = pos2.y - pos1.y;
return Math.sqrt((x * x) + (y * y));
}
/**
* calculate the scale size between two fingers
* @param object pos_start
* @param object pos_move
* @return float scale
*/
function calculateScale(pos_start, pos_move)
{
if(pos_start.length == 2 && pos_move.length == 2) {
var start_distance = getDistance(pos_start[0], pos_start[1]);
var end_distance = getDistance(pos_move[0], pos_move[1]);
return end_distance / start_distance;
}
return 0;
}
/**
* calculate the rotation degrees between two fingers
* @param object pos_start
* @param object pos_move
* @return float rotation
*/
function calculateRotation(pos_start, pos_move)
{
if(pos_start.length == 2 && pos_move.length == 2) {
var start_rotation = getAngle(pos_start[1], pos_start[0]);
var end_rotation = getAngle(pos_move[1], pos_move[0]);
return end_rotation - start_rotation;
}
return 0;
}
/**
* trigger an event/callback by name with params
* @param string name
* @param array params
*/
function triggerEvent( eventName, params )
{
// return touches object
params.touches = getXYfromEvent(params.originalEvent);
params.type = eventName;
// trigger callback
if(isFunction(self["on"+ eventName])) {
self["on"+ eventName].call(self, params);
}
}
/**
* cancel event
* @param object event
* @return void
*/
function cancelEvent(event)
{
event = event || window.event;
if(event.preventDefault){
event.preventDefault();
event.stopPropagation();
}else{
event.returnValue = false;
event.cancelBubble = true;
}
}
/**
* reset the internal vars to the start values
*/
function reset()
{
_pos = {};
_first = false;
_fingers = 0;
_distance = 0;
_angle = 0;
_gesture = null;
}
var gestures = {
// hold gesture
// fired on touchstart
hold : function(event)
{
// only when one finger is on the screen
if(options.hold) {
_gesture = 'hold';
clearTimeout(_hold_timer);
_hold_timer = setTimeout(function() {
if(_gesture == 'hold') {
triggerEvent("hold", {
originalEvent : event,
position : _pos.start
});
}
}, options.hold_timeout);
}
},
// swipe gesture
// fired on touchend
swipe : function(event)
{
if(!_pos.move) {
return;
}
// get the distance we moved
var _distance_x = _pos.move[0].x - _pos.start[0].x;
var _distance_y = _pos.move[0].y - _pos.start[0].y;
_distance = Math.sqrt(_distance_x*_distance_x + _distance_y*_distance_y);
// compare the kind of gesture by time
var now = new Date().getTime();
var touch_time = now - _touch_start_time;
if(options.swipe && (options.swipe_time > touch_time) && (_distance > options.swipe_min_distance)) {
// calculate the angle
_angle = getAngle(_pos.start[0], _pos.move[0]);
_direction = self.getDirectionFromAngle(_angle);
_gesture = 'swipe';
var position = { x: _pos.move[0].x - _offset.left,
y: _pos.move[0].y - _offset.top };
var event_obj = {
originalEvent : event,
position : position,
direction : _direction,
distance : _distance,
distanceX : _distance_x,
distanceY : _distance_y,
angle : _angle
};
// normal slide event
triggerEvent("swipe", event_obj);
}
},
// drag gesture
// fired on mousemove
drag : function(event)
{
// get the distance we moved
var _distance_x = _pos.move[0].x - _pos.start[0].x;
var _distance_y = _pos.move[0].y - _pos.start[0].y;
_distance = Math.sqrt(_distance_x * _distance_x + _distance_y * _distance_y);
// drag
// minimal movement required
if(options.drag && (_distance > options.drag_min_distance) || _gesture == 'drag') {
// calculate the angle
_angle = getAngle(_pos.start[0], _pos.move[0]);
_direction = self.getDirectionFromAngle(_angle);
// check the movement and stop if we go in the wrong direction
var is_vertical = (_direction == 'up' || _direction == 'down');
if(((is_vertical && !options.drag_vertical) || (!is_vertical && !options.drag_horizontal))
&& (_distance > options.drag_min_distance)) {
return;
}
_gesture = 'drag';
var position = { x: _pos.move[0].x - _offset.left,
y: _pos.move[0].y - _offset.top };
var event_obj = {
originalEvent : event,
position : position,
direction : _direction,
distance : _distance,
distanceX : _distance_x,
distanceY : _distance_y,
angle : _angle
};
// on the first time trigger the start event
if(_first) {
triggerEvent("dragstart", event_obj);
_first = false;
}
// normal slide event
triggerEvent("drag", event_obj);
cancelEvent(event);
}
},
// transform gesture
// fired on touchmove
transform : function(event)
{
if(options.transform) {
if(countFingers(event) != 2) {
return false;
}
var rotation = calculateRotation(_pos.start, _pos.move);
var scale = calculateScale(_pos.start, _pos.move);
if(_gesture != 'drag' &&
(_gesture == 'transform' || Math.abs(1-scale) > options.scale_treshold || Math.abs(rotation) > options.rotation_treshold)) {
_gesture = 'transform';
_pos.center = { x: ((_pos.move[0].x + _pos.move[1].x) / 2) - _offset.left,
y: ((_pos.move[0].y + _pos.move[1].y) / 2) - _offset.top };
var event_obj = {
originalEvent : event,
position : _pos.center,
scale : scale,
rotation : rotation
};
// on the first time trigger the start event
if(_first) {
triggerEvent("transformstart", event_obj);
_first = false;
}
triggerEvent("transform", event_obj);
cancelEvent(event);
return true;
}
}
return false;
},
// tap and double tap gesture
// fired on touchend
tap : function(event)
{
// compare the kind of gesture by time
var now = new Date().getTime();
var touch_time = now - _touch_start_time;
// dont fire when hold is fired
if(options.hold && !(options.hold && options.hold_timeout > touch_time)) {
return;
}
// when previous event was tap and the tap was max_interval ms ago
var is_double_tap = (function(){
if (_prev_tap_pos &&
options.tap_double &&
_prev_gesture == 'tap' &&
(_touch_start_time - _prev_tap_end_time) < options.tap_max_interval)
{
var x_distance = Math.abs(_prev_tap_pos[0].x - _pos.start[0].x);
var y_distance = Math.abs(_prev_tap_pos[0].y - _pos.start[0].y);
return (_prev_tap_pos && _pos.start && Math.max(x_distance, y_distance) < options.tap_double_distance);
}
return false;
})();
if(is_double_tap) {
_gesture = 'double_tap';
_prev_tap_end_time = null;
triggerEvent("doubletap", {
originalEvent : event,
position : _pos.start
});
cancelEvent(event);
}
// single tap is single touch
else {
var x_distance = (_pos.move) ? Math.abs(_pos.move[0].x - _pos.start[0].x) : 0;
var y_distance = (_pos.move) ? Math.abs(_pos.move[0].y - _pos.start[0].y) : 0;
_distance = Math.max(x_distance, y_distance);
if(_distance < options.tap_max_distance) {
_gesture = 'tap';
_prev_tap_end_time = now;
_prev_tap_pos = _pos.start;
if(options.tap) {
triggerEvent("tap", {
originalEvent : event,
position : _pos.start
});
cancelEvent(event);
}
}
}
}
};
function handleEvents(event)
{
switch(event.type)
{
case 'mousedown':
case 'touchstart':
_pos.start = getXYfromEvent(event);
_touch_start_time = new Date().getTime();
_fingers = countFingers(event);
_first = true;
_event_start = event;
// borrowed from jquery offset https://github.com/jquery/jquery/blob/master/src/offset.js
var box = element.getBoundingClientRect();
var clientTop = element.clientTop || document.body.clientTop || 0;
var clientLeft = element.clientLeft || document.body.clientLeft || 0;
var scrollTop = window.pageYOffset || element.scrollTop || document.body.scrollTop;
var scrollLeft = window.pageXOffset || element.scrollLeft || document.body.scrollLeft;
_offset = {
top: box.top + scrollTop - clientTop,
left: box.left + scrollLeft - clientLeft
};
_mousedown = true;
// hold gesture
gestures.hold(event);
if(options.prevent_default) {
cancelEvent(event);
}
break;
case 'mousemove':
case 'touchmove':
if(!_mousedown) {
return false;
}
_event_move = event;
_pos.move = getXYfromEvent(event);
if(!gestures.transform(event)) {
gestures.drag(event);
}
break;
case 'mouseup':
case 'mouseout':
case 'touchcancel':
case 'touchend':
if(!_mousedown || (_gesture != 'transform' && event.touches && event.touches.length > 0)) {
return false;
}
_mousedown = false;
_event_end = event;
// swipe gesture
gestures.swipe(event);
// drag gesture
// dragstart is triggered, so dragend is possible
if(_gesture == 'drag') {
triggerEvent("dragend", {
originalEvent : event,
direction : _direction,
distance : _distance,
angle : _angle
});
}
// transform
// transformstart is triggered, so transformed is possible
else if(_gesture == 'transform') {
triggerEvent("transformend", {
originalEvent : event,
position : _pos.center,
scale : calculateScale(_pos.start, _pos.move),
rotation : calculateRotation(_pos.start, _pos.move)
});
}
else {
gestures.tap(_event_start);
}
_prev_gesture = _gesture;
// trigger release event.
// "release" by default doesn't return the co-ords where your
// finger was released. "position" will return "the last touched co-ords"
triggerEvent("release", {
originalEvent : event,
gesture : _gesture,
position : _pos.move || _pos.start
});
// reset vars
reset();
break;
}
}
function handleMouseOut(event) {
if(!isInsideHammer(element, event.relatedTarget)) {
handleEvents(event);
}
}
// bind events for touch devices
// except for windows phone 7.5, it doesnt support touch events..!
if(_has_touch) {
addEvent(element, "touchstart touchmove touchend touchcancel", handleEvents);
}
// for non-touch
else {
addEvent(element, "mouseup mousedown mousemove", handleEvents);
addEvent(element, "mouseout", handleMouseOut);
}
/**
* find if element is (inside) given parent element
* @param object element
* @param object parent
* @return bool inside
*/
function isInsideHammer(parent, child) {
// get related target for IE
if(!child && window.event && window.event.toElement){
child = window.event.toElement;
}
if(parent === child){
return true;
}
// loop over parentNodes of child until we find hammer element
if(child){
var node = child.parentNode;
while(node !== null){
if(node === parent){
return true;
};
node = node.parentNode;
}
}
return false;
}
/**
* merge 2 objects into a new object
* @param object obj1
* @param object obj2
* @return object merged object
*/
function mergeObject(obj1, obj2) {
var output = {};
if(!obj2) {
return obj1;
}
for (var prop in obj1) {
if (prop in obj2) {
output[prop] = obj2[prop];
} else {
output[prop] = obj1[prop];
}
}
return output;
}
/**
* check if object is a function
* @param object obj
* @return bool is function
*/
function isFunction( obj ){
return Object.prototype.toString.call( obj ) == "[object Function]";
}
/**
* attach event
* @param node element
* @param string types
* @param object callback
*/
function addEvent(element, types, callback) {
types = types.split(" ");
for(var t= 0,len=types.length; t<len; t++) {
if(element.addEventListener){
element.addEventListener(types[t], callback, false);
}
else if(document.attachEvent){
element.attachEvent("on"+ types[t], callback);
}
}
}
/**
* detach event
* @param node element
* @param string types
* @param object callback
*/
function removeEvent(element, types, callback) {
types = types.split(" ");
for(var t= 0,len=types.length; t<len; t++) {
if(element.removeEventListener){
element.removeEventListener(types[t], callback, false);
}
else if(document.detachEvent){
element.detachEvent("on"+ types[t], callback);
}
}
}
} | Corrected silly typo.
Changed "diraction" to "direction". | hammer.js | Corrected silly typo. | <ide><path>ammer.js
<ide> // holds the exact angle that has been moved
<ide> var _angle = 0;
<ide>
<del> // holds the diraction that has been moved
<add> // holds the direction that has been moved
<ide> var _direction = 0;
<ide>
<ide> // holds position movement for sliding |
|
JavaScript | apache-2.0 | 0eeb7163e0a26d4197957e1d7011a0365b5324e2 | 0 | joefitzgerald/gometalinter-linter | 'use babel'
import {CompositeDisposable} from 'atom'
import os from 'os'
import path from 'path'
function capitalizeFirstLetter (str) {
if (!str) {
return str
}
return str.charAt(0).toUpperCase() + str.slice(1)
}
class GometalinterLinter {
constructor (goconfigFunc, gogetFunc) {
this.goget = gogetFunc
this.goconfig = goconfigFunc
this.subscriptions = new CompositeDisposable()
this.name = 'gometalinter'
this.grammarScopes = ['source.go']
this.scope = 'project'
this.lintOnFly = false
this.toolCheckComplete = false
this.subscriptions.add(atom.commands.add('atom-workspace', 'golang:updatelinters', () => {
this.updateTools()
}))
this.registered = false
}
dispose () {
if (this.subscriptions) {
this.subscriptions.dispose()
}
this.subscriptions = null
this.goget = null
this.goconfig = null
this.name = null
this.grammarScopes = null
this.lintOnFly = null
this.toolCheckComplete = null
}
registerTool () {
if (this.registered) {
return
}
let g = this.goget()
if (!g) {
return
}
this.subscriptions.add(g.register('github.com/alecthomas/gometalinter', (outcome) => {
if (!outcome.success) {
return
}
this.updateTools()
}))
this.registered = true
}
ready () {
if (!this.goconfig) {
return false
}
let config = this.goconfig()
if (!config) {
return false
}
return true
}
lint (editor) {
if (!this.ready() || !editor) {
return []
}
let buffer = editor.getBuffer()
if (!buffer) {
return []
}
let args = atom.config.get('gometalinter-linter.args')
if (!args || args.constructor !== Array || args.indexOf('--json') === -1) {
args = ['--vendor', '--fast', '--json', './...']
}
if (args.indexOf('--json') === -1) {
args.unshift('--json')
}
let config = this.goconfig()
let options = this.getLocatorOptions(editor)
return config.locator.findTool('gometalinter', options).then((cmd) => {
if (!cmd) {
this.checkForTool(editor)
return []
}
let options = this.getExecutorOptions(editor)
return config.executor.exec(cmd, args, options).then((r) => {
if (r.stderr && r.stderr.trim() !== '') {
console.log('gometalinter-linter: (stderr) ' + r.stderr)
}
let messages = []
if (r.stdout && r.stdout.trim() !== '') {
messages = this.mapMessages(r.stdout, editor, options.cwd)
}
if (!messages || messages.length < 1) {
return []
}
return messages
}).catch((e) => {
console.log(e)
return []
})
})
}
checkForTool (editor = atom.workspace.getActiveTextEditor()) {
let config = this.goconfig()
let options = this.getLocatorOptions(editor)
return config.locator.findTool('gometalinter', options).then((cmd) => {
if (!cmd && !this.toolCheckComplete) {
this.toolCheckComplete = true
let goget = this.goget()
if (!goget) {
return
}
goget.get({
name: 'gometalinter-linter',
packageName: 'gometalinter',
packagePath: 'github.com/alecthomas/gometalinter',
type: 'missing' // TODO check whether missing or outdated
}).then((r) => {
if (!r.success) {
return false
}
return this.updateTools(editor)
}).catch((e) => {
console.log(e)
})
}
})
}
getLocatorOptions (editor = atom.workspace.getActiveTextEditor()) {
let options = {}
if (editor) {
options.file = editor.getPath()
if (options.file) {
options.directory = path.dirname(options.file)
}
}
if (!options.directory) {
let paths = atom.project.getPaths()
if (paths.length) {
options.directory = paths[0]
}
}
return options
}
getExecutorOptions (editor = atom.workspace.getActiveTextEditor()) {
let o = this.getLocatorOptions(editor)
let options = {}
if (o.directory) {
options.cwd = o.directory
}
let config = this.goconfig()
if (config) {
options.env = config.environment(o)
}
if (!options.env) {
options.env = process.env
}
return options
}
updateTools (editor = atom.workspace.getActiveTextEditor()) {
if (!this.ready()) {
return Promise.resolve(false)
}
let config = this.goconfig()
let options = this.getLocatorOptions(editor)
return config.locator.findTool('gometalinter', options).then((cmd) => {
if (!cmd) {
return false
}
let args = ['--install']
let notification = atom.notifications.addInfo('gometalinter', {
dismissable: true,
icon: 'cloud-download',
description: 'Running `gometalinter --install` to install tools.'
})
let options = this.getExecutorOptions(editor)
return config.executor.exec(cmd, args, options).then((r) => {
notification.dismiss()
let detail = r.stdout + os.EOL + r.stderr
if (r.exitcode !== 0) {
atom.notifications.addWarning('gometalinter', {
dismissable: true,
icon: 'cloud-download',
detail: detail.trim()
})
return r
}
if (r.stderr && r.stderr.trim() !== '') {
console.log('gometalinter-linter: (stderr) ' + r.stderr)
}
atom.notifications.addSuccess('gometalinter', {
dismissable: true,
icon: 'cloud-download',
detail: detail.trim(),
description: 'The tools were installed.'
})
return r
})
})
}
mapMessages (data, editor, cwd) {
let messages = []
try {
messages = JSON.parse(data)
} catch (e) {
console.log(e)
}
if (!messages || messages.length < 1) {
return []
}
messages.sort((a, b) => {
if (!a && !b) {
return 0
}
if (!a && b) {
return -1
}
if (a && !b) {
return 1
}
if (!a.path && b.path) {
return -1
}
if (a.path && !b.path) {
return 1
}
if (a.path === b.path) {
if (a.line - b.line === 0) {
return a.row - b.row
}
return a.line - b.line
} else {
return a.path.localeCompare(b.path)
}
})
let results = []
for (let message of messages) {
let range
if (message.col && message.col >= 0) {
range = [[message.line - 1, message.col - 1], [message.line - 1, 1000]]
} else {
range = [[message.line - 1, 0], [message.line - 1, 1000]]
}
results.push({name: message.linter, type: capitalizeFirstLetter(message.severity), row: message.line, column: message.col, text: message.message + ' (' + message.linter + ')', filePath: path.join(cwd, message.path), range: range})
}
return results
}
}
export {GometalinterLinter}
| lib/linter.js | 'use babel'
import {CompositeDisposable} from 'atom'
import os from 'os'
import path from 'path'
function capitalizeFirstLetter (str) {
if (!str) {
return str
}
return str.charAt(0).toUpperCase() + str.slice(1)
}
class GometalinterLinter {
constructor (goconfigFunc, gogetFunc) {
this.goget = gogetFunc
this.goconfig = goconfigFunc
this.subscriptions = new CompositeDisposable()
this.name = 'gometalinter'
this.grammarScopes = ['source.go']
this.scope = 'project'
this.lintOnFly = false
this.toolCheckComplete = false
this.subscriptions.add(atom.commands.add('atom-workspace', 'golang:updatelinters', () => {
this.updateTools()
}))
this.registered = false
}
dispose () {
if (this.subscriptions) {
this.subscriptions.dispose()
}
this.subscriptions = null
this.goget = null
this.goconfig = null
this.name = null
this.grammarScopes = null
this.lintOnFly = null
this.toolCheckComplete = null
}
registerTool () {
if (this.registered) {
return
}
let g = this.goget()
if (!g) {
return
}
this.subscriptions.add(g.register('github.com/alecthomas/gometalinter', (outcome) => {
if (!outcome.success) {
return
}
this.updateTools()
}))
this.registered = true
}
ready () {
if (!this.goconfig) {
return false
}
let config = this.goconfig()
if (!config) {
return false
}
return true
}
lint (editor) {
if (!this.ready() || !editor) {
return []
}
let buffer = editor.getBuffer()
if (!buffer) {
return []
}
let args = atom.config.get('gometalinter-linter.args')
if (!args || args.constructor !== Array || args.indexOf('--json') === -1) {
args = ['--vendor', '--fast', '--json', './...']
}
if (args.indexOf('--json') === -1) {
args.unshift('--json')
}
let config = this.goconfig()
let options = this.getLocatorOptions(editor)
return config.locator.findTool('gometalinter', options).then((cmd) => {
if (!cmd) {
this.checkForTool(editor)
return []
}
let options = this.getExecutorOptions(editor)
return config.executor.exec(cmd, args, options).then((r) => {
if (r.stderr && r.stderr.trim() !== '') {
console.log('gometalinter-linter: (stderr) ' + r.stderr)
}
let messages = []
if (r.stdout && r.stdout.trim() !== '') {
messages = this.mapMessages(r.stdout, editor, options.cwd)
}
if (!messages || messages.length < 1) {
return []
}
return messages
}).catch((e) => {
console.log(e)
return []
})
})
}
checkForTool (editor = atom.workspace.getActiveTextEditor()) {
let config = this.goconfig()
let options = this.getLocatorOptions(editor)
return config.locator.findTool('gometalinter', options).then((cmd) => {
if (!cmd && !this.toolCheckComplete) {
this.toolCheckComplete = true
let goget = this.goget()
if (!goget) {
return
}
goget.get({
name: 'gometalinter-linter',
packageName: 'gometalinter',
packagePath: 'github.com/alecthomas/gometalinter',
type: 'missing' // TODO check whether missing or outdated
}).then((r) => {
if (!r.success) {
return false
}
return this.updateTools(editor)
}).catch((e) => {
console.log(e)
})
}
})
}
getLocatorOptions (editor = atom.workspace.getActiveTextEditor()) {
let options = {}
if (editor) {
options.file = editor.getPath()
if (options.file) {
options.directory = path.dirname(options.file)
}
}
if (!options.directory) {
let paths = atom.project.getPaths()
if (paths.length) {
options.directory = paths[0]
}
}
return options
}
getExecutorOptions (editor = atom.workspace.getActiveTextEditor()) {
let o = this.getLocatorOptions(editor)
let options = {}
if (o.directory) {
options.cwd = o.directory
}
let config = this.goconfig()
if (config) {
options.env = config.environment(o)
}
if (!options.env) {
options.env = process.env
}
return options
}
updateTools (editor = atom.workspace.getActiveTextEditor()) {
if (!this.ready()) {
return Promise.resolve(false)
}
let config = this.goconfig()
let options = this.getLocatorOptions(editor)
return config.locator.findTool('gometalinter', options).then((cmd) => {
if (!cmd) {
return false
}
let args = ['--install']
let notification = atom.notifications.addInfo('gometalinter', {
dismissable: false,
icon: 'cloud-download',
description: 'Running `gometalinter --install` to install tools.'
})
let options = this.getExecutorOptions(editor)
return config.executor.exec(cmd, args, options).then((r) => {
notification.dismiss()
let detail = r.stdout + os.EOL + r.stderr
if (r.exitcode !== 0) {
atom.notifications.addWarning('gometalinter', {
dismissable: true,
icon: 'cloud-download',
detail: detail.trim()
})
return r
}
if (r.stderr && r.stderr.trim() !== '') {
console.log('gometalinter-linter: (stderr) ' + r.stderr)
}
atom.notifications.addSuccess('gometalinter', {
dismissable: true,
icon: 'cloud-download',
detail: detail.trim(),
description: 'The tools were installed.'
})
return r
})
})
}
mapMessages (data, editor, cwd) {
let messages = []
try {
messages = JSON.parse(data)
} catch (e) {
console.log(e)
}
if (!messages || messages.length < 1) {
return []
}
messages.sort((a, b) => {
if (!a && !b) {
return 0
}
if (!a && b) {
return -1
}
if (a && !b) {
return 1
}
if (!a.path && b.path) {
return -1
}
if (a.path && !b.path) {
return 1
}
if (a.path === b.path) {
if (a.line - b.line === 0) {
return a.row - b.row
}
return a.line - b.line
} else {
return a.path.localeCompare(b.path)
}
})
let results = []
for (let message of messages) {
let range
if (message.col && message.col >= 0) {
range = [[message.line - 1, message.col - 1], [message.line - 1, 1000]]
} else {
range = [[message.line - 1, 0], [message.line - 1, 1000]]
}
results.push({name: message.linter, type: capitalizeFirstLetter(message.severity), row: message.line, column: message.col, text: message.message + ' (' + message.linter + ')', filePath: path.join(cwd, message.path), range: range})
}
return results
}
}
export {GometalinterLinter}
| Make notification dismissable
- Prevents the notification from disappearing prematurely, or remaining visible too long
| lib/linter.js | Make notification dismissable | <ide><path>ib/linter.js
<ide>
<ide> let args = ['--install']
<ide> let notification = atom.notifications.addInfo('gometalinter', {
<del> dismissable: false,
<add> dismissable: true,
<ide> icon: 'cloud-download',
<ide> description: 'Running `gometalinter --install` to install tools.'
<ide> }) |
|
Java | apache-2.0 | 6c649fa544d7ed592e273c23ecb9ee4b8d91a0c9 | 0 | operasoftware/operaprestodriver,operasoftware/operaprestodriver,operasoftware/operaprestodriver | package com.opera.core.systems;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import com.opera.core.systems.scope.protos.SystemInputProtos.ModifierPressed;
import com.opera.core.systems.scope.protos.SystemInputProtos.MouseInfo.MouseButton;
import com.opera.core.systems.settings.OperaDriverSettings;
class RunDriver {
private static OperaDesktopDriver driver;
// private static OperaDriver driver;
public static void main(String[]args) {
OperaDriverSettings settings = new OperaDriverSettings();
settings.setNoQuit(true);
settings.setNoRestart(true);
settings.setRunOperaLauncherFromOperaDriver(true);
//settings.setOperaBinaryLocation("/home/karianne/work/auto_common/run/lib/opera/opera");
settings.setOperaBinaryArguments("-watirtest");
settings.setOperaLauncherBinary("/home/karianne/ui_test/launcher/opera-launcher/projects/linux/launcher");
settings.setNoQuit(true);
System.out.println("Create Driver");
driver = new OperaDesktopDriver(settings);
// driver = new OperaDriver();
System.out.println(".Driver created ......");
for (int i = 0; i < 100; i++) {
driver.waitStart();
List<ModifierPressed> list = new ArrayList<ModifierPressed>();
driver.keyPress("t", list);
int id = driver.waitForWindowShown("Document Window");
System.out.println(" id of window shown = " + id);
QuickWidget widget = (QuickWidget) driver.findWidgetByName(id, "pb_CloseButton");
if (widget != null) {
driver.waitStart();
widget.click(MouseButton.LEFT, 1, list);
driver.waitForWindowClose("Document Window");
}
driver.resetOperaPrefs("new_prefs2");
System.out.println("-------------------------------------------------");
System.out.println("------- Runinng test no " + i + "----------------");
System.out.println("--------------------------------------------------");
}
/*
System.out.println("Do wait start");
driver.waitStart(); // wait for dialog to open
System.out.println("Do add to bookmarks action ");
driver.operaAction("Add to bookmarks");
//System.out.println("Wait for window activated ");
//driver.waitForWindowActivated();
// ---- Internal stuff to check text on the dialog -------
int id = driver.getWindowID("Bookmark Properties");
System.out.println("Id of bookmark dialog is " + id);
driver.getWidgetList(id);
System.out.println("Get Widget label_for_Nick_edit");
int widget_id = driver.getQuickWidgetID(id, "name", "label_for_Nick_edit");
System.out.println("widget_id returned is " + widget_id);
System.out.println("-- Done --");
// _-----------------------
*/
/*
driver.waitStart(); // wait for dialog to open
System.out.println("-- wait for dialog -- ");
driver.operaDesktopAction("Add to bookmarks");
if (driver.waitForWindowShown("Add Bookmark Dialog") != 0)
{
QuickWidget widget = driver.findWidgetByName(-1, "label_for_Nick_edit");
if (widget != null)
System.out.println("Widget: " + widget.getWidgetID());
driver.waitStart();
driver.operaDesktopAction("Cancel");
System.out.println("--Wait for window to close--");
driver.waitForWindowClose("Add Bookmark Dialog");
}
else
{
System.out.println("Time out waiting for dialog");
}
*/
// IN ruby:
//driver.label("name", "label_for_Nick_edit").verify_contains("Nickname");
/*
QuickWidget widget = (QuickWidget) driver.findWidgetByName("label_for_Nick_edit");
if (widget.getText().equals("Nickname"))
System.out.println("Success");
else
System.out.println("Failure");
*/
// ---------------------
/*
driver.waitStart();
driver.operaDesktopAction("Show preferences");
System.out.println("--Wait for window to open--");
int win_id = driver.waitForWindowShown("New Preferences Dialog");
QuickWidget qw = driver.findWidgetByName(-1, "Startpage_edit");
System.out.println("By name: " + qw.getText());
//qw.verifyText("D_STARTUP_WITH_HOMEPAGE");
qw.isVisible();
*/
// qw.click(MouseButton.LEFT, 0, ModifierPressed.NONE);
/*
System.out.println("Window name: " + driver.getWindowName(win_id));
driver.waitStart();
driver.operaDesktopAction("Cancel");
System.out.println("--Wait for window to close--");
driver.waitForWindowClose("New Preferences Dialog");
*/
/*
driver.waitStart();
System.out.println("--Wait for window to open--");
driver.operaDesktopAction("Open url in new page", "http://t/security/bts/164110/scary_file-keyboard.kini");
driver.waitForWindowShown("Setup Apply Dialog Confirm Dialog");
QuickWidget qw = driver.findWidgetByName(-1, "Simple_message");
System.out.println("Text: " + qw.getText());
System.out.println("Contains Text D_SECURITYALERT_SETUPDOWNLOAD: " + qw.verifyContainsText("D_SECURITYALERT_SETUPDOWNLOAD"));
QuickWidget qw_button = driver.findWidgetByName(-1, "button_Cancel");
qw_button.click();
*/
/*
driver.waitStart();
System.out.println("--Wait for window to close--");
driver.operaDesktopAction("Cancel");
driver.waitForWindowClose("Setup Apply Dialog Confirm Dialog");
driver.operaDesktopAction("Close page", "1");
*/
// System.out.println("Text: " + driver.getString("D_SECURITYALERT_SETUPDOWNLOAD_TITLE"));
/*
driver.waitStart();
System.out.println("--Wait for window to open--");
driver.operaDesktopAction("Add to bookmarks");
driver.waitForWindowShown("Add Bookmark Dialog");
QuickWidget qw = driver.findWidgetByName(-1, "label_for_Name_edit");
System.out.println("By name: " + qw.getText());
qw.verifyText("DI_ID_HLFILEPROP_FNAME_LABEL");
driver.waitStart();
System.out.println("--Wait for window to close--");
driver.operaDesktopAction("Cancel");
driver.waitForWindowClose("Add Bookmark Dialog");
*/
/*
System.out.println("--Go to Google --");
driver.get("http://www.google.com");
System.out.println("Gone to google ");
OperaWebElement element = (OperaWebElement) driver.findElementByName("q");
element.sendKeys("Opera Software");
element.submit();
*/
System.out.println("Done ");
//driver.quit();
}
}
| src/com/opera/core/systems/RunDriver.java | package com.opera.core.systems;
import org.openqa.selenium.By;
import org.openqa.selenium.NotFoundException;
import org.openqa.selenium.WebDriverException;
import org.openqa.selenium.WebElement;
import com.opera.core.systems.OperaDriver;
import com.opera.core.systems.OperaWebElement;
import com.opera.core.systems.scope.protos.SystemInputProtos.ModifierPressed;
import com.opera.core.systems.scope.protos.SystemInputProtos.MouseInfo.MouseButton;
import com.opera.core.systems.settings.OperaDriverSettings;
class RunDriver {
private static OperaDesktopDriver driver;
// private static OperaDriver driver;
public static void main(String[]args) {
OperaDriverSettings settings = new OperaDriverSettings();
settings.setRunOperaLauncherFromOperaDriver(true);
// settings.setOperaBinaryLocation("/Users/minch/Documents/DesktopDev/Kjevik/work/output/Debug/Opera.app/Contents/MacOS/Opera");
settings.setOperaBinaryArguments("-watirtest");
settings.setOperaLauncherBinary("/Users/minch/Documents/DesktopDev/OperaWatir/opera-watir/utils/launchers/launcher-mac"); // Hardcoded to Mac
settings.setNoQuit(true);
System.out.println("Create Driver");
driver = new OperaDesktopDriver(settings);
// driver = new OperaDriver();
System.out.println(".Driver created ......");
// driver.quit_opera();
driver.quit_driver();
// driver.get("http://www.google.com");
/*
System.out.println("Do wait start");
driver.waitStart(); // wait for dialog to open
System.out.println("Do add to bookmarks action ");
driver.operaAction("Add to bookmarks");
//System.out.println("Wait for window activated ");
//driver.waitForWindowActivated();
// ---- Internal stuff to check text on the dialog -------
int id = driver.getWindowID("Bookmark Properties");
System.out.println("Id of bookmark dialog is " + id);
driver.getWidgetList(id);
System.out.println("Get Widget label_for_Nick_edit");
int widget_id = driver.getQuickWidgetID(id, "name", "label_for_Nick_edit");
System.out.println("widget_id returned is " + widget_id);
System.out.println("-- Done --");
// _-----------------------
*/
/*
driver.waitStart(); // wait for dialog to open
System.out.println("-- wait for dialog -- ");
driver.operaDesktopAction("Add to bookmarks");
if (driver.waitForWindowShown("Add Bookmark Dialog") != 0)
{
QuickWidget widget = driver.findWidgetByName(-1, "label_for_Nick_edit");
if (widget != null)
System.out.println("Widget: " + widget.getWidgetID());
driver.waitStart();
driver.operaDesktopAction("Cancel");
System.out.println("--Wait for window to close--");
driver.waitForWindowClose("Add Bookmark Dialog");
}
else
{
System.out.println("Time out waiting for dialog");
}
*/
// IN ruby:
//driver.label("name", "label_for_Nick_edit").verify_contains("Nickname");
/*
QuickWidget widget = (QuickWidget) driver.findWidgetByName("label_for_Nick_edit");
if (widget.getText().equals("Nickname"))
System.out.println("Success");
else
System.out.println("Failure");
*/
// ---------------------
/*
driver.waitStart();
driver.operaDesktopAction("Show preferences");
System.out.println("--Wait for window to open--");
int win_id = driver.waitForWindowShown("New Preferences Dialog");
QuickWidget qw = driver.findWidgetByName(-1, "Startpage_edit");
System.out.println("By name: " + qw.getText());
//qw.verifyText("D_STARTUP_WITH_HOMEPAGE");
qw.isVisible();
*/
// qw.click(MouseButton.LEFT, 0, ModifierPressed.NONE);
/*
System.out.println("Window name: " + driver.getWindowName(win_id));
driver.waitStart();
driver.operaDesktopAction("Cancel");
System.out.println("--Wait for window to close--");
driver.waitForWindowClose("New Preferences Dialog");
*/
/*
driver.waitStart();
System.out.println("--Wait for window to open--");
driver.operaDesktopAction("Open url in new page", "http://t/security/bts/164110/scary_file-keyboard.kini");
driver.waitForWindowShown("Setup Apply Dialog Confirm Dialog");
QuickWidget qw = driver.findWidgetByName(-1, "Simple_message");
System.out.println("Text: " + qw.getText());
System.out.println("Contains Text D_SECURITYALERT_SETUPDOWNLOAD: " + qw.verifyContainsText("D_SECURITYALERT_SETUPDOWNLOAD"));
QuickWidget qw_button = driver.findWidgetByName(-1, "button_Cancel");
qw_button.click();
*/
/*
driver.waitStart();
System.out.println("--Wait for window to close--");
driver.operaDesktopAction("Cancel");
driver.waitForWindowClose("Setup Apply Dialog Confirm Dialog");
driver.operaDesktopAction("Close page", "1");
*/
// System.out.println("Text: " + driver.getString("D_SECURITYALERT_SETUPDOWNLOAD_TITLE"));
/*
driver.waitStart();
System.out.println("--Wait for window to open--");
driver.operaDesktopAction("Add to bookmarks");
driver.waitForWindowShown("Add Bookmark Dialog");
QuickWidget qw = driver.findWidgetByName(-1, "label_for_Name_edit");
System.out.println("By name: " + qw.getText());
qw.verifyText("DI_ID_HLFILEPROP_FNAME_LABEL");
driver.waitStart();
System.out.println("--Wait for window to close--");
driver.operaDesktopAction("Cancel");
driver.waitForWindowClose("Add Bookmark Dialog");
*/
/*
System.out.println("--Go to Google --");
driver.get("http://www.google.com");
System.out.println("Gone to google ");
OperaWebElement element = (OperaWebElement) driver.findElementByName("q");
element.sendKeys("Opera Software");
element.submit();
*/
System.out.println("Done ");
//driver.quit();
}
}
| Debug loop RunDriver
| src/com/opera/core/systems/RunDriver.java | Debug loop RunDriver | <ide><path>rc/com/opera/core/systems/RunDriver.java
<ide> package com.opera.core.systems;
<ide>
<del>import org.openqa.selenium.By;
<del>import org.openqa.selenium.NotFoundException;
<del>import org.openqa.selenium.WebDriverException;
<del>import org.openqa.selenium.WebElement;
<del>
<del>import com.opera.core.systems.OperaDriver;
<del>import com.opera.core.systems.OperaWebElement;
<add>import java.util.ArrayList;
<add>import java.util.List;
<add>import java.util.Set;
<ide> import com.opera.core.systems.scope.protos.SystemInputProtos.ModifierPressed;
<ide> import com.opera.core.systems.scope.protos.SystemInputProtos.MouseInfo.MouseButton;
<ide> import com.opera.core.systems.settings.OperaDriverSettings;
<ide>
<ide> public static void main(String[]args) {
<ide> OperaDriverSettings settings = new OperaDriverSettings();
<add> settings.setNoQuit(true);
<add> settings.setNoRestart(true);
<ide>
<ide> settings.setRunOperaLauncherFromOperaDriver(true);
<del>// settings.setOperaBinaryLocation("/Users/minch/Documents/DesktopDev/Kjevik/work/output/Debug/Opera.app/Contents/MacOS/Opera");
<add> //settings.setOperaBinaryLocation("/home/karianne/work/auto_common/run/lib/opera/opera");
<ide> settings.setOperaBinaryArguments("-watirtest");
<del> settings.setOperaLauncherBinary("/Users/minch/Documents/DesktopDev/OperaWatir/opera-watir/utils/launchers/launcher-mac"); // Hardcoded to Mac
<add> settings.setOperaLauncherBinary("/home/karianne/ui_test/launcher/opera-launcher/projects/linux/launcher");
<ide> settings.setNoQuit(true);
<add>
<ide>
<ide> System.out.println("Create Driver");
<ide> driver = new OperaDesktopDriver(settings);
<ide> // driver = new OperaDriver();
<ide> System.out.println(".Driver created ......");
<ide>
<del>// driver.quit_opera();
<del> driver.quit_driver();
<add> for (int i = 0; i < 100; i++) {
<add> driver.waitStart();
<add> List<ModifierPressed> list = new ArrayList<ModifierPressed>();
<add> driver.keyPress("t", list);
<add> int id = driver.waitForWindowShown("Document Window");
<add> System.out.println(" id of window shown = " + id);
<add>
<add> QuickWidget widget = (QuickWidget) driver.findWidgetByName(id, "pb_CloseButton");
<add> if (widget != null) {
<add> driver.waitStart();
<add> widget.click(MouseButton.LEFT, 1, list);
<add> driver.waitForWindowClose("Document Window");
<add> }
<ide>
<del>// driver.get("http://www.google.com");
<add> driver.resetOperaPrefs("new_prefs2");
<add>
<add> System.out.println("-------------------------------------------------");
<add> System.out.println("------- Runinng test no " + i + "----------------");
<add> System.out.println("--------------------------------------------------");
<add>
<add> }
<ide>
<ide> /*
<ide> System.out.println("Do wait start"); |
|
Java | mit | df7f3140c3b6402ac45bef5929c9b7e513ec1297 | 0 | vincentzhang96/VahrhedralBot | package co.phoenixlab.discord.api;
import co.phoenixlab.discord.api.entities.ReadyMessage;
import co.phoenixlab.discord.api.entities.Server;
import co.phoenixlab.discord.api.entities.User;
import com.google.gson.Gson;
import org.java_websocket.client.WebSocketClient;
import org.java_websocket.handshake.ServerHandshake;
import org.json.simple.JSONObject;
import org.json.simple.parser.JSONParser;
import org.json.simple.parser.ParseException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.net.URI;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
public class DiscordWebSocketClient extends WebSocketClient {
private static final Logger LOGGER = LoggerFactory.getLogger("DiscordApiClient");
private final DiscordApiClient apiClient;
private final JSONParser parser;
private final Gson gson;
private ScheduledFuture keepAliveFuture;
public DiscordWebSocketClient(DiscordApiClient apiClient, URI serverUri) {
super(serverUri);
this.apiClient = apiClient;
this.parser = new JSONParser();
this.gson = new Gson();
}
@Override
public void onOpen(ServerHandshake handshakedata) {
Thread.currentThread().setName("WebSocketClient");
LOGGER.info("WebSocket connection opened");
send("{\"op\":2,\"d\":{\"token\":\"" + apiClient.getToken() + "\",\"properties\":{\"$os\":\"Linux\",\"" +
"$browser\":\"DesuBot\",\"$device\":\"DesuBot\",\"$referrer\":\"\",\"$referring_domain\"" +
":\"\"},\"v\":2}}");
}
@Override
public void onMessage(String message) {
LOGGER.debug("Recieved message: {}", message);
try {
JSONObject msg = (JSONObject) parser.parse(message);
String errorMessage = (String) msg.get("message");
if (errorMessage != null) {
if (errorMessage.isEmpty()) {
LOGGER.warn("Discord returned an unknown error");
} else {
LOGGER.warn("Discord returned error: {}", errorMessage);
}
return;
}
String type = (String) msg.get("t");
JSONObject data = (JSONObject) msg.get("d");
switch (type) {
case "READY":
handleReadyMessage(data);
break;
case "MESSAGE_CREATE":
handleMessageCreate(data);
break;
// TODO
default:
LOGGER.warn("Unknown message type {}:\n{}", type, data.toJSONString());
}
} catch (ParseException e) {
LOGGER.warn("Unable to parse message", e);
}
}
private void handleReadyMessage(JSONObject data) {
// Because this doesnt come often and to simplify matters
// we'll serialize the subobject to string and have Gson parse out the object
String json = data.toJSONString();
LOGGER.debug(json);
ReadyMessage readyMessage = gson.fromJson(json, ReadyMessage.class);
apiClient.setSessionId(readyMessage.getSessionId());
LOGGER.info("Using sessionId {}", apiClient.getSessionId());
User user = readyMessage.getUser();
apiClient.setClientUser(user);
LOGGER.info("Logged in as {}#{} ID {}", user.getUsername(), user.getDiscriminator(), user.getId());
startKeepAlive(readyMessage.getHeartbeatInterval());
LOGGER.info("Sending keepAlive every {} ms", readyMessage.getHeartbeatInterval());
LOGGER.info("Connected to {} servers", readyMessage.getServers().length);
LOGGER.info("Holding {} private conversations", readyMessage.getPrivateChannels().length);
// We don't bother populating channel messages since we only care about new messages coming in
List<Server> servers = apiClient.getServers();
servers.clear();
Collections.addAll(servers, readyMessage.getServers());
apiClient.remapServers();
}
@SuppressWarnings("unchecked")
private void startKeepAlive(long keepAliveInterval) {
if (keepAliveFuture != null) {
keepAliveFuture.cancel(true);
}
keepAliveFuture = apiClient.getExecutorService().scheduleAtFixedRate(() -> {
JSONObject keepAlive = new JSONObject();
keepAlive.put("op", 1);
keepAlive.put("d", System.currentTimeMillis());
LOGGER.debug("Sending keepAlive");
send(keepAlive.toJSONString());
}, 0, keepAliveInterval, TimeUnit.MILLISECONDS);
}
private void handleMessageCreate(JSONObject data) {
}
@Override
public void onClose(int code, String reason, boolean remote) {
LOGGER.info("Closing WebSocket {}: {} {}", code, reason, remote ? "remote" : "local");
if (keepAliveFuture != null) {
keepAliveFuture.cancel(true);
}
}
@Override
public void onError(Exception ex) {
LOGGER.warn("WebSocket error", ex);
}
}
| src/main/java/co/phoenixlab/discord/api/DiscordWebSocketClient.java | package co.phoenixlab.discord.api;
import co.phoenixlab.discord.api.entities.ReadyMessage;
import co.phoenixlab.discord.api.entities.Server;
import co.phoenixlab.discord.api.entities.User;
import com.google.gson.Gson;
import org.java_websocket.client.WebSocketClient;
import org.java_websocket.handshake.ServerHandshake;
import org.json.simple.JSONObject;
import org.json.simple.parser.JSONParser;
import org.json.simple.parser.ParseException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.net.URI;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
public class DiscordWebSocketClient extends WebSocketClient {
private static final Logger LOGGER = LoggerFactory.getLogger("DiscordApiClient");
private final DiscordApiClient apiClient;
private final JSONParser parser;
private final Gson gson;
private ScheduledFuture keepAliveFuture;
public DiscordWebSocketClient(DiscordApiClient apiClient, URI serverUri) {
super(serverUri);
this.apiClient = apiClient;
this.parser = new JSONParser();
this.gson = new Gson();
}
@Override
public void onOpen(ServerHandshake handshakedata) {
Thread.currentThread().setName("WebSocketClient");
LOGGER.info("WebSocket connection opened");
send("{\"op\":2,\"d\":{\"token\":\"" + apiClient.getToken() + "\",\"properties\":{\"$os\":\"Linux\",\"" +
"$browser\":\"DesuBot\",\"$device\":\"DesuBot\",\"$referrer\":\"\",\"$referring_domain\"" +
":\"\"},\"v\":2}}");
}
@Override
public void onMessage(String message) {
LOGGER.debug("Recieved message: {}", message);
try {
JSONObject msg = (JSONObject) parser.parse(message);
String errorMessage = (String) msg.get("message");
if (errorMessage != null) {
if (errorMessage.isEmpty()) {
LOGGER.warn("Discord returned an unknown error");
} else {
LOGGER.warn("Discord returned error: {}", errorMessage);
}
return;
}
String type = (String) msg.get("t");
JSONObject data = (JSONObject) msg.get("d");
switch (type) {
case "READY":
handleReadyMessage(data);
break;
// TODO
default:
LOGGER.warn("Unknown message type {}:\n{}", type, data.toJSONString());
}
} catch (ParseException e) {
LOGGER.warn("Unable to parse message", e);
}
}
private void handleReadyMessage(JSONObject data) {
// Because this doesnt come often and to simplify matters
// we'll serialize the subobject to string and have Gson parse out the object
String json = data.toJSONString();
LOGGER.debug(json);
ReadyMessage readyMessage = gson.fromJson(json, ReadyMessage.class);
apiClient.setSessionId(readyMessage.getSessionId());
LOGGER.info("Using sessionId {}", apiClient.getSessionId());
User user = readyMessage.getUser();
apiClient.setClientUser(user);
LOGGER.info("Logged in as {}#{} ID {}", user.getUsername(), user.getDiscriminator(), user.getId());
startKeepAlive(readyMessage.getHeartbeatInterval());
LOGGER.info("Sending keepAlive every {} ms", readyMessage.getHeartbeatInterval());
LOGGER.info("Connected to {} servers", readyMessage.getServers().length);
LOGGER.info("Holding {} private conversations", readyMessage.getPrivateChannels().length);
// We don't bother populating channel messages since we only care about new messages coming in
List<Server> servers = apiClient.getServers();
servers.clear();
Collections.addAll(servers, readyMessage.getServers());
apiClient.remapServers();
}
@SuppressWarnings("unchecked")
private void startKeepAlive(long keepAliveInterval) {
if (keepAliveFuture != null) {
keepAliveFuture.cancel(true);
}
keepAliveFuture = apiClient.getExecutorService().scheduleAtFixedRate(() -> {
JSONObject keepAlive = new JSONObject();
keepAlive.put("op", 1);
keepAlive.put("d", System.currentTimeMillis());
LOGGER.debug("Sending keepAlive");
send(keepAlive.toJSONString());
}, 0, keepAliveInterval, TimeUnit.MILLISECONDS);
}
@Override
public void onClose(int code, String reason, boolean remote) {
LOGGER.info("Closing WebSocket {}: {} {}", code, reason, remote ? "remote" : "local");
if (keepAliveFuture != null) {
keepAliveFuture.cancel(true);
}
}
@Override
public void onError(Exception ex) {
LOGGER.warn("WebSocket error", ex);
}
}
| Stub message create event handler
| src/main/java/co/phoenixlab/discord/api/DiscordWebSocketClient.java | Stub message create event handler | <ide><path>rc/main/java/co/phoenixlab/discord/api/DiscordWebSocketClient.java
<ide> case "READY":
<ide> handleReadyMessage(data);
<ide> break;
<del>
<add> case "MESSAGE_CREATE":
<add> handleMessageCreate(data);
<add> break;
<ide> // TODO
<ide>
<ide> default:
<ide> }, 0, keepAliveInterval, TimeUnit.MILLISECONDS);
<ide> }
<ide>
<add> private void handleMessageCreate(JSONObject data) {
<add>
<add> }
<add>
<ide> @Override
<ide> public void onClose(int code, String reason, boolean remote) {
<ide> LOGGER.info("Closing WebSocket {}: {} {}", code, reason, remote ? "remote" : "local"); |
|
Java | apache-2.0 | 9c951b81a107ac61cb43119899343e932b420d34 | 0 | almende/dialog,almende/dialog,almende/dialog | package com.almende.dialog.adapter;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.StringWriter;
import java.io.UnsupportedEncodingException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URLDecoder;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
import java.util.logging.Logger;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.znerd.xmlenc.XMLOutputter;
import com.almende.dialog.DDRWrapper;
import com.almende.dialog.accounts.AdapterConfig;
import com.almende.dialog.adapter.tools.Broadsoft;
import com.almende.dialog.agent.AdapterAgent;
import com.almende.dialog.model.Answer;
import com.almende.dialog.model.MediaProperty.MediaPropertyKey;
import com.almende.dialog.model.MediaProperty.MediumType;
import com.almende.dialog.model.Question;
import com.almende.dialog.model.Session;
import com.almende.dialog.model.ddr.DDRRecord;
import com.almende.dialog.util.DDRUtils;
import com.almende.dialog.util.PhoneNumberUtils;
import com.almende.dialog.util.ServerUtils;
import com.almende.util.myBlobstore.MyBlobStore;
import com.google.i18n.phonenumbers.PhoneNumberUtil.PhoneNumberFormat;
@Path("/vxml/")
public class VoiceXMLRESTProxy {
protected static final Logger log = Logger.getLogger(VoiceXMLRESTProxy.class.getName());
protected static final com.almende.dialog.Logger dialogLog = new com.almende.dialog.Logger();
private static final int LOOP_DETECTION=10;
private static final String DTMFGRAMMAR="dtmf2hash";
private static final int MAX_RETRIES=1;
protected String TIMEOUT_URL="timeout";
protected String EXCEPTION_URL="exception";
private String host = "";
public static void killSession(Session session){
AdapterConfig config = session.getAdapterConfig();
if(config!=null) {
Broadsoft bs = new Broadsoft(config);
bs.endCall(session.getExternalSession());
}
}
/**
* @Deprecated. Use broadcast calling mechanism instead. <br>
* {@link VoiceXMLRESTProxy#dial(Map, String, String, AdapterConfig) dial} method
*
* @param address
* @param url
* @param config
* @return
*/
@Deprecated
public static String dial(String address, String url, AdapterConfig config)
{
try
{
address = PhoneNumberUtils.formatNumber( address, PhoneNumberFormat.E164 );
}
catch ( Exception e )
{
log.severe( String.format( "Phonenumber: %s is not valid", address ) );
return "";
}
String sessionKey = AdapterAgent.ADAPTER_TYPE_BROADSOFT +"|"+config.getMyAddress()+"|"+address;
Session session = Session.getSession(sessionKey);
if (session == null){
log.severe("VoiceXMLRESTProxy couldn't start new outbound Dialog, adapterConfig not found? "+sessionKey);
return "";
}
session.killed=false;
session.setStartUrl(url);
session.setDirection("outbound");
session.setRemoteAddress(address);
session.setType(AdapterAgent.ADAPTER_TYPE_BROADSOFT);
session.setTrackingToken(UUID.randomUUID().toString());
Question question = Question.fromURL(url,config.getConfigId(),address,config.getMyAddress());
session.setQuestion( question );
session.storeSession();
DDRWrapper.log(url,session.getTrackingToken(),session,"Dial",config);
Broadsoft bs = new Broadsoft(config);
bs.startSubscription();
String extSession = bs.startCall(address + "@outbound");
session.setExternalSession(extSession);
session.storeSession();
return sessionKey;
}
/**
* initiates a call to all the numbers in the addressNameMap and returns a
* Map of <adress, SessionKey>
* @return
*/
public static HashMap<String, String> dial( Map<String, String> addressNameMap, String url, String senderName, AdapterConfig config )
throws Exception
{
String sessionPrefix = AdapterAgent.ADAPTER_TYPE_BROADSOFT+"|"+config.getMyAddress()+"|" ;
HashMap<String, String> resultSessionMap = new HashMap<String, String>();
// If it is a broadcast don't provide the remote address because it is deceiving.
String loadAddress = "";
if(addressNameMap.size()==1)
loadAddress = addressNameMap.keySet().iterator().next();
//fetch the question
Question question = Question.fromURL( url, config.getConfigId(), loadAddress, config.getMyAddress() );
for ( String address : addressNameMap.keySet() )
{
try
{
String formattedAddress = PhoneNumberUtils.formatNumber( address, PhoneNumberFormat.E164 );
String sessionKey = sessionPrefix + formattedAddress;
Session session = Session.getSession( sessionKey );
if ( session == null )
{
log.severe( "VoiceXMLRESTProxy couldn't start new outbound Dialog, adapterConfig not found? "
+ sessionKey );
return null;
}
session.killed=false;
session.setStartUrl(url);
session.setDirection("outbound");
session.setRemoteAddress(formattedAddress);
session.setType(AdapterAgent.ADAPTER_TYPE_BROADSOFT);
session.setTrackingToken(UUID.randomUUID().toString());
session.setAdapterID(config.getConfigId());
session.setQuestion( question );
session.storeSession();
DDRWrapper.log(url,session.getTrackingToken(),session,"Dial",config);
Broadsoft bs = new Broadsoft( config );
bs.startSubscription();
String extSession = "";
if ( !ServerUtils.isInUnitTestingEnvironment() )
{
extSession = bs.startCall( formattedAddress + "@outbound" );
}
session.setExternalSession( extSession );
session.storeSession();
resultSessionMap.put( formattedAddress, sessionKey );
}
catch ( Exception e )
{
log.severe( String.format( "Phonenumber: %s is not valid", address ) );
}
}
return resultSessionMap;
}
public static ArrayList<String> getActiveCalls(AdapterConfig config) {
Broadsoft bs = new Broadsoft(config);
return bs.getActiveCalls();
}
public static ArrayList<String> getActiveCallsInfo(AdapterConfig config) {
Broadsoft bs = new Broadsoft(config);
return bs.getActiveCallsInfo();
}
public static boolean killActiveCalls(AdapterConfig config) {
Broadsoft bs = new Broadsoft(config);
return bs.killActiveCalls();
}
@Path("dtmf2hash")
@GET
@Produces("application/srgs+xml")
public Response getDTMF2Hash(@QueryParam("minlength") String minLength, @QueryParam("maxlength") String maxLength) {
minLength = (minLength != null && !minLength.isEmpty()) ? minLength : "0";
maxLength = (maxLength != null && !maxLength.isEmpty()) ? maxLength : "";
String repeat = minLength.equals( maxLength ) ? minLength : minLength + "-" + maxLength;
String result = "<?xml version=\"1.0\"?> "+
"<grammar mode=\"dtmf\" version=\"1.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://www.w3.org/2001/06/grammar http://www.w3.org/TR/speech-grammar/grammar.xsd\" xmlns=\"http://www.w3.org/2001/06/grammar\" root=\"untilHash\" > "+
"<rule id=\"digit\"> "+
"<one-of> "+
"<item> 0 </item> "+
"<item> 1 </item> "+
"<item> 2 </item> "+
"<item> 3 </item> "+
"<item> 4 </item> "+
"<item> 5 </item> "+
"<item> 6 </item> "+
"<item> 7 </item> "+
"<item> 8 </item> "+
"<item> 9 </item> "+
"<item> * </item> "+
"</one-of> "+
"</rule> "+
"<rule id=\"untilHash\" scope=\"public\"> "+
"<one-of> "+
"<item repeat=\"" + repeat + "\"><ruleref uri=\"#digit\"/></item> "+
"<item> # </item> "+
"</one-of> "+
"</rule> "+
"</grammar> ";
return Response.ok(result).build();
}
@Path("new")
@GET
@Produces("application/voicexml")
public Response getNewDialog(@QueryParam("direction") String direction,@QueryParam("remoteID") String remoteID,@QueryParam("localID") String localID, @Context UriInfo ui)
{
log.info("call started:"+direction+":"+remoteID+":"+localID);
this.host=ui.getBaseUri().toString().replace(":80", "");
AdapterConfig config = AdapterConfig.findAdapterConfig(AdapterAgent.ADAPTER_TYPE_BROADSOFT, localID);
String formattedRemoteId = remoteID;
//format the remote number
try
{
formattedRemoteId = PhoneNumberUtils.formatNumber( remoteID.split( "@" )[0], PhoneNumberFormat.E164 );
}
catch ( Exception e1 )
{
log.severe( "Remote number formatting failed: "+ remoteID.split( "@" )[0] );
}
String sessionKey = AdapterAgent.ADAPTER_TYPE_BROADSOFT+"|"+localID+"|"+ formattedRemoteId;
Session session = Session.getSession(sessionKey);
String url = "";
if ( session != null )
{
session.setDirection( direction );
session.setRemoteAddress( remoteID );
session.setType( AdapterAgent.ADAPTER_TYPE_BROADSOFT );
session.setAccountId( config.getOwner() );
session.setTrackingToken( UUID.randomUUID().toString() );
session.setAdapterID( config.getConfigId() );
if (direction.equals("inbound")){
url = config.getInitialAgentURL();
session.setStartUrl( url );
Broadsoft bs = new Broadsoft( config );
bs.startSubscription();
}
else if(direction.equalsIgnoreCase("outbound")) // Remove retry counter because call is succesfull
{
url = session.getStartUrl();
}
}
else {
log.severe(String.format("Session %s not found", sessionKey));
return null;
}
Question question = session.getQuestion();
if(question == null) {
question = Question.fromURL(url,session.getAdapterConfig().getConfigId(),remoteID,localID);
session.setQuestion( question );
}
DDRWrapper.log(question,session,"Start",config);
//add costs
try
{
DDRRecord ddrRecord = null;
if ( direction.equalsIgnoreCase( "outbound" ) )
{
ddrRecord = DDRUtils.createDDRRecordOnOutgoingCommunication( config, remoteID, 1 );
}
else
{
ddrRecord = DDRUtils.createDDRRecordOnIncomingCommunication( config, remoteID, 1 );
}
session.setDDRRecordId( ddrRecord.getId() );
}
catch ( Exception e )
{
String errorMessage = String.format(
"Creating DDR records failed. Direction: %s for adapterId: %s with address: %s remoteId: %s and localId: %s",
direction, config.getConfigId(), config.getMyAddress(), remoteID, localID );
log.severe( errorMessage );
dialogLog.severe( config.getConfigId(), errorMessage );
}
finally
{
session.storeSession();
}
return handleQuestion( question, config.getConfigId(), remoteID, sessionKey );
}
@Path( "answer" )
@GET
@Produces( "application/voicexml+xml" )
public Response answer( @QueryParam( "questionId" ) String question_id,
@QueryParam( "answerId" ) String answer_id, @QueryParam( "answerInput" ) String answer_input,
@QueryParam( "sessionKey" ) String sessionKey, @Context UriInfo ui )
{
try
{
answer_input = answer_input != null ? URLDecoder.decode( answer_input, "UTF-8" ) : answer_input;
}
catch ( UnsupportedEncodingException e )
{
log.warning( String.format( "Answer input decode failed for: %s", answer_input) );
}
this.host = ui.getBaseUri().toString().replace( ":80", "" );
String reply = "<vxml><exit/></vxml>";
Session session = Session.getSession( sessionKey );
if ( session != null )
{
Question question = session.getQuestion();
if ( question != null )
{
String responder = session.getRemoteAddress();
if ( session.killed )
{
log.warning( "session is killed" );
return Response.status( Response.Status.BAD_REQUEST ).build();
}
DDRWrapper.log( question, session, "Answer" );
question = question.answer( responder, session.getAdapterConfig().getConfigId(), answer_id,
answer_input, sessionKey );
session.setQuestion( question );
session.storeSession();
return handleQuestion( question, session.getAdapterConfig().getConfigId(), responder, sessionKey );
} else {
log.warning( "No question found in session!" );
}
}
else
{
log.warning( "No session found for: " + sessionKey );
}
return Response.ok( reply ).build();
}
@Path( "timeout" )
@GET
@Produces( "application/voicexml+xml" )
public Response timeout( @QueryParam( "questionId" ) String question_id,
@QueryParam( "sessionKey" ) String sessionKey ) throws Exception
{
String reply = "<vxml><exit/></vxml>";
Session session = Session.getSession( sessionKey );
if ( session != null )
{
Question question = session.getQuestion();
String responder = session.getRemoteAddress();
if ( session.killed )
{
return Response.status( Response.Status.BAD_REQUEST ).build();
}
DDRWrapper.log( question, session, "Timeout" );
HashMap<String,Object> extras = new HashMap<String, Object>();
extras.put( "sessionKey", sessionKey );
question = question.event( "timeout", "No answer received", extras, responder );
session.setQuestion( question );
session.storeSession();
return handleQuestion( question, session.getAdapterConfig().getConfigId(), responder, sessionKey );
}
return Response.ok( reply ).build();
}
@Path("exception")
@GET
@Produces("application/voicexml+xml")
public Response exception(@QueryParam("questionId") String question_id, @QueryParam("sessionKey") String sessionKey){
String reply="<vxml><exit/></vxml>";
Session session = Session.getSession( sessionKey );
if (session != null && session.getQuestion() != null){
Question question = session.getQuestion();
String responder = session.getRemoteAddress();
if (session.killed){
return Response.status(Response.Status.BAD_REQUEST).build();
}
DDRWrapper.log(question,session,"Timeout");
HashMap<String, String> extras = new HashMap<String, String>();
extras.put( "sessionKey", sessionKey );
question = question.event("exception", "Wrong answer received", extras, responder);
session.setQuestion( question );
session.storeSession();
return handleQuestion(question,session.getAdapterID(),responder,sessionKey);
}
return Response.ok(reply).build();
}
@Path("hangup")
@GET
@Produces("application/voicexml+xml")
public Response hangup( @QueryParam( "direction" ) String direction,
@QueryParam( "remoteID" ) String remoteID, @QueryParam( "localID" ) String localID,
@QueryParam( "startTime" ) String startTime, @QueryParam( "answerTime" ) String answerTime,
@QueryParam( "releaseTime" ) String releaseTime, @QueryParam( "notPickedUp" ) Boolean notPickedUp )
throws Exception
{
log.info("call hangup with:"+direction+":"+remoteID+":"+localID);
String sessionKey = AdapterAgent.ADAPTER_TYPE_BROADSOFT+"|"+localID+"|"+remoteID.split( "@" )[0];
Session session = Session.getSession(sessionKey);
log.info( String.format( "Session key: %s with remote: %s and local %s", sessionKey,
session.getRemoteAddress(), session.getLocalAddress() ) );
//update the session timings
session.setStartTimestamp( startTime );
session.setAnswerTimestamp( answerTime );
session.setReleaseTimestamp( releaseTime );
session.storeSession();
Question question = session.getQuestion();
String referredCalledId = null;
if ( question != null )
{
remoteID = session.getRemoteAddress();
referredCalledId = session.getExtras().get( "referredCalledId" );
}
else
{
question = Question.fromURL( session.getStartUrl(), session.getAdapterConfig().getConfigId(), remoteID,
localID );
}
if ( question != null )
{
HashMap<String, Object> timeMap = getTimeMap( startTime, answerTime, releaseTime );
if ( notPickedUp != null )
{
timeMap.put( "notPickedUp", notPickedUp );
}
timeMap.put( "referredCalledId", referredCalledId );
timeMap.put( "sessionKey", sessionKey );
question.event( "hangup", "Hangup", timeMap, remoteID );
session.setQuestion( null );
DDRWrapper.log( question, session, "Hangup" );
handleQuestion( null, session.getAdapterConfig().getConfigId(), remoteID, sessionKey );
}
else
{
log.info( "no question received" );
}
return Response.ok("").build();
}
/**
* used to trigger answered event unlike {@link VoiceXMLRESTProxy#answer(String, String, String, String, UriInfo)}
* @return
* @throws Exception
*/
public Response answered( String direction, String remoteID, String localID, String startTime,
String answerTime, String releaseTime ) throws Exception
{
log.info( "call answered with:" + direction + "_" + remoteID + "_" + localID );
String sessionKey = AdapterAgent.ADAPTER_TYPE_BROADSOFT+"|"+localID+"|"+remoteID.split( "@" )[0]; //ignore the @outbound suffix
Session session = Session.getSession(sessionKey);
//update the session timings
session.setStartTimestamp( startTime );
session.setAnswerTimestamp( answerTime );
session.setReleaseTimestamp( releaseTime );
session.storeSession();
Question question = session.getQuestion();
//for direction = transfer (redirect event), json should not be null
if ( question != null )
{
String responder = session.getRemoteAddress();
String referredCalledId = session.getExtras().get( "referredCalledId" );
HashMap<String, Object> timeMap = getTimeMap( startTime, answerTime, releaseTime );
timeMap.put( "referredCalledId", referredCalledId );
timeMap.put( "sessionKey", sessionKey );
question.event( "answered", "Answered", timeMap, responder );
DDRWrapper.log( question, session, "Answered" );
}
return Response.ok( "" ).build();
}
@Path("cc")
@POST
public Response receiveCCMessage(String xml) {
log.info("Received cc: "+xml);
try {
DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
DocumentBuilder db = dbf.newDocumentBuilder();
Document dom = db.parse(new ByteArrayInputStream(xml.getBytes("UTF-8")));
Node subscriberId = dom.getElementsByTagName("subscriberId").item(0);
AdapterConfig config = AdapterConfig.findAdapterConfigByUsername(subscriberId.getTextContent());
Node eventData = dom.getElementsByTagName("eventData").item(0);
// check if incall event
if(eventData.getChildNodes().getLength()>1) {
Node call = eventData.getChildNodes().item(1);
Node personality = null;
Node callState = null;
Node remoteParty = null;
Node releaseCause = null;
Node answerTime = null;
Node releaseTime = null;
Node startTime = null;
for ( int i = 0; i < call.getChildNodes().getLength(); i++ )
{
Node node = call.getChildNodes().item( i );
if ( node.getNodeName().equals( "personality" ) )
{
personality = node;
}
else if ( node.getNodeName().equals( "callState" ) )
{
callState = node;
}
else if ( node.getNodeName().equals( "remoteParty" ) )
{
remoteParty = node;
}
else if ( node.getNodeName().equals( "releaseCause" ) )
{
releaseCause = node;
}
else if ( node.getNodeName().equals( "startTime" ) )
{
startTime = node;
}
else if ( node.getNodeName().equals( "answerTime" ) )
{
answerTime = node;
}
else if ( node.getNodeName().equals( "releaseTime" ) )
{
releaseTime = node;
}
}
if(callState!=null && callState.getNodeName().equals("callState")) {
// Check if call
if ( callState.getTextContent().equals( "Released" )
|| callState.getTextContent().equals( "Active" ) )
{
String startTimeString = startTime != null ? startTime.getTextContent()
: null;
String answerTimeString = answerTime != null ? answerTime.getTextContent()
: null;
String releaseTimeString = releaseTime != null ? releaseTime.getTextContent()
: null;
// Check if a sip or network call
String type="";
String address="";
String fullAddress = "";
for(int i=0; i<remoteParty.getChildNodes().getLength();i++) {
Node rpChild = remoteParty.getChildNodes().item(i);
if(rpChild.getNodeName().equals("address")) {
address=rpChild.getTextContent();
}else if(rpChild.getNodeName().equals("callType")) {
type=rpChild.getTextContent();
}
}
fullAddress = new String(address);
// Check if session can be matched to call
if(type.equals("Network") || type.equals("Group") || type.equals("Unknown")) {
address = address.replace("tel:", "").replace("sip:", "");
log.info("Going to format phone number: "+address);
if(address.startsWith("+"))
{
address = PhoneNumberUtils.formatNumber(address, null);
}
String sessionKey = AdapterAgent.ADAPTER_TYPE_BROADSOFT+"|"+config.getMyAddress()+"|"+address.split( "@" )[0];
Session session = Session.getSession(sessionKey);
log.info("Session key: "+sessionKey);
String direction="inbound";
if ( personality.getTextContent().equals( "Originator" )
&& !address.contains( "outbound" ) )
{
//address += "@outbound";
direction = "transfer";
log.info( "Transfer detected????" );
//when the receiver hangs up, an active callstate is also triggered.
// but the releaseCause is also set to Temporarily Unavailable
if ( callState.getTextContent().equals( "Active" ) )
{
if ( releaseCause == null
|| ( releaseCause != null
&& !releaseCause.getTextContent().equalsIgnoreCase(
"Temporarily Unavailable" ) && !releaseCause.getTextContent()
.equalsIgnoreCase( "User Not Found" ) ) )
{
session.setDirection( direction );
session.setAnswerTimestamp( answerTimeString );
session.setReleaseTimestamp( releaseTimeString );
session.setStartTimestamp( startTimeString );
if ( session.getQuestion() == null )
{
Question questionFromIncomingCall = Session
.getQuestionFromDifferentSession( config.getConfigId(), "inbound",
"referredCalledId", session.getRemoteAddress() );
if ( questionFromIncomingCall != null )
{
session.setQuestion( questionFromIncomingCall );
session.storeSession();
}
}
session.storeSession();
answered( direction, address, config.getMyAddress(), startTimeString,
answerTimeString, releaseTimeString );
}
}
}
else if ( personality.getTextContent().equals( "Originator" ) )
{
log.info( "Outbound detected?????" );
direction = "outbound";
}
else if ( personality.getTextContent().equals( "Click-to-Dial" ) )
{
log.info( "CTD hangup detected?????" );
direction = "outbound";
//TODO: move this to internal mechanism to check if call is started!
if ( releaseCause.getTextContent().equals( "Server Failure" ) )
{
log.severe( "Need to restart the call!!!! ReleaseCause: "
+ releaseCause.getTextContent() );
int retry = session.getRetryCount() != null ? session.getRetryCount() : 0;
if ( retry < MAX_RETRIES )
{
Broadsoft bs = new Broadsoft( config );
String extSession = bs.startCall( address );
log.info( "Restarted call extSession: " + extSession );
retry++;
session.setRetryCount( retry );
}
else
{
// TODO: Send mail to support!!!
log.severe( "Retries failed!!!" );
}
}
else if ( releaseCause.getTextContent().equals( "Request Failure" ) )
{
log.severe( "Restart call?? ReleaseCause: "
+ releaseCause.getTextContent() );
int retry = session.getRetryCount() != null ? session.getRetryCount() : 0;
if ( retry < MAX_RETRIES )
{
Broadsoft bs = new Broadsoft( config );
String extSession = bs.startCall( address );
log.info( "Restarted call extSession: " + extSession );
retry++;
session.setRetryCount( retry );
}
else
{
// TODO: Send mail to support!!!
log.severe( "Retries failed!!!" );
}
}
}
//store or update the session
session.storeSession();
if ( callState.getTextContent().equals( "Released" ) )
{
boolean callReleased = false;
if ( session != null && direction != "transfer"
&& !personality.getTextContent().equals( "Terminator" )
&& fullAddress.startsWith( "tel:" ) )
{
log.info( "SESSSION FOUND!! SEND HANGUP!!!" );
this.hangup( direction, address, config.getMyAddress(), startTimeString,
answerTimeString, releaseTimeString, false );
callReleased = true;
}
else
{
if ( personality.getTextContent().equals( "Originator" )
&& fullAddress.startsWith( "sip:" ) )
{
log.info( "Probably a disconnect of a sip. not calling hangup event" );
}
else if ( personality.getTextContent().equals( "Originator" )
&& fullAddress.startsWith( "tel:" ) )
{
log.info( "Probably a disconnect of a redirect. call hangup event" );
hangup( direction, address, config.getMyAddress(), startTimeString,
answerTimeString, releaseTimeString, null );
callReleased = true;
}
else if ( personality.getTextContent().equals( "Terminator" ) )
{
log.info( "No session for this inbound?????" );
callReleased = true;
}
else
{
log.info( "What the hell was this?????" );
log.info( "Session already ended?" );
}
}
//update session with call timings
if ( callReleased )
{
session.setAnswerTimestamp( answerTimeString );
session.setStartTimestamp( startTimeString );
session.setReleaseTimestamp( releaseTimeString );
session.setDirection( direction );
session.setRemoteAddress( address );
session.setLocalAddress( config.getMyAddress() );
session.storeSession();
log.info( String.format( "Call ended. session updated: %s",
ServerUtils.serialize( session ) ) );
stopCostsAtHangup( session );
//flush the keys
session.drop();
}
}
} else {
log.warning("Can't handle hangup of type: "+type+" (yet)");
}
}
}
} else {
Node eventName = dom.getElementsByTagName("eventName").item(0);
if(eventName!=null && eventName.getTextContent().equals("SubscriptionTerminatedEvent")) {
Broadsoft bs = new Broadsoft(config);
bs.startSubscription();
log.info("Start a new dialog");
}
log.info("Received a subscription update!");
}
} catch (Exception e) {
log.severe("Something failed: "+ e.getMessage());
e.printStackTrace();
}
return Response.ok("").build();
}
/**
* endpoint for tts functionality
* @param textForSpeech actually text that has to be spoken
* @param language format "language-country" check the full link at {@link http://www.voicerss.org/api/documentation.aspx VoiceRSS}
* @param contentType file format
* @param speed -10 to 10
* @param format audio formats
* @param req
* @param resp
*/
@GET
@Path( "tts/{textForSpeech}" )
public Response redirectToSpeechEngine( @PathParam( "textForSpeech" ) String textForSpeech,
@QueryParam( "hl" ) @DefaultValue( "nl-nl" ) String language,
@QueryParam( "c" ) @DefaultValue( "wav" ) String contentType,
@QueryParam( "r" ) @DefaultValue( "0" ) String speed,
@QueryParam( "f" ) @DefaultValue( "8khz_8bit_mono" ) String format,
@Context HttpServletRequest req,
@Context HttpServletResponse resp ) throws IOException, URISyntaxException
{
String ttsURL = getTTSURL( textForSpeech, language, contentType, speed, format );
return Response.seeOther( new URI( ttsURL ) ).build();
}
/**
* simple endpoint for repeating a question based on its session and question id
* @param sessionKey
* @param questionId
* @return
* @throws Exception
*/
@GET
@Path( "retry" )
public Response retryQuestion( @QueryParam( "sessionKey" ) String sessionKey ) throws Exception
{
Session session = Session.getSession( sessionKey );
if(session.getQuestion() != null)
{
return handleQuestion( session.getQuestion(), session.getAdapterID(), session.getRemoteAddress(), sessionKey );
}
return Response.ok( "" ).build();
}
    // Simple value holder returned by formQuestion(): the audio prompts collected while
    // walking the question chain, plus the question the traversal ended on (may be null).
    public class Return {
        // prompts gathered from question/answer texts, in playback order
        ArrayList<String> prompts;
        // the question to render next; null when the dialog chain is exhausted
        Question question;
        public Return(ArrayList<String> prompts, Question question) {
            this.prompts = prompts;
            this.question = question;
        }
    }
public Return formQuestion(Question question, String adapterID,String address) {
ArrayList<String> prompts = new ArrayList<String>();
for (int count = 0; count<=LOOP_DETECTION; count++){
if (question == null) break;
log.info("Going to form question of type: "+question.getType());
if ( question.getType() == null )
{
question = null;
break;
}
String preferred_language = question.getPreferred_language();
question.setPreferred_language(preferred_language);
String qText = question.getQuestion_text();
if(qText!=null && !qText.equals("")) prompts.add(qText);
if (question.getType().equalsIgnoreCase("closed")) {
for (Answer ans : question.getAnswers()) {
String answer = ans.getAnswer_text();
if (answer != null && !answer.equals("")) prompts.add(answer);
}
break; //Jump from forloop
} else if (question.getType().equalsIgnoreCase("comment")) {
//question = question.answer(null, adapterID, null, null);
break;
} else if (question.getType().equalsIgnoreCase("referral")) {
if(!question.getUrl().startsWith("tel:")) {
question = Question.fromURL(question.getUrl(),adapterID,address);
//question = question.answer(null, null, null);
// break;
} else {
// Break out because we are going to reconnect
break;
}
} else {
break; //Jump from forloop (open questions, etc.)
}
}
return new Return(prompts, question);
}
protected String renderComment(Question question,ArrayList<String> prompts, String sessionKey){
String handleTimeoutURL = "timeout";
String handleExceptionURL = "exception";
String redirectTimeoutProperty = question.getMediaPropertyValue( MediumType.BROADSOFT, MediaPropertyKey.TIMEOUT );
//assign a default timeout if one is not specified
String redirectTimeout = redirectTimeoutProperty != null ? redirectTimeoutProperty : "40s";
if(!redirectTimeout.endsWith("s"))
{
log.warning("Redirect timeout must be end with 's'. E.g. 40s. Found: "+ redirectTimeout);
redirectTimeout += "s";
}
String redirectTypeProperty = question.getMediaPropertyValue( MediumType.BROADSOFT, MediaPropertyKey.TYPE );
String redirectType = redirectTypeProperty != null ? redirectTypeProperty.toLowerCase() : "bridge";
if(!redirectType.equals("blind") && !redirectType.equals("bridge"))
{
log.warning("Redirect must be blind or bridge. Found: "+ redirectTimeout);
redirectTypeProperty = "bridge";
}
StringWriter sw = new StringWriter();
try {
XMLOutputter outputter = new XMLOutputter(sw, "UTF-8");
outputter.declaration();
outputter.startTag("vxml");
outputter.attribute("version", "2.1");
outputter.attribute("xmlns", "http://www.w3.org/2001/vxml");
outputter.startTag("form");
if (question != null && question.getType().equalsIgnoreCase("referral")){
outputter.startTag("transfer");
outputter.attribute("name", "thisCall");
outputter.attribute("dest", question.getUrl());
if(redirectType.equals("bridge")) {
outputter.attribute("bridge","true");
} else {
outputter.attribute("bridge","false");
}
outputter.attribute("connecttimeout",redirectTimeout);
for (String prompt : prompts){
outputter.startTag("prompt");
outputter.startTag("audio");
outputter.attribute("src", prompt);
outputter.endTag();
outputter.endTag();
}
outputter.startTag("filled");
outputter.startTag("if");
outputter.attribute("cond", "thisCall=='noanswer'");
outputter.startTag("goto");
outputter.attribute("next", handleTimeoutURL+"?questionId="+question.getQuestion_id()+"&sessionKey="+sessionKey);
outputter.endTag();
outputter.startTag("elseif");
outputter.attribute("cond", "thisCall=='busy' || thisCall=='network_busy'");
outputter.endTag();
outputter.startTag("goto");
outputter.attribute("next", handleExceptionURL+"?questionId="+question.getQuestion_id()+"&sessionKey="+sessionKey);
outputter.endTag();
outputter.startTag("else");
outputter.endTag();
outputter.startTag("goto");
outputter.attribute("next", getAnswerUrl()+"?questionId="+question.getQuestion_id()+"&sessionKey="+sessionKey);
outputter.endTag();
outputter.endTag();
outputter.endTag();
outputter.endTag();
} else {
outputter.startTag("block");
for (String prompt : prompts){
outputter.startTag("prompt");
outputter.startTag("audio");
outputter.attribute("src", prompt);
outputter.endTag();
outputter.endTag();
}
if(question!=null) {
outputter.startTag("goto");
outputter.attribute("next", getAnswerUrl()+"?questionId="+question.getQuestion_id()+"&sessionKey="+sessionKey);
outputter.endTag();
}
outputter.endTag();
}
outputter.endTag();
outputter.endTag();
outputter.endDocument();
} catch (Exception e) {
log.severe("Exception in creating question XML: "+ e.toString());
}
return sw.toString();
}
    // Renders the VoiceXML for a closed (multiple-choice) question as a DTMF <menu>:
    // each answer becomes a <choice> keyed 1-9, 0 (=10th), * (=11th), # (=12th), or an
    // explicit "dtmfKey://" override; noinput routes to the timeout endpoint and nomatch
    // to the answer endpoint with answerId=-1.
    private String renderClosedQuestion(Question question,ArrayList<String> prompts,String sessionKey){
        ArrayList<Answer> answers=question.getAnswers();
        String handleTimeoutURL = "timeout";
        StringWriter sw = new StringWriter();
        try {
            XMLOutputter outputter = new XMLOutputter(sw, "UTF-8");
            outputter.declaration();
            outputter.startTag("vxml");
            outputter.attribute("version", "2.1");
            outputter.attribute("xmlns", "http://www.w3.org/2001/vxml");
            //remove the termchar operator when # is found in the answer
            // (also when there are more than 11 answers, since # is then a valid choice key)
            for ( Answer answer : answers )
            {
                if ( answers.size() > 11
                    || ( answer.getAnswer_text() != null && answer.getAnswer_text().contains( "dtmfKey://" ) ) )
                {
                    outputter.startTag( "property" );
                    outputter.attribute( "name", "termchar" );
                    outputter.attribute( "value", "" );
                    outputter.endTag();
                    break;
                }
            }
            String noAnswerTimeout = question.getMediaPropertyValue( MediumType.BROADSOFT, MediaPropertyKey.TIMEOUT );
            //assign a default timeout if one is not specified
            noAnswerTimeout = noAnswerTimeout != null ? noAnswerTimeout : "10s";
            if(!noAnswerTimeout.endsWith("s"))
            {
                log.warning("No answer timeout must end with 's'. E.g. 10s. Found: "+ noAnswerTimeout);
                noAnswerTimeout += "s";
            }
            outputter.startTag( "property" );
            outputter.attribute( "name", "timeout" );
            outputter.attribute( "value", noAnswerTimeout );
            outputter.endTag();
            outputter.startTag("menu");
            //play the question (and answer) prompts before offering the choices
            for (String prompt : prompts){
                outputter.startTag("prompt");
                outputter.startTag("audio");
                outputter.attribute("src", prompt);
                outputter.endTag();
                outputter.endTag();
            }
            for ( int cnt = 0; cnt < answers.size(); cnt++ )
            {
                //map the answer position to a DTMF key unless an explicit dtmfKey:// is given
                Integer dtmf = cnt + 1;
                String dtmfValue = dtmf.toString();
                if ( answers.get( cnt ).getAnswer_text() != null
                    && answers.get( cnt ).getAnswer_text().startsWith( "dtmfKey://" ) )
                {
                    dtmfValue = answers.get( cnt ).getAnswer_text().replace( "dtmfKey://", "" ).trim();
                }
                else
                {
                    if ( dtmf == 10 )
                    { // 10 translates into 0
                        dtmfValue = "0";
                    }
                    else if ( dtmf == 11 )
                    {
                        dtmfValue = "*";
                    }
                    else if ( dtmf == 12 )
                    {
                        dtmfValue = "#";
                    }
                    else if ( dtmf > 12 )
                    {
                        //no keys left on the keypad beyond #; remaining answers are unreachable
                        break;
                    }
                }
                outputter.startTag( "choice" );
                outputter.attribute( "dtmf", dtmfValue );
                outputter.attribute( "next", getAnswerUrl() + "?questionId=" + question.getQuestion_id()
                    + "&answerId=" + answers.get( cnt ).getAnswer_id() + "&answerInput=" + URLEncoder.encode( dtmfValue, "UTF-8" ) + "&sessionKey="
                    + sessionKey );
                outputter.endTag();
            }
            //no input at all: hand off to the timeout endpoint
            outputter.startTag("noinput");
            outputter.startTag("goto");
            outputter.attribute("next", handleTimeoutURL+"?questionId="+question.getQuestion_id()+"&sessionKey="+sessionKey);
            outputter.endTag();
            outputter.endTag();
            //unmatched key: report answerId=-1 to the answer endpoint
            outputter.startTag("nomatch");
            outputter.startTag("goto");
            outputter.attribute("next", getAnswerUrl()+"?questionId="+question.getQuestion_id()+"&answerId=-1&sessionKey="+sessionKey);
            outputter.endTag();
            outputter.endTag();
            outputter.endTag();
            outputter.endTag();
            outputter.endDocument();
        } catch (Exception e) {
            log.severe("Exception in creating question XML: "+ e.toString());
        }
        return sw.toString();
    }
protected String renderOpenQuestion(Question question,ArrayList<String> prompts,String sessionKey)
{
String handleTimeoutURL = "/vxml/timeout";
StringWriter sw = new StringWriter();
try {
XMLOutputter outputter = new XMLOutputter(sw, "UTF-8");
outputter.declaration();
outputter.startTag("vxml");
outputter.attribute("version", "2.1");
outputter.attribute("xmlns", "http://www.w3.org/2001/vxml");
// Check if media property type equals audio
// if so record audio message, if not record dtmf input
String typeProperty = question.getMediaPropertyValue( MediumType.BROADSOFT, MediaPropertyKey.TYPE );
if(typeProperty!=null && typeProperty.equalsIgnoreCase("audio"))
{
renderVoiceMailQuestion( question, prompts, sessionKey, outputter );
}
else
{
//see if a dtmf length is defined in the question
String dtmfMinLength = question.getMediaPropertyValue( MediumType.BROADSOFT, MediaPropertyKey.ANSWER_INPUT_MIN_LENGTH );
dtmfMinLength = dtmfMinLength != null ? dtmfMinLength : "";
String dtmfMaxLength = question.getMediaPropertyValue( MediumType.BROADSOFT, MediaPropertyKey.ANSWER_INPUT_MAX_LENGTH );
dtmfMaxLength = dtmfMaxLength != null ? dtmfMaxLength : "";
String noAnswerTimeout = question.getMediaPropertyValue( MediumType.BROADSOFT, MediaPropertyKey.TIMEOUT );
String retryLimit = question.getMediaPropertyValue( MediumType.BROADSOFT, MediaPropertyKey.RETRY_LIMIT );
//assign a default timeout if one is not specified
noAnswerTimeout = noAnswerTimeout != null ? noAnswerTimeout : "5s";
if(!noAnswerTimeout.endsWith("s"))
{
log.warning("No answer timeout must end with 's'. E.g. 10s. Found: "+ noAnswerTimeout);
noAnswerTimeout += "s";
}
outputter.startTag("var");
outputter.attribute("name","answerInput");
outputter.endTag();
outputter.startTag("var");
outputter.attribute("name","questionId");
outputter.attribute("expr", "'"+question.getQuestion_id()+"'");
outputter.endTag();
outputter.startTag("var");
outputter.attribute("name","sessionKey");
outputter.attribute("expr", "'"+sessionKey+"'");
outputter.endTag();
outputter.startTag("form");
outputter.startTag( "property" );
outputter.attribute( "name", "timeout" );
outputter.attribute( "value", noAnswerTimeout );
outputter.endTag();
outputter.startTag("field");
outputter.attribute("name", "answer");
outputter.startTag("grammar");
outputter.attribute("mode", "dtmf");
outputter.attribute( "src", DTMFGRAMMAR + "?minlength=" + dtmfMinLength
+ "&maxlength=" + dtmfMaxLength );
outputter.attribute("type", "application/srgs+xml");
outputter.endTag();
for (String prompt: prompts){
outputter.startTag("prompt");
outputter.startTag("audio");
outputter.attribute("src", prompt);
outputter.endTag();
outputter.endTag();
}
outputter.startTag( "noinput" );
outputter.startTag( "goto" );
if ( retryLimit == null )
{
outputter.attribute( "next", handleTimeoutURL +
"?questionId=" + question.getQuestion_id() + "&sessionKey=" + sessionKey );
}
else
{
Integer retryCount = Question.getRetryCount( sessionKey );
if ( retryCount < Integer.parseInt( retryLimit ) )
{
outputter.attribute( "next", "/retry" + "?questionId=" + question.getQuestion_id()
+ "&sessionKey=" + sessionKey );
Question.updateRetryCount( sessionKey );
}
else
{
Question.flushRetryCount( sessionKey );
}
}
outputter.endTag();
outputter.endTag();
outputter.startTag("filled");
outputter.startTag("assign");
outputter.attribute("name", "answerInput");
outputter.attribute("expr", "answer$.utterance.replace(' ','','g')");
outputter.endTag();
outputter.startTag("submit");
outputter.attribute("next", getAnswerUrl());
outputter.attribute("namelist","answerInput questionId sessionKey");
outputter.endTag();
outputter.startTag("clear");
outputter.attribute("namelist", "answerInput answer");
outputter.endTag();
outputter.endTag();
outputter.endTag();
outputter.endTag();
}
outputter.endTag();
outputter.endDocument();
} catch (Exception e) {
log.severe("Exception in creating open question XML: "+ e.toString());
}
return sw.toString();
}
    /** Renders/updates the XML for recording an audio answer (voice mail) and posts the
     * recorded file to the blob store; on success the answer endpoint is called with the
     * download URL of the stored audio file as the answer input.
     * @param question the open question configured with media property TYPE "audio"
     * @param prompts audio prompt URLs to play before (and on noinput during) recording
     * @param sessionKey key of the session the callbacks belong to
     * @param outputter XML writer the &lt;form&gt;/&lt;subdialog&gt; elements are appended to
     * @throws IOException
     * @throws UnsupportedEncodingException
     */
    protected void renderVoiceMailQuestion( Question question, ArrayList<String> prompts, String sessionKey,
        XMLOutputter outputter ) throws IOException, UnsupportedEncodingException
    {
        //assign a default voice mail length if one is not specified
        String voiceMessageLengthProperty = question.getMediaPropertyValue( MediumType.BROADSOFT, MediaPropertyKey.VOICE_MESSAGE_LENGTH );
        voiceMessageLengthProperty = voiceMessageLengthProperty != null ? voiceMessageLengthProperty : "15s";
        if(!voiceMessageLengthProperty.endsWith("s"))
        {
            log.warning("Voicemail length must be end with 's'. E.g. 40s. Found: "+ voiceMessageLengthProperty);
            voiceMessageLengthProperty += "s";
        }
        //whether a DTMF key press terminates the recording (defaults on)
        String dtmfTerm = question.getMediaPropertyValue( MediumType.BROADSOFT, MediaPropertyKey.DTMF_TERMINATE );
        dtmfTerm = dtmfTerm != null ? dtmfTerm : "true";
        //whether a beep is played before recording starts (defaults on)
        String voiceMailBeep = question.getMediaPropertyValue( MediumType.BROADSOFT, MediaPropertyKey.VOICE_MESSAGE_BEEP );
        voiceMailBeep = voiceMailBeep != null ? voiceMailBeep : "true";
        // Fetch the upload url
        //String host = this.host.replace("rest/", "");
        String uuid = UUID.randomUUID().toString();
        String filename = uuid+".wav";
        //the public URL the stored recording will be downloadable from
        String storedAudiofile = host+"download/"+filename;
        MyBlobStore store = new MyBlobStore();
        String uploadURL = store.createUploadUrl(filename, "/dialoghandler/rest/download/audio.vxml");
        outputter.startTag("form");
        outputter.attribute("id", "ComposeMessage");
        outputter.startTag("record");
        outputter.attribute("name", "file");
        outputter.attribute("beep", voiceMailBeep);
        outputter.attribute("maxtime", voiceMessageLengthProperty);
        outputter.attribute("dtmfterm", dtmfTerm);
        //outputter.attribute("finalsilence", "3s");
        for (String prompt : prompts){
            outputter.startTag("prompt");
            outputter.attribute("timeout", "5s");
            outputter.startTag("audio");
            outputter.attribute("src", prompt);
            outputter.endTag();
            outputter.endTag();
        }
        //on noinput, replay the prompts instead of aborting
        outputter.startTag("noinput");
        for (String prompt : prompts){
            outputter.startTag("prompt");
            outputter.startTag("audio");
            outputter.attribute("src", prompt);
            outputter.endTag();
            outputter.endTag();
        }
        /*outputter.startTag("goto");
        outputter.attribute("next", handleTimeoutURL+"?question_id="+question.getQuestion_id()+"&sessionKey="+sessionKey);
        outputter.endTag();*/
        outputter.endTag();
        outputter.endTag();
        //upload the recorded file to the blob store via a subdialog
        outputter.startTag("subdialog");
        outputter.attribute("name", "saveWav");
        outputter.attribute("src", uploadURL);
        outputter.attribute("namelist", "file");
        outputter.attribute("method", "post");
        outputter.attribute("enctype", "multipart/form-data");
        outputter.startTag("filled");
        outputter.startTag("if");
        //NOTE(review): single '=' is ECMAScript assignment, not comparison — this cond is
        //always truthy; confirm whether '==' was intended before changing platform behavior
        outputter.attribute("cond", "saveWav.response='SUCCESS'");
        outputter.startTag("goto");
        outputter.attribute("next", getAnswerUrl()+"?questionId="+question.getQuestion_id()+"&sessionKey="+sessionKey+"&answerInput="+URLEncoder.encode(storedAudiofile, "UTF-8"));
        outputter.endTag();
        outputter.startTag("else");
        outputter.endTag();
        //upload failed: replay the prompts so the caller can try again
        for (String prompt : prompts){
            outputter.startTag("prompt");
            outputter.startTag("audio");
            outputter.attribute("src", prompt);
            outputter.endTag();
            outputter.endTag();
        }
        outputter.endTag();
        outputter.endTag();
        outputter.endTag();
        outputter.endTag();
    }
private Response handleQuestion(Question question, String adapterID,String remoteID,String sessionKey)
{
String result="<?xml version=\"1.0\" encoding=\"UTF-8\"?><vxml version=\"2.1\" xmlns=\"http://www.w3.org/2001/vxml\"><form><block><exit/></block></form></vxml>";
Return res = formQuestion(question,adapterID,remoteID);
if(question !=null && !question.getType().equalsIgnoreCase("comment"))
question = res.question;
log.info( "question formed at handleQuestion is: "+ ServerUtils.serializeWithoutException( question ));
log.info( "prompts formed at handleQuestion is: "+ res.prompts );
if ( question != null )
{
question.generateIds();
Session session = Session.getSession( sessionKey );
session.setQuestion( question );
session.setRemoteAddress( remoteID );
//convert all text prompts to speech
if(res.prompts != null)
{
String language = question.getPreferred_language().contains( "-" ) ? question.getPreferred_language()
: "nl-nl";
String ttsSpeedProperty = question.getMediaPropertyValue( MediumType.BROADSOFT, MediaPropertyKey.TSS_SPEED );
ttsSpeedProperty = ttsSpeedProperty != null ? ttsSpeedProperty : "0";
ArrayList<String> promptsCopy = new ArrayList<String>();
for ( String prompt : res.prompts )
{
if ( !prompt.startsWith( "dtmfKey://" ) )
{
if ( !prompt.endsWith( ".wav" ) )
{
promptsCopy.add( getTTSURL( prompt, language, "wav", ttsSpeedProperty, null ) );
}
else
{
promptsCopy.add( prompt );
}
}
}
res.prompts = promptsCopy;
}
if ( question.getType().equalsIgnoreCase( "closed" ) )
{
result = renderClosedQuestion( question, res.prompts, sessionKey );
}
else if ( question.getType().equalsIgnoreCase( "open" ) )
{
result = renderOpenQuestion( question, res.prompts, sessionKey );
}
else if ( question.getType().equalsIgnoreCase( "referral" ) )
{
if ( question.getUrl().startsWith( "tel:" ) )
{
// added for release0.4.2 to store the question in the session,
//for triggering an answered event
log.info( String.format( "current session key before referral is: %s and remoteId %s", sessionKey,
remoteID ) );
try
{
String redirectedId = PhoneNumberUtils.formatNumber( question.getUrl().replace( "tel:", "" ),
null );
//update url with formatted redirecteId. RFC3966 returns format tel:<blabla> as expected
question.setUrl( PhoneNumberUtils.formatNumber( redirectedId, PhoneNumberFormat.RFC3966 ) );
//store the remoteId as its lost while trying to trigger the answered event
HashMap<String, String> extras = new HashMap<String, String>();
extras.put( "referredCalledId", redirectedId );
session.getExtras().putAll( extras );
session.setQuestion( question );
session.setRemoteAddress( remoteID );
//create a new ddr record and session to catch the redirect
Session referralSession = Session.getSession( AdapterAgent.ADAPTER_TYPE_BROADSOFT + "|"
+ session.getLocalAddress() + "|" + redirectedId );
if ( session.getDirection() != null
&& ( session.getDirection().equals( "outbound" ) || session.getDirection().equals(
"transfer" ) ) )
{
if ( referralSession != null && referralSession.getDDRRecordId() == null )
{
DDRRecord ddrRecord = DDRUtils.createDDRRecordOnOutgoingCommunication(
AdapterConfig.getAdapterConfig( adapterID ), redirectedId, 1 );
referralSession.setDDRRecordId( ddrRecord.getId() );
referralSession.setDirection( session.getDirection() );
}
}
else
//if its an inbound call, create a new ddr with DDRTypeCategory: OUTGOING_COMMUNICATION_COST
{
DDRRecord ddrRecordForIncomingReferral = DDRUtils.createDDRRecordOnOutgoingCommunication(
session.getAdapterConfig(), redirectedId, 1 );
referralSession.setDDRRecordId( ddrRecordForIncomingReferral.getId() );
//save the referred ddrRecord id in the session
session.getExtras().put( "referredDDRRecordId", ddrRecordForIncomingReferral.getId() );
}
referralSession.storeSession();
session.storeSession();
}
catch ( Exception e )
{
log.severe( String.format( "Phonenumber: %s is not valid",
question.getUrl().replace( "tel:", "" ) ) );
}
result = renderComment( question, res.prompts, sessionKey );
}
}
else if ( res.prompts.size() > 0 )
{
result = renderComment( question, res.prompts, sessionKey );
}
}
else if ( res.prompts.size() > 0 )
{
result = renderComment( null, res.prompts, sessionKey );
}
else
{
log.info( "Going to hangup? So clear Session?" );
}
log.info("Sending xml: "+result);
return Response.ok(result).build();
}
/**
 * Returns the relative path of the answer callback endpoint, used when
 * rendering question documents so the platform knows where to post input.
 *
 * @return the relative endpoint path for the answer handler
 */
protected String getAnswerUrl()
{
    return "answer";
}
/**
 * Bundles the three call timing values into a single map, keyed by
 * "startTime", "answerTime" and "releaseTime".
 *
 * @param startTime timestamp at which the call was set up
 * @param answerTime timestamp at which the call was answered (may be null)
 * @param releaseTime timestamp at which the call was released
 * @return map containing all three timestamps under their fixed keys
 */
private HashMap<String, Object> getTimeMap( String startTime, String answerTime, String releaseTime )
{
    HashMap<String, Object> timings = new HashMap<String, Object>();
    timings.put( "startTime", startTime );
    timings.put( "answerTime", answerTime );
    timings.put( "releaseTime", releaseTime );
    return timings;
}
/**
 * Builds the VoiceRSS text-to-speech URL for the given text.
 *
 * NOTE(review): the VoiceRSS API key is hard-coded in this URL; it should be
 * moved to configuration rather than live in source.
 *
 * @param textForSpeech the text to be spoken (an optional "text://" prefix is stripped)
 * @param language language-country code, e.g. "nl-nl"
 * @param contentType audio container format; defaults to "wav" when empty
 * @param speed speech rate; defaults to "0" when empty
 * @param format audio encoding; defaults to "8khz_8bit_mono" when empty
 * @return the fully assembled VoiceRSS request URL
 */
private String getTTSURL( String textForSpeech, String language, String contentType, String speed, String format )
{
    if ( speed == null || speed.isEmpty() )
    {
        speed = "0";
    }
    if ( contentType == null || contentType.isEmpty() )
    {
        contentType = "wav";
    }
    if ( format == null || format.isEmpty() )
    {
        format = "8khz_8bit_mono";
    }
    String encodedText = textForSpeech;
    try
    {
        //URLEncoder produces '+' for spaces, but the TTS service expects '%20'
        encodedText = URLEncoder.encode( textForSpeech.replace( "text://", "" ), "UTF-8" ).replace( "+", "%20" );
    }
    catch ( UnsupportedEncodingException e )
    {
        e.printStackTrace();
        log.severe( e.getLocalizedMessage() );
    }
    return "http://api.voicerss.org/?key=afafc70fde4b4b32a730842e6fcf0c62&src=" + encodedText + "&hl=" + language
        + "&c=" + contentType + "&r=" + speed + "&f=" + format + "&type=.wav";
}
/**
 * Finalizes the cost accounting for a call at hangup: closes the DDR record
 * with the call timings stored on the session and publishes the resulting
 * charge to the accounting queue.
 *
 * Charges are only applied when the session carries a start timestamp, a
 * release timestamp and a direction; incomplete sessions are skipped silently.
 *
 * @param session the call session whose charges must be finalized
 */
private void stopCostsAtHangup( Session session )
{
    AdapterConfig adapterConfig = session.getAdapterConfig();
    //stop costs
    try
    {
        log.info( String.format( "stopping charges for session: %s", ServerUtils.serialize( session ) ) );
        //only charge calls that actually started and ended; answerTimestamp may be null (not picked up)
        if ( session.getStartTimestamp() != null && session.getReleaseTimestamp() != null
            && session.getDirection() != null )
        {
            DDRRecord ddrRecord = DDRUtils.updateDDRRecordOnCallStops( session.getDDRRecordId(),
                adapterConfig.getOwner(), Long.parseLong( session.getStartTimestamp() ),
                session.getAnswerTimestamp() != null ? Long.parseLong( session.getAnswerTimestamp() ) : null,
                Long.parseLong( session.getReleaseTimestamp() ) );
            //publish charges
            Double totalCost = DDRUtils.calculateCommunicationDDRCost( ddrRecord, true );
            //            //check if it was an incoming redirection
            //            if(ddrRecord != null && session.getExtras().containsKey( "referredDDRRecordId" ))
            //            {
            //                DDRRecord refferedDdrRecord = DDRRecord.getDDRRecord(
            //                    session.getExtras().get( "referredDDRRecordId" ), session.getAccountId() );
            //                if ( refferedDdrRecord != null && session.getAnswerTimestamp() != null)
            //                {
            ////                    refferedDdrRecord.setStart( ddrRecord.getStart() );
            ////                    refferedDdrRecord.setStatus( ddrRecord.getStatus() );
            ////                    refferedDdrRecord.setDuration( ddrRecord.getDuration() );
            ////                    refferedDdrRecord.createOrUpdate();
            //                    totalCost += DDRUtils.calculateCommunicationDDRCost( refferedDdrRecord, false );
            //                }
            //            }
            DDRUtils.publishDDREntryToQueue( adapterConfig.getOwner(), totalCost );
        }
    }
    catch ( Exception e )
    {
        //charging failures are logged (also to the dialog log) but never propagated to the caller
        String errorMessage = String.format(
            "Applying charges failed. Direction: %s for adapterId: %s with address: %s remoteId: %s and localId: %s \n Error: %s",
            session.getDirection(), session.getAdapterID(), adapterConfig.getMyAddress(),
            session.getRemoteAddress(), session.getLocalAddress(), e.getLocalizedMessage() );
        log.severe( errorMessage );
        dialogLog.severe( session.getAdapterConfig().getConfigId(), errorMessage );
    }
}
}
| dialoghandler/src/main/java/com/almende/dialog/adapter/VoiceXMLRESTProxy.java | package com.almende.dialog.adapter;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.StringWriter;
import java.io.UnsupportedEncodingException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URLDecoder;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
import java.util.logging.Logger;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.znerd.xmlenc.XMLOutputter;
import com.almende.dialog.DDRWrapper;
import com.almende.dialog.accounts.AdapterConfig;
import com.almende.dialog.adapter.tools.Broadsoft;
import com.almende.dialog.agent.AdapterAgent;
import com.almende.dialog.model.Answer;
import com.almende.dialog.model.MediaProperty.MediaPropertyKey;
import com.almende.dialog.model.MediaProperty.MediumType;
import com.almende.dialog.model.Question;
import com.almende.dialog.model.Session;
import com.almende.dialog.model.ddr.DDRRecord;
import com.almende.dialog.util.DDRUtils;
import com.almende.dialog.util.PhoneNumberUtils;
import com.almende.dialog.util.ServerUtils;
import com.almende.util.myBlobstore.MyBlobStore;
import com.google.i18n.phonenumbers.PhoneNumberUtil.PhoneNumberFormat;
@Path("/vxml/")
public class VoiceXMLRESTProxy {
protected static final Logger log = Logger.getLogger(VoiceXMLRESTProxy.class.getName());
protected static final com.almende.dialog.Logger dialogLog = new com.almende.dialog.Logger();
private static final int LOOP_DETECTION=10;
private static final String DTMFGRAMMAR="dtmf2hash";
private static final int MAX_RETRIES=1;
protected String TIMEOUT_URL="timeout";
protected String EXCEPTION_URL="exception";
private String host = "";
/**
 * Terminates the external Broadsoft call belonging to the given session.
 * Does nothing when the session carries no adapter configuration.
 *
 * @param session the session whose external call must be ended
 */
public static void killSession( Session session )
{
    AdapterConfig adapterConfig = session.getAdapterConfig();
    if ( adapterConfig == null )
    {
        return;
    }
    new Broadsoft( adapterConfig ).endCall( session.getExternalSession() );
}
/**
 * Starts a single outbound call to the given address.
 *
 * @deprecated use the broadcast calling mechanism instead:
 *             {@link VoiceXMLRESTProxy#dial(Map, String, String, AdapterConfig) dial}
 * @param address the remote phone number to call
 * @param url the url from which the first question is fetched
 * @param config the adapter configuration used to place the call
 * @return the session key of the started call, or an empty string on failure
 */
@Deprecated
public static String dial(String address, String url, AdapterConfig config)
{
    String formattedAddress;
    try
    {
        formattedAddress = PhoneNumberUtils.formatNumber( address, PhoneNumberFormat.E164 );
    }
    catch ( Exception e )
    {
        log.severe( String.format( "Phonenumber: %s is not valid", address ) );
        return "";
    }
    String sessionKey = AdapterAgent.ADAPTER_TYPE_BROADSOFT + "|" + config.getMyAddress() + "|" + formattedAddress;
    Session session = Session.getSession( sessionKey );
    if ( session == null )
    {
        log.severe( "VoiceXMLRESTProxy couldn't start new outbound Dialog, adapterConfig not found? " + sessionKey );
        return "";
    }
    //initialize the session for this outbound dialog
    session.killed = false;
    session.setStartUrl( url );
    session.setDirection( "outbound" );
    session.setRemoteAddress( formattedAddress );
    session.setType( AdapterAgent.ADAPTER_TYPE_BROADSOFT );
    session.setTrackingToken( UUID.randomUUID().toString() );
    Question question = Question.fromURL( url, config.getConfigId(), formattedAddress, config.getMyAddress() );
    session.setQuestion( question );
    session.storeSession();
    DDRWrapper.log( url, session.getTrackingToken(), session, "Dial", config );
    //subscribe for call-state events, then place the call
    Broadsoft broadsoft = new Broadsoft( config );
    broadsoft.startSubscription();
    String externalSession = broadsoft.startCall( formattedAddress + "@outbound" );
    session.setExternalSession( externalSession );
    session.storeSession();
    return sessionKey;
}
/**
 * Initiates a call to all the numbers in the addressNameMap and returns a
 * map of &lt;address, sessionKey&gt; for every call that was started.
 *
 * @param addressNameMap map of phone number to display name; when it holds a
 *        single entry that address is also passed to the question fetch
 * @param url the url from which the question to be asked is fetched
 * @param senderName display name of the caller (currently unused in the body)
 * @param config the adapter configuration used to place the calls
 * @return map of formatted address to session key, or null when a session is missing
 * @throws Exception when fetching or storing the dialog state fails
 */
public static HashMap<String, String> dial( Map<String, String> addressNameMap, String url, String senderName, AdapterConfig config )
throws Exception
{
    String sessionPrefix = AdapterAgent.ADAPTER_TYPE_BROADSOFT+"|"+config.getMyAddress()+"|" ;
    HashMap<String, String> resultSessionMap = new HashMap<String, String>();
    // If it is a broadcast don't provide the remote address because it is deceiving.
    String loadAddress = "";
    if(addressNameMap.size()==1)
        loadAddress = addressNameMap.keySet().iterator().next();
    //fetch the question
    Question question = Question.fromURL( url, config.getConfigId(), loadAddress, config.getMyAddress() );
    for ( String address : addressNameMap.keySet() )
    {
        try
        {
            String formattedAddress = PhoneNumberUtils.formatNumber( address, PhoneNumberFormat.E164 );
            String sessionKey = sessionPrefix + formattedAddress;
            Session session = Session.getSession( sessionKey );
            if ( session == null )
            {
                //NOTE(review): aborts the whole broadcast when one session is missing,
                //discarding calls already placed in earlier iterations — confirm intended
                log.severe( "VoiceXMLRESTProxy couldn't start new outbound Dialog, adapterConfig not found? "
                    + sessionKey );
                return null;
            }
            //initialize the per-call session state
            session.killed=false;
            session.setStartUrl(url);
            session.setDirection("outbound");
            session.setRemoteAddress(formattedAddress);
            session.setType(AdapterAgent.ADAPTER_TYPE_BROADSOFT);
            session.setTrackingToken(UUID.randomUUID().toString());
            session.setAdapterID(config.getConfigId());
            session.setQuestion( question );
            session.storeSession();
            DDRWrapper.log(url,session.getTrackingToken(),session,"Dial",config);
            Broadsoft bs = new Broadsoft( config );
            bs.startSubscription();
            //skip the real call setup when running under unit tests
            String extSession = "";
            if ( !ServerUtils.isInUnitTestingEnvironment() )
            {
                extSession = bs.startCall( formattedAddress + "@outbound" );
            }
            session.setExternalSession( extSession );
            session.storeSession();
            resultSessionMap.put( formattedAddress, sessionKey );
        }
        catch ( Exception e )
        {
            //NOTE(review): this catch also swallows session-store and call-setup failures,
            //not just number-formatting errors, so this message can be misleading
            log.severe( String.format( "Phonenumber: %s is not valid", address ) );
        }
    }
    return resultSessionMap;
}
/**
 * Lists the identifiers of all calls currently active on the given adapter.
 *
 * @param config the adapter configuration to query
 * @return the active call ids as reported by Broadsoft
 */
public static ArrayList<String> getActiveCalls( AdapterConfig config )
{
    return new Broadsoft( config ).getActiveCalls();
}
/**
 * Retrieves detailed information about all calls currently active on the
 * given adapter.
 *
 * @param config the adapter configuration to query
 * @return the active call details as reported by Broadsoft
 */
public static ArrayList<String> getActiveCallsInfo( AdapterConfig config )
{
    return new Broadsoft( config ).getActiveCallsInfo();
}
/**
 * Terminates every call currently active on the given adapter.
 *
 * @param config the adapter configuration whose calls must be ended
 * @return true when Broadsoft reports the kill succeeded
 */
public static boolean killActiveCalls( AdapterConfig config )
{
    return new Broadsoft( config ).killActiveCalls();
}
/**
 * Serves the SRGS DTMF grammar used by the VoiceXML platform: any sequence of
 * digits (and '*') of the requested length, terminated by '#'.
 *
 * @param minLength minimum number of digits; defaults to "0" when absent
 * @param maxLength maximum number of digits; defaults to unbounded ("") when absent
 * @return the grammar document with content type application/srgs+xml
 */
@Path("dtmf2hash")
@GET
@Produces("application/srgs+xml")
public Response getDTMF2Hash(@QueryParam("minlength") String minLength, @QueryParam("maxlength") String maxLength) {
    if ( minLength == null || minLength.isEmpty() )
    {
        minLength = "0";
    }
    if ( maxLength == null || maxLength.isEmpty() )
    {
        maxLength = "";
    }
    //a fixed length is expressed as a single number, otherwise as "min-max"
    String repeat;
    if ( minLength.equals( maxLength ) )
    {
        repeat = minLength;
    }
    else
    {
        repeat = minLength + "-" + maxLength;
    }
    StringBuilder grammar = new StringBuilder();
    grammar.append( "<?xml version=\"1.0\"?> " );
    grammar.append( "<grammar mode=\"dtmf\" version=\"1.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://www.w3.org/2001/06/grammar http://www.w3.org/TR/speech-grammar/grammar.xsd\" xmlns=\"http://www.w3.org/2001/06/grammar\" root=\"untilHash\" > " );
    grammar.append( "<rule id=\"digit\"> " );
    grammar.append( "<one-of> " );
    for ( int digit = 0; digit <= 9; digit++ )
    {
        grammar.append( "<item> " ).append( digit ).append( " </item> " );
    }
    grammar.append( "<item> * </item> " );
    grammar.append( "</one-of> " );
    grammar.append( "</rule> " );
    grammar.append( "<rule id=\"untilHash\" scope=\"public\"> " );
    grammar.append( "<one-of> " );
    grammar.append( "<item repeat=\"" ).append( repeat ).append( "\"><ruleref uri=\"#digit\"/></item> " );
    grammar.append( "<item> # </item> " );
    grammar.append( "</one-of> " );
    grammar.append( "</rule> " );
    grammar.append( "</grammar> " );
    return Response.ok( grammar.toString() ).build();
}
/**
 * Entry point called by the VoiceXML platform when a call starts. Looks up the
 * session, initializes it, creates the DDR (cost) record and renders the first
 * question of the dialog.
 *
 * NOTE(review): when the session is missing this returns null instead of an
 * error Response; and config is dereferenced without a null check — confirm an
 * adapter always exists for localID.
 *
 * @param direction "inbound" or "outbound"
 * @param remoteID address of the remote party (may carry an "@..." suffix)
 * @param localID address of this adapter
 * @param ui injected request context, used to capture the service host
 * @return the VoiceXML document for the first question, or null when no session is found
 */
@Path("new")
@GET
@Produces("application/voicexml")
public Response getNewDialog(@QueryParam("direction") String direction,@QueryParam("remoteID") String remoteID,@QueryParam("localID") String localID, @Context UriInfo ui)
{
    log.info("call started:"+direction+":"+remoteID+":"+localID);
    this.host=ui.getBaseUri().toString().replace(":80", "");
    AdapterConfig config = AdapterConfig.findAdapterConfig(AdapterAgent.ADAPTER_TYPE_BROADSOFT, localID);
    String formattedRemoteId = remoteID;
    //format the remote number
    try
    {
        formattedRemoteId = PhoneNumberUtils.formatNumber( remoteID.split( "@" )[0], PhoneNumberFormat.E164 );
    }
    catch ( Exception e1 )
    {
        log.severe( "Remote number formatting failed: "+ remoteID.split( "@" )[0] );
    }
    String sessionKey = AdapterAgent.ADAPTER_TYPE_BROADSOFT+"|"+localID+"|"+ formattedRemoteId;
    Session session = Session.getSession(sessionKey);
    String url = "";
    if ( session != null )
    {
        //refresh the session state for this new call leg
        session.setDirection( direction );
        session.setRemoteAddress( remoteID );
        session.setType( AdapterAgent.ADAPTER_TYPE_BROADSOFT );
        session.setPubKey( config.getPublicKey() );
        session.setTrackingToken( UUID.randomUUID().toString() );
        session.setAdapterID( config.getConfigId() );
        if (direction.equals("inbound")){
            //inbound: start from the adapter's configured agent URL and subscribe for call events
            url = config.getInitialAgentURL();
            session.setStartUrl( url );
            Broadsoft bs = new Broadsoft( config );
            bs.startSubscription();
        }
        else if(direction.equalsIgnoreCase("outbound")) // Remove retry counter because call is succesfull
        {
            //outbound: the start URL was stored when the call was dialed
            url = session.getStartUrl();
        }
    }
    else {
        log.severe(String.format("Session %s not found", sessionKey));
        return null;
    }
    Question question = session.getQuestion();
    if(question == null) {
        question = Question.fromURL(url,session.getAdapterConfig().getConfigId(),remoteID,localID);
        session.setQuestion( question );
    }
    DDRWrapper.log(question,session,"Start",config);
    //add costs
    try
    {
        DDRRecord ddrRecord = null;
        if ( direction.equalsIgnoreCase( "outbound" ) )
        {
            ddrRecord = DDRUtils.createDDRRecordOnOutgoingCommunication( config, remoteID, 1 );
        }
        else
        {
            ddrRecord = DDRUtils.createDDRRecordOnIncomingCommunication( config, remoteID, 1 );
        }
        session.setDDRRecordId( ddrRecord.getId() );
    }
    catch ( Exception e )
    {
        //accounting failures must not block the dialog; log to both loggers
        String errorMessage = String.format(
            "Creating DDR records failed. Direction: %s for adapterId: %s with address: %s remoteId: %s and localId: %s",
            direction, config.getConfigId(), config.getMyAddress(), remoteID, localID );
        log.severe( errorMessage );
        dialogLog.severe( config.getConfigId(), errorMessage );
    }
    finally
    {
        //persist the session regardless of whether the DDR record was created
        session.storeSession();
    }
    return handleQuestion( question, config.getConfigId(), remoteID, sessionKey );
}
/**
 * Callback endpoint invoked by the VoiceXML platform when the caller provided
 * input for the current question. Advances the dialog to the next question
 * and renders it; returns an exit document when no session or question exists.
 *
 * @param question_id id of the question that was answered (unused in the body)
 * @param answer_id id of the chosen answer, for closed questions
 * @param answer_input raw (URL-encoded) caller input, for open questions
 * @param sessionKey key identifying the ongoing call session
 * @param ui injected request context, used to capture the service host
 * @return the VoiceXML document for the next dialog step
 */
@Path( "answer" )
@GET
@Produces( "application/voicexml+xml" )
public Response answer( @QueryParam( "questionId" ) String question_id,
    @QueryParam( "answerId" ) String answer_id, @QueryParam( "answerInput" ) String answer_input,
    @QueryParam( "sessionKey" ) String sessionKey, @Context UriInfo ui )
{
    //the platform URL-encodes the caller input; decode it before processing
    try
    {
        answer_input = answer_input != null ? URLDecoder.decode( answer_input, "UTF-8" ) : answer_input;
    }
    catch ( UnsupportedEncodingException e )
    {
        log.warning( String.format( "Answer input decode failed for: %s", answer_input) );
    }
    this.host = ui.getBaseUri().toString().replace( ":80", "" );
    String reply = "<vxml><exit/></vxml>";
    Session session = Session.getSession( sessionKey );
    if ( session != null )
    {
        Question question = session.getQuestion();
        if ( question != null )
        {
            String responder = session.getRemoteAddress();
            //a killed session must not continue the dialog
            if ( session.killed )
            {
                log.warning( "session is killed" );
                return Response.status( Response.Status.BAD_REQUEST ).build();
            }
            DDRWrapper.log( question, session, "Answer" );
            //advance the dialog: answer() returns the follow-up question (or null at the end)
            question = question.answer( responder, session.getAdapterConfig().getConfigId(), answer_id,
                answer_input, sessionKey );
            session.setQuestion( question );
            session.storeSession();
            return handleQuestion( question, session.getAdapterConfig().getConfigId(), responder, sessionKey );
        } else {
            log.warning( "No question found in session!" );
        }
    }
    else
    {
        log.warning( "No session found for: " + sessionKey );
    }
    //fall-through: no session/question, instruct the platform to exit
    return Response.ok( reply ).build();
}
/**
 * Callback endpoint invoked by the VoiceXML platform when the caller gave no
 * input within the configured timeout. Fires the "timeout" event on the
 * current question and renders the resulting dialog step.
 *
 * Fix: previously {@code question.event(...)} was called without checking that
 * the session actually holds a question, causing a NullPointerException; this
 * now mirrors the null guard used by the answer endpoint.
 *
 * @param question_id id of the question that timed out (unused in the body)
 * @param sessionKey key identifying the ongoing call session
 * @return the VoiceXML document for the next dialog step, or an exit document
 * @throws Exception when event handling or rendering fails
 */
@Path( "timeout" )
@GET
@Produces( "application/voicexml+xml" )
public Response timeout( @QueryParam( "questionId" ) String question_id,
    @QueryParam( "sessionKey" ) String sessionKey ) throws Exception
{
    String reply = "<vxml><exit/></vxml>";
    Session session = Session.getSession( sessionKey );
    if ( session != null )
    {
        Question question = session.getQuestion();
        //guard against sessions without a question (previously an NPE)
        if ( question != null )
        {
            String responder = session.getRemoteAddress();
            if ( session.killed )
            {
                return Response.status( Response.Status.BAD_REQUEST ).build();
            }
            DDRWrapper.log( question, session, "Timeout" );
            HashMap<String, Object> extras = new HashMap<String, Object>();
            extras.put( "sessionKey", sessionKey );
            question = question.event( "timeout", "No answer received", extras, responder );
            session.setQuestion( question );
            session.storeSession();
            return handleQuestion( question, session.getAdapterConfig().getConfigId(), responder, sessionKey );
        }
        else
        {
            log.warning( "No question found in session!" );
        }
    }
    else
    {
        log.warning( "No session found for: " + sessionKey );
    }
    return Response.ok( reply ).build();
}
/**
 * Callback endpoint invoked by the VoiceXML platform when the caller gave an
 * invalid answer. Fires the "exception" event on the current question and
 * renders the resulting dialog step; returns an exit document when no session
 * or question exists.
 *
 * @param question_id id of the question that received the wrong answer (unused in the body)
 * @param sessionKey key identifying the ongoing call session
 * @return the VoiceXML document for the next dialog step, or an exit document
 */
@Path("exception")
@GET
@Produces("application/voicexml+xml")
public Response exception(@QueryParam("questionId") String question_id, @QueryParam("sessionKey") String sessionKey){
    String reply = "<vxml><exit/></vxml>";
    Session session = Session.getSession( sessionKey );
    //guard clauses: without a session and question there is nothing to do
    if ( session == null || session.getQuestion() == null )
    {
        return Response.ok( reply ).build();
    }
    Question question = session.getQuestion();
    String responder = session.getRemoteAddress();
    //a killed session must not continue the dialog
    if ( session.killed )
    {
        return Response.status( Response.Status.BAD_REQUEST ).build();
    }
    DDRWrapper.log( question, session, "Timeout" );
    HashMap<String, String> extras = new HashMap<String, String>();
    extras.put( "sessionKey", sessionKey );
    question = question.event( "exception", "Wrong answer received", extras, responder );
    session.setQuestion( question );
    session.storeSession();
    return handleQuestion( question, session.getAdapterID(), responder, sessionKey );
}
/**
 * Handles a call hangup: fires the "hangup" event on the question (loading it
 * from the start URL when the session no longer holds one), clears the
 * session's question and logs the event.
 *
 * Fix: previously the session returned by {@code Session.getSession} was
 * dereferenced without a null check, causing a NullPointerException for
 * unknown session keys; this now logs and returns gracefully.
 *
 * @param direction "inbound", "outbound" or "transfer"
 * @param remoteID address of the remote party (an "@..." suffix is stripped for the key)
 * @param localID address of this adapter
 * @param startTime timestamp at which the call was set up
 * @param answerTime timestamp at which the call was answered (may be null)
 * @param releaseTime timestamp at which the call was released
 * @param notPickedUp true when the call was never answered; null when unknown
 * @return an empty OK response
 * @throws Exception when event handling fails
 */
@Path("hangup")
@GET
@Produces("application/voicexml+xml")
public Response hangup( @QueryParam( "direction" ) String direction,
    @QueryParam( "remoteID" ) String remoteID, @QueryParam( "localID" ) String localID,
    @QueryParam( "startTime" ) String startTime, @QueryParam( "answerTime" ) String answerTime,
    @QueryParam( "releaseTime" ) String releaseTime, @QueryParam( "notPickedUp" ) Boolean notPickedUp )
    throws Exception
{
    log.info("call hangup with:"+direction+":"+remoteID+":"+localID);
    String sessionKey = AdapterAgent.ADAPTER_TYPE_BROADSOFT+"|"+localID+"|"+remoteID.split( "@" )[0];
    Session session = Session.getSession(sessionKey);
    //guard against hangups for which no session exists (previously an NPE)
    if ( session == null )
    {
        log.severe( String.format( "No session found for: %s. Ignoring hangup event", sessionKey ) );
        return Response.ok("").build();
    }
    log.info( String.format( "Session key: %s with remote: %s and local %s", sessionKey,
        session.getRemoteAddress(), session.getLocalAddress() ) );
    Question question = session.getQuestion();
    String referredCalledId = null;
    if ( question != null )
    {
        remoteID = session.getRemoteAddress();
        referredCalledId = session.getExtras().get( "referredCalledId" );
    }
    else {
        //session lost its question (e.g. after a referral): reload it from the start URL
        question = Question.fromURL( session.getStartUrl(), session.getAdapterConfig().getConfigId(), remoteID,
            localID );
    }
    if ( question != null )
    {
        HashMap<String, Object> timeMap = getTimeMap( startTime, answerTime, releaseTime );
        if ( notPickedUp != null )
        {
            timeMap.put( "notPickedUp", notPickedUp );
        }
        timeMap.put( "referredCalledId", referredCalledId );
        timeMap.put( "sessionKey", sessionKey );
        question.event( "hangup", "Hangup", timeMap, remoteID );
        session.setQuestion( null );
        DDRWrapper.log( question, session, "Hangup" );
        handleQuestion( null, session.getAdapterConfig().getConfigId(), remoteID, sessionKey );
    }
    else
    {
        log.info( "no question received" );
    }
    return Response.ok("").build();
}
/**
 * Used to trigger the "answered" event, unlike
 * {@link VoiceXMLRESTProxy#answer(String, String, String, String, UriInfo)}
 * which processes caller input; this is driven by Broadsoft call-state events.
 *
 * Fix: previously the session returned by {@code Session.getSession} was
 * dereferenced without a null check, causing a NullPointerException for
 * unknown session keys; this now logs and returns gracefully.
 *
 * @param direction "inbound", "outbound" or "transfer"
 * @param remoteID address of the remote party (an "@..." suffix is stripped for the key)
 * @param localID address of this adapter
 * @param startTime timestamp at which the call was set up
 * @param answerTime timestamp at which the call was answered
 * @param releaseTime timestamp at which the call was released (may be null)
 * @return an empty OK response
 * @throws Exception when event handling fails
 */
public Response answered( String direction, String remoteID, String localID, String startTime,
    String answerTime, String releaseTime ) throws Exception
{
    log.info( "call answered with:" + direction + "_" + remoteID + "_" + localID );
    //ignore the @outbound suffix when building the session key
    String sessionKey = AdapterAgent.ADAPTER_TYPE_BROADSOFT+"|"+localID+"|"+remoteID.split( "@" )[0];
    Session session = Session.getSession(sessionKey);
    //guard against events for which no session exists (previously an NPE)
    if ( session == null )
    {
        log.severe( String.format( "No session found for: %s. Ignoring answered event", sessionKey ) );
        return Response.ok( "" ).build();
    }
    log.info( "question from session got: "+ session.getSession_id() );
    Question question = session.getQuestion();
    //for direction = transfer (redirect event), question should not be null
    if ( question != null )
    {
        String responder = session.getRemoteAddress();
        String referredCalledId = session.getExtras().get( "referredCalledId" );
        HashMap<String, Object> timeMap = getTimeMap( startTime, answerTime, releaseTime );
        timeMap.put( "referredCalledId", referredCalledId );
        timeMap.put( "sessionKey", sessionKey );
        question.event( "answered", "Answered", timeMap, responder );
        DDRWrapper.log( question, session, "Answered" );
    }
    return Response.ok( "" ).build();
}
/**
 * Receives Broadsoft call-control (cc) event notifications as XML. For in-call
 * events it extracts the call state, remote party and timings, matches them to
 * a session and dispatches answered/hangup handling; for subscription events
 * it renews the event subscription when it was terminated.
 *
 * Fix: the released-call check compared {@code direction != "transfer"} by
 * reference; it only worked because both sides were interned literals. Replaced
 * with {@code !"transfer".equals(direction)}.
 *
 * NOTE(review): several nodes (personality, remoteParty, releaseCause) are
 * dereferenced without null checks; a malformed event is caught by the outer
 * try/catch and only logged — confirm this best-effort handling is intended.
 *
 * @param xml the raw event notification body
 * @return an empty OK response (errors are logged, never propagated)
 */
@Path("cc")
@POST
public Response receiveCCMessage(String xml) {
    log.info("Received cc: "+xml);
    try {
        DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
        DocumentBuilder db = dbf.newDocumentBuilder();
        Document dom = db.parse(new ByteArrayInputStream(xml.getBytes("UTF-8")));
        Node subscriberId = dom.getElementsByTagName("subscriberId").item(0);
        AdapterConfig config = AdapterConfig.findAdapterConfigByUsername(subscriberId.getTextContent());
        Node eventData = dom.getElementsByTagName("eventData").item(0);
        // check if incall event
        if(eventData.getChildNodes().getLength()>1) {
            Node call = eventData.getChildNodes().item(1);
            //collect the relevant child nodes of the call element
            Node personality = null;
            Node callState = null;
            Node remoteParty = null;
            Node releaseCause = null;
            Node answerTime = null;
            Node releaseTime = null;
            Node startTime = null;
            for ( int i = 0; i < call.getChildNodes().getLength(); i++ )
            {
                Node node = call.getChildNodes().item( i );
                if ( node.getNodeName().equals( "personality" ) )
                {
                    personality = node;
                }
                else if ( node.getNodeName().equals( "callState" ) )
                {
                    callState = node;
                }
                else if ( node.getNodeName().equals( "remoteParty" ) )
                {
                    remoteParty = node;
                }
                else if ( node.getNodeName().equals( "releaseCause" ) )
                {
                    releaseCause = node;
                }
                else if ( node.getNodeName().equals( "startTime" ) )
                {
                    startTime = node;
                }
                else if ( node.getNodeName().equals( "answerTime" ) )
                {
                    answerTime = node;
                }
                else if ( node.getNodeName().equals( "releaseTime" ) )
                {
                    releaseTime = node;
                }
            }
            if(callState!=null && callState.getNodeName().equals("callState")) {
                // Check if call
                if ( callState.getTextContent().equals( "Released" )
                    || callState.getTextContent().equals( "Active" ) )
                {
                    String startTimeString = startTime != null ? startTime.getTextContent()
                        : null;
                    String answerTimeString = answerTime != null ? answerTime.getTextContent()
                        : null;
                    String releaseTimeString = releaseTime != null ? releaseTime.getTextContent()
                        : null;
                    // Check if a sip or network call
                    String type="";
                    String address="";
                    String fullAddress = "";
                    for(int i=0; i<remoteParty.getChildNodes().getLength();i++) {
                        Node rpChild = remoteParty.getChildNodes().item(i);
                        if(rpChild.getNodeName().equals("address")) {
                            address=rpChild.getTextContent();
                        }else if(rpChild.getNodeName().equals("userId")){
                            address=rpChild.getTextContent().replace( "@ask.ask.voipit.nl", "" );
                        }
                        else if(rpChild.getNodeName().equals("callType")) {
                            type=rpChild.getTextContent();
                        }
                    }
                    //keep the unmodified address (incl. tel:/sip: scheme) for scheme checks below
                    fullAddress = new String(address);
                    // Check if session can be matched to call
                    if(type.equals("Network") || type.equals("Group") || type.equals("Unknown")) {
                        address = address.replace("tel:", "").replace("sip:", "");
                        log.info("Going to format phone number: "+address);
                        if(address.startsWith("+"))
                        {
                            address = PhoneNumberUtils.formatNumber(address, null);
                        }
                        String sessionKey = AdapterAgent.ADAPTER_TYPE_BROADSOFT+"|"+config.getMyAddress()+"|"+address.split( "@" )[0];
                        Session session = Session.getSession(sessionKey);
                        log.info("Session key: "+sessionKey);
                        String direction="inbound";
                        if ( personality.getTextContent().equals( "Originator" )
                            && !address.contains( "outbound" ) )
                        {
                            //address += "@outbound";
                            direction = "transfer";
                            log.info( "Transfer detected????" );
                            //when the receiver hangs up, an active callstate is also triggered.
                            // but the releaseCause is also set to Temporarily Unavailable
                            if ( callState.getTextContent().equals( "Active" ) )
                            {
                                if ( releaseCause == null
                                    || ( releaseCause != null
                                        && !releaseCause.getTextContent().equalsIgnoreCase(
                                            "Temporarily Unavailable" ) && !releaseCause.getTextContent()
                                        .equalsIgnoreCase( "User Not Found" ) ) )
                                {
                                    answered( direction, address, config.getMyAddress(), startTimeString,
                                        answerTimeString, releaseTimeString );
                                }
                            }
                        }
                        else if ( personality.getTextContent().equals( "Originator" ) )
                        {
                            log.info( "Outbound detected?????" );
                            direction = "outbound";
                        }
                        else if ( personality.getTextContent().equals( "Click-to-Dial" ) )
                        {
                            log.info( "CTD hangup detected?????" );
                            direction = "outbound";
                            //TODO: move this to internal mechanism to check if call is started!
                            if ( releaseCause.getTextContent().equals( "Server Failure" ) )
                            {
                                log.severe( "Need to restart the call!!!! ReleaseCause: "
                                    + releaseCause.getTextContent() );
                                int retry = session.getRetryCount() != null ? session.getRetryCount() : 0;
                                if ( retry < MAX_RETRIES )
                                {
                                    Broadsoft bs = new Broadsoft( config );
                                    String extSession = bs.startCall( address );
                                    log.info( "Restarted call extSession: " + extSession );
                                    retry++;
                                    session.setRetryCount( retry );
                                }
                                else
                                {
                                    // TODO: Send mail to support!!!
                                    log.severe( "Retries failed!!!" );
                                }
                            }
                            else if ( releaseCause.getTextContent().equals( "Request Failure" ) )
                            {
                                log.severe( "Restart call?? ReleaseCause: "
                                    + releaseCause.getTextContent() );
                                int retry = session.getRetryCount() != null ? session.getRetryCount() : 0;
                                if ( retry < MAX_RETRIES )
                                {
                                    Broadsoft bs = new Broadsoft( config );
                                    String extSession = bs.startCall( address );
                                    log.info( "Restarted call extSession: " + extSession );
                                    retry++;
                                    session.setRetryCount( retry );
                                }
                                else
                                {
                                    // TODO: Send mail to support!!!
                                    log.severe( "Retries failed!!!" );
                                }
                            }
                        }
                        //store or update the session
                        session.storeSession();
                        if ( callState.getTextContent().equals( "Released" ) )
                        {
                            boolean callReleased = false;
                            //fixed: compare Strings with equals instead of reference identity
                            if ( session != null && !"transfer".equals( direction )
                                && !personality.getTextContent().equals( "Terminator" )
                                && fullAddress.startsWith( "tel:" ) )
                            {
                                log.info( "SESSSION FOUND!! SEND HANGUP!!!" );
                                this.hangup( direction, address, config.getMyAddress(), startTimeString,
                                    answerTimeString, releaseTimeString, false );
                                callReleased = true;
                            }
                            else
                            {
                                if ( personality.getTextContent().equals( "Originator" )
                                    && fullAddress.startsWith( "sip:" ) )
                                {
                                    log.info( "Probably a disconnect of a sip. not calling hangup event" );
                                }
                                else if ( personality.getTextContent().equals( "Originator" )
                                    && fullAddress.startsWith( "tel:" ) )
                                {
                                    log.info( "Probably a disconnect of a redirect. call hangup event" );
                                    hangup( direction, address, config.getMyAddress(), startTimeString,
                                        answerTimeString, releaseTimeString, null );
                                    callReleased = true;
                                }
                                else if ( personality.getTextContent().equals( "Terminator" ) )
                                {
                                    log.info( "No session for this inbound?????" );
                                    callReleased = true;
                                }
                                else
                                {
                                    log.info( "What the hell was this?????" );
                                    log.info( "Session already ended?" );
                                }
                            }
                            //update session with call timings
                            if ( callReleased )
                            {
                                session.setAnswerTimestamp( answerTimeString );
                                session.setStartTimestamp( startTimeString );
                                session.setReleaseTimestamp( releaseTimeString );
                                session.setDirection( direction );
                                session.setRemoteAddress( address );
                                session.setLocalAddress( config.getMyAddress() );
                                session.storeSession();
                                log.info( String.format( "Call ended. session updated: %s",
                                    ServerUtils.serialize( session ) ) );
                                stopCostsAtHangup( session );
                                //flush the keys
                                session.drop();
                            }
                        }
                    } else {
                        log.warning("Can't handle hangup of type: "+type+" (yet)");
                    }
                }
            }
        } else {
            //not an in-call event: check for a terminated subscription and renew it
            Node eventName = dom.getElementsByTagName("eventName").item(0);
            if(eventName!=null && eventName.getTextContent().equals("SubscriptionTerminatedEvent")) {
                Broadsoft bs = new Broadsoft(config);
                bs.startSubscription();
                log.info("Start a new dialog");
            }
            log.info("Received a subscription update!");
        }
    } catch (Exception e) {
        log.severe("Something failed: "+ e.getMessage());
        e.printStackTrace();
    }
    return Response.ok("").build();
}
/**
 * Endpoint for tts functionality: redirects the client to the VoiceRSS
 * speech-engine URL for the given text.
 *
 * @param textForSpeech actually text that has to be spoken
 * @param language format "language-country" check the full link at {@link http://www.voicerss.org/api/documentation.aspx VoiceRSS}
 * @param contentType file format
 * @param speed -10 to 10
 * @param format audio formats
 * @param req injected request (unused)
 * @param resp injected response (unused)
 * @return a 303 redirect to the speech-engine URL
 */
@GET
@Path( "tts/{textForSpeech}" )
public Response redirectToSpeechEngine( @PathParam( "textForSpeech" ) String textForSpeech,
    @QueryParam( "hl" ) @DefaultValue( "nl-nl" ) String language,
    @QueryParam( "c" ) @DefaultValue( "wav" ) String contentType,
    @QueryParam( "r" ) @DefaultValue( "0" ) String speed,
    @QueryParam( "f" ) @DefaultValue( "8khz_8bit_mono" ) String format,
    @Context HttpServletRequest req,
    @Context HttpServletResponse resp ) throws IOException, URISyntaxException
{
    URI ttsURI = new URI( getTTSURL( textForSpeech, language, contentType, speed, format ) );
    return Response.seeOther( ttsURI ).build();
}
/**
 * Simple endpoint for repeating a question based on its session: re-renders
 * the question currently stored in the session.
 *
 * Fix: previously the session returned by {@code Session.getSession} was
 * dereferenced without a null check, causing a NullPointerException for
 * unknown session keys; this now logs and returns gracefully.
 *
 * @param sessionKey key identifying the ongoing call session
 * @return the rendered question document, or an empty OK response
 * @throws Exception when rendering the question fails
 */
@GET
@Path( "retry" )
public Response retryQuestion( @QueryParam( "sessionKey" ) String sessionKey ) throws Exception
{
    Session session = Session.getSession( sessionKey );
    //guard against unknown session keys (previously an NPE)
    if ( session == null )
    {
        log.warning( "No session found for: " + sessionKey );
        return Response.ok( "" ).build();
    }
    if ( session.getQuestion() != null )
    {
        return handleQuestion( session.getQuestion(), session.getAdapterID(), session.getRemoteAddress(), sessionKey );
    }
    return Response.ok( "" ).build();
}
/**
 * Value holder pairing the prompts that must be played with the question that
 * remains to be asked after walking the question chain.
 */
public class Return {
    ArrayList<String> prompts;
    Question question;

    /**
     * @param promptList the prompts collected so far
     * @param remainingQuestion the question left to ask; may be null
     */
    public Return( ArrayList<String> promptList, Question remainingQuestion )
    {
        prompts = promptList;
        question = remainingQuestion;
    }
}
/**
 * Walks the question chain starting at the given question, collecting the
 * prompts that must be played. Non-tel referrals are followed inline (their
 * target question is fetched and processed in the next iteration); closed,
 * comment, tel-referral and all other question types terminate the walk.
 *
 * @param question the first question of the chain; may be null
 * @param adapterID id of the adapter config, passed on when fetching referrals
 * @param address the remote address, passed on when fetching referrals
 * @return the collected prompts plus the question that remains to be asked (may be null)
 */
public Return formQuestion(Question question, String adapterID,String address) {
    ArrayList<String> prompts = new ArrayList<String>();
    //LOOP_DETECTION bounds the number of chained referrals that are followed
    for (int count = 0; count<=LOOP_DETECTION; count++){
        if (question == null) break;
        log.info("Going to form question of type: "+question.getType());
        //a question without a type cannot be rendered; drop it
        if ( question.getType() == null )
        {
            question = null;
            break;
        }
        String preferred_language = question.getPreferred_language();
        question.setPreferred_language(preferred_language);
        String qText = question.getQuestion_text();
        if(qText!=null && !qText.equals("")) prompts.add(qText);
        if (question.getType().equalsIgnoreCase("closed")) {
            //closed question: the answer options are prompts too
            for (Answer ans : question.getAnswers()) {
                String answer = ans.getAnswer_text();
                if (answer != null && !answer.equals("")) prompts.add(answer);
            }
            break; //Jump from forloop
        } else if (question.getType().equalsIgnoreCase("comment")) {
            //question = question.answer(null, adapterID, null, null);
            break;
        } else if (question.getType().equalsIgnoreCase("referral")) {
            if(!question.getUrl().startsWith("tel:")) {
                //non-tel referral: fetch the referred question and continue the walk
                question = Question.fromURL(question.getUrl(),adapterID,address);
                //question = question.answer(null, null, null);
                //					break;
            } else {
                // Break out because we are going to reconnect
                break;
            }
        } else {
            break; //Jump from forloop (open questions, etc.)
        }
    }
    return new Return(prompts, question);
}
protected String renderComment(Question question,ArrayList<String> prompts, String sessionKey){
String handleTimeoutURL = "timeout";
String handleExceptionURL = "exception";
String redirectTimeoutProperty = question.getMediaPropertyValue( MediumType.BROADSOFT, MediaPropertyKey.TIMEOUT );
//assign a default timeout if one is not specified
String redirectTimeout = redirectTimeoutProperty != null ? redirectTimeoutProperty : "40s";
if(!redirectTimeout.endsWith("s"))
{
log.warning("Redirect timeout must be end with 's'. E.g. 40s. Found: "+ redirectTimeout);
redirectTimeout += "s";
}
String redirectTypeProperty = question.getMediaPropertyValue( MediumType.BROADSOFT, MediaPropertyKey.TYPE );
String redirectType = redirectTypeProperty != null ? redirectTypeProperty.toLowerCase() : "bridge";
if(!redirectType.equals("blind") && !redirectType.equals("bridge"))
{
log.warning("Redirect must be blind or bridge. Found: "+ redirectTimeout);
redirectTypeProperty = "bridge";
}
StringWriter sw = new StringWriter();
try {
XMLOutputter outputter = new XMLOutputter(sw, "UTF-8");
outputter.declaration();
outputter.startTag("vxml");
outputter.attribute("version", "2.1");
outputter.attribute("xmlns", "http://www.w3.org/2001/vxml");
outputter.startTag("form");
if (question != null && question.getType().equalsIgnoreCase("referral")){
outputter.startTag("transfer");
outputter.attribute("name", "thisCall");
outputter.attribute("dest", question.getUrl());
if(redirectType.equals("bridge")) {
outputter.attribute("bridge","true");
} else {
outputter.attribute("bridge","false");
}
outputter.attribute("connecttimeout",redirectTimeout);
for (String prompt : prompts){
outputter.startTag("prompt");
outputter.startTag("audio");
outputter.attribute("src", prompt);
outputter.endTag();
outputter.endTag();
}
outputter.startTag("filled");
outputter.startTag("if");
outputter.attribute("cond", "thisCall=='noanswer'");
outputter.startTag("goto");
outputter.attribute("next", handleTimeoutURL+"?questionId="+question.getQuestion_id()+"&sessionKey="+sessionKey);
outputter.endTag();
outputter.startTag("elseif");
outputter.attribute("cond", "thisCall=='busy' || thisCall=='network_busy'");
outputter.endTag();
outputter.startTag("goto");
outputter.attribute("next", handleExceptionURL+"?questionId="+question.getQuestion_id()+"&sessionKey="+sessionKey);
outputter.endTag();
outputter.startTag("else");
outputter.endTag();
outputter.startTag("goto");
outputter.attribute("next", getAnswerUrl()+"?questionId="+question.getQuestion_id()+"&sessionKey="+sessionKey);
outputter.endTag();
outputter.endTag();
outputter.endTag();
outputter.endTag();
} else {
outputter.startTag("block");
for (String prompt : prompts){
outputter.startTag("prompt");
outputter.startTag("audio");
outputter.attribute("src", prompt);
outputter.endTag();
outputter.endTag();
}
if(question!=null) {
outputter.startTag("goto");
outputter.attribute("next", getAnswerUrl()+"?questionId="+question.getQuestion_id()+"&sessionKey="+sessionKey);
outputter.endTag();
}
outputter.endTag();
}
outputter.endTag();
outputter.endTag();
outputter.endDocument();
} catch (Exception e) {
log.severe("Exception in creating question XML: "+ e.toString());
}
return sw.toString();
}
private String renderClosedQuestion(Question question,ArrayList<String> prompts,String sessionKey){
ArrayList<Answer> answers=question.getAnswers();
String handleTimeoutURL = "timeout";
StringWriter sw = new StringWriter();
try {
XMLOutputter outputter = new XMLOutputter(sw, "UTF-8");
outputter.declaration();
outputter.startTag("vxml");
outputter.attribute("version", "2.1");
outputter.attribute("xmlns", "http://www.w3.org/2001/vxml");
//remove the termchar operator when # is found in the answer
for ( Answer answer : answers )
{
if ( answers.size() > 11
|| ( answer.getAnswer_text() != null && answer.getAnswer_text().contains( "dtmfKey://" ) ) )
{
outputter.startTag( "property" );
outputter.attribute( "name", "termchar" );
outputter.attribute( "value", "" );
outputter.endTag();
break;
}
}
String noAnswerTimeout = question.getMediaPropertyValue( MediumType.BROADSOFT, MediaPropertyKey.TIMEOUT );
//assign a default timeout if one is not specified
noAnswerTimeout = noAnswerTimeout != null ? noAnswerTimeout : "10s";
if(!noAnswerTimeout.endsWith("s"))
{
log.warning("No answer timeout must end with 's'. E.g. 10s. Found: "+ noAnswerTimeout);
noAnswerTimeout += "s";
}
outputter.startTag( "property" );
outputter.attribute( "name", "timeout" );
outputter.attribute( "value", noAnswerTimeout );
outputter.endTag();
outputter.startTag("menu");
for (String prompt : prompts){
outputter.startTag("prompt");
outputter.startTag("audio");
outputter.attribute("src", prompt);
outputter.endTag();
outputter.endTag();
}
for ( int cnt = 0; cnt < answers.size(); cnt++ )
{
Integer dtmf = cnt + 1;
String dtmfValue = dtmf.toString();
if ( answers.get( cnt ).getAnswer_text() != null
&& answers.get( cnt ).getAnswer_text().startsWith( "dtmfKey://" ) )
{
dtmfValue = answers.get( cnt ).getAnswer_text().replace( "dtmfKey://", "" ).trim();
}
else
{
if ( dtmf == 10 )
{ // 10 translates into 0
dtmfValue = "0";
}
else if ( dtmf == 11 )
{
dtmfValue = "*";
}
else if ( dtmf == 12 )
{
dtmfValue = "#";
}
else if ( dtmf > 12 )
{
break;
}
}
outputter.startTag( "choice" );
outputter.attribute( "dtmf", dtmfValue );
outputter.attribute( "next", getAnswerUrl() + "?questionId=" + question.getQuestion_id()
+ "&answerId=" + answers.get( cnt ).getAnswer_id() + "&answerInput=" + URLEncoder.encode( dtmfValue, "UTF-8" ) + "&sessionKey="
+ sessionKey );
outputter.endTag();
}
outputter.startTag("noinput");
outputter.startTag("goto");
outputter.attribute("next", handleTimeoutURL+"?questionId="+question.getQuestion_id()+"&sessionKey="+sessionKey);
outputter.endTag();
outputter.endTag();
outputter.startTag("nomatch");
outputter.startTag("goto");
outputter.attribute("next", getAnswerUrl()+"?questionId="+question.getQuestion_id()+"&answerId=-1&sessionKey="+sessionKey);
outputter.endTag();
outputter.endTag();
outputter.endTag();
outputter.endTag();
outputter.endDocument();
} catch (Exception e) {
log.severe("Exception in creating question XML: "+ e.toString());
}
return sw.toString();
}
protected String renderOpenQuestion(Question question,ArrayList<String> prompts,String sessionKey)
{
String handleTimeoutURL = "/vxml/timeout";
StringWriter sw = new StringWriter();
try {
XMLOutputter outputter = new XMLOutputter(sw, "UTF-8");
outputter.declaration();
outputter.startTag("vxml");
outputter.attribute("version", "2.1");
outputter.attribute("xmlns", "http://www.w3.org/2001/vxml");
// Check if media property type equals audio
// if so record audio message, if not record dtmf input
String typeProperty = question.getMediaPropertyValue( MediumType.BROADSOFT, MediaPropertyKey.TYPE );
if(typeProperty!=null && typeProperty.equalsIgnoreCase("audio"))
{
renderVoiceMailQuestion( question, prompts, sessionKey, outputter );
}
else
{
//see if a dtmf length is defined in the question
String dtmfMinLength = question.getMediaPropertyValue( MediumType.BROADSOFT, MediaPropertyKey.ANSWER_INPUT_MIN_LENGTH );
dtmfMinLength = dtmfMinLength != null ? dtmfMinLength : "";
String dtmfMaxLength = question.getMediaPropertyValue( MediumType.BROADSOFT, MediaPropertyKey.ANSWER_INPUT_MAX_LENGTH );
dtmfMaxLength = dtmfMaxLength != null ? dtmfMaxLength : "";
String noAnswerTimeout = question.getMediaPropertyValue( MediumType.BROADSOFT, MediaPropertyKey.TIMEOUT );
String retryLimit = question.getMediaPropertyValue( MediumType.BROADSOFT, MediaPropertyKey.RETRY_LIMIT );
//assign a default timeout if one is not specified
noAnswerTimeout = noAnswerTimeout != null ? noAnswerTimeout : "5s";
if(!noAnswerTimeout.endsWith("s"))
{
log.warning("No answer timeout must end with 's'. E.g. 10s. Found: "+ noAnswerTimeout);
noAnswerTimeout += "s";
}
outputter.startTag("var");
outputter.attribute("name","answerInput");
outputter.endTag();
outputter.startTag("var");
outputter.attribute("name","questionId");
outputter.attribute("expr", "'"+question.getQuestion_id()+"'");
outputter.endTag();
outputter.startTag("var");
outputter.attribute("name","sessionKey");
outputter.attribute("expr", "'"+sessionKey+"'");
outputter.endTag();
outputter.startTag("form");
outputter.startTag( "property" );
outputter.attribute( "name", "timeout" );
outputter.attribute( "value", noAnswerTimeout );
outputter.endTag();
outputter.startTag("field");
outputter.attribute("name", "answer");
outputter.startTag("grammar");
outputter.attribute("mode", "dtmf");
outputter.attribute( "src", DTMFGRAMMAR + "?minlength=" + dtmfMinLength
+ "&maxlength=" + dtmfMaxLength );
outputter.attribute("type", "application/srgs+xml");
outputter.endTag();
for (String prompt: prompts){
outputter.startTag("prompt");
outputter.startTag("audio");
outputter.attribute("src", prompt);
outputter.endTag();
outputter.endTag();
}
outputter.startTag( "noinput" );
outputter.startTag( "goto" );
if ( retryLimit == null )
{
outputter.attribute( "next", handleTimeoutURL +
"?questionId=" + question.getQuestion_id() + "&sessionKey=" + sessionKey );
}
else
{
Integer retryCount = Question.getRetryCount( sessionKey );
if ( retryCount < Integer.parseInt( retryLimit ) )
{
outputter.attribute( "next", "/retry" + "?questionId=" + question.getQuestion_id()
+ "&sessionKey=" + sessionKey );
Question.updateRetryCount( sessionKey );
}
else
{
Question.flushRetryCount( sessionKey );
}
}
outputter.endTag();
outputter.endTag();
outputter.startTag("filled");
outputter.startTag("assign");
outputter.attribute("name", "answerInput");
outputter.attribute("expr", "answer$.utterance.replace(' ','','g')");
outputter.endTag();
outputter.startTag("submit");
outputter.attribute("next", getAnswerUrl());
outputter.attribute("namelist","answerInput questionId sessionKey");
outputter.endTag();
outputter.startTag("clear");
outputter.attribute("namelist", "answerInput answer");
outputter.endTag();
outputter.endTag();
outputter.endTag();
outputter.endTag();
}
outputter.endTag();
outputter.endDocument();
} catch (Exception e) {
log.severe("Exception in creating open question XML: "+ e.toString());
}
return sw.toString();
}
/** renders/updates the xml for recording an audio and posts it to the user on the callback
* @param question
* @param prompts
* @param sessionKey
* @param outputter
* @throws IOException
* @throws UnsupportedEncodingException
*/
protected void renderVoiceMailQuestion( Question question, ArrayList<String> prompts, String sessionKey,
XMLOutputter outputter ) throws IOException, UnsupportedEncodingException
{
//assign a default voice mail length if one is not specified
String voiceMessageLengthProperty = question.getMediaPropertyValue( MediumType.BROADSOFT, MediaPropertyKey.VOICE_MESSAGE_LENGTH );
voiceMessageLengthProperty = voiceMessageLengthProperty != null ? voiceMessageLengthProperty : "15s";
if(!voiceMessageLengthProperty.endsWith("s"))
{
log.warning("Voicemail length must be end with 's'. E.g. 40s. Found: "+ voiceMessageLengthProperty);
voiceMessageLengthProperty += "s";
}
String dtmfTerm = question.getMediaPropertyValue( MediumType.BROADSOFT, MediaPropertyKey.DTMF_TERMINATE );
dtmfTerm = dtmfTerm != null ? dtmfTerm : "true";
String voiceMailBeep = question.getMediaPropertyValue( MediumType.BROADSOFT, MediaPropertyKey.VOICE_MESSAGE_BEEP );
voiceMailBeep = voiceMailBeep != null ? voiceMailBeep : "true";
// Fetch the upload url
//String host = this.host.replace("rest/", "");
String uuid = UUID.randomUUID().toString();
String filename = uuid+".wav";
String storedAudiofile = host+"download/"+filename;
MyBlobStore store = new MyBlobStore();
String uploadURL = store.createUploadUrl(filename, "/dialoghandler/rest/download/audio.vxml");
outputter.startTag("form");
outputter.attribute("id", "ComposeMessage");
outputter.startTag("record");
outputter.attribute("name", "file");
outputter.attribute("beep", voiceMailBeep);
outputter.attribute("maxtime", voiceMessageLengthProperty);
outputter.attribute("dtmfterm", dtmfTerm);
//outputter.attribute("finalsilence", "3s");
for (String prompt : prompts){
outputter.startTag("prompt");
outputter.attribute("timeout", "5s");
outputter.startTag("audio");
outputter.attribute("src", prompt);
outputter.endTag();
outputter.endTag();
}
outputter.startTag("noinput");
for (String prompt : prompts){
outputter.startTag("prompt");
outputter.startTag("audio");
outputter.attribute("src", prompt);
outputter.endTag();
outputter.endTag();
}
/*outputter.startTag("goto");
outputter.attribute("next", handleTimeoutURL+"?question_id="+question.getQuestion_id()+"&sessionKey="+sessionKey);
outputter.endTag();*/
outputter.endTag();
outputter.endTag();
outputter.startTag("subdialog");
outputter.attribute("name", "saveWav");
outputter.attribute("src", uploadURL);
outputter.attribute("namelist", "file");
outputter.attribute("method", "post");
outputter.attribute("enctype", "multipart/form-data");
outputter.startTag("filled");
outputter.startTag("if");
outputter.attribute("cond", "saveWav.response='SUCCESS'");
outputter.startTag("goto");
outputter.attribute("next", getAnswerUrl()+"?questionId="+question.getQuestion_id()+"&sessionKey="+sessionKey+"&answerInput="+URLEncoder.encode(storedAudiofile, "UTF-8"));
outputter.endTag();
outputter.startTag("else");
outputter.endTag();
for (String prompt : prompts){
outputter.startTag("prompt");
outputter.startTag("audio");
outputter.attribute("src", prompt);
outputter.endTag();
outputter.endTag();
}
outputter.endTag();
outputter.endTag();
outputter.endTag();
outputter.endTag();
}
private Response handleQuestion(Question question, String adapterID,String remoteID,String sessionKey)
{
String result="<?xml version=\"1.0\" encoding=\"UTF-8\"?><vxml version=\"2.1\" xmlns=\"http://www.w3.org/2001/vxml\"><form><block><exit/></block></form></vxml>";
Return res = formQuestion(question,adapterID,remoteID);
if(question !=null && !question.getType().equalsIgnoreCase("comment"))
question = res.question;
log.info( "question formed at handleQuestion is: "+ ServerUtils.serializeWithoutException( question ));
log.info( "prompts formed at handleQuestion is: "+ res.prompts );
if ( question != null )
{
question.generateIds();
Session session = Session.getSession( sessionKey );
session.setQuestion( question );
session.setRemoteAddress( remoteID );
//convert all text prompts to speech
if(res.prompts != null)
{
String language = question.getPreferred_language().contains( "-" ) ? question.getPreferred_language()
: "nl-nl";
String ttsSpeedProperty = question.getMediaPropertyValue( MediumType.BROADSOFT, MediaPropertyKey.TSS_SPEED );
ttsSpeedProperty = ttsSpeedProperty != null ? ttsSpeedProperty : "0";
ArrayList<String> promptsCopy = new ArrayList<String>();
for ( String prompt : res.prompts )
{
if ( !prompt.startsWith( "dtmfKey://" ) )
{
if ( !prompt.endsWith( ".wav" ) )
{
promptsCopy.add( getTTSURL( prompt, language, "wav", ttsSpeedProperty, null ) );
}
else
{
promptsCopy.add( prompt );
}
}
}
res.prompts = promptsCopy;
}
if ( question.getType().equalsIgnoreCase( "closed" ) )
{
result = renderClosedQuestion( question, res.prompts, sessionKey );
}
else if ( question.getType().equalsIgnoreCase( "open" ) )
{
result = renderOpenQuestion( question, res.prompts, sessionKey );
}
else if ( question.getType().equalsIgnoreCase( "referral" ) )
{
if ( question.getUrl().startsWith( "tel:" ) )
{
// added for release0.4.2 to store the question in the session,
//for triggering an answered event
log.info( String.format( "session key at handle question is: %s and remoteId %s",
sessionKey, remoteID ) );
String[] sessionKeyArray = sessionKey.split( "\\|" );
if ( sessionKeyArray.length == 3 )
{
try
{
String redirectedId = PhoneNumberUtils
.formatNumber( question.getUrl().replace( "tel:", "" ), null );
//update url with formatted redirecteId. RFC3966 returns format tel:<blabla> as expected
question.setUrl( PhoneNumberUtils.formatNumber( redirectedId, PhoneNumberFormat.RFC3966 ) );
String transferKey = "transfer_" + redirectedId + "_" + sessionKeyArray[1];
log.info( String.format( "referral question %s stored with key: %s", question.toJSON(),
transferKey ) );
//store the remoteId as its lost while trying to trigger the answered event
HashMap<String, String> extras = new HashMap<String, String>();
extras.put( "referredCalledId", redirectedId );
session.getExtras().putAll( extras );
session.setQuestion( question );
session.setRemoteAddress( remoteID );
session.storeSession();
}
catch ( Exception e )
{
log.severe( String.format( "Phonenumber: %s is not valid",
question.getUrl().replace( "tel:", "" ) ) );
}
}
else
{
log.warning( "Could not save question in session: " + sessionKey
+ " for answered event as sessionKeyArray length is: "
+ sessionKeyArray.length );
}
result = renderComment( question, res.prompts, sessionKey );
}
}
else if ( res.prompts.size() > 0 )
{
result = renderComment( question, res.prompts, sessionKey );
}
}
else if ( res.prompts.size() > 0 )
{
result = renderComment( null, res.prompts, sessionKey );
}
else
{
log.info( "Going to hangup? So clear Session?" );
}
log.info("Sending xml: "+result);
return Response.ok(result).build();
}
protected String getAnswerUrl() {
return "answer";
}
/**
* @param startTime
* @param answerTime
* @param releaseTime
* @return
*/
private HashMap<String, Object> getTimeMap( String startTime, String answerTime, String releaseTime )
{
HashMap<String, Object> timeMap = new HashMap<String, Object>();
timeMap.put( "startTime", startTime );
timeMap.put( "answerTime", answerTime );
timeMap.put( "releaseTime", releaseTime );
return timeMap;
}
/**
* returns the TTS URL from voiceRSS.
*
* @param textForSpeech
* @param language
* @param contentType
* @return
*/
private String getTTSURL( String textForSpeech, String language, String contentType, String speed, String format )
{
speed = (speed != null && !speed.isEmpty()) ? speed : "0";
contentType = (contentType != null && !contentType.isEmpty()) ? contentType : "wav";
format = (format != null && !format.isEmpty()) ? format : "8khz_8bit_mono";
try
{
textForSpeech = URLEncoder.encode( textForSpeech.replace( "text://", "" ), "UTF-8").replace( "+", "%20" );
}
catch ( UnsupportedEncodingException e )
{
e.printStackTrace();
log.severe( e.getLocalizedMessage() );
}
return "http://api.voicerss.org/?key=afafc70fde4b4b32a730842e6fcf0c62&src=" + textForSpeech + "&hl=" + language
+ "&c=" + contentType + "&r=" + speed + "&f=" + format + "&type=.wav";
}
private void stopCostsAtHangup( Session session )
{
AdapterConfig adapterConfig = session.getAdapterConfig();
//stop costs
try
{
log.info( String.format( "stopping charges for session: %s", ServerUtils.serialize( session ) ) );
if ( session.getStartTimestamp() != null && session.getReleaseTimestamp() != null
&& session.getDirection() != null )
{
DDRRecord ddrRecord = DDRUtils.updateDDRRecordOnCallStops( session.getDDRRecordId(),
adapterConfig.getOwner(), Long.parseLong( session.getStartTimestamp() ),
session.getAnswerTimestamp() != null ? Long.parseLong( session.getAnswerTimestamp() ) : null,
Long.parseLong( session.getReleaseTimestamp() ) );
//publish charges
Double totalCost = DDRUtils.calculateCommunicationDDRCost( ddrRecord, true );
DDRUtils.publishDDREntryToQueue( adapterConfig.getOwner(), totalCost );
}
}
catch ( Exception e )
{
String errorMessage = String.format(
"Applying charges failed. Direction: %s for adapterId: %s with address: %s remoteId: %s and localId: %s \n Error: %s",
session.getDirection(), session.getAdapterID(), adapterConfig.getMyAddress(),
session.getRemoteAddress(), session.getLocalAddress(), e.getLocalizedMessage() );
log.severe( errorMessage );
dialogLog.severe( session.getAdapterConfig().getConfigId(), errorMessage );
}
}
}
| update: fixed charging incoming redirection | dialoghandler/src/main/java/com/almende/dialog/adapter/VoiceXMLRESTProxy.java | update: fixed charging incoming redirection | <ide><path>ialoghandler/src/main/java/com/almende/dialog/adapter/VoiceXMLRESTProxy.java
<ide> session.setDirection( direction );
<ide> session.setRemoteAddress( remoteID );
<ide> session.setType( AdapterAgent.ADAPTER_TYPE_BROADSOFT );
<del> session.setPubKey( config.getPublicKey() );
<add> session.setAccountId( config.getOwner() );
<ide> session.setTrackingToken( UUID.randomUUID().toString() );
<ide> session.setAdapterID( config.getConfigId() );
<ide>
<ide>
<ide> log.info( String.format( "Session key: %s with remote: %s and local %s", sessionKey,
<ide> session.getRemoteAddress(), session.getLocalAddress() ) );
<add> //update the session timings
<add> session.setStartTimestamp( startTime );
<add> session.setAnswerTimestamp( answerTime );
<add> session.setReleaseTimestamp( releaseTime );
<add> session.storeSession();
<ide> Question question = session.getQuestion();
<ide> String referredCalledId = null;
<ide> if ( question != null )
<ide> remoteID = session.getRemoteAddress();
<ide> referredCalledId = session.getExtras().get( "referredCalledId" );
<ide> }
<del> else {
<del>
<add> else
<add> {
<ide> question = Question.fromURL( session.getStartUrl(), session.getAdapterConfig().getConfigId(), remoteID,
<ide> localID );
<ide> }
<ide> log.info( "call answered with:" + direction + "_" + remoteID + "_" + localID );
<ide> String sessionKey = AdapterAgent.ADAPTER_TYPE_BROADSOFT+"|"+localID+"|"+remoteID.split( "@" )[0]; //ignore the @outbound suffix
<ide> Session session = Session.getSession(sessionKey);
<del> log.info( "question from session got: "+ session.getSession_id() );
<add> //update the session timings
<add> session.setStartTimestamp( startTime );
<add> session.setAnswerTimestamp( answerTime );
<add> session.setReleaseTimestamp( releaseTime );
<add> session.storeSession();
<ide> Question question = session.getQuestion();
<ide> //for direction = transfer (redirect event), json should not be null
<ide> if ( question != null )
<ide> Node rpChild = remoteParty.getChildNodes().item(i);
<ide> if(rpChild.getNodeName().equals("address")) {
<ide> address=rpChild.getTextContent();
<del> }else if(rpChild.getNodeName().equals("userId")){
<del> address=rpChild.getTextContent().replace( "@ask.ask.voipit.nl", "" );
<del> }
<del> else if(rpChild.getNodeName().equals("callType")) {
<add> }else if(rpChild.getNodeName().equals("callType")) {
<ide> type=rpChild.getTextContent();
<ide> }
<ide> }
<ide> "Temporarily Unavailable" ) && !releaseCause.getTextContent()
<ide> .equalsIgnoreCase( "User Not Found" ) ) )
<ide> {
<add> session.setDirection( direction );
<add> session.setAnswerTimestamp( answerTimeString );
<add> session.setReleaseTimestamp( releaseTimeString );
<add> session.setStartTimestamp( startTimeString );
<add> if ( session.getQuestion() == null )
<add> {
<add> Question questionFromIncomingCall = Session
<add> .getQuestionFromDifferentSession( config.getConfigId(), "inbound",
<add> "referredCalledId", session.getRemoteAddress() );
<add> if ( questionFromIncomingCall != null )
<add> {
<add> session.setQuestion( questionFromIncomingCall );
<add> session.storeSession();
<add> }
<add> }
<add> session.storeSession();
<ide> answered( direction, address, config.getMyAddress(), startTimeString,
<ide> answerTimeString, releaseTimeString );
<ide> }
<ide> {
<ide> // added for release0.4.2 to store the question in the session,
<ide> //for triggering an answered event
<del> log.info( String.format( "session key at handle question is: %s and remoteId %s",
<del> sessionKey, remoteID ) );
<del> String[] sessionKeyArray = sessionKey.split( "\\|" );
<del> if ( sessionKeyArray.length == 3 )
<add> log.info( String.format( "current session key before referral is: %s and remoteId %s", sessionKey,
<add> remoteID ) );
<add> try
<ide> {
<del> try
<add> String redirectedId = PhoneNumberUtils.formatNumber( question.getUrl().replace( "tel:", "" ),
<add> null );
<add> //update url with formatted redirecteId. RFC3966 returns format tel:<blabla> as expected
<add> question.setUrl( PhoneNumberUtils.formatNumber( redirectedId, PhoneNumberFormat.RFC3966 ) );
<add> //store the remoteId as its lost while trying to trigger the answered event
<add> HashMap<String, String> extras = new HashMap<String, String>();
<add> extras.put( "referredCalledId", redirectedId );
<add> session.getExtras().putAll( extras );
<add> session.setQuestion( question );
<add> session.setRemoteAddress( remoteID );
<add> //create a new ddr record and session to catch the redirect
<add> Session referralSession = Session.getSession( AdapterAgent.ADAPTER_TYPE_BROADSOFT + "|"
<add> + session.getLocalAddress() + "|" + redirectedId );
<add> if ( session.getDirection() != null
<add> && ( session.getDirection().equals( "outbound" ) || session.getDirection().equals(
<add> "transfer" ) ) )
<ide> {
<del> String redirectedId = PhoneNumberUtils
<del> .formatNumber( question.getUrl().replace( "tel:", "" ), null );
<del> //update url with formatted redirecteId. RFC3966 returns format tel:<blabla> as expected
<del> question.setUrl( PhoneNumberUtils.formatNumber( redirectedId, PhoneNumberFormat.RFC3966 ) );
<del> String transferKey = "transfer_" + redirectedId + "_" + sessionKeyArray[1];
<del> log.info( String.format( "referral question %s stored with key: %s", question.toJSON(),
<del> transferKey ) );
<del> //store the remoteId as its lost while trying to trigger the answered event
<del> HashMap<String, String> extras = new HashMap<String, String>();
<del> extras.put( "referredCalledId", redirectedId );
<del> session.getExtras().putAll( extras );
<del> session.setQuestion( question );
<del> session.setRemoteAddress( remoteID );
<del> session.storeSession();
<add> if ( referralSession != null && referralSession.getDDRRecordId() == null )
<add> {
<add> DDRRecord ddrRecord = DDRUtils.createDDRRecordOnOutgoingCommunication(
<add> AdapterConfig.getAdapterConfig( adapterID ), redirectedId, 1 );
<add> referralSession.setDDRRecordId( ddrRecord.getId() );
<add> referralSession.setDirection( session.getDirection() );
<add> }
<ide> }
<del> catch ( Exception e )
<add> else
<add> //if its an inbound call, create a new ddr with DDRTypeCategory: OUTGOING_COMMUNICATION_COST
<ide> {
<del> log.severe( String.format( "Phonenumber: %s is not valid",
<del> question.getUrl().replace( "tel:", "" ) ) );
<add> DDRRecord ddrRecordForIncomingReferral = DDRUtils.createDDRRecordOnOutgoingCommunication(
<add> session.getAdapterConfig(), redirectedId, 1 );
<add> referralSession.setDDRRecordId( ddrRecordForIncomingReferral.getId() );
<add> //save the referred ddrRecord id in the session
<add> session.getExtras().put( "referredDDRRecordId", ddrRecordForIncomingReferral.getId() );
<ide> }
<add> referralSession.storeSession();
<add> session.storeSession();
<ide> }
<del> else
<add> catch ( Exception e )
<ide> {
<del> log.warning( "Could not save question in session: " + sessionKey
<del> + " for answered event as sessionKeyArray length is: "
<del> + sessionKeyArray.length );
<add> log.severe( String.format( "Phonenumber: %s is not valid",
<add> question.getUrl().replace( "tel:", "" ) ) );
<ide> }
<ide> result = renderComment( question, res.prompts, sessionKey );
<ide> }
<ide>
<ide> //publish charges
<ide> Double totalCost = DDRUtils.calculateCommunicationDDRCost( ddrRecord, true );
<add>// //check if it was an incoming redirection
<add>// if(ddrRecord != null && session.getExtras().containsKey( "referredDDRRecordId" ))
<add>// {
<add>// DDRRecord refferedDdrRecord = DDRRecord.getDDRRecord(
<add>// session.getExtras().get( "referredDDRRecordId" ), session.getAccountId() );
<add>// if ( refferedDdrRecord != null && session.getAnswerTimestamp() != null)
<add>// {
<add>//// refferedDdrRecord.setStart( ddrRecord.getStart() );
<add>//// refferedDdrRecord.setStatus( ddrRecord.getStatus() );
<add>//// refferedDdrRecord.setDuration( ddrRecord.getDuration() );
<add>//// refferedDdrRecord.createOrUpdate();
<add>// totalCost += DDRUtils.calculateCommunicationDDRCost( refferedDdrRecord, false );
<add>// }
<add>// }
<ide> DDRUtils.publishDDREntryToQueue( adapterConfig.getOwner(), totalCost );
<ide> }
<ide> } |
|
Java | mit | 7e91e4e2119a34fb0630d096e27652810a39f1f4 | 0 | InnovateUKGitHub/innovation-funding-service,InnovateUKGitHub/innovation-funding-service,InnovateUKGitHub/innovation-funding-service,InnovateUKGitHub/innovation-funding-service,InnovateUKGitHub/innovation-funding-service | package org.innovateuk.ifs.nonifs.controller;
import org.innovateuk.ifs.BaseControllerMockMVCTest;
import org.innovateuk.ifs.competition.resource.CompetitionResource;
import org.innovateuk.ifs.nonifs.form.NonIfsDetailsForm;
import org.innovateuk.ifs.nonifs.formpopulator.NonIfsDetailsFormPopulator;
import org.innovateuk.ifs.nonifs.modelpopulator.NonIfsDetailsViewModelPopulator;
import org.innovateuk.ifs.nonifs.saver.NonIfsDetailsFormSaver;
import org.innovateuk.ifs.nonifs.viewmodel.NonIfsDetailsViewModel;
import org.junit.Test;
import org.mockito.Mock;
import org.springframework.test.web.servlet.MvcResult;
import org.springframework.validation.BindingResult;
import static org.innovateuk.ifs.commons.service.ServiceResult.serviceSuccess;
import static org.innovateuk.ifs.competition.builder.CompetitionResourceBuilder.newCompetitionResource;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.*;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.*;
import static org.springframework.validation.BindingResultUtils.getBindingResult;
public class NonIfsCompetitionControllerTest extends BaseControllerMockMVCTest<NonIfsCompetitionController> {
@Mock
public NonIfsDetailsFormPopulator nonIfsDetailsFormPopulator;
@Mock
public NonIfsDetailsViewModelPopulator nonIfsDetailsViewModelPopulator;
@Mock
public NonIfsDetailsFormSaver nonIfsDetailsFormSaver;
@Override
protected NonIfsCompetitionController supplyControllerUnderTest() {
return new NonIfsCompetitionController();
}
@Test
public void testCreate() throws Exception {
Long competitionId = 10L;
when(competitionService.createNonIfs()).thenReturn(newCompetitionResource().withId(competitionId).build());
mockMvc.perform(get("/non-ifs-competition/create/"))
.andExpect(status().is3xxRedirection())
.andExpect(redirectedUrl("/non-ifs-competition/setup/"+competitionId));
}
@Test
public void testDetails() throws Exception {
Long competitionId = 11L;
CompetitionResource competitionResource = newCompetitionResource().withId(competitionId).withNonIfs(true).build();
NonIfsDetailsForm nonIfsDetailsForm = new NonIfsDetailsForm();
NonIfsDetailsViewModel nonIfsDetailsViewModel = new NonIfsDetailsViewModel();
when(competitionService.getById(competitionId)).thenReturn(competitionResource);
when(nonIfsDetailsFormPopulator.populate(competitionResource)).thenReturn(nonIfsDetailsForm);
when(nonIfsDetailsViewModelPopulator.populate()).thenReturn(nonIfsDetailsViewModel);
mockMvc.perform(get("/non-ifs-competition/setup/"+competitionId))
.andExpect(status().is2xxSuccessful())
.andExpect(view().name("competition/non-ifs-details"))
.andExpect(model().attribute("model", nonIfsDetailsViewModel))
.andExpect(model().attribute("form", nonIfsDetailsForm));
}
@Test
public void testDetails_whenCompetitionIsIfsRedirectionShouldOccur() throws Exception {
Long competitionId = 11L;
CompetitionResource competitionResource = newCompetitionResource().withId(competitionId).withNonIfs(false).build();
when(competitionService.getById(competitionId)).thenReturn(competitionResource);
mockMvc.perform(get("/non-ifs-competition/setup/"+competitionId))
.andExpect(status().is3xxRedirection())
.andExpect(redirectedUrl("/competition/setup/"+competitionId));
}
@Test
public void testSave() throws Exception {
Long competitionId = 11L;
CompetitionResource competitionResource = newCompetitionResource().withId(competitionId).withNonIfs(true).build();
when(competitionService.getById(competitionId)).thenReturn(competitionResource);
when(nonIfsDetailsFormSaver.save(any(), any())).thenReturn(serviceSuccess());
mockMvc.perform(post("/non-ifs-competition/setup/"+competitionId)
.param("title", "Competition Title")
.param("innovationSectorCategoryId", "12")
.param("innovationAreaCategoryId", "13")
.param("openDate.year", "2017")
.param("openDate.month", "01")
.param("openDate.day", "01")
.param("closeDate.year", "2017")
.param("closeDate.month", "01")
.param("closeDate.day", "01")
.param("closeDate.time", "NINE_AM")
.param("applicantNotifiedDate.year", "2017")
.param("applicantNotifiedDate.month", "01")
.param("applicantNotifiedDate.day", "01")
.param("url","https://worth.systems"))
.andExpect(status().is3xxRedirection())
.andExpect(redirectedUrl("/competition/setup/public-content/" + competitionId));
verify(nonIfsDetailsFormSaver, times(1)).save(any(), any());
}
@Test
public void testSave_emptyInputsShouldReturnAppropriateErrors() throws Exception {
Long competitionId = 11L;
CompetitionResource competitionResource = newCompetitionResource().withId(competitionId).withNonIfs(true).build();
when(competitionService.getById(competitionId)).thenReturn(competitionResource);
when(nonIfsDetailsFormSaver.save(any(), any())).thenReturn(serviceSuccess());
MvcResult mvcResult = mockMvc.perform(post("/non-ifs-competition/setup/"+competitionId)
.param("title", "")
.param("innovationSectorCategoryId", "")
.param("innovationAreaCategoryId", "")
.param("openDate.year", "")
.param("openDate.month", "")
.param("openDate.day", "")
.param("closeDate.year", "")
.param("closeDate.month", "")
.param("closeDate.day", "")
.param("closeDate.time", "")
.param("applicantNotifiedDate.year", "")
.param("applicantNotifiedDate.month", "")
.param("applicantNotifiedDate.day", "")
.param("url",""))
.andExpect(status().is2xxSuccessful())
.andReturn();
BindingResult bindingResult = getBindingResult(
mvcResult.getModelAndView().getModelMap(), "form"
);
assertTrue(bindingResult.getFieldError("url").getDefaultMessage().equals("Please enter a competition URL."));
assertTrue(bindingResult.getFieldError("title").getDefaultMessage().equals("Please enter a title."));
assertTrue(bindingResult.getFieldError("openDate").getDefaultMessage().equals("Please enter a valid date."));
assertTrue(bindingResult.getFieldError("applicantNotifiedDate").getDefaultMessage().equals("Please enter a valid date."));
assertTrue(bindingResult.getFieldError("closeDate").getDefaultMessage().equals("Please enter a valid date."));
assertTrue(bindingResult.getFieldError("innovationSectorCategoryId").getDefaultMessage().equals("This field cannot be left blank."));
assertTrue(bindingResult.getFieldError("innovationAreaCategoryId").getDefaultMessage().equals("This field cannot be left blank."));
verifyZeroInteractions(nonIfsDetailsFormSaver);
}
@Test
public void testSave_noParametersShouldReturnAppropriateErrors() throws Exception {
Long competitionId = 11L;
CompetitionResource competitionResource = newCompetitionResource().withId(competitionId).withNonIfs(true).build();
when(competitionService.getById(competitionId)).thenReturn(competitionResource);
when(nonIfsDetailsFormSaver.save(any(), any())).thenReturn(serviceSuccess());
MvcResult mvcResult = mockMvc.perform(post("/non-ifs-competition/setup/"+competitionId))
.andExpect(status().is2xxSuccessful())
.andReturn();
BindingResult bindingResult = getBindingResult(
mvcResult.getModelAndView().getModelMap(), "form"
);
assertEquals("Please enter a competition URL.", bindingResult.getFieldError("url").getDefaultMessage());
assertEquals("Please enter a title.",bindingResult.getFieldError("title").getDefaultMessage());
assertEquals("Please enter an open date.", bindingResult.getFieldError("openDate").getDefaultMessage());
assertEquals("Please enter an applicants notified date.", bindingResult.getFieldError("applicantNotifiedDate").getDefaultMessage());
assertEquals("Please enter a close date.", bindingResult.getFieldError("closeDate").getDefaultMessage());
assertEquals("This field cannot be left blank.", bindingResult.getFieldError("innovationSectorCategoryId").getDefaultMessage());
assertEquals("This field cannot be left blank.", bindingResult.getFieldError("innovationAreaCategoryId").getDefaultMessage());
verifyZeroInteractions(nonIfsDetailsFormSaver);
}
@Test
public void testSave_yearFieldsUnderAndAboveLimitsAreRejectd() throws Exception {
Long competitionId = 11L;
CompetitionResource competitionResource = newCompetitionResource().withId(competitionId).withNonIfs(true).build();
when(competitionService.getById(competitionId)).thenReturn(competitionResource);
when(nonIfsDetailsFormSaver.save(any(), any())).thenReturn(serviceSuccess());
MvcResult mvcResult = mockMvc.perform(post("/non-ifs-competition/setup/"+competitionId)
.param("openDate.year", "1999")
.param("openDate.month", "")
.param("openDate.day", "")
.param("closeDate.year", "10000")
.param("closeDate.month", "")
.param("closeDate.day", "")
.param("closeDate.time", "")
.param("applicantNotifiedDate.year", "-1")
.param("applicantNotifiedDate.month", "")
.param("applicantNotifiedDate.day", ""))
.andExpect(status().is2xxSuccessful())
.andReturn();
BindingResult bindingResult = getBindingResult(
mvcResult.getModelAndView().getModelMap(), "form"
);
assertEquals("Please enter a valid date.", bindingResult.getFieldError("openDate.year").getDefaultMessage());
assertEquals("Please enter a valid date.", bindingResult.getFieldError("applicantNotifiedDate.year").getDefaultMessage());
assertEquals("Please enter a valid date.", bindingResult.getFieldError("closeDate.year").getDefaultMessage());
verifyZeroInteractions(nonIfsDetailsFormSaver);
}
@Test
public void testSave_impossibleDatesShouldBeRejected() throws Exception {
Long competitionId = 11L;
CompetitionResource competitionResource = newCompetitionResource().withId(competitionId).withNonIfs(true).build();
when(competitionService.getById(competitionId)).thenReturn(competitionResource);
when(nonIfsDetailsFormSaver.save(any(), any())).thenReturn(serviceSuccess());
MvcResult mvcResult = mockMvc.perform(post("/non-ifs-competition/setup/"+competitionId)
.param("openDate.year", "01")
.param("openDate.month", "13")
.param("openDate.day", "32")
.param("closeDate.year", "2017")
.param("closeDate.month", "-1")
.param("closeDate.day", "01")
.param("applicantNotifiedDate.year", "2017")
.param("applicantNotifiedDate.month", "02")
.param("applicantNotifiedDate.day", "29"))
.andExpect(status().is2xxSuccessful())
.andReturn();
BindingResult bindingResult = getBindingResult(
mvcResult.getModelAndView().getModelMap(), "form"
);
assertEquals("Please enter a valid date.", bindingResult.getFieldError("openDate").getDefaultMessage());
assertEquals("Please enter a valid date.", bindingResult.getFieldError("applicantNotifiedDate").getDefaultMessage());
assertEquals("Please enter a valid date.", bindingResult.getFieldError("closeDate").getDefaultMessage());
verifyZeroInteractions(nonIfsDetailsFormSaver);
}
} | ifs-web-service/ifs-competition-mgt-service/src/test/java/org/innovateuk/ifs/nonifs/controller/NonIfsCompetitionControllerTest.java | package org.innovateuk.ifs.nonifs.controller;
import org.innovateuk.ifs.BaseControllerMockMVCTest;
import org.innovateuk.ifs.competition.resource.CompetitionResource;
import org.innovateuk.ifs.nonifs.form.NonIfsDetailsForm;
import org.innovateuk.ifs.nonifs.formpopulator.NonIfsDetailsFormPopulator;
import org.innovateuk.ifs.nonifs.modelpopulator.NonIfsDetailsViewModelPopulator;
import org.innovateuk.ifs.nonifs.saver.NonIfsDetailsFormSaver;
import org.innovateuk.ifs.nonifs.viewmodel.NonIfsDetailsViewModel;
import org.junit.Test;
import org.mockito.Mock;
import org.springframework.test.web.servlet.MvcResult;
import org.springframework.validation.BindingResult;
import static org.innovateuk.ifs.commons.service.ServiceResult.serviceSuccess;
import static org.innovateuk.ifs.competition.builder.CompetitionResourceBuilder.newCompetitionResource;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.*;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.*;
import static org.springframework.validation.BindingResultUtils.getBindingResult;
public class NonIfsCompetitionControllerTest extends BaseControllerMockMVCTest<NonIfsCompetitionController> {
@Mock
public NonIfsDetailsFormPopulator nonIfsDetailsFormPopulator;
@Mock
public NonIfsDetailsViewModelPopulator nonIfsDetailsViewModelPopulator;
@Mock
public NonIfsDetailsFormSaver nonIfsDetailsFormSaver;
@Override
protected NonIfsCompetitionController supplyControllerUnderTest() {
return new NonIfsCompetitionController();
}
@Test
public void testCreate() throws Exception {
Long competitionId = 10L;
when(competitionService.createNonIfs()).thenReturn(newCompetitionResource().withId(competitionId).build());
mockMvc.perform(get("/non-ifs-competition/create/"))
.andExpect(status().is3xxRedirection())
.andExpect(redirectedUrl("/non-ifs-competition/setup/"+competitionId));
}
@Test
public void testDetails() throws Exception {
Long competitionId = 11L;
CompetitionResource competitionResource = newCompetitionResource().withId(competitionId).withNonIfs(true).build();
NonIfsDetailsForm nonIfsDetailsForm = new NonIfsDetailsForm();
NonIfsDetailsViewModel nonIfsDetailsViewModel = new NonIfsDetailsViewModel();
when(competitionService.getById(competitionId)).thenReturn(competitionResource);
when(nonIfsDetailsFormPopulator.populate(competitionResource)).thenReturn(nonIfsDetailsForm);
when(nonIfsDetailsViewModelPopulator.populate()).thenReturn(nonIfsDetailsViewModel);
mockMvc.perform(get("/non-ifs-competition/setup/"+competitionId))
.andExpect(status().is2xxSuccessful())
.andExpect(view().name("competition/non-ifs-details"))
.andExpect(model().attribute("model", nonIfsDetailsViewModel))
.andExpect(model().attribute("form", nonIfsDetailsForm));
}
@Test
public void testDetails_whenCompetitionIsIfsRedirectionShouldOccur() throws Exception {
Long competitionId = 11L;
CompetitionResource competitionResource = newCompetitionResource().withId(competitionId).withNonIfs(false).build();
when(competitionService.getById(competitionId)).thenReturn(competitionResource);
mockMvc.perform(get("/non-ifs-competition/setup/"+competitionId))
.andExpect(status().is3xxRedirection())
.andExpect(redirectedUrl("/competition/setup/"+competitionId));
}
@Test
public void testSave() throws Exception {
Long competitionId = 11L;
CompetitionResource competitionResource = newCompetitionResource().withId(competitionId).withNonIfs(true).build();
when(competitionService.getById(competitionId)).thenReturn(competitionResource);
when(nonIfsDetailsFormSaver.save(any(), any())).thenReturn(serviceSuccess());
mockMvc.perform(post("/non-ifs-competition/setup/"+competitionId)
.param("title", "Competition Title")
.param("innovationSectorCategoryId", "12")
.param("innovationAreaCategoryId", "13")
.param("openDate.year", "2017")
.param("openDate.month", "01")
.param("openDate.day", "01")
.param("closeDate.year", "2017")
.param("closeDate.month", "01")
.param("closeDate.day", "01")
.param("closeDate.time", "NINE_AM")
.param("applicantNotifiedDate.year", "2017")
.param("applicantNotifiedDate.month", "01")
.param("applicantNotifiedDate.day", "01")
.param("url","https://worth.systems"))
.andExpect(status().is3xxRedirection())
.andExpect(redirectedUrl("/competition/setup/public-content/" + competitionId));
verify(nonIfsDetailsFormSaver, times(1)).save(any(), any());
}
@Test
public void testSave_emptyInputsShouldReturnAppropriateErrors() throws Exception {
Long competitionId = 11L;
CompetitionResource competitionResource = newCompetitionResource().withId(competitionId).withNonIfs(true).build();
when(competitionService.getById(competitionId)).thenReturn(competitionResource);
when(nonIfsDetailsFormSaver.save(any(), any())).thenReturn(serviceSuccess());
MvcResult mvcResult = mockMvc.perform(post("/non-ifs-competition/setup/"+competitionId)
.param("title", "")
.param("innovationSectorCategoryId", "")
.param("innovationAreaCategoryId", "")
.param("openDate.year", "")
.param("openDate.month", "")
.param("openDate.day", "")
.param("closeDate.year", "")
.param("closeDate.month", "")
.param("closeDate.day", "")
.param("closeDate.time", "")
.param("applicantNotifiedDate.year", "")
.param("applicantNotifiedDate.month", "")
.param("applicantNotifiedDate.day", "")
.param("url",""))
.andExpect(status().is2xxSuccessful())
.andReturn();
BindingResult bindingResult = getBindingResult(
mvcResult.getModelAndView().getModelMap(), "form"
);
assertTrue(bindingResult.getFieldError("url").getDefaultMessage().equals("Please enter a competition URL."));
assertTrue(bindingResult.getFieldError("title").getDefaultMessage().equals("Please enter a title."));
assertTrue(bindingResult.getFieldError("openDate").getDefaultMessage().equals("Please enter a valid date."));
assertTrue(bindingResult.getFieldError("applicantNotifiedDate").getDefaultMessage().equals("Please enter a valid date."));
assertTrue(bindingResult.getFieldError("closeDate").getDefaultMessage().equals("Please enter a valid date."));
assertTrue(bindingResult.getFieldError("innovationSectorCategoryId").getDefaultMessage().equals("This field cannot be left blank."));
assertTrue(bindingResult.getFieldError("innovationAreaCategoryId").getDefaultMessage().equals("This field cannot be left blank."));
verifyZeroInteractions(nonIfsDetailsFormSaver);
}
@Test
public void testSave_noParametersShouldReturnAppropriateErrors() throws Exception {
Long competitionId = 11L;
CompetitionResource competitionResource = newCompetitionResource().withId(competitionId).withNonIfs(true).build();
when(competitionService.getById(competitionId)).thenReturn(competitionResource);
when(nonIfsDetailsFormSaver.save(any(), any())).thenReturn(serviceSuccess());
MvcResult mvcResult = mockMvc.perform(post("/non-ifs-competition/setup/"+competitionId))
.andExpect(status().is2xxSuccessful())
.andReturn();
BindingResult bindingResult = getBindingResult(
mvcResult.getModelAndView().getModelMap(), "form"
);
assertEquals("Please enter a competition URL.", bindingResult.getFieldError("url").getDefaultMessage());
assertEquals("Please enter a title.",bindingResult.getFieldError("title").getDefaultMessage());
assertEquals("Please enter an open date.", bindingResult.getFieldError("openDate").getDefaultMessage());
assertEquals("Please enter an applicants notified date.", bindingResult.getFieldError("applicantNotifiedDate").getDefaultMessage());
assertEquals("Please enter a close date.", bindingResult.getFieldError("closeDate").getDefaultMessage());
assertEquals("This field cannot be left blank.", bindingResult.getFieldError("innovationSectorCategoryId").getDefaultMessage());
assertEquals("This field cannot be left blank.", bindingResult.getFieldError("innovationAreaCategoryId").getDefaultMessage());
verifyZeroInteractions(nonIfsDetailsFormSaver);
}
@Test
public void testSave_yearFieldsUnderAndAboveLimitsAreRejectd() throws Exception {
Long competitionId = 11L;
CompetitionResource competitionResource = newCompetitionResource().withId(competitionId).withNonIfs(true).build();
when(competitionService.getById(competitionId)).thenReturn(competitionResource);
when(nonIfsDetailsFormSaver.save(any(), any())).thenReturn(serviceSuccess());
MvcResult mvcResult = mockMvc.perform(post("/non-ifs-competition/setup/"+competitionId)
.param("openDate.year", "1999")
.param("openDate.month", "")
.param("openDate.day", "")
.param("closeDate.year", "10000")
.param("closeDate.month", "")
.param("closeDate.day", "")
.param("closeDate.time", "")
.param("applicantNotifiedDate.year", "-1")
.param("applicantNotifiedDate.month", "")
.param("applicantNotifiedDate.day", ""))
.andExpect(status().is2xxSuccessful())
.andReturn();
BindingResult bindingResult = getBindingResult(
mvcResult.getModelAndView().getModelMap(), "form"
);
assertEquals("Please enter a year between 2000 and 9999.", bindingResult.getFieldError("openDate.year").getDefaultMessage());
assertEquals("Please enter a year between 2000 and 9999.", bindingResult.getFieldError("applicantNotifiedDate.year").getDefaultMessage());
assertEquals("Please enter a year between 2000 and 9999.", bindingResult.getFieldError("closeDate.year").getDefaultMessage());
verifyZeroInteractions(nonIfsDetailsFormSaver);
}
@Test
public void testSave_impossibleDatesShouldBeRejected() throws Exception {
Long competitionId = 11L;
CompetitionResource competitionResource = newCompetitionResource().withId(competitionId).withNonIfs(true).build();
when(competitionService.getById(competitionId)).thenReturn(competitionResource);
when(nonIfsDetailsFormSaver.save(any(), any())).thenReturn(serviceSuccess());
MvcResult mvcResult = mockMvc.perform(post("/non-ifs-competition/setup/"+competitionId)
.param("openDate.year", "01")
.param("openDate.month", "13")
.param("openDate.day", "32")
.param("closeDate.year", "2017")
.param("closeDate.month", "-1")
.param("closeDate.day", "01")
.param("applicantNotifiedDate.year", "2017")
.param("applicantNotifiedDate.month", "02")
.param("applicantNotifiedDate.day", "29"))
.andExpect(status().is2xxSuccessful())
.andReturn();
BindingResult bindingResult = getBindingResult(
mvcResult.getModelAndView().getModelMap(), "form"
);
assertEquals("Please enter a valid date.", bindingResult.getFieldError("openDate").getDefaultMessage());
assertEquals("Please enter a valid date.", bindingResult.getFieldError("applicantNotifiedDate").getDefaultMessage());
assertEquals("Please enter a valid date.", bindingResult.getFieldError("closeDate").getDefaultMessage());
verifyZeroInteractions(nonIfsDetailsFormSaver);
}
} | INFUND-9087 - Fixes NonIfsCompetitionControllerTest
| ifs-web-service/ifs-competition-mgt-service/src/test/java/org/innovateuk/ifs/nonifs/controller/NonIfsCompetitionControllerTest.java | INFUND-9087 - Fixes NonIfsCompetitionControllerTest | <ide><path>fs-web-service/ifs-competition-mgt-service/src/test/java/org/innovateuk/ifs/nonifs/controller/NonIfsCompetitionControllerTest.java
<ide> mvcResult.getModelAndView().getModelMap(), "form"
<ide> );
<ide>
<del> assertEquals("Please enter a year between 2000 and 9999.", bindingResult.getFieldError("openDate.year").getDefaultMessage());
<del> assertEquals("Please enter a year between 2000 and 9999.", bindingResult.getFieldError("applicantNotifiedDate.year").getDefaultMessage());
<del> assertEquals("Please enter a year between 2000 and 9999.", bindingResult.getFieldError("closeDate.year").getDefaultMessage());
<add> assertEquals("Please enter a valid date.", bindingResult.getFieldError("openDate.year").getDefaultMessage());
<add> assertEquals("Please enter a valid date.", bindingResult.getFieldError("applicantNotifiedDate.year").getDefaultMessage());
<add> assertEquals("Please enter a valid date.", bindingResult.getFieldError("closeDate.year").getDefaultMessage());
<ide>
<ide> verifyZeroInteractions(nonIfsDetailsFormSaver);
<ide> |
|
Java | mit | 9548abb14f05e6e550ced0d4bfd17fca250da4ee | 0 | nvenky/excel-parser | package com.thoughtworks.excelparser.helper;
import com.thoughtworks.excelparser.exception.ExcelParsingException;
import lombok.extern.slf4j.Slf4j;
import org.apache.poi.hssf.usermodel.HSSFCell;
import org.apache.poi.hssf.usermodel.HSSFDateUtil;
import org.apache.poi.ss.usermodel.Cell;
import org.apache.poi.ss.usermodel.Row;
import org.apache.poi.ss.usermodel.Sheet;
import java.text.DecimalFormat;
import java.text.MessageFormat;
import java.util.Date;
@Slf4j
public class HSSFHelper {
private static final String DATA_TYPE_NOT_SUPPORTED = "{0} Data type not supported for parsing";
private static final String INVALID_NUMBER_FORMAT = "Invalid number found in sheet {0} at row {1}, column {2}";
private static final String INVALID_DATE_FORMAT = "Invalid date found in sheet {0} at row {1}, column {2}";
/**
* Returns the cell value. Supports Integer, Double, Long, String, Date.
*
* @param sheet HSSF Sheet.
* @param sheetName Sheet name.
* @param type Class (Integer, Double, etc.)
* @param row Row number (Same as excelsheet). API will reduce -1 and invoke
* POI API.
* @param col Column number (Same as excelsheet). API will reduce -1 and
* invoke POI API.
* @param zeroIfNull whether Zero should be returned for Number fields when data is
* not found in excel.
* @return Class.
* @throws ExcelParsingException
*/
@SuppressWarnings("unchecked")
public <T> T getCellValue(Sheet sheet, String sheetName, Class<T> type, Integer row, Integer col, boolean zeroIfNull)
throws ExcelParsingException {
Cell cell = getCell(sheet, row, col);
if (type.equals(String.class)) {
return cell == null ? null : (T) getStringCell(cell);
} else if (type.equals(Date.class)) {
return cell == null ? null : (T) getDateCell(cell, sheetName, row, col);
}
if (type.equals(Integer.class)) {
return (T) getIntegerCell(cell, zeroIfNull, sheetName, row, col);
} else if (type.equals(Double.class)) {
return (T) getDoubleCell(cell, zeroIfNull, sheetName, row, col);
} else if (type.equals(Long.class)) {
return (T) getLongCell(cell, zeroIfNull, sheetName, row, col);
}
throw new ExcelParsingException(getErrorMessage(DATA_TYPE_NOT_SUPPORTED, type.getName()));
}
/**
* Gets the cell in a sheet in the given row and column.
*/
Cell getCell(Sheet sheet, int rowNumber, int columnNumber) {
Row row = sheet.getRow(rowNumber - 1);
return row == null ? null : row.getCell(columnNumber - 1);
}
/**
* Gets the value of string in the cell.
*
* @param cell TODO
* @return date present in the given cell.
* @throws ExcelParsingException if the cell is of wrong type or the given location of cell is
* invalid.
*/
String getStringCell(Cell cell) throws ExcelParsingException {
if (cell.getCellType() == HSSFCell.CELL_TYPE_FORMULA) {
int type = cell.getCachedFormulaResultType();
switch (type) {
case HSSFCell.CELL_TYPE_NUMERIC:
DecimalFormat df = new DecimalFormat("###.#");
return df.format(cell.getNumericCellValue());
case HSSFCell.CELL_TYPE_ERROR:
return "";
case HSSFCell.CELL_TYPE_STRING:
return cell.getRichStringCellValue().getString().trim();
case HSSFCell.CELL_TYPE_BOOLEAN:
return "" + cell.getBooleanCellValue();
}
} else if (cell.getCellType() != HSSFCell.CELL_TYPE_NUMERIC) {
return cell.getRichStringCellValue().getString().trim();
}
DecimalFormat df = new DecimalFormat("###.#");
return df.format(cell.getNumericCellValue());
}
/**
* Gets the value of date cell.
*/
Date getDateCell(Cell cell, Object... errorMessageArgs) throws ExcelParsingException {
try {
if (!HSSFDateUtil.isCellDateFormatted(cell)) {
throw new ExcelParsingException(getErrorMessage(INVALID_DATE_FORMAT, errorMessageArgs));
}
} catch (IllegalStateException illegalStateException) {
throw new ExcelParsingException(getErrorMessage(INVALID_DATE_FORMAT, errorMessageArgs));
}
return HSSFDateUtil.getJavaDate(cell.getNumericCellValue());
}
/**
* @param errorMessage Error Message.
* @param errorMessageArgs arguments.
*/
private String getErrorMessage(String errorMessage, Object... errorMessageArgs) {
return MessageFormat.format(errorMessage, errorMessageArgs);
}
Double getDoubleCell(Cell cell, boolean zeroIfNull, Object... errorMessageArgs) throws ExcelParsingException {
if (cell == null) {
return zeroIfNull ? 0d : null;
}
switch (cell.getCellType()) {
case HSSFCell.CELL_TYPE_NUMERIC:
case HSSFCell.CELL_TYPE_FORMULA:
return cell.getNumericCellValue();
case HSSFCell.CELL_TYPE_BLANK:
return zeroIfNull ? 0d : null;
default:
throw new ExcelParsingException(getErrorMessage(INVALID_NUMBER_FORMAT, errorMessageArgs));
}
}
Long getLongCell(Cell cell, boolean zeroIfNull, Object... errorMessageArgs) throws ExcelParsingException {
Double doubleValue = getNumberWithoutDecimals(cell, zeroIfNull, errorMessageArgs);
return doubleValue == null ? null : doubleValue.longValue();
}
Integer getIntegerCell(Cell cell, boolean zeroIfNull, Object... errorMessageArgs) throws ExcelParsingException {
Double doubleValue = getNumberWithoutDecimals(cell, zeroIfNull, errorMessageArgs);
return doubleValue == null ? null : doubleValue.intValue();
}
private Double getNumberWithoutDecimals(Cell cell, boolean zeroIfNull, Object... errorMessageArgs)
throws ExcelParsingException {
Double doubleValue = getDoubleCell(cell, zeroIfNull, errorMessageArgs);
if (doubleValue != null && doubleValue % 1 != 0) {
throw new ExcelParsingException(getErrorMessage(INVALID_NUMBER_FORMAT, errorMessageArgs));
}
return doubleValue;
}
}
| src/main/java/com/thoughtworks/excelparser/helper/HSSFHelper.java | package com.thoughtworks.excelparser.helper;
import com.thoughtworks.excelparser.exception.ExcelParsingException;
import lombok.extern.slf4j.Slf4j;
import org.apache.poi.hssf.usermodel.HSSFCell;
import org.apache.poi.hssf.usermodel.HSSFDateUtil;
import org.apache.poi.ss.usermodel.Cell;
import org.apache.poi.ss.usermodel.Row;
import org.apache.poi.ss.usermodel.Sheet;
import java.text.DecimalFormat;
import java.text.MessageFormat;
import java.util.Date;
@Slf4j
public class HSSFHelper {
private static final String DATA_TYPE_NOT_SUPPORTED = "{0} Data type not supported for parsing";
private static final String INVALID_NUMBER_FORMAT = "Invalid number found in sheet {0} at row {1}, column {2}";
private static final String INVALID_DATE_FORMAT = "Invalid date found in sheet {0} at row {1}, column {2}";
/**
* Returns the cell value. Supports Integer, Double, Long, String, Date.
*
* @param sheet HSSF Sheet.
* @param sheetName Sheet name.
* @param type Class (Integer, Double, etc.)
* @param row Row number (Same as excelsheet). API will reduce -1 and invoke
* POI API.
* @param col Column number (Same as excelsheet). API will reduce -1 and
* invoke POI API.
* @param zeroIfNull whether Zero should be returned for Number fields when data is
* not found in excel.
* @return Class.
* @throws ExcelParsingException
*/
@SuppressWarnings("unchecked")
public <T> T getCellValue(Sheet sheet, String sheetName, Class<T> type, Integer row, Integer col, boolean zeroIfNull)
throws ExcelParsingException {
Cell cell = getCell(sheet, row, col);
if (type.equals(String.class)) {
return cell == null ? null : (T) getStringCell(cell);
} else if (type.equals(Date.class)) {
return cell == null ? null : (T) getDateCell(cell, sheetName, row, col);
}
if (type.equals(Integer.class)) {
return (T) getIntegerCell(cell, zeroIfNull, sheetName, row, col);
} else if (type.equals(Double.class)) {
return (T) getDoubleCell(cell, zeroIfNull, sheetName, row, col);
} else if (type.equals(Long.class)) {
return (T) getLongCell(cell, zeroIfNull, sheetName, row, col);
}
throw new ExcelParsingException(getErrorMessage(DATA_TYPE_NOT_SUPPORTED, type.getName()));
}
/**
* Gets the cell in a sheet in the given row and column.
*/
Cell getCell(Sheet sheet, int rowNumber, int columnNumber) {
Row row = sheet.getRow(rowNumber - 1);
return row == null ? null : row.getCell(columnNumber - 1);
}
/**
* Gets the value of string in the cell.
*
* @param cell TODO
* @return date present in the given cell.
* @throws ExcelParsingException if the cell is of wrong type or the given location of cell is
* invalid.
*/
String getStringCell(Cell cell) throws ExcelParsingException {
if (cell.getCellType() == HSSFCell.CELL_TYPE_FORMULA) {
int type = cell.getCachedFormulaResultType();
switch (type) {
case HSSFCell.CELL_TYPE_NUMERIC:
DecimalFormat df = new DecimalFormat("###.#");
return df.format(cell.getNumericCellValue());
case HSSFCell.CELL_TYPE_ERROR:
return "";
case HSSFCell.CELL_TYPE_STRING:
return cell.getRichStringCellValue().getString().trim();
case HSSFCell.CELL_TYPE_BOOLEAN:
return "" + cell.getBooleanCellValue();
}
} else if (cell.getCellType() != HSSFCell.CELL_TYPE_NUMERIC) {
return cell.getRichStringCellValue().getString().trim();
}
DecimalFormat df = new DecimalFormat("###.#");
return df.format(cell.getNumericCellValue());
}
/**
* Gets the value of date cell.
*
* @param cell TODO
* @param sheetName Sheet Name
* @param rowNumber the row number where the cell is placed.
* @param columnNumber the column number where the cell is placed
* @return date present in the given cell.
* @throws ExcelParsingException if the cell is of wrong type or the given location of cell is
* invalid.
*/
Date getDateCell(Cell cell, Object... errorMessageArgs) throws ExcelParsingException {
try {
if (!HSSFDateUtil.isCellDateFormatted(cell)) {
throw new ExcelParsingException(getErrorMessage(INVALID_DATE_FORMAT, errorMessageArgs));
}
} catch (IllegalStateException illegalStateException) {
throw new ExcelParsingException(getErrorMessage(INVALID_DATE_FORMAT, errorMessageArgs));
}
return HSSFDateUtil.getJavaDate(cell.getNumericCellValue());
}
/**
* @param errorMessage Error Message.
* @param errorMessageArgs arguments.
* @return
*/
private String getErrorMessage(String errorMessage, Object... errorMessageArgs) {
return MessageFormat.format(errorMessage, errorMessageArgs);
}
Double getDoubleCell(Cell cell, boolean zeroIfNull, Object... errorMessageArgs) throws ExcelParsingException {
if (cell == null) {
return zeroIfNull ? 0d : null;
}
switch (cell.getCellType()) {
case HSSFCell.CELL_TYPE_NUMERIC:
case HSSFCell.CELL_TYPE_FORMULA:
return cell.getNumericCellValue();
case HSSFCell.CELL_TYPE_BLANK:
return zeroIfNull ? 0d : null;
default:
throw new ExcelParsingException(getErrorMessage(INVALID_NUMBER_FORMAT, errorMessageArgs));
}
}
Long getLongCell(Cell cell, boolean zeroIfNull, Object... errorMessageArgs) throws ExcelParsingException {
Double doubleValue = getNumberWithoutDecimals(cell, zeroIfNull, errorMessageArgs);
return doubleValue == null ? null : doubleValue.longValue();
}
Integer getIntegerCell(Cell cell, boolean zeroIfNull, Object... errorMessageArgs) throws ExcelParsingException {
Double doubleValue = getNumberWithoutDecimals(cell, zeroIfNull, errorMessageArgs);
return doubleValue == null ? null : doubleValue.intValue();
}
private Double getNumberWithoutDecimals(Cell cell, boolean zeroIfNull, Object... errorMessageArgs)
throws ExcelParsingException {
Double doubleValue = getDoubleCell(cell, zeroIfNull, errorMessageArgs);
if (doubleValue != null && doubleValue % 1 != 0) {
throw new ExcelParsingException(getErrorMessage(INVALID_NUMBER_FORMAT, errorMessageArgs));
}
return doubleValue;
}
}
| Remove incorrect javadoc
| src/main/java/com/thoughtworks/excelparser/helper/HSSFHelper.java | Remove incorrect javadoc | <ide><path>rc/main/java/com/thoughtworks/excelparser/helper/HSSFHelper.java
<ide>
<ide> /**
<ide> * Gets the value of date cell.
<del> *
<del> * @param cell TODO
<del> * @param sheetName Sheet Name
<del> * @param rowNumber the row number where the cell is placed.
<del> * @param columnNumber the column number where the cell is placed
<del> * @return date present in the given cell.
<del> * @throws ExcelParsingException if the cell is of wrong type or the given location of cell is
<del> * invalid.
<ide> */
<ide> Date getDateCell(Cell cell, Object... errorMessageArgs) throws ExcelParsingException {
<ide> try {
<ide> /**
<ide> * @param errorMessage Error Message.
<ide> * @param errorMessageArgs arguments.
<del> * @return
<ide> */
<ide> private String getErrorMessage(String errorMessage, Object... errorMessageArgs) {
<ide> return MessageFormat.format(errorMessage, errorMessageArgs); |
|
Java | mit | 738a3cfb25dd5471d8130403e523be720078cd05 | 0 | 1234224576/Project_Exercis_Car,1234224576/Project_Exercis_Car,1234224576/Project_Exercis_Car | package simplerace;
public class MyController implements Controller, Constants {
public static final int CHANGE_COUNT = 5;
private SensorModel inputs;
private boolean backMode = true;//デフォルトはバック走行
private double reduceSpeedDistance = 0;
private boolean isMiss = false;
public void reset() {}
public int control (SensorModel inputs) {
this.inputs = inputs;
int command = backward;
// System.out.println(inputs.getPosition().x + " " + inputs.getSpeed());
command = defaultThink();
//理想スピードを計算
double idealSpeed = calcSpeedWhenGetNextFlag();
//減速開始位置を算出
reduceSpeedDistance = calcReduceSpeedDistance(Math.abs(inputs.getSpeed()),idealSpeed);
if(inputs.getDistanceToNextWaypoint() < 0.05){
//リセット
reduceSpeedDistance = 0;
}
//減速開始判定処理
if(Math.abs(radian2Degree(inputs.getAngleToNextWaypoint())) <= 5.0|| Math.abs(radian2Degree(inputs.getAngleToNextWaypoint())) >= 175.0){
if(reduceSpeedDistance >= inputs.getDistanceToNextWaypoint()){
//ブレーキを踏む
command = (backMode) ? forward : backward;
}
}
//バックモード/フロントモードへの切り替えの決定
// backMode = decisionBackMode();
//旗取り逃し処理。バックする
int c = missCatchFlag();
if(c != -1) command = c;
//旗を取る直前に次の旗へ向かってハンドルを切る
if(inputs.getDistanceToNextWaypoint() <= 0.08){
command = goFowardNextNextFlagDirection();
}
//旗を取る直前に次の旗へ向かってハンドルを切る
// if(inputs.getDistanceToNextWaypoint() <= 0.08){
// command = goFowardNextNextFlagDirection();
// }
return command;
}
/***
直進するか、次の旗の方向に向かってハンドルを切るかを決定する
***/
private int defaultThink(){
double currentAngle = radian2Degree(inputs.getAngleToNextWaypoint());
double allowGoFowardAngle = calcAllowFowardAngle(); //旗との距離から許容角度を算出
if(Math.abs(currentAngle) <= allowGoFowardAngle){
return backMode ? backward : forward; //バックモード時で分岐
}else{
return goFowardNextFlagDirection();
}
}
/***
バックモード/フロントモードの切り替えを判断する
***/
private boolean decisionBackMode(){
boolean result = this.backMode;
double distance = getTwoPointDistance(inputs.getNextWaypointPosition(),inputs.getNextNextWaypointPosition()); //次の旗と次と次の旗との距離
double angle = Math.abs(radian2Degree(inputs.getAngleToNextWaypoint()));
// System.out.println(Math.abs(inputs.getSpeed()));
if(Math.abs(inputs.getSpeed()) > 3.0) return result;
// if(distance > 0.5) return result;
if(angle >= 100 && angle <= 170.0 && !backMode){
return true;
}
if(angle <= 50.0 && backMode){
return false;
}
return result;
}
/***
旗を取り逃した時の処理(未完成)
***/
private int missCatchFlag(){
// System.out.println("距離:"+inputs.getDistanceToNextWaypoint());
// System.out.println("角度:"+Math.abs(radian2Degree(inputs.getAngleToNextWaypoint())));
if(inputs.getDistanceToNextWaypoint() < 0.05){
double angle = Math.abs(radian2Degree(inputs.getAngleToNextWaypoint()));
if(angle >=7.0 && angle <= 173.0){
isMiss = true;
}
}
if(isMiss && inputs.getDistanceToNextWaypoint() < 0.08){
// return (backMode) ? forward : backward;
return goFowardNextFlagReverseDirection();
}else{
isMiss = false;
}
return -1;
}
/***
直進許容角度を計算して返す
***/
private double calcAllowFowardAngle(){
double angle = 0;
double distance = this.inputs.getDistanceToNextWaypoint();
if(distance < 0.2){
angle = 5.0;
}else if(distance < 0.5){
angle = 2.0;
}else if(distance < 1){
angle = 0.5;
}else{
angle = 0.1;
}
return angle;
}
/***
次の旗を取得時の突入スピードを返す
***///
private double calcSpeedWhenGetNextFlag(){
double idealSpeed = 0; //理想突入スピード
double distance = getTwoPointDistance(inputs.getNextWaypointPosition(),inputs.getNextNextWaypointPosition()); //次の旗と次と次の旗との距離
double angle = getTwoPointDegreeTwo(inputs.getNextWaypointPosition(),inputs.getNextNextWaypointPosition()) - getCarDegree(); //次の旗と次の次の旗との角度
double angle2 = Math.abs(angle);
if(angle2 > 180){
angle2 = 360 - angle2;
}
//理想速度をそれっぽい計算で求める
double rightAngleDistance = distance / Math.cos(Math.toRadians(90-angle2));
double rightAngleDistance2 = Math.abs(rightAngleDistance);
double angleScore = 0;
double distanceScore = 0;
if(angle2 >= 90) {
if(rightAngleDistance2 <= 50) idealSpeed = 3.0;
else if(rightAngleDistance2 <= 100) idealSpeed = 3.5;
// else if(rightAngleDistance2 <= 150) idealSpeed = ;
// else if(rightAngleDistance2 <= 200) idealSpeed = ;
// else if(rightAngleDistance2 <= 250) idealSpeed = ;
else if(rightAngleDistance2 <= 300) idealSpeed = 4.6;
else if(rightAngleDistance2 <= 350) idealSpeed = 4.8;
else if(rightAngleDistance2 <= 400) idealSpeed = 5.0;
} else {
if(angle2 <= 10) angleScore = 5.0;
else if(angle2 <= 20) angleScore = 4.8;
else if(angle2 <= 30) angleScore = 4.6;
else if(angle2 <= 40) angleScore = 4.4;
// else if(angle2 <= 50) angleScore = ;
// else if(angle2 <= 60) angleScore = ;
// else if(angle2 <= 70) angleScore = ;
// else if(angle2 <= 80) angleScore = ;
else if(angle2 < 90) angleScore = 3.0;
if(rightAngleDistance2 <= 50) distanceScore = 3.0;
else if(rightAngleDistance2 <= 100) distanceScore = 3.5;
// else if(rightAngleDistance2 <= 150) distanceScore = ;
// else if(rightAngleDistance2 <= 200) distanceScore = ;
// else if(rightAngleDistance2 <= 250) distanceScore = ;
else if(rightAngleDistance2 <= 300) distanceScore = 4.6;
else if(rightAngleDistance2 <= 350) distanceScore = 4.8;
else if(rightAngleDistance2 <= 400) distanceScore = 5.0;
idealSpeed = (angleScore + distanceScore) / 2;
}
// System.out.println("cos距離:"+rightAngleDistance2);
// System.out.println("距離:"+distance);
return idealSpeed;
}
/***
今のスピードから指定スピードに減速するにはどれくらいの距離を要するかを計算する
***/
private double calcReduceSpeedDistance(double currentSpeed,double targetSpeed){
double cx = 0;
double tx = 0;
if(!backMode){
//フロントモード
cx = Math.pow(2.7,2.2*(Math.log(currentSpeed) - Math.log(6.06)));
tx = Math.pow(2.7, 2.2*(Math.log(targetSpeed) - Math.log(6.06)));
}else{
//計測しなおさないといけない
//バックモード
cx = Math.pow(2.7,2.3*(Math.log(currentSpeed) - Math.log(4.4)));
tx = Math.pow(2.7, 2.3*(Math.log(targetSpeed) - Math.log(4.4)));
}
// cx /= Math.pow(3600,0.5);
// tx /= Math.pow(3600,0.5);
double result = cx - tx;
// System.out.println(result+"\t"+inputs.getDistanceToNextWaypoint());
return result;
}
/***
コマンド取得系メソッド
***/
private int goFowardNextFlagDirection(){
if(this.inputs.getAngleToNextWaypoint() >= 0) {
return (backMode) ? backwardleft :forwardleft;
}
return (backMode) ? backwardright :forwardright;
}
private int goFowardNextFlagReverseDirection(){
if(this.inputs.getAngleToNextWaypoint() >= 0) {
return (backMode) ? backwardright :forwardright;
}
return (backMode) ? backwardleft :forwardleft;
}
private int goFowardNextNextFlagDirection(){
if(this.inputs.getAngleToNextNextWaypoint() >= 0) {
return (backMode) ? backwardleft : forwardleft;
}
return (backMode) ? backwardright : forwardright;
}
/***
ユーティリティ系メソッド
***/
protected double getTwoPointDistance(Vector2d v, Vector2d v2) {
double distance = Math.sqrt((v2.x - v.x) * (v2.x - v.x) + (v2.y - v.y) * (v2.y - v.y));
return distance;
}
private double getTwoPointDegree(Vector2d v1,Vector2d v2) {
double radian = Math.atan2(v2.x - v1.x, v2.y - v1.y);
double degree = radian2Degree(radian);
return degree;
}
private double radian2Degree(double rad){
double degree = rad * 180d / Math.PI;
return degree;
}
private double getCarDegree() {
double radian = inputs.getOrientation();
double degree = radian2Degree(radian);
return degree;
}
private double getTwoPointDegreeTwo(Vector2d v1,Vector2d v2) {
double radian = Math.atan2(v2.y - v1.y, v2.x - v1.x);
double degree = radian2Degree(radian);
return degree;
}
}
| carrace/carrace/simplerace/MyController.java | package simplerace;
public class MyController implements Controller, Constants {
public static final int CHANGE_COUNT = 5;
private SensorModel inputs;
private boolean backMode = true;//デフォルトはバック走行
private double reduceSpeedDistance = 0;
private boolean isMiss = false;
public void reset() {}
public int control (SensorModel inputs) {
this.inputs = inputs;
int command = backward;
// System.out.println(inputs.getPosition().x + " " + inputs.getSpeed());
command = defaultThink();
//理想スピードを計算
double idealSpeed = calcSpeedWhenGetNextFlag();
//減速開始位置を算出
reduceSpeedDistance = calcReduceSpeedDistance(Math.abs(inputs.getSpeed()),idealSpeed);
if(inputs.getDistanceToNextWaypoint() < 0.05){
//リセット
reduceSpeedDistance = 0;
}
//減速開始判定処理
if(Math.abs(radian2Degree(inputs.getAngleToNextWaypoint())) <= 5.0|| Math.abs(radian2Degree(inputs.getAngleToNextWaypoint())) >= 175.0){
if(reduceSpeedDistance >= inputs.getDistanceToNextWaypoint()){
//ブレーキを踏む
command = (backMode) ? forward : backward;
}
}
//バックモード/フロントモードへの切り替えの決定
// backMode = decisionBackMode();
//旗取り逃し処理。バックする
int c = missCatchFlag();
if(c != -1) command = c;
//旗を取る直前に次の旗へ向かってハンドルを切る
if(inputs.getDistanceToNextWaypoint() <= 0.08){
command = goFowardNextNextFlagDirection();
}
//旗を取る直前に次の旗へ向かってハンドルを切る
// if(inputs.getDistanceToNextWaypoint() <= 0.08){
// command = goFowardNextNextFlagDirection();
// }
return command;
}
/***
直進するか、次の旗の方向に向かってハンドルを切るかを決定する
***/
private int defaultThink(){
double currentAngle = radian2Degree(inputs.getAngleToNextWaypoint());
double allowGoFowardAngle = calcAllowFowardAngle(); //旗との距離から許容角度を算出
if(Math.abs(currentAngle) <= allowGoFowardAngle){
return backMode ? backward : forward; //バックモード時で分岐
}else{
return goFowardNextFlagDirection();
}
}
/***
バックモード/フロントモードの切り替えを判断する
***/
private boolean decisionBackMode(){
boolean result = this.backMode;
double distance = getTwoPointDistance(inputs.getNextWaypointPosition(),inputs.getNextNextWaypointPosition()); //次の旗と次と次の旗との距離
double angle = Math.abs(radian2Degree(inputs.getAngleToNextWaypoint()));
// System.out.println(Math.abs(inputs.getSpeed()));
if(Math.abs(inputs.getSpeed()) > 3.0) return result;
// if(distance > 0.5) return result;
if(angle >= 100 && angle <= 170.0 && !backMode){
return true;
}
if(angle <= 50.0 && backMode){
return false;
}
return result;
}
/***
旗を取り逃した時の処理(未完成)
***/
private int missCatchFlag(){
// System.out.println("距離:"+inputs.getDistanceToNextWaypoint());
// System.out.println("角度:"+Math.abs(radian2Degree(inputs.getAngleToNextWaypoint())));
if(inputs.getDistanceToNextWaypoint() < 0.05){
double angle = Math.abs(radian2Degree(inputs.getAngleToNextWaypoint()));
if(angle >=7.0 && angle <= 173.0){
isMiss = true;
}
}
if(isMiss && inputs.getDistanceToNextWaypoint() < 0.08){
// return (backMode) ? forward : backward;
return goFowardNextFlagReverseDirection();
}else{
isMiss = false;
}
return -1;
}
/***
直進許容角度を計算して返す
***/
private double calcAllowFowardAngle(){
double angle = 0;
double distance = this.inputs.getDistanceToNextWaypoint();
if(distance < 0.2){
angle = 5.0;
}else if(distance < 0.5){
angle = 2.0;
}else if(distance < 1){
angle = 0.5;
}else{
angle = 0.1;
}
return angle;
}
/***
次の旗を取得時の突入スピードを返す
***/
private double calcSpeedWhenGetNextFlag(){
double idealSpeed = 0; //理想突入スピード
double distance = getTwoPointDistance(inputs.getNextWaypointPosition(),inputs.getNextNextWaypointPosition()); //次の旗と次と次の旗との距離
double angle = getTwoPointDegreeTwo(inputs.getNextWaypointPosition(),inputs.getNextNextWaypointPosition()) - getCarDegree(); //次の旗と次の次の旗との角度
double angle2 = Math.abs(angle);
if(angle2 > 180){
angle2 = 360 - angle2;
}
//理想速度をそれっぽい計算で求める
double rightAngleDistance = distance / Math.cos(Math.toRadians(90-angle2));
double rightAngleDistance2 = Math.abs(rightAngleDistance);
double angleScore = 0;
double distanceScore = 0;
if(angle2 >= 90) {
if(rightAngleDistance2 <= 50) idealSpeed = 3.0;
else if(rightAngleDistance2 <= 100) idealSpeed = 3.5;
// else if(rightAngleDistance2 <= 150) idealSpeed = ;
// else if(rightAngleDistance2 <= 200) idealSpeed = ;
// else if(rightAngleDistance2 <= 250) idealSpeed = ;
else if(rightAngleDistance2 <= 300) idealSpeed = 4.6;
else if(rightAngleDistance2 <= 350) idealSpeed = 4.8;
else if(rightAngleDistance2 <= 400) idealSpeed = 5.0;
} else {
if(angle2 <= 10) angleScore = 5.0;
else if(angle2 <= 20) angleScore = 4.8;
else if(angle2 <= 30) angleScore = 4.6;
else if(angle2 <= 40) angleScore = 4.4;
// else if(angle2 <= 50) angleScore = ;
// else if(angle2 <= 60) angleScore = ;
// else if(angle2 <= 70) angleScore = ;
// else if(angle2 <= 80) angleScore = ;
else if(angle2 < 90) angleScore = 3.0;
if(rightAngleDistance2 <= 50) distanceScore = 3.0;
else if(rightAngleDistance2 <= 100) distanceScore = 3.5;
// else if(rightAngleDistance2 <= 150) distanceScore = ;
// else if(rightAngleDistance2 <= 200) distanceScore = ;
// else if(rightAngleDistance2 <= 250) distanceScore = ;
else if(rightAngleDistance2 <= 300) distanceScore = 4.6;
else if(rightAngleDistance2 <= 350) distanceScore = 4.8;
else if(rightAngleDistance2 <= 400) distanceScore = 5.0;
idealSpeed = (angleScore + distanceScore) / 2;
}
// System.out.println("cos距離:"+rightAngleDistance2);
// System.out.println("距離:"+distance);
return idealSpeed;
}
/***
今のスピードから指定スピードに減速するにはどれくらいの距離を要するかを計算する
***/
private double calcReduceSpeedDistance(double currentSpeed,double targetSpeed){
double cx = 0;
double tx = 0;
if(!backMode){
//フロントモード
cx = Math.pow(2.7,2.2*(Math.log(currentSpeed) - Math.log(6.06)));
tx = Math.pow(2.7, 2.2*(Math.log(targetSpeed) - Math.log(6.06)));
}else{
//計測しなおさないといけない
//バックモード
cx = Math.pow(2.7,2.3*(Math.log(currentSpeed) - Math.log(4.4)));
tx = Math.pow(2.7, 2.3*(Math.log(targetSpeed) - Math.log(4.4)));
}
// cx /= Math.pow(3600,0.5);
// tx /= Math.pow(3600,0.5);
double result = cx - tx;
// System.out.println(result+"\t"+inputs.getDistanceToNextWaypoint());
return result;
}
/***
コマンド取得系メソッド
***/
private int goFowardNextFlagDirection(){
if(this.inputs.getAngleToNextWaypoint() >= 0) {
return (backMode) ? backwardleft :forwardleft;
}
return (backMode) ? backwardright :forwardright;
}
private int goFowardNextFlagReverseDirection(){
if(this.inputs.getAngleToNextWaypoint() >= 0) {
return (backMode) ? backwardright :forwardright;
}
return (backMode) ? backwardleft :forwardleft;
}
private int goFowardNextNextFlagDirection(){
if(this.inputs.getAngleToNextNextWaypoint() >= 0) {
return (backMode) ? backwardleft : forwardleft;
}
return (backMode) ? backwardright : forwardright;
}
/***
ユーティリティ系メソッド
***/
protected double getTwoPointDistance(Vector2d v, Vector2d v2) {
double distance = Math.sqrt((v2.x - v.x) * (v2.x - v.x) + (v2.y - v.y) * (v2.y - v.y));
return distance;
}
private double getTwoPointDegree(Vector2d v1,Vector2d v2) {
double radian = Math.atan2(v2.x - v1.x, v2.y - v1.y);
double degree = radian2Degree(radian);
return degree;
}
private double radian2Degree(double rad){
double degree = rad * 180d / Math.PI;
return degree;
}
private double getCarDegree() {
double radian = inputs.getOrientation();
double degree = radian2Degree(radian);
return degree;
}
private double getTwoPointDegreeTwo(Vector2d v1,Vector2d v2) {
double radian = Math.atan2(v2.y - v1.y, v2.x - v1.x);
double degree = radian2Degree(radian);
return degree;
}
}
| no message
| carrace/carrace/simplerace/MyController.java | no message | <ide><path>arrace/carrace/simplerace/MyController.java
<ide>
<ide> /***
<ide> 次の旗を取得時の突入スピードを返す
<del> ***/
<add> ***///
<ide> private double calcSpeedWhenGetNextFlag(){
<ide> double idealSpeed = 0; //理想突入スピード
<ide> |
|
Java | mit | 0f85373bb6655acd4097d30fa126de31b4f05cbf | 0 | dbayub/SB_Elsinore_Server,DougEdey/SB_Elsinore_Server,DougEdey/SB_Elsinore_Server,DougEdey/SB_Elsinore_Server,dbayub/SB_Elsinore_Server,DougEdey/SB_Elsinore_Server,dbayub/SB_Elsinore_Server,dbayub/SB_Elsinore_Server | package ca.strangebrew.recipe;
import java.util.ArrayList;
import java.util.Collections;
import com.sb.elsinore.BrewServer;
import org.json.simple.JSONObject;
/**
* $Id: Mash.java,v 1.37 2008/01/16 17:55:04 jimcdiver Exp $
* @author aavis
*
*/
public class Mash {
// set this:
private double maltWeightLbs;
private Recipe myRecipe;
//options:
private double mashRatio;
private String mashRatioU;
private String tempUnits = "F";
private String volUnits = Quantity.GAL;
private double grainTempF;
private double boilTempF;
// private double thermalMass;
private double tunLossF;
private Quantity deadSpace = new Quantity();
private double thinDecoctRatio;
private double thickDecoctRatio;
private double cerealMashTemp;
private String name;
// calculated:
private double volQts;
private int totalTime;
private double absorbedQTS;
private double totalWaterQTS;
private double spargeQTS;
// steps:
private ArrayList<MashStep> steps = new ArrayList<MashStep>();
// configurable temps, can be set by the user:
// target temps are 1/2 between temp + next temp
private float ACIDTMPF = 85;
private float GLUCANTMPF = 95;
private float PROTEINTMPF = 113;
private float BETATMPF = 131;
private float ALPHATMPF = 151;
private float MASHOUTTMPF = 161;
private float SPARGETMPF = 170;
static final public String QT_PER_LB = "qt/lb";
static final public String L_PER_KG = "l/kg";
static final public String ACID = "acid";
static final public String GLUCAN = "glucan";
static final public String PROTEIN = "protein";
static final public String BETA = "beta";
static final public String ALPHA = "alpha";
static final public String MASHOUT = "mashout";
static final public String SPARGE = "sparge";
static final public String INFUSION = "infusion";
static final public String DECOCTION = "decoction";
static final public String DECOCTION_THICK = "decoction thick";
static final public String DECOCTION_THIN = "decoction thin";
static final public String DIRECT = "direct";
static final public String CEREAL_MASH = "cereal mash";
static final public String FLY = "fly";
static final public String BATCH = "batch";
static final public String[] ratioUnits = {QT_PER_LB, L_PER_KG};
static final public String[] types = {ACID, GLUCAN, PROTEIN, BETA, ALPHA, MASHOUT, SPARGE};
static final public String[] methods = {INFUSION, DECOCTION, DECOCTION_THICK, DECOCTION_THIN, DIRECT, CEREAL_MASH};
static final public String[] spargeMethods = {FLY, BATCH};
private String notes;
private double tunTemp;
private double spargeTemp;
private double ph;
private Quantity tunWeight;
private double tunSpecificHeat;
private boolean tunAdjust;
private double totalMashLbs;
public Mash(String name, Recipe recipe){
this.name = name;
this.myRecipe = recipe;
}
public void setNotes(String notes) {
this.notes = notes;
}
public String getNotes() {
return notes;
}
public void setTunTemp(double tunTemp) {
this.tunTemp = tunTemp;
}
public double getTunTemp() {
return tunTemp;
}
public void setSpargeTemp(double spargeTemp) {
this.spargeTemp = spargeTemp;
}
public double getSpargeTemp() {
return spargeTemp;
}
public void setPh(double ph) {
this.ph = ph;
}
public double getPh() {
return ph;
}
public void setTunWeight(double tunWeight) {
this.tunWeight = new Quantity();
this.tunWeight.setUnits(Quantity.KG);
this.tunWeight.setAmount(tunWeight);
}
public void setTunWeight(String weightString) {
this.tunWeight = new Quantity(weightString);
}
public Quantity getTunWeight() {
return tunWeight;
}
public void setTunSpecificHeat(double tunSpecificHeat) {
this.tunSpecificHeat = tunSpecificHeat;
}
public double getTunSpecificHeat() {
return tunSpecificHeat;
}
public void setTunAdjust(boolean tunAdjust) {
this.tunAdjust = tunAdjust;
}
public boolean isTunAdjust() {
return tunAdjust;
}
public double getMashRatio(int i) {
return steps.get(i).getMashRatio();
}
public String getMashRatioU(int i) {
return steps.get(i).getMashRatioU();
}
public String getStepInfuseTemp(int i) {
return steps.get(i).getInfuseTemp();
}
public String getStepTempU(int i) {
return steps.get(i).getStrikeTempU();
}
public String getStepName(int i) {
return steps.get(i).getName();
}
public double getStrikeTemp(int i) {
return steps.get(i).getStrikeTemp();
}
public String getDisplayStrikeTemp(int i) {
MashStep s = steps.get(i);
double temp = s.getStrikeTemp();
if (s.getStrikeTempU().equals("F")) {
temp = BrewCalcs.cToF(s.getStrikeTemp());
}
return temp + " " + s.getStrikeTempU();
}
public String getDisplayStepStartTemp(int i) {
MashStep s = steps.get(i);
double temp = s.getStartTemp();
if (s.getStrikeTempU().equals("F")) {
temp = BrewCalcs.cToF(s.getStartTemp());
}
return temp + " " + s.getStrikeTempU();
}
public double getTotalMashLbs() {
myRecipe.setAllowRecalcs(true);
myRecipe.calcMaltTotals();
myRecipe.setAllowRecalcs(false);
return myRecipe.getTotalMashLbs();
}
public class MashStep implements Comparable<MashStep> {
private String type;
private double startTemp;
private double endTemp;
private String method;
private int minutes;
private int rampMin;
private String directions;
private double temp;
private double weightLbs;
public Quantity inVol = new Quantity();
public Quantity outVol = new Quantity();
private double infuseTemp;
private String infuseTempUnit;
private double mashRatio;
private String mashRatioU;
private String strikeTempU = "C";
private double strikeTemp;
private String name;
// public Quantity decoctVol = new Quantity();
public MashStep(String type, double startTemp, double endTemp, String method, int min,
int rmin) {
this.type = type;
this.startTemp = startTemp;
this.endTemp = endTemp;
this.method = method;
minutes = min;
rampMin = rmin;
}
// default constructor:
public MashStep() {
rampMin = 0;
endTemp = ALPHATMPF + 1;
startTemp = ALPHATMPF + 1;
minutes = 60;
method = INFUSION;
type = ALPHA;
weightLbs = 0;
}
// getter/setter methods
public String getDirections() {
return directions;
}
public void setDirections(String directions) {
this.directions = directions;
}
public double getEndTemp() {
return endTemp;
}
public void setEndTemp(double endTemp) {
this.endTemp = endTemp;
}
public Quantity getInVol() {
return inVol;
}
public void setInVol(Quantity vol) {
this.inVol = vol;
}
public Quantity getOutVol() {
return outVol;
}
public void setOutVol(Quantity vol) {
this.outVol = vol;
}
public String getMethod() {
return method;
}
public void setMethod(String method) {
this.method = method;
}
public int getMinutes() {
return minutes;
}
public void setMinutes(int minutes) {
this.minutes = minutes;
}
public int getRampMin() {
return rampMin;
}
public void setRampMin(int rampMin) {
this.rampMin = rampMin;
}
public double getStartTemp() {
return startTemp;
}
public void setStartTemp(double startTemp) {
this.startTemp = startTemp;
}
public double getTemp() {
return temp;
}
public String getType() {
return type;
}
public void setType(String s) {
type = s;
}
public int compareTo(MashStep m) {
int result = ((Double)this.getStartTemp()).compareTo((Double)m.getStartTemp());
return (result == 0 ? -1 : result);
}
public void setInVol(double infuseAmount) {
this.inVol.setUnits(Quantity.LITRES);
this.inVol.setAmount(infuseAmount);
}
public void setInfuseTemp(String infuseTemp) {
String[] split = infuseTemp.trim().split(" ");
try {
this.infuseTemp = Double.parseDouble(split[0].trim().replace(",", ""));
this.infuseTempUnit = split[1].trim();
} catch (NumberFormatException nfe) {
System.out.println("Couldn't parse: " + split[0] + " as a number.");
nfe.printStackTrace();
}
}
public String getInfuseTemp() {
return infuseTemp + " " + this.getStrikeTempU();
}
public void setMashRatio(String mashRatio) {
try {
this.mashRatio = Double.parseDouble(mashRatio);
} catch (NumberFormatException nfe) {
nfe.printStackTrace();
}
}
public double getMashRatio() {
return mashRatio;
}
public void setMashRatioU(String mashRatioU) {
this.mashRatioU = mashRatioU;
}
public String getMashRatioU() {
return mashRatioU;
}
public void setStrikeTempU(String strikeTempU) {
this.strikeTempU = strikeTempU;
}
public String getStrikeTempU() {
return strikeTempU;
}
public void setStrikeTemp(double strikeTemp) {
this.strikeTemp = strikeTemp;
}
public double getStrikeTemp() {
return strikeTemp;
}
public double getDisplayStrikeTemp() {
if (getStrikeTempU().equals("F"))
return BrewCalcs.cToF(strikeTemp);
return strikeTemp;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
}
public MashStep addStep(String type, double startTemp, double endTemp, String method, int min,
int rampmin, double weight) {
MashStep step = new MashStep(type, startTemp, endTemp, method, min, rampmin);
step.weightLbs = weight;
steps.add(step);
calcMashSchedule();
return step;
}
public int addStep(){
MashStep step = new MashStep();
// calcStepType(temp);
if (!steps.isEmpty()) {
MashStep lastStep = (Mash.MashStep)steps.get(steps.size() -1);
step.setStartTemp(lastStep.getEndTemp() + 1);
step.setEndTemp(step.getStartTemp());
step.setType(calcStepType(step.getStartTemp()));
}
steps.add(step);
int i = steps.size();
calcMashSchedule();
// return the index of the last added step:
return i-1;
}
public void delStep(int i){
if (steps.size()>i && !steps.isEmpty() && i > -1){
steps.remove(i);
calcMashSchedule();
}
}
// set methods:
// public void setMaltWeight(double mw) { maltWeightLbs = mw; }
public void setMashRatio(double mr){
mashRatio = mr;
calcMashSchedule();
}
public double getMashRatio(){
return mashRatio;
}
public void setMashRatioU(String u){
mashRatioU = u;
calcMashSchedule();
}
public String getMashRatioU(){
return mashRatioU;
}
public void setMashVolUnits(String u){
volUnits = u;
calcMashSchedule();
}
public void setMashTempUnits(String newUnits){
if (newUnits.trim().endsWith("F")) {
tempUnits = "F";
} else {
tempUnits = "C";
}
calcMashSchedule();
}
// TODO hardcoded temp strings should be a static somewhere
public void setGrainTemp(double t){
if (tempUnits.equals("F"))
grainTempF = t;
else
grainTempF = BrewCalcs.cToF(t);
calcMashSchedule();
}
public void setBoilTemp(double t){
if (tempUnits.equals("F"))
boilTempF = t;
else
boilTempF = BrewCalcs.cToF(t);
calcMashSchedule();
}
public void setTunLoss(double t){
if (tempUnits.equals("F"))
tunLossF = t;
else
tunLossF = t * 1.8;
calcMashSchedule();
}
public void setDeadSpace(double d) {
deadSpace.setAmount(d);
}
public double getDeadSpace() {
return deadSpace.getValue();
}
public void setDecoctRatio(String type, double r){
if (type.equals("thick"))
thickDecoctRatio = r;
else
thinDecoctRatio = r;
calcMashSchedule();
}
public void setTempRange(String type, double t){
if (tempUnits.equals("C"))
t = BrewCalcs.cToF(t);
if (type.equalsIgnoreCase(MASHOUT))
MASHOUTTMPF = (float)t;
if (type.equalsIgnoreCase(SPARGE))
SPARGETMPF = (float)t;
}
public void setName(String s) { name = s; }
/**
*
* @param val Value to convert to a string
* @return Val converted to the mash vol, formated to 1 decimal
*/
private String getVolConverted(double val){
double d = Quantity.convertUnit(Quantity.QT, volUnits, val);
String s = SBStringUtils.format(d, 1);
return s;
}
// get methods:
public String getMashVolUnits(){ return volUnits; }
public String getMashTempUnits(){ return tempUnits; }
public int getMashTotalTime(){ return totalTime; }
public double getGrainTemp() {
if (tempUnits.equals("F"))
return grainTempF;
else
return BrewCalcs.fToC(grainTempF);
}
public double getBoilTemp() {
if (tempUnits.equals("F"))
return boilTempF;
else
return BrewCalcs.fToC(boilTempF);
}
public double getTempRange(String type){
double t=0;
if (type.equals(MASHOUT))
t = MASHOUTTMPF;
else if (type.equals(SPARGE))
t = SPARGETMPF;
if (tempUnits.equals("C"))
t = BrewCalcs.fToC(t);
return t;
}
public double getSpargeVol(){
return Quantity.convertUnit(Quantity.QT, volUnits, spargeQTS);
}
public double getSpargeQts(){ return spargeQTS; }
public double getTunLoss() {
if (tempUnits.equals("F"))
return tunLossF;
else
return ( tunLossF / 1.8 );
}
/**
*
* @return A string, which is the total converted to the mash units
* + the units.
*/
public String getMashTotalVol() {
double d = Quantity.convertUnit(Quantity.QT, volUnits, volQts);
String s = SBStringUtils.format(d, 1) + " " + volUnits;
return s;
}
public String getAbsorbedStr() {
return getVolConverted(absorbedQTS);
}
public double getAbsorbedQts() {
return absorbedQTS;
}
public String getTotalWaterStr() {
return getVolConverted(totalWaterQTS);
}
public double getTotalWaterQts() {
return totalWaterQTS;
}
public double getThickDecoctRatio() {
return thickDecoctRatio;
}
public double getThinDecoctRatio() {
return thinDecoctRatio;
}
public String getName(){ return name; }
// mash step methods:
public int setStepType(int i, String t){
if (steps.size() < i || steps.isEmpty())
return -1;
MashStep ms = (MashStep)steps.get(i);
ms.setType(t);
ms.setStartTemp(calcStepTemp(t));
ms.setEndTemp(calcStepTemp(t));
return 0;
}
public String getStepType(int i) {
if (steps.size() < i || steps.isEmpty())
return "";
MashStep ms = (MashStep)steps.get(i);
return ms.getType();
}
public String getStepDirections(int i){
return ((MashStep)steps.get(i)).getDirections();
}
public void setStepMethod(int i, String m){
((MashStep)steps.get(i)).setMethod(m);
if (m.equals(CEREAL_MASH))
((MashStep)steps.get(i)).weightLbs = 0;
calcMashSchedule();
}
public String getStepMethod(int i) {
return ((MashStep)steps.get(i)).getMethod();
}
public void setStepStartTemp(int i, double t){
if (tempUnits.equals("C")){
t = BrewCalcs.cToF(t);
}
((MashStep)steps.get(i)).setStartTemp(t);
((MashStep)steps.get(i)).setEndTemp(t);
((MashStep)steps.get(i)).setType(calcStepType(t));
calcMashSchedule();
}
public double getStepStartTemp(int i) {
if (tempUnits.equals("C"))
return BrewCalcs.fToC(((MashStep)steps.get(i)).getStartTemp());
else
return ((MashStep)steps.get(i)).getStartTemp();
}
public void setStepEndTemp(int i, double t){
if (tempUnits.equals("C"))
((MashStep)steps.get(i)).setEndTemp(BrewCalcs.cToF(t));
else
((MashStep)steps.get(i)).setEndTemp(t);
calcMashSchedule();
}
public double getStepEndTemp(int i) {
if (tempUnits.equals("C"))
return BrewCalcs.fToC(((MashStep)steps.get(i)).getEndTemp());
else
return ((MashStep)steps.get(i)).getEndTemp();
}
public void setStepRampMin(int i, int m){
((MashStep)steps.get(i)).setRampMin(m);
}
public int getStepRampMin(int i) {
return ((MashStep)steps.get(i)).getRampMin();
}
public double getStepTemp(int i) {
if (((MashStep)steps.get(i)).getTemp() == 0)
return 0;
if (tempUnits.equals("F"))
return steps.get(i).getTemp();
else
return BrewCalcs.fToC(steps.get(i).getTemp());
}
public double getStepWeight(int i) {
double w = steps.get(i).weightLbs;
return Quantity.convertUnit(Quantity.LB, myRecipe.getMaltUnits(), w);
}
public void setStepWeight(int i, double w){
// you can only set the weight on a cereal mash step
MashStep s = steps.get(i);
if (s.method.equals(CEREAL_MASH)){
double w2 = Quantity.convertUnit(myRecipe.getMaltUnits(), Quantity.LB, w);
s.weightLbs = w2;
calcMashSchedule();
}
}
public void setStepMin(int i, int m){
steps.get(i).setMinutes(m);
}
public int getStepMin(int i) {
return steps.get(i).getMinutes();
}
public double getStepInVol(int i) {
double vol = steps.get(i).getInVol().getValue();
return Quantity.convertUnit(Quantity.QT, volUnits, vol);
}
public Quantity getStepInQuantity(int i) {
return steps.get(i).getInVol();
}
public double getStepOutVol(int i) {
double vol = steps.get(i).getOutVol().getValue();
return Quantity.convertUnit(Quantity.QT, volUnits, vol);
}
public Quantity getStepOutQuantity(int i) {
return steps.get(i).getOutVol();
}
public int getStepSize(){
return steps.size();
}
// Introducing: the big huge mash calc method!
/**
 * Recalculate the whole mash schedule: strike temperatures, infusion /
 * decoction / cereal-mash / direct-heat step volumes, human-readable
 * directions, water-use totals and the sparge charges.
 *
 * All internal arithmetic is in Fahrenheit and quarts; values are only
 * converted to the display units (tempUnits / volUnits) when directions
 * are formatted. Does nothing while myRecipe.allowRecalcs is false or
 * when there are no steps.
 *
 * Fixes vs. previous revision: String comparisons now use equals()
 * instead of == (the old code only worked because "C" literals are
 * interned), the "cereal mash" literal uses the CEREAL_MASH constant,
 * and the stray System.out.println goes through BrewServer.LOG like the
 * rest of the method's logging.
 */
public void calcMashSchedule() {
    // Method to run through the mash table and calculate values
    if (!myRecipe.allowRecalcs)
        return;
    double targetTemp = 0;
    double waterAddedQTS = 0;
    double waterEquiv = 0;
    double currentTemp = getGrainTemp();
    double displTemp = 0;
    double tunLoss; // figure out a better way to do this, eg: themal mass
    double decoct = 0;
    int totalMashTime = 0;
    int totalSpargeTime = 0;
    double mashWaterQTS = 0;
    double mashVolQTS = 0;
    int numSparge = 0;
    double totalWeightLbs = 0;
    double totalCerealLbs = 0;
    maltWeightLbs = myRecipe.getTotalMashLbs();
    // convert CurrentTemp to F
    if (tempUnits.equals("C")) {
        currentTemp = BrewCalcs.cToF(currentTemp);
        tunLoss = tunLossF * 1.8;
    } else {
        tunLoss = tunLossF;
    }
    // perform calcs on first record
    if (steps.isEmpty())
        return;
    // sort the list by start temperature
    Collections.sort(steps);
    // first pass: strike temps, water added, heat equivalents, cereal lbs
    for (int i = 0; i < steps.size(); i++) {
        MashStep stp = ((MashStep) steps.get(i));
        // convert mash ratio to qts/lb if in l/kg
        double mr = stp.getMashRatio();
        if (stp.getMashRatioU().equalsIgnoreCase(L_PER_KG)) {
            mr *= 0.479325;
        }
        if (stp.method.equals(CEREAL_MASH)) {
            totalCerealLbs += stp.weightLbs;
        }
        stp.setStrikeTemp(calcStrikeTemp(stp.getStartTemp(), currentTemp, mr, tunLoss));
        stp.setStrikeTempU(tempUnits);
        waterAddedQTS += stp.weightLbs * mr;
        waterEquiv += stp.weightLbs * (0.192 + mr);
        mashVolQTS += calcMashVol(stp.weightLbs, mr);
    }
    totalWeightLbs = maltWeightLbs - totalCerealLbs;
    // the first step is always an infusion
    MashStep stp = steps.get(0);
    totalMashTime += stp.minutes;
    mashWaterQTS += waterAddedQTS;
    stp.inVol.setUnits(Quantity.QT);
    stp.inVol.setAmount(waterAddedQTS);
    stp.method = INFUSION;
    stp.weightLbs = totalWeightLbs;
    // subtract the water added from the Water Equiv so that they are correct when added in the next part of the loop
    waterEquiv -= waterAddedQTS;
    stp.directions = "Mash in with " + SBStringUtils.format(stp.inVol.getValueAs(volUnits), 1) + " " + volUnits
            + " of water at " + SBStringUtils.format(stp.getDisplayStrikeTemp(), 1) + " " + stp.getStrikeTempU();
    // set TargetTemp to the end temp
    targetTemp = stp.endTemp;
    for (int i = 1; i < steps.size(); i++) {
        stp = steps.get(i);
        currentTemp = targetTemp; // switch
        targetTemp = stp.startTemp;
        // if this is a former sparge step that's been changed, change
        // the method to infusion
        BrewServer.LOG.info("Mash step Type: " + stp.type + " Method: " + stp.method);
        if (!stp.type.equals(SPARGE) && (stp.method.equals(FLY) || stp.method.equals(BATCH)))
            stp.method = INFUSION;
        // do calcs
        if (stp.method.equals(INFUSION)) { // calculate an infusion step
            decoct = 0;
            waterEquiv += waterAddedQTS; // add previous addition to get WE
            double strikeTemp = boilTempF; // boiling water
            // Updated the water added
            waterAddedQTS = calcWaterAddition(targetTemp, currentTemp,
                    waterEquiv, boilTempF);
            stp.outVol.setAmount(0);
            stp.inVol.setUnits(Quantity.QT);
            stp.inVol.setAmount(waterAddedQTS);
            stp.temp = strikeTemp;
            stp.weightLbs = totalWeightLbs;
            // was: tempUnits == "C" (reference comparison)
            if (tempUnits.equals("C"))
                strikeTemp = 100;
            stp.directions = "Add " + SBStringUtils.format(stp.inVol.getValueAs(volUnits), 1) + " " + volUnits
                    + " of water at " + SBStringUtils.format(strikeTemp, 1) + " " + tempUnits;
            mashWaterQTS += waterAddedQTS;
            mashVolQTS += waterAddedQTS;
        } else if (stp.method.indexOf(DECOCTION) > -1) { // calculate a decoction step
            waterEquiv += waterAddedQTS; // add previous addition to get WE
            waterAddedQTS = 0;
            double ratio = 0.75;
            if (stp.method.indexOf(DECOCTION_THICK) > -1)
                ratio = thickDecoctRatio;
            else if (stp.method.indexOf(DECOCTION_THIN) > -1)
                ratio = thinDecoctRatio;
            // Calculate volume (qts) of mash to remove
            decoct = calcDecoction2(targetTemp, currentTemp, mashWaterQTS, ratio, totalWeightLbs);
            stp.outVol.setUnits(Quantity.QT);
            stp.outVol.setAmount(decoct);
            stp.inVol.setAmount(0);
            stp.temp = boilTempF;
            stp.weightLbs = totalWeightLbs;
            // Updated the decoction, convert to right units & make directions
            stp.directions = "Remove " + SBStringUtils.format(stp.outVol.getValueAs(volUnits), 1) + " " + volUnits
                    + " of mash, boil, and return to mash.";
        } else if (stp.method.equals(DIRECT)) { // calculate a direct heat step
            waterEquiv += waterAddedQTS; // add previous addition to get WE
            waterAddedQTS = 0;
            displTemp = stp.startTemp;
            if (tempUnits.equals("C"))
                displTemp = BrewCalcs.fToC(displTemp);
            stp.directions = "Add direct heat until mash reaches " + displTemp
                    + " " + tempUnits + ".";
            stp.inVol.setAmount(0);
            stp.outVol.setAmount(0);
            stp.temp = 0;
            stp.weightLbs = totalWeightLbs;
        } else if (stp.method.indexOf(CEREAL_MASH) > -1) { // calculate a cereal mash step
            double mr = stp.getMashRatio();
            if (stp.getMashRatioU().equalsIgnoreCase(L_PER_KG)) {
                mr *= 0.479325;
            }
            waterEquiv += waterAddedQTS; // add previous addition to get WE
            targetTemp = stp.startTemp;
            double extraWaterQTS = 0;
            double cerealTemp = boilTempF;
            double cerealTargTemp = cerealMashTemp;
            String addStr = "";
            /*
             * 1. check the temp of the mash when you add the boiling cereal mash @ default ratio back
             * 2. if it's > than the step temp, adjust the step temp
             * 3. if it's < than the step temp, add extra water to increase the "heat equivalencey" of the cereal mash
             */
            double cerealWaterEquiv = stp.weightLbs * (0.192 + mr);
            waterAddedQTS = mr * stp.weightLbs;
            double strikeTemp = calcStrikeTemp(cerealMashTemp, grainTempF, mr, 0);
            double newTemp = ((waterEquiv * currentTemp) + (cerealWaterEquiv * cerealTemp)) / (waterEquiv + cerealWaterEquiv);
            if (newTemp > targetTemp) {
                stp.startTemp = newTemp;
            }
            if (newTemp < targetTemp) {
                double addQts = ((waterEquiv * (targetTemp - currentTemp)) / (cerealTemp - targetTemp)) - 0.192;
                extraWaterQTS = addQts - waterAddedQTS;
                addStr = " Add " + SBStringUtils.format(Quantity.convertUnit("qt", volUnits, extraWaterQTS), 1)
                        + " " + volUnits + " water to the cereal mash.";
            }
            // Calculate final temp of cereal mash
            // cerealTemp = (targetTemp * (waterEquiv + cerealWaterEquiv) - (waterEquiv * currentTemp)) / cerealWaterEquiv;
            totalMashTime += stp.minutes;
            mashWaterQTS += waterAddedQTS + extraWaterQTS;
            stp.inVol.setUnits(Quantity.QT);
            stp.inVol.setAmount(waterAddedQTS);
            stp.outVol.setAmount(0);
            stp.temp = strikeTemp;
            // make directions
            String weightStr = SBStringUtils.format(Quantity.convertUnit(Quantity.LB, myRecipe.getMaltUnits(), stp.weightLbs), 1)
                    + " " + myRecipe.getMaltUnits();
            String volStr = SBStringUtils.format(Quantity.convertUnit(Quantity.QT, volUnits, waterAddedQTS), 1)
                    + " " + volUnits;
            // was: tempUnits == "C" (reference comparison)
            if (tempUnits.equals("C")) {
                strikeTemp = BrewCalcs.fToC(strikeTemp);
                cerealTemp = BrewCalcs.fToC(cerealTemp);
                targetTemp = BrewCalcs.fToC(targetTemp);
                cerealTargTemp = BrewCalcs.fToC(cerealTargTemp);
            }
            String tempStr = SBStringUtils.format(strikeTemp, 1) + tempUnits;
            String tempStr2 = SBStringUtils.format(cerealTemp, 1) + tempUnits;
            String tempStr3 = SBStringUtils.format(targetTemp, 1) + tempUnits;
            String tempStr4 = SBStringUtils.format(cerealTargTemp, 1) + tempUnits;
            stp.directions = "Cereal mash: mash " + weightStr + " grain with " + volStr + " water at " +
                    tempStr + " to hit " + tempStr4 + " and rest.";
            stp.directions += addStr;
            stp.directions += " Raise to " + tempStr2 + " and add to the main mash to reach " + tempStr3;
            // add cereal mash to total weight
            totalWeightLbs += stp.weightLbs;
        } else {
            BrewServer.LOG.warning("Unrecognised mash step: " + stp.method);
        }
        if (stp.type.equals(SPARGE))
            numSparge++;
        else {
            totalMashTime += stp.minutes;
        }
        // set target temp to end temp for next step
        targetTemp = stp.endTemp;
    } // for steps.size()
    waterEquiv += waterAddedQTS; // add previous addition to get WE
    totalTime = totalMashTime;
    volQts = mashVolQTS;
    // water use stats:
    BrewServer.LOG.warning("Total weight: " + totalWeightLbs);
    absorbedQTS = totalWeightLbs * 0.52; // figure from HBD
    // spargeTotalQTS = (myRecipe.getPreBoilVol("qt")) - (mashWaterQTS - absorbedQTS);
    totalWaterQTS = mashWaterQTS;
    spargeQTS = myRecipe.getPreBoilVol(Quantity.QT) -
            (mashWaterQTS - absorbedQTS - deadSpace.getValueAs(Quantity.QT));
    BrewServer.LOG.warning("Sparge Quarts: " + spargeQTS);
    // Now let's figure out the sparging:
    if (numSparge == 0)
        return;
    // Amount to collect per sparge
    double col = myRecipe.getPreBoilVol(Quantity.QT) / numSparge;
    double charge[] = new double[numSparge];
    double collect[] = new double[numSparge];
    double totalCollectQts = myRecipe.getPreBoilVol(Quantity.QT);
    // do we need to add more water to charge up
    // is the amount we need to collect less than the initial mash volume - absorbption
    // was: System.out.println — route through the server log like the rest
    BrewServer.LOG.info("Collecting: " + col + " MashWater " + mashWaterQTS +
            " Absorbed " + absorbedQTS + " Loss: " + deadSpace.getValueAs(Quantity.QT));
    if (col <= (mashWaterQTS - absorbedQTS)) {
        charge[0] = 0;
        collect[0] = mashWaterQTS - absorbedQTS; // how much is left over from the mash
        totalCollectQts = totalCollectQts - collect[0];
    } else {
        charge[0] = col - (mashWaterQTS - absorbedQTS); // add the additional water to get out the desired first collection amount PER sparge
        collect[0] = col;
        totalCollectQts = totalCollectQts - collect[0];
    }
    // do we need any more steps?
    if (numSparge > 1) {
        /*
         batch_1_sparge_liters = (boil_size_l/<total number of steps> ) - mash_water_l + grain_wt_kg * 0.625)
         batch_2_liters = boil_size_l / <total number of steps>
         */
        BrewServer.LOG.info("NumSparge: " + numSparge);
        BrewServer.LOG.info("Collecting: " + col);
        for (int i = 1; i < numSparge; i++) {
            charge[i] = col;
            collect[i] = col;
        }
    }
    int j = 0;
    for (int i = 1; i < steps.size(); i++) {
        stp = ((MashStep) steps.get(i));
        if (stp.getType().equals(SPARGE)) {
            stp.inVol.setUnits(Quantity.QT);
            stp.inVol.setAmount(charge[j]);
            stp.outVol.setUnits(Quantity.QT);
            stp.outVol.setAmount(collect[j]);
            stp.temp = SPARGETMPF;
            totalSpargeTime += stp.getMinutes();
            String collectStr = SBStringUtils.format(Quantity.convertUnit(Quantity.QT, volUnits, collect[j]), 2) +
                    " " + volUnits;
            String tempStr;
            if (tempUnits.equals("F")) {
                tempStr = "" + SBStringUtils.format(SPARGETMPF, 1) + "F";
            } else {
                tempStr = SBStringUtils.format(BrewCalcs.fToC(SPARGETMPF), 1) + "C";
            }
            if (numSparge > 1) {
                stp.setMethod(BATCH);
                String add = SBStringUtils.format(Quantity.convertUnit(Quantity.QT, volUnits, charge[j]), 2) +
                        " " + volUnits;
                stp.setDirections("Add " + add + " at " + tempStr + " to collect " + collectStr);
            } else {
                stp.inVol.setUnits(Quantity.QT);
                stp.inVol.setAmount(spargeQTS);
                stp.outVol.setUnits(Quantity.QT);
                stp.outVol.setAmount(collect[j]);
                stp.setMethod(FLY);
                stp.setDirections("Sparge with " +
                        SBStringUtils.format(Quantity.convertUnit("qt", volUnits, spargeQTS), 1) +
                        " " + volUnits + " at " + tempStr + " to collect " + collectStr);
            }
            j++;
        }
    }
}
// private methods:
/* from John Palmer:
* Vd (quarts) = [(T2 - T1)(.4G + 2W)] / [(Td - T1)(.4g + w)]
Where:
Vd = decoction volume
T1 = initial mash temperature
T2 = target mash temperature
Td = decoction temperature (212F)
G = Grainbill weight
W = volume of water in mash (i.e. initial infusion volume)
g = pounds of grain per quart of decoction = 1/(Rd + .32)
w = quarts of water per quart of decoction = g*Rd*water density = 2gRd
Rd = ratio of grain to water in the decoction volume (range of .6 to 1
quart/lb)
thick decoctions will have a ratio of .6-.7, thinner decoctions will
have a ratio of .8-.9
*/
/**
 * Volume (quarts) of mash to pull for a decoction, per John Palmer's
 * formula (see the comment above this method).
 *
 * @param targetTemp  temperature to raise the main mash to (F).
 * @param currentTemp current main-mash temperature (F).
 * @param waterVolQTS water currently in the mash, quarts.
 * @param ratio       grain-to-water ratio of the pulled decoction (qt/lb).
 * @param weightLbs   grain bill weight, pounds.
 * @return decoction volume in quarts.
 */
private double calcDecoction2(double targetTemp, double currentTemp, double waterVolQTS, double ratio, double weightLbs){
    double grainPerQt = 1 / (ratio + .32);       // "g" in Palmer's formula
    double waterPerQt = 2 * grainPerQt * ratio;  // "w" in Palmer's formula
    double numerator = (targetTemp - currentTemp) * ((0.4 * weightLbs) + (2 * waterVolQTS));
    double denominator = (boilTempF - currentTemp) * (0.4 * grainPerQt + waterPerQt);
    return numerator / denominator;
}
/**
 * Map a mash temperature (F) to the name of the rest whose range
 * contains it.
 *
 * Fixed: the old if/else chain's glucan branch had no lower bound, so
 * any temperature below ACIDTMPF fell through to "glucan" and the
 * documented "none" result was unreachable.
 *
 * @param temp temperature in Fahrenheit.
 * @return one of the type constants, or "none" below the acid-rest floor.
 */
private String calcStepType(double temp) {
    // below the acid floor: no named rest
    if (temp < ACIDTMPF)
        return "none";
    // 85 - 95: acid
    if (temp < GLUCANTMPF)
        return ACID;
    // 95 - 113: glucan
    if (temp < PROTEINTMPF)
        return GLUCAN;
    // 113 - 131: protein
    if (temp < BETATMPF)
        return PROTEIN;
    // 131 - 151: beta
    if (temp < ALPHATMPF)
        return BETA;
    // 151 - 161: alpha
    if (temp < MASHOUTTMPF)
        return ALPHA;
    // 161 - 170: mashout
    if (temp < SPARGETMPF)
        return MASHOUT;
    // 170 and up: sparge
    return SPARGE;
}
/**
 * Target temperature (F) for a named rest type: the midpoint of the
 * type's range, the sparge floor itself for SPARGE, 0 for unknown types.
 *
 * Fixed: previously compared Strings with == (reference identity),
 * which only worked because callers pass the interned constants;
 * equals() is correct for any equal String.
 *
 * @param stepType one of the type constants (ACID ... SPARGE).
 * @return the target temperature in Fahrenheit.
 */
private double calcStepTemp(String stepType) {
    float stepTempF = 0;
    if (ACID.equals(stepType))
        stepTempF = (ACIDTMPF + GLUCANTMPF) / 2;
    else if (GLUCAN.equals(stepType))
        stepTempF = (GLUCANTMPF + PROTEINTMPF) / 2;
    else if (PROTEIN.equals(stepType))
        stepTempF = (PROTEINTMPF + BETATMPF) / 2;
    else if (BETA.equals(stepType))
        stepTempF = (BETATMPF + ALPHATMPF) / 2;
    else if (ALPHA.equals(stepType))
        stepTempF = (ALPHATMPF + MASHOUTTMPF) / 2;
    else if (MASHOUT.equals(stepType))
        stepTempF = (MASHOUTTMPF + SPARGETMPF) / 2;
    else if (SPARGE.equals(stepType))
        stepTempF = SPARGETMPF;
    return stepTempF;
}
/**
 * Heat-equivalent volume (quarts) of the grain plus first-infusion water.
 * Applies to the first mash record only.
 *
 * @param grainWeightLBS grain weight in pounds.
 * @param ratio          water-to-grain ratio in quarts per pound.
 * @return combined volume in quarts.
 */
double calcMashVol(double grainWeightLBS, double ratio) {
    // HBD posts indicate a grain displacement of 0.32 qt/lb, but
    // reality is closer to 0.42.
    double grainDisplacement = 0.42;
    return grainWeightLBS * (grainDisplacement + ratio);
}
/**
 * Infusion strike-water temperature (F) for a given rest target.
 * Uses the classic 0.192 grain thermal constant; tunLossF (a Fahrenheit
 * delta, shadowing the field of the same name) is a fudge for tun losses.
 *
 * @param targetTemp  rest temperature to hit (F).
 * @param currentTemp current grain/mash temperature (F).
 * @param ratio       water-to-grain ratio in quarts per pound.
 * @param tunLossF    extra degrees F to compensate for the tun.
 * @return strike temperature in Fahrenheit.
 */
double calcStrikeTemp(double targetTemp, double currentTemp, double ratio,
        double tunLossF) {
    // alternative using thermal mass:
    // double strikeTemp = (maltWeightLbs + thermalMass)*( targetTemp - currentTemp )/( boilTempF - targetTemp );
    double rise = targetTemp - currentTemp;
    return (targetTemp + 0.192 * rise / ratio) + tunLossF;
}
/**
 * Quarts of boiling water needed to lift the mash from currentTemp to
 * targetTemp.
 *
 * @param mashVol   heat-equivalent mash volume, quarts.
 * @param boilTempF boiling-water temperature (F), shadowing the field.
 * @return volume of boiling water to add, quarts.
 */
double calcWaterAddition(double targetTemp, double currentTemp,
        double mashVol, double boilTempF) {
    double tempRise = targetTemp - currentTemp;
    double coolingSpan = boilTempF - targetTemp;
    return mashVol * tempRise / coolingSpan;
}
/**
 * Serialise this mash to the StrangeBrew XML fragment, recalculating
 * the schedule first. Raw TEMP/END_TEMP elements stay in Fahrenheit;
 * DISPL_* elements are in the current display unit.
 *
 * Uses StringBuilder rather than StringBuffer: single-threaded use, so
 * the synchronized buffer bought nothing.
 *
 * @return the <MASH> XML fragment.
 */
public String toXml() {
    calcMashSchedule();
    StringBuilder sb = new StringBuilder();
    sb.append(" <MASH>\n");
    sb.append(SBStringUtils.xmlElement("NAME", name, 4));
    // consistency: Quantity.QT constant instead of the "qt" literal
    sb.append(SBStringUtils.xmlElement("MASH_VOLUME", SBStringUtils.format(Quantity.convertUnit(Quantity.QT, volUnits, volQts), 2) , 4));
    sb.append(SBStringUtils.xmlElement("MASH_VOL_U", "" + volUnits, 4));
    sb.append(SBStringUtils.xmlElement("MASH_RATIO", "" + mashRatio, 4));
    sb.append(SBStringUtils.xmlElement("MASH_RATIO_U", "" + mashRatioU, 4));
    sb.append(SBStringUtils.xmlElement("MASH_TIME", "" + totalTime, 4));
    sb.append(SBStringUtils.xmlElement("MASH_TMP_U", "" + tempUnits, 4));
    sb.append(SBStringUtils.xmlElement("THICK_DECOCT_RATIO", "" + thickDecoctRatio, 4));
    sb.append(SBStringUtils.xmlElement("THIN_DECOCT_RATIO", "" + thinDecoctRatio, 4));
    if (tempUnits.equals("C")){
        // tun loss is a delta, hence /1.8 with no 32 offset
        sb.append(SBStringUtils.xmlElement("MASH_TUNLOSS_TEMP", "" + (tunLossF/1.8), 4));
        sb.append(SBStringUtils.xmlElement("GRAIN_TEMP", "" + BrewCalcs.fToC(grainTempF), 4));
        sb.append(SBStringUtils.xmlElement("BOIL_TEMP", "" + BrewCalcs.fToC(boilTempF), 4));
    }
    else {
        sb.append(SBStringUtils.xmlElement("MASH_TUNLOSS_TEMP", "" + tunLossF, 4));
        sb.append(SBStringUtils.xmlElement("GRAIN_TEMP", "" + grainTempF, 4));
        sb.append(SBStringUtils.xmlElement("BOIL_TEMP", "" + boilTempF, 4));
    }
    for (int i = 0; i < steps.size(); i++) {
        MashStep st = (MashStep) steps.get(i);
        sb.append(" <ITEM>\n");
        sb.append(" <TYPE>" + st.type + "</TYPE>\n");
        sb.append(" <TEMP>" + st.startTemp + "</TEMP>\n");
        if (tempUnits.equals("C"))
            sb.append(" <DISPL_TEMP>" + SBStringUtils.format(BrewCalcs.fToC(st.startTemp), 1) + "</DISPL_TEMP>\n");
        else
            sb.append(" <DISPL_TEMP>" + st.startTemp + "</DISPL_TEMP>\n");
        sb.append(" <END_TEMP>" + st.endTemp + "</END_TEMP>\n");
        if (tempUnits.equals("C"))
            sb.append(" <DISPL_END_TEMP>" + SBStringUtils.format(BrewCalcs.fToC(st.endTemp), 1) + "</DISPL_END_TEMP>\n");
        else
            sb.append(" <DISPL_END_TEMP>" + st.endTemp + "</DISPL_END_TEMP>\n");
        sb.append(" <MIN>" + st.minutes + "</MIN>\n");
        sb.append(" <RAMP_MIN>" + st.rampMin + "</RAMP_MIN>\n");
        sb.append(" <METHOD>" + st.method + "</METHOD>\n");
        sb.append(" <WEIGHT_LBS>" + st.weightLbs + "</WEIGHT_LBS>\n");
        sb.append(" <DIRECTIONS>" + st.directions + "</DIRECTIONS>\n");
        sb.append(" </ITEM>\n");
    }
    sb.append(" </MASH>\n");
    return sb.toString();
}
/**
 * Serialise the mash steps to JSON, one entry per step keyed by its
 * position in the schedule.
 * @param device optional PID/device name; stored under "pid" when non-null.
 * @return JSON representation of the current mash steps.
 */
public JSONObject toJSONObject(String device) {
    JSONObject mashJson = new JSONObject();
    int position = 0;
    for (MashStep step : steps) {
        JSONObject stepJson = new JSONObject();
        stepJson.put("type", step.type);
        stepJson.put("method", step.method);
        stepJson.put("temp", step.getStartTemp());
        stepJson.put("duration", step.minutes);
        stepJson.put("tempUnit", this.tempUnits);
        stepJson.put("position", position);
        mashJson.put(position, stepJson);
        position++;
    }
    if (device != null) {
        mashJson.put("pid", device);
    }
    return mashJson;
}
} | src/main/java/ca/strangebrew/recipe/Mash.java | package ca.strangebrew.recipe;
import java.util.ArrayList;
import java.util.Collections;
import com.sb.elsinore.BrewServer;
import org.json.simple.JSONObject;
/**
* $Id: Mash.java,v 1.37 2008/01/16 17:55:04 jimcdiver Exp $
* @author aavis
*
*/
public class Mash {
// set this:
private double maltWeightLbs;            // total mash grain bill, pounds
private Recipe myRecipe;                 // owning recipe; source of grain totals
//options:
private double mashRatio;
private String mashRatioU;               // QT_PER_LB or L_PER_KG
private String tempUnits = "F";          // display unit for temperatures ("F" or "C")
private String volUnits = Quantity.GAL;  // display unit for volumes
private double grainTempF;               // stored internally in Fahrenheit
private double boilTempF;                // stored internally in Fahrenheit
// private double thermalMass;
private double tunLossF;                 // strike-temp fudge for tun losses, F delta
private Quantity deadSpace = new Quantity();
private double thinDecoctRatio;
private double thickDecoctRatio;
private double cerealMashTemp;
private String name;
// calculated:
private double volQts;                   // total mash volume, quarts
private int totalTime;                   // total mash time, minutes
private double absorbedQTS;              // water absorbed by grain, quarts
private double totalWaterQTS;
private double spargeQTS;
// steps:
private ArrayList<MashStep> steps = new ArrayList<MashStep>();
// configurable temps, can be set by the user:
// target temps are 1/2 between temp + next temp
private float ACIDTMPF = 85;
private float GLUCANTMPF = 95;
private float PROTEINTMPF = 113;
private float BETATMPF = 131;
private float ALPHATMPF = 151;
private float MASHOUTTMPF = 161;
private float SPARGETMPF = 170;
// string identifiers for ratio units, rest types and step methods:
static final public String QT_PER_LB = "qt/lb";
static final public String L_PER_KG = "l/kg";
static final public String ACID = "acid";
static final public String GLUCAN = "glucan";
static final public String PROTEIN = "protein";
static final public String BETA = "beta";
static final public String ALPHA = "alpha";
static final public String MASHOUT = "mashout";
static final public String SPARGE = "sparge";
static final public String INFUSION = "infusion";
static final public String DECOCTION = "decoction";
static final public String DECOCTION_THICK = "decoction thick";
static final public String DECOCTION_THIN = "decoction thin";
static final public String DIRECT = "direct";
static final public String CEREAL_MASH = "cereal mash";
static final public String FLY = "fly";
static final public String BATCH = "batch";
static final public String[] ratioUnits = {QT_PER_LB, L_PER_KG};
static final public String[] types = {ACID, GLUCAN, PROTEIN, BETA, ALPHA, MASHOUT, SPARGE};
static final public String[] methods = {INFUSION, DECOCTION, DECOCTION_THICK, DECOCTION_THIN, DIRECT, CEREAL_MASH};
static final public String[] spargeMethods = {FLY, BATCH};
private String notes;
private double tunTemp;
private double spargeTemp;
private double ph;
private Quantity tunWeight;
private double tunSpecificHeat;
private boolean tunAdjust;
// NOTE(review): totalMashLbs appears unused in this file (getTotalMashLbs
// delegates to the recipe) — confirm before removing.
private double totalMashLbs;
// Build a mash attached to the recipe that supplies its grain totals.
public Mash(String name, Recipe recipe){
    this.name = name;
    this.myRecipe = recipe;
}
public void setNotes(String notes) {
    this.notes = notes;
}
public String getNotes() {
    return notes;
}
public void setTunTemp(double tunTemp) {
    this.tunTemp = tunTemp;
}
public double getTunTemp() {
    return tunTemp;
}
public void setSpargeTemp(double spargeTemp) {
    this.spargeTemp = spargeTemp;
}
public double getSpargeTemp() {
    return spargeTemp;
}
public void setPh(double ph) {
    this.ph = ph;
}
public double getPh() {
    return ph;
}
public void setTunWeight(double tunWeight) {
    // Numeric overload: the weight is taken to be in kilograms.
    this.tunWeight = new Quantity();
    this.tunWeight.setUnits(Quantity.KG);
    this.tunWeight.setAmount(tunWeight);
}
public void setTunWeight(String weightString) {
    // String overload: Quantity parses the "<amount> <unit>" string itself.
    this.tunWeight = new Quantity(weightString);
}
public Quantity getTunWeight() {
    return tunWeight;
}
public void setTunSpecificHeat(double tunSpecificHeat) {
    this.tunSpecificHeat = tunSpecificHeat;
}
public double getTunSpecificHeat() {
    return tunSpecificHeat;
}
public void setTunAdjust(boolean tunAdjust) {
    this.tunAdjust = tunAdjust;
}
public boolean isTunAdjust() {
    return tunAdjust;
}
public double getMashRatio(int i) {
    // Per-step ratio (distinct from the mash-wide getMashRatio()).
    return steps.get(i).getMashRatio();
}
public String getMashRatioU(int i) {
    return steps.get(i).getMashRatioU();
}
public String getStepInfuseTemp(int i) {
    return steps.get(i).getInfuseTemp();
}
public String getStepTempU(int i) {
    return steps.get(i).getStrikeTempU();
}
public String getStepName(int i) {
    return steps.get(i).getName();
}
public double getStrikeTemp(int i) {
    return steps.get(i).getStrikeTemp();
}
public String getDisplayStrikeTemp(int i) {
    MashStep s = steps.get(i);
    double temp = s.getStrikeTemp();
    // NOTE(review): converts C->F when the unit is already "F" — looks
    // inverted if strikeTemp is stored in the unit named by strikeTempU.
    // Confirm against MashStep.getDisplayStrikeTemp(), which does the same.
    if (s.getStrikeTempU().equals("F")) {
        temp = BrewCalcs.cToF(s.getStrikeTemp());
    }
    return temp + " " + s.getStrikeTempU();
}
public String getDisplayStepStartTemp(int i) {
    MashStep s = steps.get(i);
    double temp = s.getStartTemp();
    // NOTE(review): same conversion-direction question as getDisplayStrikeTemp.
    if (s.getStrikeTempU().equals("F")) {
        temp = BrewCalcs.cToF(s.getStartTemp());
    }
    return temp + " " + s.getStrikeTempU();
}
public double getTotalMashLbs() {
    // Temporarily enable recalcs so the recipe refreshes its malt totals.
    myRecipe.setAllowRecalcs(true);
    myRecipe.calcMaltTotals();
    myRecipe.setAllowRecalcs(false);
    return myRecipe.getTotalMashLbs();
}
/**
 * One step of the mash schedule: a rest type, start/end temperatures,
 * a method (infusion / decoction / cereal mash / direct / sparge),
 * timing, the water volumes in and out, and the generated directions.
 * Instances are filled in by Mash.calcMashSchedule().
 */
public class MashStep implements Comparable<MashStep> {
    private String type;         // rest type (ACID ... SPARGE)
    private double startTemp;    // rest start temperature
    private double endTemp;      // rest end temperature
    private String method;       // INFUSION, DECOCTION*, DIRECT, CEREAL_MASH, FLY, BATCH
    private int minutes;         // rest duration
    private int rampMin;         // ramp time to reach the rest
    private String directions;   // human-readable instructions, set by calcMashSchedule
    private double temp;
    private double weightLbs;
    public Quantity inVol = new Quantity();   // water added for this step
    public Quantity outVol = new Quantity();  // volume removed (decoction/sparge)
    private double infuseTemp;
    private String infuseTempUnit;
    private double mashRatio;
    private String mashRatioU;
    private String strikeTempU = "C";
    private double strikeTemp;
    private String name;
    // public Quantity decoctVol = new Quantity();
    public MashStep(String type, double startTemp, double endTemp, String method, int min,
            int rmin) {
        this.type = type;
        this.startTemp = startTemp;
        this.endTemp = endTemp;
        this.method = method;
        minutes = min;
        rampMin = rmin;
    }
    // default constructor: a 60-minute infusion just above the alpha rest
    public MashStep() {
        rampMin = 0;
        endTemp = ALPHATMPF + 1;
        startTemp = ALPHATMPF + 1;
        minutes = 60;
        method = INFUSION;
        type = ALPHA;
        weightLbs = 0;
    }
    // getter/setter methods
    public String getDirections() {
        return directions;
    }
    public void setDirections(String directions) {
        this.directions = directions;
    }
    public double getEndTemp() {
        return endTemp;
    }
    public void setEndTemp(double endTemp) {
        this.endTemp = endTemp;
    }
    public Quantity getInVol() {
        return inVol;
    }
    public void setInVol(Quantity vol) {
        this.inVol = vol;
    }
    public Quantity getOutVol() {
        return outVol;
    }
    public void setOutVol(Quantity vol) {
        this.outVol = vol;
    }
    public String getMethod() {
        return method;
    }
    public void setMethod(String method) {
        this.method = method;
    }
    public int getMinutes() {
        return minutes;
    }
    public void setMinutes(int minutes) {
        this.minutes = minutes;
    }
    public int getRampMin() {
        return rampMin;
    }
    public void setRampMin(int rampMin) {
        this.rampMin = rampMin;
    }
    public double getStartTemp() {
        return startTemp;
    }
    public void setStartTemp(double startTemp) {
        this.startTemp = startTemp;
    }
    public double getTemp() {
        return temp;
    }
    public String getType() {
        return type;
    }
    public void setType(String s) {
        type = s;
    }
    public int compareTo(MashStep m) {
        // Orders steps by start temperature. Equal temps deliberately do
        // NOT compare as 0 (returns -1 instead) so duplicates survive in
        // sorted containers. NOTE(review): this is inconsistent with
        // equals() and asymmetric — confirm it is intentional before
        // relying on it anywhere but Collections.sort().
        int result = ((Double)this.getStartTemp()).compareTo((Double)m.getStartTemp());
        return (result == 0 ? -1 : result);
    }
    public void setInVol(double infuseAmount) {
        // Numeric overload: the amount is taken to be in litres.
        this.inVol.setUnits(Quantity.LITRES);
        this.inVol.setAmount(infuseAmount);
    }
    public void setInfuseTemp(String infuseTemp) {
        // Parses strings of the form "<number> <unit>", e.g. "66 C".
        // NOTE(review): a string without a unit token throws
        // ArrayIndexOutOfBoundsException here — only NumberFormatException
        // is caught.
        String[] split = infuseTemp.trim().split(" ");
        try {
            this.infuseTemp = Double.parseDouble(split[0].trim());
            this.infuseTempUnit = split[1].trim();
        } catch (NumberFormatException nfe) {
            System.out.println("Couldn't parse: " + split[0] + " as a number.");
            nfe.printStackTrace();
        }
    }
    public String getInfuseTemp() {
        // NOTE(review): appends the *strike* temp unit, not infuseTempUnit —
        // confirm this is the intended unit for display.
        return infuseTemp + " " + this.getStrikeTempU();
    }
    public void setMashRatio(String mashRatio) {
        try {
            this.mashRatio = Double.parseDouble(mashRatio);
        } catch (NumberFormatException nfe) {
            nfe.printStackTrace();
        }
    }
    public double getMashRatio() {
        return mashRatio;
    }
    public void setMashRatioU(String mashRatioU) {
        this.mashRatioU = mashRatioU;
    }
    public String getMashRatioU() {
        return mashRatioU;
    }
    public void setStrikeTempU(String strikeTempU) {
        this.strikeTempU = strikeTempU;
    }
    public String getStrikeTempU() {
        return strikeTempU;
    }
    public void setStrikeTemp(double strikeTemp) {
        this.strikeTemp = strikeTemp;
    }
    public double getStrikeTemp() {
        return strikeTemp;
    }
    public double getDisplayStrikeTemp() {
        // NOTE(review): converts C->F when the unit is already "F" — looks
        // inverted if strikeTemp is stored in the unit named by strikeTempU.
        if (getStrikeTempU().equals("F"))
            return BrewCalcs.cToF(strikeTemp);
        return strikeTemp;
    }
    public String getName() {
        return name;
    }
    public void setName(String name) {
        this.name = name;
    }
}
/**
 * Append a fully-specified step and recalculate the schedule.
 *
 * @param weight grain weight for the step, pounds.
 * @return the step just created (already added to the schedule).
 */
public MashStep addStep(String type, double startTemp, double endTemp, String method, int min,
        int rampmin, double weight) {
    MashStep created = new MashStep(type, startTemp, endTemp, method, min, rampmin);
    created.weightLbs = weight;
    steps.add(created);
    calcMashSchedule();
    return created;
}
/**
 * Append a new step one degree above the previous step's end
 * temperature (default alpha-rest step when the schedule is empty),
 * then recalculate.
 *
 * @return the index of the step just added.
 */
public int addStep(){
    MashStep step = new MashStep();
    // calcStepType(temp);
    if (!steps.isEmpty()) {
        MashStep previous = steps.get(steps.size() - 1);
        double startTemp = previous.getEndTemp() + 1;
        step.setStartTemp(startTemp);
        step.setEndTemp(startTemp);
        step.setType(calcStepType(startTemp));
    }
    steps.add(step);
    calcMashSchedule();
    // the new step is the last one in the list
    return steps.size() - 1;
}
/** Remove step i (silently ignored when i is out of range) and recalculate. */
public void delStep(int i){
    boolean inRange = i > -1 && i < steps.size();
    if (inRange) {
        steps.remove(i);
        calcMashSchedule();
    }
}
// set methods:
// public void setMaltWeight(double mw) { maltWeightLbs = mw; }
public void setMashRatio(double mr){
    mashRatio = mr;
    calcMashSchedule();
}
public double getMashRatio(){
    return mashRatio;
}
public void setMashRatioU(String u){
    mashRatioU = u;
    calcMashSchedule();
}
public String getMashRatioU(){
    return mashRatioU;
}
public void setMashVolUnits(String u){
    volUnits = u;
    calcMashSchedule();
}
public void setMashTempUnits(String newUnits){
    // Anything ending in "F" means Fahrenheit; everything else is Celsius.
    if (newUnits.trim().endsWith("F")) {
        tempUnits = "F";
    } else {
        tempUnits = "C";
    }
    calcMashSchedule();
}
// TODO hardcoded temp strings should be a static somewhere
public void setGrainTemp(double t){
    // Input is in the current display unit; stored internally in F.
    if (tempUnits.equals("F"))
        grainTempF = t;
    else
        grainTempF = BrewCalcs.cToF(t);
    calcMashSchedule();
}
public void setBoilTemp(double t){
    // Input is in the current display unit; stored internally in F.
    if (tempUnits.equals("F"))
        boilTempF = t;
    else
        boilTempF = BrewCalcs.cToF(t);
    calcMashSchedule();
}
public void setTunLoss(double t){
    // Tun loss is a temperature *delta*: C deltas scale by 1.8, no 32 offset.
    if (tempUnits.equals("F"))
        tunLossF = t;
    else
        tunLossF = t * 1.8;
    calcMashSchedule();
}
public void setDeadSpace(double d) {
    deadSpace.setAmount(d);
}
public double getDeadSpace() {
    // Raw amount in deadSpace's own unit (no conversion).
    return deadSpace.getValue();
}
public void setDecoctRatio(String type, double r){
    // "thick" updates the thick-decoction ratio; anything else the thin one.
    if (type.equals("thick"))
        thickDecoctRatio = r;
    else
        thinDecoctRatio = r;
    calcMashSchedule();
}
public void setTempRange(String type, double t){
    // Adjust the mashout / sparge temperature floors (input in display units,
    // stored in F). Unknown types are ignored.
    if (tempUnits.equals("C"))
        t = BrewCalcs.cToF(t);
    if (type.equalsIgnoreCase(MASHOUT))
        MASHOUTTMPF = (float)t;
    if (type.equalsIgnoreCase(SPARGE))
        SPARGETMPF = (float)t;
}
public void setName(String s) { name = s; }
/**
 *
 * @param val Value to convert to a string
 * @return Val converted to the mash vol, formated to 1 decimal
 */
private String getVolConverted(double val){
    double d = Quantity.convertUnit(Quantity.QT, volUnits, val);
    String s = SBStringUtils.format(d, 1);
    return s;
}
// get methods:
public String getMashVolUnits(){ return volUnits; }
public String getMashTempUnits(){ return tempUnits; }
public int getMashTotalTime(){ return totalTime; }
public double getGrainTemp() {
    // Returned in the current display unit.
    if (tempUnits.equals("F"))
        return grainTempF;
    else
        return BrewCalcs.fToC(grainTempF);
}
public double getBoilTemp() {
    // Returned in the current display unit.
    if (tempUnits.equals("F"))
        return boilTempF;
    else
        return BrewCalcs.fToC(boilTempF);
}
public double getTempRange(String type){
    // Mashout/sparge floor in display units; 0 for unknown types.
    double t=0;
    if (type.equals(MASHOUT))
        t = MASHOUTTMPF;
    else if (type.equals(SPARGE))
        t = SPARGETMPF;
    if (tempUnits.equals("C"))
        t = BrewCalcs.fToC(t);
    return t;
}
public double getSpargeVol(){
    // Sparge volume converted from quarts to the display units.
    return Quantity.convertUnit(Quantity.QT, volUnits, spargeQTS);
}
public double getSpargeQts(){ return spargeQTS; }
public double getTunLoss() {
    // Temperature delta: C deltas divide by 1.8, no 32 offset.
    if (tempUnits.equals("F"))
        return tunLossF;
    else
        return ( tunLossF / 1.8 );
}
/**
 *
 * @return A string, which is the total converted to the mash units
 * + the units.
 */
public String getMashTotalVol() {
    double d = Quantity.convertUnit(Quantity.QT, volUnits, volQts);
    String s = SBStringUtils.format(d, 1) + " " + volUnits;
    return s;
}
public String getAbsorbedStr() {
    return getVolConverted(absorbedQTS);
}
public double getAbsorbedQts() {
    return absorbedQTS;
}
public String getTotalWaterStr() {
    return getVolConverted(totalWaterQTS);
}
public double getTotalWaterQts() {
    return totalWaterQTS;
}
public double getThickDecoctRatio() {
    return thickDecoctRatio;
}
public double getThinDecoctRatio() {
    return thinDecoctRatio;
}
public String getName(){ return name; }
/**
 * Set the rest type of step i and snap its start/end temperatures to
 * that type's midpoint temperature.
 *
 * Fixed bounds check: the old test (steps.size() &lt; i) let i == size()
 * through, so steps.get(i) threw IndexOutOfBoundsException; negative i
 * was not rejected either.
 *
 * @param i index of the step to change.
 * @param t one of the type constants (ACID ... SPARGE).
 * @return 0 on success, -1 when i is out of range.
 */
public int setStepType(int i, String t){
    if (i < 0 || i >= steps.size())
        return -1;
    MashStep ms = (MashStep)steps.get(i);
    ms.setType(t);
    ms.setStartTemp(calcStepTemp(t));
    ms.setEndTemp(calcStepTemp(t));
    return 0;
}
/**
 * Rest type of step i, or "" when i is out of range.
 *
 * Fixed bounds check: the old test (steps.size() &lt; i) let i == size()
 * through, so steps.get(i) threw IndexOutOfBoundsException; negative i
 * was not rejected either.
 */
public String getStepType(int i) {
    if (i < 0 || i >= steps.size())
        return "";
    MashStep ms = (MashStep)steps.get(i);
    return ms.getType();
}
public String getStepDirections(int i){
    return ((MashStep)steps.get(i)).getDirections();
}
public void setStepMethod(int i, String m){
    // Switching to a cereal mash zeroes the step weight (user re-enters it).
    ((MashStep)steps.get(i)).setMethod(m);
    if (m.equals(CEREAL_MASH))
        ((MashStep)steps.get(i)).weightLbs = 0;
    calcMashSchedule();
}
public String getStepMethod(int i) {
    return ((MashStep)steps.get(i)).getMethod();
}
public void setStepStartTemp(int i, double t){
    // Input in display units, stored in F; the end temp and type follow
    // the new start temperature.
    if (tempUnits.equals("C")){
        t = BrewCalcs.cToF(t);
    }
    ((MashStep)steps.get(i)).setStartTemp(t);
    ((MashStep)steps.get(i)).setEndTemp(t);
    ((MashStep)steps.get(i)).setType(calcStepType(t));
    calcMashSchedule();
}
public double getStepStartTemp(int i) {
    // Returned in display units.
    if (tempUnits.equals("C"))
        return BrewCalcs.fToC(((MashStep)steps.get(i)).getStartTemp());
    else
        return ((MashStep)steps.get(i)).getStartTemp();
}
public void setStepEndTemp(int i, double t){
    // Input in display units, stored in F.
    if (tempUnits.equals("C"))
        ((MashStep)steps.get(i)).setEndTemp(BrewCalcs.cToF(t));
    else
        ((MashStep)steps.get(i)).setEndTemp(t);
    calcMashSchedule();
}
public double getStepEndTemp(int i) {
    // Returned in display units.
    if (tempUnits.equals("C"))
        return BrewCalcs.fToC(((MashStep)steps.get(i)).getEndTemp());
    else
        return ((MashStep)steps.get(i)).getEndTemp();
}
public void setStepRampMin(int i, int m){
    ((MashStep)steps.get(i)).setRampMin(m);
}
public int getStepRampMin(int i) {
    return ((MashStep)steps.get(i)).getRampMin();
}
public double getStepTemp(int i) {
    // 0 means "no strike/heat temperature"; otherwise display units.
    if (((MashStep)steps.get(i)).getTemp() == 0)
        return 0;
    if (tempUnits.equals("F"))
        return steps.get(i).getTemp();
    else
        return BrewCalcs.fToC(steps.get(i).getTemp());
}
public double getStepWeight(int i) {
    // Converted from internal pounds to the recipe's malt units.
    double w = steps.get(i).weightLbs;
    return Quantity.convertUnit(Quantity.LB, myRecipe.getMaltUnits(), w);
}
public void setStepWeight(int i, double w){
    // you can only set the weight on a cereal mash step
    MashStep s = steps.get(i);
    if (s.method.equals(CEREAL_MASH)){
        double w2 = Quantity.convertUnit(myRecipe.getMaltUnits(), Quantity.LB, w);
        s.weightLbs = w2;
        calcMashSchedule();
    }
}
public void setStepMin(int i, int m){
    steps.get(i).setMinutes(m);
}
public int getStepMin(int i) {
    return steps.get(i).getMinutes();
}
public double getStepInVol(int i) {
    // inVol is stored in quarts by calcMashSchedule(); convert for display.
    double vol = steps.get(i).getInVol().getValue();
    return Quantity.convertUnit(Quantity.QT, volUnits, vol);
}
public Quantity getStepInQuantity(int i) {
    return steps.get(i).getInVol();
}
public double getStepOutVol(int i) {
    double vol = steps.get(i).getOutVol().getValue();
    return Quantity.convertUnit(Quantity.QT, volUnits, vol);
}
public Quantity getStepOutQuantity(int i) {
    return steps.get(i).getOutVol();
}
public int getStepSize(){
    // Number of steps currently in the mash schedule.
    return steps.size();
}
// Introducing: the big huge mash calc method!
public void calcMashSchedule() {
// Method to run through the mash table and calculate values
if (!myRecipe.allowRecalcs)
return;
double targetTemp = 0;
double waterAddedQTS = 0;
double waterEquiv = 0;
double currentTemp = getGrainTemp();
double displTemp = 0;
double tunLoss; // figure out a better way to do this, eg: themal mass
double decoct = 0;
int totalMashTime = 0;
int totalSpargeTime = 0;
double mashWaterQTS = 0;
double mashVolQTS = 0;
int numSparge = 0;
double totalWeightLbs = 0;
double totalCerealLbs = 0;
maltWeightLbs = myRecipe.getTotalMashLbs();
// convert CurrentTemp to F
if (tempUnits.equals("C")) {
currentTemp = BrewCalcs.cToF(currentTemp);
tunLoss = tunLossF * 1.8;
} else {
tunLoss = tunLossF;
}
// perform calcs on first record
if (steps.isEmpty())
return;
// sort the list
Collections.sort(steps);
// add up the cereal mash lbs
for (int i = 0; i < steps.size(); i++) {
MashStep stp = ((MashStep) steps.get(i));
// convert mash ratio to qts/lb if in l/kg
double mr = stp.getMashRatio();
if (stp.getMashRatioU().equalsIgnoreCase(L_PER_KG)) {
mr *= 0.479325;
}
if (stp.method.equals("cereal mash")) {
totalCerealLbs += stp.weightLbs;
}
stp.setStrikeTemp(calcStrikeTemp(stp.getStartTemp(), currentTemp, mr, tunLoss));
stp.setStrikeTempU(tempUnits);
waterAddedQTS += stp.weightLbs * mr;
waterEquiv += stp.weightLbs * (0.192 + mr);
mashVolQTS += calcMashVol(stp.weightLbs, mr);
}
totalWeightLbs = maltWeightLbs - totalCerealLbs;
// the first step is always an infusion
MashStep stp = steps.get(0);
totalMashTime += stp.minutes;
mashWaterQTS += waterAddedQTS;
stp.inVol.setUnits(Quantity.QT);
stp.inVol.setAmount(waterAddedQTS);
stp.method = INFUSION;
stp.weightLbs = totalWeightLbs;
// subtract the water added from the Water Equiv so that they are correct when added in the next part of the loop
waterEquiv -= waterAddedQTS;
stp.directions = "Mash in with " + SBStringUtils.format(stp.inVol.getValueAs(volUnits), 1) + " " + volUnits
+ " of water at " + SBStringUtils.format(stp.getDisplayStrikeTemp(), 1) + " " + stp.getStrikeTempU();
// set TargetTemp to the end temp
targetTemp = stp.endTemp;
for (int i = 1; i < steps.size(); i++) {
stp = steps.get(i);
currentTemp = targetTemp; // switch
targetTemp = stp.startTemp;
// if this is a former sparge step that's been changed, change
// the method to infusion
BrewServer.LOG.info("Mash step Type: " + stp.type + " Method: " + stp.method);
if (!stp.type.equals(SPARGE) && (stp.method.equals(FLY) || stp.method.equals(BATCH)))
stp.method = INFUSION;
// do calcs
if (stp.method.equals(INFUSION)) { // calculate an infusion step
decoct = 0;
waterEquiv += waterAddedQTS; // add previous addition to get WE
double strikeTemp = boilTempF; // boiling water
// Updated the water added
waterAddedQTS = calcWaterAddition(targetTemp, currentTemp,
waterEquiv, boilTempF);
stp.outVol.setAmount(0);
stp.inVol.setUnits(Quantity.QT);
stp.inVol.setAmount(waterAddedQTS);
stp.temp = strikeTemp;
stp.weightLbs = totalWeightLbs;
if (tempUnits == "C")
strikeTemp = 100;
stp.directions = "Add " + SBStringUtils.format(stp.inVol.getValueAs(volUnits), 1) + " " + volUnits
+ " of water at " + SBStringUtils.format(strikeTemp, 1) + " " + tempUnits;
mashWaterQTS += waterAddedQTS;
mashVolQTS += waterAddedQTS;
} else if (stp.method.indexOf(DECOCTION) > -1) { // calculate a decoction step
waterEquiv += waterAddedQTS; // add previous addition to get WE
waterAddedQTS = 0;
double ratio = 0.75;
if (stp.method.indexOf(DECOCTION_THICK) > -1)
ratio = thickDecoctRatio;
else if (stp.method.indexOf(DECOCTION_THIN) > -1)
ratio = thinDecoctRatio;
// Calculate volume (qts) of mash to remove
decoct = calcDecoction2(targetTemp, currentTemp, mashWaterQTS, ratio, totalWeightLbs);
stp.outVol.setUnits(Quantity.QT);
stp.outVol.setAmount(decoct);
stp.inVol.setAmount(0);
stp.temp = boilTempF;
stp.weightLbs = totalWeightLbs;
// Updated the decoction, convert to right units & make directions
stp.directions = "Remove " + SBStringUtils.format(stp.outVol.getValueAs(volUnits), 1) + " " + volUnits
+ " of mash, boil, and return to mash.";
} else if (stp.method.equals(DIRECT)) { // calculate a direct heat step
waterEquiv += waterAddedQTS; // add previous addition to get WE
waterAddedQTS = 0;
displTemp = stp.startTemp;
if (tempUnits.equals("C"))
displTemp = BrewCalcs.fToC(displTemp);
stp.directions = "Add direct heat until mash reaches " + displTemp
+ " " + tempUnits + ".";
stp.inVol.setAmount(0);
stp.outVol.setAmount(0);
stp.temp = 0;
stp.weightLbs = totalWeightLbs;
} else if (stp.method.indexOf(CEREAL_MASH) > -1) { // calculate a cereal mash step
double mr = stp.getMashRatio();
if (stp.getMashRatioU().equalsIgnoreCase(L_PER_KG)) {
mr *= 0.479325;
}
waterEquiv += waterAddedQTS; // add previous addition to get WE
targetTemp = stp.startTemp;
double extraWaterQTS = 0;
double cerealTemp = boilTempF;
double cerealTargTemp = cerealMashTemp;
String addStr = "";
/*
* 1. check the temp of the mash when you add the boiling cereal mash @ default ratio back
* 2. if it's > than the step temp, adjust the step temp
* 3. if it's < than the step temp, add extra water to increase the "heat equivalencey" of the cereal mash
*/
double cerealWaterEquiv = stp.weightLbs * (0.192 + mr);
waterAddedQTS = mr * stp.weightLbs;
double strikeTemp = calcStrikeTemp(cerealMashTemp, grainTempF, mr, 0);
double newTemp = ((waterEquiv * currentTemp) + (cerealWaterEquiv * cerealTemp)) / (waterEquiv + cerealWaterEquiv);
if (newTemp > targetTemp) {
stp.startTemp = newTemp;
}
if (newTemp < targetTemp) {
double addQts = ((waterEquiv * (targetTemp - currentTemp)) / (cerealTemp - targetTemp)) - 0.192;
extraWaterQTS = addQts - waterAddedQTS;
addStr = " Add " + SBStringUtils.format(Quantity.convertUnit("qt", volUnits, extraWaterQTS), 1)
+ " " + volUnits + " water to the cereal mash.";
}
// Calculate final temp of cereal mash
// cerealTemp = (targetTemp * (waterEquiv + cerealWaterEquiv) - (waterEquiv * currentTemp)) / cerealWaterEquiv;
totalMashTime += stp.minutes;
mashWaterQTS += waterAddedQTS + extraWaterQTS;
stp.inVol.setUnits(Quantity.QT);
stp.inVol.setAmount(waterAddedQTS);
stp.outVol.setAmount(0);
stp.temp = strikeTemp;
// make directions
String weightStr = SBStringUtils.format(Quantity.convertUnit(Quantity.LB, myRecipe.getMaltUnits(), stp.weightLbs), 1)
+ " " + myRecipe.getMaltUnits();
String volStr = SBStringUtils.format(Quantity.convertUnit(Quantity.QT, volUnits, waterAddedQTS), 1)
+ " " + volUnits;
if (tempUnits == "C") {
strikeTemp = BrewCalcs.fToC(strikeTemp);
cerealTemp = BrewCalcs.fToC(cerealTemp);
targetTemp = BrewCalcs.fToC(targetTemp);
cerealTargTemp = BrewCalcs.fToC(cerealTargTemp);
}
String tempStr = SBStringUtils.format(strikeTemp, 1) + tempUnits;
String tempStr2 = SBStringUtils.format(cerealTemp, 1) + tempUnits;
String tempStr3 = SBStringUtils.format(targetTemp, 1) + tempUnits;
String tempStr4 = SBStringUtils.format(cerealTargTemp, 1) + tempUnits;
stp.directions = "Cereal mash: mash " + weightStr + " grain with " + volStr + " water at " +
tempStr + " to hit " + tempStr4 + " and rest.";
stp.directions += addStr;
stp.directions += " Raise to " + tempStr2 + " and add to the main mash to reach " + tempStr3;
// add cereal mash to total weight
totalWeightLbs += stp.weightLbs;
} else {
BrewServer.LOG.warning("Unrecognised mash step: " + stp.method);
}
if (stp.type.equals(SPARGE))
numSparge++;
else {
totalMashTime += stp.minutes;
}
// set target temp to end temp for next step
targetTemp = stp.endTemp;
} // for steps.size()
waterEquiv += waterAddedQTS; // add previous addition to get WE
totalTime = totalMashTime;
volQts = mashVolQTS;
// water use stats:
BrewServer.LOG.warning("Total weight: " + totalWeightLbs);
absorbedQTS = totalWeightLbs * 0.52; // figure from HBD
// spargeTotalQTS = (myRecipe.getPreBoilVol("qt")) - (mashWaterQTS - absorbedQTS);
totalWaterQTS = mashWaterQTS;
spargeQTS = myRecipe.getPreBoilVol(Quantity.QT) -
(mashWaterQTS - absorbedQTS - deadSpace.getValueAs(Quantity.QT));
BrewServer.LOG.warning("Sparge Quarts: " + spargeQTS);
// Now let's figure out the sparging:
if (numSparge == 0)
return;
// Amount to collect per sparge
double col = myRecipe.getPreBoilVol(Quantity.QT) / numSparge;
double charge[] = new double[numSparge];
double collect[] = new double[numSparge];
double totalCollectQts = myRecipe.getPreBoilVol(Quantity.QT);
// do we need to add more water to charge up
// is the amount we need to collect less than the initial mash volume - absorbption
System.out.println("Collecting: " + col + " MashWater " + mashWaterQTS +
" Absorbed " + absorbedQTS + " Loss: " + deadSpace.getValueAs(Quantity.QT));
if (col <= (mashWaterQTS - absorbedQTS)) {
charge[0] = 0;
collect[0] = mashWaterQTS - absorbedQTS; // how much is left over from the mash
totalCollectQts = totalCollectQts - collect[0];
} else {
charge[0] = col - (mashWaterQTS - absorbedQTS); // add the additional water to get out the desired first collection amount PER sparge
collect[0] = col;
totalCollectQts = totalCollectQts - collect[0];
}
// do we need any more steps?
if (numSparge > 1) {
/*
batch_1_sparge_liters = (boil_size_l/<total number of steps> ) - mash_water_l + grain_wt_kg * 0.625)
batch_2_liters = boil_size_l / <total number of steps>
*/
BrewServer.LOG.info("NumSparge: " + numSparge);
BrewServer.LOG.info("Collecting: " + col);
for (int i = 1; i < numSparge; i++) {
charge[i] = col;
collect[i] = col;
}
}
int j = 0;
for (int i = 1; i < steps.size(); i++) {
stp = ((MashStep) steps.get(i));
if (stp.getType().equals(SPARGE)) {
stp.inVol.setUnits(Quantity.QT);
stp.inVol.setAmount(charge[j]);
stp.outVol.setUnits(Quantity.QT);
stp.outVol.setAmount(collect[j]);
stp.temp = SPARGETMPF;
totalSpargeTime += stp.getMinutes();
String collectStr = SBStringUtils.format(Quantity.convertUnit(Quantity.QT, volUnits, collect[j]), 2) +
" " + volUnits;
String tempStr;
if (tempUnits.equals("F")) {
tempStr = "" + SBStringUtils.format(SPARGETMPF, 1) + "F";
} else {
tempStr = SBStringUtils.format(BrewCalcs.fToC(SPARGETMPF), 1) + "C";
}
if (numSparge > 1) {
stp.setMethod(BATCH);
String add = SBStringUtils.format(Quantity.convertUnit(Quantity.QT, volUnits, charge[j]), 2) +
" " + volUnits;
stp.setDirections("Add " + add + " at " + tempStr + " to collect " + collectStr);
} else {
stp.inVol.setUnits(Quantity.QT);
stp.inVol.setAmount(spargeQTS);
stp.outVol.setUnits(Quantity.QT);
stp.outVol.setAmount(collect[j]);
stp.setMethod(FLY);
stp.setDirections("Sparge with " +
SBStringUtils.format(Quantity.convertUnit("qt", volUnits, spargeQTS), 1) +
" " + volUnits + " at " + tempStr + " to collect " + collectStr);
}
j++;
}
}
}
// private methods:
/* from John Palmer:
* Vd (quarts) = [(T2 - T1)(.4G + 2W)] / [(Td - T1)(.4g + w)]
Where:
Vd = decoction volume
T1 = initial mash temperature
T2 = target mash temperature
Td = decoction temperature (212F)
G = Grainbill weight
W = volume of water in mash (i.e. initial infusion volume)
g = pounds of grain per quart of decoction = 1/(Rd + .32)
w = quarts of water per quart of decoction = g*Rd*water density = 2gRd
Rd = ratio of grain to water in the decoction volume (range of .6 to 1
quart/lb)
thick decoctions will have a ratio of .6-.7, thinner decoctions will
have a ratio of .8-.9
*/
private double calcDecoction2(double targetTemp, double currentTemp, double waterVolQTS, double ratio, double weightLbs){
double decoctQTS=0;
double g = 1 / (ratio + .32);
double w = 2 * g * ratio;
decoctQTS = ((targetTemp - currentTemp) * ((0.4 * weightLbs) + (2 * waterVolQTS)))
/ ((boilTempF - currentTemp) * (0.4 * g + w));
return decoctQTS;
}
private String calcStepType(double temp) {
String stepType = "none";
// less than 90, none
// 86 - 95 - acid
if (temp >= ACIDTMPF && temp < GLUCANTMPF)
stepType = ACID;
// 95 - 113 - glucan
else if (temp < PROTEINTMPF)
stepType = GLUCAN;
// 113 - 131 protein
else if (temp < BETATMPF)
stepType = PROTEIN;
// 131 - 150 beta
else if (temp < ALPHATMPF)
stepType = BETA;
// 150-162 alpha
else if (temp < MASHOUTTMPF)
stepType = ALPHA;
// 163-169, mashout
else if (temp < SPARGETMPF)
stepType = MASHOUT;
// over 170, sparge
else if (temp >= SPARGETMPF)
stepType = SPARGE;
return stepType;
}
private double calcStepTemp(String stepType) {
float stepTempF = 0;
if (stepType == ACID)
stepTempF = (ACIDTMPF + GLUCANTMPF) / 2;
else if (stepType == GLUCAN)
stepTempF = (GLUCANTMPF + PROTEINTMPF) / 2;
else if (stepType == PROTEIN)
stepTempF = (PROTEINTMPF + BETATMPF) / 2;
else if (stepType == BETA)
stepTempF = (BETATMPF + ALPHATMPF) / 2;
else if (stepType == ALPHA)
stepTempF = (ALPHATMPF + MASHOUTTMPF) / 2;
else if (stepType == MASHOUT)
stepTempF = (MASHOUTTMPF + SPARGETMPF) / 2;
else if (stepType == SPARGE)
stepTempF = SPARGETMPF;
return stepTempF;
}
double calcMashVol(double grainWeightLBS, double ratio) {
// given lbs and ratio, what is the volume of the grain in quarts?
// note: this calc is for the first record only, and returns the heat equivalent of
// grain + water added for first infusion
// HBD posts indicate 0.32, but reality is closer to 0.42
return (grainWeightLBS * (0.42 + ratio));
}
double calcStrikeTemp(double targetTemp, double currentTemp, double ratio,
double tunLossF) {
// calculate strike temp
// Ratio is in quarts / lb, TunLoss is in F
// this uses thermal mass:
// double strikeTemp = (maltWeightLbs + thermalMass)*( targetTemp - currentTemp )/( boilTempF - targetTemp );
return (targetTemp + 0.192 * (targetTemp - currentTemp) / ratio)
+ tunLossF;
}
double calcWaterAddition(double targetTemp, double currentTemp,
double mashVol, double boilTempF) {
// calculate amount of boiling water to add to raise mash to new temp
return (mashVol * (targetTemp - currentTemp) / (boilTempF - targetTemp));
}
public String toXml() {
calcMashSchedule();
StringBuffer sb = new StringBuffer();
sb.append(" <MASH>\n");
sb.append(SBStringUtils.xmlElement("NAME", name, 4));
sb.append(SBStringUtils.xmlElement("MASH_VOLUME", SBStringUtils.format(Quantity.convertUnit("qt", volUnits, volQts), 2) , 4));
sb.append(SBStringUtils.xmlElement("MASH_VOL_U", "" + volUnits, 4));
sb.append(SBStringUtils.xmlElement("MASH_RATIO", "" + mashRatio, 4));
sb.append(SBStringUtils.xmlElement("MASH_RATIO_U", "" + mashRatioU, 4));
sb.append(SBStringUtils.xmlElement("MASH_TIME", "" + totalTime, 4));
sb.append(SBStringUtils.xmlElement("MASH_TMP_U", "" + tempUnits, 4));
sb.append(SBStringUtils.xmlElement("THICK_DECOCT_RATIO", "" + thickDecoctRatio, 4));
sb.append(SBStringUtils.xmlElement("THIN_DECOCT_RATIO", "" + thinDecoctRatio, 4));
if (tempUnits.equals("C")){
sb.append(SBStringUtils.xmlElement("MASH_TUNLOSS_TEMP", "" + (tunLossF/1.8), 4));
sb.append(SBStringUtils.xmlElement("GRAIN_TEMP", "" + BrewCalcs.fToC(grainTempF), 4));
sb.append(SBStringUtils.xmlElement("BOIL_TEMP", "" + BrewCalcs.fToC(boilTempF), 4));
}
else {
sb.append(SBStringUtils.xmlElement("MASH_TUNLOSS_TEMP", "" + tunLossF, 4));
sb.append(SBStringUtils.xmlElement("GRAIN_TEMP", "" + grainTempF, 4));
sb.append(SBStringUtils.xmlElement("BOIL_TEMP", "" + boilTempF, 4));
}
for (int i = 0; i < steps.size(); i++) {
MashStep st = (MashStep) steps.get(i);
sb.append(" <ITEM>\n");
sb.append(" <TYPE>" + st.type + "</TYPE>\n");
sb.append(" <TEMP>" + st.startTemp + "</TEMP>\n");
if (tempUnits.equals("C"))
sb.append(" <DISPL_TEMP>" + SBStringUtils.format(BrewCalcs.fToC(st.startTemp), 1) + "</DISPL_TEMP>\n");
else
sb.append(" <DISPL_TEMP>" + st.startTemp + "</DISPL_TEMP>\n");
sb.append(" <END_TEMP>" + st.endTemp + "</END_TEMP>\n");
if (tempUnits.equals("C"))
sb.append(" <DISPL_END_TEMP>" + SBStringUtils.format(BrewCalcs.fToC(st.endTemp), 1) + "</DISPL_END_TEMP>\n");
else
sb.append(" <DISPL_END_TEMP>" + st.endTemp + "</DISPL_END_TEMP>\n");
sb.append(" <MIN>" + st.minutes + "</MIN>\n");
sb.append(" <RAMP_MIN>" + st.rampMin + "</RAMP_MIN>\n");
sb.append(" <METHOD>" + st.method + "</METHOD>\n");
sb.append(" <WEIGHT_LBS>" + st.weightLbs + "</WEIGHT_LBS>\n");
sb.append(" <DIRECTIONS>" + st.directions + "</DIRECTIONS>\n");
sb.append(" </ITEM>\n");
}
sb.append(" </MASH>\n");
return sb.toString();
}
/**
* Return the current mash steps as a JSON Representation.
* @return
*/
public JSONObject toJSONObject(String device) {
JSONObject mashObject = new JSONObject();
for (int i = 0; i < steps.size(); i++) {
MashStep st = (MashStep) steps.get(i);
JSONObject currentStep = new JSONObject();
currentStep.put("type", st.type);
currentStep.put("method", st.method);
currentStep.put("temp", st.getStartTemp());
currentStep.put("duration", st.minutes);
currentStep.put("tempUnit", this.tempUnits);
currentStep.put("position", i);
mashObject.put(i, currentStep);
}
if (device != null) {
mashObject.put("pid", device);
}
return mashObject;
}
} | Remove commas in infusestep
| src/main/java/ca/strangebrew/recipe/Mash.java | Remove commas in infusestep | <ide><path>rc/main/java/ca/strangebrew/recipe/Mash.java
<ide> public void setInfuseTemp(String infuseTemp) {
<ide> String[] split = infuseTemp.trim().split(" ");
<ide> try {
<del> this.infuseTemp = Double.parseDouble(split[0].trim());
<add> this.infuseTemp = Double.parseDouble(split[0].trim().replace(",", ""));
<ide> this.infuseTempUnit = split[1].trim();
<ide> } catch (NumberFormatException nfe) {
<ide> System.out.println("Couldn't parse: " + split[0] + " as a number."); |
|
JavaScript | mit | 425afad7cf7bbe32334bae73821edac71aa845b9 | 0 | rumps/rump-server,rumps/server | 'use strict';
var browserSync = require('browser-sync');
var extend = require('extend');
var gulp = require('gulp');
var path = require('path');
var rump = require('rump');
gulp.task('rump:server', ['rump:watch'], function(callback) {
var options = rump.configs.browserSync;
var files = path.join(rump.configs.main.paths.destination.root,
rump.configs.main.globs.watch.server);
if(rump.configs.main.environment === 'development') {
options = extend({files: files}, options);
}
browserSync(options, callback);
});
| gulp/server.js | 'use strict';
var browserSync = require('browser-sync');
var chalk = require('chalk');
var extend = require('extend');
var gulp = require('gulp');
var path = require('path');
var rump = require('rump');
gulp.task('rump:serve', ['rump:watch'], function(callback) {
var options = rump.configs.browserSync;
var files = path.join(rump.configs.main.paths.destination.root,
rump.configs.main.globs.watch.server);
if(rump.configs.main.environment === 'development') {
options = extend({files: files}, options);
}
browserSync(options, callback);
});
gulp.task('rump:server', ['rump:serve'], function() {
console.log(chalk.red('rump:server is deprecated. Use rump:serve instead.'));
});
| Go back to rump:server
| gulp/server.js | Go back to rump:server | <ide><path>ulp/server.js
<ide> 'use strict';
<ide>
<ide> var browserSync = require('browser-sync');
<del>var chalk = require('chalk');
<ide> var extend = require('extend');
<ide> var gulp = require('gulp');
<ide> var path = require('path');
<ide> var rump = require('rump');
<ide>
<del>gulp.task('rump:serve', ['rump:watch'], function(callback) {
<add>gulp.task('rump:server', ['rump:watch'], function(callback) {
<ide> var options = rump.configs.browserSync;
<ide> var files = path.join(rump.configs.main.paths.destination.root,
<ide> rump.configs.main.globs.watch.server);
<ide> }
<ide> browserSync(options, callback);
<ide> });
<del>
<del>gulp.task('rump:server', ['rump:serve'], function() {
<del> console.log(chalk.red('rump:server is deprecated. Use rump:serve instead.'));
<del>}); |
|
Java | apache-2.0 | 46de504996c97cefd595937930dabf1eef998376 | 0 | madanadit/alluxio,EvilMcJerkface/alluxio,Reidddddd/alluxio,Alluxio/alluxio,jswudi/alluxio,calvinjia/tachyon,PasaLab/tachyon,calvinjia/tachyon,maboelhassan/alluxio,Alluxio/alluxio,maboelhassan/alluxio,jsimsa/alluxio,uronce-cc/alluxio,wwjiang007/alluxio,Alluxio/alluxio,WilliamZapata/alluxio,Reidddddd/alluxio,calvinjia/tachyon,apc999/alluxio,maboelhassan/alluxio,EvilMcJerkface/alluxio,uronce-cc/alluxio,Alluxio/alluxio,yuluo-ding/alluxio,Alluxio/alluxio,PasaLab/tachyon,maobaolong/alluxio,maobaolong/alluxio,maboelhassan/alluxio,PasaLab/tachyon,wwjiang007/alluxio,maboelhassan/alluxio,apc999/alluxio,WilliamZapata/alluxio,jswudi/alluxio,jsimsa/alluxio,maobaolong/alluxio,ChangerYoung/alluxio,bf8086/alluxio,ShailShah/alluxio,riversand963/alluxio,yuluo-ding/alluxio,yuluo-ding/alluxio,bf8086/alluxio,maobaolong/alluxio,Reidddddd/mo-alluxio,ChangerYoung/alluxio,jswudi/alluxio,Reidddddd/alluxio,wwjiang007/alluxio,ShailShah/alluxio,riversand963/alluxio,aaudiber/alluxio,calvinjia/tachyon,yuluo-ding/alluxio,Reidddddd/mo-alluxio,wwjiang007/alluxio,yuluo-ding/alluxio,Reidddddd/alluxio,riversand963/alluxio,Alluxio/alluxio,jsimsa/alluxio,apc999/alluxio,riversand963/alluxio,Alluxio/alluxio,maobaolong/alluxio,EvilMcJerkface/alluxio,PasaLab/tachyon,calvinjia/tachyon,uronce-cc/alluxio,WilliamZapata/alluxio,bf8086/alluxio,aaudiber/alluxio,jswudi/alluxio,ShailShah/alluxio,riversand963/alluxio,ShailShah/alluxio,EvilMcJerkface/alluxio,Reidddddd/alluxio,madanadit/alluxio,wwjiang007/alluxio,calvinjia/tachyon,jswudi/alluxio,EvilMcJerkface/alluxio,PasaLab/tachyon,madanadit/alluxio,aaudiber/alluxio,aaudiber/alluxio,calvinjia/tachyon,yuluo-ding/alluxio,uronce-cc/alluxio,aaudiber/alluxio,uronce-cc/alluxio,uronce-cc/alluxio,maboelhassan/alluxio,WilliamZapata/alluxio,WilliamZapata/alluxio,EvilMcJerkface/alluxio,jsimsa/alluxio,WilliamZapata/alluxio,ChangerYoung/alluxio,PasaLab/tachyon,PasaLab/tachyon,ShailShah/alluxio,aaudiber/alluxio,jsim
sa/alluxio,maobaolong/alluxio,calvinjia/tachyon,madanadit/alluxio,aaudiber/alluxio,Reidddddd/mo-alluxio,Reidddddd/mo-alluxio,Alluxio/alluxio,maobaolong/alluxio,Reidddddd/mo-alluxio,ChangerYoung/alluxio,ShailShah/alluxio,wwjiang007/alluxio,EvilMcJerkface/alluxio,bf8086/alluxio,apc999/alluxio,madanadit/alluxio,madanadit/alluxio,apc999/alluxio,riversand963/alluxio,Reidddddd/alluxio,wwjiang007/alluxio,bf8086/alluxio,maboelhassan/alluxio,maobaolong/alluxio,jswudi/alluxio,maobaolong/alluxio,ChangerYoung/alluxio,bf8086/alluxio,wwjiang007/alluxio,Alluxio/alluxio,ChangerYoung/alluxio,Reidddddd/mo-alluxio,Alluxio/alluxio,wwjiang007/alluxio,madanadit/alluxio,apc999/alluxio,bf8086/alluxio,apc999/alluxio,wwjiang007/alluxio,Reidddddd/alluxio,EvilMcJerkface/alluxio,madanadit/alluxio,maobaolong/alluxio,bf8086/alluxio,jsimsa/alluxio | /*
* The Alluxio Open Foundation licenses this work under the Apache License, version 2.0
* (the "License"). You may not use this work except in compliance with the License, which is
* available at www.apache.org/licenses/LICENSE-2.0
*
* This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied, as more fully set forth in the License.
*
* See the NOTICE file distributed with this work for information regarding copyright ownership.
*/
package alluxio.client.file;
import alluxio.Constants;
import alluxio.annotation.PublicApi;
import alluxio.client.AlluxioStorageType;
import alluxio.client.BoundedStream;
import alluxio.client.Seekable;
import alluxio.client.block.BlockInStream;
import alluxio.client.block.BlockStoreContext;
import alluxio.client.block.BufferedBlockOutStream;
import alluxio.client.block.LocalBlockInStream;
import alluxio.client.block.RemoteBlockInStream;
import alluxio.client.block.UnderStoreBlockInStream;
import alluxio.client.file.options.InStreamOptions;
import alluxio.client.file.policy.FileWriteLocationPolicy;
import alluxio.exception.AlluxioException;
import alluxio.exception.BlockAlreadyExistsException;
import alluxio.exception.BlockDoesNotExistException;
import alluxio.exception.InvalidWorkerStateException;
import alluxio.exception.PreconditionMessage;
import alluxio.master.block.BlockId;
import alluxio.wire.WorkerNetAddress;
import com.google.common.base.Preconditions;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.InputStream;
import javax.annotation.concurrent.NotThreadSafe;
/**
* A streaming API to read a file. This API represents a file as a stream of bytes and provides a
* collection of {@link #read} methods to access this stream of bytes. In addition, one can seek
* into a given offset of the stream to read.
* <p>
* This class wraps the {@link BlockInStream} for each of the blocks in the file and abstracts the
* switching between streams. The backing streams can read from Alluxio space in the local machine,
* remote machines, or the under storage system.
*/
@PublicApi
@NotThreadSafe
public class FileInStream extends InputStream implements BoundedStream, Seekable {
private static final Logger LOG = LoggerFactory.getLogger(Constants.LOGGER_TYPE);
/** How the data should be written into Alluxio space, if at all. */
protected final AlluxioStorageType mAlluxioStorageType;
/** Standard block size in bytes of the file, guaranteed for all but the last block. */
protected final long mBlockSize;
/** The location policy for CACHE type of read into Alluxio. */
protected final FileWriteLocationPolicy mLocationPolicy;
/** Total length of the file in bytes. */
protected final long mFileLength;
/** File System context containing the {@link FileSystemMasterClient} pool. */
protected final FileSystemContext mContext;
/** File information. */
protected URIStatus mStatus;
/** Constant error message for block ID not cached. */
protected static final String BLOCK_ID_NOT_CACHED =
"The block with ID {} could not be cached into Alluxio storage.";
/** Error message for cache collision. */
private static final String BLOCK_ID_EXISTS_SO_NOT_CACHED =
"The block with ID {} is already stored in the target worker, canceling the cache request.";
/** If the stream is closed, this can only go from false to true. */
protected boolean mClosed;
/**
* Current position of the file instream.
*/
protected long mPos;
/**
* Caches the entire block even if only a portion of the block is read. Only valid when
* mShouldCache is true.
*/
private final boolean mShouldCachePartiallyReadBlock;
/** Whether to cache blocks in this file into Alluxio. */
private final boolean mShouldCache;
// The following 3 fields must be kept in sync. They are only updated in updateStreams together.
/** Current {@link BlockInStream} backing this stream. */
protected BlockInStream mCurrentBlockInStream;
/** Current {@link BufferedBlockOutStream} writing the data into Alluxio. */
protected BufferedBlockOutStream mCurrentCacheStream;
/** The blockId used in the block streams. */
private long mStreamBlockId;
/** The read buffer in file seek. This is used in {@link #readCurrentBlockToEnd()}. */
private byte[] mSeekBuffer;
/**
* Creates a new file input stream.
*
* @param status the file status
* @param options the client options
* @return the created {@link FileInStream} instance
*/
public static FileInStream create(URIStatus status, InStreamOptions options) {
if (status.getLength() == Constants.UNKNOWN_SIZE) {
return new UnknownLengthFileInStream(status, options);
}
return new FileInStream(status, options);
}
/**
* Creates a new file input stream.
*
* @param status the file status
* @param options the client options
*/
protected FileInStream(URIStatus status, InStreamOptions options) {
mStatus = status;
mBlockSize = status.getBlockSizeBytes();
mFileLength = status.getLength();
mContext = FileSystemContext.INSTANCE;
mAlluxioStorageType = options.getAlluxioStorageType();
mShouldCache = mAlluxioStorageType.isStore();
mShouldCachePartiallyReadBlock = options.isCachePartiallyReadBlock();
mClosed = false;
mLocationPolicy = options.getLocationPolicy();
if (mShouldCache) {
Preconditions.checkNotNull(options.getLocationPolicy(),
PreconditionMessage.FILE_WRITE_LOCATION_POLICY_UNSPECIFIED);
}
int seekBufferSizeBytes = Math.max((int) options.getSeekBufferSizeBytes(), Constants.KB);
mSeekBuffer = new byte[seekBufferSizeBytes];
LOG.debug(options.toString());
}
@Override
public void close() throws IOException {
if (mClosed) {
return;
}
updateStreams();
if (mCurrentCacheStream != null && mShouldCachePartiallyReadBlock) {
readCurrentBlockToEnd();
}
if (mCurrentBlockInStream != null) {
mCurrentBlockInStream.close();
}
closeOrCancelCacheStream();
mClosed = true;
}
@Override
public int read() throws IOException {
if (remaining() <= 0) {
return -1;
}
updateStreams();
Preconditions.checkState(mCurrentBlockInStream != null, PreconditionMessage.ERR_UNEXPECTED_EOF);
int data = mCurrentBlockInStream.read();
if (data == -1) {
// The underlying stream is done.
return -1;
}
mPos++;
if (mCurrentCacheStream != null) {
try {
mCurrentCacheStream.write(data);
} catch (IOException e) {
handleCacheStreamIOException(e);
}
}
return data;
}
@Override
public int read(byte[] b) throws IOException {
return read(b, 0, b.length);
}
@Override
public int read(byte[] b, int off, int len) throws IOException {
Preconditions.checkArgument(b != null, PreconditionMessage.ERR_READ_BUFFER_NULL);
Preconditions.checkArgument(off >= 0 && len >= 0 && len + off <= b.length,
PreconditionMessage.ERR_BUFFER_STATE.format(b.length, off, len));
if (len == 0) {
return 0;
} else if (remaining() <= 0) {
return -1;
}
int currentOffset = off;
int bytesLeftToRead = len;
while (bytesLeftToRead > 0 && remaining() > 0) {
updateStreams();
Preconditions.checkNotNull(mCurrentBlockInStream, PreconditionMessage.ERR_UNEXPECTED_EOF);
int bytesToRead = (int) Math.min(bytesLeftToRead, mCurrentBlockInStream.remaining());
int bytesRead = mCurrentBlockInStream.read(b, currentOffset, bytesToRead);
if (bytesRead > 0) {
if (mCurrentCacheStream != null) {
try {
mCurrentCacheStream.write(b, currentOffset, bytesRead);
} catch (IOException e) {
handleCacheStreamIOException(e);
}
}
mPos += bytesRead;
bytesLeftToRead -= bytesRead;
currentOffset += bytesRead;
}
}
if (bytesLeftToRead == len && mCurrentBlockInStream.remaining() == 0) {
// Nothing was read, and the underlying stream is done.
return -1;
}
return len - bytesLeftToRead;
}
@Override
public long remaining() {
return mFileLength - mPos;
}
@Override
public void seek(long pos) throws IOException {
if (mPos == pos) {
return;
}
Preconditions.checkArgument(pos >= 0, PreconditionMessage.ERR_SEEK_NEGATIVE.format(pos));
Preconditions.checkArgument(pos <= maxSeekPosition(),
PreconditionMessage.ERR_SEEK_PAST_END_OF_FILE.format(pos));
if (!mShouldCachePartiallyReadBlock) {
seekInternal(pos);
} else {
seekInternalWithCachingPartiallyReadBlock(pos);
}
}
@Override
public long skip(long n) throws IOException {
if (n <= 0) {
return 0;
}
long toSkip = Math.min(n, remaining());
seek(mPos + toSkip);
return toSkip;
}
/**
* @return the maximum position to seek to
*/
protected long maxSeekPosition() {
return mFileLength;
}
/**
* @param pos the position to check
* @return the block size in bytes for the given pos, used for worker allocation
*/
protected long getBlockSizeAllocation(long pos) {
return getBlockSize(pos);
}
/**
* Creates and returns a {@link BlockInStream} for the UFS.
*
* @param blockStart the offset to start the block from
* @param length the length of the block
* @param path the UFS path
* @return the {@link BlockInStream} for the UFS
* @throws IOException if the stream cannot be created
*/
protected BlockInStream createUnderStoreBlockInStream(long blockStart, long length, String path)
throws IOException {
return UnderStoreBlockInStream.Factory.create(blockStart, length, mBlockSize, path);
}
/**
* If we are not in the last block or if the last block is equal to the normal block size,
* return the normal block size. Otherwise return the block size of the last block.
*
* @param pos the position to get the block size for
* @return the size of the block that covers pos
*/
protected long getBlockSize(long pos) {
// The size of the last block, 0 if it is equal to the normal block size
long lastBlockSize = mFileLength % mBlockSize;
if (mFileLength - pos > lastBlockSize) {
return mBlockSize;
} else {
return lastBlockSize;
}
}
/**
* Checks whether block instream and cache outstream should be updated.
* This function is only called by {@link #updateStreams()}.
*
* @param currentBlockId cached result of {@link #getCurrentBlockId()}
* @return true if the block stream should be updated
*/
protected boolean shouldUpdateStreams(long currentBlockId) {
if (mCurrentBlockInStream == null || currentBlockId != mStreamBlockId) {
return true;
}
if (mCurrentCacheStream != null
&& mCurrentBlockInStream.remaining() != mCurrentCacheStream.remaining()) {
throw new IllegalStateException(
String.format("BlockInStream and CacheStream is out of sync %d %d.",
mCurrentBlockInStream.remaining(), mCurrentCacheStream.remaining()));
}
return mCurrentBlockInStream.remaining() == 0;
}
/**
 * Closes or cancels {@link #mCurrentCacheStream} and resets it to null. The stream is closed
 * (committing the cached block) only when it was fully written; otherwise it is cancelled so a
 * partial block is not committed. All resulting IOExceptions are logged and swallowed so a cache
 * failure never interrupts the read path.
 */
private void closeOrCancelCacheStream() {
  if (mCurrentCacheStream == null) {
    return;
  }
  try {
    if (mCurrentCacheStream.remaining() == 0) {
      // The whole block was written; commit it.
      mCurrentCacheStream.close();
    } else {
      // Partially written block; cancel so it is not committed.
      mCurrentCacheStream.cancel();
    }
  } catch (IOException e) {
    if (e.getCause() instanceof BlockDoesNotExistException) {
      // This happens if two concurrent readers read trying to cache the same block. One cancelled
      // before the other. Then the other reader will see this exception since we only keep
      // one block per blockId in block worker.
      LOG.info("Block {} does not exist when being cancelled.", getCurrentBlockId());
    } else if (e.getCause() instanceof InvalidWorkerStateException) {
      // This happens if two concurrent readers trying to cache the same block and they acquired
      // different BlockClient (e.g. BlockStoreContext.acquireRemoteWorkerClient)
      // instances (each instance has its only session ID).
      LOG.info("Block {} has invalid worker state when being cancelled.", getCurrentBlockId());
    } else if (e.getCause() instanceof BlockAlreadyExistsException) {
      // This happens if two concurrent readers trying to cache the same block. One successfully
      // committed. The other reader sees this.
      LOG.info("Block {} exists.", getCurrentBlockId());
    } else {
      // This happens when there are any other cache stream close/cancel related errors (e.g.
      // server unreachable due to network partition, server busy due to alluxio worker is
      // busy, timeout due to congested network etc). But we want to proceed since we want
      // the user to continue reading when one Alluxio worker is having trouble.
      // Fixed typo in the logged message: "IOExecption" -> "IOException".
      LOG.warn("Cache stream close or cancel throws IOException {}, read continues.",
          e.getMessage());
    }
  }
  mCurrentCacheStream = null;
}
/**
 * @return the current block id based on mPos, -1 if at the end of the file
 */
private long getCurrentBlockId() {
  if (remaining() <= 0) {
    // Already at or past EOF; no block covers this position.
    return -1;
  }
  final int blockIndex = (int) (mPos / mBlockSize);
  Preconditions.checkState(blockIndex < mStatus.getBlockIds().size(),
      PreconditionMessage.ERR_BLOCK_INDEX);
  return mStatus.getBlockIds().get(blockIndex);
}
/**
 * Handles IO exceptions thrown in response to the worker cache request. The cache stream is
 * closed or cancelled after logging a message about the exception.
 *
 * @param e the exception to handle
 */
private void handleCacheStreamIOException(IOException e) {
  boolean blockAlreadyExists = e.getCause() instanceof BlockAlreadyExistsException;
  if (blockAlreadyExists) {
    // Another reader raced us and already created (or committed) this block.
    LOG.info(BLOCK_ID_EXISTS_SO_NOT_CACHED, getCurrentBlockId());
  } else {
    LOG.warn(BLOCK_ID_NOT_CACHED, getCurrentBlockId());
  }
  closeOrCancelCacheStream();
}
/**
 * Only updates {@link #mCurrentCacheStream}, {@link #mCurrentBlockInStream} and
 * {@link #mStreamBlockId} to be in sync with the current block (i.e.
 * {@link #getCurrentBlockId()}).
 * Repeated calls without a position change are no-ops. Call this on every read and seek unless
 * the block streams are known to be up-to-date.
 *
 * @throws IOException if the next cache stream or block stream cannot be created
 */
private void updateStreams() throws IOException {
  final long blockId = getCurrentBlockId();
  if (!shouldUpdateStreams(blockId)) {
    return;
  }
  // Both helpers tolerate a negative blockId (i.e. the end of file).
  updateBlockInStream(blockId);
  updateCacheStream(blockId);
  mStreamBlockId = blockId;
}
/**
 * Updates {@link #mCurrentCacheStream}. When {@code mShouldCache} is true, {@code FileInStream}
 * will create an {@code BlockOutStream} to cache the data read only if
 * <ol>
 * <li>the file is read from under storage, or</li>
 * <li>the file is read from a remote worker and we have an available local worker.</li>
 * </ol>
 * The following preconditions are checked inside:
 * <ol>
 * <li>{@link #mCurrentCacheStream} is either done or null.</li>
 * <li>EOF is reached or {@link #mCurrentBlockInStream} must be valid.</li>
 * </ol>
 * After this call, {@link #mCurrentCacheStream} is either null or freshly created.
 * {@link #mCurrentCacheStream} is created only if the block is not cached in a chosen machine
 * and mPos is at the beginning of a block.
 * This function is only called by {@link #updateStreams()}.
 *
 * @param blockId cached result of {@link #getCurrentBlockId()}
 * @throws IOException if the next cache stream cannot be created
 */
private void updateCacheStream(long blockId) throws IOException {
  // We should really only close a cache stream here. This check is to verify this.
  Preconditions.checkState(mCurrentCacheStream == null || mCurrentCacheStream.remaining() == 0);
  closeOrCancelCacheStream();
  Preconditions.checkState(mCurrentCacheStream == null);
  if (blockId < 0) {
    // End of file.
    return;
  }
  Preconditions.checkNotNull(mCurrentBlockInStream);
  // Never cache when caching is disabled or the data already sits in local Alluxio storage.
  if (!mShouldCache || mCurrentBlockInStream instanceof LocalBlockInStream) {
    return;
  }
  // If this block is read from a remote worker but we don't have a local worker, don't cache
  if (mCurrentBlockInStream instanceof RemoteBlockInStream
      && !BlockStoreContext.INSTANCE.hasLocalWorker()) {
    return;
  }
  // Unlike updateBlockInStream below, we never start a block cache stream if mPos is in the
  // middle of a block.
  if (mPos % mBlockSize != 0) {
    return;
  }
  try {
    // Pick a target worker via the location policy, then open the cache output stream there.
    WorkerNetAddress address = mLocationPolicy.getWorkerForNextBlock(
        mContext.getAlluxioBlockStore().getWorkerInfoList(), getBlockSizeAllocation(mPos));
    // If we reach here, we need to cache.
    mCurrentCacheStream =
        mContext.getAlluxioBlockStore().getOutStream(blockId, getBlockSize(mPos), address);
  } catch (IOException e) {
    handleCacheStreamIOException(e);
  } catch (AlluxioException e) {
    // Cache setup failures are non-fatal to the read path; log and continue uncached.
    LOG.warn(BLOCK_ID_NOT_CACHED, blockId, e);
  }
}
/**
 * Update {@link #mCurrentBlockInStream} to be in-sync with mPos's block. The new block
 * stream created will be at position 0.
 * This function is only called in {@link #updateStreams()}.
 *
 * @param blockId cached result of {@link #getCurrentBlockId()}
 * @throws IOException if the next {@link BlockInStream} cannot be obtained
 */
private void updateBlockInStream(long blockId) throws IOException {
  // Close the stale stream before switching blocks.
  if (mCurrentBlockInStream != null) {
    mCurrentBlockInStream.close();
    mCurrentBlockInStream = null;
  }
  // blockId = -1 if mPos = EOF.
  if (blockId < 0) {
    return;
  }
  try {
    if (mAlluxioStorageType.isPromote()) {
      try {
        mContext.getAlluxioBlockStore().promote(blockId);
      } catch (IOException e) {
        // Failed to promote; promotion is best-effort, so the read continues regardless.
        LOG.warn("Promotion of block with ID {} failed.", blockId, e);
      }
    }
    mCurrentBlockInStream = mContext.getAlluxioBlockStore().getInStream(blockId);
  } catch (IOException e) {
    LOG.debug("Failed to get BlockInStream for block with ID {}, using UFS instead. {}", blockId,
        e);
    // Only a persisted file can be served from the under storage as a fallback.
    if (!mStatus.isPersisted()) {
      LOG.error("Could not obtain data for block with ID {} from Alluxio."
          + " The block is also not available in the under storage.", blockId);
      throw e;
    }
    long blockStart = BlockId.getSequenceNumber(blockId) * mBlockSize;
    mCurrentBlockInStream =
        createUnderStoreBlockInStream(blockStart, getBlockSize(blockStart), mStatus.getUfsPath());
  }
}
/**
 * Seeks to a file position. Blocks are not cached unless they are fully read. This is only called
 * by {@link FileInStream#seek}.
 *
 * @param pos The position to seek to. It is guaranteed to be valid (pos >= 0 && pos != mPos &&
 *        pos <= mFileLength)
 * @throws IOException if the seek fails due to an error accessing the stream at the position
 */
private void seekInternal(long pos) throws IOException {
  // Drop any cache stream first: a partially read block is never cached on this path.
  closeOrCancelCacheStream();
  mPos = pos;
  updateStreams();
  if (mCurrentBlockInStream == null) {
    // A missing block stream is only valid when the new position is exactly EOF.
    Preconditions.checkState(remaining() == 0);
    return;
  }
  mCurrentBlockInStream.seek(mPos % mBlockSize);
}
/**
 * Seeks to a file position. Blocks are cached even if they are not fully read. This is only
 * called by {@link FileInStream#seek}.
 * Invariant: if the current block is to be cached, [0, mPos) should have been cached already.
 *
 * @param pos The position to seek to. It is guaranteed to be valid (pos >= 0 && pos != mPos &&
 *        pos <= mFileLength).
 * @throws IOException if the seek fails due to an error accessing the stream at the position
 */
private void seekInternalWithCachingPartiallyReadBlock(long pos) throws IOException {
  // Precompute this because mPos will be updated several times in this function.
  boolean isInCurrentBlock = pos / mBlockSize == mPos / mBlockSize;
  // Make sure that mCurrentBlockInStream and mCurrentCacheStream is updated.
  // mPos is not updated here.
  updateStreams();
  if (mCurrentCacheStream != null) {
    // Cache till pos if seeking forward within the current block. Otherwise cache the whole
    // block.
    readCurrentBlockToPos(pos > mPos ? pos : Long.MAX_VALUE);
    // Early return if we are at pos already. This happens if we seek forward with caching
    // enabled for this block.
    if (mPos == pos) {
      return;
    }
    // The early return above guarantees that we won't close an incomplete cache stream.
    Preconditions.checkState(mCurrentCacheStream == null || mCurrentCacheStream.remaining() == 0);
    closeOrCancelCacheStream();
  }
  // If seeks within the current block, directly seeks to pos if we are not yet there.
  // If seeks outside the current block, seek to the beginning of that block first, then
  // cache the prefix (pos % mBlockSize) of that block.
  if (isInCurrentBlock) {
    mPos = pos;
    // updateStreams is necessary when pos = mFileLength.
    updateStreams();
    if (mCurrentBlockInStream != null) {
      mCurrentBlockInStream.seek(mPos % mBlockSize);
    } else {
      Preconditions.checkState(remaining() == 0);
    }
  } else {
    // Jump to the start of the target block, then read forward to pos so the prefix is cached.
    mPos = pos / mBlockSize * mBlockSize;
    updateStreams();
    if (mCurrentCacheStream != null) {
      readCurrentBlockToPos(pos);
    } else if (mCurrentBlockInStream != null) {
      mPos = pos;
      mCurrentBlockInStream.seek(mPos % mBlockSize);
    } else {
      Preconditions.checkState(remaining() == 0);
    }
  }
}
/**
 * Reads until the file offset (mPos) equals pos or the end of the current block (whichever is
 * met first) if pos > mPos. Otherwise a no-op.
 *
 * @param pos file offset
 * @throws IOException if read or cache write fails
 */
private void readCurrentBlockToPos(long pos) throws IOException {
  Preconditions.checkNotNull(mCurrentBlockInStream);
  Preconditions.checkNotNull(mCurrentCacheStream);
  // Never read past the end of the current block.
  long remainingToRead = Math.min(pos - mPos, mCurrentBlockInStream.remaining());
  while (remainingToRead > 0) {
    // The last chunk may be shorter than the seek buffer.
    int chunkSize = (int) Math.min(mSeekBuffer.length, remainingToRead);
    int bytesRead = read(mSeekBuffer, 0, chunkSize);
    Preconditions.checkState(bytesRead > 0, PreconditionMessage.ERR_UNEXPECTED_EOF);
    remainingToRead -= bytesRead;
  }
}
/**
 * Reads the remaining of the current block.
 *
 * <p>Delegates to {@link #readCurrentBlockToPos(long)} with {@link Long#MAX_VALUE}; that helper
 * stops at the end of the current block regardless of how large the target position is.
 *
 * @throws IOException if read or cache write fails
 */
private void readCurrentBlockToEnd() throws IOException {
  readCurrentBlockToPos(Long.MAX_VALUE);
}
}
| core/client/src/main/java/alluxio/client/file/FileInStream.java | /*
* The Alluxio Open Foundation licenses this work under the Apache License, version 2.0
* (the "License"). You may not use this work except in compliance with the License, which is
* available at www.apache.org/licenses/LICENSE-2.0
*
* This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied, as more fully set forth in the License.
*
* See the NOTICE file distributed with this work for information regarding copyright ownership.
*/
package alluxio.client.file;
import alluxio.Constants;
import alluxio.annotation.PublicApi;
import alluxio.client.AlluxioStorageType;
import alluxio.client.BoundedStream;
import alluxio.client.Seekable;
import alluxio.client.block.BlockInStream;
import alluxio.client.block.BlockStoreContext;
import alluxio.client.block.BufferedBlockOutStream;
import alluxio.client.block.LocalBlockInStream;
import alluxio.client.block.RemoteBlockInStream;
import alluxio.client.block.UnderStoreBlockInStream;
import alluxio.client.file.options.InStreamOptions;
import alluxio.client.file.policy.FileWriteLocationPolicy;
import alluxio.exception.AlluxioException;
import alluxio.exception.BlockAlreadyExistsException;
import alluxio.exception.BlockDoesNotExistException;
import alluxio.exception.InvalidWorkerStateException;
import alluxio.exception.PreconditionMessage;
import alluxio.master.block.BlockId;
import alluxio.wire.WorkerNetAddress;
import com.google.common.base.Preconditions;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.InputStream;
import javax.annotation.concurrent.NotThreadSafe;
/**
* A streaming API to read a file. This API represents a file as a stream of bytes and provides a
* collection of {@link #read} methods to access this stream of bytes. In addition, one can seek
* into a given offset of the stream to read.
* <p>
* This class wraps the {@link BlockInStream} for each of the blocks in the file and abstracts the
* switching between streams. The backing streams can read from Alluxio space in the local machine,
* remote machines, or the under storage system.
*/
@PublicApi
@NotThreadSafe
public class FileInStream extends InputStream implements BoundedStream, Seekable {
private static final Logger LOG = LoggerFactory.getLogger(Constants.LOGGER_TYPE);
/** How the data should be written into Alluxio space, if at all. */
protected final AlluxioStorageType mAlluxioStorageType;
/** Standard block size in bytes of the file, guaranteed for all but the last block. */
protected final long mBlockSize;
/** The location policy for CACHE type of read into Alluxio. */
protected final FileWriteLocationPolicy mLocationPolicy;
/** Total length of the file in bytes. */
protected final long mFileLength;
/** File System context containing the {@link FileSystemMasterClient} pool. */
protected final FileSystemContext mContext;
/** File information. */
protected URIStatus mStatus;
/** Constant error message for block ID not cached. */
protected static final String BLOCK_ID_NOT_CACHED =
"The block with ID {} could not be cached into Alluxio storage.";
/** Error message for cache collision. */
private static final String BLOCK_ID_EXISTS_SO_NOT_CACHED =
"The block with ID {} is already stored in the target worker, canceling the cache request.";
/** If the stream is closed, this can only go from false to true. */
protected boolean mClosed;
/**
* Current position of the file instream.
*/
protected long mPos;
/**
* Caches the entire block even if only a portion of the block is read. Only valid when
* mShouldCache is true.
*/
private final boolean mShouldCachePartiallyReadBlock;
/** Whether to cache blocks in this file into Alluxio. */
private final boolean mShouldCache;
// The following 3 fields must be kept in sync. They are only updated in updateStreams together.
/** Current {@link BlockInStream} backing this stream. */
protected BlockInStream mCurrentBlockInStream;
/** Current {@link BufferedBlockOutStream} writing the data into Alluxio. */
protected BufferedBlockOutStream mCurrentCacheStream;
/** The blockId used in the block streams. */
private long mStreamBlockId;
/** The read buffer in file seek. This is used in {@link #readCurrentBlockToEnd()}. */
private byte[] mSeekBuffer = null;
/**
 * Factory for file input streams; the implementation is chosen by whether the file length is
 * known.
 *
 * @param status the file status
 * @param options the client options
 * @return the created {@link FileInStream} instance
 */
public static FileInStream create(URIStatus status, InStreamOptions options) {
  boolean lengthUnknown = status.getLength() == Constants.UNKNOWN_SIZE;
  return lengthUnknown
      ? new UnknownLengthFileInStream(status, options)
      : new FileInStream(status, options);
}
/**
 * Creates a new file input stream.
 *
 * @param status the file status
 * @param options the client options
 */
protected FileInStream(URIStatus status, InStreamOptions options) {
  mStatus = status;
  mBlockSize = status.getBlockSizeBytes();
  mFileLength = status.getLength();
  mContext = FileSystemContext.INSTANCE;
  mAlluxioStorageType = options.getAlluxioStorageType();
  mShouldCache = mAlluxioStorageType.isStore();
  mShouldCachePartiallyReadBlock = options.isCachePartiallyReadBlock();
  mClosed = false;
  mLocationPolicy = options.getLocationPolicy();
  if (mShouldCache) {
    Preconditions.checkNotNull(options.getLocationPolicy(),
        PreconditionMessage.FILE_WRITE_LOCATION_POLICY_UNSPECIFIED);
  }
  // Always allocate the seek buffer: readCurrentBlockToPos() dereferences mSeekBuffer.length,
  // so leaving mSeekBuffer null when the configured size is <= 0 would cause an NPE on
  // seek/close whenever partially read blocks are cached. Enforce a sane minimum of 1KB.
  int seekBufferSizeBytes = Math.max((int) options.getSeekBufferSizeBytes(), Constants.KB);
  mSeekBuffer = new byte[seekBufferSizeBytes];
  LOG.debug(options.toString());
}
@Override
public void close() throws IOException {
  // Idempotent: subsequent calls are no-ops.
  if (mClosed) {
    return;
  }
  updateStreams();
  if (mCurrentCacheStream != null && mShouldCachePartiallyReadBlock) {
    // Finish reading the current block so the partially read block can still be cached.
    readCurrentBlockToEnd();
  }
  if (mCurrentBlockInStream != null) {
    mCurrentBlockInStream.close();
  }
  closeOrCancelCacheStream();
  mClosed = true;
}
@Override
public int read() throws IOException {
  if (remaining() <= 0) {
    // Already at EOF.
    return -1;
  }
  updateStreams();
  Preconditions.checkState(mCurrentBlockInStream != null, PreconditionMessage.ERR_UNEXPECTED_EOF);
  int data = mCurrentBlockInStream.read();
  if (data == -1) {
    // The underlying stream is done.
    return -1;
  }
  mPos++;
  if (mCurrentCacheStream != null) {
    try {
      // Mirror the byte into the cache stream; a failure only disables caching for this block.
      mCurrentCacheStream.write(data);
    } catch (IOException e) {
      handleCacheStreamIOException(e);
    }
  }
  return data;
}
@Override
public int read(byte[] b) throws IOException {
  // Convenience overload: read into the whole buffer.
  return read(b, 0, b.length);
}
@Override
public int read(byte[] b, int off, int len) throws IOException {
  Preconditions.checkArgument(b != null, PreconditionMessage.ERR_READ_BUFFER_NULL);
  Preconditions.checkArgument(off >= 0 && len >= 0 && len + off <= b.length,
      PreconditionMessage.ERR_BUFFER_STATE.format(b.length, off, len));
  if (len == 0) {
    return 0;
  } else if (remaining() <= 0) {
    // Already at EOF.
    return -1;
  }
  int currentOffset = off;
  int bytesLeftToRead = len;
  // A single request may span several blocks, so loop and refresh streams at block boundaries.
  while (bytesLeftToRead > 0 && remaining() > 0) {
    updateStreams();
    Preconditions.checkNotNull(mCurrentBlockInStream, PreconditionMessage.ERR_UNEXPECTED_EOF);
    int bytesToRead = (int) Math.min(bytesLeftToRead, mCurrentBlockInStream.remaining());
    int bytesRead = mCurrentBlockInStream.read(b, currentOffset, bytesToRead);
    if (bytesRead > 0) {
      if (mCurrentCacheStream != null) {
        try {
          // Mirror what was just read into the cache stream.
          mCurrentCacheStream.write(b, currentOffset, bytesRead);
        } catch (IOException e) {
          handleCacheStreamIOException(e);
        }
      }
      mPos += bytesRead;
      bytesLeftToRead -= bytesRead;
      currentOffset += bytesRead;
    }
  }
  if (bytesLeftToRead == len && mCurrentBlockInStream.remaining() == 0) {
    // Nothing was read, and the underlying stream is done.
    return -1;
  }
  return len - bytesLeftToRead;
}
@Override
public long remaining() {
  // Bytes left between the current position and the end of the file.
  return mFileLength - mPos;
}
@Override
public void seek(long pos) throws IOException {
  if (mPos == pos) {
    // No-op when already at the requested position.
    return;
  }
  Preconditions.checkArgument(pos >= 0, PreconditionMessage.ERR_SEEK_NEGATIVE.format(pos));
  Preconditions.checkArgument(pos <= maxSeekPosition(),
      PreconditionMessage.ERR_SEEK_PAST_END_OF_FILE.format(pos));
  // Dispatch to the seek variant matching the partial-block caching configuration.
  if (mShouldCachePartiallyReadBlock) {
    seekInternalWithCachingPartiallyReadBlock(pos);
  } else {
    seekInternal(pos);
  }
}
@Override
public long skip(long n) throws IOException {
  if (n <= 0) {
    // Nothing to do for non-positive requests.
    return 0;
  }
  // Clamp to the bytes actually left in the file, then move the position via seek.
  long bytesToSkip = Math.min(n, remaining());
  seek(mPos + bytesToSkip);
  return bytesToSkip;
}
/**
 * @return the maximum position to seek to (the full file length in this implementation;
 *         protected so subclasses may change the bound)
 */
protected long maxSeekPosition() {
  return mFileLength;
}
/**
 * Returns the block size used for worker allocation at the given position. This default
 * implementation simply delegates to {@link #getBlockSize(long)}.
 *
 * @param pos the position to check
 * @return the block size in bytes for the given pos, used for worker allocation
 */
protected long getBlockSizeAllocation(long pos) {
  return getBlockSize(pos);
}
/**
 * Creates and returns a {@link BlockInStream} for the UFS.
 *
 * @param blockStart the offset to start the block from
 * @param length the length of the block
 * @param path the UFS path
 * @return the {@link BlockInStream} for the UFS
 * @throws IOException if the stream cannot be created
 */
protected BlockInStream createUnderStoreBlockInStream(long blockStart, long length, String path)
    throws IOException {
  // Delegate to the UnderStoreBlockInStream factory; mBlockSize bounds each UFS-backed block.
  return UnderStoreBlockInStream.Factory.create(blockStart, length, mBlockSize, path);
}
/**
 * If we are not in the last block or if the last block is equal to the normal block size,
 * return the normal block size. Otherwise return the block size of the last block.
 *
 * @param pos the position to get the block size for
 * @return the size of the block that covers pos
 */
protected long getBlockSize(long pos) {
  // Size of the trailing partial block; 0 when the file length is a multiple of mBlockSize.
  long lastBlockSize = mFileLength % mBlockSize;
  boolean inFullSizedBlock = mFileLength - pos > lastBlockSize;
  return inFullSizedBlock ? mBlockSize : lastBlockSize;
}
/**
* Checks whether block instream and cache outstream should be updated.
* This function is only called by {@link #updateStreams()}.
*
* @param currentBlockId cached result of {@link #getCurrentBlockId()}
* @return true if the block stream should be updated
*/
protected boolean shouldUpdateStreams(long currentBlockId) {
if (mCurrentBlockInStream == null || currentBlockId != mStreamBlockId) {
return true;
}
if (mCurrentCacheStream != null
&& mCurrentBlockInStream.remaining() != mCurrentCacheStream.remaining()) {
throw new IllegalStateException(
String.format("BlockInStream and CacheStream is out of sync %d %d.",
mCurrentBlockInStream.remaining(), mCurrentCacheStream.remaining()));
}
return mCurrentBlockInStream.remaining() == 0;
}
/**
 * Closes or cancels {@link #mCurrentCacheStream} and resets it to null. The stream is closed
 * (committing the cached block) only when it was fully written; otherwise it is cancelled so a
 * partial block is not committed. All resulting IOExceptions are logged and swallowed so a cache
 * failure never interrupts the read path.
 */
private void closeOrCancelCacheStream() {
  if (mCurrentCacheStream == null) {
    return;
  }
  try {
    if (mCurrentCacheStream.remaining() == 0) {
      // The whole block was written; commit it.
      mCurrentCacheStream.close();
    } else {
      // Partially written block; cancel so it is not committed.
      mCurrentCacheStream.cancel();
    }
  } catch (IOException e) {
    if (e.getCause() instanceof BlockDoesNotExistException) {
      // This happens if two concurrent readers read trying to cache the same block. One cancelled
      // before the other. Then the other reader will see this exception since we only keep
      // one block per blockId in block worker.
      LOG.info("Block {} does not exist when being cancelled.", getCurrentBlockId());
    } else if (e.getCause() instanceof InvalidWorkerStateException) {
      // This happens if two concurrent readers trying to cache the same block and they acquired
      // different BlockClient (e.g. BlockStoreContext.acquireRemoteWorkerClient)
      // instances (each instance has its only session ID).
      LOG.info("Block {} has invalid worker state when being cancelled.", getCurrentBlockId());
    } else if (e.getCause() instanceof BlockAlreadyExistsException) {
      // This happens if two concurrent readers trying to cache the same block. One successfully
      // committed. The other reader sees this.
      LOG.info("Block {} exists.", getCurrentBlockId());
    } else {
      // This happens when there are any other cache stream close/cancel related errors (e.g.
      // server unreachable due to network partition, server busy due to alluxio worker is
      // busy, timeout due to congested network etc). But we want to proceed since we want
      // the user to continue reading when one Alluxio worker is having trouble.
      // Fixed typo in the logged message: "IOExecption" -> "IOException".
      LOG.warn("Cache stream close or cancel throws IOException {}, read continues.",
          e.getMessage());
    }
  }
  mCurrentCacheStream = null;
}
/**
* @return the current block id based on mPos, -1 if at the end of the file
*/
private long getCurrentBlockId() {
if (remaining() <= 0) {
return -1;
}
int index = (int) (mPos / mBlockSize);
Preconditions
.checkState(index < mStatus.getBlockIds().size(), PreconditionMessage.ERR_BLOCK_INDEX);
return mStatus.getBlockIds().get(index);
}
/**
* Handles IO exceptions thrown in response to the worker cache request. Cache stream is closed
* or cancelled after logging some messages about the exceptions.
*
* @param e the exception to handle
*/
private void handleCacheStreamIOException(IOException e) {
if (e.getCause() instanceof BlockAlreadyExistsException) {
// This can happen if there are two readers trying to cache the same block. The first one
// created the block (either as temp block or committed block). The second sees this
// exception.
LOG.info(BLOCK_ID_EXISTS_SO_NOT_CACHED, getCurrentBlockId());
} else {
LOG.warn(BLOCK_ID_NOT_CACHED, getCurrentBlockId());
}
closeOrCancelCacheStream();
}
/**
* Only updates {@link #mCurrentCacheStream}, {@link #mCurrentBlockInStream} and
* {@link #mStreamBlockId} to be in sync with the current block (i.e.
* {@link #getCurrentBlockId()}).
* If this method is called multiple times, the subsequent invokes become no-op.
* Call this function every read and seek unless you are sure about the block streams are
* up-to-date.
*
* @throws IOException if the next cache stream or block stream cannot be created
*/
private void updateStreams() throws IOException {
long currentBlockId = getCurrentBlockId();
if (shouldUpdateStreams(currentBlockId)) {
// The following two function handle negative currentBlockId (i.e. the end of file)
// correctly.
updateBlockInStream(currentBlockId);
updateCacheStream(currentBlockId);
mStreamBlockId = currentBlockId;
}
}
/**
* Updates {@link #mCurrentCacheStream}. When {@code mShouldCache} is true, {@code FileInStream}
* will create an {@code BlockOutStream} to cache the data read only if
* <ol>
* <li>the file is read from under storage, or</li>
* <li>the file is read from a remote worker and we have an available local worker.</li>
* </ol>
* The following preconditions are checked inside:
* <ol>
* <li>{@link #mCurrentCacheStream} is either done or null.</li>
* <li>EOF is reached or {@link #mCurrentBlockInStream} must be valid.</li>
* </ol>
* After this call, {@link #mCurrentCacheStream} is either null or freshly created.
* {@link #mCurrentCacheStream} is created only if the block is not cached in a chosen machine
* and mPos is at the beginning of a block.
* This function is only called by {@link #updateStreams()}.
*
* @param blockId cached result of {@link #getCurrentBlockId()}
* @throws IOException if the next cache stream cannot be created
*/
private void updateCacheStream(long blockId) throws IOException {
// We should really only close a cache stream here. This check is to verify this.
Preconditions.checkState(mCurrentCacheStream == null || mCurrentCacheStream.remaining() == 0);
closeOrCancelCacheStream();
Preconditions.checkState(mCurrentCacheStream == null);
if (blockId < 0) {
// End of file.
return;
}
Preconditions.checkNotNull(mCurrentBlockInStream);
if (!mShouldCache || mCurrentBlockInStream instanceof LocalBlockInStream) {
return;
}
// If this block is read from a remote worker but we don't have a local worker, don't cache
if (mCurrentBlockInStream instanceof RemoteBlockInStream
&& !BlockStoreContext.INSTANCE.hasLocalWorker()) {
return;
}
// Unlike updateBlockInStream below, we never start a block cache stream if mPos is in the
// middle of a block.
if (mPos % mBlockSize != 0) {
return;
}
try {
WorkerNetAddress address = mLocationPolicy.getWorkerForNextBlock(
mContext.getAlluxioBlockStore().getWorkerInfoList(), getBlockSizeAllocation(mPos));
// If we reach here, we need to cache.
mCurrentCacheStream =
mContext.getAlluxioBlockStore().getOutStream(blockId, getBlockSize(mPos), address);
} catch (IOException e) {
handleCacheStreamIOException(e);
} catch (AlluxioException e) {
LOG.warn(BLOCK_ID_NOT_CACHED, blockId, e);
}
}
/**
* Update {@link #mCurrentBlockInStream} to be in-sync with mPos's block. The new block
* stream created with be at position 0.
* This function is only called in {@link #updateStreams()}.
*
* @param blockId cached result of {@link #getCurrentBlockId()}
* @throws IOException if the next {@link BlockInStream} cannot be obtained
*/
private void updateBlockInStream(long blockId) throws IOException {
if (mCurrentBlockInStream != null) {
mCurrentBlockInStream.close();
mCurrentBlockInStream = null;
}
// blockId = -1 if mPos = EOF.
if (blockId < 0) {
return;
}
try {
if (mAlluxioStorageType.isPromote()) {
try {
mContext.getAlluxioBlockStore().promote(blockId);
} catch (IOException e) {
// Failed to promote
LOG.warn("Promotion of block with ID {} failed.", blockId, e);
}
}
mCurrentBlockInStream = mContext.getAlluxioBlockStore().getInStream(blockId);
} catch (IOException e) {
LOG.debug("Failed to get BlockInStream for block with ID {}, using UFS instead. {}", blockId,
e);
if (!mStatus.isPersisted()) {
LOG.error("Could not obtain data for block with ID {} from Alluxio."
+ " The block is also not available in the under storage.", blockId);
throw e;
}
long blockStart = BlockId.getSequenceNumber(blockId) * mBlockSize;
mCurrentBlockInStream =
createUnderStoreBlockInStream(blockStart, getBlockSize(blockStart), mStatus.getUfsPath());
}
}
/**
* Seeks to a file position. Blocks are not cached unless they are fully read. This is only called
* by {@link FileInStream#seek}.
*
* @param pos The position to seek to. It is guaranteed to be valid (pos >= 0 && pos != mPos &&
* pos <= mFileLength)
* @throws IOException if the seek fails due to an error accessing the stream at the position
*/
private void seekInternal(long pos) throws IOException {
closeOrCancelCacheStream();
mPos = pos;
updateStreams();
if (mCurrentBlockInStream != null) {
mCurrentBlockInStream.seek(mPos % mBlockSize);
} else {
Preconditions.checkState(remaining() == 0);
}
}
/**
* Seeks to a file position. Blocks are cached even if they are not fully read. This is only
* called by {@link FileInStream#seek}.
* Invariant: if the current block is to be cached, [0, mPos) should have been cached already.
*
* @param pos The position to seek to. It is guaranteed to be valid (pos >= 0 && pos != mPos &&
* pos <= mFileLength).
* @throws IOException if the seek fails due to an error accessing the stream at the position
*/
private void seekInternalWithCachingPartiallyReadBlock(long pos) throws IOException {
// Precompute this because mPos will be updated several times in this function.
boolean isInCurrentBlock = pos / mBlockSize == mPos / mBlockSize;
// Make sure that mCurrentBlockInStream and mCurrentCacheStream is updated.
// mPos is not updated here.
updateStreams();
if (mCurrentCacheStream != null) {
// Cache till pos if seeking forward within the current block. Otherwise cache the whole
// block.
readCurrentBlockToPos(pos > mPos ? pos : Long.MAX_VALUE);
// Early return if we are at pos already. This happens if we seek forward with caching
// enabled for this block.
if (mPos == pos) {
return;
}
// The early return above guarantees that we won't close an incomplete cache stream.
Preconditions.checkState(mCurrentCacheStream == null || mCurrentCacheStream.remaining() == 0);
closeOrCancelCacheStream();
}
// If seeks within the current block, directly seeks to pos if we are not yet there.
// If seeks outside the current block, seek to the beginning of that block first, then
// cache the prefix (pos % mBlockSize) of that block.
if (isInCurrentBlock) {
mPos = pos;
// updateStreams is necessary when pos = mFileLength.
updateStreams();
if (mCurrentBlockInStream != null) {
mCurrentBlockInStream.seek(mPos % mBlockSize);
} else {
Preconditions.checkState(remaining() == 0);
}
} else {
mPos = pos / mBlockSize * mBlockSize;
updateStreams();
if (mCurrentCacheStream != null) {
readCurrentBlockToPos(pos);
} else if (mCurrentBlockInStream != null) {
mPos = pos;
mCurrentBlockInStream.seek(mPos % mBlockSize);
} else {
Preconditions.checkState(remaining() == 0);
}
}
}
/**
* Reads till the file offset (mPos) equals pos or the end of the current block (whichever is
* met first) if pos > mPos. Otherwise no-op.
*
* @param pos file offset
* @throws IOException if read or cache write fails
*/
private void readCurrentBlockToPos(long pos) throws IOException {
Preconditions.checkNotNull(mCurrentBlockInStream);
Preconditions.checkNotNull(mCurrentCacheStream);
long len = Math.min(pos - mPos, mCurrentBlockInStream.remaining());
if (len <= 0) {
return;
}
do {
// Account for the last read which might be less than mSeekBufferSizeBytes bytes.
int bytesRead = read(mSeekBuffer, 0, (int) Math.min(mSeekBuffer.length, len));
Preconditions.checkState(bytesRead > 0, PreconditionMessage.ERR_UNEXPECTED_EOF);
len -= bytesRead;
} while (len > 0);
}
/**
* Reads the remaining of the current block.
* @throws IOException if read or cache write fails
*/
private void readCurrentBlockToEnd() throws IOException {
readCurrentBlockToPos(Long.MAX_VALUE);
}
}
| Always allocate buffer
| core/client/src/main/java/alluxio/client/file/FileInStream.java | Always allocate buffer | <ide><path>ore/client/src/main/java/alluxio/client/file/FileInStream.java
<ide> private long mStreamBlockId;
<ide>
<ide> /** The read buffer in file seek. This is used in {@link #readCurrentBlockToEnd()}. */
<del> private byte[] mSeekBuffer = null;
<add> private byte[] mSeekBuffer;
<ide>
<ide> /**
<ide> * Creates a new file input stream.
<ide> Preconditions.checkNotNull(options.getLocationPolicy(),
<ide> PreconditionMessage.FILE_WRITE_LOCATION_POLICY_UNSPECIFIED);
<ide> }
<del> int seekBufferSizeBytes = (int) options.getSeekBufferSizeBytes();
<del> if (seekBufferSizeBytes > 0) {
<del> mSeekBuffer = new byte[seekBufferSizeBytes];
<del> }
<add> int seekBufferSizeBytes = Math.max((int) options.getSeekBufferSizeBytes(), Constants.KB);
<add> mSeekBuffer = new byte[seekBufferSizeBytes];
<ide> LOG.debug(options.toString());
<ide> }
<ide> |
|
Java | apache-2.0 | 3a4c7c9c93dc537d1f33d252a2ac7f18e211334b | 0 | youprofit/netty,jongyeol/netty,netty/netty,JungMinu/netty,AchinthaReemal/netty,mcanthony/netty,KatsuraKKKK/netty,Squarespace/netty,jchambers/netty,netty/netty,sunbeansoft/netty,unei66/netty,hgl888/netty,blademainer/netty,ijuma/netty,nadeeshaan/netty,serioussam/netty,ioanbsu/netty,kjniemi/netty,altihou/netty,jenskordowski/netty,mcanthony/netty,Apache9/netty,ioanbsu/netty,clebertsuconic/netty,chrisprobst/netty,eincs/netty,ajaysarda/netty,LuminateWireless/netty,xingguang2013/netty,ngocdaothanh/netty,andsel/netty,blademainer/netty,maliqq/netty,lightsocks/netty,MediumOne/netty,jovezhougang/netty,xiongzheng/netty,afredlyj/learn-netty,shenguoquan/netty,zhoffice/netty,blademainer/netty,sameira/netty,Spikhalskiy/netty,fengjiachun/netty,zhoffice/netty,orika/netty,chrisprobst/netty,BrunoColin/netty,ichaki5748/netty,slandelle/netty,seetharamireddy540/netty,bigheary/netty,xiexingguang/netty,fengshao0907/netty,caoyanwei/netty,Scottmitch/netty,sunbeansoft/netty,alkemist/netty,WangJunTYTL/netty,nkhuyu/netty,AchinthaReemal/netty,lukehutch/netty,Squarespace/netty,golovnin/netty,hyangtack/netty,nayato/netty,sverkera/netty,silvaran/netty,johnou/netty,sameira/netty,huuthang1993/netty,purplefox/netty-4.0.2.8-hacked,chrisprobst/netty,mosoft521/netty,dongjiaqiang/netty,dongjiaqiang/netty,hepin1989/netty,zzcclp/netty,shuangqiuan/netty,hgl888/netty,fengshao0907/netty,purplefox/netty-4.0.2.8-hacked,mikkokar/netty,satishsaley/netty,sammychen105/netty,zhoffice/netty,Kalvar/netty,andsel/netty,jovezhougang/netty,brennangaunce/netty,imangry/netty-zh,louxiu/netty,lznhust/netty,djchen/netty,liyang1025/netty,yawkat/netty,slandelle/netty,NiteshKant/netty,wangyikai/netty,f7753/netty,liyang1025/netty,mubarak/netty,ninja-/netty,MediumOne/netty,tempbottle/netty,liuciuse/netty,jovezhougang/netty,moyiguket/netty,seetharamireddy540/netty,MediumOne/netty,satishsaley/netty,lukw00/netty,ninja-/netty,fengjiachun/netty,xiongzheng/netty,yonglehou/n
etty-1,AnselQiao/netty,windie/netty,niuxinghua/netty,NiteshKant/netty,huuthang1993/netty,s-gheldd/netty,Techcable/netty,gerdriesselmann/netty,wuxiaowei907/netty,bigheary/netty,codevelop/netty,hyangtack/netty,mcobrien/netty,mikkokar/netty,jenskordowski/netty,pengzj/netty,ngocdaothanh/netty,idelpivnitskiy/netty,BrunoColin/netty,huuthang1993/netty,ngocdaothanh/netty,lukw00/netty,ioanbsu/netty,wangyikai/netty,yipen9/netty,zhujingling/netty,xingguang2013/netty,jongyeol/netty,castomer/netty,lightsocks/netty,ngocdaothanh/netty,zxhfirefox/netty,IBYoung/netty,lznhust/netty,develar/netty,timboudreau/netty,satishsaley/netty,brennangaunce/netty,sverkera/netty,drowning/netty,netty/netty,joansmith/netty,nkhuyu/netty,unei66/netty,luyiisme/netty,skyao/netty,shenguoquan/netty,chanakaudaya/netty,silvaran/netty,kiril-me/netty,yrcourage/netty,shenguoquan/netty,exinguu/netty,lukw00/netty,sammychen105/netty,s-gheldd/netty,imangry/netty-zh,shism/netty,mcobrien/netty,gerdriesselmann/netty,gigold/netty,exinguu/netty,nat2013/netty,youprofit/netty,carlbai/netty,shism/netty,Apache9/netty,huuthang1993/netty,caoyanwei/netty,JungMinu/netty,danny200309/netty,yonglehou/netty-1,duqiao/netty,fantayeneh/netty,caoyanwei/netty,BrunoColin/netty,lukw00/netty,jongyeol/netty,orika/netty,mx657649013/netty,mosoft521/netty,bob329/netty,eincs/netty,yipen9/netty,moyiguket/netty,liuciuse/netty,bryce-anderson/netty,mx657649013/netty,shism/netty,zer0se7en/netty,lznhust/netty,LuminateWireless/netty,golovnin/netty,hepin1989/netty,jovezhougang/netty,bryce-anderson/netty,mway08/netty,idelpivnitskiy/netty,Techcable/netty,eonezhang/netty,mubarak/netty,blucas/netty,wangyikai/netty,eonezhang/netty,kjniemi/netty,serioussam/netty,x1957/netty,lznhust/netty,ioanbsu/netty,netty/netty,louiscryan/netty,idelpivnitskiy/netty,djchen/netty,lukw00/netty,chinayin/netty,junjiemars/netty,fengjiachun/netty,woshilaiceshide/netty,sja/netty,balaprasanna/netty,AchinthaReemal/netty,blademainer/netty,kvr000/netty,jchambers/netty,LuminateWireles
s/netty,woshilaiceshide/netty,Mounika-Chirukuri/netty,skyao/netty,huanyi0723/netty,huanyi0723/netty,mx657649013/netty,mikkokar/netty,hgl888/netty,duqiao/netty,danbev/netty,x1957/netty,moyiguket/netty,qingsong-xu/netty,artgon/netty,cnoldtree/netty,wuyinxian124/netty,zer0se7en/netty,altihou/netty,liuciuse/netty,gerdriesselmann/netty,ejona86/netty,zzcclp/netty,eonezhang/netty,carlbai/netty,KatsuraKKKK/netty,louxiu/netty,doom369/netty,BrunoColin/netty,CodingFabian/netty,yrcourage/netty,jenskordowski/netty,daschl/netty,doom369/netty,hgl888/netty,bryce-anderson/netty,zhujingling/netty,timboudreau/netty,windie/netty,blucas/netty,shuangqiuan/netty,ijuma/netty,mcobrien/netty,qingsong-xu/netty,lukehutch/netty,codevelop/netty,wuxiaowei907/netty,johnou/netty,chrisprobst/netty,nmittler/netty,smayoorans/netty,s-gheldd/netty,sunbeansoft/netty,Alwayswithme/netty,AnselQiao/netty,afds/netty,djchen/netty,afds/netty,idelpivnitskiy/netty,chanakaudaya/netty,Scottmitch/netty,Scottmitch/netty,drowning/netty,kiril-me/netty,mcobrien/netty,ichaki5748/netty,skyao/netty,carl-mastrangelo/netty,jdivy/netty,windie/netty,nat2013/netty,wangyikai/netty,clebertsuconic/netty,smayoorans/netty,duqiao/netty,x1957/netty,tbrooks8/netty,nat2013/netty,KatsuraKKKK/netty,DolphinZhao/netty,tempbottle/netty,lightsocks/netty,phlizik/netty,unei66/netty,firebase/netty,clebertsuconic/netty,shenguoquan/netty,mway08/netty,nadeeshaan/netty,huanyi0723/netty,kyle-liu/netty4study,zer0se7en/netty,carl-mastrangelo/netty,xiongzheng/netty,NiteshKant/netty,normanmaurer/netty,shelsonjava/netty,orika/netty,f7753/netty,CodingFabian/netty,luyiisme/netty,LuminateWireless/netty,castomer/netty,Techcable/netty,JungMinu/netty,dongjiaqiang/netty,fengjiachun/netty,bob329/netty,tempbottle/netty,chinayin/netty,ifesdjeen/netty,liyang1025/netty,afds/netty,gerdriesselmann/netty,louiscryan/netty,golovnin/netty,WangJunTYTL/netty,zhujingling/netty,joansmith/netty,luyiisme/netty,pengzj/netty,mubarak/netty,blucas/netty,danbev/netty,kjniemi/netty,Mo
unika-Chirukuri/netty,ajaysarda/netty,liyang1025/netty,jdivy/netty,yrcourage/netty,kjniemi/netty,Alwayswithme/netty,zer0se7en/netty,sameira/netty,exinguu/netty,yonglehou/netty-1,niuxinghua/netty,xiexingguang/netty,yipen9/netty,maliqq/netty,huuthang1993/netty,cnoldtree/netty,Spikhalskiy/netty,shelsonjava/netty,AchinthaReemal/netty,andsel/netty,zxhfirefox/netty,develar/netty,drowning/netty,shism/netty,IBYoung/netty,tbrooks8/netty,luyiisme/netty,Squarespace/netty,purplefox/netty-4.0.2.8-hacked,nmittler/netty,lukehutch/netty,qingsong-xu/netty,codevelop/netty,mubarak/netty,lugt/netty,zhujingling/netty,eonezhang/netty,silvaran/netty,gigold/netty,NiteshKant/netty,pengzj/netty,silvaran/netty,Kingson4Wu/netty,yawkat/netty,satishsaley/netty,clebertsuconic/netty,cnoldtree/netty,fenik17/netty,afredlyj/learn-netty,Kingson4Wu/netty,Mounika-Chirukuri/netty,nayato/netty,joansmith/netty,woshilaiceshide/netty,jchambers/netty,cnoldtree/netty,seetharamireddy540/netty,normanmaurer/netty,woshilaiceshide/netty,Mounika-Chirukuri/netty,castomer/netty,fengshao0907/netty,tbrooks8/netty,sverkera/netty,jenskordowski/netty,louiscryan/netty,blademainer/netty,unei66/netty,kvr000/netty,louiscryan/netty,jdivy/netty,junjiemars/netty,imangry/netty-zh,Techcable/netty,xiongzheng/netty,sja/netty,exinguu/netty,yrcourage/netty,SinaTadayon/netty,kiril-me/netty,louxiu/netty,woshilaiceshide/netty,xiongzheng/netty,f7753/netty,joansmith/netty,huanyi0723/netty,carl-mastrangelo/netty,Kalvar/netty,timboudreau/netty,yawkat/netty,ijuma/netty,fantayeneh/netty,chanakaudaya/netty,danny200309/netty,buchgr/netty,DavidAlphaFox/netty,jenskordowski/netty,nayato/netty,kvr000/netty,yipen9/netty,olupotd/netty,danbev/netty,Squarespace/netty,jovezhougang/netty,seetharamireddy540/netty,firebase/netty,nadeeshaan/netty,duqiao/netty,Squarespace/netty,yawkat/netty,CodingFabian/netty,lugt/netty,johnou/netty,xiexingguang/netty,mosoft521/netty,tbrooks8/netty,blucas/netty,balaprasanna/netty,chanakaudaya/netty,shuangqiuan/netty,drowning/n
etty,kiril-me/netty,niuxinghua/netty,KatsuraKKKK/netty,pengzj/netty,carlbai/netty,bryce-anderson/netty,buchgr/netty,junjiemars/netty,shelsonjava/netty,Apache9/netty,gerdriesselmann/netty,Kalvar/netty,yonglehou/netty-1,castomer/netty,tempbottle/netty,youprofit/netty,kvr000/netty,balaprasanna/netty,jdivy/netty,serioussam/netty,chanakaudaya/netty,x1957/netty,eonezhang/netty,zxhfirefox/netty,WangJunTYTL/netty,niuxinghua/netty,mcanthony/netty,lugt/netty,carlbai/netty,slandelle/netty,sja/netty,caoyanwei/netty,imangry/netty-zh,seetharamireddy540/netty,Kalvar/netty,louxiu/netty,shenguoquan/netty,mosoft521/netty,castomer/netty,bigheary/netty,olupotd/netty,MediumOne/netty,mway08/netty,BrunoColin/netty,SinaTadayon/netty,skyao/netty,orika/netty,IBYoung/netty,kvr000/netty,DavidAlphaFox/netty,ajaysarda/netty,nayato/netty,unei66/netty,danny200309/netty,doom369/netty,smayoorans/netty,ijuma/netty,Spikhalskiy/netty,xingguang2013/netty,jdivy/netty,Alwayswithme/netty,serioussam/netty,artgon/netty,sunbeansoft/netty,ejona86/netty,sameira/netty,niuxinghua/netty,phlizik/netty,DolphinZhao/netty,slandelle/netty,mx657649013/netty,ngocdaothanh/netty,MediumOne/netty,purplefox/netty-4.0.2.8-hacked,ninja-/netty,zzcclp/netty,sja/netty,maliqq/netty,JungMinu/netty,mosoft521/netty,alkemist/netty,lukehutch/netty,IBYoung/netty,ifesdjeen/netty,nadeeshaan/netty,junjiemars/netty,SinaTadayon/netty,junjiemars/netty,fenik17/netty,liyang1025/netty,silvaran/netty,Alwayswithme/netty,SinaTadayon/netty,djchen/netty,sammychen105/netty,ejona86/netty,bob329/netty,mikkokar/netty,shuangqiuan/netty,lznhust/netty,shuangqiuan/netty,jchambers/netty,codevelop/netty,balaprasanna/netty,mcanthony/netty,satishsaley/netty,caoyanwei/netty,chrisprobst/netty,artgon/netty,normanmaurer/netty,dongjiaqiang/netty,altihou/netty,tbrooks8/netty,jchambers/netty,louiscryan/netty,shelsonjava/netty,danny200309/netty,luyiisme/netty,eincs/netty,Mounika-Chirukuri/netty,jongyeol/netty,buchgr/netty,Kingson4Wu/netty,doom369/netty,Apache9/netty,rova
rga/netty,ninja-/netty,AnselQiao/netty,mway08/netty,wuxiaowei907/netty,bigheary/netty,golovnin/netty,CodingFabian/netty,normanmaurer/netty,doom369/netty,normanmaurer/netty,wuxiaowei907/netty,danbev/netty,daschl/netty,Scottmitch/netty,huanyi0723/netty,ajaysarda/netty,wuyinxian124/netty,KatsuraKKKK/netty,ichaki5748/netty,mx657649013/netty,lightsocks/netty,wuyinxian124/netty,Kalvar/netty,Techcable/netty,moyiguket/netty,blucas/netty,eincs/netty,liuciuse/netty,SinaTadayon/netty,Scottmitch/netty,danbev/netty,mikkokar/netty,orika/netty,daschl/netty,yonglehou/netty-1,xingguang2013/netty,andsel/netty,olupotd/netty,alkemist/netty,exinguu/netty,lugt/netty,wuyinxian124/netty,rovarga/netty,ioanbsu/netty,moyiguket/netty,louxiu/netty,alkemist/netty,s-gheldd/netty,maliqq/netty,johnou/netty,zzcclp/netty,sverkera/netty,zxhfirefox/netty,Apache9/netty,zhoffice/netty,bryce-anderson/netty,DolphinZhao/netty,zer0se7en/netty,altihou/netty,idelpivnitskiy/netty,lukehutch/netty,liuciuse/netty,Kingson4Wu/netty,fenik17/netty,tempbottle/netty,hepin1989/netty,artgon/netty,CodingFabian/netty,DavidAlphaFox/netty,mway08/netty,altihou/netty,hyangtack/netty,f7753/netty,rovarga/netty,ajaysarda/netty,mcanthony/netty,fantayeneh/netty,timboudreau/netty,nadeeshaan/netty,nmittler/netty,brennangaunce/netty,eincs/netty,fantayeneh/netty,hyangtack/netty,qingsong-xu/netty,carl-mastrangelo/netty,serioussam/netty,lugt/netty,Spikhalskiy/netty,phlizik/netty,mcobrien/netty,duqiao/netty,s-gheldd/netty,ninja-/netty,mubarak/netty,AnselQiao/netty,Alwayswithme/netty,zhujingling/netty,shelsonjava/netty,afds/netty,DolphinZhao/netty,zhoffice/netty,AchinthaReemal/netty,zzcclp/netty,DolphinZhao/netty,windie/netty,firebase/netty,IBYoung/netty,wangyikai/netty,Kingson4Wu/netty,brennangaunce/netty,balaprasanna/netty,golovnin/netty,ichaki5748/netty,Spikhalskiy/netty,ejona86/netty,zxhfirefox/netty,AnselQiao/netty,smayoorans/netty,nkhuyu/netty,fenik17/netty,gigold/netty,joansmith/netty,nayato/netty,alkemist/netty,qingsong-xu/netty,kyl
e-liu/netty4study,shism/netty,hepin1989/netty,afredlyj/learn-netty,lightsocks/netty,gigold/netty,netty/netty,WangJunTYTL/netty,skyao/netty,carlbai/netty,yrcourage/netty,LuminateWireless/netty,dongjiaqiang/netty,WangJunTYTL/netty,jongyeol/netty,f7753/netty,sameira/netty,hgl888/netty,olupotd/netty,windie/netty,sja/netty,youprofit/netty,youprofit/netty,nkhuyu/netty,nkhuyu/netty,smayoorans/netty,rovarga/netty,afds/netty,sunbeansoft/netty,sverkera/netty,brennangaunce/netty,phlizik/netty,cnoldtree/netty,xiexingguang/netty,fenik17/netty,ichaki5748/netty,clebertsuconic/netty,andsel/netty,chinayin/netty,firebase/netty,wuxiaowei907/netty,NiteshKant/netty,artgon/netty,chinayin/netty,johnou/netty,kjniemi/netty,maliqq/netty,fengjiachun/netty,gigold/netty,yawkat/netty,x1957/netty,djchen/netty,fantayeneh/netty,DavidAlphaFox/netty,ijuma/netty,imangry/netty-zh,olupotd/netty,bob329/netty,danny200309/netty,ejona86/netty,xiexingguang/netty,carl-mastrangelo/netty,bigheary/netty,timboudreau/netty,chinayin/netty,buchgr/netty,bob329/netty,kiril-me/netty,xingguang2013/netty | /*
* Copyright 2013 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.util.internal;
import io.netty.util.Recycler;
import io.netty.util.Recycler.Handle;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.RandomAccess;
/**
 * A simple list which is recyclable. This implementation does not allow {@code null} elements to be added.
 */
public final class RecyclableArrayList extends ArrayList<Object> {

    private static final long serialVersionUID = -8605125654176467947L;

    // Capacity used when callers do not request an explicit minimum capacity.
    private static final int DEFAULT_INITIAL_CAPACITY = 8;

    // Shared pool of instances; newObject(..) is only invoked when the pool has no free instance.
    private static final Recycler<RecyclableArrayList> RECYCLER = new Recycler<RecyclableArrayList>() {
        @Override
        protected RecyclableArrayList newObject(Handle handle) {
            return new RecyclableArrayList(handle);
        }
    };

    /**
     * Create a new empty {@link RecyclableArrayList} instance
     */
    public static RecyclableArrayList newInstance() {
        return newInstance(DEFAULT_INITIAL_CAPACITY);
    }

    /**
     * Create a new empty {@link RecyclableArrayList} instance with the given capacity.
     */
    public static RecyclableArrayList newInstance(int minCapacity) {
        RecyclableArrayList ret = RECYCLER.get();
        ret.ensureCapacity(minCapacity);
        return ret;
    }

    // Handle needed to return this instance to the recycler pool on recycle().
    private final Handle handle;

    private RecyclableArrayList(Handle handle) {
        this(handle, DEFAULT_INITIAL_CAPACITY);
    }

    private RecyclableArrayList(Handle handle, int initialCapacity) {
        super(initialCapacity);
        this.handle = handle;
    }

    /**
     * {@inheritDoc}
     *
     * @throws IllegalArgumentException if {@code c} contains a {@code null} element
     */
    @Override
    public boolean addAll(Collection<?> c) {
        checkNullElements(c);
        return super.addAll(c);
    }

    /**
     * {@inheritDoc}
     *
     * @throws IllegalArgumentException if {@code c} contains a {@code null} element
     */
    @Override
    public boolean addAll(int index, Collection<?> c) {
        checkNullElements(c);
        return super.addAll(index, c);
    }

    // Rejects collections containing null before any element is inserted.
    private static void checkNullElements(Collection<?> c) {
        if (c instanceof RandomAccess && c instanceof List) {
            // produce less garbage
            List<?> list = (List<?>) c;
            int size = list.size();
            for (int i = 0; i < size; i++) {
                if (list.get(i) == null) {
                    throw new IllegalArgumentException("c contains null values");
                }
            }
        } else {
            for (Object element: c) {
                if (element == null) {
                    throw new IllegalArgumentException("c contains null values");
                }
            }
        }
    }

    /**
     * {@inheritDoc}
     *
     * @throws NullPointerException if {@code element} is {@code null}
     */
    @Override
    public boolean add(Object element) {
        if (element == null) {
            throw new NullPointerException("element");
        }
        return super.add(element);
    }

    /**
     * {@inheritDoc}
     *
     * @throws NullPointerException if {@code element} is {@code null}
     */
    @Override
    public void add(int index, Object element) {
        if (element == null) {
            throw new NullPointerException("element");
        }
        super.add(index, element);
    }

    /**
     * {@inheritDoc}
     *
     * @throws NullPointerException if {@code element} is {@code null}
     */
    @Override
    public Object set(int index, Object element) {
        if (element == null) {
            throw new NullPointerException("element");
        }
        return super.set(index, element);
    }

    /**
     * Clear and recycle this instance.
     */
    public boolean recycle() {
        clear();
        return RECYCLER.recycle(this, handle);
    }
}
| common/src/main/java/io/netty/util/internal/RecyclableArrayList.java | /*
* Copyright 2013 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.util.internal;
import io.netty.util.Recycler;
import io.netty.util.Recycler.Handle;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.RandomAccess;
/**
 * A simple list that holds the output of a codec. This implementation does not allow
 * {@code null} elements to be added.
 */
public final class RecyclableArrayList extends ArrayList<Object> {

    private static final long serialVersionUID = -8605125654176467947L;

    // Capacity used when callers do not request an explicit minimum capacity.
    private static final int DEFAULT_INITIAL_CAPACITY = 8;

    // Shared pool of instances; newObject(..) is only invoked when the pool has no free instance.
    private static final Recycler<RecyclableArrayList> RECYCLER = new Recycler<RecyclableArrayList>() {
        @Override
        protected RecyclableArrayList newObject(Handle handle) {
            return new RecyclableArrayList(handle);
        }
    };

    /**
     * Create a new empty {@link RecyclableArrayList} instance
     */
    public static RecyclableArrayList newInstance() {
        return newInstance(DEFAULT_INITIAL_CAPACITY);
    }

    /**
     * Create a new empty {@link RecyclableArrayList} instance with the given capacity.
     */
    public static RecyclableArrayList newInstance(int minCapacity) {
        RecyclableArrayList ret = RECYCLER.get();
        ret.ensureCapacity(minCapacity);
        return ret;
    }

    // Handle needed to return this instance to the recycler pool on recycle().
    private final Handle handle;

    private RecyclableArrayList(Handle handle) {
        this(handle, DEFAULT_INITIAL_CAPACITY);
    }

    private RecyclableArrayList(Handle handle, int initialCapacity) {
        super(initialCapacity);
        this.handle = handle;
    }

    /**
     * {@inheritDoc}
     *
     * @throws IllegalArgumentException if {@code c} contains a {@code null} element
     */
    @Override
    public boolean addAll(Collection<?> c) {
        checkNullElements(c);
        return super.addAll(c);
    }

    /**
     * {@inheritDoc}
     *
     * @throws IllegalArgumentException if {@code c} contains a {@code null} element
     */
    @Override
    public boolean addAll(int index, Collection<?> c) {
        checkNullElements(c);
        return super.addAll(index, c);
    }

    // Rejects collections containing null before any element is inserted.
    // static: uses no instance state.
    private static void checkNullElements(Collection<?> c) {
        if (c instanceof RandomAccess && c instanceof List) {
            // produce less garbage
            List<?> list = (List<?>) c;
            int size = list.size();
            for (int i = 0; i < size; i++) {
                if (list.get(i) == null) {
                    throw new IllegalArgumentException("c contains null values");
                }
            }
        } else {
            for (Object element: c) {
                if (element == null) {
                    throw new IllegalArgumentException("c contains null values");
                }
            }
        }
    }

    /**
     * {@inheritDoc}
     *
     * @throws NullPointerException if {@code element} is {@code null}
     */
    @Override
    public boolean add(Object element) {
        if (element == null) {
            throw new NullPointerException("element");
        }
        return super.add(element);
    }

    /**
     * {@inheritDoc}
     *
     * @throws NullPointerException if {@code element} is {@code null}
     */
    @Override
    public void add(int index, Object element) {
        if (element == null) {
            throw new NullPointerException("element");
        }
        super.add(index, element);
    }

    /**
     * {@inheritDoc}
     *
     * Previously {@code set(..)} bypassed the null check enforced by {@code add(..)} and
     * {@code addAll(..)}, allowing {@code null} elements to sneak into the list.
     *
     * @throws NullPointerException if {@code element} is {@code null}
     */
    @Override
    public Object set(int index, Object element) {
        if (element == null) {
            throw new NullPointerException("element");
        }
        return super.set(index, element);
    }

    /**
     * Clear and recycle this instance.
     */
    public boolean recycle() {
        clear();
        return RECYCLER.recycle(this, handle);
    }
}
| Also dissallow null elements on set
| common/src/main/java/io/netty/util/internal/RecyclableArrayList.java | Also dissallow null elements on set | <ide><path>ommon/src/main/java/io/netty/util/internal/RecyclableArrayList.java
<ide> import java.util.RandomAccess;
<ide>
<ide> /**
<del> * A simple list that holds the output of a codec.
<add> * A simple list which is reyclable. This implementation does not allow {@code null} elements to be added.
<ide> */
<ide> public final class RecyclableArrayList extends ArrayList<Object> {
<ide>
<ide> return super.addAll(index, c);
<ide> }
<ide>
<del> private void checkNullElements(Collection<?> c) {
<add> private static void checkNullElements(Collection<?> c) {
<ide> if (c instanceof RandomAccess && c instanceof List) {
<ide> // produce less garbage
<ide> List<?> list = (List<?>) c;
<ide> super.add(index, element);
<ide> }
<ide>
<add> @Override
<add> public Object set(int index, Object element) {
<add> if (element == null) {
<add> throw new NullPointerException("element");
<add> }
<add> return super.set(index, element);
<add> }
<add>
<ide> /**
<ide> * Clear and recycle this instance.
<ide> */ |
|
Java | mit | 41fbd9229c96297ece12d65ebc21bdcd478f1b9a | 0 | madumlao/oxCore,GluuFederation/oxCore | /*
* oxCore is available under the MIT License (2008). See http://opensource.org/licenses/MIT for full text.
*
* Copyright (c) 2014, Gluu
*/package org.xdi.service.metric;
import java.io.Serializable;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TimeZone;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import org.gluu.site.ldap.persistence.LdapEntryManager;
import org.gluu.site.ldap.persistence.exception.EntryPersistenceException;
import org.jboss.seam.Component;
import org.jboss.seam.annotations.In;
import org.jboss.seam.annotations.Logger;
import org.jboss.seam.annotations.Observer;
import org.jboss.seam.annotations.async.Asynchronous;
import org.jboss.seam.contexts.Contexts;
import org.jboss.seam.contexts.Lifecycle;
import org.jboss.seam.log.Log;
import org.xdi.ldap.model.SimpleBranch;
import org.xdi.model.ApplicationType;
import org.xdi.model.metric.MetricType;
import org.xdi.model.metric.ldap.MetricEntry;
import org.xdi.util.StringHelper;
import com.codahale.metrics.Counter;
import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.Timer;
import com.unboundid.ldap.sdk.Filter;
/**
 * Metric service. Collects application metrics in a {@link MetricRegistry} and periodically
 * persists them to LDAP below a per-appliance / per-application / per-month branch structure:
 * {@code uniqueIdentifier=id,ou=YYYYMM,ou=application_type,ou=appliance_inum,<baseDn()>}.
 *
 * @author Yuriy Movchan Date: 07/27/2015
 */
public abstract class MetricService implements Serializable {

    private static final long serialVersionUID = -3393618600428448743L;

    // Seam event type fired by the metric timer.
    private static final String EVENT_TYPE = "MetricServiceTimerEvent";

    // Fallback reporter interval (seconds) when init() receives a non-positive value.
    private static final int DEFAULT_METRIC_REPORTER_INTERVAL = 60;

    // SimpleDateFormat is not thread-safe and this instance is shared between
    // (asynchronous) callers. All access must go through formatPeriod().
    private static final SimpleDateFormat PERIOD_DATE_FORMAT = new SimpleDateFormat("yyyyMM");

    // Seeded with the current time so generated identifiers differ across restarts.
    private static final AtomicLong initialId = new AtomicLong(System.currentTimeMillis());

    private MetricRegistry metricRegistry;

    private Set<MetricType> registeredMetricTypes;

    @Logger
    private Log log;

    @In
    private LdapEntryManager ldapEntryManager;

    /**
     * Initialize the metric registry and start the LDAP reporter.
     *
     * @param metricInterval reporter interval in seconds; values <= 0 fall back to 60 seconds
     */
    public void init(int metricInterval) {
        this.metricRegistry = new MetricRegistry();
        this.registeredMetricTypes = new HashSet<MetricType>();

        LdapEntryReporter ldapEntryReporter = LdapEntryReporter.forRegistry(this.metricRegistry, getComponentName()).build();

        int metricReporterInterval = metricInterval;
        if (metricReporterInterval <= 0) {
            metricReporterInterval = DEFAULT_METRIC_REPORTER_INTERVAL;
        }
        ldapEntryReporter.start(metricReporterInterval, TimeUnit.SECONDS);
    }

    /**
     * Timer event handler: asynchronously persists the metric entries collected by the reporter.
     */
    @Observer(EVENT_TYPE)
    @Asynchronous
    public void writeMetricEntries(List<MetricEntry> metricEntries, Date creationTime) {
        add(metricEntries, creationTime);
    }

    /**
     * Persist a new organizational unit branch at the given DN.
     */
    public void addBranch(String branchDn, String ou) {
        SimpleBranch branch = new SimpleBranch();
        branch.setOrganizationalUnitName(ou);
        branch.setDn(branchDn);

        ldapEntryManager.persist(branch);
    }

    /**
     * @return whether an entry already exists at the given branch DN
     */
    public boolean containsBranch(String branchDn) {
        return ldapEntryManager.contains(SimpleBranch.class, branchDn);
    }

    /**
     * Create a branch, tolerating the race where another process creates it concurrently.
     */
    public void createBranch(String branchDn, String ou) {
        try {
            addBranch(branchDn, ou);
        } catch (EntryPersistenceException ex) {
            // Check if another process added this branch already
            if (!containsBranch(branchDn)) {
                throw ex;
            }
        }
    }

    /**
     * Ensure the branch chain ou=YYYYMM,ou=application_type,ou=appliance_inum exists.
     * When {@code creationDate} is {@code null} only the parent branches are prepared
     * (used by search methods which do not write period entries).
     */
    public void prepareBranch(Date creationDate, ApplicationType applicationType) {
        String baseDn = buildDn(null, creationDate, applicationType);
        // Create ou=YYYYMM branch if needed
        if (!containsBranch(baseDn)) {
            // Create ou=application_type branch if needed
            String applicationBaseDn = buildDn(null, null, applicationType);
            if (!containsBranch(applicationBaseDn)) {
                // Create ou=appliance_inum branch if needed
                String applianceBaseDn = buildDn(null, null, null);
                if (!containsBranch(applianceBaseDn)) {
                    createBranch(applianceBaseDn, applianceInum());
                }

                createBranch(applicationBaseDn, applicationType.getValue());
            }

            // Guard against creationDate == null (e.g. prepareBranch(null, ..) from
            // findMetricEntry): formatting null would throw a NullPointerException.
            if (creationDate != null) {
                createBranch(baseDn, formatPeriod(creationDate));
            }
        }
    }

    /**
     * Persist a batch of metric entries. The branch is prepared per entry using the entry's
     * own application type and creation date (falling back to OX_AUTH / the supplied creation
     * time when unset). The previous implementation always prepared the OX_AUTH branch, which
     * broke writes originating from other applications.
     */
    @Asynchronous
    public void add(List<MetricEntry> metricEntries, Date creationTime) {
        for (MetricEntry metricEntry : metricEntries) {
            ApplicationType applicationType = metricEntry.getApplicationType();
            if (applicationType == null) {
                applicationType = ApplicationType.OX_AUTH;
            }

            Date creationDate = metricEntry.getCreationDate();
            if (creationDate == null) {
                creationDate = creationTime;
            }

            prepareBranch(creationDate, applicationType);
            ldapEntryManager.persist(metricEntry);
        }
    }

    public void add(MetricEntry metricEntry) {
        prepareBranch(metricEntry.getCreationDate(), metricEntry.getApplicationType());
        ldapEntryManager.persist(metricEntry);
    }

    public void update(MetricEntry metricEntry) {
        prepareBranch(metricEntry.getCreationDate(), metricEntry.getApplicationType());
        ldapEntryManager.merge(metricEntry);
    }

    public void remove(MetricEntry metricEntry) {
        prepareBranch(metricEntry.getCreationDate(), metricEntry.getApplicationType());
        ldapEntryManager.remove(metricEntry);
    }

    /**
     * Remove the branch and every entry below it.
     */
    public void removeBranch(String branchDn) {
        ldapEntryManager.removeWithSubtree(branchDn);
    }

    public MetricEntry getMetricEntryByDn(MetricType metricType, String metricEventDn) {
        return ldapEntryManager.find(metricType.getMetricEntryType(), metricEventDn);
    }

    /**
     * Load metric entries of the given types whose [oxStartDate, oxEndDate] interval lies
     * within [startDate, endDate]. Only period branches that actually exist are searched.
     *
     * @return a map from metric type to the matching entries (possibly empty, never null)
     */
    public Map<MetricType, List<MetricEntry>> findMetricEntry(ApplicationType applicationType, String applianceInum,
            List<MetricType> metricTypes, Date startDate, Date endDate, String... returnAttributes) {
        prepareBranch(null, applicationType);

        Map<MetricType, List<MetricEntry>> result = new HashMap<MetricType, List<MetricEntry>>();

        if ((metricTypes == null) || (metricTypes.size() == 0)) {
            return result;
        }

        // Prepare list of DNs
        Set<String> metricDns = getBaseDnForPeriod(applicationType, applianceInum, startDate, endDate);

        if (metricDns.size() == 0) {
            return result;
        }

        for (MetricType metricType : metricTypes) {
            List<MetricEntry> metricTypeResult = new LinkedList<MetricEntry>();
            for (String metricDn : metricDns) {
                List<Filter> metricTypeFilters = new ArrayList<Filter>();

                Filter applicationTypeFilter = Filter.createEqualityFilter("oxApplicationType", applicationType.getValue());
                Filter eventTypeTypeFilter = Filter.createEqualityFilter("oxMetricType", metricType.getValue());
                Filter startDateFilter = Filter.createGreaterOrEqualFilter("oxStartDate", ldapEntryManager.encodeGeneralizedTime((startDate)));
                Filter endDateFilter = Filter.createLessOrEqualFilter("oxEndDate", ldapEntryManager.encodeGeneralizedTime(endDate));

                metricTypeFilters.add(applicationTypeFilter);
                metricTypeFilters.add(eventTypeTypeFilter);
                metricTypeFilters.add(startDateFilter);
                metricTypeFilters.add(endDateFilter);

                Filter filter = Filter.createANDFilter(metricTypeFilters);

                List<MetricEntry> metricTypeMonthResult = (List<MetricEntry>) ldapEntryManager.findEntries(metricDn,
                        metricType.getMetricEntryType(), returnAttributes, filter);
                metricTypeResult.addAll(metricTypeMonthResult);
            }
            result.put(metricType, metricTypeResult);
        }

        return result;
    }

    /**
     * Find entries below the given period branch whose oxStartDate is on or before expirationDate.
     */
    public List<MetricEntry> getExpiredMetricEntries(String baseDnForPeriod, Date expirationDate) {
        Filter expiratioFilter = Filter.createLessOrEqualFilter("oxStartDate", ldapEntryManager.encodeGeneralizedTime(expirationDate));

        List<MetricEntry> metricEntries = ldapEntryManager.findEntries(baseDnForPeriod, MetricEntry.class, new String[] { "uniqueIdentifier" }, expiratioFilter);

        return metricEntries;
    }

    /**
     * @return the DNs of all ou=YYYYMM period branches below the application branch
     */
    public Set<String> findAllPeriodBranches(ApplicationType applicationType, String applianceInum) {
        String baseDn = buildDn(null, null, applicationType, applianceInum);

        Filter skipRootDnFilter = Filter.createNOTFilter(Filter.createEqualityFilter("ou", applicationType.getValue()));
        List<SimpleBranch> periodBranches = (List<SimpleBranch>) ldapEntryManager.findEntries(baseDn, SimpleBranch.class, new String[] { "ou" }, skipRootDnFilter);

        Set<String> periodBranchesStrings = new HashSet<String>();
        for (SimpleBranch periodBranch: periodBranches) {
            if (!StringHelper.equalsIgnoreCase(baseDn, periodBranch.getDn())) {
                periodBranchesStrings.add(periodBranch.getDn());
            }
        }

        return periodBranchesStrings;
    }

    /**
     * Drop whole period branches that lie entirely before expirationDate and remove
     * individual expired entries from the remaining (kept) branches.
     */
    public void removeExpiredMetricEntries(Date expirationDate, ApplicationType applicationType, String applianceInum) {
        Set<String> keepBaseDnForPeriod = getBaseDnForPeriod(applicationType, applianceInum, expirationDate, new Date());

        Set<String> allBaseDnForPeriod = findAllPeriodBranches(applicationType, applianceInum);
        allBaseDnForPeriod.removeAll(keepBaseDnForPeriod);

        // Remove expired months
        for (String baseDnForPeriod : allBaseDnForPeriod) {
            removeBranch(baseDnForPeriod);
        }

        // Remove expired entries
        for (String baseDnForPeriod : keepBaseDnForPeriod) {
            List<MetricEntry> expiredMetricEntries = getExpiredMetricEntries(baseDnForPeriod, expirationDate);
            for (MetricEntry expiredMetricEntry : expiredMetricEntries) {
                remove(expiredMetricEntry);
            }
        }
    }

    /**
     * Walk month by month (UTC) from startDate to endDate and collect the DNs of the
     * period branches that exist in LDAP.
     */
    private Set<String> getBaseDnForPeriod(ApplicationType applicationType, String applianceInum, Date startDate, Date endDate) {
        Calendar cal = Calendar.getInstance();
        cal.setTimeZone(TimeZone.getTimeZone("UTC"));
        cal.setTime(startDate);

        Set<String> metricDns = new HashSet<String>();

        boolean stopCondition = false;
        while (cal.getTime().before(endDate) && !stopCondition) {
            Date currentStartDate = cal.getTime();

            String baseDn = buildDn(null, currentStartDate, applicationType, applianceInum);
            if (containsBranch(baseDn)) {
                metricDns.add(baseDn);
            }

            if (cal.getTime().equals(endDate)) {
                break;
            } else {
                cal.add(Calendar.MONTH, 1);
                if (cal.getTime().after(endDate)) {
                    stopCondition = true;
                }
            }
        }

        return metricDns;
    }

    /**
     * @return a new process-wide unique identifier for a metric entry
     */
    public String getUniqueIdentifier() {
        return String.valueOf(initialId.incrementAndGet());
    }

    /**
     * @deprecated misspelled; kept for backward compatibility. Use {@link #getUniqueIdentifier()}.
     */
    @Deprecated
    public String getuUiqueIdentifier() {
        return getUniqueIdentifier();
    }

    public Counter getCounter(MetricType metricType) {
        // Set.add is idempotent; no separate contains() check is required
        registeredMetricTypes.add(metricType);

        return metricRegistry.counter(metricType.getMetricName());
    }

    public Timer getTimer(MetricType metricType) {
        // Set.add is idempotent; no separate contains() check is required
        registeredMetricTypes.add(metricType);

        return metricRegistry.timer(metricType.getMetricName());
    }

    public void incCounter(MetricType metricType) {
        Counter counter = getCounter(metricType);
        counter.inc();
    }

    public String buildDn(String uniqueIdentifier, Date creationDate, ApplicationType applicationType) {
        return buildDn(uniqueIdentifier, creationDate, applicationType, null);
    }

    /*
     * Should return similar to this pattern DN:
     * uniqueIdentifier=id,ou=YYYY-MM,ou=application_type,ou=appliance_inum,ou=metric,ou=organization_name,o=gluu
     */
    public String buildDn(String uniqueIdentifier, Date creationDate, ApplicationType applicationType, String currentApplianceInum) {
        final StringBuilder dn = new StringBuilder();
        if (StringHelper.isNotEmpty(uniqueIdentifier) && (creationDate != null) && (applicationType != null)) {
            dn.append(String.format("uniqueIdentifier=%s,", uniqueIdentifier));
        }
        if ((creationDate != null) && (applicationType != null)) {
            dn.append(String.format("ou=%s,", formatPeriod(creationDate)));
        }
        if (applicationType != null) {
            dn.append(String.format("ou=%s,", applicationType.getValue()));
        }

        if (currentApplianceInum == null) {
            dn.append(String.format("ou=%s,", applianceInum()));
        } else {
            dn.append(String.format("ou=%s,", currentApplianceInum));
        }
        dn.append(baseDn());

        return dn.toString();
    }

    /**
     * Thread-safe wrapper around the shared {@link SimpleDateFormat}, which is not safe
     * for concurrent use; this service is invoked from asynchronous observers.
     */
    private static String formatPeriod(Date date) {
        synchronized (PERIOD_DATE_FORMAT) {
            return PERIOD_DATE_FORMAT.format(date);
        }
    }

    public Set<MetricType> getRegisteredMetricTypes() {
        return registeredMetricTypes;
    }

    // Should return ou=metric,o=gluu
    public abstract String baseDn();

    // Should return appliance Inum
    public abstract String applianceInum();

    public abstract String getComponentName();

    /**
     * Get MetricService instance
     *
     * @return MetricService instance
     */
    public static MetricService instance() {
        if (!(Contexts.isEventContextActive() || Contexts.isApplicationContextActive())) {
            Lifecycle.beginCall();
        }

        return (MetricService) Component.getInstance(MetricService.class);
    }
}
| oxService/src/main/java/org/xdi/service/metric/MetricService.java | /*
* oxCore is available under the MIT License (2008). See http://opensource.org/licenses/MIT for full text.
*
* Copyright (c) 2014, Gluu
*/package org.xdi.service.metric;
import java.io.Serializable;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TimeZone;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import org.gluu.site.ldap.persistence.LdapEntryManager;
import org.gluu.site.ldap.persistence.exception.EntryPersistenceException;
import org.jboss.seam.Component;
import org.jboss.seam.annotations.In;
import org.jboss.seam.annotations.Logger;
import org.jboss.seam.annotations.Observer;
import org.jboss.seam.annotations.async.Asynchronous;
import org.jboss.seam.contexts.Contexts;
import org.jboss.seam.contexts.Lifecycle;
import org.jboss.seam.log.Log;
import org.xdi.ldap.model.SimpleBranch;
import org.xdi.model.ApplicationType;
import org.xdi.model.metric.MetricType;
import org.xdi.model.metric.ldap.MetricEntry;
import org.xdi.util.StringHelper;
import com.codahale.metrics.Counter;
import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.Timer;
import com.unboundid.ldap.sdk.Filter;
/**
* Metric service
*
* @author Yuriy Movchan Date: 07/27/2015
*/
public abstract class MetricService implements Serializable {
    private static final long serialVersionUID = -3393618600428448743L;

    // Seam event type fired by the metric timer.
    private static final String EVENT_TYPE = "MetricServiceTimerEvent";

    // Fallback reporter interval (seconds) used when init() receives a non-positive value.
    private static final int DEFAULT_METRIC_REPORTER_INTERVAL = 60;

    // Formats the yyyyMM period component of metric branch DNs.
    // NOTE(review): SimpleDateFormat is not thread-safe; this shared instance may be
    // used concurrently from asynchronous callers — confirm and synchronize if so.
    private static final SimpleDateFormat PERIOD_DATE_FORMAT = new SimpleDateFormat("yyyyMM");

    // Seeded with the current time so generated identifiers differ across restarts.
    private static final AtomicLong initialId = new AtomicLong(System.currentTimeMillis());

    // In-memory registry of counters/timers; created in init().
    private MetricRegistry metricRegistry;

    // Metric types handed out via getCounter()/getTimer().
    private Set<MetricType> registeredMetricTypes;

    @Logger
    private Log log;

    @In
    private LdapEntryManager ldapEntryManager;
public void init(int metricInterval) {
this.metricRegistry = new MetricRegistry();
this.registeredMetricTypes = new HashSet<MetricType>();
LdapEntryReporter ldapEntryReporter = LdapEntryReporter.forRegistry(this.metricRegistry, getComponentName()).build();
int metricReporterInterval = metricInterval;
if (metricReporterInterval <= 0) {
metricReporterInterval = DEFAULT_METRIC_REPORTER_INTERVAL;
}
ldapEntryReporter.start(metricReporterInterval, TimeUnit.SECONDS);
}
@Observer(EVENT_TYPE)
@Asynchronous
public void writeMetricEntries(List<MetricEntry> metricEntries, Date creationTime) {
add(metricEntries, creationTime);
}
public void addBranch(String branchDn, String ou) {
SimpleBranch branch = new SimpleBranch();
branch.setOrganizationalUnitName(ou);
branch.setDn(branchDn);
ldapEntryManager.persist(branch);
}
public boolean containsBranch(String branchDn) {
return ldapEntryManager.contains(SimpleBranch.class, branchDn);
}
public void createBranch(String branchDn, String ou) {
try {
addBranch(branchDn, ou);
} catch (EntryPersistenceException ex) {
// Check if another process added this branch already
if (!containsBranch(branchDn)) {
throw ex;
}
}
}
public void prepareBranch(Date creationDate, ApplicationType applicationType) {
String baseDn = buildDn(null, creationDate, applicationType);
// Create ou=YYYY-MM branch if needed
if (!containsBranch(baseDn)) {
// Create ou=application_type branch if needed
String applicationBaseDn = buildDn(null, null, applicationType);
if (!containsBranch(applicationBaseDn)) {
// Create ou=appliance_inum branch if needed
String applianceBaseDn = buildDn(null, null, null);
if (!containsBranch(applianceBaseDn)) {
createBranch(applianceBaseDn, applianceInum());
}
createBranch(applicationBaseDn, applicationType.getValue());
}
createBranch(baseDn, PERIOD_DATE_FORMAT.format(creationDate));
}
}
@Asynchronous
public void add(List<MetricEntry> metricEntries, Date creationTime) {
prepareBranch(creationTime, ApplicationType.OX_AUTH);
for (MetricEntry metricEntry : metricEntries) {
ldapEntryManager.persist(metricEntry);
}
}
public void add(MetricEntry metricEntry) {
prepareBranch(metricEntry.getCreationDate(), metricEntry.getApplicationType());
ldapEntryManager.persist(metricEntry);
}
public void update(MetricEntry metricEntry) {
prepareBranch(metricEntry.getCreationDate(), metricEntry.getApplicationType());
ldapEntryManager.merge(metricEntry);
}
public void remove(MetricEntry metricEntry) {
prepareBranch(metricEntry.getCreationDate(), metricEntry.getApplicationType());
ldapEntryManager.remove(metricEntry);
}
public void removeBranch(String branchDn) {
ldapEntryManager.removeWithSubtree(branchDn);
}
public MetricEntry getMetricEntryByDn(MetricType metricType, String metricEventDn) {
return ldapEntryManager.find(metricType.getMetricEntryType(), metricEventDn);
}
public Map<MetricType, List<MetricEntry>> findMetricEntry(ApplicationType applicationType, String applianceInum,
List<MetricType> metricTypes, Date startDate, Date endDate, String... returnAttributes) {
prepareBranch(null, applicationType);
Map<MetricType, List<MetricEntry>> result = new HashMap<MetricType, List<MetricEntry>>();
if ((metricTypes == null) || (metricTypes.size() == 0)) {
return result;
}
// Prepare list of DNs
Set<String> metricDns = getBaseDnForPeriod(applicationType, applianceInum, startDate, endDate);
if (metricDns.size() == 0) {
return result;
}
for (MetricType metricType : metricTypes) {
List<MetricEntry> metricTypeResult = new LinkedList<MetricEntry>();
for (String metricDn : metricDns) {
List<Filter> metricTypeFilters = new ArrayList<Filter>();
Filter applicationTypeFilter = Filter.createEqualityFilter("oxApplicationType", applicationType.getValue());
Filter eventTypeTypeFilter = Filter.createEqualityFilter("oxMetricType", metricType.getValue());
Filter startDateFilter = Filter.createGreaterOrEqualFilter("oxStartDate", ldapEntryManager.encodeGeneralizedTime((startDate)));
Filter endDateFilter = Filter.createLessOrEqualFilter("oxEndDate", ldapEntryManager.encodeGeneralizedTime(endDate));
metricTypeFilters.add(applicationTypeFilter);
metricTypeFilters.add(eventTypeTypeFilter);
metricTypeFilters.add(startDateFilter);
metricTypeFilters.add(endDateFilter);
Filter filter = Filter.createANDFilter(metricTypeFilters);
List<MetricEntry> metricTypeMonthResult = (List<MetricEntry>) ldapEntryManager.findEntries(metricDn,
metricType.getMetricEntryType(), returnAttributes, filter);
metricTypeResult.addAll(metricTypeMonthResult);
}
result.put(metricType, metricTypeResult);
}
return result;
}
public List<MetricEntry> getExpiredMetricEntries(String baseDnForPeriod, Date expirationDate) {
Filter expiratioFilter = Filter.createLessOrEqualFilter("oxStartDate", ldapEntryManager.encodeGeneralizedTime(expirationDate));
List<MetricEntry> metricEntries = ldapEntryManager.findEntries(baseDnForPeriod, MetricEntry.class, new String[] { "uniqueIdentifier" }, expiratioFilter);
return metricEntries;
}
public Set<String> findAllPeriodBranches(ApplicationType applicationType, String applianceInum) {
String baseDn = buildDn(null, null, applicationType, applianceInum);
Filter skipRootDnFilter = Filter.createNOTFilter(Filter.createEqualityFilter("ou", applicationType.getValue()));
List<SimpleBranch> periodBranches = (List<SimpleBranch>) ldapEntryManager.findEntries(baseDn, SimpleBranch.class, new String[] { "ou" }, skipRootDnFilter);
Set<String> periodBranchesStrings = new HashSet<String>();
for (SimpleBranch periodBranch: periodBranches) {
if (!StringHelper.equalsIgnoreCase(baseDn, periodBranch.getDn())) {
periodBranchesStrings.add(periodBranch.getDn());
}
}
return periodBranchesStrings;
}
public void removeExpiredMetricEntries(Date expirationDate, ApplicationType applicationType, String applianceInum) {
Set<String> keepBaseDnForPeriod = getBaseDnForPeriod(applicationType, applianceInum, expirationDate, new Date());
Set<String> allBaseDnForPeriod = findAllPeriodBranches(applicationType, applianceInum);
allBaseDnForPeriod.removeAll(keepBaseDnForPeriod);
// Remove expired months
for (String baseDnForPeriod : allBaseDnForPeriod) {
removeBranch(baseDnForPeriod);
}
// Remove expired entries
for (String baseDnForPeriod : keepBaseDnForPeriod) {
List<MetricEntry> expiredMetricEntries = getExpiredMetricEntries(baseDnForPeriod, expirationDate);
for (MetricEntry expiredMetricEntry : expiredMetricEntries) {
remove(expiredMetricEntry);
}
}
}
private Set<String> getBaseDnForPeriod(ApplicationType applicationType, String applianceInum, Date startDate, Date endDate) {
Calendar cal = Calendar.getInstance();
cal.setTimeZone(TimeZone.getTimeZone("UTC"));
cal.setTime(startDate);
Set<String> metricDns = new HashSet<String>();
boolean stopCondition = false;
while (cal.getTime().before(endDate) || !stopCondition) {
Date currentStartDate = cal.getTime();
String baseDn = buildDn(null, currentStartDate, applicationType, applianceInum);
if (containsBranch(baseDn)) {
metricDns.add(baseDn);
}
if (cal.getTime().equals(endDate)) {
break;
} else {
cal.add(Calendar.MONTH, 1);
if (cal.getTime().after(endDate)) {
stopCondition = true;
}
}
}
return metricDns;
}
public String getuUiqueIdentifier() {
return String.valueOf(initialId.incrementAndGet());
}
public Counter getCounter(MetricType metricType) {
if (!registeredMetricTypes.contains(metricType)) {
registeredMetricTypes.add(metricType);
}
return metricRegistry.counter(metricType.getMetricName());
}
public Timer getTimer(MetricType metricType) {
if (!registeredMetricTypes.contains(metricType)) {
registeredMetricTypes.add(metricType);
}
return metricRegistry.timer(metricType.getMetricName());
}
public void incCounter(MetricType metricType) {
Counter counter = getCounter(metricType);
counter.inc();
}
public String buildDn(String uniqueIdentifier, Date creationDate, ApplicationType applicationType) {
return buildDn(uniqueIdentifier, creationDate, applicationType, null);
}
/*
* Should return similar to this pattern DN:
* uniqueIdentifier=id,ou=YYYY-MM,ou=application_type,ou=appliance_inum,ou=metric,ou=organization_name,o=gluu
*/
public String buildDn(String uniqueIdentifier, Date creationDate, ApplicationType applicationType, String currentApplianceInum) {
final StringBuilder dn = new StringBuilder();
if (StringHelper.isNotEmpty(uniqueIdentifier) && (creationDate != null) && (applicationType != null)) {
dn.append(String.format("uniqueIdentifier=%s,", uniqueIdentifier));
}
if ((creationDate != null) && (applicationType != null)) {
dn.append(String.format("ou=%s,", PERIOD_DATE_FORMAT.format(creationDate)));
}
if (applicationType != null) {
dn.append(String.format("ou=%s,", applicationType.getValue()));
}
if (currentApplianceInum == null) {
dn.append(String.format("ou=%s,", applianceInum()));
} else {
dn.append(String.format("ou=%s,", currentApplianceInum));
}
dn.append(baseDn());
return dn.toString();
}
public Set<MetricType> getRegisteredMetricTypes() {
return registeredMetricTypes;
}
// Should return ou=metric,o=gluu
public abstract String baseDn();
// Should return appliance Inum
public abstract String applianceInum();
public abstract String getComponentName();
/**
* Get MetricService instance
*
* @return MetricService instance
*/
public static MetricService instance() {
if (!(Contexts.isEventContextActive() || Contexts.isApplicationContextActive())) {
Lifecycle.beginCall();
}
return (MetricService) Component.getInstance(MetricService.class);
}
}
| Optimizee code | oxService/src/main/java/org/xdi/service/metric/MetricService.java | Optimizee code | <ide><path>xService/src/main/java/org/xdi/service/metric/MetricService.java
<ide>
<ide> Set<String> metricDns = new HashSet<String>();
<ide> boolean stopCondition = false;
<del> while (cal.getTime().before(endDate) || !stopCondition) {
<add> while (cal.getTime().before(endDate) && !stopCondition) {
<ide> Date currentStartDate = cal.getTime();
<ide>
<ide> String baseDn = buildDn(null, currentStartDate, applicationType, applianceInum); |
|
Java | apache-2.0 | 3c2e70106c64fba7f5f3b6fa27c5cad845d5baa1 | 0 | gfx/Android-Orma,gfx/Android-Orma,gfx/Android-Orma,gfx/Android-Orma | /*
* Copyright (c) 2015 FUJI Goro (gfx).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.gfx.android.orma.processor;
import com.github.gfx.android.orma.annotation.OnConflict;
import com.github.gfx.android.orma.annotation.Setter;
import com.squareup.javapoet.AnnotationSpec;
import com.squareup.javapoet.ArrayTypeName;
import com.squareup.javapoet.ClassName;
import com.squareup.javapoet.CodeBlock;
import com.squareup.javapoet.FieldSpec;
import com.squareup.javapoet.MethodSpec;
import com.squareup.javapoet.ParameterSpec;
import com.squareup.javapoet.ParameterizedTypeName;
import com.squareup.javapoet.TypeName;
import com.squareup.javapoet.TypeSpec;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.function.Function;
import java.util.stream.Collectors;
import javax.lang.model.element.Modifier;
import javax.lang.model.element.VariableElement;
/**
 * {@code Schema<T>} represents how a model is connected to an SQLite table.
 * This writer generates the per-model schema class: column definitions, name
 * constants, CREATE TABLE / CREATE INDEX SQL, statement binding, and
 * cursor-to-model deserialization.
 */
public class SchemaWriter extends BaseWriter {
    // Names of the generated constant fields; '$' prefix avoids clashes with user columns.
    static final String TABLE_NAME = "$TABLE_NAME";
    static final String COLUMNS = "$COLUMNS";
    static final String ESCAPED_COLUMN_NAMES = "$ESCAPED_COLUMN_NAMES";
    // Modifier set shared by all generated constants.
    static final Modifier[] publicStaticFinal = {
            Modifier.PUBLIC,
            Modifier.STATIC,
            Modifier.FINAL,
    };
    private final SchemaDefinition schema;
    // Column acting as the primary key; assigned while building fields
    // (falls back to an implicit "_rowid_" column when the model declares none).
    FieldSpec primaryKey;
    public SchemaWriter(ProcessingContext context, SchemaDefinition schema) {
        super(context);
        this.schema = schema;
    }
    /** Assembles the whole generated schema class: fields first, then methods. */
    @Override
    public TypeSpec buildTypeSpec() {
        TypeSpec.Builder classBuilder = TypeSpec.classBuilder(schema.getSchemaClassName().simpleName());
        classBuilder.addModifiers(Modifier.PUBLIC);
        classBuilder.addSuperinterface(Types.getSchema(schema.getModelClassName()));
        classBuilder.addFields(buildFieldSpecs());
        classBuilder.addMethods(buildMethodSpecs());
        return classBuilder.build();
    }
    /**
     * Builds the generated class's constant fields: the singleton INSTANCE, one
     * ColumnDef per column, and the table-name/columns/escaped-names constants.
     */
    public List<FieldSpec> buildFieldSpecs() {
        List<FieldSpec> fieldSpecs = new ArrayList<>();
        fieldSpecs.add(FieldSpec.builder(schema.getSchemaClassName(), "INSTANCE", publicStaticFinal)
                .initializer("new $T()", schema.getSchemaClassName())
                .build());
        List<FieldSpec> columns = new ArrayList<>();
        schema.getColumns().forEach(columnDef -> {
            FieldSpec fieldSpec = buildColumnFieldSpec(columnDef);
            columns.add(fieldSpec);
            if (columnDef.primaryKey) {
                primaryKey = fieldSpec;
            }
        });
        if (primaryKey == null) {
            // Even if primary key is omitted, "_rowid_" is always available.
            // (WITHOUT ROWID is not supported by Orma)
            primaryKey = buildPrimaryKeyColumn();
            fieldSpecs.add(primaryKey);
        }
        fieldSpecs.addAll(columns);
        fieldSpecs.add(
                FieldSpec.builder(Types.String, TABLE_NAME)
                        .addModifiers(publicStaticFinal)
                        .initializer("$S", schema.tableName)
                        .build()
        );
        fieldSpecs.add(
                FieldSpec.builder(Types.getColumnDefList(schema.getModelClassName()), COLUMNS)
                        .addModifiers(publicStaticFinal)
                        .initializer(buildColumnsInitializer(columns))
                        .build()
        );
        fieldSpecs.add(
                FieldSpec.builder(Types.StringArray, ESCAPED_COLUMN_NAMES)
                        .addModifiers(publicStaticFinal)
                        .initializer(buildEscapedColumnNamesInitializer())
                        .build()
        );
        return fieldSpecs;
    }
    /**
     * Emits one anonymous ColumnDef subclass for a column, including its
     * type-safe get() accessor against the model class.
     */
    public FieldSpec buildColumnFieldSpec(ColumnDefinition c) {
        TypeName type = c.getType();
        CodeBlock typeInstance;
        if (type instanceof ParameterizedTypeName) {
            // Generic column types need a TypeHolder to capture the full parameterized type.
            typeInstance = CodeBlock.builder()
                    .add("new $T<$T>(){}.getType()", Types.TypeHolder, type
                    ).build();
        } else {
            typeInstance = CodeBlock.builder()
                    .add("$T.class", type)
                    .build();
        }
        TypeSpec.Builder columnDefType = TypeSpec.anonymousClassBuilder("INSTANCE, $S, $L, $S, $L",
                c.columnName, typeInstance, c.getStorageType(), buildColumnFlags(c));
        columnDefType.superclass(c.getColumnDefType());
        MethodSpec.Builder getBuilder = MethodSpec.methodBuilder("get")
                .addAnnotation(Specs.overrideAnnotationSpec())
                .addAnnotation(c.nullable ? Specs.nullableAnnotation() : Specs.nonNullAnnotationSpec())
                .addModifiers(Modifier.PUBLIC)
                .returns(c.getBoxType())
                .addParameter(ParameterSpec.builder(schema.getModelClassName(), "model")
                        .addAnnotation(Specs.nonNullAnnotationSpec())
                        .build());
        if (c.element != null) {
            getBuilder.addStatement("return $L", c.buildGetColumnExpr("model"));
        } else {
            // Only the implicit "_rowid_" column has no backing element.
            getBuilder.addStatement("throw new $T($S)", Types.NoValueException, "Missing @PrimaryKey definition");
        }
        columnDefType.addMethod(getBuilder.build());
        return FieldSpec.builder(c.getColumnDefType(), c.name)
                .addModifiers(publicStaticFinal)
                .initializer("$L", columnDefType.build())
                .build();
    }
    /** Renders the bitwise OR of ColumnDef flag constants for a column ("0" when none). */
    public CodeBlock buildColumnFlags(ColumnDefinition c) {
        CodeBlock.Builder builder = CodeBlock.builder();
        boolean some = false;
        if (c.primaryKey) {
            builder.add("$T.PRIMARY_KEY", Types.ColumnDef);
            some = true;
        }
        if (c.autoId) {
            if (some) {
                builder.add(" | ");
            }
            builder.add("$T.AUTO_VALUE", Types.ColumnDef);
            some = true;
        }
        if (c.autoincrement) {
            if (some) {
                builder.add(" | ");
            }
            builder.add("$T.AUTOINCREMENT", Types.ColumnDef);
            some = true;
        }
        if (c.nullable) {
            if (some) {
                builder.add(" | ");
            }
            builder.add("$T.NULLABLE", Types.ColumnDef);
            some = true;
        }
        if (c.indexed) {
            if (some) {
                builder.add(" | ");
            }
            builder.add("$T.INDEXED", Types.ColumnDef);
            some = true;
        }
        if (c.unique) {
            if (some) {
                builder.add(" | ");
            }
            builder.add("$T.UNIQUE", Types.ColumnDef);
            some = true;
        }
        if (!some) {
            builder.add("0");
        }
        return builder.build();
    }
    /** Builds the implicit "_rowid_" primary key column for models that declare none. */
    public FieldSpec buildPrimaryKeyColumn() {
        return buildColumnFieldSpec(ColumnDefinition.createDefaultPrimaryKey(schema));
    }
    /** Renders the Arrays.asList(...) initializer for the $COLUMNS constant. */
    public CodeBlock buildColumnsInitializer(List<FieldSpec> columns) {
        CodeBlock.Builder builder = CodeBlock.builder();
        builder.add("$T.<$T>asList(\n", Types.Arrays, Types.getColumnDef(schema.getModelClassName(), Types.WildcardType))
                .indent();
        for (int i = 0; i < columns.size(); i++) {
            builder.add("$N", columns.get(i));
            if ((i + 1) != columns.size()) {
                builder.add(",\n");
            } else {
                builder.add("\n");
            }
        }
        builder.unindent().add(")");
        return builder.build();
    }
    /** Renders the array initializer of double-quoted (escaped) column names. */
    public CodeBlock buildEscapedColumnNamesInitializer() {
        CodeBlock.Builder builder = CodeBlock.builder();
        builder.add("{\n").indent();
        List<ColumnDefinition> columns = schema.getColumns();
        for (int i = 0; i < columns.size(); i++) {
            builder.add("$S", '"' + columns.get(i).columnName + '"');
            if ((i + 1) != columns.size()) {
                builder.add(",\n");
            } else {
                builder.add("\n");
            }
        }
        builder.unindent().add("}");
        return builder.build();
    }
    /**
     * Builds every generated method: metadata getters, SQL statement builders,
     * statement binding (bindArgs/convertToArgs) and cursor deserialization.
     */
    public List<MethodSpec> buildMethodSpecs() {
        List<MethodSpec> methodSpecs = new ArrayList<>();
        List<AnnotationSpec> overrideAndNonNull = Arrays.asList(
                Specs.nonNullAnnotationSpec(),
                Specs.overrideAnnotationSpec()
        );
        methodSpecs.add(
                MethodSpec.methodBuilder("getModelClass")
                        .addAnnotations(overrideAndNonNull)
                        .addModifiers(Modifier.PUBLIC)
                        .returns(ParameterizedTypeName.get(ClassName.get(Class.class), schema.getModelClassName()))
                        .addStatement("return $T.class", schema.getModelClassName())
                        .build()
        );
        methodSpecs.add(
                MethodSpec.methodBuilder("getTableName")
                        .addAnnotations(overrideAndNonNull)
                        .addModifiers(Modifier.PUBLIC)
                        .returns(Types.String)
                        .addStatement("return $L", TABLE_NAME)
                        .build()
        );
        methodSpecs.add(
                MethodSpec.methodBuilder("getEscapedTableName")
                        .addAnnotations(overrideAndNonNull)
                        .addModifiers(Modifier.PUBLIC)
                        .returns(Types.String)
                        .addStatement("return '\"' + $L + '\"'", TABLE_NAME)
                        .build()
        );
        methodSpecs.add(
                MethodSpec.methodBuilder("getPrimaryKey")
                        .addAnnotations(overrideAndNonNull)
                        .addModifiers(Modifier.PUBLIC)
                        .returns(Types.getColumnDef(schema.getModelClassName(), Types.WildcardType))
                        .addStatement("return $N", primaryKey)
                        .build()
        );
        methodSpecs.add(
                MethodSpec.methodBuilder("getColumns")
                        .addAnnotations(overrideAndNonNull)
                        .addModifiers(Modifier.PUBLIC)
                        .returns(Types.getColumnDefList(schema.getModelClassName()))
                        .addStatement("return $L", COLUMNS)
                        .build()
        );
        methodSpecs.add(
                MethodSpec.methodBuilder("getEscapedColumnNames")
                        .addAnnotations(overrideAndNonNull)
                        .addModifiers(Modifier.PUBLIC)
                        .returns(Types.StringArray)
                        .addStatement("return $L", ESCAPED_COLUMN_NAMES)
                        .build()
        );
        methodSpecs.add(
                MethodSpec.methodBuilder("getCreateTableStatement")
                        .addAnnotations(overrideAndNonNull)
                        .addModifiers(Modifier.PUBLIC)
                        .returns(Types.String)
                        .addStatement("return $S", sql.buildCreateTableStatement(schema))
                        .build()
        );
        methodSpecs.add(
                MethodSpec.methodBuilder("getCreateIndexStatements")
                        .addAnnotations(overrideAndNonNull)
                        .addModifiers(Modifier.PUBLIC)
                        .returns(Types.getList(Types.String))
                        .addCode(sql.buildCreateIndexStatements(schema))
                        .build()
        );
        methodSpecs.add(
                MethodSpec.methodBuilder("getDropTableStatement")
                        .addAnnotations(overrideAndNonNull)
                        .addModifiers(Modifier.PUBLIC)
                        .returns(Types.String)
                        .addStatement("return $S", sql.buildDropTableStatement(schema))
                        .build()
        );
        methodSpecs.add(
                MethodSpec.methodBuilder("getInsertStatement")
                        .addAnnotations(overrideAndNonNull)
                        .addModifiers(Modifier.PUBLIC)
                        .addParameter(ParameterSpec.builder(int.class, "onConflictAlgorithm")
                                .addAnnotation(OnConflict.class)
                                .build())
                        .returns(Types.String)
                        .addCode(sql.buildInsertStatementCode(schema, "onConflictAlgorithm"))
                        .build()
        );
        methodSpecs.add(
                MethodSpec.methodBuilder("convertToArgs")
                        .addJavadoc("Provided for debugging\n")
                        .addAnnotations(overrideAndNonNull)
                        .addModifiers(Modifier.PUBLIC)
                        .returns(ArrayTypeName.of(TypeName.OBJECT))
                        .addParameter(
                                ParameterSpec.builder(Types.OrmaConnection, "conn")
                                        .addAnnotation(Specs.nonNullAnnotationSpec())
                                        .build())
                        .addParameter(
                                ParameterSpec.builder(schema.getModelClassName(), "model")
                                        .addAnnotation(Specs.nonNullAnnotationSpec())
                                        .build())
                        .addCode(buildConvertToArgs())
                        .build()
        );
        methodSpecs.add(
                MethodSpec.methodBuilder("bindArgs")
                        .addAnnotation(Override.class)
                        .addModifiers(Modifier.PUBLIC)
                        .returns(TypeName.VOID)
                        .addParameter(
                                ParameterSpec.builder(Types.OrmaConnection, "conn")
                                        .addAnnotation(Specs.nonNullAnnotationSpec())
                                        .build())
                        .addParameter(
                                ParameterSpec.builder(Types.SQLiteStatement, "statement")
                                        .addAnnotation(Specs.nonNullAnnotationSpec())
                                        .build())
                        .addParameter(
                                ParameterSpec.builder(schema.getModelClassName(), "model")
                                        .addAnnotation(Specs.nonNullAnnotationSpec())
                                        .build())
                        .addCode(buildBindArgs())
                        .build()
        );
        methodSpecs.add(
                MethodSpec.methodBuilder("newModelFromCursor")
                        .addAnnotations(overrideAndNonNull)
                        .addModifiers(Modifier.PUBLIC)
                        .returns(schema.getModelClassName())
                        .addParameter(
                                ParameterSpec.builder(Types.OrmaConnection, "conn")
                                        .addAnnotation(Specs.nonNullAnnotationSpec())
                                        .build())
                        .addParameter(
                                ParameterSpec.builder(Types.Cursor, "cursor")
                                        .addAnnotation(Specs.nonNullAnnotationSpec())
                                        .build())
                        .addCode(buildNewModelFromCursor())
                        .build()
        );
        return methodSpecs;
    }
    /** Generates the convertToArgs() body: serializes each non-auto-id column into an Object[]. */
    private CodeBlock buildConvertToArgs() {
        CodeBlock.Builder builder = CodeBlock.builder();
        List<ColumnDefinition> columns = schema.getColumnsWithoutAutoId();
        builder.addStatement("$T args = new $T[$L]", ArrayTypeName.of(TypeName.OBJECT), TypeName.OBJECT, columns.size());
        for (int i = 0; i < columns.size(); i++) {
            ColumnDefinition c = columns.get(i);
            AssociationDefinition r = c.getAssociation();
            CodeBlock rhsExpr = c.buildSerializedColumnExpr("conn", "model");
            if (r != null && r.associationType.equals(Types.SingleAssociation)) {
                // Associations are stored as the referenced row's id.
                builder.addStatement("args[$L] = $L.getId()", i, c.buildGetColumnExpr("model"));
            } else if (c.getSerializedType().equals(TypeName.BOOLEAN)) {
                // SQLite has no boolean storage class; store 1/0.
                builder.addStatement("args[$L] = $L ? 1 : 0", i, rhsExpr);
            } else {
                builder.addStatement("args[$L] = $L", i, rhsExpr);
            }
        }
        builder.addStatement("return args");
        return builder.build();
    }
    // http://developer.android.com/intl/ja/reference/android/database/sqlite/SQLiteStatement.html
    /** Generates the bindArgs() body: binds each non-auto-id column on the compiled statement. */
    private CodeBlock buildBindArgs() {
        CodeBlock.Builder builder = CodeBlock.builder();
        List<ColumnDefinition> columns = schema.getColumnsWithoutAutoId();
        for (int i = 0; i < columns.size(); i++) {
            int n = i + 1; // bind index starts 1
            ColumnDefinition c = columns.get(i);
            TypeName serializedType = c.getSerializedType();
            AssociationDefinition r = c.getAssociation();
            if (c.isNullableInJava()) {
                builder.beginControlFlow("if ($L != null)", c.buildGetColumnExpr("model"));
            }
            CodeBlock rhsExpr = c.buildSerializedColumnExpr("conn", "model");
            if (serializedType.equals(TypeName.BOOLEAN)) {
                builder.addStatement("statement.bindLong($L, $L ? 1 : 0)", n, rhsExpr);
            } else if (Types.looksLikeIntegerType(serializedType)) {
                builder.addStatement("statement.bindLong($L, $L)", n, rhsExpr);
            } else if (Types.looksLikeFloatType(serializedType)) {
                builder.addStatement("statement.bindDouble($L, $L)", n, rhsExpr);
            } else if (serializedType.equals(Types.ByteArray)) {
                builder.addStatement("statement.bindBlob($L, $L)", n, rhsExpr);
            } else if (serializedType.equals(Types.String)) {
                builder.addStatement("statement.bindString($L, $L)", n, rhsExpr);
            } else if (r != null && r.associationType.equals(Types.SingleAssociation)) {
                builder.addStatement("statement.bindLong($L, $L.getId())", n, c.buildGetColumnExpr("model"));
            } else {
                // Fallback: rely on a registered type adapter producing a string.
                builder.addStatement("statement.bindString($L, $L)", n, rhsExpr);
                // TODO: throw the following errors in v2.0
                // throw new ProcessingException("No storage method found for " + serializedType, c.element);
            }
            if (c.isNullableInJava()) {
                builder.endControlFlow();
                builder.beginControlFlow("else");
                builder.addStatement("statement.bindNull($L)", n);
                builder.endControlFlow();
            }
        }
        return builder.build();
    }
    /**
     * Generates per-column assignments reading from a Cursor; lhsBaseGen supplies the
     * assignment target prefix ("model." for setters, or a local variable declaration).
     */
    private CodeBlock buildPopulateValuesIntoCursor(Function<ColumnDefinition, CodeBlock> lhsBaseGen) {
        CodeBlock.Builder builder = CodeBlock.builder();
        List<ColumnDefinition> columns = schema.getColumns();
        for (int i = 0; i < columns.size(); i++) {
            ColumnDefinition c = columns.get(i);
            TypeName type = c.getUnboxType();
            if (Types.isDirectAssociation(context, type)) {
                ClassName className = (ClassName) type;
                String singleAssocType = "SingleAssociation<" + className.simpleName() + ">";
                context.addError("Direct association is not yet supported. Use " + singleAssocType + " instead.", c.element);
            } else if (Types.isSingleAssociation(type)) {
                AssociationDefinition r = c.getAssociation();
                CodeBlock.Builder getRhsExpr = CodeBlock.builder()
                        .add("new $T<>(conn, $L, cursor.getLong($L))",
                                r.associationType, context.getSchemaInstanceExpr(r.modelType), i);
                builder.addStatement("$L$L", lhsBaseGen.apply(c), c.buildSetColumnExpr(getRhsExpr.build()));
            } else {
                // Guard nullable SQL columns with an isNull() check.
                if (c.isNullableInSQL()) {
                    builder.beginControlFlow("if (!cursor.isNull($L))", i);
                }
                CodeBlock getRhsExpr = c.buildDeserializeExpr("conn", cursorGetter(c, i));
                builder.addStatement("$L$L", lhsBaseGen.apply(c), c.buildSetColumnExpr(getRhsExpr));
                if (c.isNullableInSQL()) {
                    builder.endControlFlow();
                }
            }
        }
        return builder.build();
    }
    /**
     * Generates newModelFromCursor(): default constructor plus setters when available,
     * otherwise the @Setter-annotated constructor with one local per column.
     */
    private CodeBlock buildNewModelFromCursor() {
        CodeBlock.Builder builder = CodeBlock.builder();
        if (schema.hasDefaultConstructor()) {
            builder.addStatement("$T model = new $T()", schema.getModelClassName(), schema.getModelClassName());
            builder.add(buildPopulateValuesIntoCursor(column -> CodeBlock.builder().add("model.").build()));
            builder.addStatement("return model");
        } else {
            if (schema.getColumns().size() != schema.constructorElement.getParameters().size()) {
                // FIXME: check the parameters more strictly
                context.addError("The @Setter constructor parameters must satisfy @Column fields", schema.constructorElement);
            }
            builder.add(buildPopulateValuesIntoCursor(
                    column -> CodeBlock.builder().add("$T ", column.getType()).build()));
            builder.addStatement("return new $T($L)", schema.getModelClassName(),
                    schema.constructorElement.getParameters()
                            .stream()
                            .map(this::extractColumnNameFromParameterElement)
                            .collect(Collectors.joining(", ")));
        }
        return builder.build();
    }
    /** Maps a constructor parameter to its column name, honoring @Setter("name") overrides. */
    private String extractColumnNameFromParameterElement(VariableElement parameterElement) {
        Setter setter = parameterElement.getAnnotation(Setter.class);
        if (setter != null && !Strings.isEmpty(setter.value())) {
            return setter.value();
        }
        return parameterElement.getSimpleName().toString();
    }
    /** Returns the Cursor accessor expression for the column's serialized type at the given index. */
    private String cursorGetter(ColumnDefinition column, int position) {
        TypeName type = column.getSerializedType();
        if (type.equals(TypeName.BOOLEAN)) {
            return "cursor.getLong(" + position + ") != 0";
        } else if (type.equals(TypeName.BYTE)) {
            return "(byte)cursor.getShort(" + position + ")";
        } else if (type.isPrimitive()) {
            String s = type.toString();
            return "cursor.get" + s.substring(0, 1).toUpperCase() + s.substring(1) + "(" + position + ")";
        } else if (type.equals(Types.String)) {
            return "cursor.getString(" + position + ")";
        } else if (type.equals(Types.ByteArray)) {
            return "cursor.getBlob(" + position + ")";
        } else {
            return "cursor.getString(" + position + ")"; // handled by type adapters
        }
    }
}
| processor/src/main/java/com/github/gfx/android/orma/processor/SchemaWriter.java | /*
* Copyright (c) 2015 FUJI Goro (gfx).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.gfx.android.orma.processor;
import com.github.gfx.android.orma.annotation.OnConflict;
import com.github.gfx.android.orma.annotation.Setter;
import com.squareup.javapoet.AnnotationSpec;
import com.squareup.javapoet.ArrayTypeName;
import com.squareup.javapoet.ClassName;
import com.squareup.javapoet.CodeBlock;
import com.squareup.javapoet.FieldSpec;
import com.squareup.javapoet.MethodSpec;
import com.squareup.javapoet.ParameterSpec;
import com.squareup.javapoet.ParameterizedTypeName;
import com.squareup.javapoet.TypeName;
import com.squareup.javapoet.TypeSpec;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.function.Function;
import java.util.stream.Collectors;
import javax.lang.model.element.Modifier;
import javax.lang.model.element.VariableElement;
/**
* {@code Schema<T>} represents how a model is connected to an SQLite table.
*/
public class SchemaWriter extends BaseWriter {
static final String TABLE_NAME = "$TABLE_NAME";
static final String COLUMNS = "$COLUMNS";
static final String ESCAPED_COLUMN_NAMES = "$ESCAPED_COLUMN_NAMES";
static final Modifier[] publicStaticFinal = {
Modifier.PUBLIC,
Modifier.STATIC,
Modifier.FINAL,
};
private final SchemaDefinition schema;
FieldSpec primaryKey;
    /** Creates a writer for one model's schema; schema describes the annotated model class. */
    public SchemaWriter(ProcessingContext context, SchemaDefinition schema) {
        super(context);
        this.schema = schema;
    }
    /** Assembles the whole generated schema class: fields first, then methods. */
    @Override
    public TypeSpec buildTypeSpec() {
        TypeSpec.Builder classBuilder = TypeSpec.classBuilder(schema.getSchemaClassName().simpleName());
        classBuilder.addModifiers(Modifier.PUBLIC);
        classBuilder.addSuperinterface(Types.getSchema(schema.getModelClassName()));
        classBuilder.addFields(buildFieldSpecs());
        classBuilder.addMethods(buildMethodSpecs());
        return classBuilder.build();
    }
    /**
     * Builds the generated class's constant fields: the singleton INSTANCE, one
     * ColumnDef per column, and the table-name/columns/escaped-names constants.
     */
    public List<FieldSpec> buildFieldSpecs() {
        List<FieldSpec> fieldSpecs = new ArrayList<>();
        fieldSpecs.add(FieldSpec.builder(schema.getSchemaClassName(), "INSTANCE", publicStaticFinal)
                .initializer("new $T()", schema.getSchemaClassName())
                .build());
        List<FieldSpec> columns = new ArrayList<>();
        schema.getColumns().forEach(columnDef -> {
            FieldSpec fieldSpec = buildColumnFieldSpec(columnDef);
            columns.add(fieldSpec);
            if (columnDef.primaryKey) {
                primaryKey = fieldSpec;
            }
        });
        if (primaryKey == null) {
            // Even if primary key is omitted, "_rowid_" is always available.
            // (WITHOUT ROWID is not supported by Orma)
            primaryKey = buildPrimaryKeyColumn();
            fieldSpecs.add(primaryKey);
        }
        fieldSpecs.addAll(columns);
        fieldSpecs.add(
                FieldSpec.builder(Types.String, TABLE_NAME)
                        .addModifiers(publicStaticFinal)
                        .initializer("$S", schema.tableName)
                        .build()
        );
        fieldSpecs.add(
                FieldSpec.builder(Types.getColumnDefList(schema.getModelClassName()), COLUMNS)
                        .addModifiers(publicStaticFinal)
                        .initializer(buildColumnsInitializer(columns))
                        .build()
        );
        fieldSpecs.add(
                FieldSpec.builder(Types.StringArray, ESCAPED_COLUMN_NAMES)
                        .addModifiers(publicStaticFinal)
                        .initializer(buildEscapedColumnNamesInitializer())
                        .build()
        );
        return fieldSpecs;
    }
    /**
     * Emits one anonymous ColumnDef subclass for a column, including its
     * type-safe get() accessor against the model class.
     */
    public FieldSpec buildColumnFieldSpec(ColumnDefinition c) {
        TypeName type = c.getType();
        CodeBlock typeInstance;
        if (type instanceof ParameterizedTypeName) {
            // Generic column types need a TypeHolder to capture the full parameterized type.
            typeInstance = CodeBlock.builder()
                    .add("new $T<$T>(){}.getType()", Types.TypeHolder, type
                    ).build();
        } else {
            typeInstance = CodeBlock.builder()
                    .add("$T.class", type)
                    .build();
        }
        TypeSpec.Builder columnDefType = TypeSpec.anonymousClassBuilder("INSTANCE, $S, $L, $S, $L",
                c.columnName, typeInstance, c.getStorageType(), buildColumnFlags(c));
        columnDefType.superclass(c.getColumnDefType());
        MethodSpec.Builder getBuilder = MethodSpec.methodBuilder("get")
                .addAnnotation(Specs.overrideAnnotationSpec())
                .addAnnotation(c.nullable ? Specs.nullableAnnotation() : Specs.nonNullAnnotationSpec())
                .addModifiers(Modifier.PUBLIC)
                .returns(c.getBoxType())
                .addParameter(ParameterSpec.builder(schema.getModelClassName(), "model")
                        .addAnnotation(Specs.nonNullAnnotationSpec())
                        .build());
        if (c.element != null) {
            getBuilder.addStatement("return $L", c.buildGetColumnExpr("model"));
        } else {
            // Only the implicit "_rowid_" column has no backing element.
            getBuilder.addStatement("throw new $T($S)", Types.NoValueException, "Missing @PrimaryKey definition");
        }
        columnDefType.addMethod(getBuilder.build());
        return FieldSpec.builder(c.getColumnDefType(), c.name)
                .addModifiers(publicStaticFinal)
                .initializer("$L", columnDefType.build())
                .build();
    }
public CodeBlock buildColumnFlags(ColumnDefinition c) {
CodeBlock.Builder builder = CodeBlock.builder();
boolean some = false;
if (c.primaryKey) {
builder.add("$T.PRIMARY_KEY", Types.ColumnDef);
some = true;
}
if (c.autoId) {
if (some) {
builder.add(" | ");
}
builder.add("$T.AUTO_VALUE", Types.ColumnDef);
some = true;
}
if (c.autoincrement) {
if (some) {
builder.add(" | ");
}
builder.add("$T.AUTOINCREMENT", Types.ColumnDef);
some = true;
}
if (c.nullable) {
if (some) {
builder.add(" | ");
}
builder.add("$T.NULLABLE", Types.ColumnDef);
some = true;
}
if (c.indexed) {
if (some) {
builder.add(" | ");
}
builder.add("$T.INDEXED", Types.ColumnDef);
some = true;
}
if (c.unique) {
if (some) {
builder.add(" | ");
}
builder.add("$T.UNIQUE", Types.ColumnDef);
some = true;
}
if (!some) {
builder.add("0");
}
return builder.build();
}
public FieldSpec buildPrimaryKeyColumn() {
return buildColumnFieldSpec(ColumnDefinition.createDefaultPrimaryKey(schema));
}
public CodeBlock buildColumnsInitializer(List<FieldSpec> columns) {
CodeBlock.Builder builder = CodeBlock.builder();
builder.add("$T.<$T>asList(\n", Types.Arrays, Types.getColumnDef(schema.getModelClassName(), Types.WildcardType))
.indent();
for (int i = 0; i < columns.size(); i++) {
builder.add("$N", columns.get(i));
if ((i + 1) != columns.size()) {
builder.add(",\n");
} else {
builder.add("\n");
}
}
builder.unindent().add(")");
return builder.build();
}
public CodeBlock buildEscapedColumnNamesInitializer() {
CodeBlock.Builder builder = CodeBlock.builder();
builder.add("{\n").indent();
List<ColumnDefinition> columns = schema.getColumns();
for (int i = 0; i < columns.size(); i++) {
builder.add("$S", '"' + columns.get(i).columnName + '"');
if ((i + 1) != columns.size()) {
builder.add(",\n");
} else {
builder.add("\n");
}
}
builder.unindent().add("}");
return builder.build();
}
public List<MethodSpec> buildMethodSpecs() {
List<MethodSpec> methodSpecs = new ArrayList<>();
List<AnnotationSpec> overrideAndNonNull = Arrays.asList(
Specs.nonNullAnnotationSpec(),
Specs.overrideAnnotationSpec()
);
methodSpecs.add(
MethodSpec.methodBuilder("getModelClass")
.addAnnotations(overrideAndNonNull)
.addModifiers(Modifier.PUBLIC)
.returns(ParameterizedTypeName.get(ClassName.get(Class.class), schema.getModelClassName()))
.addStatement("return $T.class", schema.getModelClassName())
.build()
);
methodSpecs.add(
MethodSpec.methodBuilder("getTableName")
.addAnnotations(overrideAndNonNull)
.addModifiers(Modifier.PUBLIC)
.returns(Types.String)
.addStatement("return $L", TABLE_NAME)
.build()
);
methodSpecs.add(
MethodSpec.methodBuilder("getEscapedTableName")
.addAnnotations(overrideAndNonNull)
.addModifiers(Modifier.PUBLIC)
.returns(Types.String)
.addStatement("return '\"' + $L + '\"'", TABLE_NAME)
.build()
);
methodSpecs.add(
MethodSpec.methodBuilder("getPrimaryKey")
.addAnnotations(overrideAndNonNull)
.addModifiers(Modifier.PUBLIC)
.returns(Types.getColumnDef(schema.getModelClassName(), Types.WildcardType))
.addStatement("return $N", primaryKey)
.build()
);
methodSpecs.add(
MethodSpec.methodBuilder("getColumns")
.addAnnotations(overrideAndNonNull)
.addModifiers(Modifier.PUBLIC)
.returns(Types.getColumnDefList(schema.getModelClassName()))
.addStatement("return $L", COLUMNS)
.build()
);
methodSpecs.add(
MethodSpec.methodBuilder("getEscapedColumnNames")
.addAnnotations(overrideAndNonNull)
.addModifiers(Modifier.PUBLIC)
.returns(Types.StringArray)
.addStatement("return $L", ESCAPED_COLUMN_NAMES)
.build()
);
methodSpecs.add(
MethodSpec.methodBuilder("getCreateTableStatement")
.addAnnotations(overrideAndNonNull)
.addModifiers(Modifier.PUBLIC)
.returns(Types.String)
.addStatement("return $S", sql.buildCreateTableStatement(schema))
.build()
);
methodSpecs.add(
MethodSpec.methodBuilder("getCreateIndexStatements")
.addAnnotations(overrideAndNonNull)
.addModifiers(Modifier.PUBLIC)
.returns(Types.getList(Types.String))
.addCode(sql.buildCreateIndexStatements(schema))
.build()
);
methodSpecs.add(
MethodSpec.methodBuilder("getDropTableStatement")
.addAnnotations(overrideAndNonNull)
.addModifiers(Modifier.PUBLIC)
.returns(Types.String)
.addStatement("return $S", sql.buildDropTableStatement(schema))
.build()
);
methodSpecs.add(
MethodSpec.methodBuilder("getInsertStatement")
.addAnnotations(overrideAndNonNull)
.addModifiers(Modifier.PUBLIC)
.addParameter(ParameterSpec.builder(int.class, "onConflictAlgorithm")
.addAnnotation(OnConflict.class)
.build())
.returns(Types.String)
.addCode(sql.buildInsertStatementCode(schema, "onConflictAlgorithm"))
.build()
);
methodSpecs.add(
MethodSpec.methodBuilder("convertToArgs")
.addJavadoc("Provided for debugging\n")
.addAnnotations(overrideAndNonNull)
.addModifiers(Modifier.PUBLIC)
.returns(ArrayTypeName.of(TypeName.OBJECT))
.addParameter(
ParameterSpec.builder(Types.OrmaConnection, "conn")
.addAnnotation(Specs.nonNullAnnotationSpec())
.build())
.addParameter(
ParameterSpec.builder(schema.getModelClassName(), "model")
.addAnnotation(Specs.nonNullAnnotationSpec())
.build())
.addCode(buildConvertToArgs())
.build()
);
methodSpecs.add(
MethodSpec.methodBuilder("bindArgs")
.addAnnotation(Override.class)
.addModifiers(Modifier.PUBLIC)
.returns(TypeName.VOID)
.addParameter(
ParameterSpec.builder(Types.OrmaConnection, "conn")
.addAnnotation(Specs.nonNullAnnotationSpec())
.build())
.addParameter(
ParameterSpec.builder(Types.SQLiteStatement, "statement")
.addAnnotation(Specs.nonNullAnnotationSpec())
.build())
.addParameter(
ParameterSpec.builder(schema.getModelClassName(), "model")
.addAnnotation(Specs.nonNullAnnotationSpec())
.build())
.addCode(buildBindArgs())
.build()
);
methodSpecs.add(
MethodSpec.methodBuilder("newModelFromCursor")
.addAnnotations(overrideAndNonNull)
.addModifiers(Modifier.PUBLIC)
.returns(schema.getModelClassName())
.addParameter(
ParameterSpec.builder(Types.OrmaConnection, "conn")
.addAnnotation(Specs.nonNullAnnotationSpec())
.build())
.addParameter(
ParameterSpec.builder(Types.Cursor, "cursor")
.addAnnotation(Specs.nonNullAnnotationSpec())
.build())
.addCode(buildNewModelFromCursor())
.build()
);
return methodSpecs;
}
private CodeBlock buildConvertToArgs() {
CodeBlock.Builder builder = CodeBlock.builder();
List<ColumnDefinition> columns = schema.getColumnsWithoutAutoId();
builder.addStatement("$T args = new $T[$L]", ArrayTypeName.of(TypeName.OBJECT), TypeName.OBJECT, columns.size());
for (int i = 0; i < columns.size(); i++) {
ColumnDefinition c = columns.get(i);
AssociationDefinition r = c.getAssociation();
CodeBlock rhsExpr = c.buildSerializedColumnExpr("conn", "model");
if (r != null && r.associationType.equals(Types.SingleAssociation)) {
builder.addStatement("args[$L] = $L.getId()", i, c.buildGetColumnExpr("model"));
} else if (c.getSerializedType().equals(TypeName.BOOLEAN)) {
builder.addStatement("args[$L] = $L ? 1 : 0", i, rhsExpr);
} else {
builder.addStatement("args[$L] = $L", i, rhsExpr);
}
}
builder.addStatement("return args");
return builder.build();
}
// http://developer.android.com/intl/ja/reference/android/database/sqlite/SQLiteStatement.html
private CodeBlock buildBindArgs() {
CodeBlock.Builder builder = CodeBlock.builder();
List<ColumnDefinition> columns = schema.getColumnsWithoutAutoId();
for (int i = 0; i < columns.size(); i++) {
int n = i + 1; // bind index starts 1
ColumnDefinition c = columns.get(i);
TypeName serializedType = c.getSerializedType();
AssociationDefinition r = c.getAssociation();
if (c.isNullableInJava()) {
builder.beginControlFlow("if ($L != null)", c.buildGetColumnExpr("model"));
}
CodeBlock rhsExpr = c.buildSerializedColumnExpr("conn", "model");
if (serializedType.equals(TypeName.BOOLEAN)) {
builder.addStatement("statement.bindLong($L, $L ? 1 : 0)", n, rhsExpr);
} else if (Types.looksLikeIntegerType(serializedType)) {
builder.addStatement("statement.bindLong($L, $L)", n, rhsExpr);
} else if (Types.looksLikeFloatType(serializedType)) {
builder.addStatement("statement.bindDouble($L, $L)", n, rhsExpr);
} else if (serializedType.equals(Types.ByteArray)) {
builder.addStatement("statement.bindBlob($L, $L)", n, rhsExpr);
} else if (serializedType.equals(Types.String)) {
builder.addStatement("statement.bindString($L, $L)", n, rhsExpr);
} else if (r != null && r.associationType.equals(Types.SingleAssociation)) {
builder.addStatement("statement.bindLong($L, $L.getId())", n, c.buildGetColumnExpr("model"));
} else {
builder.addStatement("statement.bindString($L, $L)", n, rhsExpr);
// TODO: throw the following errors in v2.0
// throw new ProcessingException("No storage method found for " + serializedType, c.element);
}
if (c.isNullableInJava()) {
builder.endControlFlow();
builder.beginControlFlow("else");
builder.addStatement("statement.bindNull($L)", n);
builder.endControlFlow();
}
}
return builder.build();
}
private CodeBlock buildPopulateValuesIntoCursor(Function<ColumnDefinition, CodeBlock> lhsBaseGen) {
CodeBlock.Builder builder = CodeBlock.builder();
List<ColumnDefinition> columns = schema.getColumns();
for (int i = 0; i < columns.size(); i++) {
ColumnDefinition c = columns.get(i);
TypeName type = c.getUnboxType();
if (Types.isDirectAssociation(context, type)) {
ClassName className = (ClassName) type;
String singleAssocType = "SingleAssociation<" + className.simpleName() + ">";
context.addError("Direct association is not yet supported. Use " + singleAssocType + " instead.", c.element);
} else if (Types.isSingleAssociation(type)) {
AssociationDefinition r = c.getAssociation();
CodeBlock.Builder getRhsExpr = CodeBlock.builder()
.add("new $T<>(conn, $L, cursor.getLong($L))",
r.associationType, context.getSchemaInstanceExpr(r.modelType), i);
builder.addStatement("$L$L", lhsBaseGen.apply(c), c.buildSetColumnExpr(getRhsExpr.build()));
} else {
if (c.isNullableInSQL()) {
builder.beginControlFlow("if (!cursor.isNull($L))", i);
}
CodeBlock getRhsExpr = c.buildDeserializeExpr("conn", cursorGetter(c, i));
builder.addStatement("$L$L", lhsBaseGen.apply(c), c.buildSetColumnExpr(getRhsExpr));
if (c.isNullableInSQL()) {
builder.endControlFlow();
}
}
}
return builder.build();
}
private CodeBlock buildNewModelFromCursor() {
CodeBlock.Builder builder = CodeBlock.builder();
if (schema.hasDefaultConstructor()) {
builder.addStatement("$T model = new $T()", schema.getModelClassName(), schema.getModelClassName());
builder.add(buildPopulateValuesIntoCursor(column -> CodeBlock.builder().add("model.").build()));
builder.addStatement("return model");
} else {
if (schema.getColumns().size() != schema.constructorElement.getParameters().size()) {
// FIXME: check the parameters more strictly
context.addError("The @Setter constructor parameters must satisfy @Column fields", schema.constructorElement);
}
builder.add(buildPopulateValuesIntoCursor(
column -> CodeBlock.builder().add("$T ", column.getType()).build()));
builder.addStatement("return new $T($L)", schema.getModelClassName(),
schema.constructorElement.getParameters()
.stream()
.map(this::extractColumnNameFromParameterElement)
.collect(Collectors.joining(", ")));
}
return builder.build();
}
private String extractColumnNameFromParameterElement(VariableElement parameterElement) {
Setter setter = parameterElement.getAnnotation(Setter.class);
if (setter != null && Strings.isEmpty(setter.value())) {
return setter.value();
}
return parameterElement.getSimpleName().toString();
}
private String cursorGetter(ColumnDefinition column, int position) {
TypeName type = column.getSerializedType();
if (type.equals(TypeName.BOOLEAN)) {
return "cursor.getLong(" + position + ") != 0";
} else if (type.equals(TypeName.BYTE)) {
return "(byte)cursor.getShort(" + position + ")";
} else if (type.isPrimitive()) {
String s = type.toString();
return "cursor.get" + s.substring(0, 1).toUpperCase() + s.substring(1) + "(" + position + ")";
} else if (type.equals(Types.String)) {
return "cursor.getString(" + position + ")";
} else if (type.equals(Types.ByteArray)) {
return "cursor.getBlob(" + position + ")";
} else {
return "cursor.getString(" + position + ")"; // handled by type adapters
}
}
}
| fix @Setter(name) for constructors
| processor/src/main/java/com/github/gfx/android/orma/processor/SchemaWriter.java | fix @Setter(name) for constructors | <ide><path>rocessor/src/main/java/com/github/gfx/android/orma/processor/SchemaWriter.java
<ide> builder.add(buildPopulateValuesIntoCursor(column -> CodeBlock.builder().add("model.").build()));
<ide> builder.addStatement("return model");
<ide> } else {
<add>
<ide> if (schema.getColumns().size() != schema.constructorElement.getParameters().size()) {
<ide> // FIXME: check the parameters more strictly
<ide> context.addError("The @Setter constructor parameters must satisfy @Column fields", schema.constructorElement);
<ide>
<ide> private String extractColumnNameFromParameterElement(VariableElement parameterElement) {
<ide> Setter setter = parameterElement.getAnnotation(Setter.class);
<del> if (setter != null && Strings.isEmpty(setter.value())) {
<add> if (setter != null && !Strings.isEmpty(setter.value())) {
<ide> return setter.value();
<ide> }
<ide> return parameterElement.getSimpleName().toString(); |
|
Java | apache-2.0 | ef830428989e0b31de7660e831dcff7ed5cc6b83 | 0 | francis-pouatcha/javaext.description | package org.adorsys.javaext.display;
import static java.lang.annotation.ElementType.FIELD;
import static java.lang.annotation.ElementType.METHOD;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
@Target({ ElementType.TYPE, METHOD, FIELD })
@Retention(RUNTIME)
@Documented
public @interface Association {
/**
* Returns fields of the referenced entity to be displayed
* by the referencing entity.
* @return
*/
public String[] fields() default {};
public SelectionMode selectionMode() default SelectionMode.FORWARD;
}
| src/main/java/org/adorsys/javaext/display/Association.java | package org.adorsys.javaext.display;
import static java.lang.annotation.ElementType.FIELD;
import static java.lang.annotation.ElementType.METHOD;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
@Target({ ElementType.TYPE, METHOD, FIELD })
@Retention(RUNTIME)
@Documented
public @interface Association {
/**
* Returns fields of the referenced entity to be displayed
* by the referencing entity.
* @return
*/
public String[] fields();
public SelectionMode selectionMode() default SelectionMode.FORWARD;
}
| Imporving implementation of associationg
| src/main/java/org/adorsys/javaext/display/Association.java | Imporving implementation of associationg | <ide><path>rc/main/java/org/adorsys/javaext/display/Association.java
<ide> * by the referencing entity.
<ide> * @return
<ide> */
<del> public String[] fields();
<add> public String[] fields() default {};
<ide>
<ide> public SelectionMode selectionMode() default SelectionMode.FORWARD;
<ide> } |
|
Java | bsd-3-clause | 5599cee9cecb27b42bb42d5e8e1cb3dd92b7c879 | 0 | asamgir/openspecimen,asamgir/openspecimen,krishagni/openspecimen,krishagni/openspecimen,asamgir/openspecimen,krishagni/openspecimen | import java.io.File;
import java.sql.Connection;
import java.sql.Statement;
import java.sql.SQLException;
import java.util.Collection;
import java.util.Random;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.Transaction;
import org.hibernate.cfg.Configuration;
import edu.common.dynamicextensions.domaininterface.AttributeInterface;
import edu.common.dynamicextensions.domaininterface.EntityInterface;
import edu.common.dynamicextensions.entitymanager.EntityManager;
import edu.common.dynamicextensions.entitymanager.EntityManagerInterface;
public class MaskUsingDEMetatdata
{
private static int randomNumber;
public static void main(String[] args)
{
MaskUsingDEMetatdata mask=new MaskUsingDEMetatdata();
mask.maskIdentifiedData();
}
public void maskIdentifiedData()
{
Random generator = new Random();
randomNumber=generator.nextInt(50);
try
{
EntityManagerInterface entityManager = EntityManager.getInstance();
Collection<EntityInterface> entities = entityManager.getAllEntities();
int totalNoOfEntities = entities.size();
System.out.println("No Of entities:"+totalNoOfEntities);
Configuration cfg = new Configuration();
File file = new File(".//classes//hibernate.cfg.xml");
File file1 = new File(file.getAbsolutePath());
System.out.println(file.getAbsolutePath());
cfg.configure(file1);
SessionFactory sf = cfg.buildSessionFactory();
Session session = sf.openSession();
Transaction tx = session.beginTransaction();
for(EntityInterface entity: entities)
{
Collection<AttributeInterface> attributeCollection = entity.getAttributeCollection();
for (AttributeInterface attribute: attributeCollection)
{
if(attribute.getIsIdentified()!=null && attribute.getIsIdentified()==true && attribute.getAttributeTypeInformation().getDataType().equalsIgnoreCase("String"))
{
maskString(attribute.getColumnProperties().getName(),entity.getTableProperties().getName(), session);
}
else if(attribute.getAttributeTypeInformation().getDataType().equalsIgnoreCase("Date"))
{
maskDate(attribute.getColumnProperties().getName(),entity.getTableProperties().getName(), session);
}
}
}
// sql String to update ParticipantMedicalIdentifier table
String sqlString="truncate table CATISSUE_PART_MEDICAL_ID";
executeQuery(sqlString, session);
// sql String to delete ReportQueue table
sqlString="truncate table CATISSUE_REPORT_QUEUE";
executeQuery(sqlString, session);
// sql String to delete ReportQueue table
sqlString="truncate table CATISSUE_REPORT_PARTICIP_REL";
executeQuery(sqlString, session);
maskReportText(session);
tx.commit();
session.close();
}
catch (Exception e)
{
System.out.println(e);
}
}
private void maskDate(String columnName, String tableName, Session session) throws SQLException
{
String sqlString=null;
String dbType=session.connection().getMetaData().getDatabaseProductName();
if(dbType.equalsIgnoreCase("oracle"))
{
sqlString="update "+tableName+" set "+columnName+"=add_months("+columnName+", "+randomNumber+")";
}
if(dbType.equalsIgnoreCase("mysql"))
{
sqlString="update "+tableName+" set "+columnName+"=date_add("+columnName+", INTERVAL "+randomNumber+" MONTH);";
}
executeQuery(sqlString, session);
}
private void maskString(String columnName, String tableName, Session session)
{
String sqlString="update "+tableName+" set "+columnName+"=null";
executeQuery(sqlString, session);
}
private void maskReportText(Session session) throws SQLException
{
String sqlString=null;
String dbType=session.connection().getMetaData().getDatabaseProductName();
if(dbType.equalsIgnoreCase("oracle"))
{
sqlString="update catissue_report_content set report_data=NULL where identifier in(select a.identifier from catissue_report_content a join catissue_report_textcontent b on a.identifier=b.identifier join catissue_pathology_report c on c.identifier=b.report_id where c.REPORT_STATUS in ('DEIDENTIFIED','DEID_PROCESS_FAILED','PENDING_FOR_DEID'))";
}
if(dbType.equalsIgnoreCase("mysql"))
{
sqlString="update CATISSUE_REPORT_CONTENT as rc, CATISSUE_REPORT_TEXTCONTENT as rt, CATISSUE_PATHOLOGY_REPORT as pr set rc.REPORT_DATA=NULL where pr.IDENTIFIER=rt.report_id and rt.IDENTIFIER=rc.IDENTIFIER and pr.REPORT_STATUS in ('DEIDENTIFIED','DEID_PROCESS_FAILED','PENDING_FOR_DEID')";
}
executeQuery(sqlString, session);
}
private void executeQuery(String sqlString, Session session)
{
try
{
System.out.println(sqlString);
Connection con=session.connection();
Statement stmt=con.createStatement();
stmt.execute(sqlString);
}
catch (Exception e)
{
System.out.println("Error in maskString ");
e.printStackTrace();
}
}
}
| private_public_migrator/MaskUsingDEMetatdata.java | import java.io.File;
import java.sql.Connection;
import java.sql.Statement;
import java.sql.SQLException;
import java.util.Collection;
import java.util.Random;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.Transaction;
import org.hibernate.cfg.Configuration;
import edu.common.dynamicextensions.domaininterface.AttributeInterface;
import edu.common.dynamicextensions.domaininterface.EntityInterface;
import edu.common.dynamicextensions.entitymanager.EntityManager;
import edu.common.dynamicextensions.entitymanager.EntityManagerInterface;
public class MaskUsingDEMetatdata
{
private static int randomNumber;
public static void main(String[] args)
{
MaskUsingDEMetatdata mask=new MaskUsingDEMetatdata();
mask.maskIdentifiedData();
}
public void maskIdentifiedData()
{
Random generator = new Random();
randomNumber=generator.nextInt(50);
try
{
EntityManagerInterface entityManager = EntityManager.getInstance();
Collection<EntityInterface> entities = entityManager.getAllEntities();
int totalNoOfEntities = entities.size();
System.out.println("No Of entities:"+totalNoOfEntities);
Configuration cfg = new Configuration();
File file = new File(".//classes//hibernate.cfg.xml");
File file1 = new File(file.getAbsolutePath());
System.out.println(file.getAbsolutePath());
cfg.configure(file1);
SessionFactory sf = cfg.buildSessionFactory();
Session session = sf.openSession();
Transaction tx = session.beginTransaction();
for(EntityInterface entity: entities)
{
Collection<AttributeInterface> attributeCollection = entity.getAttributeCollection();
for (AttributeInterface attribute: attributeCollection)
{
if(attribute.getIsIdentified()!=null && attribute.getIsIdentified()==true && attribute.getAttributeTypeInformation().getDataType().equalsIgnoreCase("String"))
{
maskString(attribute.getColumnProperties().getName(),entity.getTableProperties().getName(), session);
}
else if(attribute.getAttributeTypeInformation().getDataType().equalsIgnoreCase("Date"))
{
maskDate(attribute.getColumnProperties().getName(),entity.getTableProperties().getName(), session);
}
}
}
// hql String to update ParticipantMedicalIdentifier table
String sqlString="delete from CATISSUE_PART_MEDICAL_ID";
executeQuery(sqlString, session);
maskReportText(session);
tx.commit();
session.close();
}
catch (Exception e)
{
System.out.println(e);
}
}
private void maskDate(String columnName, String tableName, Session session) throws SQLException
{
String sqlString=null;
String dbType=session.connection().getMetaData().getDatabaseProductName();
if(dbType.equalsIgnoreCase("oracle"))
{
sqlString="update "+tableName+" set "+columnName+"=add_months("+columnName+", "+randomNumber+")";
}
if(dbType.equalsIgnoreCase("mysql"))
{
sqlString="update "+tableName+" set "+columnName+"=date_add("+columnName+", INTERVAL "+randomNumber+" MONTH);";
}
executeQuery(sqlString, session);
}
private void maskString(String columnName, String tableName, Session session)
{
String sqlString="update "+tableName+" set "+columnName+"=null";
executeQuery(sqlString, session);
}
private void maskReportText(Session session) throws SQLException
{
String sqlString=null;
String dbType=session.connection().getMetaData().getDatabaseProductName();
if(dbType.equalsIgnoreCase("oracle"))
{
sqlString="update catissue_report_content set report_data=NULL where identifier in(select a.identifier from catissue_report_content a join catissue_report_textcontent b on a.identifier=b.identifier join catissue_pathology_report c on c.identifier=b.report_id where c.REPORT_STATUS in ('DEIDENTIFIED','DEID_PROCESS_FAILED','PENDING_FOR_DEID'))";
}
if(dbType.equalsIgnoreCase("mysql"))
{
sqlString="update CATISSUE_REPORT_CONTENT as rc, CATISSUE_REPORT_TEXTCONTENT as rt, CATISSUE_PATHOLOGY_REPORT as pr set rc.REPORT_DATA=NULL where pr.IDENTIFIER=rt.report_id and rt.IDENTIFIER=rc.IDENTIFIER and pr.REPORT_STATUS in ('DEIDENTIFIED','DEID_PROCESS_FAILED','PENDING_FOR_DEID')";
}
executeQuery(sqlString, session);
}
private void executeQuery(String sqlString, Session session)
{
try
{
System.out.println(sqlString);
Connection con=session.connection();
Statement stmt=con.createStatement();
stmt.execute(sqlString);
}
catch (Exception e)
{
System.out.println("Error in maskString ");
e.printStackTrace();
}
}
}
| changes for truncating report loader queue.
SVN-Revision: 11258
| private_public_migrator/MaskUsingDEMetatdata.java | changes for truncating report loader queue. | <ide><path>rivate_public_migrator/MaskUsingDEMetatdata.java
<ide> }
<ide> }
<ide>
<del> // hql String to update ParticipantMedicalIdentifier table
<del> String sqlString="delete from CATISSUE_PART_MEDICAL_ID";
<add> // sql String to update ParticipantMedicalIdentifier table
<add> String sqlString="truncate table CATISSUE_PART_MEDICAL_ID";
<add> executeQuery(sqlString, session);
<add>
<add> // sql String to delete ReportQueue table
<add> sqlString="truncate table CATISSUE_REPORT_QUEUE";
<add> executeQuery(sqlString, session);
<add>
<add> // sql String to delete ReportQueue table
<add> sqlString="truncate table CATISSUE_REPORT_PARTICIP_REL";
<ide> executeQuery(sqlString, session);
<ide>
<ide> maskReportText(session); |
|
JavaScript | apache-2.0 | 6cab7718ca204e03ac33bb3d4938b4679505b6d4 | 0 | RoonLabs/node-roon-api,RoonLabs/node-roon-api | "use strict";
/**
Roon API.
* @class RoonApi
* @param {object} desc - Information about your extension. Used by Roon to display to the end user what is trying to access Roon.
* @param {string} desc.extension_id - A unique ID for this extension. Something like @com.your_company_or_name.name_of_extension@.
* @param {string} desc.display_name - The name of your extension.
* @param {string} desc.display_version - A version string that is displayed to the user for this extension. Can be anything you want.
* @param {string} desc.publisher - The name of the developer of the extension.
* @param {string} desc.website - Website for more information about the extension.
* @param {string} desc.log_level - How much logging information to print. "all" for all messages, "none" for no messages, anything else for all messages not tagged as "quiet" by the Roon core.
* @param {RoonApi~core_paired} [desc.core_paired] - Called when Roon pairs you.
* @param {RoonApi~core_unpaired} [desc.core_unpaired] - Called when Roon unpairs you.
* @param {RoonApi~core_found} [desc.core_found] - Called when a Roon Core is found. Usually, you want to implement pairing instead of using this.
* @param {RoonApi~core_lost} [desc.core_lost] - Called when Roon Core is lost. Usually, you want to implement pairing instead of using this.
*/
/**
* @callback RoonApi~core_paired
* @param {Core} core
*/
/**
* @callback RoonApi~core_unpaired
* @param {Core} core
*/
/**
* @callback RoonApi~core_found
* @param {Core} core
*/
/**
* @callback RoonApi~core_lost
* @param {Core} core
*/
var Transport = require('./transport-websocket.js'),
MooMessage = require('./moomsg.js'),
Core = require('./core.js');
function Logger(roonapi) {
this.roonapi = roonapi;
};
Logger.prototype.log = function() {
if (this.roonapi.log_level != "none") {
console.log.apply(null, arguments);
}
};
function RoonApi(o) {
this._service_request_handlers = {};
if (typeof(o.extension_id) != 'string') throw new Error("Roon Extension options is missing the required 'extension_id' property.");
if (typeof(o.display_name) != 'string') throw new Error("Roon Extension options is missing the required 'display_name' property.");
if (typeof(o.display_version) != 'string') throw new Error("Roon Extension options is missing the required 'display_version' property.");
if (typeof(o.publisher) != 'string') throw new Error("Roon Extension options is missing the required 'publisher' property.");
if (typeof(o.email) != 'string') throw new Error("Roon Extension options is missing the required 'email' property.");
if (typeof(o.set_persisted_state) == 'undefined')
this.set_persisted_state = state => { this.save_config("roonstate", state); };
else
this.set_persisted_state = o.set_persisted_state;
if (typeof(o.get_persisted_state) == 'undefined')
this.get_persisted_state = () => { return this.load_config("roonstate") || {}; };
else
this.get_persisted_state = o.get_persisted_state;
if (o.core_found && !o.core_lost) throw new Error("Roon Extension options .core_lost is required if you implement .core_found.");
if (!o.core_found && o.core_lost) throw new Error("Roon Extension options .core_found is required if you implement .core_lost.");
if (o.core_paired && !o.core_unpaired) throw new Error("Roon Extension options .core_unpaired is required if you implement .core_paired.");
if (!o.core_paired && o.core_unpaired) throw new Error("Roon Extension options .core_paired is required if you implement .core_unpaired.");
if (o.core_paired && o.core_found) throw new Error("Roon Extension options can not specify both .core_paired and .core_found.");
if (o.core_found && typeof(o.core_found) != "function") throw new Error("Roon Extensions options has a .core_found which is not a function");
if (o.core_lost && typeof(o.core_lost) != "function") throw new Error("Roon Extensions options has a .core_lost which is not a function");
if (o.core_paired && typeof(o.core_paired) != "function") throw new Error("Roon Extensions options has a .core_paired which is not a function");
if (o.core_unpaired && typeof(o.core_unpaired) != "function") throw new Error("Roon Extensions options has a .core_unpaired which is not a function");
this.extension_reginfo = {
extension_id: o.extension_id,
display_name: o.display_name,
display_version: o.display_version,
publisher: o.publisher,
email: o.email,
required_services: [],
optional_services: [],
provided_services: []
};
if (o.website) this.extension_reginfo.website = o.website;
this.logger = new Logger(this);
this.log_level = o.log_level;
this.extension_opts = o;
this.is_paired = false;
}
/**
* Initializes the services you require and that you provide.
*
* @this RoonApi
* @param {object} services - Information about your extension. Used by Roon to display to the end user what is trying to access Roon.
* @param {object[]} [services.required_services] - A list of services which the Roon Core must provide.
* @param {object[]} [services.optional_services] - A list of services which the Roon Core may provide.
* @param {object[]} [services.provided_services] - A list of services which this extension provides to the Roon Core.
*/
RoonApi.prototype.init_services = function(o) {
if (!(o.required_services instanceof Array)) o.required_services = [];
if (!(o.optional_services instanceof Array)) o.optional_services = [];
if (!(o.provided_services instanceof Array)) o.provided_services = [];
if (o.required_services.length || o.optional_services.length)
if (!this.extension_opts.core_paired && !this.extension_opts.core_found) throw new Error("Roon Extensions options has required or optional services, but has neither .core_paired nor .core_found.");
if (this.extension_opts.core_paired) {
let svc = this.register_service("com.roonlabs.pairing:1", {
subscriptions: [
{
subscribe_name: "subscribe_pairing",
unsubscribe_name: "unsubscribe_pairing",
start: (req) => {
req.send_continue("Subscribed", { paired_core_id: this.paired_core_id });
}
}
],
methods: {
get_pairing: (req) => {
req.send_complete("Success", { paired_core_id: this.paired_core_id });
},
pair: (req) => {
this.paired_core_id = req.moo.core.core_id;
svc.send_continue_all("subscribe_pairing", "Changed", { paired_core_id: this.paired_core_id })
},
}
});
this.pairing_service_1 = {
services: [ svc ],
found_core: core => {
if (!this.paired_core_id) {
let settings = this.get_persisted_state();
settings.paired_core_id = core.core_id;
this.set_persisted_state(settings);
this.paired_core_id = core.core_id;
this.is_paired = true;
svc.send_continue_all("subscribe_pairing", "Changed", { paired_core_id: this.paired_core_id })
}
if (core.core_id == this.paired_core_id)
if (this.extension_opts.core_paired) this.extension_opts.core_paired(core);
},
lost_core: core => {
if (core.core_id == this.paired_core_id)
this.is_paired = false;
if (this.extension_opts.core_unpaired) this.extension_opts.core_unpaired(core);
},
};
o.provided_services.push(this.pairing_service_1);
}
o.provided_services.push({ services: [ this.register_service("com.roonlabs.ping:1", {
methods: {
ping: function(req) {
req.send_complete("Success");
},
}
})]})
o.required_services.forEach(svcobj => { svcobj.services.forEach(svc => { this.extension_reginfo.required_services.push(svc.name); }); });
o.optional_services.forEach(svcobj => { svcobj.services.forEach(svc => { this.extension_reginfo.optional_services.push(svc.name); }); });
o.provided_services.forEach(svcobj => { svcobj.services.forEach(svc => { this.extension_reginfo.provided_services.push(svc.name); }); });
this.services_opts = o;
};
// - pull in Sood and provide discovery methods in Node, but not in WebBrowser
//
// - implement save_config/load_config based on:
// Node: require('fs')
// WebBrowser: localStroage
//
if (typeof(window) == "undefined" || typeof(nw) !== "undefined") {
/**
* Begin the discovery process to find/connect to a Roon Core.
*/
RoonApi.prototype.start_discovery = function() {
if (this._sood) return;
this._sood = require('./sood.js');
this._sood.logger = this.logger;
this._sood_conns = {};
this._sood.on('message', msg => {
// this.logger.log(msg);
if (msg.props.service_id == "00720724-5143-4a9b-abac-0e50cba674bb" && msg.props.unique_id) {
if (this._sood_conns[msg.props.unique_id]) return;
this._sood_conns[msg.props.unique_id] = true;
this.connect(new Transport(msg.from.ip, msg.props.http_port, msg.props.tcp_port, this.logger), () => { delete(this._sood_conns[msg.props.unique_id]); });
}
});
this._sood.on('network', () => {
this._sood.query({ query_service_id: "00720724-5143-4a9b-abac-0e50cba674bb" });
});
this._sood.start(() => {
this._sood.query({ query_service_id: "00720724-5143-4a9b-abac-0e50cba674bb" });
setInterval(() => this.periodic_scan(), (10 * 1000));
this.scan_count = -1;
});
};
RoonApi.prototype.periodic_scan = function() {
this.scan_count += 1;
if (this.is_paired) return;
if ((this.scan_count < 6) || ((this.scan_count % 6) == 0)) {
this._sood.query({ query_service_id: "00720724-5143-4a9b-abac-0e50cba674bb" });
}
};
var fs = ((typeof _fs) === 'undefined') ? require('fs') : _fs;
/**
* Save a key value pair in the configuration data store.
* @param {string} key
* @param {object} value
*/
RoonApi.prototype.save_config = function(k, v) {
try {
let config;
try {
let content = fs.readFileSync("config.json", { encoding: 'utf8' });
config = JSON.parse(content) || {};
} catch (e) {
config = {};
}
if (v === undefined || v === null)
delete(config[k]);
else
config[k] = v;
fs.writeFileSync("config.json", JSON.stringify(config, null, ' '));
} catch (e) { }
};
/**
* Load a key value pair in the configuration data store.
* @param {string} key
* @return {object} value
*/
RoonApi.prototype.load_config = function(k) {
try {
let content = fs.readFileSync("config.json", { encoding: 'utf8' });
return JSON.parse(content)[k];
} catch (e) {
return undefined;
}
};
} else {
RoonApi.prototype.save_config = function(k, v) {
if (v === undefined || v === null)
localStorage.removeItem(k);
else
localStorage.setItem(k, JSON.stringify(v));
};
RoonApi.prototype.load_config = function(k) {
try {
let r = localStorage.getItem(k);
return r ? JSON.parse(r) : undefined;
} catch (e) {
return undefined;
}
};
}
RoonApi.prototype.register_service = function(svcname, spec) {
let ret = {
_subtypes: { }
};
if (spec.subscriptions) {
for (let x in spec.subscriptions) {
let s = spec.subscriptions[x];
let subname = s.subscribe_name;
ret._subtypes[subname] = { };
spec.methods[subname] = (req) => {
// XXX make sure req.body.subscription_key exists or respond send_complete with error
req.orig_send_complete = req.send_complete;
req.send_complete = function() {
this.orig_send_complete.apply(this, arguments);
delete(ret._subtypes[subname][req.moo.mooid][this.body.subscription_key]);
};
s.start(req);
if (!ret._subtypes[subname].hasOwnProperty(req.moo.mooid)) {
ret._subtypes[subname][req.moo.mooid] = { };
}
ret._subtypes[subname][req.moo.mooid][req.body.subscription_key] = req;
};
spec.methods[s.unsubscribe_name] = (req) => {
// XXX make sure req.body.subscription_key exists or respond send_complete with error
delete(ret._subtypes[subname][req.moo.mooid][req.body.subscription_key]);
if (s.end) s.end(req);
req.send_complete("Unsubscribed");
};
}
}
// process incoming requests from the other side
this._service_request_handlers[svcname] = (req, mooid) => {
// make sure the req's request name is something we know about
if (req) {
let method = spec.methods[req.msg.name];
if (method) {
method(req);
} else {
req.send_complete("InvalidRequest", { error: "unknown request name (" + svcname + ") : " + req.msg.name });
}
} else {
if (spec.subscriptions) {
for (let x in spec.subscriptions) {
let s = spec.subscriptions[x];
let subname = s.subscribe_name;
ret._subtypes[subname][mooid] = { };
if (s.end) s.end(req);
}
}
}
};
ret.name = svcname;
ret.send_continue_all = (subtype, name, props) => {
for (let id in ret._subtypes[subtype]) {
for (let x in ret._subtypes[subtype][id]) (ret._subtypes[subtype][id][x].send_continue(name, props));
}
};
ret.send_complete_all = (subtype, name, props) => {
for (let id in ret._subtypes[subtype]) {
for (let x in ret._subtypes[subtype][id]) (ret._subtypes[subtype][id][x].send_complete(name, props));
}
};
return ret;
};
RoonApi.prototype.connect = function(transport, cb) {
transport.onopen = () => {
// this.logger.log("OPEN");
transport.moo.send_request("com.roonlabs.registry:1/info",
(msg, body) => {
if (!msg) return;
let s = this.get_persisted_state();
if (s.tokens && s.tokens[body.core_id]) this.extension_reginfo.token = s.tokens[body.core_id];
transport.moo.send_request("com.roonlabs.registry:1/register", this.extension_reginfo,
(msg, body) => {
ev_registered.call(this, transport, msg, body);
});
});
};
transport.onclose = () => {
// this.logger.log("CLOSE");
if (transport.moo) {
Object.keys(this._service_request_handlers).forEach(e => this._service_request_handlers[e] && this._service_request_handlers[e](null, transport.moo.mooid));
transport.moo.close();
transport.moo = undefined;
}
transport.close();
cb && cb();
};
/*
transport.onerror = err => {
// this.logger.log("ERROR", err);
if (transport.moo) transport.moo.close();
transport.moo = undefined;
transport.close();
};*/
transport.onmessage = msg => {
// this.logger.log("GOTMSG");
var body = msg.body;
delete(msg.body);
var logging = msg && msg.headers && msg.headers["Logging"];
msg.log = ((this.log_level == "all") || (logging != "quiet"));
if (msg.verb == "REQUEST") {
if (msg.log) this.logger.log('<-', msg.verb, msg.request_id, msg.service + "/" + msg.name, body ? JSON.stringify(body) : "");
var req = new MooMessage(transport.moo, msg, body, this.logger);
var handler = this._service_request_handlers[msg.service];
if (handler)
handler(req, req.moo.mooid);
else
req.send_complete("InvalidRequest", { error: "unknown service: " + msg.service });
} else {
if (msg.log) this.logger.log('<-', msg.verb, msg.request_id, msg.name, body ? JSON.stringify(body) : "");
transport.moo.handle_response(msg, body);
}
};
return transport;
};
RoonApi.prototype.connect_to_host = function(host, http_port, tcp_port, cb) {
return this.connect(new Transport(host, http_port, tcp_port, this.logger), cb);
};
RoonApi.prototype.connect_to_host_with_token = function(host, http_port, tcp_port, token, cb) {
var transport = this.connect_to_host(host, http_port, tcp_port, cb)
transport.onopen = () => {
let args = Object.assign({}, this.extension_reginfo);
args.token = token;
transport.moo.send_request("com.roonlabs.registry:1/register_one_time_token", args,
(msg, body) => {
ev_registered.call(this, transport, msg, body);
});
};
return transport;
}
function ev_registered(transport, msg, body) {
if (!msg) { // lost connection
if (transport.moo.core) {
if (this.pairing_service_1) this.pairing_service_1.lost_core(transport.moo.core);
if (this.extension_opts.core_lost) this.extension_opts.core_lost(transport.moo.core);
transport.moo.core = undefined;
}
} else if (msg.name == "Registered") {
transport.moo.core = new Core(transport.moo, this, body, this.logger);
let settings = this.get_persisted_state();
if (!settings.tokens) settings.tokens = {};
settings.tokens[body.core_id] = body.token;
this.set_persisted_state(settings);
if (this.pairing_service_1) this.pairing_service_1.found_core(transport.moo.core);
if (this.extension_opts.core_found) this.extension_opts.core_found(transport.moo.core);
}
}
exports = module.exports = RoonApi;
| lib.js | "use strict";
/**
Roon API.
* @class RoonApi
* @param {object} desc - Information about your extension. Used by Roon to display to the end user what is trying to access Roon.
* @param {string} desc.extension_id - A unique ID for this extension. Something like @com.your_company_or_name.name_of_extension@.
* @param {string} desc.display_name - The name of your extension.
* @param {string} desc.display_version - A version string that is displayed to the user for this extension. Can be anything you want.
* @param {string} desc.publisher - The name of the developer of the extension.
* @param {string} desc.website - Website for more information about the extension.
* @param {string} desc.log_level - How much logging information to print. "all" for all messages, "none" for no messages, anything else for all messages not tagged as "quiet" by the Roon core.
* @param {RoonApi~core_paired} [desc.core_paired] - Called when Roon pairs you.
* @param {RoonApi~core_unpaired} [desc.core_unpaired] - Called when Roon unpairs you.
* @param {RoonApi~core_found} [desc.core_found] - Called when a Roon Core is found. Usually, you want to implement pairing instead of using this.
* @param {RoonApi~core_lost} [desc.core_lost] - Called when Roon Core is lost. Usually, you want to implement pairing instead of using this.
*/
/**
* @callback RoonApi~core_paired
* @param {Core} core
*/
/**
* @callback RoonApi~core_unpaired
* @param {Core} core
*/
/**
* @callback RoonApi~core_found
* @param {Core} core
*/
/**
* @callback RoonApi~core_lost
* @param {Core} core
*/
var Transport = require('./transport-websocket.js'),
MooMessage = require('./moomsg.js'),
Core = require('./core.js');
function Logger(roonapi) {
this.roonapi = roonapi;
};
Logger.prototype.log = function() {
if (this.roonapi.log_level != "none") {
console.log.apply(null, arguments);
}
};
function RoonApi(o) {
this._service_request_handlers = {};
if (typeof(o.extension_id) != 'string') throw new Error("Roon Extension options is missing the required 'extension_id' property.");
if (typeof(o.display_name) != 'string') throw new Error("Roon Extension options is missing the required 'display_name' property.");
if (typeof(o.display_version) != 'string') throw new Error("Roon Extension options is missing the required 'display_version' property.");
if (typeof(o.publisher) != 'string') throw new Error("Roon Extension options is missing the required 'publisher' property.");
if (typeof(o.email) != 'string') throw new Error("Roon Extension options is missing the required 'email' property.");
if (typeof(o.set_persisted_state) == 'undefined')
this.set_persisted_state = state => { this.save_config("roonstate", state); };
else
this.set_persisted_state = o.set_persisted_state;
if (typeof(o.get_persisted_state) == 'undefined')
this.get_persisted_state = () => { return this.load_config("roonstate") || {}; };
else
this.get_persisted_state = o.get_persisted_state;
if (o.core_found && !o.core_lost) throw new Error("Roon Extension options .core_lost is required if you implement .core_found.");
if (!o.core_found && o.core_lost) throw new Error("Roon Extension options .core_found is required if you implement .core_lost.");
if (o.core_paired && !o.core_unpaired) throw new Error("Roon Extension options .core_unpaired is required if you implement .core_paired.");
if (!o.core_paired && o.core_unpaired) throw new Error("Roon Extension options .core_paired is required if you implement .core_unpaired.");
if (o.core_paired && o.core_found) throw new Error("Roon Extension options can not specify both .core_paired and .core_found.");
if (o.core_found && typeof(o.core_found) != "function") throw new Error("Roon Extensions options has a .core_found which is not a function");
if (o.core_lost && typeof(o.core_lost) != "function") throw new Error("Roon Extensions options has a .core_lost which is not a function");
if (o.core_paired && typeof(o.core_paired) != "function") throw new Error("Roon Extensions options has a .core_paired which is not a function");
if (o.core_unpaired && typeof(o.core_unpaired) != "function") throw new Error("Roon Extensions options has a .core_unpaired which is not a function");
this.extension_reginfo = {
extension_id: o.extension_id,
display_name: o.display_name,
display_version: o.display_version,
publisher: o.publisher,
email: o.email,
required_services: [],
optional_services: [],
provided_services: []
};
if (o.website) this.extension_reginfo.website = o.website;
this.logger = new Logger(this);
this.log_level = o.log_level;
this.extension_opts = o;
this.is_paired = false;
}
/**
* Initializes the services you require and that you provide.
*
* @this RoonApi
* @param {object} services - Information about your extension. Used by Roon to display to the end user what is trying to access Roon.
* @param {object[]} [services.required_services] - A list of services which the Roon Core must provide.
* @param {object[]} [services.optional_services] - A list of services which the Roon Core may provide.
* @param {object[]} [services.provided_services] - A list of services which this extension provides to the Roon Core.
*/
RoonApi.prototype.init_services = function(o) {
if (!(o.required_services instanceof Array)) o.required_services = [];
if (!(o.optional_services instanceof Array)) o.optional_services = [];
if (!(o.provided_services instanceof Array)) o.provided_services = [];
if (o.required_services.length || o.optional_services.length)
if (!this.extension_opts.core_paired && !this.extension_opts.core_found) throw new Error("Roon Extensions options has required or optional services, but has neither .core_paired nor .core_found.");
if (this.extension_opts.core_paired) {
let svc = this.register_service("com.roonlabs.pairing:1", {
subscriptions: [
{
subscribe_name: "subscribe_pairing",
unsubscribe_name: "unsubscribe_pairing",
start: (req) => {
req.send_continue("Subscribed", { paired_core_id: this.paired_core_id });
}
}
],
methods: {
get_pairing: (req) => {
req.send_complete("Success", { paired_core_id: this.paired_core_id });
},
pair: (req) => {
this.paired_core_id = req.moo.core.core_id;
svc.send_continue_all("subscribe_pairing", "Changed", { paired_core_id: this.paired_core_id })
},
}
});
this.pairing_service_1 = {
services: [ svc ],
found_core: core => {
if (!this.paired_core_id) {
let settings = this.get_persisted_state();
settings.paired_core_id = core.core_id;
this.set_persisted_state(settings);
this.paired_core_id = core.core_id;
this.is_paired = true;
svc.send_continue_all("subscribe_pairing", "Changed", { paired_core_id: this.paired_core_id })
}
if (core.core_id == this.paired_core_id)
if (this.extension_opts.core_paired) this.extension_opts.core_paired(core);
},
lost_core: core => {
if (core.core_id == this.paired_core_id)
this.is_paired = false;
if (this.extension_opts.core_unpaired) this.extension_opts.core_unpaired(core);
},
};
o.provided_services.push(this.pairing_service_1);
}
o.provided_services.push({ services: [ this.register_service("com.roonlabs.ping:1", {
methods: {
ping: function(req) {
req.send_complete("Success");
},
}
})]})
o.required_services.forEach(svcobj => { svcobj.services.forEach(svc => { this.extension_reginfo.required_services.push(svc.name); }); });
o.optional_services.forEach(svcobj => { svcobj.services.forEach(svc => { this.extension_reginfo.optional_services.push(svc.name); }); });
o.provided_services.forEach(svcobj => { svcobj.services.forEach(svc => { this.extension_reginfo.provided_services.push(svc.name); }); });
this.services_opts = o;
};
// - pull in Sood and provide discovery methods in Node, but not in WebBrowser
//
// - implement save_config/load_config based on:
// Node: require('fs')
// WebBrowser: localStroage
//
if (typeof(window) == "undefined" || typeof(nw) !== "undefined") {
/**
* Begin the discovery process to find/connect to a Roon Core.
*/
RoonApi.prototype.start_discovery = function() {
if (this._sood) return;
this._sood = require('./sood.js');
this._sood.logger = this.logger;
this._sood_conns = {};
this._sood.on('message', msg => {
// this.logger.log(msg);
if (msg.props.service_id == "00720724-5143-4a9b-abac-0e50cba674bb" && msg.props.unique_id) {
if (this._sood_conns[msg.props.unique_id]) return;
this._sood_conns[msg.props.unique_id] = true;
var trans = new Transport(msg.from.ip, msg.props.http_port, msg.props.tcp_port, this.logger);
this.connect(trans, () => {
delete(this._sood_conns[msg.props.unique_id]);
});
}
});
this._sood.on('network', () => {
this._sood.query({ query_service_id: "00720724-5143-4a9b-abac-0e50cba674bb" });
});
this._sood.start(() => {
this._sood.query({ query_service_id: "00720724-5143-4a9b-abac-0e50cba674bb" });
setInterval(() => this.periodic_scan(), (10 * 1000));
this.scan_count = -1;
});
};
RoonApi.prototype.periodic_scan = function() {
this.scan_count += 1;
if (this.is_paired) return;
if ((this.scan_count < 6) || ((this.scan_count % 6) == 0)) {
this._sood.query({ query_service_id: "00720724-5143-4a9b-abac-0e50cba674bb" });
}
};
var fs = ((typeof _fs) === 'undefined') ? require('fs') : _fs;
/**
* Save a key value pair in the configuration data store.
* @param {string} key
* @param {object} value
*/
RoonApi.prototype.save_config = function(k, v) {
try {
let config;
try {
let content = fs.readFileSync("config.json", { encoding: 'utf8' });
config = JSON.parse(content) || {};
} catch (e) {
config = {};
}
if (v === undefined || v === null)
delete(config[k]);
else
config[k] = v;
fs.writeFileSync("config.json", JSON.stringify(config, null, ' '));
} catch (e) { }
};
/**
* Load a key value pair in the configuration data store.
* @param {string} key
* @return {object} value
*/
RoonApi.prototype.load_config = function(k) {
try {
let content = fs.readFileSync("config.json", { encoding: 'utf8' });
return JSON.parse(content)[k];
} catch (e) {
return undefined;
}
};
} else {
RoonApi.prototype.save_config = function(k, v) {
if (v === undefined || v === null)
localStorage.removeItem(k);
else
localStorage.setItem(k, JSON.stringify(v));
};
RoonApi.prototype.load_config = function(k) {
try {
let r = localStorage.getItem(k);
return r ? JSON.parse(r) : undefined;
} catch (e) {
return undefined;
}
};
}
RoonApi.prototype.register_service = function(svcname, spec) {
let ret = {
_subtypes: { }
};
if (spec.subscriptions) {
for (let x in spec.subscriptions) {
let s = spec.subscriptions[x];
let subname = s.subscribe_name;
ret._subtypes[subname] = { };
spec.methods[subname] = (req) => {
// XXX make sure req.body.subscription_key exists or respond send_complete with error
req.orig_send_complete = req.send_complete;
req.send_complete = function() {
this.orig_send_complete.apply(this, arguments);
delete(ret._subtypes[subname][req.moo.mooid][this.body.subscription_key]);
};
s.start(req);
if (!ret._subtypes[subname].hasOwnProperty(req.moo.mooid)) {
ret._subtypes[subname][req.moo.mooid] = { };
}
ret._subtypes[subname][req.moo.mooid][req.body.subscription_key] = req;
};
spec.methods[s.unsubscribe_name] = (req) => {
// XXX make sure req.body.subscription_key exists or respond send_complete with error
delete(ret._subtypes[subname][req.moo.mooid][req.body.subscription_key]);
if (s.end) s.end(req);
req.send_complete("Unsubscribed");
};
}
}
// process incoming requests from the other side
this._service_request_handlers[svcname] = (req, mooid) => {
// make sure the req's request name is something we know about
if (req) {
let method = spec.methods[req.msg.name];
if (method) {
method(req);
} else {
req.send_complete("InvalidRequest", { error: "unknown request name (" + svcname + ") : " + req.msg.name });
}
} else {
if (spec.subscriptions) {
for (let x in spec.subscriptions) {
let s = spec.subscriptions[x];
let subname = s.subscribe_name;
ret._subtypes[subname][mooid] = { };
if (s.end) s.end(req);
}
}
}
};
ret.name = svcname;
ret.send_continue_all = (subtype, name, props) => {
for (let id in ret._subtypes[subtype]) {
for (let x in ret._subtypes[subtype][id]) (ret._subtypes[subtype][id][x].send_continue(name, props));
}
};
ret.send_complete_all = (subtype, name, props) => {
for (let id in ret._subtypes[subtype]) {
for (let x in ret._subtypes[subtype][id]) (ret._subtypes[subtype][id][x].send_complete(name, props));
}
};
return ret;
};
RoonApi.prototype.connect = function(transport, cb) {
transport.onopen = () => {
// this.logger.log("OPEN");
transport.moo.send_request("com.roonlabs.registry:1/info",
(msg, body) => {
if (!msg) return;
let s = this.get_persisted_state();
if (s.tokens && s.tokens[body.core_id]) this.extension_reginfo.token = s.tokens[body.core_id];
transport.moo.send_request("com.roonlabs.registry:1/register", this.extension_reginfo,
(msg, body) => {
ev_registered.call(this, transport, msg, body);
});
});
};
transport.onclose = () => {
// this.logger.log("CLOSE");
Object.keys(this._service_request_handlers).forEach(e => this._service_request_handlers[e] && this._service_request_handlers[e](null, transport.moo.mooid));
if (transport.moo) transport.moo.close();
transport.moo = undefined;
transport.close();
cb && cb();
};
/*
transport.onerror = err => {
// this.logger.log("ERROR", err);
if (transport.moo) transport.moo.close();
transport.moo = undefined;
transport.close();
};*/
transport.onmessage = msg => {
// this.logger.log("GOTMSG");
var body = msg.body;
delete(msg.body);
var logging = msg && msg.headers && msg.headers["Logging"];
msg.log = ((this.log_level == "all") || (logging != "quiet"));
if (msg.verb == "REQUEST") {
if (msg.log) this.logger.log('<-', msg.verb, msg.request_id, msg.service + "/" + msg.name, body ? JSON.stringify(body) : "");
var req = new MooMessage(transport.moo, msg, body, this.logger);
var handler = this._service_request_handlers[msg.service];
if (handler)
handler(req, req.moo.mooid);
else
req.send_complete("InvalidRequest", { error: "unknown service: " + msg.service });
} else {
if (msg.log) this.logger.log('<-', msg.verb, msg.request_id, msg.name, body ? JSON.stringify(body) : "");
transport.moo.handle_response(msg, body);
}
};
return transport;
};
RoonApi.prototype.connect_to_host = function(host, http_port, tcp_port, cb) {
var transport = new Transport(host, http_port, tcp_port, this.logger);
return this.connect(transport, cb);
};
RoonApi.prototype.connect_to_host_with_token = function(host, http_port, tcp_port, token, cb) {
var transport = this.connect_to_host(host, http_port, tcp_port, cb)
transport.onopen = () => {
let args = Object.assign({}, this.extension_reginfo);
args.token = token;
transport.moo.send_request("com.roonlabs.registry:1/register_one_time_token", args,
(msg, body) => {
ev_registered.call(this, transport, msg, body);
});
};
return transport;
}
function ev_registered(transport, msg, body) {
if (!msg) { // lost connection
if (transport.moo.core) {
if (this.pairing_service_1) this.pairing_service_1.lost_core(transport.moo.core);
if (this.extension_opts.core_lost) this.extension_opts.core_lost(transport.moo.core);
transport.moo.core = undefined;
}
} else if (msg.name == "Registered") {
transport.moo.core = new Core(transport.moo, this, body, this.logger);
let settings = this.get_persisted_state();
if (!settings.tokens) settings.tokens = {};
settings.tokens[body.core_id] = body.token;
this.set_persisted_state(settings);
if (this.pairing_service_1) this.pairing_service_1.found_core(transport.moo.core);
if (this.extension_opts.core_found) this.extension_opts.core_found(transport.moo.core);
}
}
exports = module.exports = RoonApi;
| dont call service handlers if this connection has no moo. this is just a failed connection; no possibility for subscriptions in progress here.
| lib.js | dont call service handlers if this connection has no moo. this is just a failed connection; no possibility for subscriptions in progress here. | <ide><path>ib.js
<ide> if (msg.props.service_id == "00720724-5143-4a9b-abac-0e50cba674bb" && msg.props.unique_id) {
<ide> if (this._sood_conns[msg.props.unique_id]) return;
<ide> this._sood_conns[msg.props.unique_id] = true;
<del> var trans = new Transport(msg.from.ip, msg.props.http_port, msg.props.tcp_port, this.logger);
<del> this.connect(trans, () => {
<del> delete(this._sood_conns[msg.props.unique_id]);
<del> });
<add> this.connect(new Transport(msg.from.ip, msg.props.http_port, msg.props.tcp_port, this.logger), () => { delete(this._sood_conns[msg.props.unique_id]); });
<ide> }
<ide> });
<ide> this._sood.on('network', () => {
<ide>
<ide> transport.onclose = () => {
<ide> // this.logger.log("CLOSE");
<del> Object.keys(this._service_request_handlers).forEach(e => this._service_request_handlers[e] && this._service_request_handlers[e](null, transport.moo.mooid));
<del> if (transport.moo) transport.moo.close();
<del> transport.moo = undefined;
<add> if (transport.moo) {
<add> Object.keys(this._service_request_handlers).forEach(e => this._service_request_handlers[e] && this._service_request_handlers[e](null, transport.moo.mooid));
<add> transport.moo.close();
<add> transport.moo = undefined;
<add> }
<ide> transport.close();
<ide> cb && cb();
<ide> };
<ide> };
<ide>
<ide> RoonApi.prototype.connect_to_host = function(host, http_port, tcp_port, cb) {
<del> var transport = new Transport(host, http_port, tcp_port, this.logger);
<del> return this.connect(transport, cb);
<add> return this.connect(new Transport(host, http_port, tcp_port, this.logger), cb);
<ide> };
<ide>
<ide> RoonApi.prototype.connect_to_host_with_token = function(host, http_port, tcp_port, token, cb) { |
|
JavaScript | agpl-3.0 | 96d79f3d9a271c3fb22a2cd526f89dc171e7d265 | 0 | bewest/node-multienv,bewest/node-multienv,bewest/node-multienv |
var restify = require('restify');
var fs = require('fs');
var path = require('path');
var tmp = require('tmp');
var mv = require('mv');
var Readable = require('stream').Readable;
var bunyan = require('bunyan');
function createServer (opts) {
var cluster = opts.cluster;
var master = opts.create;
if (opts) {
opts.handleUpgrades = true;
}
var server = restify.createServer(opts);
server.on('after', restify.plugins.auditLogger({
log: bunyan.createLogger({
name: 'audit',
stream: process.stdout
})
, event: 'after'
}));
server.get('/cluster', function (req, res, next) {
var h = { };
var worker;
for (var id in cluster.workers) {
worker = cluster.workers[id];
var v = {
id: id
, custom_env: worker.custom_env
, state: worker.state
, isDead: worker.isDead && worker.isDead( )
, url: "http://" + [ 'localhost', worker.custom_env.PORT ].join(':') + '/'
, status_url: "http://" + [ 'localhost', worker.custom_env.PORT ].join(':') + '/api/v1/status.json'
};
// console.log(worker);
h[id] = v;
}
res.send(h);
next( );
});
server.get('/stats/active', function (req, res, next) {
var stats = {
total: {
active: Object.keys(cluster.workers).length
}
};
res.send(stats);
next( );
});
server.get('/resolve/:id', function (req, res, next) {
var id = parseInt(req.params.id);
var worker = cluster.workers[id] || {custom_env: { }, state: 'missing'};
// console.log('worker', worker);
var v = {
id: id
, state: worker.state
, name: path.basename(worker.custom_env.envfile, '.env')
, url: "http://" + [ 'localhost', worker.custom_env.PORT ].join(':') + '/'
, port: worker.custom_env.PORT
, status_url: "http://" + [ 'localhost', worker.custom_env.PORT ].join(':') + '/api/v1/status.json'
};
console.log(req.url, req.headers);
res.header('X-Backend-State', v.state);
res.header('X-Backend-Name', v.name);
res.header('X-Backend', v.url);
res.end( );
next( );
});
server.get('/resolve/:id/test', function (req, res, next) {
var id = parseInt(req.params.id);
var worker = cluster.workers[id] || {custom_env: { }, state: 'missing'};
var port = worker.custom_env.PORT;
// console.log('worker', port, worker);
var v = {
id: id
, state: worker.state
, envfile: worker.custom_env.envfile
, name: path.basename(worker.custom_env.envfile, '.env')
, port: port
, url: "http://" + [ 'localhost', port ].join(':') + '/'
, status_url: "http://" + [ 'localhost', port ].join(':') + '/api/v1/status.json'
};
// console.log(req.url, req.headers);
res.header('X-Backend-State', v.state);
res.header('X-Backend-Name', v.name);
res.header('X-Backend', v.url);
res.header('X-Backend-Port', v.port);
// var internal = '/x-accel-redirect/' + v.port + '/api/v1/status.json';
var internal = '@proxy/' + v.port + '/' + v.id;
console.log('internal!', internal);
res.header('x-accel-redirect', internal);
res.end( );
next( );
});
server.get('/cluster/:id', function (req, res, next) {
var id = parseInt(req.params.id);
var h = { };
var worker = cluster.workers[req.params.id];
var v = {
id: id
, custom_env: worker.custom_env
, state: worker.state
, isDead: worker.isDead && worker.isDead( )
, url: "http://" + [ 'localhost', worker.custom_env.PORT ].join(':') + '/'
, status_url: "http://" + [ 'localhost', worker.custom_env.PORT ].join(':') + '/api/v1/status.json'
};
res.send(h);
next( );
});
server.get('/history', function (req, res, next) {
var h = { };
var worker;
for (var file in master.handlers) {
worker = master.handlers[file].worker;
// console.log(worker);
var v = {
id: worker.id
, custom_env: worker.custom_env
, state: worker.state
, isDead: worker.isDead && worker.isDead( )
// , url: "http://" + [ 'localhost', worker.custom_env.PORT ].join(':') + '/'
// , status_url: "http://" + [ 'localhost', worker.custom_env.PORT ].join(':') + '/api/v1/status.json'
};
h[file] = v;
}
res.send(h);
next( );
});
server.get('/environs', function (req, res, next) {
master.scan(master.env, function iter (err, environs) {
var h = { };
var worker;
for (var i in environs) {
var environ = environs[i];
var file = environ.envfile;
var handler = master.handlers[file];
var worker = handler ? handler.worker : { };
// console.log(worker);
var v = {
id: worker.id || null
, custom_env: worker.custom_env || environ
, state: worker.state || 'missing'
, isDead: worker.isDead && worker.isDead( )
// , url: "http://" + [ 'localhost', worker.custom_env.PORT ].join(':') + '/'
// , status_url: "http://" + [ 'localhost', worker.custom_env.PORT ].join(':') + '/api/v1/status.json'
};
h[file] = v;
}
res.send(h);
next( );
});
});
server.get('/environs/:name', function (req, res, next) {
var file = path.resolve(master.env.WORKER_ENV, path.basename(req.params.name + '.env'));
var handler = master.handlers[file];
var worker = handler ? handler.worker : { };
var v = {
id: worker.id || null
, custom_env: worker.custom_env
, state: worker.state || 'missing'
, isDead: worker.isDead && worker.isDead( )
// , url: "http://" + [ 'localhost', worker.custom_env.PORT ].join(':') + '/'
// , status_url: "http://" + [ 'localhost', worker.custom_env.PORT ].join(':') + '/api/v1/status.json'
};
res.header('content-type', 'application/json');
res.send(v);
next( );
});
// server.get(/^\/environs\/(.*)\/resolver\/(.*)?$/, function (req, res, next) { })
function resolverA (req, res, next) {
req.params.name = req.params[0];
req.params.target = req.params[1] || '';
console.log('found target', req.params.target);
next( );
}
// Resolve the named environ to its backend worker and answer with an
// nginx-style x-accel-redirect pointing at the internal @proxy location.
function resolverB (req, res, next) {
    var file = path.resolve(master.env.WORKER_ENV, path.basename(req.params.name + '.env'));
    var name = req.params.name;
    var frontend = req.params.frontend;
    var hostname = req.header('host');
    var scheme = req.isSecure( ) ? 'https' : 'http';
    var handler = master.handlers[file];
    // Prefer the live cluster worker (has current state); fall back to the
    // handler's cached worker, then to an empty stub when unknown.
    var worker = handler
      ? ( cluster.workers[handler.worker.id]
        ? cluster.workers[handler.worker.id]
        : handler.worker
        )
      : { }
      ;
    // BUGFIX: when no handler exists, worker is {} and worker.custom_env is
    // undefined — reading .PORT off it threw a TypeError and killed the
    // request. Guard the env before dereferencing it.
    var custom_env = worker.custom_env || { };
    var port = custom_env.PORT || 80;
    var missing_url = scheme + '://' + hostname + '/'; // fallback URL (currently unused)
    console.log('PORT WORKER', port);
    var v = {
      id: worker.id || 'missing'
    , custom_env: custom_env
    , port: port
    , state: worker.state || 'missing'
    , isDead: worker.isDead && worker.isDead( )
    };
    // Internal redirect target: @proxy/<port>/<worker id>/<remaining path>.
    var internal = '@proxy/' + v.port + '/' + v.id + '/' + req.params.target;
    console.log('internal!', internal, v);
    res.header('x-accel-redirect', internal);
    res.end( );
    next( );
}
/*
server.get(/^\/environs\/(.*)\/resolver\/(.*)?$/, resolverA, resolverB);
server.del(/^\/environs\/(.*)\/resolver\/(.*)?$/, resolverA, resolverB);
server.post(/^\/environs\/(.*)\/resolver\/(.*)?$/, resolverA, resolverB);
server.put(/^\/environs\/(.*)\/resolver\/(.*)?$/, resolverA, resolverB);
server.head(/^\/environs\/(.*)\/resolver\/(.*)?$/, resolverA, resolverB);
server.opts(/^\/environs\/(.*)\/resolver\/(.*)?$/, resolverA, resolverB);
server.patch(/^\/environs\/(.*)\/resolver\/(.*)?$/, resolverA, resolverB);
*/
// Body/query parsers are registered here, after the resolver routes above,
// so x-accel-redirect resolution stays parser-free.
server.use(restify.plugins.queryParser( ));
server.use(restify.plugins.bodyParser( ));

// DELETE /environs/:name — remove the environ's .env file.
server.del('/environs/:name', function (req, res, next) {
    var file = path.resolve(master.env.WORKER_ENV, path.basename(req.params.name + '.env'));
    console.log("DELETING", req.params.name, file);
    fs.unlink(file, function (err) {
        console.log('OK', arguments);
        // BUGFIX: the old code ignored the unlink error and always replied
        // 204, and never called next(), so the 'after' audit logger never
        // fired for this route. Propagate failures and complete the chain.
        if (err) { return next(err); }
        res.status(204);
        res.send("");
        next( );
    });
});
// GET /environs/:name/env — dump the worker's effective environment
// (undefined body when the environ has no handler yet).
server.get('/environs/:name/env', function (req, res, next) {
    var envfile = path.resolve(master.env.WORKER_ENV, path.basename(req.params.name + '.env'));
    var handler = master.handlers[envfile];
    var worker = { };
    if (handler) {
        worker = handler.worker;
    }
    res.send(worker.custom_env);
    next( );
});
/*
server.get('/environs/:name/worker', function (req, res, next) {
var file = path.resolve(master.env.WORKER_ENV, path.basename(req.params.name + '.env'));
var worker = master.handlers[file].worker || { };
res.json(worker);
next( );
});
*/
// GET /environs/:name/env/:field — read a single field of the worker's
// environment; 404 with a descriptive body when the field is absent.
server.get('/environs/:name/env/:field', function (req, res, next) {
    var file = path.resolve(master.env.WORKER_ENV, path.basename(req.params.name + '.env'));
    // BUGFIX: master.handlers[file] is undefined for unknown environs, so
    // the old `master.handlers[file].worker || { }` threw a TypeError
    // before the 404 branch could run (the || guarded the wrong term).
    var handler = master.handlers[file];
    var worker = (handler && handler.worker) || { };
    var custom_env = worker.custom_env || { };
    var field = custom_env[req.params.field];
    console.log(req.params.field, field);
    if (typeof field !== 'undefined') {
        res.status(200);
        res.json(field);
    } else {
        res.status(404);
        res.send({msg: "field unknown", field: req.params.field});
    }
    next( );
});
// POST /environs/:name/env/:field — set one env field, then atomically
// rewrite the .env file: stream the new content to a temp file and mv it
// over the original only once the temp file is fully flushed (close event),
// so a crash mid-write can never truncate the live file.
server.post('/environs/:name/env/:field', function (req, res, next) {
    var file = path.resolve(master.env.WORKER_ENV, path.basename(req.params.name + '.env'));
    var env = master.read(file);
    var field = req.params.field;
    // Value may arrive as a route param, in the parsed body, or default ''.
    env[req.params.field] = req.params[req.params.field] || req.body[field] || '';
    var tmpname = tmp.tmpNameSync( );
    var out = fs.createWriteStream(tmpname);
    out.on('close', function (ev) {
        mv(tmpname, file, function (err) {
            console.error(err);
            if (err) return next(err);
            res.status(201);
            res.header('Location', '/environs/' + req.params.name);
            res.json(env[req.params.field]);
        });
    });
    // Serialize the env as KEY=VALUE lines with a trailing newline.
    var text = [ ];
    // BUGFIX: declare the loop variable; `x` previously leaked as an
    // implicit global (a ReferenceError under strict mode / ES modules).
    for (var x in env) {
        text.push([x, env[x] ].join('='));
    }
    text.push('');
    Readable.from(text.join("\n")).pipe(out);
});
// DELETE /environs/:name/env/:field — drop one field, then atomically
// rewrite the .env file via the same temp-file + mv pattern as the POST
// route, replying 204 once the rename completes.
server.del('/environs/:name/env/:field', function (req, res, next) {
    var file = path.resolve(master.env.WORKER_ENV, path.basename(req.params.name + '.env'));
    var env = master.read(file);
    delete env[req.params.field];
    var tmpname = tmp.tmpNameSync( );
    var out = fs.createWriteStream(tmpname);
    out.on('close', function (ev) {
        mv(tmpname, file, function (err) {
            res.status(204);
            // NOTE(review): the field was just deleted, so this always
            // serializes undefined; kept for interface compatibility.
            res.json(env[req.params.field]);
            next(err);
        });
    });
    var text = [ ];
    // BUGFIX: declare the loop variable; `x` previously leaked as an
    // implicit global (a ReferenceError under strict mode / ES modules).
    for (var x in env) {
        text.push([x, env[x] ].join('='));
    }
    text.push('');
    Readable.from(text.join("\n")).pipe(out);
});
// POST /environs/:name — create/replace an environ file from the query
// string and request body. Content is streamed to a temp file and moved
// over the target only after the stream closes, so the live file is never
// left half-written.
server.post('/environs/:name', function (req, res, next) {
// path.basename() keeps :name from escaping WORKER_ENV.
var file = path.resolve(master.env.WORKER_ENV, path.basename(req.params.name + '.env'));
var tmpname = tmp.tmpNameSync( );
var text = [ ];
// item mirrors what is written to disk and is echoed back to the client.
var item = { };
var out = fs.createWriteStream(tmpname);
// if (fs.existsSync(file)) { fs.unlinkSync(file); }
// Only after the temp file is fully flushed do we rename it into place.
out.on('close', function (ev) {
mv(tmpname, file, function (err) {
res.status(201);
res.header('Location', '/environs/' + req.params.name);
res.send(item);
// next(err) both completes the chain and propagates any mv failure.
next(err);
// setTimeout(function ( ) { }, 800);
});
});
console.log('query', req.query);
console.log('input', req.body);
var x;
// WEB_NAME is always derived from the route, written first; body keys
// are applied after query keys, so the body wins on duplicates.
text.push(['WEB_NAME', req.params.name ].join('='));
item['WEB_NAME'] = req.params.name;
for (x in req.query) {
text.push([x, req.query[x] ].join('='));
item[x] = req.query[x];
}
for (x in req.body) {
text.push([x, req.body[x] ].join('='));
item[x] = req.body[x];
}
console.log('writing', file);
// Trailing '' yields a final newline after join("\n").
text.push('');
Readable.from(text.join("\n")).pipe(out);
// out.write(text.join("\n"));
// out.write("\n");
// out.end( );
});
return server;
}
exports = module.exports = createServer;
| server.js |
var restify = require('restify');
var fs = require('fs');
var path = require('path');
var tmp = require('tmp');
var mv = require('mv');
var bunyan = require('bunyan');
function createServer (opts) {
var cluster = opts.cluster;
var master = opts.create;
if (opts) {
opts.handleUpgrades = true;
}
var server = restify.createServer(opts);
server.on('after', restify.plugins.auditLogger({
log: bunyan.createLogger({
name: 'audit',
stream: process.stdout
})
, event: 'after'
}));
server.get('/cluster', function (req, res, next) {
var h = { };
var worker;
for (var id in cluster.workers) {
worker = cluster.workers[id];
var v = {
id: id
, custom_env: worker.custom_env
, state: worker.state
, isDead: worker.isDead && worker.isDead( )
, url: "http://" + [ 'localhost', worker.custom_env.PORT ].join(':') + '/'
, status_url: "http://" + [ 'localhost', worker.custom_env.PORT ].join(':') + '/api/v1/status.json'
};
// console.log(worker);
h[id] = v;
}
res.send(h);
next( );
});
server.get('/stats/active', function (req, res, next) {
var stats = {
total: {
active: Object.keys(cluster.workers).length
}
};
res.send(stats);
next( );
});
server.get('/resolve/:id', function (req, res, next) {
var id = parseInt(req.params.id);
var worker = cluster.workers[id] || {custom_env: { }, state: 'missing'};
// console.log('worker', worker);
var v = {
id: id
, state: worker.state
, name: path.basename(worker.custom_env.envfile, '.env')
, url: "http://" + [ 'localhost', worker.custom_env.PORT ].join(':') + '/'
, port: worker.custom_env.PORT
, status_url: "http://" + [ 'localhost', worker.custom_env.PORT ].join(':') + '/api/v1/status.json'
};
console.log(req.url, req.headers);
res.header('X-Backend-State', v.state);
res.header('X-Backend-Name', v.name);
res.header('X-Backend', v.url);
res.end( );
next( );
});
server.get('/resolve/:id/test', function (req, res, next) {
var id = parseInt(req.params.id);
var worker = cluster.workers[id] || {custom_env: { }, state: 'missing'};
var port = worker.custom_env.PORT;
// console.log('worker', port, worker);
var v = {
id: id
, state: worker.state
, envfile: worker.custom_env.envfile
, name: path.basename(worker.custom_env.envfile, '.env')
, port: port
, url: "http://" + [ 'localhost', port ].join(':') + '/'
, status_url: "http://" + [ 'localhost', port ].join(':') + '/api/v1/status.json'
};
// console.log(req.url, req.headers);
res.header('X-Backend-State', v.state);
res.header('X-Backend-Name', v.name);
res.header('X-Backend', v.url);
res.header('X-Backend-Port', v.port);
// var internal = '/x-accel-redirect/' + v.port + '/api/v1/status.json';
var internal = '@proxy/' + v.port + '/' + v.id;
console.log('internal!', internal);
res.header('x-accel-redirect', internal);
res.end( );
next( );
});
server.get('/cluster/:id', function (req, res, next) {
var id = parseInt(req.params.id);
var h = { };
var worker = cluster.workers[req.params.id];
var v = {
id: id
, custom_env: worker.custom_env
, state: worker.state
, isDead: worker.isDead && worker.isDead( )
, url: "http://" + [ 'localhost', worker.custom_env.PORT ].join(':') + '/'
, status_url: "http://" + [ 'localhost', worker.custom_env.PORT ].join(':') + '/api/v1/status.json'
};
res.send(h);
next( );
});
server.get('/history', function (req, res, next) {
var h = { };
var worker;
for (var file in master.handlers) {
worker = master.handlers[file].worker;
// console.log(worker);
var v = {
id: worker.id
, custom_env: worker.custom_env
, state: worker.state
, isDead: worker.isDead && worker.isDead( )
// , url: "http://" + [ 'localhost', worker.custom_env.PORT ].join(':') + '/'
// , status_url: "http://" + [ 'localhost', worker.custom_env.PORT ].join(':') + '/api/v1/status.json'
};
h[file] = v;
}
res.send(h);
next( );
});
server.get('/environs', function (req, res, next) {
master.scan(master.env, function iter (err, environs) {
var h = { };
var worker;
for (var i in environs) {
var environ = environs[i];
var file = environ.envfile;
var handler = master.handlers[file];
var worker = handler ? handler.worker : { };
// console.log(worker);
var v = {
id: worker.id || null
, custom_env: worker.custom_env || environ
, state: worker.state || 'missing'
, isDead: worker.isDead && worker.isDead( )
// , url: "http://" + [ 'localhost', worker.custom_env.PORT ].join(':') + '/'
// , status_url: "http://" + [ 'localhost', worker.custom_env.PORT ].join(':') + '/api/v1/status.json'
};
h[file] = v;
}
res.send(h);
next( );
});
});
server.get('/environs/:name', function (req, res, next) {
var file = path.resolve(master.env.WORKER_ENV, path.basename(req.params.name + '.env'));
var handler = master.handlers[file];
var worker = handler ? handler.worker : { };
var v = {
id: worker.id || null
, custom_env: worker.custom_env
, state: worker.state || 'missing'
, isDead: worker.isDead && worker.isDead( )
// , url: "http://" + [ 'localhost', worker.custom_env.PORT ].join(':') + '/'
// , status_url: "http://" + [ 'localhost', worker.custom_env.PORT ].join(':') + '/api/v1/status.json'
};
res.header('content-type', 'application/json');
res.send(v);
next( );
});
// server.get(/^\/environs\/(.*)\/resolver\/(.*)?$/, function (req, res, next) { })
function resolverA (req, res, next) {
req.params.name = req.params[0];
req.params.target = req.params[1] || '';
console.log('found target', req.params.target);
next( );
}
function resolverB (req, res, next) {
var file = path.resolve(master.env.WORKER_ENV, path.basename(req.params.name + '.env'));
var name = req.params.name;
var frontend = req.params.frontend;
var hostname = req.header('host');
var scheme = req.isSecure( ) ? 'https' : 'http';
var handler = master.handlers[file];
var worker = handler
? ( cluster.workers[handler.worker.id]
? cluster.workers[handler.worker.id]
: handler.worker
)
: { }
;
var port = worker.custom_env.PORT || 80;
var missing_url = scheme + '://' + hostname + '/';
// console.log('LIVE WORKER', worker);
console.log('PORT WORKER', port);
var v = {
id: worker.id || 'missing'
, custom_env: worker.custom_env
, port: port
, state: worker.state || 'missing'
, isDead: worker.isDead && worker.isDead( )
// , url: "http://" + [ 'localhost', worker.custom_env.PORT ].join(':') + '/'
// , status_url: "http://" + [ 'localhost', worker.custom_env.PORT ].join(':') + '/api/v1/status.json'
};
var internal = '@proxy/' + v.port + '/' + v.id + '/' + req.params.target;
// var internal = '@proxy/' + v.port + '/' + v.id;
console.log('internal!', internal, v);
res.header('x-accel-redirect', internal);
res.end( );
next( );
}
/*
server.get(/^\/environs\/(.*)\/resolver\/(.*)?$/, resolverA, resolverB);
server.del(/^\/environs\/(.*)\/resolver\/(.*)?$/, resolverA, resolverB);
server.post(/^\/environs\/(.*)\/resolver\/(.*)?$/, resolverA, resolverB);
server.put(/^\/environs\/(.*)\/resolver\/(.*)?$/, resolverA, resolverB);
server.head(/^\/environs\/(.*)\/resolver\/(.*)?$/, resolverA, resolverB);
server.opts(/^\/environs\/(.*)\/resolver\/(.*)?$/, resolverA, resolverB);
server.patch(/^\/environs\/(.*)\/resolver\/(.*)?$/, resolverA, resolverB);
*/
server.use(restify.plugins.queryParser( ));
server.use(restify.plugins.bodyParser( ));
server.del('/environs/:name', function (req, res, next) {
var file = path.resolve(master.env.WORKER_ENV, path.basename(req.params.name + '.env'));
console.log("DELETING", req.params.name, file);
fs.unlink(file, function (ev) {
console.log('OK', arguments);
res.status(204);
res.send("");
// next( );
});
});
server.get('/environs/:name/env', function (req, res, next) {
var file = path.resolve(master.env.WORKER_ENV, path.basename(req.params.name + '.env'));
var handler = master.handlers[file];
var worker = handler ? handler.worker : { };
res.send(worker.custom_env);
next( );
});
/*
server.get('/environs/:name/worker', function (req, res, next) {
var file = path.resolve(master.env.WORKER_ENV, path.basename(req.params.name + '.env'));
var worker = master.handlers[file].worker || { };
res.json(worker);
next( );
});
*/
server.get('/environs/:name/env/:field', function (req, res, next) {
var file = path.resolve(master.env.WORKER_ENV, path.basename(req.params.name + '.env'));
var worker = master.handlers[file].worker || { };
var field = worker.custom_env[req.params.field];
console.log(req.params.field, field);
if (typeof field !== 'undefined') {
res.status(200);
res.json(field);
} else {
res.status(404);
res.send({msg: "field unknown", field: req.params.field});
}
next( );
});
server.post('/environs/:name/env/:field', function (req, res, next) {
var file = path.resolve(master.env.WORKER_ENV, path.basename(req.params.name + '.env'));
var env = master.read(file);
var field = req.params.field;
env[req.params.field] = req.params[req.params.field] || req.body[field] || '';
var tmpname = tmp.tmpNameSync( );
var out = fs.createWriteStream(tmpname);
if (fs.existsSync(file)) { fs.unlinkSync(file); }
out.on('close', function (ev) {
mv(tmpname, file, function (err) {
console.error(err);
if (err) return next(err);
res.status(201);
res.header('Location', '/environs/' + req.params.name);
res.json(env[req.params.field]);
// setTimeout(function ( ) { }, 800);
});
});
var text = [ ];
for (x in env) {
text.push([x, env[x] ].join('='));
}
out.write(text.join("\n"));
out.write("\n");
out.end( );
// next( );
});
server.del('/environs/:name/env/:field', function (req, res, next) {
var file = path.resolve(master.env.WORKER_ENV, path.basename(req.params.name + '.env'));
var env = master.read(file);
delete env[req.params.field];
var tmpname = tmp.tmpNameSync( );
var out = fs.createWriteStream(tmpname);
out.on('close', function (ev) {
mv(tmpname, file, function (err) {
res.status(204);
res.json(env[req.params.field]);
next(err);
});
});
var text = [ ];
for (x in env) {
text.push([x, env[x] ].join('='));
}
if (fs.existsSync(file)) {
fs.unlinkSync(file);
}
out.write(text.join("\n"));
out.write("\n");
out.end( );
// res.status(201);
// res.header('Location', '/environs/' + req.params.name);
// next( );
});
server.post('/environs/:name', function (req, res, next) {
var file = path.resolve(master.env.WORKER_ENV, path.basename(req.params.name + '.env'));
var tmpname = tmp.tmpNameSync( );
var text = [ ];
var item = { };
var out = fs.createWriteStream(tmpname);
if (fs.existsSync(file)) { fs.unlinkSync(file); }
out.on('close', function (ev) {
mv(tmpname, file, function (err) {
res.status(201);
res.header('Location', '/environs/' + req.params.name);
res.send(item);
next(err);
// setTimeout(function ( ) { }, 800);
});
});
console.log('query', req.query);
console.log('input', req.body);
var x;
text.push(['WEB_NAME', req.params.name ].join('='));
item['WEB_NAME'] = req.params.name;
for (x in req.query) {
text.push([x, req.query[x] ].join('='));
item[x] = req.query[x];
}
for (x in req.body) {
text.push([x, req.body[x] ].join('='));
item[x] = req.body[x];
}
console.log('writing', file);
out.write(text.join("\n"));
out.write("\n");
out.end( );
});
return server;
}
exports = module.exports = createServer;
| try to eliminate data loss using streams
Try to follow streams best practices.
| server.js | try to eliminate data loss using streams | <ide><path>erver.js
<ide> var path = require('path');
<ide> var tmp = require('tmp');
<ide> var mv = require('mv');
<add>var Readable = require('stream').Readable;
<ide> var bunyan = require('bunyan');
<ide>
<ide> function createServer (opts) {
<ide> env[req.params.field] = req.params[req.params.field] || req.body[field] || '';
<ide> var tmpname = tmp.tmpNameSync( );
<ide> var out = fs.createWriteStream(tmpname);
<del> if (fs.existsSync(file)) { fs.unlinkSync(file); }
<add> // if (fs.existsSync(file)) { fs.unlinkSync(file); }
<ide> out.on('close', function (ev) {
<ide> mv(tmpname, file, function (err) {
<ide> console.error(err);
<ide> text.push([x, env[x] ].join('='));
<ide> }
<ide>
<del> out.write(text.join("\n"));
<del> out.write("\n");
<del> out.end( );
<add> text.push('');
<add> Readable.from(text.join("\n")).pipe(out);
<add>
<add> // out.write(text.join("\n"));
<add> // out.write("\n");
<add> // out.end( );
<ide> // next( );
<ide>
<ide> });
<ide> text.push([x, env[x] ].join('='));
<ide> }
<ide>
<del> if (fs.existsSync(file)) {
<del> fs.unlinkSync(file);
<del> }
<del>
<del> out.write(text.join("\n"));
<del> out.write("\n");
<del> out.end( );
<add> text.push('');
<add> Readable.from(text.join("\n")).pipe(out);
<add>
<add> // if (fs.existsSync(file)) { fs.unlinkSync(file); }
<add>
<add> // out.write(text.join("\n"));
<add> // out.write("\n");
<add> // out.end( );
<ide> // res.status(201);
<ide> // res.header('Location', '/environs/' + req.params.name);
<ide> // next( );
<ide> var text = [ ];
<ide> var item = { };
<ide> var out = fs.createWriteStream(tmpname);
<del> if (fs.existsSync(file)) { fs.unlinkSync(file); }
<add> // if (fs.existsSync(file)) { fs.unlinkSync(file); }
<ide> out.on('close', function (ev) {
<ide> mv(tmpname, file, function (err) {
<ide> res.status(201);
<ide> }
<ide>
<ide> console.log('writing', file);
<del> out.write(text.join("\n"));
<del>
<del> out.write("\n");
<del> out.end( );
<add> text.push('');
<add> Readable.from(text.join("\n")).pipe(out);
<add> // out.write(text.join("\n"));
<add>
<add> // out.write("\n");
<add> // out.end( );
<ide> });
<ide>
<ide> return server; |
|
Java | apache-2.0 | 0296736cd5dff10981b8176b25c34745c56bb6e3 | 0 | Doloops/arondor-common-reflection,Doloops/arondor-common-reflection | /*
* Copyright 2013, Arondor
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.arondor.common.reflection.gwt.client.view;
import java.util.Collection;
import java.util.logging.Logger;
import com.arondor.common.reflection.gwt.client.presenter.ImplementingClassPresenter.Display;
import com.google.gwt.event.dom.client.ChangeEvent;
import com.google.gwt.event.dom.client.ChangeHandler;
import com.google.gwt.event.logical.shared.ValueChangeHandler;
import com.google.gwt.event.shared.HandlerRegistration;
import com.google.gwt.user.client.ui.Composite;
import com.google.gwt.user.client.ui.ListBox;
public class ImplementingClassView extends Composite implements Display
{
private static final Logger LOG = Logger.getLogger(ImplementingClassView.class.getName());
private ListBox implementingListInput = new ListBox();
private String selectedClass = null;
public ImplementingClassView()
{
initWidget(implementingListInput);
}
public void setImplementingClasses(Collection<String> implementingClasses)
{
LOG.finest("Selected classes : " + implementingClasses);
implementingListInput.clear();
for (String implementingClass : implementingClasses)
{
implementingListInput.addItem(implementingClass);
if (selectedClass != null && selectedClass.equals(implementingClass))
{
implementingListInput.setSelectedIndex(implementingListInput.getItemCount() - 1);
}
}
}
private void doSelect(String className)
{
selectedClass = className;
if (className == null)
{
return;
}
LOG.finest("Selecting class : " + className + " from a choice of " + implementingListInput.getItemCount()
+ " items");
for (int idx = 0; idx < implementingListInput.getItemCount(); idx++)
{
if (implementingListInput.getItemText(idx).equals(className))
{
implementingListInput.setSelectedIndex(idx);
return;
}
}
implementingListInput.addItem(className);
implementingListInput.setSelectedIndex(implementingListInput.getItemCount() - 1);
// LOG.warning("Could not select class : " + className);
}
public HandlerRegistration addValueChangeHandler(final ValueChangeHandler<String> valueChangeHandler)
{
return implementingListInput.addChangeHandler(new ChangeHandler()
{
public void onChange(ChangeEvent event)
{
if (implementingListInput.getSelectedIndex() != -1)
{
String value = implementingListInput.getValue(implementingListInput.getSelectedIndex());
selectedClass = value;
valueChangeHandler.onValueChange(new MyValueChangeEvent<String>(value));
}
}
});
}
public void setBaseClassName(String baseClassName)
{
doSelect(baseClassName);
}
public void selectImplementingClass(String implementingClassName)
{
doSelect(implementingClassName);
}
}
| arondor-common-reflection-gwt/src/main/java/com/arondor/common/reflection/gwt/client/view/ImplementingClassView.java | /*
* Copyright 2013, Arondor
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.arondor.common.reflection.gwt.client.view;
import java.util.Collection;
import java.util.logging.Logger;
import com.arondor.common.reflection.gwt.client.presenter.ImplementingClassPresenter.Display;
import com.google.gwt.event.dom.client.ChangeEvent;
import com.google.gwt.event.dom.client.ChangeHandler;
import com.google.gwt.event.logical.shared.ValueChangeHandler;
import com.google.gwt.event.shared.HandlerRegistration;
import com.google.gwt.user.client.ui.Composite;
import com.google.gwt.user.client.ui.ListBox;
public class ImplementingClassView extends Composite implements Display
{
private static final Logger LOG = Logger.getLogger(ImplementingClassView.class.getName());
private ListBox implementingListInput = new ListBox();
private String selectedClass = null;
public ImplementingClassView()
{
initWidget(implementingListInput);
}
public void setImplementingClasses(Collection<String> implementingClasses)
{
LOG.finest("Selected classes : " + implementingClasses);
implementingListInput.clear();
for (String implementingClass : implementingClasses)
{
implementingListInput.addItem(implementingClass);
if (selectedClass != null && selectedClass.equals(implementingClass))
{
implementingListInput.setSelectedIndex(implementingListInput.getItemCount() - 1);
}
}
}
private void doSelect(String className)
{
selectedClass = className;
LOG.finest("Selecting class : " + className + " from a choice of " + implementingListInput.getItemCount()
+ " items");
for (int idx = 0; idx < implementingListInput.getItemCount(); idx++)
{
if (implementingListInput.getItemText(idx).equals(className))
{
implementingListInput.setSelectedIndex(idx);
return;
}
}
LOG.warning("Could not select class : " + className);
}
public HandlerRegistration addValueChangeHandler(final ValueChangeHandler<String> valueChangeHandler)
{
return implementingListInput.addChangeHandler(new ChangeHandler()
{
public void onChange(ChangeEvent event)
{
if (implementingListInput.getSelectedIndex() != -1)
{
String value = implementingListInput.getValue(implementingListInput.getSelectedIndex());
selectedClass = value;
valueChangeHandler.onValueChange(new MyValueChangeEvent<String>(value));
}
}
});
}
public void setBaseClassName(String baseClassName)
{
doSelect(baseClassName);
}
public void selectImplementingClass(String implementingClassName)
{
doSelect(implementingClassName);
}
}
| View shall handle when selected item is not part of the choices,
wouldn't it ? | arondor-common-reflection-gwt/src/main/java/com/arondor/common/reflection/gwt/client/view/ImplementingClassView.java | View shall handle when selected item is not part of the choices, wouldn't it ? | <ide><path>rondor-common-reflection-gwt/src/main/java/com/arondor/common/reflection/gwt/client/view/ImplementingClassView.java
<ide> private void doSelect(String className)
<ide> {
<ide> selectedClass = className;
<add> if (className == null)
<add> {
<add> return;
<add> }
<ide> LOG.finest("Selecting class : " + className + " from a choice of " + implementingListInput.getItemCount()
<ide> + " items");
<ide> for (int idx = 0; idx < implementingListInput.getItemCount(); idx++)
<ide> return;
<ide> }
<ide> }
<del> LOG.warning("Could not select class : " + className);
<add> implementingListInput.addItem(className);
<add> implementingListInput.setSelectedIndex(implementingListInput.getItemCount() - 1);
<add> // LOG.warning("Could not select class : " + className);
<ide> }
<ide>
<ide> public HandlerRegistration addValueChangeHandler(final ValueChangeHandler<String> valueChangeHandler) |
|
Java | apache-2.0 | 9b500d994a212d5ea585c26248163cafce0b1a64 | 0 | itfsw/mybatis-generator-plugin | /*
* Copyright (c) 2017.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.itfsw.mybatis.generator.plugins;
import com.itfsw.mybatis.generator.plugins.utils.BasePlugin;
import org.mybatis.generator.api.IntrospectedColumn;
import org.mybatis.generator.api.IntrospectedTable;
import org.mybatis.generator.api.dom.java.*;
import org.mybatis.generator.internal.util.JavaBeansUtil;
/**
* ---------------------------------------------------------------------------
* 数据Model属性对应Column获取插件
* ---------------------------------------------------------------------------
* @author: hewei
* @time:2017/1/17 11:20
* ---------------------------------------------------------------------------
*/
public class ModelColumnPlugin extends BasePlugin {
public static final String ENUM_NAME = "Column"; // 内部Enum名
/**
* Model Methods 生成
* 具体执行顺序 http://www.mybatis.org/generator/reference/pluggingIn.html
* @param topLevelClass
* @param introspectedTable
* @return
*/
@Override
public boolean modelBaseRecordClassGenerated(TopLevelClass topLevelClass, IntrospectedTable introspectedTable) {
topLevelClass.addInnerEnum(this.generateColumnEnum(topLevelClass, introspectedTable));
return true;
}
/**
* Model Methods 生成
* 具体执行顺序 http://www.mybatis.org/generator/reference/pluggingIn.html
* @param topLevelClass
* @param introspectedTable
* @return
*/
@Override
public boolean modelRecordWithBLOBsClassGenerated(TopLevelClass topLevelClass, IntrospectedTable introspectedTable) {
topLevelClass.addInnerEnum(this.generateColumnEnum(topLevelClass, introspectedTable));
return true;
}
/**
* 生成Column字段枚举
*
* @param topLevelClass
* @param introspectedTable
* @return
*/
private InnerEnum generateColumnEnum(TopLevelClass topLevelClass, IntrospectedTable introspectedTable){
// 生成内部枚举
InnerEnum innerEnum = new InnerEnum(new FullyQualifiedJavaType(ENUM_NAME));
innerEnum.setVisibility(JavaVisibility.PUBLIC);
innerEnum.setStatic(true);
commentGenerator.addEnumComment(innerEnum, introspectedTable);
logger.debug("itfsw(数据Model属性对应Column获取插件):" + topLevelClass.getType().getShortName() + "增加内部Builder类。");
// 生成属性和构造函数
Field columnField = new Field("column", FullyQualifiedJavaType.getStringInstance());
columnField.setVisibility(JavaVisibility.PRIVATE);
columnField.setFinal(true);
commentGenerator.addFieldComment(columnField, introspectedTable);
innerEnum.addField(columnField);
Method mValue = new Method("value");
mValue.setVisibility(JavaVisibility.PUBLIC);
mValue.setReturnType(FullyQualifiedJavaType.getStringInstance());
mValue.addBodyLine("return this.column;");
commentGenerator.addGeneralMethodComment(mValue, introspectedTable);
innerEnum.addMethod(mValue);
Method mGetValue = new Method("getValue");
mGetValue.setVisibility(JavaVisibility.PUBLIC);
mGetValue.setReturnType(FullyQualifiedJavaType.getStringInstance());
mGetValue.addBodyLine("return this.column;");
commentGenerator.addGeneralMethodComment(mGetValue, introspectedTable);
innerEnum.addMethod(mGetValue);
Method constructor = new Method(ENUM_NAME);
constructor.setConstructor(true);
constructor.addBodyLine("this.column = column;");
constructor.addParameter(new Parameter(FullyQualifiedJavaType.getStringInstance(), "column"));
commentGenerator.addGeneralMethodComment(constructor, introspectedTable);
innerEnum.addMethod(constructor);
logger.debug("itfsw(数据Model属性对应Column获取插件):" + topLevelClass.getType().getShortName() + ".Column增加构造方法和column属性。");
// Enum枚举
for (IntrospectedColumn introspectedColumn : introspectedTable.getAllColumns()) {
Field field = JavaBeansUtil.getJavaBeansField(introspectedColumn, context, introspectedTable);
StringBuffer sb = new StringBuffer();
sb.append(field.getName());
sb.append("(\"");
sb.append(introspectedColumn.getActualColumnName());
sb.append("\")");
innerEnum.addEnumConstant(sb.toString());
logger.debug("itfsw(数据Model属性对应Column获取插件):" + topLevelClass.getType().getShortName() + ".Column增加" + field.getName() + "枚举。");
}
// asc 和 desc 方法
Method desc = new Method("desc");
desc.setVisibility(JavaVisibility.PUBLIC);
desc.setReturnType(FullyQualifiedJavaType.getStringInstance());
desc.addBodyLine("return this.column + \" DESC\";");
commentGenerator.addGeneralMethodComment(desc, introspectedTable);
innerEnum.addMethod(desc);
Method asc = new Method("asc");
asc.setVisibility(JavaVisibility.PUBLIC);
asc.setReturnType(FullyQualifiedJavaType.getStringInstance());
asc.addBodyLine("return this.column + \" ASC\";");
commentGenerator.addGeneralMethodComment(asc, introspectedTable);
innerEnum.addMethod(asc);
logger.debug("itfsw(数据Model属性对应Column获取插件):" + topLevelClass.getType().getShortName() + ".Column增加asc()和desc()方法。");
return innerEnum;
}
}
| src/main/java/com/itfsw/mybatis/generator/plugins/ModelColumnPlugin.java | /*
* Copyright (c) 2017.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.itfsw.mybatis.generator.plugins;
import com.itfsw.mybatis.generator.plugins.utils.BasePlugin;
import org.mybatis.generator.api.IntrospectedColumn;
import org.mybatis.generator.api.IntrospectedTable;
import org.mybatis.generator.api.dom.java.*;
import org.mybatis.generator.internal.util.JavaBeansUtil;
import java.util.List;
/**
* ---------------------------------------------------------------------------
* 数据Model属性对应Column获取插件
* ---------------------------------------------------------------------------
* @author: hewei
* @time:2017/1/17 11:20
* ---------------------------------------------------------------------------
*/
public class ModelColumnPlugin extends BasePlugin {
public static final String ENUM_NAME = "Column"; // 内部Enum名
/**
* Model Methods 生成
* 具体执行顺序 http://www.mybatis.org/generator/reference/pluggingIn.html
* @param topLevelClass
* @param introspectedTable
* @return
*/
@Override
public boolean modelBaseRecordClassGenerated(TopLevelClass topLevelClass, IntrospectedTable introspectedTable) {
List<Field> fields = topLevelClass.getFields();
// 生成内部枚举
InnerEnum innerEnum = new InnerEnum(new FullyQualifiedJavaType(ENUM_NAME));
innerEnum.setVisibility(JavaVisibility.PUBLIC);
innerEnum.setStatic(true);
commentGenerator.addEnumComment(innerEnum, introspectedTable);
logger.debug("itfsw(数据Model属性对应Column获取插件):" + topLevelClass.getType().getShortName() + "增加内部Builder类。");
// 生成属性和构造函数
Field columnField = new Field("column", FullyQualifiedJavaType.getStringInstance());
columnField.setVisibility(JavaVisibility.PRIVATE);
columnField.setFinal(true);
commentGenerator.addFieldComment(columnField, introspectedTable);
innerEnum.addField(columnField);
Method mValue = new Method("value");
mValue.setVisibility(JavaVisibility.PUBLIC);
mValue.setReturnType(FullyQualifiedJavaType.getStringInstance());
mValue.addBodyLine("return this.column;");
commentGenerator.addGeneralMethodComment(mValue, introspectedTable);
innerEnum.addMethod(mValue);
Method mGetValue = new Method("getValue");
mGetValue.setVisibility(JavaVisibility.PUBLIC);
mGetValue.setReturnType(FullyQualifiedJavaType.getStringInstance());
mGetValue.addBodyLine("return this.column;");
commentGenerator.addGeneralMethodComment(mGetValue, introspectedTable);
innerEnum.addMethod(mGetValue);
Method constructor = new Method(ENUM_NAME);
constructor.setConstructor(true);
constructor.addBodyLine("this.column = column;");
constructor.addParameter(new Parameter(FullyQualifiedJavaType.getStringInstance(), "column"));
commentGenerator.addGeneralMethodComment(constructor, introspectedTable);
innerEnum.addMethod(constructor);
logger.debug("itfsw(数据Model属性对应Column获取插件):" + topLevelClass.getType().getShortName() + ".Column增加构造方法和column属性。");
// Enum枚举
for (IntrospectedColumn introspectedColumn : introspectedTable.getAllColumns()) {
Field field = JavaBeansUtil.getJavaBeansField(introspectedColumn, context, introspectedTable);
StringBuffer sb = new StringBuffer();
sb.append(field.getName());
sb.append("(\"");
sb.append(introspectedColumn.getActualColumnName());
sb.append("\")");
innerEnum.addEnumConstant(sb.toString());
logger.debug("itfsw(数据Model属性对应Column获取插件):" + topLevelClass.getType().getShortName() + ".Column增加" + field.getName() + "枚举。");
}
// asc 和 desc 方法
Method desc = new Method("desc");
desc.setVisibility(JavaVisibility.PUBLIC);
desc.setReturnType(FullyQualifiedJavaType.getStringInstance());
desc.addBodyLine("return this.column + \" DESC\";");
commentGenerator.addGeneralMethodComment(desc, introspectedTable);
innerEnum.addMethod(desc);
Method asc = new Method("asc");
asc.setVisibility(JavaVisibility.PUBLIC);
asc.setReturnType(FullyQualifiedJavaType.getStringInstance());
asc.addBodyLine("return this.column + \" ASC\";");
commentGenerator.addGeneralMethodComment(asc, introspectedTable);
innerEnum.addMethod(asc);
logger.debug("itfsw(数据Model属性对应Column获取插件):" + topLevelClass.getType().getShortName() + ".Column增加asc()和desc()方法。");
topLevelClass.addInnerEnum(innerEnum);
return true;
}
}
| 解决Model生成WithBLOBs类时,Column枚举在WithBLOBs类也应该存在的问题
| src/main/java/com/itfsw/mybatis/generator/plugins/ModelColumnPlugin.java | 解决Model生成WithBLOBs类时,Column枚举在WithBLOBs类也应该存在的问题 | <ide><path>rc/main/java/com/itfsw/mybatis/generator/plugins/ModelColumnPlugin.java
<ide> import org.mybatis.generator.api.dom.java.*;
<ide> import org.mybatis.generator.internal.util.JavaBeansUtil;
<ide>
<del>import java.util.List;
<del>
<ide> /**
<ide> * ---------------------------------------------------------------------------
<ide> * 数据Model属性对应Column获取插件
<ide> */
<ide> @Override
<ide> public boolean modelBaseRecordClassGenerated(TopLevelClass topLevelClass, IntrospectedTable introspectedTable) {
<del> List<Field> fields = topLevelClass.getFields();
<add> topLevelClass.addInnerEnum(this.generateColumnEnum(topLevelClass, introspectedTable));
<add> return true;
<add> }
<ide>
<add> /**
<add> * Model Methods 生成
<add> * 具体执行顺序 http://www.mybatis.org/generator/reference/pluggingIn.html
<add> * @param topLevelClass
<add> * @param introspectedTable
<add> * @return
<add> */
<add> @Override
<add> public boolean modelRecordWithBLOBsClassGenerated(TopLevelClass topLevelClass, IntrospectedTable introspectedTable) {
<add> topLevelClass.addInnerEnum(this.generateColumnEnum(topLevelClass, introspectedTable));
<add> return true;
<add> }
<add>
<add> /**
<add> * 生成Column字段枚举
<add> *
<add> * @param topLevelClass
<add> * @param introspectedTable
<add> * @return
<add> */
<add> private InnerEnum generateColumnEnum(TopLevelClass topLevelClass, IntrospectedTable introspectedTable){
<ide> // 生成内部枚举
<ide> InnerEnum innerEnum = new InnerEnum(new FullyQualifiedJavaType(ENUM_NAME));
<ide> innerEnum.setVisibility(JavaVisibility.PUBLIC);
<ide> innerEnum.addMethod(asc);
<ide> logger.debug("itfsw(数据Model属性对应Column获取插件):" + topLevelClass.getType().getShortName() + ".Column增加asc()和desc()方法。");
<ide>
<del> topLevelClass.addInnerEnum(innerEnum);
<del> return true;
<add> return innerEnum;
<ide> }
<ide> } |
|
Java | lgpl-2.1 | b1aa1fccaed18b52b8a2a58e1fe31e5eefec143e | 0 | dizzzz/exist,wolfgangmm/exist,kohsah/exist,jessealama/exist,eXist-db/exist,adamretter/exist,ambs/exist,patczar/exist,shabanovd/exist,windauer/exist,ljo/exist,RemiKoutcherawy/exist,wshager/exist,lcahlander/exist,eXist-db/exist,wolfgangmm/exist,MjAbuz/exist,jensopetersen/exist,patczar/exist,patczar/exist,MjAbuz/exist,shabanovd/exist,lcahlander/exist,adamretter/exist,zwobit/exist,shabanovd/exist,dizzzz/exist,wshager/exist,zwobit/exist,RemiKoutcherawy/exist,lcahlander/exist,shabanovd/exist,olvidalo/exist,kohsah/exist,opax/exist,jessealama/exist,dizzzz/exist,hungerburg/exist,ljo/exist,jensopetersen/exist,lcahlander/exist,jensopetersen/exist,windauer/exist,hungerburg/exist,hungerburg/exist,MjAbuz/exist,kohsah/exist,windauer/exist,kohsah/exist,ljo/exist,eXist-db/exist,lcahlander/exist,wolfgangmm/exist,opax/exist,ambs/exist,ambs/exist,olvidalo/exist,lcahlander/exist,zwobit/exist,adamretter/exist,eXist-db/exist,eXist-db/exist,ambs/exist,wshager/exist,patczar/exist,windauer/exist,hungerburg/exist,eXist-db/exist,joewiz/exist,hungerburg/exist,jessealama/exist,opax/exist,dizzzz/exist,opax/exist,olvidalo/exist,dizzzz/exist,MjAbuz/exist,zwobit/exist,kohsah/exist,zwobit/exist,jessealama/exist,jensopetersen/exist,olvidalo/exist,olvidalo/exist,shabanovd/exist,joewiz/exist,windauer/exist,RemiKoutcherawy/exist,joewiz/exist,ljo/exist,joewiz/exist,shabanovd/exist,kohsah/exist,RemiKoutcherawy/exist,wolfgangmm/exist,jessealama/exist,adamretter/exist,wolfgangmm/exist,wolfgangmm/exist,adamretter/exist,joewiz/exist,ljo/exist,opax/exist,ambs/exist,RemiKoutcherawy/exist,joewiz/exist,patczar/exist,ljo/exist,wshager/exist,patczar/exist,jessealama/exist,MjAbuz/exist,wshager/exist,jensopetersen/exist,ambs/exist,wshager/exist,windauer/exist,RemiKoutcherawy/exist,dizzzz/exist,MjAbuz/exist,zwobit/exist,jensopetersen/exist,adamretter/exist | /*
* eXist Open Source Native XML Database
* Copyright (C) 2001-04 Wolfgang M. Meier
* [email protected]
* http://exist-db.org
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
*
* $Id$
*/
package org.exist.http.servlets;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.Reader;
import java.io.StringWriter;
import java.security.Principal;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Map;
import javax.servlet.ServletConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletInputStream;
import javax.servlet.ServletOutputStream;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.xmlrpc.Base64;
import org.exist.EXistException;
import org.exist.http.BadRequestException;
import org.exist.http.NotFoundException;
import org.exist.http.RESTServer;
import org.exist.http.Response;
import org.exist.security.PermissionDeniedException;
import org.exist.security.SecurityManager;
import org.exist.security.User;
import org.exist.security.XmldbPrincipal;
import org.exist.storage.BrokerPool;
import org.exist.storage.DBBroker;
import org.exist.util.Configuration;
import org.exist.util.DatabaseConfigurationException;
import org.xmldb.api.DatabaseManager;
import org.xmldb.api.base.Database;
import org.xmldb.api.base.XMLDBException;
public class EXistServlet extends HttpServlet {
private BrokerPool pool = null;
private User defaultUser = null;
private RESTServer server = new RESTServer();
/* (non-Javadoc)
* @see javax.servlet.GenericServlet#init(javax.servlet.ServletConfig)
*/
public void init(ServletConfig config) throws ServletException {
super.init(config);
try {
if (BrokerPool.isConfigured()) {
this.log("Database already started. Skipping configuration ...");
} else {
String confFile = config.getInitParameter("configuration");
String dbHome = config.getInitParameter("basedir");
String start = config.getInitParameter("start");
if (confFile == null)
confFile = "conf.xml";
dbHome = (dbHome == null) ? config.getServletContext().getRealPath(
".") : config.getServletContext().getRealPath(dbHome);
this.log("DatabaseAdminServlet: exist.home=" + dbHome);
File f = new File(dbHome + File.separator + confFile);
this.log("reading configuration from " + f.getAbsolutePath());
if (!f.canRead())
throw new ServletException("configuration file " + confFile
+ " not found or not readable");
Configuration configuration = new Configuration(confFile, dbHome);
if (start != null && start.equals("true"))
startup(configuration);
}
pool = BrokerPool.getInstance();
defaultUser = pool.getSecurityManager().getUser(SecurityManager.GUEST_USER);
} catch (EXistException e) {
throw new ServletException("No database instance available");
} catch (DatabaseConfigurationException e) {
throw new ServletException("Unable to configure database instance: " + e.getMessage(), e);
}
}
/* (non-Javadoc)
* @see javax.servlet.http.HttpServlet#doPut(javax.servlet.http.HttpServletRequest, javax.servlet.http.HttpServletResponse)
*/
protected void doPut(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException {
User user = authenticate(request);
if (user == null) {
response.sendError(HttpServletResponse.SC_FORBIDDEN,
"Permission denied: unknown user " + "or password");
return;
}
String path = request.getPathInfo();
int p = path.lastIndexOf(';');
if (p > -1)
path = path.substring(0, p);
ServletInputStream is = request.getInputStream();
int len = request.getContentLength();
// put may send a lot of data, so save it
// to a temporary file first.
File tempFile = File.createTempFile("exist", ".tmp");
OutputStream os = new FileOutputStream(tempFile);
byte[] buffer = new byte[4096];
int count, l = 0;
do {
count = is.read(buffer);
if (count > 0)
os.write(buffer, 0, count);
l += count;
} while (l < len);
os.close();
DBBroker broker = null;
try {
broker = pool.get(user);
Response r = server.doPut(broker, tempFile, request.getContentType(), path);
writeResponse(r, response);
} catch (BadRequestException e) {
response.sendError(HttpServletResponse.SC_BAD_REQUEST, e.getMessage());
} catch (PermissionDeniedException e) {
response.sendError(HttpServletResponse.SC_FORBIDDEN, e.getMessage());
} catch (EXistException e) {
response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, e.getMessage());
} finally {
pool.release(broker);
}
}
/* (non-Javadoc)
* @see javax.servlet.http.HttpServlet#doGet(javax.servlet.http.HttpServletRequest, javax.servlet.http.HttpServletResponse)
*/
protected void doGet(HttpServletRequest request,
HttpServletResponse response) throws ServletException, IOException {
User user = authenticate(request);
if (user == null) {
response.sendError(HttpServletResponse.SC_FORBIDDEN,
"Permission denied: unknown user " + "or password");
return;
}
String path = request.getPathInfo();
int p = path.lastIndexOf(';');
if (p > -1)
path = path.substring(0, p);
DBBroker broker = null;
try {
broker = pool.get(user);
Map parameters = getParameters(request);
Response r = server.doGet(broker, parameters, path);
writeResponse(r, response);
} catch (BadRequestException e) {
response.sendError(HttpServletResponse.SC_BAD_REQUEST, e
.getMessage());
} catch (PermissionDeniedException e) {
response
.sendError(HttpServletResponse.SC_FORBIDDEN, e.getMessage());
} catch (NotFoundException e) {
response
.sendError(HttpServletResponse.SC_NOT_FOUND, e.getMessage());
} catch (EXistException e) {
response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, e
.getMessage());
} finally {
pool.release(broker);
}
}
/* (non-Javadoc)
* @see javax.servlet.http.HttpServlet#doDelete(javax.servlet.http.HttpServletRequest, javax.servlet.http.HttpServletResponse)
*/
protected void doDelete(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException {
User user = authenticate(request);
if (user == null) {
response.sendError(HttpServletResponse.SC_FORBIDDEN,
"Permission denied: unknown user " + "or password");
return;
}
String path = request.getPathInfo();
int p = path.lastIndexOf(';');
if (p > -1)
path = path.substring(0, p);
DBBroker broker = null;
try {
broker = pool.get(user);
Response r = server.doDelete(broker, path);
writeResponse(r, response);
} catch (PermissionDeniedException e) {
response
.sendError(HttpServletResponse.SC_FORBIDDEN, e.getMessage());
} catch (NotFoundException e) {
response
.sendError(HttpServletResponse.SC_NOT_FOUND, e.getMessage());
} catch (EXistException e) {
response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, e
.getMessage());
} finally {
pool.release(broker);
}
}
/* (non-Javadoc)
* @see javax.servlet.http.HttpServlet#doPost(javax.servlet.http.HttpServletRequest, javax.servlet.http.HttpServletResponse)
*/
protected void doPost(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException {
User user = authenticate(request);
if (user == null) {
response.sendError(HttpServletResponse.SC_FORBIDDEN,
"Permission denied: unknown user " + "or password");
return;
}
String path = request.getPathInfo();
if(path == null)
path = "";
else {
int p = path.lastIndexOf(';');
if (p > -1)
path = path.substring(0, p);
}
String encoding = request.getCharacterEncoding();
if(encoding == null)
encoding = "UTF-8";
ServletInputStream is = request.getInputStream();
Reader reader = new InputStreamReader(is, encoding);
StringWriter content = new StringWriter();
char ch[] = new char[4096];
int len = 0;
while((len = reader.read(ch)) > -1)
content.write(ch, 0, len);
String xml = content.toString();
DBBroker broker = null;
try {
broker = pool.get(user);
Response r = server.doPost(broker, xml, path);
writeResponse(r, response);
} catch (PermissionDeniedException e) {
response
.sendError(HttpServletResponse.SC_FORBIDDEN, e.getMessage());
} catch (EXistException e) {
response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, e
.getMessage());
} catch (BadRequestException e) {
response.sendError(HttpServletResponse.SC_BAD_REQUEST, e.getMessage());
} finally {
pool.release(broker);
}
}
/* (non-Javadoc)
* @see javax.servlet.GenericServlet#destroy()
*/
public void destroy() {
super.destroy();
BrokerPool.stopAll(false);
}
private User authenticate(HttpServletRequest request) {
// First try to validate the principial if passed from the servlet engine
Principal principal = request.getUserPrincipal();
if(principal instanceof XmldbPrincipal){
String username = ((XmldbPrincipal)principal).getName();
String password = ((XmldbPrincipal)principal).getPassword();
this.log("Validating Principle: " + principal.getName());
User user = pool.getSecurityManager().getUser(username);
if (user != null){
if (password.equalsIgnoreCase(user.getPassword())){
this.log("Valid User: " + user.getName());
return user;
}else{
this.log( "Password invalid for user: " + username );
}
this.log("User not found: " + principal.getName());
}
}
String auth = request.getHeader("Authorization");
if(auth == null)
return defaultUser;
byte[] c = Base64.decode(auth.substring(6).getBytes());
String s = new String(c);
int p = s.indexOf(':');
String username = s.substring(0, p);
String password = s.substring(p + 1);
User user = pool.getSecurityManager().getUser(username);
if (user == null)
return null;
if (!user.validate(password))
return null;
return user;
}
private Map getParameters(HttpServletRequest request) {
Map params = new HashMap();
String key;
for(Enumeration e = request.getParameterNames(); e.hasMoreElements(); ) {
key = (String)e.nextElement();
params.put(key, request.getParameter(key));
}
return params;
}
private void writeResponse(Response internal, HttpServletResponse response)
throws IOException {
if (internal.getResponseCode() != HttpServletResponse.SC_OK) {
response.sendError(internal.getResponseCode(), internal
.getDescription());
} else {
String contentType = internal.getContentType() + "; charset="
+ internal.getEncoding();
response.setContentType(contentType);
if(internal.getContent() == null) {
if(internal.getDescription() != null)
internal.setContent(internal.getDescription());
else
internal.setContent("OK");
}
ServletOutputStream os = response.getOutputStream();
os.write(internal.getContent());
}
}
private void startup(Configuration configuration) throws ServletException {
if ( configuration == null )
throw new ServletException( "database has not been " +
"configured" );
this.log("configuring eXist instance");
try {
if ( !BrokerPool.isConfigured() )
BrokerPool.configure( 1, 5, configuration );
} catch ( EXistException e ) {
throw new ServletException( e.getMessage() );
}
try {
this.log("registering XMLDB driver");
Class clazz = Class.forName("org.exist.xmldb.DatabaseImpl");
Database database = (Database)clazz.newInstance();
DatabaseManager.registerDatabase(database);
} catch (ClassNotFoundException e) {
this.log("ERROR", e);
} catch (InstantiationException e) {
this.log("ERROR", e);
} catch (IllegalAccessException e) {
this.log("ERROR", e);
} catch (XMLDBException e) {
this.log("ERROR", e);
}
}
}
| src/org/exist/http/servlets/EXistServlet.java | /*
* eXist Open Source Native XML Database
* Copyright (C) 2001-04 Wolfgang M. Meier
* [email protected]
* http://exist-db.org
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
*
* $Id$
*/
package org.exist.http.servlets;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.Reader;
import java.io.StringWriter;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Map;
import javax.servlet.ServletConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletInputStream;
import javax.servlet.ServletOutputStream;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.xmlrpc.Base64;
import org.exist.EXistException;
import org.exist.http.BadRequestException;
import org.exist.http.NotFoundException;
import org.exist.http.RESTServer;
import org.exist.http.Response;
import org.exist.security.PermissionDeniedException;
import org.exist.security.SecurityManager;
import org.exist.security.User;
import org.exist.storage.BrokerPool;
import org.exist.storage.DBBroker;
import org.exist.util.Configuration;
import org.exist.util.DatabaseConfigurationException;
import org.xmldb.api.DatabaseManager;
import org.xmldb.api.base.Database;
import org.xmldb.api.base.XMLDBException;
public class EXistServlet extends HttpServlet {
private BrokerPool pool = null;
private User defaultUser = null;
private RESTServer server = new RESTServer();
/* (non-Javadoc)
* @see javax.servlet.GenericServlet#init(javax.servlet.ServletConfig)
*/
public void init(ServletConfig config) throws ServletException {
super.init(config);
try {
if (BrokerPool.isConfigured()) {
this.log("Database already started. Skipping configuration ...");
} else {
String confFile = config.getInitParameter("configuration");
String dbHome = config.getInitParameter("basedir");
String start = config.getInitParameter("start");
if (confFile == null)
confFile = "conf.xml";
dbHome = (dbHome == null) ? config.getServletContext().getRealPath(
".") : config.getServletContext().getRealPath(dbHome);
this.log("DatabaseAdminServlet: exist.home=" + dbHome);
File f = new File(dbHome + File.separator + confFile);
this.log("reading configuration from " + f.getAbsolutePath());
if (!f.canRead())
throw new ServletException("configuration file " + confFile
+ " not found or not readable");
Configuration configuration = new Configuration(confFile, dbHome);
if (start != null && start.equals("true"))
startup(configuration);
}
pool = BrokerPool.getInstance();
defaultUser = pool.getSecurityManager().getUser(SecurityManager.GUEST_USER);
} catch (EXistException e) {
throw new ServletException("No database instance available");
} catch (DatabaseConfigurationException e) {
throw new ServletException("Unable to configure database instance: " + e.getMessage(), e);
}
}
/* (non-Javadoc)
* @see javax.servlet.http.HttpServlet#doPut(javax.servlet.http.HttpServletRequest, javax.servlet.http.HttpServletResponse)
*/
protected void doPut(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException {
User user = authenticate(request);
if (user == null) {
response.sendError(HttpServletResponse.SC_FORBIDDEN,
"Permission denied: unknown user " + "or password");
return;
}
String path = request.getPathInfo();
int p = path.lastIndexOf(';');
if (p > -1)
path = path.substring(0, p);
ServletInputStream is = request.getInputStream();
int len = request.getContentLength();
// put may send a lot of data, so save it
// to a temporary file first.
File tempFile = File.createTempFile("exist", ".tmp");
OutputStream os = new FileOutputStream(tempFile);
byte[] buffer = new byte[4096];
int count, l = 0;
do {
count = is.read(buffer);
if (count > 0)
os.write(buffer, 0, count);
l += count;
} while (l < len);
os.close();
DBBroker broker = null;
try {
broker = pool.get(user);
Response r = server.doPut(broker, tempFile, request.getContentType(), path);
writeResponse(r, response);
} catch (BadRequestException e) {
response.sendError(HttpServletResponse.SC_BAD_REQUEST, e.getMessage());
} catch (PermissionDeniedException e) {
response.sendError(HttpServletResponse.SC_FORBIDDEN, e.getMessage());
} catch (EXistException e) {
response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, e.getMessage());
} finally {
pool.release(broker);
}
}
/* (non-Javadoc)
* @see javax.servlet.http.HttpServlet#doGet(javax.servlet.http.HttpServletRequest, javax.servlet.http.HttpServletResponse)
*/
protected void doGet(HttpServletRequest request,
HttpServletResponse response) throws ServletException, IOException {
User user = authenticate(request);
if (user == null) {
response.sendError(HttpServletResponse.SC_FORBIDDEN,
"Permission denied: unknown user " + "or password");
return;
}
String path = request.getPathInfo();
int p = path.lastIndexOf(';');
if (p > -1)
path = path.substring(0, p);
DBBroker broker = null;
try {
broker = pool.get(user);
Map parameters = getParameters(request);
Response r = server.doGet(broker, parameters, path);
writeResponse(r, response);
} catch (BadRequestException e) {
response.sendError(HttpServletResponse.SC_BAD_REQUEST, e
.getMessage());
} catch (PermissionDeniedException e) {
response
.sendError(HttpServletResponse.SC_FORBIDDEN, e.getMessage());
} catch (NotFoundException e) {
response
.sendError(HttpServletResponse.SC_NOT_FOUND, e.getMessage());
} catch (EXistException e) {
response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, e
.getMessage());
} finally {
pool.release(broker);
}
}
/* (non-Javadoc)
* @see javax.servlet.http.HttpServlet#doDelete(javax.servlet.http.HttpServletRequest, javax.servlet.http.HttpServletResponse)
*/
protected void doDelete(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException {
User user = authenticate(request);
if (user == null) {
response.sendError(HttpServletResponse.SC_FORBIDDEN,
"Permission denied: unknown user " + "or password");
return;
}
String path = request.getPathInfo();
int p = path.lastIndexOf(';');
if (p > -1)
path = path.substring(0, p);
DBBroker broker = null;
try {
broker = pool.get(user);
Response r = server.doDelete(broker, path);
writeResponse(r, response);
} catch (PermissionDeniedException e) {
response
.sendError(HttpServletResponse.SC_FORBIDDEN, e.getMessage());
} catch (NotFoundException e) {
response
.sendError(HttpServletResponse.SC_NOT_FOUND, e.getMessage());
} catch (EXistException e) {
response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, e
.getMessage());
} finally {
pool.release(broker);
}
}
/* (non-Javadoc)
* @see javax.servlet.http.HttpServlet#doPost(javax.servlet.http.HttpServletRequest, javax.servlet.http.HttpServletResponse)
*/
protected void doPost(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException {
User user = authenticate(request);
if (user == null) {
response.sendError(HttpServletResponse.SC_FORBIDDEN,
"Permission denied: unknown user " + "or password");
return;
}
String path = request.getPathInfo();
if(path == null)
path = "";
else {
int p = path.lastIndexOf(';');
if (p > -1)
path = path.substring(0, p);
}
String encoding = request.getCharacterEncoding();
if(encoding == null)
encoding = "UTF-8";
ServletInputStream is = request.getInputStream();
Reader reader = new InputStreamReader(is, encoding);
StringWriter content = new StringWriter();
char ch[] = new char[4096];
int len = 0;
while((len = reader.read(ch)) > -1)
content.write(ch, 0, len);
String xml = content.toString();
DBBroker broker = null;
try {
broker = pool.get(user);
Response r = server.doPost(broker, xml, path);
writeResponse(r, response);
} catch (PermissionDeniedException e) {
response
.sendError(HttpServletResponse.SC_FORBIDDEN, e.getMessage());
} catch (EXistException e) {
response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, e
.getMessage());
} catch (BadRequestException e) {
response.sendError(HttpServletResponse.SC_BAD_REQUEST, e.getMessage());
} finally {
pool.release(broker);
}
}
/* (non-Javadoc)
* @see javax.servlet.GenericServlet#destroy()
*/
public void destroy() {
super.destroy();
BrokerPool.stopAll(false);
}
private User authenticate(HttpServletRequest request) {
String auth = request.getHeader("Authorization");
if(auth == null)
return defaultUser;
byte[] c = Base64.decode(auth.substring(6).getBytes());
String s = new String(c);
int p = s.indexOf(':');
String username = s.substring(0, p);
String password = s.substring(p + 1);
User user = pool.getSecurityManager().getUser(username);
if (user == null)
return null;
if (!user.validate(password))
return null;
return user;
}
private Map getParameters(HttpServletRequest request) {
Map params = new HashMap();
String key;
for(Enumeration e = request.getParameterNames(); e.hasMoreElements(); ) {
key = (String)e.nextElement();
params.put(key, request.getParameter(key));
}
return params;
}
private void writeResponse(Response internal, HttpServletResponse response)
throws IOException {
if (internal.getResponseCode() != HttpServletResponse.SC_OK) {
response.sendError(internal.getResponseCode(), internal
.getDescription());
} else {
String contentType = internal.getContentType() + "; charset="
+ internal.getEncoding();
response.setContentType(contentType);
if(internal.getContent() == null) {
if(internal.getDescription() != null)
internal.setContent(internal.getDescription());
else
internal.setContent("OK");
}
ServletOutputStream os = response.getOutputStream();
os.write(internal.getContent());
}
}
private void startup(Configuration configuration) throws ServletException {
if ( configuration == null )
throw new ServletException( "database has not been " +
"configured" );
this.log("configuring eXist instance");
try {
if ( !BrokerPool.isConfigured() )
BrokerPool.configure( 1, 5, configuration );
} catch ( EXistException e ) {
throw new ServletException( e.getMessage() );
}
try {
this.log("registering XMLDB driver");
Class clazz = Class.forName("org.exist.xmldb.DatabaseImpl");
Database database = (Database)clazz.newInstance();
DatabaseManager.registerDatabase(database);
} catch (ClassNotFoundException e) {
this.log("ERROR", e);
} catch (InstantiationException e) {
this.log("ERROR", e);
} catch (IllegalAccessException e) {
this.log("ERROR", e);
} catch (XMLDBException e) {
this.log("ERROR", e);
}
}
}
| Check if an XmldbPrincipal has been passed.
svn path=/trunk/eXist-1.0/; revision=299
| src/org/exist/http/servlets/EXistServlet.java | Check if an XmldbPrincipal has been passed. | <ide><path>rc/org/exist/http/servlets/EXistServlet.java
<ide> import java.io.OutputStream;
<ide> import java.io.Reader;
<ide> import java.io.StringWriter;
<add>import java.security.Principal;
<ide> import java.util.Enumeration;
<ide> import java.util.HashMap;
<ide> import java.util.Map;
<ide> import org.exist.security.PermissionDeniedException;
<ide> import org.exist.security.SecurityManager;
<ide> import org.exist.security.User;
<add>import org.exist.security.XmldbPrincipal;
<ide> import org.exist.storage.BrokerPool;
<ide> import org.exist.storage.DBBroker;
<ide> import org.exist.util.Configuration;
<ide> }
<ide>
<ide> private User authenticate(HttpServletRequest request) {
<add> // First try to validate the principial if passed from the servlet engine
<add> Principal principal = request.getUserPrincipal();
<add>
<add> if(principal instanceof XmldbPrincipal){
<add> String username = ((XmldbPrincipal)principal).getName();
<add> String password = ((XmldbPrincipal)principal).getPassword();
<add>
<add> this.log("Validating Principle: " + principal.getName());
<add> User user = pool.getSecurityManager().getUser(username);
<add>
<add> if (user != null){
<add> if (password.equalsIgnoreCase(user.getPassword())){
<add> this.log("Valid User: " + user.getName());
<add> return user;
<add> }else{
<add> this.log( "Password invalid for user: " + username );
<add> }
<add> this.log("User not found: " + principal.getName());
<add> }
<add> }
<add>
<ide> String auth = request.getHeader("Authorization");
<ide> if(auth == null)
<ide> return defaultUser; |
|
Java | apache-2.0 | c4823c063abb65553d68ea25ac22af44d1ceb95d | 0 | shwenzhang/AndResGuard,shwenzhang/AndResGuard | /**
* Copyright 2014 Ryszard Wiśniewski <[email protected]>
* Copyright 2016 sim sun <[email protected]>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.tencent.mm.androlib.res.decoder;
import com.mindprod.ledatastream.LEDataInputStream;
import com.mindprod.ledatastream.LEDataOutputStream;
import com.tencent.mm.androlib.AndrolibException;
import com.tencent.mm.androlib.ApkDecoder;
import com.tencent.mm.androlib.res.data.ResPackage;
import com.tencent.mm.androlib.res.data.ResType;
import com.tencent.mm.androlib.res.util.StringUtil;
import com.tencent.mm.resourceproguard.Configuration;
import com.tencent.mm.util.ExtDataInput;
import com.tencent.mm.util.ExtDataOutput;
import com.tencent.mm.util.FileOperation;
import com.tencent.mm.util.TypedValue;
import com.tencent.mm.util.Utils;
import java.io.BufferedWriter;
import java.io.EOFException;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.Writer;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.logging.Logger;
import java.util.regex.Pattern;
public class ARSCDecoder {
private final static boolean DEBUG = false;
private final static short ENTRY_FLAG_COMPLEX = 0x0001;
private static final Logger LOGGER = Logger.getLogger(ARSCDecoder.class.getName());
private static final int KNOWN_CONFIG_BYTES = 56;
public static Map<Integer, String> mTableStringsResguard = new LinkedHashMap<>();
private final Map<String, String> mOldFileName;
private final Map<String, Integer> mCurSpecNameToPos;
private final HashSet<String> mShouldResguardTypeSet;
private final ApkDecoder mApkDecoder;
private ExtDataInput mIn;
private ExtDataOutput mOut;
private Header mHeader;
private StringBlock mTableStrings;
private StringBlock mTypeNames;
private StringBlock mSpecNames;
private ResPackage mPkg;
private ResType mType;
private ResPackage[] mPkgs;
private int[] mPkgsLenghtChange;
private int mTableLenghtChange = 0;
private int mResId;
private int mCurrTypeID = -1;
private int mCurEntryID = -1;
private int mCurPackageID = -1;
private ResguardStringBuilder mResguardBuilder;
private boolean mShouldResguardForType = false;
private Writer mMappingWriter;
private ARSCDecoder(InputStream arscStream, ApkDecoder decoder) throws AndrolibException, IOException {
mOldFileName = new LinkedHashMap<>();
mCurSpecNameToPos = new LinkedHashMap<>();
mShouldResguardTypeSet = new HashSet<>();
mIn = new ExtDataInput(new LEDataInputStream(arscStream));
mApkDecoder = decoder;
proguardFileName();
}
private ARSCDecoder(InputStream arscStream, ApkDecoder decoder, ResPackage[] pkgs) throws FileNotFoundException {
mOldFileName = new LinkedHashMap<>();
mCurSpecNameToPos = new LinkedHashMap<>();
mShouldResguardTypeSet = new HashSet<>();
mApkDecoder = decoder;
mIn = new ExtDataInput(new LEDataInputStream(arscStream));
mOut = new ExtDataOutput(new LEDataOutputStream(new FileOutputStream(mApkDecoder.getOutTempARSCFile(), false)));
mPkgs = pkgs;
mPkgsLenghtChange = new int[pkgs.length];
}
public static ResPackage[] decode(InputStream arscStream, ApkDecoder apkDecoder) throws AndrolibException {
try {
ARSCDecoder decoder = new ARSCDecoder(arscStream, apkDecoder);
ResPackage[] pkgs = decoder.readTable();
return pkgs;
} catch (IOException ex) {
throw new AndrolibException("Could not decode arsc file", ex);
}
}
public static void write(InputStream arscStream, ApkDecoder decoder, ResPackage[] pkgs) throws AndrolibException {
try {
ARSCDecoder writer = new ARSCDecoder(arscStream, decoder, pkgs);
writer.writeTable();
} catch (IOException ex) {
throw new AndrolibException("Could not decode arsc file", ex);
}
}
private void proguardFileName() throws IOException, AndrolibException {
mMappingWriter = new BufferedWriter(new FileWriter(mApkDecoder.getResMappingFile(), false));
mResguardBuilder = new ResguardStringBuilder();
mResguardBuilder.reset();
final Configuration config = mApkDecoder.getConfig();
File rawResFile = mApkDecoder.getRawResFile();
File[] resFiles = rawResFile.listFiles();
// 需要看看哪些类型是要混淆文件路径的
for (File resFile : resFiles) {
String raw = resFile.getName();
if (raw.contains("-")) {
raw = raw.substring(0, raw.indexOf("-"));
}
mShouldResguardTypeSet.add(raw);
}
if (!config.mKeepRoot) {
// 需要保持之前的命名方式
if (config.mUseKeepMapping) {
HashMap<String, String> fileMapping = config.mOldFileMapping;
List<String> keepFileNames = new ArrayList<>();
// 这里面为了兼容以前,也需要用以前的文件名前缀,即res混淆成什么
String resRoot = TypedValue.RES_FILE_PATH;
for (String name : fileMapping.values()) {
int dot = name.indexOf("/");
if (dot == -1) {
throw new IOException(String.format("the old mapping res file path should be like r/a, yours %s\n", name));
}
resRoot = name.substring(0, dot);
keepFileNames.add(name.substring(dot + 1));
}
// 去掉所有之前保留的命名,为了简单操作,mapping里面有的都去掉
mResguardBuilder.removeStrings(keepFileNames);
for (File resFile : resFiles) {
String raw = "res" + "/" + resFile.getName();
if (fileMapping.containsKey(raw)) {
mOldFileName.put(raw, fileMapping.get(raw));
} else {
mOldFileName.put(raw, resRoot + "/" + mResguardBuilder.getReplaceString());
}
}
} else {
for (int i = 0; i < resFiles.length; i++) {
// 这里也要用linux的分隔符,如果普通的话,就是r
mOldFileName.put("res" + "/" + resFiles[i].getName(),
TypedValue.RES_FILE_PATH + "/" + mResguardBuilder.getReplaceString()
);
}
}
generalFileResMapping();
}
Utils.cleanDir(mApkDecoder.getOutResFile());
}
private ResPackage[] readTable() throws IOException, AndrolibException {
nextChunkCheckType(Header.TYPE_TABLE);
int packageCount = mIn.readInt();
mTableStrings = StringBlock.read(mIn);
ResPackage[] packages = new ResPackage[packageCount];
nextChunk();
for (int i = 0; i < packageCount; i++) {
packages[i] = readPackage();
}
System.out.printf("resources mapping file %s done\n", mApkDecoder.getResMappingFile().getAbsolutePath());
mMappingWriter.close();
return packages;
}
private void writeTable() throws IOException, AndrolibException {
System.out.printf("writing new resources.arsc \n");
mTableLenghtChange = 0;
writeNextChunkCheck(Header.TYPE_TABLE, 0);
int packageCount = mIn.readInt();
mOut.writeInt(packageCount);
mTableLenghtChange += StringBlock.writeTableNameStringBlock(mIn, mOut, mTableStringsResguard);
writeNextChunk(0);
if (packageCount != mPkgs.length) {
throw new AndrolibException(String.format("writeTable package count is different before %d, now %d",
mPkgs.length,
packageCount
));
}
for (int i = 0; i < packageCount; i++) {
mCurPackageID = i;
writePackage();
}
// 最后需要把整个的size重写回去
reWriteTable();
}
private void generalFileResMapping() throws IOException {
mMappingWriter.write("res path mapping:\n");
for (String raw : mOldFileName.keySet()) {
mMappingWriter.write(" " + raw + " -> " + mOldFileName.get(raw));
mMappingWriter.write("\n");
}
mMappingWriter.write("\n\n");
mMappingWriter.write("res id mapping:\n");
mMappingWriter.flush();
}
private void generalResIDMapping(
String packageName, String typename, String specName, String replace) throws IOException {
mMappingWriter.write(" "
+ packageName
+ ".R."
+ typename
+ "."
+ specName
+ " -> "
+ packageName
+ ".R."
+ typename
+ "."
+ replace);
mMappingWriter.write("\n");
mMappingWriter.flush();
}
private void reWriteTable() throws AndrolibException, IOException {
mIn = new ExtDataInput(new LEDataInputStream(new FileInputStream(mApkDecoder.getOutTempARSCFile())));
mOut = new ExtDataOutput(new LEDataOutputStream(new FileOutputStream(mApkDecoder.getOutARSCFile(), false)));
writeNextChunkCheck(Header.TYPE_TABLE, mTableLenghtChange);
int packageCount = mIn.readInt();
mOut.writeInt(packageCount);
StringBlock.writeAll(mIn, mOut);
for (int i = 0; i < packageCount; i++) {
mCurPackageID = i;
writeNextChunk(mPkgsLenghtChange[mCurPackageID]);
mOut.writeBytes(mIn, mHeader.chunkSize - 8);
}
mApkDecoder.getOutTempARSCFile().delete();
}
private ResPackage readPackage() throws IOException, AndrolibException {
checkChunkType(Header.TYPE_PACKAGE);
int id = (byte) mIn.readInt();
String name = mIn.readNullEndedString(128, true);
System.out.printf("reading packagename %s\n", name);
/* typeNameStrings */
mIn.skipInt();
/* typeNameCount */
mIn.skipInt();
/* specNameStrings */
mIn.skipInt();
/* specNameCount */
mIn.skipInt();
mCurrTypeID = -1;
mTypeNames = StringBlock.read(mIn);
mSpecNames = StringBlock.read(mIn);
mResId = id << 24;
mPkg = new ResPackage(id, name);
// 系统包名不混淆
if (mPkg.getName().equals("android")) {
mPkg.setCanResguard(false);
} else {
mPkg.setCanResguard(true);
}
nextChunk();
while (mHeader.type == Header.TYPE_LIBRARY) {
readLibraryType();
}
while (mHeader.type == Header.TYPE_SPEC_TYPE) {
readTableTypeSpec();
}
return mPkg;
}
private void writePackage() throws IOException, AndrolibException {
checkChunkType(Header.TYPE_PACKAGE);
int id = (byte) mIn.readInt();
mOut.writeInt(id);
mResId = id << 24;
//char_16的,一共256byte
mOut.writeBytes(mIn, 256);
/* typeNameStrings */
mOut.writeInt(mIn.readInt());
/* typeNameCount */
mOut.writeInt(mIn.readInt());
/* specNameStrings */
mOut.writeInt(mIn.readInt());
/* specNameCount */
mOut.writeInt(mIn.readInt());
StringBlock.writeAll(mIn, mOut);
if (mPkgs[mCurPackageID].isCanResguard()) {
int specSizeChange = StringBlock.writeSpecNameStringBlock(mIn,
mOut,
mPkgs[mCurPackageID].getSpecNamesBlock(),
mCurSpecNameToPos
);
mPkgsLenghtChange[mCurPackageID] += specSizeChange;
mTableLenghtChange += specSizeChange;
} else {
StringBlock.writeAll(mIn, mOut);
}
writeNextChunk(0);
while (mHeader.type == Header.TYPE_LIBRARY) {
writeLibraryType();
}
while (mHeader.type == Header.TYPE_SPEC_TYPE) {
writeTableTypeSpec();
}
}
/**
* 如果是保持mapping的话,需要去掉某部分已经用过的mapping
*/
private void reduceFromOldMappingFile() {
if (mPkg.isCanResguard()) {
if (mApkDecoder.getConfig().mUseKeepMapping) {
// 判断是否走keepmapping
HashMap<String, HashMap<String, HashMap<String, String>>> resMapping = mApkDecoder.getConfig().mOldResMapping;
String packName = mPkg.getName();
if (resMapping.containsKey(packName)) {
HashMap<String, HashMap<String, String>> typeMaps = resMapping.get(packName);
String typeName = mType.getName();
if (typeMaps.containsKey(typeName)) {
HashMap<String, String> proguard = typeMaps.get(typeName);
// 去掉所有之前保留的命名,为了简单操作,mapping里面有的都去掉
mResguardBuilder.removeStrings(proguard.values());
}
}
}
}
}
/**
* reduce white list string from proguard builder
*/
private void reduceFromWhiteListFile() {
final Configuration config = mApkDecoder.getConfig();
final String packName = mPkg.getName();
if (config.mWhiteList.containsKey(packName)) {
if (mApkDecoder.getConfig().mUseWhiteList) {
HashMap<String, HashSet<Pattern>> typeMaps = config.mWhiteList.get(packName);
String typeName = mType.getName();
HashSet<Pattern> patterns = typeMaps.get(typeName);
if (patterns != null) {
for (Iterator<Pattern> it = patterns.iterator(); it.hasNext(); ) {
String tmp = it.next().pattern();
mResguardBuilder.removeString(tmp);
}
}
}
}
}
private void readLibraryType() throws AndrolibException, IOException {
checkChunkType(Header.TYPE_LIBRARY);
int libraryCount = mIn.readInt();
int packageId;
String packageName;
for (int i = 0; i < libraryCount; i++) {
packageId = mIn.readInt();
packageName = mIn.readNullEndedString(128, true);
System.out.printf("Decoding Shared Library (%s), pkgId: %d\n", packageName, packageId);
}
while (nextChunk().type == Header.TYPE_TYPE) {
readTableTypeSpec();
}
}
private void readTableTypeSpec() throws AndrolibException, IOException {
checkChunkType(Header.TYPE_SPEC_TYPE);
byte id = mIn.readByte();
mIn.skipBytes(3);
int entryCount = mIn.readInt();
// first meet a type of resource
if (mCurrTypeID != id) {
mCurrTypeID = id;
initResGuardBuild(mCurrTypeID);
}
// 是否混淆文件路径
mShouldResguardForType = isToResguardFile(mTypeNames.getString(id - 1));
// 对,这里是用来描述差异性的!!!
mIn.skipBytes(entryCount * 4);
mResId = (0xff000000 & mResId) | id << 16;
mType = new ResType(mTypeNames.getString(id - 1), mPkg);
while (nextChunk().type == Header.TYPE_TYPE) {
readConfig();
}
}
private void initResGuardBuild(int resTypeId) {
// init resguard builder
mResguardBuilder.reset();
mResguardBuilder.removeStrings(RawARSCDecoder.getExistTypeSpecNameStrings(resTypeId));
// 如果是保持mapping的话,需要去掉某部分已经用过的mapping
reduceFromOldMappingFile();
// remove string from resguard candidate list if it exists in white list
reduceFromWhiteListFile();
}
private void writeLibraryType() throws AndrolibException, IOException {
checkChunkType(Header.TYPE_LIBRARY);
int libraryCount = mIn.readInt();
mOut.writeInt(libraryCount);
for (int i = 0; i < libraryCount; i++) {
mOut.writeInt(mIn.readInt());/*packageId*/
mOut.writeBytes(mIn, 256); /*packageName*/
}
writeNextChunk(0);
while (mHeader.type == Header.TYPE_TYPE) {
writeTableTypeSpec();
}
}
private void writeTableTypeSpec() throws AndrolibException, IOException {
checkChunkType(Header.TYPE_SPEC_TYPE);
byte id = mIn.readByte();
mOut.writeByte(id);
mResId = (0xff000000 & mResId) | id << 16;
mOut.writeBytes(mIn, 3);
int entryCount = mIn.readInt();
mOut.writeInt(entryCount);
// 对,这里是用来描述差异性的!!!
///* flags */mIn.skipBytes(entryCount * 4);
int[] entryOffsets = mIn.readIntArray(entryCount);
mOut.writeIntArray(entryOffsets);
while (writeNextChunk(0).type == Header.TYPE_TYPE) {
writeConfig();
}
}
private void readConfig() throws IOException, AndrolibException {
checkChunkType(Header.TYPE_TYPE);
/* typeId */
mIn.skipInt();
int entryCount = mIn.readInt();
int entriesStart = mIn.readInt();
readConfigFlags();
int[] entryOffsets = mIn.readIntArray(entryCount);
for (int i = 0; i < entryOffsets.length; i++) {
mCurEntryID = i;
if (entryOffsets[i] != -1) {
mResId = (mResId & 0xffff0000) | i;
readEntry();
}
}
}
private void writeConfig() throws IOException, AndrolibException {
checkChunkType(Header.TYPE_TYPE);
/* typeId */
mOut.writeInt(mIn.readInt());
/* entryCount */
int entryCount = mIn.readInt();
mOut.writeInt(entryCount);
/* entriesStart */
mOut.writeInt(mIn.readInt());
writeConfigFlags();
int[] entryOffsets = mIn.readIntArray(entryCount);
mOut.writeIntArray(entryOffsets);
for (int i = 0; i < entryOffsets.length; i++) {
if (entryOffsets[i] != -1) {
mResId = (mResId & 0xffff0000) | i;
writeEntry();
}
}
}
private void readEntry() throws IOException, AndrolibException {
mIn.skipBytes(2);
short flags = mIn.readShort();
int specNamesId = mIn.readInt();
if (mPkg.isCanResguard()) {
// 混淆过或者已经添加到白名单的都不需要再处理了
if (!mResguardBuilder.isReplaced(mCurEntryID) && !mResguardBuilder.isInWhiteList(mCurEntryID)) {
Configuration config = mApkDecoder.getConfig();
boolean isWhiteList = false;
if (config.mUseWhiteList) {
isWhiteList = dealWithWhiteList(specNamesId, config);
}
if (!isWhiteList) {
dealWithNonWhiteList(specNamesId, config);
}
}
}
if ((flags & ENTRY_FLAG_COMPLEX) == 0) {
readValue(true, specNamesId);
} else {
readComplexEntry(false, specNamesId);
}
}
/**
* deal with whitelist
*
* @param specNamesId resource spec name id
* @param config {@Configuration} AndResGuard configuration
* @return isWhiteList whether this resource is processed by whitelist
*/
private boolean dealWithWhiteList(int specNamesId, Configuration config) throws AndrolibException {
String packName = mPkg.getName();
if (config.mWhiteList.containsKey(packName)) {
HashMap<String, HashSet<Pattern>> typeMaps = config.mWhiteList.get(packName);
String typeName = mType.getName();
if (typeMaps.containsKey(typeName)) {
String specName = mSpecNames.get(specNamesId).toString();
HashSet<Pattern> patterns = typeMaps.get(typeName);
for (Iterator<Pattern> it = patterns.iterator(); it.hasNext(); ) {
Pattern p = it.next();
if (p.matcher(specName).matches()) {
if (DEBUG) {
System.out.println(String.format("[match] matcher %s ,typeName %s, specName :%s",
p.pattern(),
typeName,
specName
));
}
mPkg.putSpecNamesReplace(mResId, specName);
mPkg.putSpecNamesblock(specName);
mResguardBuilder.setInWhiteList(mCurEntryID, true);
mType.putSpecResguardName(specName);
return true;
}
}
}
}
return false;
}
private void dealWithNonWhiteList(int specNamesId, Configuration config) throws AndrolibException, IOException {
String replaceString = null;
boolean keepMapping = false;
if (config.mUseKeepMapping) {
String packName = mPkg.getName();
if (config.mOldResMapping.containsKey(packName)) {
HashMap<String, HashMap<String, String>> typeMaps = config.mOldResMapping.get(packName);
String typeName = mType.getName();
if (typeMaps.containsKey(typeName)) {
HashMap<String, String> nameMap = typeMaps.get(typeName);
String specName = mSpecNames.get(specNamesId).toString();
if (nameMap.containsKey(specName)) {
keepMapping = true;
replaceString = nameMap.get(specName);
}
}
}
}
if (!keepMapping) {
replaceString = mResguardBuilder.getReplaceString();
}
mResguardBuilder.setInReplaceList(mCurEntryID, true);
if (replaceString == null) {
throw new AndrolibException("readEntry replaceString == null");
}
generalResIDMapping(mPkg.getName(), mType.getName(), mSpecNames.get(specNamesId).toString(), replaceString);
mPkg.putSpecNamesReplace(mResId, replaceString);
mPkg.putSpecNamesblock(replaceString);
mType.putSpecResguardName(replaceString);
}
private void writeEntry() throws IOException, AndrolibException {
/* size */
mOut.writeBytes(mIn, 2);
short flags = mIn.readShort();
mOut.writeShort(flags);
int specNamesId = mIn.readInt();
ResPackage pkg = mPkgs[mCurPackageID];
if (pkg.isCanResguard()) {
specNamesId = mCurSpecNameToPos.get(pkg.getSpecRepplace(mResId));
if (specNamesId < 0) {
throw new AndrolibException(String.format("writeEntry new specNamesId < 0 %d", specNamesId));
}
}
mOut.writeInt(specNamesId);
if ((flags & ENTRY_FLAG_COMPLEX) == 0) {
writeValue();
} else {
writeComplexEntry();
}
}
/**
* @param flags whether read direct
*/
private void readComplexEntry(boolean flags, int specNamesId) throws IOException, AndrolibException {
int parent = mIn.readInt();
int count = mIn.readInt();
for (int i = 0; i < count; i++) {
mIn.readInt();
readValue(flags, specNamesId);
}
}
private void writeComplexEntry() throws IOException, AndrolibException {
mOut.writeInt(mIn.readInt());
int count = mIn.readInt();
mOut.writeInt(count);
for (int i = 0; i < count; i++) {
mOut.writeInt(mIn.readInt());
writeValue();
}
}
/**
* @param flags whether read direct
*/
private void readValue(boolean flags, int specNamesId) throws IOException, AndrolibException {
/* size */
mIn.skipCheckShort((short) 8);
/* zero */
mIn.skipCheckByte((byte) 0);
byte type = mIn.readByte();
int data = mIn.readInt();
//这里面有几个限制,一对于string ,id, array我们是知道肯定不用改的,第二看要那个type是否对应有文件路径
if (mPkg.isCanResguard()
&& flags
&& type == TypedValue.TYPE_STRING
&& mShouldResguardForType
&& mShouldResguardTypeSet.contains(mType.getName())) {
if (mTableStringsResguard.get(data) == null) {
String raw = mTableStrings.get(data).toString();
if (StringUtil.isBlank(raw) || raw.equalsIgnoreCase("null")) return;
String proguard = mPkg.getSpecRepplace(mResId);
//这个要写死这个,因为resources.arsc里面就是用这个
int secondSlash = raw.lastIndexOf("/");
if (secondSlash == -1) {
throw new AndrolibException(String.format("can not find \\ or raw string in res path = %s", raw));
}
String newFilePath = raw.substring(0, secondSlash);
if (!mApkDecoder.getConfig().mKeepRoot) {
newFilePath = mOldFileName.get(raw.substring(0, secondSlash));
}
if (newFilePath == null) {
System.err.printf("can not found new res path, raw=%s\n", raw);
return;
}
//同理这里不能用File.separator,因为resources.arsc里面就是用这个
String result = newFilePath + "/" + proguard;
int firstDot = raw.indexOf(".");
if (firstDot != -1) {
result += raw.substring(firstDot);
}
String compatibaleraw = new String(raw);
String compatibaleresult = new String(result);
//为了适配window要做一次转换
if (!File.separator.contains("/")) {
compatibaleresult = compatibaleresult.replace("/", File.separator);
compatibaleraw = compatibaleraw.replace("/", File.separator);
}
File resRawFile = new File(mApkDecoder.getOutTempDir().getAbsolutePath() + File.separator + compatibaleraw);
File resDestFile = new File(mApkDecoder.getOutDir().getAbsolutePath() + File.separator + compatibaleresult);
//这里用的是linux的分隔符
HashMap<String, Integer> compressData = mApkDecoder.getCompressData();
if (compressData.containsKey(raw)) {
compressData.put(result, compressData.get(raw));
} else {
System.err.printf("can not find the compress dataresFile=%s\n", raw);
}
if (!resRawFile.exists()) {
System.err.printf("can not find res file, you delete it? path: resFile=%s\n", resRawFile.getAbsolutePath());
return;
} else {
if (resDestFile.exists()) {
throw new AndrolibException(String.format("res dest file is already found: destFile=%s",
resDestFile.getAbsolutePath()
));
}
FileOperation.copyFileUsingStream(resRawFile, resDestFile);
//already copied
mApkDecoder.removeCopiedResFile(resRawFile.toPath());
mTableStringsResguard.put(data, result);
}
}
}
}
private void writeValue() throws IOException, AndrolibException {
/* size */
mOut.writeCheckShort(mIn.readShort(), (short) 8);
/* zero */
mOut.writeCheckByte(mIn.readByte(), (byte) 0);
byte type = mIn.readByte();
mOut.writeByte(type);
int data = mIn.readInt();
mOut.writeInt(data);
}
private void readConfigFlags() throws IOException, AndrolibException {
int size = mIn.readInt();
int read = 28;
if (size < 28) {
throw new AndrolibException("Config size < 28");
}
boolean isInvalid = false;
short mcc = mIn.readShort();
short mnc = mIn.readShort();
char[] language = new char[] { (char) mIn.readByte(), (char) mIn.readByte() };
char[] country = new char[] { (char) mIn.readByte(), (char) mIn.readByte() };
byte orientation = mIn.readByte();
byte touchscreen = mIn.readByte();
int density = mIn.readUnsignedShort();
byte keyboard = mIn.readByte();
byte navigation = mIn.readByte();
byte inputFlags = mIn.readByte();
/* inputPad0 */
mIn.skipBytes(1);
short screenWidth = mIn.readShort();
short screenHeight = mIn.readShort();
short sdkVersion = mIn.readShort();
/* minorVersion, now must always be 0 */
mIn.skipBytes(2);
byte screenLayout = 0;
byte uiMode = 0;
short smallestScreenWidthDp = 0;
if (size >= 32) {
screenLayout = mIn.readByte();
uiMode = mIn.readByte();
smallestScreenWidthDp = mIn.readShort();
read = 32;
}
short screenWidthDp = 0;
short screenHeightDp = 0;
if (size >= 36) {
screenWidthDp = mIn.readShort();
screenHeightDp = mIn.readShort();
read = 36;
}
char[] localeScript = null;
char[] localeVariant = null;
if (size >= 48) {
localeScript = readScriptOrVariantChar(4).toCharArray();
localeVariant = readScriptOrVariantChar(8).toCharArray();
read = 48;
}
byte screenLayout2 = 0;
if (size >= 52) {
screenLayout2 = mIn.readByte();
mIn.skipBytes(3); // reserved padding
read = 52;
}
if (size >= 56) {
mIn.skipBytes(4);
read = 56;
}
int exceedingSize = size - KNOWN_CONFIG_BYTES;
if (exceedingSize > 0) {
byte[] buf = new byte[exceedingSize];
read += exceedingSize;
mIn.readFully(buf);
BigInteger exceedingBI = new BigInteger(1, buf);
if (exceedingBI.equals(BigInteger.ZERO)) {
LOGGER.fine(String.format("Config flags size > %d, but exceeding bytes are all zero, so it should be ok.",
KNOWN_CONFIG_BYTES
));
} else {
LOGGER.warning(String.format("Config flags size > %d. Exceeding bytes: 0x%X.",
KNOWN_CONFIG_BYTES,
exceedingBI
));
isInvalid = true;
}
}
}
private String readScriptOrVariantChar(int length) throws AndrolibException, IOException {
StringBuilder string = new StringBuilder(16);
while (length-- != 0) {
short ch = mIn.readByte();
if (ch == 0) {
break;
}
string.append((char) ch);
}
mIn.skipBytes(length);
return string.toString();
}
private void writeConfigFlags() throws IOException, AndrolibException {
//总的有多大
int size = mIn.readInt();
if (size < 28) {
throw new AndrolibException("Config size < 28");
}
mOut.writeInt(size);
mOut.writeBytes(mIn, size - 4);
}
private Header nextChunk() throws IOException {
return mHeader = Header.read(mIn);
}
private void checkChunkType(int expectedType) throws AndrolibException {
if (mHeader.type != expectedType) {
throw new AndrolibException(String.format("Invalid chunk type: expected=0x%08x, got=0x%08x",
expectedType,
mHeader.type
));
}
}
private void nextChunkCheckType(int expectedType) throws IOException, AndrolibException {
nextChunk();
checkChunkType(expectedType);
}
private Header writeNextChunk(int diffSize) throws IOException, AndrolibException {
mHeader = Header.readAndWriteHeader(mIn, mOut, diffSize);
return mHeader;
}
private Header writeNextChunkCheck(int expectedType, int diffSize) throws IOException, AndrolibException {
mHeader = Header.readAndWriteHeader(mIn, mOut, diffSize);
if (mHeader.type != expectedType) {
throw new AndrolibException(String.format("Invalid chunk type: expected=%d, got=%d", expectedType, mHeader.type));
}
return mHeader;
}
/**
* 为了加速,不需要处理string,id,array,这几个是肯定不是的
*/
private boolean isToResguardFile(String name) {
return (!name.equals("string") && !name.equals("id") && !name.equals("array"));
}
public static class Header {
public final static short TYPE_NONE = -1, TYPE_TABLE = 0x0002, TYPE_PACKAGE = 0x0200, TYPE_TYPE = 0x0201,
TYPE_SPEC_TYPE = 0x0202, TYPE_LIBRARY = 0x0203;
public final short type;
public final int chunkSize;
public Header(short type, int size) {
this.type = type;
this.chunkSize = size;
}
public static Header read(ExtDataInput in) throws IOException {
short type;
try {
type = in.readShort();
short count = in.readShort();
int size = in.readInt();
return new Header(type, size);
} catch (EOFException ex) {
return new Header(TYPE_NONE, 0);
}
}
public static Header readAndWriteHeader(ExtDataInput in, ExtDataOutput out, int diffSize)
throws IOException, AndrolibException {
short type;
int size;
try {
type = in.readShort();
out.writeShort(type);
short count = in.readShort();
out.writeShort(count);
size = in.readInt();
size -= diffSize;
if (size <= 0) {
throw new AndrolibException(String.format("readAndWriteHeader size < 0: size=%d", size));
}
out.writeInt(size);
} catch (EOFException ex) {
return new Header(TYPE_NONE, 0);
}
return new Header(type, size);
}
}
public static class FlagsOffset {
public final int offset;
public final int count;
public FlagsOffset(int offset, int count) {
this.offset = offset;
this.count = count;
}
}
private class ResguardStringBuilder {
private List<String> mReplaceStringBuffer = new ArrayList<>();
private boolean[] mIsReplaced;
private boolean[] mIsWhiteList;
private String[] mAToZ = {
"a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", "m", "n", "o", "p", "q", "r", "s", "t", "u", "v",
"w", "x", "y", "z"
};
private String[] mAToAll = {
"0", "1", "2", "3", "4", "5", "6", "7", "8", "9", "_", "a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k",
"l", "m", "n", "o", "p", "q", "r", "s", "t", "u", "v", "w", "x", "y", "z"
};
/**
* 在window上面有些关键字是不能作为文件名的
* CON, PRN, AUX, CLOCK$, NUL
* COM1, COM2, COM3, COM4, COM5, COM6, COM7, COM8, COM9
* LPT1, LPT2, LPT3, LPT4, LPT5, LPT6, LPT7, LPT8, and LPT9.
*/
private HashSet<String> mFileNameBlackList;
public ResguardStringBuilder() {
mFileNameBlackList = new HashSet<>();
mFileNameBlackList.add("con");
mFileNameBlackList.add("prn");
mFileNameBlackList.add("aux");
mFileNameBlackList.add("nul");
}
public void reset() {
mReplaceStringBuffer.clear();
for (int i = 0; i < mAToZ.length; i++) {
mReplaceStringBuffer.add(mAToZ[i]);
}
for (int i = 0; i < mAToZ.length; i++) {
String first = mAToZ[i];
for (int j = 0; j < mAToAll.length; j++) {
String second = mAToAll[j];
mReplaceStringBuffer.add(first + second);
}
}
for (int i = 0; i < mAToZ.length; i++) {
String first = mAToZ[i];
for (int j = 0; j < mAToAll.length; j++) {
String second = mAToAll[j];
for (int k = 0; k < mAToAll.length; k++) {
String third = mAToAll[k];
String result = first + second + third;
if (!mFileNameBlackList.contains(result)) {
mReplaceStringBuffer.add(first + second + third);
}
}
}
}
final int size = mReplaceStringBuffer.size() * 2;
mIsReplaced = new boolean[size];
mIsWhiteList = new boolean[size];
for (int i = 0; i < size; i++) {
mIsReplaced[i] = false;
mIsWhiteList[i] = false;
}
}
public void removeString(String str) {
if (str != null) {
mReplaceStringBuffer.remove(str);
}
}
// 对于某种类型用过的mapping,全部不能再用了
public void removeStrings(Collection<String> collection) {
if (collection == null) return;
mReplaceStringBuffer.removeAll(collection);
}
public boolean isReplaced(int id) {
return mIsReplaced[id];
}
public boolean isInWhiteList(int id) {
return mIsWhiteList[id];
}
public void setInWhiteList(int id, boolean set) {
mIsWhiteList[id] = set;
}
public void setInReplaceList(int id, boolean set) {
mIsReplaced[id] = set;
}
// 开始设计是根据id来get,但是为了实现保持mapping的方式,取消了这个
public String getReplaceString() throws AndrolibException {
if (mReplaceStringBuffer.isEmpty()) {
throw new AndrolibException(String.format("now can only proguard less than 35594 in a single type\n"));
}
return mReplaceStringBuffer.remove(0);
}
public int lenght() {
return mReplaceStringBuffer.size();
}
}
} | AndResGuard-core/src/main/java/com/tencent/mm/androlib/res/decoder/ARSCDecoder.java | /**
* Copyright 2014 Ryszard Wiśniewski <[email protected]>
* Copyright 2016 sim sun <[email protected]>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.tencent.mm.androlib.res.decoder;
import com.mindprod.ledatastream.LEDataInputStream;
import com.mindprod.ledatastream.LEDataOutputStream;
import com.tencent.mm.androlib.AndrolibException;
import com.tencent.mm.androlib.ApkDecoder;
import com.tencent.mm.androlib.res.data.ResPackage;
import com.tencent.mm.androlib.res.data.ResType;
import com.tencent.mm.androlib.res.util.StringUtil;
import com.tencent.mm.resourceproguard.Configuration;
import com.tencent.mm.util.ExtDataInput;
import com.tencent.mm.util.ExtDataOutput;
import com.tencent.mm.util.FileOperation;
import com.tencent.mm.util.TypedValue;
import com.tencent.mm.util.Utils;
import java.io.BufferedWriter;
import java.io.EOFException;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.Writer;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.logging.Logger;
import java.util.regex.Pattern;
public class ARSCDecoder {
private final static boolean DEBUG = false;
private final static short ENTRY_FLAG_COMPLEX = 0x0001;
private static final Logger LOGGER = Logger.getLogger(ARSCDecoder.class.getName());
private static final int KNOWN_CONFIG_BYTES = 56;
public static Map<Integer, String> mTableStringsResguard = new LinkedHashMap<>();
private final Map<String, String> mOldFileName;
private final Map<String, Integer> mCurSpecNameToPos;
private final HashSet<String> mShouldResguardTypeSet;
private final ApkDecoder mApkDecoder;
private ExtDataInput mIn;
private ExtDataOutput mOut;
private Header mHeader;
private StringBlock mTableStrings;
private StringBlock mTypeNames;
private StringBlock mSpecNames;
private ResPackage mPkg;
private ResType mType;
private ResPackage[] mPkgs;
private int[] mPkgsLenghtChange;
private int mTableLenghtChange = 0;
private int mResId;
private int mCurTypeID = -1;
private int mCurEntryID = -1;
private int mCurPackageID = -1;
private ResguardStringBuilder mResguardBuilder;
private boolean mShouldResguardForType = false;
private Writer mMappingWriter;
private ARSCDecoder(InputStream arscStream, ApkDecoder decoder) throws AndrolibException, IOException {
mOldFileName = new LinkedHashMap<>();
mCurSpecNameToPos = new LinkedHashMap<>();
mShouldResguardTypeSet = new HashSet<>();
mIn = new ExtDataInput(new LEDataInputStream(arscStream));
mApkDecoder = decoder;
proguardFileName();
}
private ARSCDecoder(InputStream arscStream, ApkDecoder decoder, ResPackage[] pkgs) throws FileNotFoundException {
mOldFileName = new LinkedHashMap<>();
mCurSpecNameToPos = new LinkedHashMap<>();
mShouldResguardTypeSet = new HashSet<>();
mApkDecoder = decoder;
mIn = new ExtDataInput(new LEDataInputStream(arscStream));
mOut = new ExtDataOutput(new LEDataOutputStream(new FileOutputStream(mApkDecoder.getOutTempARSCFile(), false)));
mPkgs = pkgs;
mPkgsLenghtChange = new int[pkgs.length];
}
public static ResPackage[] decode(InputStream arscStream, ApkDecoder apkDecoder) throws AndrolibException {
try {
ARSCDecoder decoder = new ARSCDecoder(arscStream, apkDecoder);
ResPackage[] pkgs = decoder.readTable();
return pkgs;
} catch (IOException ex) {
throw new AndrolibException("Could not decode arsc file", ex);
}
}
public static void write(InputStream arscStream, ApkDecoder decoder, ResPackage[] pkgs) throws AndrolibException {
try {
ARSCDecoder writer = new ARSCDecoder(arscStream, decoder, pkgs);
writer.writeTable();
} catch (IOException ex) {
throw new AndrolibException("Could not decode arsc file", ex);
}
}
private void proguardFileName() throws IOException, AndrolibException {
mMappingWriter = new BufferedWriter(new FileWriter(mApkDecoder.getResMappingFile(), false));
mResguardBuilder = new ResguardStringBuilder();
mResguardBuilder.reset();
final Configuration config = mApkDecoder.getConfig();
File rawResFile = mApkDecoder.getRawResFile();
File[] resFiles = rawResFile.listFiles();
// 需要看看哪些类型是要混淆文件路径的
for (File resFile : resFiles) {
String raw = resFile.getName();
if (raw.contains("-")) {
raw = raw.substring(0, raw.indexOf("-"));
}
mShouldResguardTypeSet.add(raw);
}
if (!config.mKeepRoot) {
// 需要保持之前的命名方式
if (config.mUseKeepMapping) {
HashMap<String, String> fileMapping = config.mOldFileMapping;
List<String> keepFileNames = new ArrayList<>();
// 这里面为了兼容以前,也需要用以前的文件名前缀,即res混淆成什么
String resRoot = TypedValue.RES_FILE_PATH;
for (String name : fileMapping.values()) {
int dot = name.indexOf("/");
if (dot == -1) {
throw new IOException(String.format("the old mapping res file path should be like r/a, yours %s\n", name));
}
resRoot = name.substring(0, dot);
keepFileNames.add(name.substring(dot + 1));
}
// 去掉所有之前保留的命名,为了简单操作,mapping里面有的都去掉
mResguardBuilder.removeStrings(keepFileNames);
for (File resFile : resFiles) {
String raw = "res" + "/" + resFile.getName();
if (fileMapping.containsKey(raw)) {
mOldFileName.put(raw, fileMapping.get(raw));
} else {
mOldFileName.put(raw, resRoot + "/" + mResguardBuilder.getReplaceString());
}
}
} else {
for (int i = 0; i < resFiles.length; i++) {
// 这里也要用linux的分隔符,如果普通的话,就是r
mOldFileName.put("res" + "/" + resFiles[i].getName(),
TypedValue.RES_FILE_PATH + "/" + mResguardBuilder.getReplaceString()
);
}
}
generalFileResMapping();
}
Utils.cleanDir(mApkDecoder.getOutResFile());
}
private ResPackage[] readTable() throws IOException, AndrolibException {
nextChunkCheckType(Header.TYPE_TABLE);
int packageCount = mIn.readInt();
mTableStrings = StringBlock.read(mIn);
ResPackage[] packages = new ResPackage[packageCount];
nextChunk();
for (int i = 0; i < packageCount; i++) {
packages[i] = readPackage();
}
System.out.printf("resources mapping file %s done\n", mApkDecoder.getResMappingFile().getAbsolutePath());
mMappingWriter.close();
return packages;
}
private void writeTable() throws IOException, AndrolibException {
System.out.printf("writing new resources.arsc \n");
mTableLenghtChange = 0;
writeNextChunkCheck(Header.TYPE_TABLE, 0);
int packageCount = mIn.readInt();
mOut.writeInt(packageCount);
mTableLenghtChange += StringBlock.writeTableNameStringBlock(mIn, mOut, mTableStringsResguard);
writeNextChunk(0);
if (packageCount != mPkgs.length) {
throw new AndrolibException(String.format("writeTable package count is different before %d, now %d",
mPkgs.length,
packageCount
));
}
for (int i = 0; i < packageCount; i++) {
mCurPackageID = i;
writePackage();
}
// 最后需要把整个的size重写回去
reWriteTable();
}
private void generalFileResMapping() throws IOException {
mMappingWriter.write("res path mapping:\n");
for (String raw : mOldFileName.keySet()) {
mMappingWriter.write(" " + raw + " -> " + mOldFileName.get(raw));
mMappingWriter.write("\n");
}
mMappingWriter.write("\n\n");
mMappingWriter.write("res id mapping:\n");
mMappingWriter.flush();
}
private void generalResIDMapping(
String packageName, String typename, String specName, String replace) throws IOException {
mMappingWriter.write(" "
+ packageName
+ ".R."
+ typename
+ "."
+ specName
+ " -> "
+ packageName
+ ".R."
+ typename
+ "."
+ replace);
mMappingWriter.write("\n");
mMappingWriter.flush();
}
private void reWriteTable() throws AndrolibException, IOException {
mIn = new ExtDataInput(new LEDataInputStream(new FileInputStream(mApkDecoder.getOutTempARSCFile())));
mOut = new ExtDataOutput(new LEDataOutputStream(new FileOutputStream(mApkDecoder.getOutARSCFile(), false)));
writeNextChunkCheck(Header.TYPE_TABLE, mTableLenghtChange);
int packageCount = mIn.readInt();
mOut.writeInt(packageCount);
StringBlock.writeAll(mIn, mOut);
for (int i = 0; i < packageCount; i++) {
mCurPackageID = i;
writeNextChunk(mPkgsLenghtChange[mCurPackageID]);
mOut.writeBytes(mIn, mHeader.chunkSize - 8);
}
mApkDecoder.getOutTempARSCFile().delete();
}
private ResPackage readPackage() throws IOException, AndrolibException {
checkChunkType(Header.TYPE_PACKAGE);
int id = (byte) mIn.readInt();
String name = mIn.readNullEndedString(128, true);
System.out.printf("reading packagename %s\n", name);
/* typeNameStrings */
mIn.skipInt();
/* typeNameCount */
mIn.skipInt();
/* specNameStrings */
mIn.skipInt();
/* specNameCount */
mIn.skipInt();
mCurTypeID = -1;
mTypeNames = StringBlock.read(mIn);
mSpecNames = StringBlock.read(mIn);
mResId = id << 24;
mPkg = new ResPackage(id, name);
// 系统包名不混淆
if (mPkg.getName().equals("android")) {
mPkg.setCanResguard(false);
} else {
mPkg.setCanResguard(true);
}
nextChunk();
while (mHeader.type == Header.TYPE_LIBRARY) {
readLibraryType();
}
while (mHeader.type == Header.TYPE_SPEC_TYPE) {
readTableTypeSpec();
}
return mPkg;
}
private void writePackage() throws IOException, AndrolibException {
checkChunkType(Header.TYPE_PACKAGE);
int id = (byte) mIn.readInt();
mOut.writeInt(id);
mResId = id << 24;
//char_16的,一共256byte
mOut.writeBytes(mIn, 256);
/* typeNameStrings */
mOut.writeInt(mIn.readInt());
/* typeNameCount */
mOut.writeInt(mIn.readInt());
/* specNameStrings */
mOut.writeInt(mIn.readInt());
/* specNameCount */
mOut.writeInt(mIn.readInt());
StringBlock.writeAll(mIn, mOut);
if (mPkgs[mCurPackageID].isCanResguard()) {
int specSizeChange = StringBlock.writeSpecNameStringBlock(mIn,
mOut,
mPkgs[mCurPackageID].getSpecNamesBlock(),
mCurSpecNameToPos
);
mPkgsLenghtChange[mCurPackageID] += specSizeChange;
mTableLenghtChange += specSizeChange;
} else {
StringBlock.writeAll(mIn, mOut);
}
writeNextChunk(0);
while (mHeader.type == Header.TYPE_LIBRARY) {
writeLibraryType();
}
while (mHeader.type == Header.TYPE_SPEC_TYPE) {
writeTableTypeSpec();
}
}
/**
* 如果是保持mapping的话,需要去掉某部分已经用过的mapping
*/
private void reduceFromOldMappingFile() {
if (mPkg.isCanResguard()) {
if (mApkDecoder.getConfig().mUseKeepMapping) {
// 判断是否走keepmapping
HashMap<String, HashMap<String, HashMap<String, String>>> resMapping = mApkDecoder.getConfig().mOldResMapping;
String packName = mPkg.getName();
if (resMapping.containsKey(packName)) {
HashMap<String, HashMap<String, String>> typeMaps = resMapping.get(packName);
String typeName = mType.getName();
if (typeMaps.containsKey(typeName)) {
HashMap<String, String> proguard = typeMaps.get(typeName);
// 去掉所有之前保留的命名,为了简单操作,mapping里面有的都去掉
mResguardBuilder.removeStrings(proguard.values());
}
}
}
}
}
/**
* reduce white list string from proguard builder
*/
private void reduceFromWhiteListFile() {
final Configuration config = mApkDecoder.getConfig();
final String packName = mPkg.getName();
if (config.mWhiteList.containsKey(packName)) {
if (mApkDecoder.getConfig().mUseWhiteList) {
HashMap<String, HashSet<Pattern>> typeMaps = config.mWhiteList.get(packName);
String typeName = mType.getName();
HashSet<Pattern> patterns = typeMaps.get(typeName);
if (patterns != null) {
for (Iterator<Pattern> it = patterns.iterator(); it.hasNext(); ) {
mResguardBuilder.removeString(it.next().pattern());
}
}
}
}
}
private void readLibraryType() throws AndrolibException, IOException {
checkChunkType(Header.TYPE_LIBRARY);
int libraryCount = mIn.readInt();
int packageId;
String packageName;
for (int i = 0; i < libraryCount; i++) {
packageId = mIn.readInt();
packageName = mIn.readNullEndedString(128, true);
System.out.printf("Decoding Shared Library (%s), pkgId: %d\n", packageName, packageId);
}
while (nextChunk().type == Header.TYPE_TYPE) {
readTableTypeSpec();
}
}
private void readTableTypeSpec() throws AndrolibException, IOException {
checkChunkType(Header.TYPE_SPEC_TYPE);
byte id = mIn.readByte();
mIn.skipBytes(3);
int entryCount = mIn.readInt();
if (mCurTypeID != id) {
mResguardBuilder.reset();
mCurTypeID = id;
Set<String> existNames = RawARSCDecoder.getExistTypeSpecNameStrings(mCurTypeID);
mResguardBuilder.removeStrings(existNames);
}
// 是否混淆文件路径
mShouldResguardForType = isToResguardFile(mTypeNames.getString(id - 1));
// 对,这里是用来描述差异性的!!!
mIn.skipBytes(entryCount * 4);
mResId = (0xff000000 & mResId) | id << 16;
mType = new ResType(mTypeNames.getString(id - 1), mPkg);
// 如果是保持mapping的话,需要去掉某部分已经用过的mapping
reduceFromOldMappingFile();
// remove string from resguard candidate list if it exists in white list
reduceFromWhiteListFile();
while (nextChunk().type == Header.TYPE_TYPE) {
readConfig();
}
}
private void writeLibraryType() throws AndrolibException, IOException {
checkChunkType(Header.TYPE_LIBRARY);
int libraryCount = mIn.readInt();
mOut.writeInt(libraryCount);
for (int i = 0; i < libraryCount; i++) {
mOut.writeInt(mIn.readInt());/*packageId*/
mOut.writeBytes(mIn, 256); /*packageName*/
}
writeNextChunk(0);
while (mHeader.type == Header.TYPE_TYPE) {
writeTableTypeSpec();
}
}
private void writeTableTypeSpec() throws AndrolibException, IOException {
checkChunkType(Header.TYPE_SPEC_TYPE);
byte id = mIn.readByte();
mOut.writeByte(id);
mResId = (0xff000000 & mResId) | id << 16;
mOut.writeBytes(mIn, 3);
int entryCount = mIn.readInt();
mOut.writeInt(entryCount);
// 对,这里是用来描述差异性的!!!
///* flags */mIn.skipBytes(entryCount * 4);
int[] entryOffsets = mIn.readIntArray(entryCount);
mOut.writeIntArray(entryOffsets);
while (writeNextChunk(0).type == Header.TYPE_TYPE) {
writeConfig();
}
}
private void readConfig() throws IOException, AndrolibException {
checkChunkType(Header.TYPE_TYPE);
/* typeId */
mIn.skipInt();
int entryCount = mIn.readInt();
int entriesStart = mIn.readInt();
readConfigFlags();
int[] entryOffsets = mIn.readIntArray(entryCount);
for (int i = 0; i < entryOffsets.length; i++) {
mCurEntryID = i;
if (entryOffsets[i] != -1) {
mResId = (mResId & 0xffff0000) | i;
readEntry();
}
}
}
private void writeConfig() throws IOException, AndrolibException {
checkChunkType(Header.TYPE_TYPE);
/* typeId */
mOut.writeInt(mIn.readInt());
/* entryCount */
int entryCount = mIn.readInt();
mOut.writeInt(entryCount);
/* entriesStart */
mOut.writeInt(mIn.readInt());
writeConfigFlags();
int[] entryOffsets = mIn.readIntArray(entryCount);
mOut.writeIntArray(entryOffsets);
for (int i = 0; i < entryOffsets.length; i++) {
if (entryOffsets[i] != -1) {
mResId = (mResId & 0xffff0000) | i;
writeEntry();
}
}
}
private void readEntry() throws IOException, AndrolibException {
mIn.skipBytes(2);
short flags = mIn.readShort();
int specNamesId = mIn.readInt();
if (mPkg.isCanResguard()) {
// 混淆过或者已经添加到白名单的都不需要再处理了
if (!mResguardBuilder.isReplaced(mCurEntryID) && !mResguardBuilder.isInWhiteList(mCurEntryID)) {
Configuration config = mApkDecoder.getConfig();
boolean isWhiteList = false;
if (config.mUseWhiteList) {
isWhiteList = dealWithWhiteList(specNamesId, config);
}
if (!isWhiteList) {
dealWithNonWhiteList(specNamesId, config);
}
}
}
if ((flags & ENTRY_FLAG_COMPLEX) == 0) {
readValue(true, specNamesId);
} else {
readComplexEntry(false, specNamesId);
}
}
/**
* deal with whitelist
*
* @param specNamesId resource spec name id
* @param config {@Configuration} AndResGuard configuration
* @return isWhiteList whether this resource is processed by whitelist
*/
private boolean dealWithWhiteList(int specNamesId, Configuration config) throws AndrolibException {
String packName = mPkg.getName();
if (config.mWhiteList.containsKey(packName)) {
HashMap<String, HashSet<Pattern>> typeMaps = config.mWhiteList.get(packName);
String typeName = mType.getName();
if (typeMaps.containsKey(typeName)) {
String specName = mSpecNames.get(specNamesId).toString();
HashSet<Pattern> patterns = typeMaps.get(typeName);
for (Iterator<Pattern> it = patterns.iterator(); it.hasNext(); ) {
Pattern p = it.next();
if (p.matcher(specName).matches()) {
if (DEBUG) {
System.out.println(String.format("[match] matcher %s ,typeName %s, specName :%s",
p.pattern(),
typeName,
specName
));
}
mPkg.putSpecNamesReplace(mResId, specName);
mPkg.putSpecNamesblock(specName);
mResguardBuilder.setInWhiteList(mCurEntryID, true);
mType.putSpecResguardName(specName);
return true;
}
}
}
}
return false;
}
private void dealWithNonWhiteList(int specNamesId, Configuration config) throws AndrolibException, IOException {
String replaceString = null;
boolean keepMapping = false;
if (config.mUseKeepMapping) {
String packName = mPkg.getName();
if (config.mOldResMapping.containsKey(packName)) {
HashMap<String, HashMap<String, String>> typeMaps = config.mOldResMapping.get(packName);
String typeName = mType.getName();
if (typeMaps.containsKey(typeName)) {
HashMap<String, String> nameMap = typeMaps.get(typeName);
String specName = mSpecNames.get(specNamesId).toString();
if (nameMap.containsKey(specName)) {
keepMapping = true;
replaceString = nameMap.get(specName);
}
}
}
}
if (!keepMapping) {
replaceString = mResguardBuilder.getReplaceString();
}
mResguardBuilder.setInReplaceList(mCurEntryID, true);
if (replaceString == null) {
throw new AndrolibException("readEntry replaceString == null");
}
generalResIDMapping(mPkg.getName(), mType.getName(), mSpecNames.get(specNamesId).toString(), replaceString);
mPkg.putSpecNamesReplace(mResId, replaceString);
mPkg.putSpecNamesblock(replaceString);
mType.putSpecResguardName(replaceString);
}
private void writeEntry() throws IOException, AndrolibException {
/* size */
mOut.writeBytes(mIn, 2);
short flags = mIn.readShort();
mOut.writeShort(flags);
int specNamesId = mIn.readInt();
ResPackage pkg = mPkgs[mCurPackageID];
if (pkg.isCanResguard()) {
specNamesId = mCurSpecNameToPos.get(pkg.getSpecRepplace(mResId));
if (specNamesId < 0) {
throw new AndrolibException(String.format("writeEntry new specNamesId < 0 %d", specNamesId));
}
}
mOut.writeInt(specNamesId);
if ((flags & ENTRY_FLAG_COMPLEX) == 0) {
writeValue();
} else {
writeComplexEntry();
}
}
/**
* @param flags whether read direct
*/
private void readComplexEntry(boolean flags, int specNamesId) throws IOException, AndrolibException {
int parent = mIn.readInt();
int count = mIn.readInt();
for (int i = 0; i < count; i++) {
mIn.readInt();
readValue(flags, specNamesId);
}
}
private void writeComplexEntry() throws IOException, AndrolibException {
mOut.writeInt(mIn.readInt());
int count = mIn.readInt();
mOut.writeInt(count);
for (int i = 0; i < count; i++) {
mOut.writeInt(mIn.readInt());
writeValue();
}
}
/**
* @param flags whether read direct
*/
private void readValue(boolean flags, int specNamesId) throws IOException, AndrolibException {
/* size */
mIn.skipCheckShort((short) 8);
/* zero */
mIn.skipCheckByte((byte) 0);
byte type = mIn.readByte();
int data = mIn.readInt();
//这里面有几个限制,一对于string ,id, array我们是知道肯定不用改的,第二看要那个type是否对应有文件路径
if (mPkg.isCanResguard()
&& flags
&& type == TypedValue.TYPE_STRING
&& mShouldResguardForType
&& mShouldResguardTypeSet.contains(mType.getName())) {
if (mTableStringsResguard.get(data) == null) {
String raw = mTableStrings.get(data).toString();
if (StringUtil.isBlank(raw) || raw.equalsIgnoreCase("null")) return;
String proguard = mPkg.getSpecRepplace(mResId);
//这个要写死这个,因为resources.arsc里面就是用这个
int secondSlash = raw.lastIndexOf("/");
if (secondSlash == -1) {
throw new AndrolibException(String.format("can not find \\ or raw string in res path = %s", raw));
}
String newFilePath = raw.substring(0, secondSlash);
if (!mApkDecoder.getConfig().mKeepRoot) {
newFilePath = mOldFileName.get(raw.substring(0, secondSlash));
}
if (newFilePath == null) {
System.err.printf("can not found new res path, raw=%s\n", raw);
return;
}
//同理这里不能用File.separator,因为resources.arsc里面就是用这个
String result = newFilePath + "/" + proguard;
int firstDot = raw.indexOf(".");
if (firstDot != -1) {
result += raw.substring(firstDot);
}
String compatibaleraw = new String(raw);
String compatibaleresult = new String(result);
//为了适配window要做一次转换
if (!File.separator.contains("/")) {
compatibaleresult = compatibaleresult.replace("/", File.separator);
compatibaleraw = compatibaleraw.replace("/", File.separator);
}
File resRawFile = new File(mApkDecoder.getOutTempDir().getAbsolutePath() + File.separator + compatibaleraw);
File resDestFile = new File(mApkDecoder.getOutDir().getAbsolutePath() + File.separator + compatibaleresult);
//这里用的是linux的分隔符
HashMap<String, Integer> compressData = mApkDecoder.getCompressData();
if (compressData.containsKey(raw)) {
compressData.put(result, compressData.get(raw));
} else {
System.err.printf("can not find the compress dataresFile=%s\n", raw);
}
if (!resRawFile.exists()) {
System.err.printf("can not find res file, you delete it? path: resFile=%s\n", resRawFile.getAbsolutePath());
return;
} else {
if (resDestFile.exists()) {
throw new AndrolibException(String.format("res dest file is already found: destFile=%s",
resDestFile.getAbsolutePath()
));
}
FileOperation.copyFileUsingStream(resRawFile, resDestFile);
//already copied
mApkDecoder.removeCopiedResFile(resRawFile.toPath());
mTableStringsResguard.put(data, result);
}
}
}
}
private void writeValue() throws IOException, AndrolibException {
/* size */
mOut.writeCheckShort(mIn.readShort(), (short) 8);
/* zero */
mOut.writeCheckByte(mIn.readByte(), (byte) 0);
byte type = mIn.readByte();
mOut.writeByte(type);
int data = mIn.readInt();
mOut.writeInt(data);
}
private void readConfigFlags() throws IOException, AndrolibException {
int size = mIn.readInt();
int read = 28;
if (size < 28) {
throw new AndrolibException("Config size < 28");
}
boolean isInvalid = false;
short mcc = mIn.readShort();
short mnc = mIn.readShort();
char[] language = new char[] { (char) mIn.readByte(), (char) mIn.readByte() };
char[] country = new char[] { (char) mIn.readByte(), (char) mIn.readByte() };
byte orientation = mIn.readByte();
byte touchscreen = mIn.readByte();
int density = mIn.readUnsignedShort();
byte keyboard = mIn.readByte();
byte navigation = mIn.readByte();
byte inputFlags = mIn.readByte();
/* inputPad0 */
mIn.skipBytes(1);
short screenWidth = mIn.readShort();
short screenHeight = mIn.readShort();
short sdkVersion = mIn.readShort();
/* minorVersion, now must always be 0 */
mIn.skipBytes(2);
byte screenLayout = 0;
byte uiMode = 0;
short smallestScreenWidthDp = 0;
if (size >= 32) {
screenLayout = mIn.readByte();
uiMode = mIn.readByte();
smallestScreenWidthDp = mIn.readShort();
read = 32;
}
short screenWidthDp = 0;
short screenHeightDp = 0;
if (size >= 36) {
screenWidthDp = mIn.readShort();
screenHeightDp = mIn.readShort();
read = 36;
}
char[] localeScript = null;
char[] localeVariant = null;
if (size >= 48) {
localeScript = readScriptOrVariantChar(4).toCharArray();
localeVariant = readScriptOrVariantChar(8).toCharArray();
read = 48;
}
byte screenLayout2 = 0;
if (size >= 52) {
screenLayout2 = mIn.readByte();
mIn.skipBytes(3); // reserved padding
read = 52;
}
if (size >= 56) {
mIn.skipBytes(4);
read = 56;
}
int exceedingSize = size - KNOWN_CONFIG_BYTES;
if (exceedingSize > 0) {
byte[] buf = new byte[exceedingSize];
read += exceedingSize;
mIn.readFully(buf);
BigInteger exceedingBI = new BigInteger(1, buf);
if (exceedingBI.equals(BigInteger.ZERO)) {
LOGGER.fine(String.format("Config flags size > %d, but exceeding bytes are all zero, so it should be ok.",
KNOWN_CONFIG_BYTES
));
} else {
LOGGER.warning(String.format("Config flags size > %d. Exceeding bytes: 0x%X.",
KNOWN_CONFIG_BYTES,
exceedingBI
));
isInvalid = true;
}
}
}
private String readScriptOrVariantChar(int length) throws AndrolibException, IOException {
StringBuilder string = new StringBuilder(16);
while (length-- != 0) {
short ch = mIn.readByte();
if (ch == 0) {
break;
}
string.append((char) ch);
}
mIn.skipBytes(length);
return string.toString();
}
private void writeConfigFlags() throws IOException, AndrolibException {
//总的有多大
int size = mIn.readInt();
if (size < 28) {
throw new AndrolibException("Config size < 28");
}
mOut.writeInt(size);
mOut.writeBytes(mIn, size - 4);
}
private Header nextChunk() throws IOException {
return mHeader = Header.read(mIn);
}
private void checkChunkType(int expectedType) throws AndrolibException {
if (mHeader.type != expectedType) {
throw new AndrolibException(String.format("Invalid chunk type: expected=0x%08x, got=0x%08x",
expectedType,
mHeader.type
));
}
}
private void nextChunkCheckType(int expectedType) throws IOException, AndrolibException {
nextChunk();
checkChunkType(expectedType);
}
private Header writeNextChunk(int diffSize) throws IOException, AndrolibException {
mHeader = Header.readAndWriteHeader(mIn, mOut, diffSize);
return mHeader;
}
private Header writeNextChunkCheck(int expectedType, int diffSize) throws IOException, AndrolibException {
mHeader = Header.readAndWriteHeader(mIn, mOut, diffSize);
if (mHeader.type != expectedType) {
throw new AndrolibException(String.format("Invalid chunk type: expected=%d, got=%d", expectedType, mHeader.type));
}
return mHeader;
}
/**
* 为了加速,不需要处理string,id,array,这几个是肯定不是的
*/
private boolean isToResguardFile(String name) {
return (!name.equals("string") && !name.equals("id") && !name.equals("array"));
}
public static class Header {
public final static short TYPE_NONE = -1, TYPE_TABLE = 0x0002, TYPE_PACKAGE = 0x0200, TYPE_TYPE = 0x0201,
TYPE_SPEC_TYPE = 0x0202, TYPE_LIBRARY = 0x0203;
public final short type;
public final int chunkSize;
public Header(short type, int size) {
this.type = type;
this.chunkSize = size;
}
public static Header read(ExtDataInput in) throws IOException {
short type;
try {
type = in.readShort();
short count = in.readShort();
int size = in.readInt();
return new Header(type, size);
} catch (EOFException ex) {
return new Header(TYPE_NONE, 0);
}
}
public static Header readAndWriteHeader(ExtDataInput in, ExtDataOutput out, int diffSize)
throws IOException, AndrolibException {
short type;
int size;
try {
type = in.readShort();
out.writeShort(type);
short count = in.readShort();
out.writeShort(count);
size = in.readInt();
size -= diffSize;
if (size <= 0) {
throw new AndrolibException(String.format("readAndWriteHeader size < 0: size=%d", size));
}
out.writeInt(size);
} catch (EOFException ex) {
return new Header(TYPE_NONE, 0);
}
return new Header(type, size);
}
}
public static class FlagsOffset {
public final int offset;
public final int count;
public FlagsOffset(int offset, int count) {
this.offset = offset;
this.count = count;
}
}
private class ResguardStringBuilder {
private List<String> mReplaceStringBuffer = new ArrayList<>();
private boolean[] mIsReplaced;
private boolean[] mIsWhiteList;
private String[] mAToZ = {
"a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", "m", "n", "o", "p", "q", "r", "s", "t", "u", "v",
"w", "x", "y", "z"
};
private String[] mAToAll = {
"0", "1", "2", "3", "4", "5", "6", "7", "8", "9", "_", "a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k",
"l", "m", "n", "o", "p", "q", "r", "s", "t", "u", "v", "w", "x", "y", "z"
};
/**
* 在window上面有些关键字是不能作为文件名的
* CON, PRN, AUX, CLOCK$, NUL
* COM1, COM2, COM3, COM4, COM5, COM6, COM7, COM8, COM9
* LPT1, LPT2, LPT3, LPT4, LPT5, LPT6, LPT7, LPT8, and LPT9.
*/
private HashSet<String> mFileNameBlackList;
public ResguardStringBuilder() {
mFileNameBlackList = new HashSet<>();
mFileNameBlackList.add("con");
mFileNameBlackList.add("prn");
mFileNameBlackList.add("aux");
mFileNameBlackList.add("nul");
}
public void reset() {
mReplaceStringBuffer.clear();
for (int i = 0; i < mAToZ.length; i++) {
mReplaceStringBuffer.add(mAToZ[i]);
}
for (int i = 0; i < mAToZ.length; i++) {
String first = mAToZ[i];
for (int j = 0; j < mAToAll.length; j++) {
String second = mAToAll[j];
mReplaceStringBuffer.add(first + second);
}
}
for (int i = 0; i < mAToZ.length; i++) {
String first = mAToZ[i];
for (int j = 0; j < mAToAll.length; j++) {
String second = mAToAll[j];
for (int k = 0; k < mAToAll.length; k++) {
String third = mAToAll[k];
String result = first + second + third;
if (!mFileNameBlackList.contains(result)) {
mReplaceStringBuffer.add(first + second + third);
}
}
}
}
final int size = mReplaceStringBuffer.size() * 2;
mIsReplaced = new boolean[size];
mIsWhiteList = new boolean[size];
for (int i = 0; i < size; i++) {
mIsReplaced[i] = false;
mIsWhiteList[i] = false;
}
}
public void removeString(String str) {
if (str != null) {
mReplaceStringBuffer.remove(str);
}
}
// 对于某种类型用过的mapping,全部不能再用了
public void removeStrings(Collection<String> collection) {
if (collection == null) return;
mReplaceStringBuffer.removeAll(collection);
}
public boolean isReplaced(int id) {
return mIsReplaced[id];
}
public boolean isInWhiteList(int id) {
return mIsWhiteList[id];
}
public void setInWhiteList(int id, boolean set) {
mIsWhiteList[id] = set;
}
public void setInReplaceList(int id, boolean set) {
mIsReplaced[id] = set;
}
// 开始设计是根据id来get,但是为了实现保持mapping的方式,取消了这个
public String getReplaceString() throws AndrolibException {
if (mReplaceStringBuffer.isEmpty()) {
throw new AndrolibException(String.format("now can only proguard less than 35594 in a single type\n"));
}
return mReplaceStringBuffer.remove(0);
}
public int lenght() {
return mReplaceStringBuffer.size();
}
}
} | refactor(core): extract init resguard builder logic to seperated function
| AndResGuard-core/src/main/java/com/tencent/mm/androlib/res/decoder/ARSCDecoder.java | refactor(core): extract init resguard builder logic to seperated function | <ide><path>ndResGuard-core/src/main/java/com/tencent/mm/androlib/res/decoder/ARSCDecoder.java
<ide> import java.util.LinkedHashMap;
<ide> import java.util.List;
<ide> import java.util.Map;
<del>import java.util.Set;
<ide> import java.util.logging.Logger;
<ide> import java.util.regex.Pattern;
<ide>
<ide> private int[] mPkgsLenghtChange;
<ide> private int mTableLenghtChange = 0;
<ide> private int mResId;
<del> private int mCurTypeID = -1;
<add> private int mCurrTypeID = -1;
<ide> private int mCurEntryID = -1;
<ide> private int mCurPackageID = -1;
<ide> private ResguardStringBuilder mResguardBuilder;
<ide> mIn.skipInt();
<ide> /* specNameCount */
<ide> mIn.skipInt();
<del> mCurTypeID = -1;
<add> mCurrTypeID = -1;
<ide> mTypeNames = StringBlock.read(mIn);
<ide> mSpecNames = StringBlock.read(mIn);
<ide> mResId = id << 24;
<ide> HashSet<Pattern> patterns = typeMaps.get(typeName);
<ide> if (patterns != null) {
<ide> for (Iterator<Pattern> it = patterns.iterator(); it.hasNext(); ) {
<del> mResguardBuilder.removeString(it.next().pattern());
<add> String tmp = it.next().pattern();
<add> mResguardBuilder.removeString(tmp);
<ide> }
<ide> }
<ide> }
<ide> mIn.skipBytes(3);
<ide> int entryCount = mIn.readInt();
<ide>
<del> if (mCurTypeID != id) {
<del> mResguardBuilder.reset();
<del> mCurTypeID = id;
<del>
<del> Set<String> existNames = RawARSCDecoder.getExistTypeSpecNameStrings(mCurTypeID);
<del> mResguardBuilder.removeStrings(existNames);
<add> // first meet a type of resource
<add> if (mCurrTypeID != id) {
<add> mCurrTypeID = id;
<add> initResGuardBuild(mCurrTypeID);
<ide> }
<ide> // 是否混淆文件路径
<ide> mShouldResguardForType = isToResguardFile(mTypeNames.getString(id - 1));
<ide> mResId = (0xff000000 & mResId) | id << 16;
<ide> mType = new ResType(mTypeNames.getString(id - 1), mPkg);
<ide>
<add> while (nextChunk().type == Header.TYPE_TYPE) {
<add> readConfig();
<add> }
<add> }
<add>
<add> private void initResGuardBuild(int resTypeId) {
<add> // init resguard builder
<add> mResguardBuilder.reset();
<add>
<add> mResguardBuilder.removeStrings(RawARSCDecoder.getExistTypeSpecNameStrings(resTypeId));
<ide> // 如果是保持mapping的话,需要去掉某部分已经用过的mapping
<ide> reduceFromOldMappingFile();
<ide> // remove string from resguard candidate list if it exists in white list
<ide> reduceFromWhiteListFile();
<del>
<del> while (nextChunk().type == Header.TYPE_TYPE) {
<del> readConfig();
<del> }
<ide> }
<ide>
<ide> private void writeLibraryType() throws AndrolibException, IOException { |
|
JavaScript | mit | 139111b2a565fca0a4dd21db9548ee2d005caf21 | 0 | pagarme/hermes | 'use strict';
angular.module('hermes', [])
.factory('HttpMultipartForm', function($rootScope, $q, $browser) {
return function(request) {
var defered = $q.defer();
var xhr = new XMLHttpRequest();
var form = new FormData();
var stringifyQS = function(obj) {
var result = '';
for(var key in obj) {
result += key + '=' + encodeURIComponent(obj[key]);
}
return result;
};
var extraBody = '';
// if (typeof request.data == 'string') {
// extraBody = request.data;
// } else {
// extraBody = stringifyQS(request.data);
// }
extraBody = stringifyQS(request.data);
form.append(request.form.alias, request.form.file);
xhr.onload = function() {
defered.resolve({
status: 200,
data: xhr.response,
headers: xhr.getAllResponseHeaders()
});
};
xhr.onerror = function() {
defered.reject({
status: xhr.status,
data: xhr.response,
headers: xhr.getAllResponseHeaders()
});
};
xhr.open(request.method, request.url + '?' + stringifyQS(request.params) + '&' + extraBody, true);
// xhr.setRequestHeader("Content-Type","multipart/form-data");
// angular.forEach(request.headers, function(value, name) {
// xhr.setRequestHeader(name, value);
// });
xhr.send(form);
return defered.promise;
};
})
.factory('HttpStream', function($rootScope, $q, $browser) {
return function(request) {
var defered = $q.defer();
var xhr = new XMLHttpRequest();
var headersArrived = false, finished = false;
var partialProgress = 0;
var stringifyQS = function(obj) {
var result = '';
for(var key in obj) {
result += key + '=' + encodeURIComponent(obj[key]);
}
return result;
};
var checkHeaders = function() {
if (headersArrived) {
return;
}
$rootScope.$apply(function() {
request.stream.writePreamble({
status: xhr.status,
headers: xhr.getAllResponseHeaders()
});
});
headersArrived = true;
};
var progress = function(final) {
var response;
response = xhr.response.slice(partialProgress);
partialProgress = xhr.response.length;
$rootScope.$apply(function() {
request.stream.write(response);
});
if (final) {
if (xhr.status == 200) {
defered.resolve({
status: 200,
data: xhr.response,
headers: xhr.getAllResponseHeaders()
});
} else {
defered.reject({
status: xhr.status,
data: xhr.response,
headers: xhr.getAllResponseHeaders()
});
}
$rootScope.$apply(function() {
request.stream.close();
});
if ($browser.$$completeOutstandingRequest) {
$browser.$$completeOutstandingRequest(angular.noop);
}
finished = true;
}
};
var trackProgress = function() {
if (finished) {
return;
}
if (xhr.readyState == 2) {
checkHeaders();
} else if (xhr.readyState == 3 || xhr.readyState == 4) {
checkHeaders();
progress(xhr.readyState == 4);
}
};
xhr.open(request.method, request.url + '?' + stringifyQS(request.params), true);
xhr.onprogress = function() {
if (xhr.readyState == 4) {
trackProgress();
}
};
xhr.onreadystatechange = function() {
trackProgress();
};
_.each(request.headers, function(value, key) {
if (value !== undefined) {
xhr.setRequestHeader(key, value);
}
});
if ($browser.$$incOutstandingRequestCount) {
$browser.$$incOutstandingRequestCount();
}
xhr.send(request.data || null);
return defered.promise;
};
})
.factory('HermesPromise', function($q) {
return function() {
var defered = $q.defer();
defered.promise.success = function(handler) {
this.then(function(response) {
return handler(response.data, response.status, response.headers);
});
return this;
};
defered.promise.error = function(handler) {
this.then(null, function(response) {
return handler(response.data, response.status, response.headers);
});
return this;
};
return defered;
};
})
.factory('HermesFileUploader', function($q) {
var HermesFileUploader = function() {
this.file = null;
this.alias = 'file';
};
HermesFileUploader.prototype.setFile = function(content) {
this.file = content[0].files[0];
};
HermesFileUploader.prototype.getFile = function() {
return this.file;
};
return HermesFileUploader;
})
.provider('Hermes', function() {
var HermesProvider = this;
var defaultConfiguration;
var Configuration = function() {
this.baseUrl = '';
this.setBaseUrl = function(url) {
this.baseUrl = url;
};
};
var Request = function(element, method, request) {
var rawRequest = _.clone(request);
this.method = method;
this.element = element;
this.getRawRequest = function() {
return rawRequest;
};
this.build = function(service) {
var request = _.merge({
url: element.url,
method: this.method,
headers: {},
params: {},
cache: false
}, this.getRawRequest());
return service.prepareRequest(request) || request;
};
};
var Element = function(service, baseUrl, name) {
var elementCache;
if (HermesProvider.cacheElements) {
elementCache = {};
}
this.url = baseUrl + '/' + name;
_.each(HermesProvider.methods, function(method) {
var self = this;
var prepareName = 'prepare' + method.charAt(0).toUpperCase() + method.slice(1);
this[prepareName] = function(config) {
return new Request(this, method, config);
};
this[method] = function(config) {
return service.dispatchRequest(this[prepareName](config || {}));
};
}, this);
this.element = function(name) {
var self = this;
var create = function() {
return new HermesProvider.Element(service, self.url, name);
};
if (HermesProvider.cacheElements) {
return elementCache[name] || (elementCache[name] = create());
} else {
return create();
}
};
};
var Service = function($q, $rootScope, $http, HermesPromise, HttpStream, HttpMultipartForm, configuration) {
var hookIdCounter = 0;
var builderHooks = [];
var errorHooks = [];
var elementCache;
if (HermesProvider.cacheElements) {
elementCache = {};
}
var addHook = function(db, fn, priority) {
var defered = $q.defer();
var id = hookIdCounter++;
if (!priority) {
priority = 0;
}
for (var i = 0; i <= db.length; i++) {
if (i == db.length || priority >= db[i].priority) {
db.splice(i, 0, {
id: id,
priority: priority,
fn: fn
});
break;
}
}
defered.promise.then(function() {
_.remove(db, { id: id });
});
return defered;
};
this.addBuilderHook = _.bind(addHook, this, builderHooks);
this.addErrorHook = _.bind(addHook, this, errorHooks);
this.prepareRequest = function(request) {
request.url = configuration.baseUrl + request.url;
_.each(builderHooks, function(hook) {
request = hook.fn(request);
});
};
this.dispatchRequest = function(request) {
var defered = new HermesPromise();
this.processRequest({
request: request,
result: defered
});
return defered.promise;
};
this.sendRequest = function(request) {
var built = request.build(this);
if (built.form) {
return HttpMultipartForm(built);
} else if (built.stream) {
return HttpStream(built);
} else {
return $http(built);
}
};
this.processRequest = function(requestData) {
var self = this;
this.sendRequest(requestData.request)
.then(function(res) {
requestData.result.resolve(res);
}, function(res) {
var waiter;
_.each(errorHooks, function(hook) {
var result = hook.fn(res.data, res.status, res.headers, requestData.request);
if (result && _.isFunction(result.then)) {
waiter = result;
}
});
if (waiter) {
waiter.then(function() {
self.processRequest(requestData);
}, function() {
requestData.result.reject(res);
});
} else {
requestData.result.reject(res);
}
});
};
this.element = function(name) {
var self = this;
var create = function() {
return new HermesProvider.Element(self, '', name);
};
if (HermesProvider.cacheElements) {
return elementCache[name] || (elementCache[name] = create());
} else {
return create();
}
};
};
this.$get = function($injector) {
return this.createService($injector, this.defaultConfiguration);
};
this.createService = function(injector, configuration) {
return injector.instantiate(this.Service, {
configuration: configuration
});
};
this.methods = ['get', 'put', 'post', 'patch', 'delete'];
this.defaultConfiguration = new Configuration();
this.Configuration = Configuration;
this.Service = Service;
this.Element = Element;
});
| src/hermes.js | 'use strict';
angular.module('hermes', [])
.factory('HttpMultipartForm', function($rootScope, $q, $browser) {
return function(request) {
var defered = $q.defer();
var xhr = new XMLHttpRequest();
var form = new FormData();
var stringifyQS = function(obj) {
var result = '';
for(var key in obj) {
result += key + '=' + encodeURIComponent(obj[key]);
}
return result;
};
var extraBody = '';
if (typeof request.data == 'string') {
extraBody = request.data;
} else {
extraBody = stringifyQS(request.data);
}
form.append(request.form.alias, request.form.file);
xhr.onload = function() {
defered.resolve({
status: 200,
data: xhr.response,
headers: xhr.getAllResponseHeaders()
});
};
xhr.onerror = function() {
defered.reject({
status: xhr.status,
data: xhr.response,
headers: xhr.getAllResponseHeaders()
});
};
xhr.open(request.method, request.url + '?' + stringifyQS(request.params) + '&' + extraBody, true);
// xhr.setRequestHeader("Content-Type","multipart/form-data");
// angular.forEach(request.headers, function(value, name) {
// xhr.setRequestHeader(name, value);
// });
xhr.send(form);
return defered.promise;
};
})
.factory('HttpStream', function($rootScope, $q, $browser) {
return function(request) {
var defered = $q.defer();
var xhr = new XMLHttpRequest();
var headersArrived = false, finished = false;
var partialProgress = 0;
var stringifyQS = function(obj) {
var result = '';
for(var key in obj) {
result += key + '=' + encodeURIComponent(obj[key]);
}
return result;
};
var checkHeaders = function() {
if (headersArrived) {
return;
}
$rootScope.$apply(function() {
request.stream.writePreamble({
status: xhr.status,
headers: xhr.getAllResponseHeaders()
});
});
headersArrived = true;
};
var progress = function(final) {
var response;
response = xhr.response.slice(partialProgress);
partialProgress = xhr.response.length;
$rootScope.$apply(function() {
request.stream.write(response);
});
if (final) {
if (xhr.status == 200) {
defered.resolve({
status: 200,
data: xhr.response,
headers: xhr.getAllResponseHeaders()
});
} else {
defered.reject({
status: xhr.status,
data: xhr.response,
headers: xhr.getAllResponseHeaders()
});
}
$rootScope.$apply(function() {
request.stream.close();
});
if ($browser.$$completeOutstandingRequest) {
$browser.$$completeOutstandingRequest(angular.noop);
}
finished = true;
}
};
var trackProgress = function() {
if (finished) {
return;
}
if (xhr.readyState == 2) {
checkHeaders();
} else if (xhr.readyState == 3 || xhr.readyState == 4) {
checkHeaders();
progress(xhr.readyState == 4);
}
};
xhr.open(request.method, request.url + '?' + stringifyQS(request.params), true);
xhr.onprogress = function() {
if (xhr.readyState == 4) {
trackProgress();
}
};
xhr.onreadystatechange = function() {
trackProgress();
};
_.each(request.headers, function(value, key) {
if (value !== undefined) {
xhr.setRequestHeader(key, value);
}
});
if ($browser.$$incOutstandingRequestCount) {
$browser.$$incOutstandingRequestCount();
}
xhr.send(request.data || null);
return defered.promise;
};
})
.factory('HermesPromise', function($q) {
return function() {
var defered = $q.defer();
defered.promise.success = function(handler) {
this.then(function(response) {
return handler(response.data, response.status, response.headers);
});
return this;
};
defered.promise.error = function(handler) {
this.then(null, function(response) {
return handler(response.data, response.status, response.headers);
});
return this;
};
return defered;
};
})
.factory('HermesFileUploader', function($q) {
var HermesFileUploader = function() {
this.file = null;
this.alias = 'file';
};
HermesFileUploader.prototype.setFile = function(content) {
this.file = content[0].files[0];
};
HermesFileUploader.prototype.getFile = function() {
return this.file;
};
return HermesFileUploader;
})
.provider('Hermes', function() {
var HermesProvider = this;
var defaultConfiguration;
var Configuration = function() {
this.baseUrl = '';
this.setBaseUrl = function(url) {
this.baseUrl = url;
};
};
var Request = function(element, method, request) {
var rawRequest = _.clone(request);
this.method = method;
this.element = element;
this.getRawRequest = function() {
return rawRequest;
};
this.build = function(service) {
var request = _.merge({
url: element.url,
method: this.method,
headers: {},
params: {},
cache: false
}, this.getRawRequest());
return service.prepareRequest(request) || request;
};
};
var Element = function(service, baseUrl, name) {
var elementCache;
if (HermesProvider.cacheElements) {
elementCache = {};
}
this.url = baseUrl + '/' + name;
_.each(HermesProvider.methods, function(method) {
var self = this;
var prepareName = 'prepare' + method.charAt(0).toUpperCase() + method.slice(1);
this[prepareName] = function(config) {
return new Request(this, method, config);
};
this[method] = function(config) {
return service.dispatchRequest(this[prepareName](config || {}));
};
}, this);
this.element = function(name) {
var self = this;
var create = function() {
return new HermesProvider.Element(service, self.url, name);
};
if (HermesProvider.cacheElements) {
return elementCache[name] || (elementCache[name] = create());
} else {
return create();
}
};
};
var Service = function($q, $rootScope, $http, HermesPromise, HttpStream, HttpMultipartForm, configuration) {
var hookIdCounter = 0;
var builderHooks = [];
var errorHooks = [];
var elementCache;
if (HermesProvider.cacheElements) {
elementCache = {};
}
var addHook = function(db, fn, priority) {
var defered = $q.defer();
var id = hookIdCounter++;
if (!priority) {
priority = 0;
}
for (var i = 0; i <= db.length; i++) {
if (i == db.length || priority >= db[i].priority) {
db.splice(i, 0, {
id: id,
priority: priority,
fn: fn
});
break;
}
}
defered.promise.then(function() {
_.remove(db, { id: id });
});
return defered;
};
this.addBuilderHook = _.bind(addHook, this, builderHooks);
this.addErrorHook = _.bind(addHook, this, errorHooks);
this.prepareRequest = function(request) {
request.url = configuration.baseUrl + request.url;
_.each(builderHooks, function(hook) {
request = hook.fn(request);
});
};
this.dispatchRequest = function(request) {
var defered = new HermesPromise();
this.processRequest({
request: request,
result: defered
});
return defered.promise;
};
this.sendRequest = function(request) {
var built = request.build(this);
if (built.form) {
return HttpMultipartForm(built);
} else if (built.stream) {
return HttpStream(built);
} else {
return $http(built);
}
};
this.processRequest = function(requestData) {
var self = this;
this.sendRequest(requestData.request)
.then(function(res) {
requestData.result.resolve(res);
}, function(res) {
var waiter;
_.each(errorHooks, function(hook) {
var result = hook.fn(res.data, res.status, res.headers, requestData.request);
if (result && _.isFunction(result.then)) {
waiter = result;
}
});
if (waiter) {
waiter.then(function() {
self.processRequest(requestData);
}, function() {
requestData.result.reject(res);
});
} else {
requestData.result.reject(res);
}
});
};
this.element = function(name) {
var self = this;
var create = function() {
return new HermesProvider.Element(self, '', name);
};
if (HermesProvider.cacheElements) {
return elementCache[name] || (elementCache[name] = create());
} else {
return create();
}
};
};
this.$get = function($injector) {
return this.createService($injector, this.defaultConfiguration);
};
this.createService = function(injector, configuration) {
return injector.instantiate(this.Service, {
configuration: configuration
});
};
this.methods = ['get', 'put', 'post', 'patch', 'delete'];
this.defaultConfiguration = new Configuration();
this.Configuration = Configuration;
this.Service = Service;
this.Element = Element;
});
| Fix url encoding.
| src/hermes.js | Fix url encoding. | <ide><path>rc/hermes.js
<ide>
<ide> var extraBody = '';
<ide>
<del> if (typeof request.data == 'string') {
<del> extraBody = request.data;
<del> } else {
<del> extraBody = stringifyQS(request.data);
<del> }
<add> // if (typeof request.data == 'string') {
<add> // extraBody = request.data;
<add> // } else {
<add> // extraBody = stringifyQS(request.data);
<add> // }
<add>
<add> extraBody = stringifyQS(request.data);
<ide>
<ide> form.append(request.form.alias, request.form.file);
<ide> |
|
JavaScript | mit | 7a52953bca5916e183a12c37c0a9e348204f3f7f | 0 | larsvers/tree-of-charts,larsvers/tree-of-charts,larsvers/tree-of-charts | /* dataindex js | v.7.0.4 | 1/7/15 | lv */
/* width of svg adjusted (to get the container as wide as possible w/o x-scrollbars) */
// utility ---------------------------------------------------------------------------------------------------------
var log = console.log.bind(console); // snippet log
var dir = console.dir.bind(console); // snippet dir
var spaceLess = function(x) { return x.replace(/[^A-Za-z]/g,''); } // remove all non-letter characters
var arrToStr = function(x) { return x.toString().replace(/,|,\s/g,' \u00B7 '); } // used e.g. in more info area
// ! not used, replaced with lodash's _.intersection !
// Prototype adjustments: compare 2 arrays and return true if there are any matches (*this* is the Array to call it on)
Array.prototype.anymatch = function(arr) {
var result = false;
for(var i = 0; i < this.length; i++) {
if(arr.indexOf(this[i]) > -1) result = true;
}
return result;
};
var util = {};
util.unik = function(arr) { // http://jszen.com/best-way-to-get-unique-values-of-an-array-in-javascript.7.html
var n = {}, r = [];
for (var i = 0; i < arr.length; i++) { // do for each element of the array
if (!n[arr[i]]) { // if this element is not yet in n
n[arr[i]] = true; // put it into the object n with the flag 'true'
r.push(arr[i]); // and push it into the array r
}
}
return r;
};
var vis = vis || {}; // top namespace
// data ------------------------------------------------------------------------------------------------------------
/* example data:
// tag-groups
var tags = [
{ group: 'tight tags', tags: ['pimmeldimmelbummelbum', 'pom', 'pum'] },
{ group: 'broad tags', tags: ['bim', 'bom', 'bum', 'tim', 'tom', 'tum'] },
{ group: 'report names', tags: ['DTH-report', 'Nordics Brand Tracking', 'Scandi Cross-Promo', 'Eybrow-length report'] }
];
// data-index
var index = [
{ name: 'blub', id: 'BUB', descr: 'this is a report about blubs', tags: ['pim', 'pom', 'DTH-report', 'pum'] },
{ name: 'blob', id: 'BOB', descr: 'this is a report about blobs', tags: ['bim', 'bom', 'bum', 'Eybrow-length report'] },
{ name: 'blab', id: 'BAB', descr: 'this is a report about blabs', tags: ['pimmeldimmelbummelbum', 'pimmeldimmelbummelbam', 'pimmeldimmelbummelbim'] },
{ name: 'blib', id: 'BIB', descr: 'this is a report about blibs', tags: ['tim', 'tom', 'Scandi Cross-Promo', 'tum', ] }
];
*/
d3.tsv('data/data.tsv', function(err, data){
if (err) log('data loading error');
// log('data', data);
// search data -------------------------------------------
util.searchdataTransform = function(variable, groupname) {
var arr = [];
var tagdata = data.map(function(d){
return { tags: d[variable] } // function parameter 1
});
function shove(x) { arr.push(x); } // utility fun: push each element into arr
for (var i=0; i < tagdata.length; i++) {
tagdata[i].tags.split(", ").forEach(shove); // for each tagdata-array, split the tags and push into an array
}
arr = util.unik(arr).sort(); // sort the array
var tagobject = {}; // create final object per tag-group
tagobject.group = groupname; // function parameter 2
tagobject.tags = arr;
return tagobject;
} // util.searchdataTransform; input: raw data-index, variable of interest, desired label; output: object in appropriate shape
// note: this can be moved up to the utility section but we would need to make the original data (here called 'data') available globally.
// decide here which tags should become searchable.
// Then create searchData array holding all the searchable tabs from all reports
var tagobjects = {}; // create the objects you need for the search box (param 1: variable name, param 2: semantic name for the user)
tagobjects.a = util.searchdataTransform('name', 'Name');
tagobjects.b = util.searchdataTransform('dataset_type', 'What | Data Type (Munzner)');
tagobjects.c = util.searchdataTransform('dataset_type_shneiderman', 'What | Data Type (Shneiderman)');
tagobjects.d = util.searchdataTransform('number_of_variables_rough', 'What | Number of variables');
tagobjects.e = util.searchdataTransform('categories_wo_aid_sum', 'What | Number of categories');
tagobjects.f = util.searchdataTransform('values_sum', 'What | Number of values');
tagobjects.g = util.searchdataTransform('data_type_sum_long', 'What | Variable type');
tagobjects.h = util.searchdataTransform('target_usage_munzner_all', 'Why | General target of visual (Munzner)');
tagobjects.i = util.searchdataTransform('target_specific_munzner', 'Why | Specific target of visual (Munzner)');
tagobjects.j = util.searchdataTransform('target_usage_alternative_all', 'Why | General target of visual (alternative)');
tagobjects.k = util.searchdataTransform('action_analysis', 'Why | Analysis action');
tagobjects.l = util.searchdataTransform('action_search', 'Why | Search action');
tagobjects.m = util.searchdataTransform('action_query_all', 'Why | Query action');
tagobjects.n = util.searchdataTransform('all_marks', 'How | Visual mark');
tagobjects.o = util.searchdataTransform('channel', 'How | Visual channel');
tagobjects.p = util.searchdataTransform('type', 'How | Chart type');
tagobjects.q = util.searchdataTransform('family', 'How | Chart family');
var searchData = []; // create the final search data array of objects
for (key in tagobjects) {
searchData.push(tagobjects[key]);
}
// log('searchData', searchData);
// report card data -----------------------------------------
// (1) Move all string-lists into arrays (to have clean objects) and
// (2) put all searchable tags into an extra property per object called 'searchTags'.
// This array searchTags will later be matched with the user-selected tags which were produced for and live in searchData
var reportData = data;
// not in function; can be done with a little more flexibility allowing for variable number of key-names to coerce to objects (function would need to be based on arg-length)
for (key in reportData) {
if (reportData.hasOwnProperty(key)) {
// turn comma-seperated lists into arrays
reportData[key].data_type = reportData[key].data_type.split(', ');
reportData[key].target_usage_munzner_all = reportData[key].target_usage_munzner_all.split(', ');
reportData[key].target_specific_munzner = reportData[key].target_specific_munzner.split(', ');
reportData[key].target_usage_alternative_all = reportData[key].target_usage_alternative_all.split(', ');
reportData[key].action_query_all = reportData[key].action_query_all.split(', ');
reportData[key].all_marks = reportData[key].all_marks.split(', ');
reportData[key].channel = reportData[key].channel.split(', ');
// prepare extra arrays for report cards
reportData[key].whatData = _.union(
[reportData[key].dataset_type],
[reportData[key].dataset_type_shneiderman],
[reportData[key].data_type_sum_long]
);
reportData[key].whatScale = _.union(
[reportData[key].number_of_variables_exact],
[reportData[key].categories_wo_aid],
[reportData[key].values_detailed]
);
reportData[key].whyTargets = _.union(
reportData[key].target_usage_munzner_all,
reportData[key].target_specific_munzner,
reportData[key].target_usage_alternative_all
);
reportData[key].whyActions = _.union(
[reportData[key].action_analysis],
[reportData[key].action_search],
reportData[key].action_query_all
);
reportData[key].howMarksChannels = _.union(
reportData[key].all_marks,
reportData[key].channel
);
reportData[key].cardTags = _.union(
reportData[key].whatData,
reportData[key].whatScale,
reportData[key].whyTargets,
reportData[key].whyActions,
reportData[key].howMarksChannels
)
// create list of all searchable tags to display correct report cards
reportData[key].searchTags = _.union(
[reportData[key].name],
[reportData[key].dataset_type],
[reportData[key].dataset_type_shneiderman],
[reportData[key].number_of_variables_rough],
[reportData[key].categories_wo_aid_sum],
[reportData[key].values_sum],
[reportData[key].data_type_sum_long],
reportData[key].target_usage_munzner_all,
reportData[key].target_specific_munzner,
reportData[key].target_usage_alternative_all,
[reportData[key].action_analysis],
[reportData[key].action_search],
reportData[key].action_query_all,
reportData[key].all_marks,
reportData[key].channel,
[reportData[key].type],
[reportData[key].family]
); // create an array of unique elements (_.union requires arrays as input. Hence turn single strings into arrays with the [brackets])
} // check for only enumerable non-inherited properties (objects at least have the length-property) - actually don't. at least not for the tree of charts
} // for each row
// log('reportData', reportData);
// tree data -----------------------------------------------
/* different tree structures ================================ */
/* feed reportData into the d3.nest().entries() if the first category only includes flat, single values */
/* feed reportDataClone into the d3.nest().entries() if the first category originally includes an array which is fanned out */
// function to build nested data for the tree
// number of nesting levels dependent on the array of variables we specify
// sorts each key ascending unless we specify a custom order through the 'order' argument
var setTreeStructure = function(arr, data, order) {
var l = arr.length;
var dataNest;
if (l === 2) {
dataNest = d3.nest()
// .key(function(d) { return d[arr[0]]; }).sortKeys(d3.ascending)
.key(function(d) { return d[arr[0]]; }).sortKeys(order[0].length === 0 ? d3.ascending : function(a,b) { return order[0].indexOf(a) - order[0].indexOf(b); })
.key(function(d) { return d[arr[1]]; }).sortKeys(order[1].length === 0 ? d3.ascending : function(a,b) { return order[1].indexOf(a) - order[1].indexOf(b); })
.entries(data);
dataNest.forEach(function(d) {
d.children = d.values;
delete d.values;
d.children.forEach(function(d) {
d.children = d.values;
delete d.values;
}); // name change 2nd level
}); // name change 1st level
} else if (l === 3) {
dataNest = d3.nest()
.key(function(d) { return d[arr[0]]; }).sortKeys(order[0].length === 0 ? d3.ascending : function(a,b) { return order[0].indexOf(a) - order[0].indexOf(b); })
.key(function(d) { return d[arr[1]]; }).sortKeys(order[1].length === 0 ? d3.ascending : function(a,b) { return order[1].indexOf(a) - order[1].indexOf(b); })
.key(function(d) { return d[arr[2]]; }).sortKeys(order[2].length === 0 ? d3.ascending : function(a,b) { return order[2].indexOf(a) - order[2].indexOf(b); })
.entries(data);
dataNest.forEach(function(d) {
d.children = d.values;
delete d.values;
d.children.forEach(function(d) {
d.children = d.values;
delete d.values;
d.children.forEach(function(d) {
d.children = d.values;
delete d.values;
}); // name change 3rd level
}); // name change 2nd level
}); // name change 1st level
} else if (l === 4) {
dataNest = d3.nest()
.key(function(d) { return d[arr[0]]; }).sortKeys(order[0].length === 0 ? d3.ascending : function(a,b) { return order[0].indexOf(a) - order[0].indexOf(b); })
.key(function(d) { return d[arr[1]]; }).sortKeys(order[1].length === 0 ? d3.ascending : function(a,b) { return order[1].indexOf(a) - order[1].indexOf(b); })
.key(function(d) { return d[arr[2]]; }).sortKeys(order[2].length === 0 ? d3.ascending : function(a,b) { return order[2].indexOf(a) - order[2].indexOf(b); })
.key(function(d) { return d[arr[3]]; }).sortKeys(order[3].length === 0 ? d3.ascending : function(a,b) { return order[3].indexOf(a) - order[3].indexOf(b); })
.entries(data);
dataNest.forEach(function(d) {
d.children = d.values;
delete d.values;
d.children.forEach(function(d) {
d.children = d.values;
delete d.values;
d.children.forEach(function(d) {
d.children = d.values;
delete d.values;
d.children.forEach(function(d) {
d.children = d.values;
delete d.values;
}); // name change 4th level
}); // name change 3rd level
}); // name change 2nd level
}); // name change 1st level
} else {
console.warn('Problem with setTreeStructure()')
} // test for the array length and build dataNest accordingly
return dataNest;
} // setTreeStructure()
// get the dataset names into an array (object you want to see first in button list needs to be last in array to show up first in view as it gets .appended)
var treeDataNames = [
{ data1: 'across', data2: 'c', label: 'scale \u00B7 action', id: 'scaleaction' },
{ data1: 'across', data2: 'b', label: 'action \u00B7 scale \u00B7 type', id: 'actionscaletype' },
{ data1: 'across', data2: 'a', label: 'data \u00B7 scale \u00B7 usage', id: 'datascaleusage' },
{ data1: 'within', data2: 'e', label: 'what type', id: 'whatdata' },
{ data1: 'within', data2: 'd', label: 'how', id: 'whatdata' },
{ data1: 'within', data2: 'c', label: 'why', id: 'why' },
{ data1: 'within', data2: 'b', label: 'what scale', id: 'whatscale' },
{ data1: 'within', data2: 'a', label: 'what data', id: 'whatdata' }
];
// variables for tree that require custom order (3rd argument to setTreeStructure)
var data_type_sum_short_order = ['Cat', 'Quant', 'Cat, Ord', 'Cat, Quant', 'Cat, Ord, Quant'],
categories_wo_aid_sum_order = ['under 10 categories', '10-99 categories', '100-999 categories', '1000 and more categories'],
values_sum_order = ['under 20 values', 'Dozens of values', 'Hundreds of values', 'Thousands of values', 'Infinite no. of values'],
action_analysis_order = ['Explain', 'Discover', 'Discover and Explain'],
main_mark_order = ['Points', 'Lines', 'Areas', 'Connection', 'Containment'],
family_order = ['Bar chart', 'Line chart', 'Map', 'Tree', 'Network', 'Scatterplot', 'Distribution plot', 'Euler diagram', 'Flow diagram', 'Pie chart', 'Table', 'Multidimensional plot2', 'Other'];
// set the arguments and get the data
var treeData = {}; // holds all tree data
treeData.within = {}; // data for within visual process stages
treeData.across = {}; // data for across visual process stages
// these are the nest combinations I chose (there can be more or less)
treeData.within.a = setTreeStructure(['dataset_type', 'data_type_sum_short'], reportData, [[],data_type_sum_short_order]);
treeData.within.b = setTreeStructure(['number_of_variables_rough', 'categories_wo_aid_sum', 'values_sum'], reportData, [[],categories_wo_aid_sum_order,values_sum_order]);
treeData.within.c = setTreeStructure(['target_usage_munzner_main', 'action_analysis', 'action_query'], reportData, [[],action_analysis_order,[]]);
treeData.within.d = setTreeStructure(['main_mark', 'main_channel'], reportData, [main_mark_order,[]]);
treeData.within.e = setTreeStructure(['type', 'family'], reportData, [[],family_order]);
treeData.across.a = setTreeStructure(['data_type_sum_short', 'number_of_variables_rough', 'target_usage_alternative_main'], reportData, [data_type_sum_short_order,[],[]]);
treeData.across.b = setTreeStructure(['action_analysis', 'number_of_variables_rough', 'family'], reportData, [action_analysis_order,[],family_order]);
treeData.across.c = setTreeStructure(['values_sum', 'action_search', 'action_query'], reportData, [values_sum_order,[],[]]);
// set buttons
d3.select('div#containerTree').selectAll('.buttons')
.data(treeDataNames)
.enter()
.append('button')
.attr('class', 'setTreeStructure')
.attr('id', function(d) { return d.id; })
.html(function(d) { return d.label; });
// needs to be in Object for d3.tree() - - -
var dataTree = {};
dataTree.key = "Chart tree";
dataTree.children = treeData.within.a;
log('dataTree', dataTree);
// save data to a global window-object ----------------------
// save data as a window object to let every function have access to it http://stackoverflow.com/questions/9491885/csv-to-array-in-d3-js
window.gvis = {};
gvis.dataTree = dataTree; // save data for tree as window object
gvis.dataSearch = searchData; // save data for searchbox as window object
gvis.allData = reportData; // save data for the report cards as window object
gvis.root; // the data variable used by the tree
gvis.searchField; // search variable (for eval)
gvis.searchText; // text to search
// initialise select and tree -------------------------------
// initialise the select box and the tree (report cards will be updated by the select module)
vis.selectbox.init();
vis.tree.init();
// transition to basic instructions
d3.selectAll('div#noteSearch, div#noteBrowse')
.style('display', 'inherit')
.style('opacity', 0)
.transition()
.duration(500)
.delay(750)
.style('opacity', 1);
// add button handlers
d3.selectAll('.setTreeStructure').on('mousedown', function() {
var self = d3.select(this),
data1 = self.data()[0].data1,
data2 = self.data()[0].data2,
data = treeData[data1][data2];
// log(data);
dataTree.children = data;
vis.tree.clearAll(gvis.root);
gvis.root.children.forEach(vis.tree.collapse);
vis.tree.update(gvis.root);
}); //
}); // data load and prep
// select box --------------------------------------------------------------------------------------------------------
vis.selectbox = (function() {
    // Module driving the tag search box (select2), the search-mode buttons
    // ('any' vs 'strict') and the refresh button. Selected tags are matched
    // against each report's searchTags and the hits handed to vis.cards.
    var mode = 'any'; // active search mode, toggled by the .method buttons
    var my = {},
        matched;      // reports matching the current tag selection
    my.init = function() {
        // set inline-width with .style('width') if required or remove for auto width (as long as longest tag)
        // Build the nested <optgroup>/<option> structure from the globally
        // stored search data (https://gist.github.com/jfreels/6811178)
        d3.select('select#select')
            .selectAll('.optgroup')
            .data(gvis.dataSearch) // data saved globally
            .enter()
            .append('optgroup')
            .attr('label', function(d) { return d.group; })
            .selectAll('.option')
            .data(function(d) { return d.tags; })
            .enter()
            .append('option')
            .attr('value', function(d) { return d; })
            .text(function(d) { return d; });
        // Search-mode buttons: highlight the clicked one, grey the rest,
        // and remember its id ('any' or 'strict') as the active mode.
        d3.select('button#any').style('color', '#555');
        d3.selectAll('.method').on('click', function() {
            d3.selectAll('.method').style('color', '#bbb');
            d3.select(this).style('color', '#555');
            mode = d3.select(this).attr('id');
            // !!! best to trigger new search with currently searched for tags; alternative - clear searchbox and trigger new search
        }); // toggle search method used for search
        // Initialise select2 on the (now populated) select element.
        $('#select').select2({
            placeholder: "Start typing or select...",
            allowClear: true
        });
        // Filter the report data down to the tags currently selected in the
        // box and hand the result to the cards module.
        function showSelectedReports() {
            // full array of selected tag objects: https://github.com/select2/select2/issues/2929
            var selected = $('#select').select2('data');
            var chosenTags = $.map(selected, function(o) { return o.text; }); // just the tag texts
            matched = gvis.allData.filter(function(report) {
                var hits = _.intersection(report.searchTags, chosenTags).length;
                if (mode === 'any') return hits > 0;                   // 'any': at least one chosen tag matches
                if (mode === 'strict') return hits === chosenTags.length; // 'strict': every chosen tag matches
            }); // keep only the data-index members that match the selection
            vis.cards.updateCards(matched);
        } // showSelectedReports
        $('#select').on('change', showSelectedReports);             // re-filter when the selection changes
        d3.select('button#refresh').on('click', showSelectedReports); // re-filter on refresh button
        d3.select('button#refresh').on('mouseover', function() {
            d3.select(this).transition().style({'background': '#eaeaea', 'color': '#555'});
        }); // button hover style
        d3.select('button#refresh').on('mouseout', function() {
            d3.select(this).transition().style({'background': '#f7f7f7', 'color': '#999'});
        }); // button hover style
        // When the dropdown closes with nothing selected: collapse the tree,
        // clear the cards and fade the instruction notes back in.
        $('#select').on('select2:close', function() {
            if ($('#select').select2('data').length !== 0) return;
            vis.tree.clearAll(gvis.root);
            gvis.root.children.forEach(vis.tree.collapse);
            vis.tree.update(gvis.root);
            d3.selectAll('.report').remove(); // the listener reacts differently for strict search mode and shows all reports upon closing. lazy remove instead of de-bugging
            d3.selectAll('div#noteSearch, div#noteBrowse')
                .style('display', 'inherit')
                .style('opacity', 0)
                .transition().duration(500)
                .style('opacity', .5);
            d3.select('div#cards')
                .style('overflow', 'hidden'); // remove scrollbar on pc's
        });
    }; // my.init
    return my;
})(); // vis.selectbox module
// report cards ------------------------------------------------------------------------------------------------------
vis.cards = (function() {
    // Module rendering the report cards (one card per matched report) and
    // wiring their event handlers (image preview, tree linking, source link,
    // more-info toggle).
    var my = {};
    /**
     * Rebuild all report cards for the given report objects.
     * Cards are fully re-created on every call - no enter/update/exit cycle.
     * @param {Array<Object>} data - report objects (rows of gvis.allData)
     */
    my.updateCards = function(data) {
        d3.selectAll('div#noteSearch, div#noteBrowse').style('display', 'none');
        d3.selectAll('.containerCards').remove(); // hard clean, no enter - update - exit
        d3.select('div#cards')
            .style('overflow', 'scroll')
            .style('overflow-x', 'hidden'); // switch on only y-scrolling (for windows run browsers)
        var containerCards = d3.select('div#cards')
            .append('div')
            .attr('class', 'containerCards');
        d3.selectAll('.report').remove();
        var reports = containerCards.selectAll('.report')
            .data(data);
        reports.enter()
            .append('div')
            .attr('class', 'report')
            .attr('id', function(d) { return spaceLess(d.name); })
            .style('opacity', 0)
            .style('background-color', '#fff')
            .transition()
            .style('opacity', 1)
            .style('background-color', function(d, i) { return i % 2 === 0 ? '#F7F7F7' : '#fff'; }); // zebra stripes
        // headers
        reports.append('h1')
            .attr('class', 'header1')
            .html(function(d) { return d.name; });
        reports.append('h3')
            .attr('class', 'header2')
            .html(function(d) {
                // only show the alternative names when the data provides any
                return d.alternative_names !== "NA" ?
                    'Type: ' + d.type + ' \u00B7 Family: ' + d.family + ' \u00B7 Alternative: ' + d.alternative_names :
                    'Type: ' + d.type + ' \u00B7 Family: ' + d.family;
            });
        // buttons
        var buttonList = reports.append('ul')
            .attr('class', 'taglist');
        buttonList.append('li')
            .attr('class', 'buttonTags')
            .attr('id', 'browseTree')
            .html('browse tree');
        buttonList.append('li')
            .attr('class', 'buttonTags')
            .attr('id', 'moreInfo')
            .html('more info');
        buttonList.append('li')
            .attr('class', 'buttonTags')
            .attr('id', 'source')
            .html('web/source');
        buttonList.append('li')
            .attr('class', 'buttonTags')
            .attr('id', 'goodExample')
            .html('example(s)');
        // image
        reports.append('img')
            .attr('src', function(d) { return 'images/graphs/' + d.image_file; })
            .attr('id', 'graph');
        // description
        reports.append('p')
            .attr('class', 'description')
            .html(function(d) { return d.description; });
        // paragraph element underneath the floated image to clear the float
        // this way the image doesn't overlap the container in any case
        reports.append('p')
            .attr('class', 'clearFix')
            .html('');
        // more info (hidden by default, toggled by the 'more info' button)
        reports.append('p')
            .attr('class', 'moreInfoText')
            .attr('id', function(d) { return 'moreInfoText' + spaceLess(d.name); })
            .html(function(d) {
                // bug fix: 'What scale' was concatenated twice; also use valid <br> tags
                return '<strong>What</strong> data: ' + arrToStr(d.whatData)
                    + ' \u007C <strong>What</strong> scale: ' + arrToStr(d.whatScale)
                    + ' \u007C <strong>Why</strong> targets: ' + arrToStr(d.whyTargets)
                    + ' \u007C <strong>Why</strong> actions: ' + arrToStr(d.whyActions)
                    + ' \u007C <strong>How</strong> encoding: ' + arrToStr(d.howMarksChannels)
                    + '<br><br>'
                    + '<strong>History</strong>: ' + d.history;
            });
        /* button-listeners and handlers */
        // image hover shows picture in big
        d3.selectAll('img#graph').on('mouseover', function() {
            // get client width (the browser-safe way)
            var w = window.innerWidth || document.documentElement.clientWidth || document.body.clientWidth;
            var h = window.innerHeight || document.documentElement.clientHeight || document.body.clientHeight;
            // get the image tag code (including inline style to set max-width and max-height)
            var image = '<img src=' + this.src + ' style="max-width:' + w / 2 + 'px; max-height:' + (h * 0.9) + 'px">';
            // tooltip to always be 5% from top and a maximum of 90% tall so that it's always in sight
            d3.select('div.tooltip')
                .style('left', (d3.event.pageX + 20) + 'px')
                .style('top', '5vh')
                .html(image)
                .style('opacity', 0)
                .transition()
                .style('opacity', 0.9);
        });
        d3.selectAll('img#graph').on('mousemove', function() {
            // only move horizontally
            d3.select('div.tooltip')
                .style('left', (d3.event.pageX + 20) + 'px')
                .style('top', '5vh');
        });
        d3.selectAll('img#graph').on('mouseout', function() {
            d3.select('div.tooltip')
                .transition()
                .style('opacity', 0);
        });
        // find report in tree
        // bug fix: selector said 'header2' (element selector) instead of '.header2' (class)
        d3.selectAll('.header1, .header2, #browseTree, .description').on('mousedown', function(d) {
            vis.tree.clearAll(gvis.root); // collapse data
            vis.tree.expandAll(gvis.root); // expand data
            vis.tree.update(gvis.root); // show tree
            // !!! to sort out when we get there, but probably best to pass a 'search' object with searchField, searchText and maybe the data ?
            gvis.searchField = "d.identifier"; // find the right node(s)...
            gvis.searchText = d.identifier;
            vis.tree.searchTree(gvis.root);
            gvis.root.children.forEach(vis.tree.collapseAllNotFound); // collapse all non-found
            vis.tree.update(gvis.root); // update
        });
        // go to web
        d3.selectAll('#source').on('mousedown', function(d) {
            // open report source in new window; pop-up blockers make window.open return null
            var url = d.source;
            var win = window.open(url, '_blank');
            win ? win.focus() : alert('please allow pop-ups for this site'); // this needs to get tested
        });
        // show more info
        d3.selectAll('#moreInfo').on('mousedown', function(d) {
            if (d3.select('#moreInfoText' + spaceLess(d.name)).style('display') === 'none') {
                d3.select(this).html('less info'); // toggle name
                d3.select('#moreInfoText' + spaceLess(d.name))
                    .style('display', 'inherit')
                    .style('font-size', 1e-6 + 'px')
                    .transition()
                    .style('font-size', '1em'); // toggle display
            } else {
                d3.select(this).html('more info'); // toggle name
                d3.select('#moreInfoText' + spaceLess(d.name))
                    .style('font-size', '1em')
                    .transition()
                    .style('font-size', 1e-6 + 'px'); // toggle display
                setTimeout(function() {
                    d3.select('#moreInfoText' + spaceLess(d.name))
                        .style('display', 'none');
                }, 250); // wait until transition has finished before setting display to none
            } // toggle based on display property
        });
    }; // vis.cards.updateCards()
    return my;
})(); // vis.cards module
// tree --------------------------------------------------------------------------------------------------------------
vis.tree = (function() {
    // Collapsible tree module (d3 v3 layout.tree): renders gvis.root as a
    // horizontal node-link diagram, supports collapsing/expanding subtrees
    // (children <-> _children convention) and highlighting search matches
    // via node.class === "found".
    var my = {};
    //===============================================
    // Recursively walk the tree (visible children and hidden _children) and
    // mark every node on the path from a match up to the root as "found".
    // Relies on gvis.searchField (a code string) and gvis.searchText (the
    // value to match) being set by the caller before invocation.
    my.searchTree = function(d) {
        if (d.children)
            d.children.forEach(my.searchTree);
        else if (d._children)
            d._children.forEach(my.searchTree);
        // NOTE(review): eval of a code string such as "d.identifier" (see the
        // card handlers in this file). Works, but is eval-based property
        // access - confirm the searchField strings are never user-supplied
        // before reusing this pattern elsewhere.
        var searchFieldValue = eval(gvis.searchField);
        if (searchFieldValue && searchFieldValue.match(gvis.searchText)) {
            // Walk parent chain
            var ancestors = [];
            var parent = d;
            while (typeof(parent) !== "undefined") {
                ancestors.push(parent);
                parent.class = "found";
                parent = parent.parent;
            }
        }
    }
    //===============================================
    // Clear the "found" marker on every node (visible or hidden).
    my.clearAll = function(d) {
        d.class = "";
        if (d.children)
            d.children.forEach(my.clearAll);
        else if (d._children)
            d._children.forEach(my.clearAll);
    }
    //===============================================
    // Collapse a node and its entire subtree: move children into _children.
    my.collapse = function(d) {
        if (d.children) {
            d._children = d.children;
            d._children.forEach(my.collapse);
            d.children = null;
        }
    }
    //===============================================
    // Collapse every subtree that is not on a "found" path; found nodes
    // stay expanded so search results remain visible.
    my.collapseAllNotFound = function(d) {
        if (d.children) {
            if (d.class !== "found") {
                d._children = d.children;
                d._children.forEach(my.collapseAllNotFound);
                d.children = null;
            } else
                d.children.forEach(my.collapseAllNotFound);
        }
    }
    //===============================================
    // Expand a node and its entire subtree: move _children back to children.
    my.expandAll = function(d) {
        if (d._children) {
            d.children = d._children;
            d.children.forEach(my.expandAll);
            d._children = null;
        } else if (d.children)
            d.children.forEach(my.expandAll);
    }
    //= private func ================================
    // Toggle children on click.
    function toggle(d) {
        if (d.children) {
            d._children = d.children;
            d.children = null;
        } else {
            d.children = d._children;
            d._children = null;
        }
        my.clearAll(gvis.root);
        my.update(d);
        // $("#searchName").select2("val", "");
        // $("#searchName").val("").trigger("change");
    }
    // Layout geometry derived from the container's current client size.
    var width = document.getElementById('containerTree').clientWidth * .9,
        height = document.getElementById('containerTree').clientHeight * .9,
        margin = { top: height * .05, right: width * .1, bottom: height * 1e-6, left: width * .2 };
    var i = 0,           // monotonically increasing node-id counter for the data join
        duration = 600;  // transition duration in ms
    var tree = d3.layout.tree()
        .size([height, width]);
    var diagonal = d3.svg.diagonal()
        .projection(function(d) { return [d.y, d.x]; }); // x/y swapped: horizontal tree
    var svg; // needs to be accessible for both my.init and my.update function
    var expandBouncer = false; // for opening/closing all nodes on index-node double-click
    // public variables ----------------------------
    // Create the svg, seed the root position, collapse the first level and
    // draw the initial tree. Also wires the root-node double-click toggle.
    my.init = function() {
        svg = d3.select('div#containerTree')
            .append('svg')
            .attr('id', 'svg')
            .attr('width', width + margin.right)
            .attr('height', height + margin.top + margin.bottom)
            .append('g')
            .attr('transform', 'translate(' + margin.left + ',' + margin.top + ')');
        // rename data for use in tree
        gvis.root = gvis.dataTree;
        gvis.root.x0 = height / 2;
        gvis.root.y0 = 0;
        gvis.root.children.forEach(my.collapse); // applies collapse only to the root's children
        my.update(gvis.root); // calls the vis-building function
        // allow opening/closing entire tree on index-node double-click
        // (the root is rendered last because update() reverses the node list)
        var l = d3.selectAll('.node')[0].length;
        var indexNode = d3.selectAll('.node')[0][l-1];
        indexNode.id = 'indexNode';
        d3.select('.node#indexNode').on('dblclick', function(){
            expandBouncer = !expandBouncer;
            if(expandBouncer){
                my.collapse(gvis.root);
                my.expandAll(gvis.root);
                my.update(gvis.root);
            } else {
                my.clearAll(gvis.root);
                gvis.root.children.forEach(vis.tree.collapse);
                my.update(gvis.root);
            }
        }); // listener and handler for opening/closing all nodes
    }
    // Standard d3 v3 general-update-pattern: recompute the layout, then
    // enter/update/exit nodes and links with transitions originating from
    // the clicked node's previous position (source.x0/y0).
    my.update = function(source) {
        // Compute the new tree layout.
        var nodes = tree.nodes(gvis.root).reverse(), // first node will be last in the returned object. doesn't seem to impact on the layout though
            links = tree.links(nodes);
        // Normalize for fixed-depth.
        nodes.forEach(function(d) { d.y = d.depth * 100; }); // change node-distances (width of the vis)
        // Update the nodes…
        var node = svg.selectAll("g.node")
            .data(nodes, function(d) { return d.id || (d.id = ++i); }); // key function. Rank by id or assign an id based on the last available id + 1
        // Enter any new nodes at the parent's previous position.
        var nodeEnter = node.enter().append("g")
            .attr("class", "node")
            .attr("transform", function(d) { return "translate(" + source.y0 + "," + source.x0 + ")"; })
            .on("click", toggle); // moves child objects into children key
        nodeEnter.append("circle")
            .attr("r", 1e-6)
            .style("fill", function(d) { return d._children ? "#ccc" : "#fff"; });
        nodeEnter.append("text")
            .classed('reportNode', function(d) { return !d.children && !d._children; })
            .attr("x", function(d) { return d.children || d._children ? -10 : 10; })
            .attr("dy", ".35em")
            .attr("text-anchor", function(d) { return d.children || d._children ? "end" : "start"; })
            .text(function(d) { return d.key || d.name; }) // returns the parent name for all parents and the report name for all leave nodes
            .style("fill-opacity", 1e-6); // small number for transition
        // event listener and handler for node-report-link
        d3.selectAll('.reportNode').on('mousedown', function(d){
            // option (1) show report card
            vis.cards.updateCards([d]);
            // // option (2) open report in new window
            // var url = d.Link;
            // var win = window.open(url, '_blank');
            // win ? win.focus() : alert('please allow pop-ups for this site'); // this needs to get tested
        });
        // Transition nodes to their new position.
        var nodeUpdate = node.transition()
            .duration(duration)
            .attr("transform", function(d) { return "translate(" + d.y + "," + d.x + ")"; });
        nodeUpdate.select("circle")
            .attr("r", 4.5)
            .style("fill", function(d) {
                if (d.class === "found") {
                    return "#437F3F"; // dark green
                } else if (d._children) {
                    return "#ccc";
                } else {
                    return "#fff";
                }
            })
            .style("stroke", function(d) {
                if (d.class === "found") {
                    return "#437F3F"; // dark green
                }
            });
        nodeUpdate.select("text")
            .style("fill-opacity", 1);
        // Transition exiting nodes to the parent's new position.
        var nodeExit = node.exit().transition()
            .duration(duration)
            .attr("transform", function(d) { return "translate(" + source.y + "," + source.x + ")"; })
            .remove();
        nodeExit.select("circle")
            .attr("r", 1e-6);
        nodeExit.select("text")
            .style("fill-opacity", 1e-6);
        // Update the links…
        var link = svg.selectAll("path.link")
            .data(links, function(d) { return d.target.id; }); // key function is the id - equivalent to the node id above
        // Enter any new links at the parent's previous position.
        link.enter().insert("path", "g")
            .attr("class", "link")
            .attr("d", function(d) {
                var o = {x: source.x0, y: source.y0};
                return diagonal({source: o, target: o});
            });
        // Transition links to their new position.
        link.transition()
            .duration(duration)
            .attr("d", diagonal)
            .style("stroke", function(d) {
                if (d.target.class === "found") {
                    return "#437F3F"; // dark green
                }
            });
        // Transition exiting nodes to the parent's new position.
        link.exit().transition()
            .duration(duration)
            .attr("d", function(d) {
                var o = {x: source.x, y: source.y};
                return diagonal({source: o, target: o});
            })
            .remove();
        // Stash the old positions for transition.
        nodes.forEach(function(d) {
            d.x0 = d.x;
            d.y0 = d.y;
        });
    } // my.update
    return my;
})(); // vis.tree
/* ---- search.js ---- dataindex js | v.7.0.4 | 1/7/15 | lv */
/* width of svg adjusted (to get the container as wide as possible w/o x-scrollbars) */
// utility ---------------------------------------------------------------------------------------------------------
var log = console.log.bind(console); // snippet log
var dir = console.dir.bind(console); // snippet dir
// remove all non-letter characters (used to build DOM-safe ids from report names)
var spaceLess = function(x) { return x.replace(/[^A-Za-z]/g, ''); };
// join list items with ' \u00B7 ' (middle dot); used e.g. in the more-info area.
// bug fix: the old pattern /,|,\s/g never reached the ',\s' alternative
// (',' matches first), leaving a double space after ', '-separated input;
// /,\s?/g consumes the comma plus an optional following whitespace char.
var arrToStr = function(x) { return x.toString().replace(/,\s?/g, ' \u00B7 '); };
// ! not used, replaced with lodash's _.intersection !
// Prototype adjustments: compare 2 arrays and return true if there are any matches (*this* is the Array to call it on)
// NOTE(review): extending native prototypes is discouraged (name-collision
// risk with other scripts); kept only for backward compatibility since the
// file says it is unused.
Array.prototype.anymatch = function(arr) {
    for (var i = 0; i < this.length; i++) {
        if (arr.indexOf(this[i]) > -1) return true; // early exit on first match (old version always scanned the whole array)
    }
    return false;
};
var util = {};
// Return the unique values of an array, preserving first-seen order.
// Values are compared by their string form (they become object keys),
// matching the original lookup-table approach.
// http://jszen.com/best-way-to-get-unique-values-of-an-array-in-javascript.7.html
// bug fix: the lookup table was a plain {}, so values that collide with
// Object.prototype members (e.g. 'toString') were silently dropped;
// Object.create(null) gives a prototype-free dictionary.
util.unik = function(arr) {
    var seen = Object.create(null),
        result = [];
    for (var i = 0; i < arr.length; i++) { // do for each element of the array
        if (!seen[arr[i]]) {               // if this element is not yet recorded
            seen[arr[i]] = true;           // record it
            result.push(arr[i]);           // and keep it in the output
        }
    }
    return result;
};
var vis = vis || {}; // top namespace
// data ------------------------------------------------------------------------------------------------------------
/* example data:
// tag-groups
var tags = [
{ group: 'tight tags', tags: ['pimmeldimmelbummelbum', 'pom', 'pum'] },
{ group: 'broad tags', tags: ['bim', 'bom', 'bum', 'tim', 'tom', 'tum'] },
{ group: 'report names', tags: ['DTH-report', 'Nordics Brand Tracking', 'Scandi Cross-Promo', 'Eybrow-length report'] }
];
// data-index
var index = [
{ name: 'blub', id: 'BUB', descr: 'this is a report about blubs', tags: ['pim', 'pom', 'DTH-report', 'pum'] },
{ name: 'blob', id: 'BOB', descr: 'this is a report about blobs', tags: ['bim', 'bom', 'bum', 'Eybrow-length report'] },
{ name: 'blab', id: 'BAB', descr: 'this is a report about blabs', tags: ['pimmeldimmelbummelbum', 'pimmeldimmelbummelbam', 'pimmeldimmelbummelbim'] },
{ name: 'blib', id: 'BIB', descr: 'this is a report about blibs', tags: ['tim', 'tom', 'Scandi Cross-Promo', 'tum', ] }
];
*/
d3.tsv('data/data.tsv', function(err, data) {
    // Load the report index and reshape it for three consumers:
    // (1) the select2 search box (searchData), (2) the report cards
    // (reportData with derived tag arrays) and (3) the collapsible tree
    // (nested treeData). Then initialise the select box and tree modules.
    if (err) log('data loading error');
    // log('data', data);
    // search data -------------------------------------------
    /**
     * Collect the unique tags found in one column of the raw data.
     * @param {string} variable - column name in the tsv data
     * @param {string} groupname - optgroup label shown to the user
     * @returns {{group: string, tags: Array}} object shaped for the select box
     */
    util.searchdataTransform = function(variable, groupname) {
        var arr = [];
        data.forEach(function(d) {
            // each cell can hold a comma-separated tag list; fan it out
            d[variable].split(', ').forEach(function(tag) { arr.push(tag); });
        });
        arr = util.unik(arr).sort(); // unique tags, sorted ascending
        return { group: groupname, tags: arr };
    }; // util.searchdataTransform
    // note: this can be moved up to the utility section but we would need to make the original data (here called 'data') available globally.
    // decide here which tags should become searchable.
    // Then create searchData array holding all the searchable tags from all reports
    var tagobjects = {}; // (param 1: variable name, param 2: semantic name for the user)
    tagobjects.a = util.searchdataTransform('name', 'Name');
    tagobjects.b = util.searchdataTransform('dataset_type', 'What | Data Type (Munzner)');
    tagobjects.c = util.searchdataTransform('dataset_type_shneiderman', 'What | Data Type (Shneiderman)');
    tagobjects.d = util.searchdataTransform('number_of_variables_rough', 'What | Number of variables');
    tagobjects.e = util.searchdataTransform('categories_wo_aid_sum', 'What | Number of categories');
    tagobjects.f = util.searchdataTransform('values_sum', 'What | Number of values');
    tagobjects.g = util.searchdataTransform('data_type_sum_long', 'What | Variable type');
    tagobjects.h = util.searchdataTransform('target_usage_munzner_all', 'Why | General target of visual (Munzner)');
    tagobjects.i = util.searchdataTransform('target_specific_munzner', 'Why | Specific target of visual (Munzner)');
    tagobjects.j = util.searchdataTransform('target_usage_alternative_all', 'Why | General target of visual (alternative)');
    tagobjects.k = util.searchdataTransform('action_analysis', 'Why | Analysis action');
    tagobjects.l = util.searchdataTransform('action_search', 'Why | Search action');
    tagobjects.m = util.searchdataTransform('action_query_all', 'Why | Query action');
    tagobjects.n = util.searchdataTransform('all_marks', 'How | Visual mark');
    tagobjects.o = util.searchdataTransform('channel', 'How | Visual channel');
    tagobjects.p = util.searchdataTransform('type', 'How | Chart type');
    tagobjects.q = util.searchdataTransform('family', 'How | Chart family');
    var searchData = []; // final search data: array of {group, tags} objects
    for (var key in tagobjects) { // bug fix: 'key' was an implicit global
        searchData.push(tagobjects[key]);
    }
    // log('searchData', searchData);
    // report card data -----------------------------------------
    // (1) Move all string-lists into arrays (to have clean objects) and
    // (2) put all searchable tags into an extra property per object called 'searchTags'.
    // This array searchTags will later be matched with the user-selected tags which were produced for and live in searchData
    var reportData = data;
    reportData.forEach(function(row) {
        // turn comma-separated lists into arrays
        row.data_type = row.data_type.split(', ');
        row.target_usage_munzner_all = row.target_usage_munzner_all.split(', ');
        row.target_specific_munzner = row.target_specific_munzner.split(', ');
        row.target_usage_alternative_all = row.target_usage_alternative_all.split(', ');
        row.action_query_all = row.action_query_all.split(', ');
        row.all_marks = row.all_marks.split(', ');
        row.channel = row.channel.split(', ');
        // prepare extra arrays for report cards
        // (_.union requires arrays as input, hence single strings are wrapped in [brackets])
        row.whatData = _.union(
            [row.dataset_type],
            [row.dataset_type_shneiderman],
            [row.data_type_sum_long]
        );
        row.whatScale = _.union(
            [row.number_of_variables_exact],
            [row.categories_wo_aid],
            [row.values_detailed]
        );
        row.whyTargets = _.union(
            row.target_usage_munzner_all,
            row.target_specific_munzner,
            row.target_usage_alternative_all
        );
        row.whyActions = _.union(
            [row.action_analysis],
            [row.action_search],
            row.action_query_all
        );
        row.howMarksChannels = _.union(
            row.all_marks,
            row.channel
        );
        row.cardTags = _.union(
            row.whatData,
            row.whatScale,
            row.whyTargets,
            row.whyActions,
            row.howMarksChannels
        );
        // create list of all searchable tags to display correct report cards
        row.searchTags = _.union(
            [row.name],
            [row.dataset_type],
            [row.dataset_type_shneiderman],
            [row.number_of_variables_rough],
            [row.categories_wo_aid_sum],
            [row.values_sum],
            [row.data_type_sum_long],
            row.target_usage_munzner_all,
            row.target_specific_munzner,
            row.target_usage_alternative_all,
            [row.action_analysis],
            [row.action_search],
            row.action_query_all,
            row.all_marks,
            row.channel,
            [row.type],
            [row.family]
        );
    }); // for each row
    // log('reportData', reportData);
    // tree data -----------------------------------------------
    /**
     * Build nested data for the tree using d3.nest, one key level per entry
     * of arr. Each level is sorted ascending unless a custom order array is
     * supplied for it. Generalized from the old copy-pasted 2/3/4-level
     * branches to any depth >= 1.
     * @param {Array<string>} arr - column names to nest by, outermost first
     * @param {Array<Object>} data - flat report rows
     * @param {Array<Array<string>>} order - per level: [] for ascending, or an explicit key order
     * @returns {Array} nested nodes with 'children' arrays (d3.layout.tree shape)
     */
    var setTreeStructure = function(arr, data, order) {
        var nest = d3.nest();
        arr.forEach(function(variable, i) {
            var customOrder = order[i];
            nest.key(function(d) { return d[variable]; })
                .sortKeys(customOrder.length === 0
                    ? d3.ascending
                    : function(a, b) { return customOrder.indexOf(a) - customOrder.indexOf(b); });
        });
        var dataNest = nest.entries(data);
        // rename 'values' to 'children' on every key level (depth counts down)
        function relabel(nodes, depth) {
            nodes.forEach(function(node) {
                node.children = node.values;
                delete node.values;
                if (depth > 1) relabel(node.children, depth - 1);
            });
        }
        relabel(dataNest, arr.length);
        return dataNest;
    }; // setTreeStructure()
    // get the dataset names into an array (object you want to see first in button list needs to be last in array to show up first in view as it gets .appended)
    // bug fix: the ids for 'what type' and 'how' both duplicated 'whatdata'
    var treeDataNames = [
        { data1: 'across', data2: 'c', label: 'scale \u00B7 action', id: 'scaleaction' },
        { data1: 'across', data2: 'b', label: 'action \u00B7 scale \u00B7 type', id: 'actionscaletype' },
        { data1: 'across', data2: 'a', label: 'data \u00B7 scale \u00B7 usage', id: 'datascaleusage' },
        { data1: 'within', data2: 'e', label: 'what type', id: 'whattype' },
        { data1: 'within', data2: 'd', label: 'how', id: 'how' },
        { data1: 'within', data2: 'c', label: 'why', id: 'why' },
        { data1: 'within', data2: 'b', label: 'what scale', id: 'whatscale' },
        { data1: 'within', data2: 'a', label: 'what data', id: 'whatdata' }
    ];
    // variables for tree that require custom order (3rd argument to setTreeStructure)
    var data_type_sum_short_order = ['Cat', 'Quant', 'Cat, Ord', 'Cat, Quant', 'Cat, Ord, Quant'],
        categories_wo_aid_sum_order = ['under 10 categories', '10-99 categories', '100-999 categories', '1000 and more categories'],
        values_sum_order = ['under 20 values', 'Dozens of values', 'Hundreds of values', 'Thousands of values', 'Infinite no. of values'],
        action_analysis_order = ['Explain', 'Discover', 'Discover and Explain'],
        main_mark_order = ['Points', 'Lines', 'Areas', 'Connection', 'Containment'],
        family_order = ['Bar chart', 'Line chart', 'Map', 'Tree', 'Network', 'Scatterplot', 'Distribution plot', 'Euler diagram', 'Flow diagram', 'Pie chart', 'Table', 'Multidimensional plot2', 'Other'];
    // set the arguments and get the data
    var treeData = {};     // holds all tree data
    treeData.within = {};  // data for within visual process stages
    treeData.across = {};  // data for across visual process stages
    // these are the nest combinations I chose (there can be more or less)
    treeData.within.a = setTreeStructure(['dataset_type', 'data_type_sum_short'], reportData, [[], data_type_sum_short_order]);
    treeData.within.b = setTreeStructure(['number_of_variables_rough', 'categories_wo_aid_sum', 'values_sum'], reportData, [[], categories_wo_aid_sum_order, values_sum_order]);
    treeData.within.c = setTreeStructure(['target_usage_munzner_main', 'action_analysis', 'action_query'], reportData, [[], action_analysis_order, []]);
    treeData.within.d = setTreeStructure(['main_mark', 'main_channel'], reportData, [main_mark_order, []]);
    treeData.within.e = setTreeStructure(['type', 'family'], reportData, [[], family_order]);
    treeData.across.a = setTreeStructure(['data_type_sum_short', 'number_of_variables_rough', 'target_usage_alternative_main'], reportData, [data_type_sum_short_order, [], []]);
    treeData.across.b = setTreeStructure(['action_analysis', 'number_of_variables_rough', 'family'], reportData, [action_analysis_order, [], family_order]);
    treeData.across.c = setTreeStructure(['values_sum', 'action_search', 'action_query'], reportData, [values_sum_order, [], []]);
    // set buttons that switch between the tree structures above
    d3.select('div#containerTree').selectAll('.buttons')
        .data(treeDataNames)
        .enter()
        .append('button')
        .attr('class', 'setTreeStructure')
        .attr('id', function(d) { return d.id; })
        .html(function(d) { return d.label; });
    // needs to be in Object for d3.tree() - - -
    var dataTree = {};
    dataTree.key = "Chart tree";
    dataTree.children = treeData.within.a; // default structure shown first
    log('dataTree', dataTree);
    // save data to a global window-object ----------------------
    // save data as a window object to let every function have access to it http://stackoverflow.com/questions/9491885/csv-to-array-in-d3-js
    window.gvis = {};
    gvis.dataTree = dataTree;    // data for the tree
    gvis.dataSearch = searchData; // data for the searchbox
    gvis.allData = reportData;   // data for the report cards
    gvis.root;        // (no-op) documents: data variable used by the tree
    gvis.searchField; // (no-op) documents: search variable (for eval)
    gvis.searchText;  // (no-op) documents: text to search
    // initialise select and tree -------------------------------
    // initialise the select box and the tree (report cards will be updated by the select module)
    vis.selectbox.init();
    vis.tree.init();
    // transition to basic instructions
    d3.selectAll('div#noteSearch, div#noteBrowse')
        .style('display', 'inherit')
        .style('opacity', 0)
        .transition()
        .duration(500)
        .delay(750)
        .style('opacity', 1);
    // add button handlers: swap the tree's children for the chosen nesting
    // and redraw in the collapsed state
    d3.selectAll('.setTreeStructure').on('mousedown', function() {
        var self = d3.select(this),
            data1 = self.data()[0].data1,
            data2 = self.data()[0].data2,
            chosen = treeData[data1][data2];
        dataTree.children = chosen;
        vis.tree.clearAll(gvis.root);
        gvis.root.children.forEach(vis.tree.collapse);
        vis.tree.update(gvis.root);
    }); // tree-structure buttons
}); // data load and prep
// select box --------------------------------------------------------------------------------------------------------
vis.selectbox = (function() {
var searchMethodBouncer = 'any';
var my = {},
reports;
my.init = function() {
// set inline-width with .style('width') if required or remove for auto width (as long as longest tag)
// build nested select for tags (https://gist.github.com/jfreels/6811178)
var selectOptions = d3.select('select#select')
.selectAll('.optgroup')
.data(gvis.dataSearch) // data saved globally
.enter()
.append('optgroup')
.attr('label', function(d) { return d.group; })
.selectAll('.option')
.data(function(d) { return d.tags; })
.enter()
.append('option')
.attr('value', function(d) { return d; })
.text(function(d) { return d; });
// button mechanics and related styles
d3.select('button#any').style('color', '#555');
d3.selectAll('.method').on('click', function() {
d3.selectAll('.method').style('color', '#bbb');
d3.select(this).style('color', '#555');
searchMethodBouncer = d3.select(this).attr('id');
// !!! best to trigger new search with currently searched for tags; alternative - clear searchbox and trigger new search
}); // toggle serch method used for search
// start select2
$('#select').select2({
placeholder: "Start typing or select...",
allowClear: true
});
// get the data-objects that match the selected tags
function showSelectedReports() {
var objSelected = $('#select').select2('data'); // get the full array of tags https://github.com/select2/select2/issues/2929
var tagsSelected = []; // this will hold the chosen tags
for (var i = 0; i < objSelected.length; i++) tagsSelected.push(objSelected[i].text); // loop through all tags in the array and push only the chosen into tagsSelected
reports = gvis.allData.filter(function(el) {
var intersect = _.intersection(el.searchTags, tagsSelected); // for easier reading - not needed
var intersectLength = intersect.length;
if (searchMethodBouncer === 'any') {
return intersectLength > 0; // 'any' mode: if there are any intersections between the chosen tags and the report's tags
} else if (searchMethodBouncer === 'strict') {
return intersectLength === tagsSelected.length; // 'strict' mode: if all chosen tags can be found in the report's tag-array
}
}); // return only the data-index-members that match the selected tags.
vis.cards.updateCards(reports);
} // showSelectedReports
$('#select').on('change', showSelectedReports); // update reports object through select field
d3.select('button#refresh').on('click', showSelectedReports); // update reports object through refresh button
d3.select('button#refresh').on('mouseover', function(){
d3.select(this).transition().style({'background': '#eaeaea', 'color': '#555'});
}); // button style change
d3.select('button#refresh').on('mouseout', function(){
d3.select(this).transition().style({'background': '#f7f7f7', 'color': '#999'});
}); // button style change
$('#select').on('select2:close', function(e){
if ($('#select').select2('data').length === 0) {
vis.tree.clearAll(gvis.root);
gvis.root.children.forEach(vis.tree.collapse);
vis.tree.update(gvis.root);
d3.selectAll('.report').remove(); // the listener reacts differently for strict search mode and shows all reports upon closing. lazy remove instead of de-bugging
d3.selectAll('div#noteSearch, div#noteBrowse')
.style('display', 'inherit')
.style('opacity', 0)
.transition().duration(500)
.style('opacity', .5);
d3.select('div#cards')
.style('overflow', 'hidden'); // remove scrollbar on pc's
} // collapse tree if selection is empty
});
}; // my.init
return my;
})(); // vis.selectbox module
// report cards ------------------------------------------------------------------------------------------------------
vis.cards = (function() {
var my = {};
my.updateCards = function(data){
d3.selectAll('div#noteSearch, div#noteBrowse').style('display', 'none');
d3.selectAll('.containerCards').remove(); // hard clean, no enter - update - exit
d3.select('div#cards')
.style('overflow', 'scroll')
.style('overflow-x', 'hidden'); // switch on only y-scrolling (for windows run browsers)
var containerCards = d3.select('div#cards')
.append('div')
.attr('class', 'containerCards');
d3.selectAll('.report').remove();
var reports = containerCards.selectAll('.report')
.data(data);
reports.enter()
.append('div')
.attr('class', 'report')
.attr('id', function(d) { return spaceLess(d.name); })
.style('opacity', 0)
.style('background-color', '#fff')
.transition()
.style('opacity', 1)
.style('background-color', function(d,i) { return i%2 === 0 ? '#F7F7F7' : '#fff' });
// headers
reports.append('h1')
.attr('class', 'header1')
.html(function(d) { return d.name; });
reports.append('h3')
.attr('class', 'header2')
.html(function(d) {
return d.alternative_names !== "NA" ?
'Type: ' + d.type + ' \u00B7 Family: ' + d.family + ' \u00B7 Alternative: ' + d.alternative_names :
'Type: ' + d.type + ' \u00B7 Family: ' + d.family;
});
// buttons
var buttonList = reports.append('ul')
.attr('class', 'taglist');
buttonList.append('li')
.attr('class', 'buttonTags')
.attr('id', 'browseTree')
.html('browse tree');
buttonList.append('li')
.attr('class', 'buttonTags')
.attr('id', 'moreInfo')
.html('more info');
buttonList.append('li')
.attr('class', 'buttonTags')
.attr('id', 'source')
.html('web/source');
buttonList.append('li')
.attr('class', 'buttonTags')
.attr('id', 'goodExample')
.html('example(s)');
// image
reports.append('img')
.attr('src', function(d) { return 'images/graphs/' + d.image_file; })
.attr('id', 'graph');
// description
reports.append('p')
.attr('class', 'description')
.html(function(d) { return d.description; });
// paragraph element underneath the floated image to clear the float
// this way the image doesn't overlap the container in any case
reports.append('p')
.attr('class', 'clearFix')
.html('');
// more info
reports.append('p')
.attr('class', 'moreInfoText')
.attr('id', function(d) { return 'moreInfoText' + spaceLess(d.name); })
.html(function(d) {
return '<strong>What</strong> data: ' + arrToStr(d.whatData)
+ ' \u007C <strong>What</strong> scale: ' + arrToStr(d.whatScale)
+ ' \u007C <strong>What</strong> scale: ' + arrToStr(d.whatScale)
+ ' \u007C <strong>Why</strong> targets: ' + arrToStr(d.whyTargets)
+ ' \u007C <strong>Why</strong> actions: ' + arrToStr(d.whyActions)
+ ' \u007C <strong>How</strong> encoding: ' + arrToStr(d.howMarksChannels)
+ '</br></br>'
+ '<strong>History</strong>: ' + d.history;
});
/* button-listeners and handlers */
// find report in tree
d3.selectAll('.header1, header2, #browseTree, .description').on('mousedown', function(e){
vis.tree.clearAll(gvis.root); // collapse data
vis.tree.expandAll(gvis.root); // expand data
vis.tree.update(gvis.root); // show tree
// !!! to sort out when we get there, but probably best to pass a 'search' object with searchField, searchText and maybe the data ?
gvis.searchField = "d.identifier"; // find the right node(s)...
gvis.searchText = e.identifier;
vis.tree.searchTree(gvis.root);
gvis.root.children.forEach(vis.tree.collapseAllNotFound); // collapse all non-found
vis.tree.update(gvis.root); // update
});
// go to web
d3.selectAll('#source').on('mousedown', function(e){
// open report in new window
var url = e.source;
var win = window.open(url, '_blank');
win ? win.focus() : alert('please allow pop-ups for this site'); // this needs to get tested
});
// show more info
d3.selectAll('#moreInfo').on('mousedown', function(d) {
if (d3.select('#moreInfoText' + spaceLess(d.name)).style('display') === 'none') {
d3.select(this).html('less info'); // toggle name
d3.select('#moreInfoText' + spaceLess(d.name))
.style('display', 'inherit')
.style('font-size', 1e-6 + 'px')
.transition()
.style('font-size', '1em'); // toggle display
} else {
d3.select(this).html('more info'); // toggle name
d3.select('#moreInfoText' + spaceLess(d.name))
.style('font-size', '1em')
.transition()
.style('font-size', 1e-6 + 'px'); // toggle display
setTimeout(function(){
d3.select('#moreInfoText' + spaceLess(d.name))
.style('display', 'none');
}, 250); // wait until transition has finished before setting display to none
} // toggle based on display property
});
} // vis.cards.updateCards()
return my;
})(); // vis.cards module
// tree --------------------------------------------------------------------------------------------------------------
vis.tree = (function() {
var my = {};
//===============================================
my.searchTree = function(d) {
if (d.children)
d.children.forEach(my.searchTree);
else if (d._children)
d._children.forEach(my.searchTree);
var searchFieldValue = eval(gvis.searchField);
if (searchFieldValue && searchFieldValue.match(gvis.searchText)) {
// Walk parent chain
var ancestors = [];
var parent = d;
while (typeof(parent) !== "undefined") {
ancestors.push(parent);
parent.class = "found";
parent = parent.parent;
}
}
}
//===============================================
my.clearAll = function(d) {
d.class = "";
if (d.children)
d.children.forEach(my.clearAll);
else if (d._children)
d._children.forEach(my.clearAll);
}
//===============================================
my.collapse = function(d) {
if (d.children) {
d._children = d.children;
d._children.forEach(my.collapse);
d.children = null;
}
}
//===============================================
my.collapseAllNotFound = function(d) {
if (d.children) {
if (d.class !== "found") {
d._children = d.children;
d._children.forEach(my.collapseAllNotFound);
d.children = null;
} else
d.children.forEach(my.collapseAllNotFound);
}
}
//===============================================
my.expandAll = function(d) {
if (d._children) {
d.children = d._children;
d.children.forEach(my.expandAll);
d._children = null;
} else if (d.children)
d.children.forEach(my.expandAll);
}
//= private func ================================
// Toggle children on click.
function toggle(d) {
if (d.children) {
d._children = d.children;
d.children = null;
} else {
d.children = d._children;
d._children = null;
}
my.clearAll(gvis.root);
my.update(d);
// $("#searchName").select2("val", "");
// $("#searchName").val("").trigger("change");
}
var width = document.getElementById('containerTree').clientWidth * .9,
height = document.getElementById('containerTree').clientHeight * .9,
margin = { top: height * .05, right: width * .1, bottom: height * 1e-6, left: width * .2 };
var i = 0,
duration = 600;
var tree = d3.layout.tree()
.size([height, width]);
var diagonal = d3.svg.diagonal()
.projection(function(d) { return [d.y, d.x]; });
var svg; // needs to be accessible for both my.init and my.update function
var expandBouncer = false; // for opening/closing all nodes on index-node double-click
// public variables ----------------------------
my.init = function() {
svg = d3.select('div#containerTree')
.append('svg')
.attr('id', 'svg')
.attr('width', width + margin.right)
.attr('height', height + margin.top + margin.bottom)
.append('g')
.attr('transform', 'translate(' + margin.left + ',' + margin.top + ')');
// rename data for use in tree
gvis.root = gvis.dataTree;
gvis.root.x0 = height / 2;
gvis.root.y0 = 0;
gvis.root.children.forEach(my.collapse); // applies collapse only to the root's children
my.update(gvis.root); // calls the vis-building function
// allow opening/closing entire tree on index-node double-click
var l = d3.selectAll('.node')[0].length;
var indexNode = d3.selectAll('.node')[0][l-1];
indexNode.id = 'indexNode';
d3.select('.node#indexNode').on('dblclick', function(){
expandBouncer = !expandBouncer;
if(expandBouncer){
my.collapse(gvis.root);
my.expandAll(gvis.root);
my.update(gvis.root);
} else {
my.clearAll(gvis.root);
gvis.root.children.forEach(vis.tree.collapse);
my.update(gvis.root);
}
}); // listener and handler for opening/closing all nodes
}
my.update = function(source) {
// Compute the new tree layout.
var nodes = tree.nodes(gvis.root).reverse(), // first node will be last in the returned object. doesn't seem to impact on the layout though
links = tree.links(nodes);
// Normalize for fixed-depth.
nodes.forEach(function(d) { d.y = d.depth * 100; }); // change node-distances (width of the vis)
// Update the nodes…
var node = svg.selectAll("g.node")
.data(nodes, function(d) { return d.id || (d.id = ++i); }); // key function. Rank by id or assign an id based on the last available id + 1
// Enter any new nodes at the parent's previous position.
var nodeEnter = node.enter().append("g")
.attr("class", "node")
.attr("transform", function(d) { return "translate(" + source.y0 + "," + source.x0 + ")"; })
.on("click", toggle); // moves child objects into children key
nodeEnter.append("circle")
.attr("r", 1e-6)
.style("fill", function(d) { return d._children ? "#ccc" : "#fff"; });
nodeEnter.append("text")
.classed('reportNode', function(d) { return !d.children && !d._children; })
.attr("x", function(d) { return d.children || d._children ? -10 : 10; })
.attr("dy", ".35em")
.attr("text-anchor", function(d) { return d.children || d._children ? "end" : "start"; })
.text(function(d) { return d.key || d.name; }) // returns the parent name for all parents and the report name for all leave nodes
.style("fill-opacity", 1e-6); // small number for transition
// event listender and handler for node-report-link
d3.selectAll('.reportNode').on('mousedown', function(d){
// option (1) show report card
vis.cards.updateCards([d]);
// // option (2) open report in new window
// var url = d.Link;
// var win = window.open(url, '_blank');
// win ? win.focus() : alert('please allow pop-ups for this site'); // this needs to get tested
});
// Transition nodes to their new position.
var nodeUpdate = node.transition()
.duration(duration)
.attr("transform", function(d) { return "translate(" + d.y + "," + d.x + ")"; });
nodeUpdate.select("circle")
.attr("r", 4.5)
.style("fill", function(d) {
if (d.class === "found") {
return "#437F3F"; // dark green
} else if (d._children) {
return "#ccc";
} else {
return "#fff";
}
})
.style("stroke", function(d) {
if (d.class === "found") {
return "#437F3F"; // dark green
}
});
nodeUpdate.select("text")
.style("fill-opacity", 1);
// Transition exiting nodes to the parent's new position.
var nodeExit = node.exit().transition()
.duration(duration)
.attr("transform", function(d) { return "translate(" + source.y + "," + source.x + ")"; })
.remove();
nodeExit.select("circle")
.attr("r", 1e-6);
nodeExit.select("text")
.style("fill-opacity", 1e-6);
// Update the links…
var link = svg.selectAll("path.link")
.data(links, function(d) { return d.target.id; }); // key function is the id - equivalent to the node id above
// Enter any new links at the parent's previous position.
link.enter().insert("path", "g")
.attr("class", "link")
.attr("d", function(d) {
var o = {x: source.x0, y: source.y0};
return diagonal({source: o, target: o});
});
// Transition links to their new position.
link.transition()
.duration(duration)
.attr("d", diagonal)
.style("stroke", function(d) {
if (d.target.class === "found") {
return "#437F3F"; // dark green
}
});
// Transition exiting nodes to the parent's new position.
link.exit().transition()
.duration(duration)
.attr("d", function(d) {
var o = {x: source.x, y: source.y};
return diagonal({source: o, target: o});
})
.remove();
// Stash the old positions for transition.
nodes.forEach(function(d) {
d.x0 = d.x;
d.y0 = d.y;
});
} // update function ?
return my;
})(); // vis.tree
| Added listeners/handlers for report card image hover (js)
| search.js | Added listeners/handlers for report card image hover (js) | <ide><path>earch.js
<ide>
<ide> /* button-listeners and handlers */
<ide>
<add>
<add> // image hover shows picture in big
<add> d3.selectAll('img#graph').on('mouseover', function() {
<add>
<add> // get client width (the browser-safe way)
<add> var w = window.innerWidth || document.documentElement.clientWidth || document.body.clientWidth;
<add> var h = window.innerHeight || document.documentElement.clientHeight || document.body.clientHeight;
<add>
<add> // get the image tag code (including inline style to set max-width and max-height)
<add> var image = '<img src=' + this.src + ' style="max-width:'+ w/2 + 'px; max-height:' + (h * 0.9) + 'px">'
<add>
<add> // tooltip to always be 5% from top and a maximum of 90% tall so that it's always in sight'
<add> d3.select('div.tooltip')
<add> .style('left', (d3.event.pageX + 20) + 'px')
<add> .style('top', '5vh')
<add> .html(image)
<add> .style('opacity', 0)
<add> .transition()
<add> .style('opacity', 0.9);
<add>
<add> });
<add>
<add> d3.selectAll('img#graph').on('mousemove', function() {
<add>
<add> // only move horizontally
<add> d3.select('div.tooltip')
<add> .style('left', (d3.event.pageX + 20) + 'px')
<add> .style('top', '5vh');
<add>
<add>
<add> });
<add>
<add> d3.selectAll('img#graph').on('mouseout', function() {
<add>
<add> d3.select('div.tooltip')
<add> .transition()
<add> .style('opacity', 0);
<add>
<add>
<add> });
<add>
<add>
<ide> // find report in tree
<ide> d3.selectAll('.header1, header2, #browseTree, .description').on('mousedown', function(e){
<ide> |
|
Java | apache-2.0 | ee69561bc2cee85cab3ae934d50455537be4db72 | 0 | Exorath/EventsService | /*
* Copyright 2016 Exorath
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.exorathcloud.service.events;
import com.exorath.service.commons.portProvider.PortProvider;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Created by Shortninja66 on 12/26/2016.
*/
public class Main {
private static Service service;
private static final Logger LOG = LoggerFactory.getLogger(Main.class);
public static void main(String[] args) {
Main.service = null;//Todo assign service implementation here
LOG.info("Service " + Main.service.getClass() + " instantiated");
Transport.setup(Main.service, PortProvider.getEnvironmentPortProvider());
LOG.info("HTTP transport setup");
}
}
| src/main/java/com/exorathcloud/service/events/Main.java | /*
* Copyright 2016 Exorath
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.exorathcloud.service.events;
/**
* Created by Shortninja66 on 12/26/2016.
*/
public class Main {
}
| Created psvm
| src/main/java/com/exorathcloud/service/events/Main.java | Created psvm | <ide><path>rc/main/java/com/exorathcloud/service/events/Main.java
<ide> */
<ide> package com.exorathcloud.service.events;
<ide>
<add>import com.exorath.service.commons.portProvider.PortProvider;
<add>import org.slf4j.Logger;
<add>import org.slf4j.LoggerFactory;
<add>
<ide> /**
<ide> * Created by Shortninja66 on 12/26/2016.
<ide> */
<ide> public class Main {
<add> private static Service service;
<add> private static final Logger LOG = LoggerFactory.getLogger(Main.class);
<ide>
<del>
<del>
<add> public static void main(String[] args) {
<add> Main.service = null;//Todo assign service implementation here
<add> LOG.info("Service " + Main.service.getClass() + " instantiated");
<add> Transport.setup(Main.service, PortProvider.getEnvironmentPortProvider());
<add> LOG.info("HTTP transport setup");
<add> }
<ide> } |
|
Java | apache-2.0 | 81f84c0533ed172219cccbf41e25c38b9ec043a5 | 0 | diffplug/spotless,diffplug/spotless,diffplug/spotless,diffplug/spotless,diffplug/spotless,diffplug/spotless | /*
* Copyright 2016 DiffPlug
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.diffplug.gradle.spotless;
import java.io.File;
import java.util.Objects;
import javax.annotation.Nullable;
import org.gradle.api.GradleException;
import org.gradle.api.file.FileCollection;
import org.gradle.api.plugins.JavaPluginConvention;
import org.gradle.api.tasks.SourceSet;
import com.diffplug.spotless.FormatterStep;
import com.diffplug.spotless.scala.ScalaFmtStep;
public class ScalaExtension extends FormatExtension {
static final String NAME = "scala";
public ScalaExtension(SpotlessExtension rootExtension) {
super(rootExtension);
}
public ScalaFmtConfig scalafmt() {
return scalafmt(ScalaFmtStep.defaultVersion());
}
public ScalaFmtConfig scalafmt(String version) {
return new ScalaFmtConfig(version);
}
public class ScalaFmtConfig {
final String version;
@Nullable
Object configFile;
ScalaFmtConfig(String version) {
this.version = Objects.requireNonNull(version);
addStep(createStep());
}
public void configFile(Object configFile) {
this.configFile = Objects.requireNonNull(configFile);
replaceStep(createStep());
}
private FormatterStep createStep() {
File resolvedConfigFile = configFile == null ? null : getProject().file(configFile);
return ScalaFmtStep.create(version, GradleProvisioner.fromProject(getProject()), resolvedConfigFile);
}
}
/** If the user hasn't specified the files yet, we'll assume he/she means all of the scala files. */
@Override
protected void setupTask(SpotlessTask task) {
if (target == null) {
JavaPluginConvention javaPlugin = getProject().getConvention().findPlugin(JavaPluginConvention.class);
if (javaPlugin == null) {
throw new GradleException("You must either specify 'target' manually or apply the 'scala' plugin.");
}
FileCollection union = getProject().files();
for (SourceSet sourceSet : javaPlugin.getSourceSets()) {
union = union.plus(sourceSet.getAllSource().filter(file -> {
String name = file.getName();
return name.endsWith(".scala") || name.endsWith(".sc");
}));
}
target = union;
}
super.setupTask(task);
}
}
| plugin-gradle/src/main/java/com/diffplug/gradle/spotless/ScalaExtension.java | /*
* Copyright 2016 DiffPlug
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.diffplug.gradle.spotless;
import java.io.File;
import java.util.Objects;
import javax.annotation.Nullable;
import org.gradle.api.GradleException;
import org.gradle.api.file.FileCollection;
import org.gradle.api.plugins.JavaPluginConvention;
import org.gradle.api.tasks.SourceSet;
import com.diffplug.spotless.FormatterStep;
import com.diffplug.spotless.scala.ScalaFmtStep;
public class ScalaExtension extends FormatExtension {
static final String NAME = "scala";
public ScalaExtension(SpotlessExtension rootExtension) {
super(rootExtension);
}
public ScalaFmtConfig scalafmt() {
return scalafmt(ScalaFmtStep.defaultVersion());
}
public ScalaFmtConfig scalafmt(String version) {
return new ScalaFmtConfig(version);
}
public class ScalaFmtConfig {
final String version;
@Nullable
Object configFile;
ScalaFmtConfig(String version) {
this.version = Objects.requireNonNull(version);
addStep(createStep());
}
public void configFile(Object configFile) {
this.configFile = Objects.requireNonNull(configFile);
replaceStep(createStep());
}
private FormatterStep createStep() {
File resolvedConfigFile = configFile == null ? null : getProject().file(configFile);
return ScalaFmtStep.create(version, GradleProvisioner.fromProject(getProject()), resolvedConfigFile);
}
}
/** If the user hasn't specified the files yet, we'll assume he/she means all of the kotlin files. */
@Override
protected void setupTask(SpotlessTask task) {
if (target == null) {
JavaPluginConvention javaPlugin = getProject().getConvention().findPlugin(JavaPluginConvention.class);
if (javaPlugin == null) {
throw new GradleException("You must either specify 'target' manually or apply the 'scala' plugin.");
}
FileCollection union = getProject().files();
for (SourceSet sourceSet : javaPlugin.getSourceSets()) {
union = union.plus(sourceSet.getAllSource().filter(file -> {
String name = file.getName();
return name.endsWith(".scala") || name.endsWith(".sc");
}));
}
target = union;
}
super.setupTask(task);
}
}
| Fix typo referencing scala. (#580)
| plugin-gradle/src/main/java/com/diffplug/gradle/spotless/ScalaExtension.java | Fix typo referencing scala. (#580) | <ide><path>lugin-gradle/src/main/java/com/diffplug/gradle/spotless/ScalaExtension.java
<ide> }
<ide> }
<ide>
<del> /** If the user hasn't specified the files yet, we'll assume he/she means all of the kotlin files. */
<add> /** If the user hasn't specified the files yet, we'll assume he/she means all of the scala files. */
<ide> @Override
<ide> protected void setupTask(SpotlessTask task) {
<ide> if (target == null) { |
|
Java | apache-2.0 | dc83e98e96a611f6ed9320c43fb45c69cf3990ba | 0 | dianping/cat,JacksonSha/cat,xiaojiaqi/cat,ddviplinux/cat,xiaojiaqi/cat,howepeng/cat,unidal/cat,chinaboard/cat,cdljsj/cat,gspandy/cat,cdljsj/cat,xiaojiaqi/cat,itnihao/cat,dadarom/cat,wyzssw/cat,redbeans2015/cat,redbeans2015/cat,redbeans2015/cat,chqlb/cat,dianping/cat,michael8335/cat,redbeans2015/cat,michael8335/cat,chinaboard/cat,jialinsun/cat,howepeng/cat,TonyChai24/cat,wyzssw/cat,ddviplinux/cat,howepeng/cat,wuqiangxjtu/cat,chinaboard/cat,itnihao/cat,unidal/cat,TonyChai24/cat,wyzssw/cat,jialinsun/cat,JacksonSha/cat,jialinsun/cat,javachengwc/cat,ddviplinux/cat,jialinsun/cat,javachengwc/cat,unidal/cat,TonyChai24/cat,wyzssw/cat,itnihao/cat,unidal/cat,howepeng/cat,cdljsj/cat,bryanchou/cat,dianping/cat,wuqiangxjtu/cat,michael8335/cat,JacksonSha/cat,JacksonSha/cat,dianping/cat,jialinsun/cat,chinaboard/cat,javachengwc/cat,wuqiangxjtu/cat,javachengwc/cat,gspandy/cat,dianping/cat,dadarom/cat,wuqiangxjtu/cat,chqlb/cat,javachengwc/cat,redbeans2015/cat,wuqiangxjtu/cat,redbeans2015/cat,dadarom/cat,wuqiangxjtu/cat,bryanchou/cat,ddviplinux/cat,bryanchou/cat,cdljsj/cat,dadarom/cat,xiaojiaqi/cat,gspandy/cat,cdljsj/cat,xiaojiaqi/cat,JacksonSha/cat,TonyChai24/cat,TonyChai24/cat,bryanchou/cat,bryanchou/cat,itnihao/cat,JacksonSha/cat,itnihao/cat,javachengwc/cat,unidal/cat,gspandy/cat,michael8335/cat,ddviplinux/cat,itnihao/cat,dianping/cat,ddviplinux/cat,jialinsun/cat,wyzssw/cat,gspandy/cat,gspandy/cat,howepeng/cat,michael8335/cat,chinaboard/cat,bryanchou/cat,chqlb/cat,cdljsj/cat,dadarom/cat,chinaboard/cat,chqlb/cat,xiaojiaqi/cat,chqlb/cat,dadarom/cat,dianping/cat,TonyChai24/cat,wyzssw/cat,howepeng/cat,unidal/cat,michael8335/cat,chqlb/cat | package com.dianping.cat.report.task.alert.sender;
import java.util.List;
import com.dianping.cat.Cat;
public class WeixinSender extends BaseSender {
@Override
protected void sendLog(String title, String content, List<String> receivers) {
StringBuilder builder = new StringBuilder();
builder.append(title).append(" ").append(content).append(" ");
for (String receiver : receivers) {
builder.append(receiver).append(" ");
}
Cat.logEvent("SendWeixin", builder.toString());
m_logger.info("SendWeixin" + builder.toString());
}
@Override
public boolean sendAlert(List<String> receivers, String domain, String title, String content, String alertType) {
if (alertType == null || !alertType.equals("error")) {
return true;
}
try {
content = content.replaceAll("<br/>", "\n");
m_mailSms.sendWeiXin(title, content, domain, mergeList(receivers));
sendLog(title, content, receivers);
return true;
} catch (Exception ex) {
Cat.logError("send weixin error" + " " + title + " " + content, ex);
return false;
}
}
private String mergeList(List<String> receivers) {
StringBuilder builder = new StringBuilder();
for (String receiver : receivers) {
builder.append(receiver).append(",");
}
String tmpResult = builder.toString();
if (tmpResult.endsWith(",")) {
return tmpResult.substring(0, tmpResult.length() - 1);
} else {
return tmpResult;
}
}
}
| cat-home/src/main/java/com/dianping/cat/report/task/alert/sender/WeixinSender.java | package com.dianping.cat.report.task.alert.sender;
import java.util.List;
import com.dianping.cat.Cat;
public class WeixinSender extends BaseSender {
@Override
protected void sendLog(String title, String content, List<String> receivers) {
StringBuilder builder = new StringBuilder();
builder.append(title).append(" ").append(content).append(" ");
for (String receiver : receivers) {
builder.append(receiver).append(" ");
}
Cat.logEvent("SendWeixin", builder.toString());
m_logger.info("SendWeixin" + builder.toString());
}
@Override
public boolean sendAlert(List<String> receivers, String domain, String title, String content, String alertType) {
if (alertType == null || !alertType.equals("error")) {
return true;
}
try {
m_mailSms.sendWeiXin(title, content, domain, mergeList(receivers));
sendLog(title, content, receivers);
return true;
} catch (Exception ex) {
Cat.logError("send weixin error" + " " + title + " " + content, ex);
return false;
}
}
private String mergeList(List<String> receivers) {
StringBuilder builder = new StringBuilder();
for (String receiver : receivers) {
builder.append(receiver).append(",");
}
String tmpResult = builder.toString();
if (tmpResult.endsWith(",")) {
return tmpResult.substring(0, tmpResult.length() - 1);
} else {
return tmpResult;
}
}
}
| change Weixin contents format
| cat-home/src/main/java/com/dianping/cat/report/task/alert/sender/WeixinSender.java | change Weixin contents format | <ide><path>at-home/src/main/java/com/dianping/cat/report/task/alert/sender/WeixinSender.java
<ide> }
<ide>
<ide> try {
<add> content = content.replaceAll("<br/>", "\n");
<ide> m_mailSms.sendWeiXin(title, content, domain, mergeList(receivers));
<ide> sendLog(title, content, receivers);
<ide> return true; |
|
Java | mit | 3f729d3a87cff4eadbd24249958bb854fa5054d8 | 0 | vishalkuo/philsTonight | package com.philstonight;
import android.app.PendingIntent;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.database.Cursor;
import android.net.Uri;
import android.provider.ContactsContract;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.telephony.SmsManager;
import android.view.Menu;
import android.view.MenuItem;
import android.view.MotionEvent;
import android.view.View;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.ListView;
import android.widget.Spinner;
import android.widget.TextView;
import com.philstonight.Models.RestaurantSingleton;
import com.philstonight.Models.SquadMember;
import com.philstonight.Util.SMSUtils;
import com.philstonight.Util.SharedPrefsUtils;
import com.philstonight.Util.UIUtils;
import com.philstonight.ViewAdapters.SquadAdapter;
import java.util.ArrayList;
public class MainActivity extends AppCompatActivity {
private static final int REQUEST_SELECT_CONTACT = 1;
private static final String EXTRA_NAME = "name";
private static final String EXTRA_NUMBER = "number";
private Button philsButton;
private IntentFilter intentFilter;
private SmsManager smsMgr;
private ArrayList<SquadMember> squadList = new ArrayList<>();
private Button contactButton;
private SquadAdapter squadAdapter;
private Context c = this;
private Spinner placeSpinner;
private ListView squadListView;
private TextView tonight;
private int counter = 0;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
/**
* Load from prefs
*/
SharedPrefsUtils.loadSharedPrefs(this, squadList);
/**
* Find assets
*/
philsButton = (Button)findViewById(R.id.philsButton);
placeSpinner = (Spinner)findViewById(R.id.spinner);
tonight = (TextView)findViewById(R.id.tonight);
ArrayAdapter<String> adapter = new ArrayAdapter<>(this, R.layout.spinner_adapter
, RestaurantSingleton.getInstance());
placeSpinner.setAdapter(adapter);
squadListView = (ListView)findViewById(R.id.squad_list);
contactButton = (Button)findViewById(R.id.contact_button);
/**
* List view
*/
squadAdapter = new SquadAdapter(squadList, c);
squadListView.setAdapter(squadAdapter);
setTonightText();
/**
* SMS Data
*/
intentFilter = new IntentFilter(Globals.SENT);
intentFilter.addAction(Globals.DELIVERED);
smsMgr = SmsManager.getDefault();
philsButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
alertSquad(null,"Squad Alerted");
}
});
// tonight.setOnTouchListener(new View.OnTouchListener() {
// @Override
// public boolean onTouch(View view, MotionEvent motionEvent) {
// if(motionEvent.getAction() == MotionEvent.ACTION_UP){
// if (counter % 10 == 0) {
// alertSquad("Philled Tonight", "Philled Tonight");
// }
// counter++;
// }
// return false;
// }
// });
/**
* Load from contacts
*/
contactButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
addFromContacts(view);
}
});
placeSpinner.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() {
@Override
public void onItemSelected(AdapterView<?> adapterView, View view, int i, long l) {
setTonightText();
}
@Override
public void onNothingSelected(AdapterView<?> adapterView) {
}
});
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.menu_main, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
return super.onOptionsItemSelected(item);
}
public void addFromContacts(View view) {
Intent intent = new Intent(Intent.ACTION_PICK);
intent.setType(ContactsContract.Contacts.CONTENT_TYPE);
if (intent.resolveActivity(getPackageManager()) != null) {
startActivityForResult(intent, REQUEST_SELECT_CONTACT);
}
}
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
if (requestCode == REQUEST_SELECT_CONTACT && resultCode == RESULT_OK) {
Uri contactUri = data.getData();
String contactName;
String contactId;
int hasPhoneNum;
String phoneNumber = "";
Cursor cursor = getContentResolver().query(contactUri, null, null, null, null);
if (cursor.moveToFirst()) {
contactName = cursor.getString(cursor.getColumnIndex(ContactsContract.Contacts.DISPLAY_NAME));
contactId = cursor.getString(cursor.getColumnIndex(ContactsContract.Contacts._ID));
hasPhoneNum = Integer.valueOf(cursor.getString(cursor.getColumnIndex(ContactsContract.Contacts.HAS_PHONE_NUMBER)));
if (hasPhoneNum != 0) {
Cursor pCur = getContentResolver().query(ContactsContract.CommonDataKinds.Phone.CONTENT_URI,
null,
ContactsContract.CommonDataKinds.Phone.CONTACT_ID + " = ?", new String[]{contactId}, null);
while (pCur.moveToNext())
{
String phone = pCur.getString(pCur.getColumnIndex(ContactsContract.CommonDataKinds.Phone.NUMBER));
phoneNumber = phone;
}
pCur.close();
SquadMember squadMember = new SquadMember(contactName, phoneNumber);
squadList.add(squadMember);
squadAdapter.appendToSquad(squadMember);
SharedPrefsUtils.saveToSharedPrefs(squadMember, c);
}
}else{
UIUtils.toastShort("Contact has no number", c);
}
cursor.close();
}
}
public void sendText(String conNumber, String conName, int requestCode){
sendText(conNumber, conName, requestCode, null);
}
public void sendText(String conNumber, String conName, int requestCode, String message)
{
Intent sentIntent = new Intent(Globals.SENT);
Intent deliveredIntent = new Intent(Globals.DELIVERED);
sentIntent.putExtra(EXTRA_NUMBER, conNumber);
sentIntent.putExtra(EXTRA_NAME, conName);
PendingIntent sentPI = PendingIntent.getBroadcast(this, requestCode, sentIntent, 0);
PendingIntent deliveredPI = PendingIntent.getBroadcast(this, requestCode, deliveredIntent, 0);
message = (message == null) ? placeSpinner.getSelectedItem().toString() + Globals.TONIGHT : message;
smsMgr.sendTextMessage(conNumber, null, message, sentPI, deliveredPI);
}
private BroadcastReceiver receiver = SMSUtils.generateBroadcastReceiver();
@Override
protected void onPause() {
super.onPause();
unregisterReceiver(receiver);
}
@Override
protected void onResume(){
super.onResume();
registerReceiver(receiver, intentFilter);
}
public void deleteUser(int position){
squadList.remove(position);
}
private void setTonightText(){
philsButton.setText(placeSpinner.getSelectedItem().toString() + " tonight?");
}
private void alertSquad(String message, String toast){
for (int i = 0; i < squadList.size(); i++) {
SquadMember squadMember = squadList.get(i);
sendText(squadMember.getNumber(), squadMember.getName(), i, message);
}
UIUtils.toastShort(toast, c);
}
}
| PhilsTonight/app/src/main/java/com/philstonight/MainActivity.java | package com.philstonight;
import android.app.PendingIntent;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.database.Cursor;
import android.net.Uri;
import android.provider.ContactsContract;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.telephony.SmsManager;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.ListView;
import android.widget.Spinner;
import android.widget.TextView;
import com.philstonight.Models.RestaurantSingleton;
import com.philstonight.Models.SquadMember;
import com.philstonight.Util.SMSUtils;
import com.philstonight.Util.SharedPrefsUtils;
import com.philstonight.Util.UIUtils;
import com.philstonight.ViewAdapters.SquadAdapter;
import java.util.ArrayList;
public class MainActivity extends AppCompatActivity {
private static final int REQUEST_SELECT_CONTACT = 1;
private static final String EXTRA_NAME = "name";
private static final String EXTRA_NUMBER = "number";
private Button philsButton;
private IntentFilter intentFilter;
private SmsManager smsMgr;
private ArrayList<SquadMember> squadList = new ArrayList<>();
private Button contactButton;
private SquadAdapter squadAdapter;
private Context c = this;
private Spinner placeSpinner;
private ListView squadListView;
private TextView tonight;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
/**
* Load from prefs
*/
SharedPrefsUtils.loadSharedPrefs(this, squadList);
/**
* Find assets
*/
philsButton = (Button)findViewById(R.id.philsButton);
placeSpinner = (Spinner)findViewById(R.id.spinner);
tonight = (TextView)findViewById(R.id.tonight);
ArrayAdapter<String> adapter = new ArrayAdapter<>(this, R.layout.spinner_adapter
, RestaurantSingleton.getInstance());
placeSpinner.setAdapter(adapter);
squadListView = (ListView)findViewById(R.id.squad_list);
contactButton = (Button)findViewById(R.id.contact_button);
/**
* List view
*/
squadAdapter = new SquadAdapter(squadList, c);
squadListView.setAdapter(squadAdapter);
setTonightText();
/**
* SMS Data
*/
intentFilter = new IntentFilter(Globals.SENT);
intentFilter.addAction(Globals.DELIVERED);
smsMgr = SmsManager.getDefault();
philsButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
for (int i = 0; i < squadList.size(); i++) {
SquadMember squadMember = squadList.get(i);
sendText(squadMember.getNumber(), squadMember.getName(), i);
}
UIUtils.toastShort("Squad Alerted", c);
}
});
tonight.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
}
});
/**
* Load from contacts
*/
contactButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
addFromContacts(view);
}
});
placeSpinner.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() {
@Override
public void onItemSelected(AdapterView<?> adapterView, View view, int i, long l) {
setTonightText();
}
@Override
public void onNothingSelected(AdapterView<?> adapterView) {
}
});
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.menu_main, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
return super.onOptionsItemSelected(item);
}
public void addFromContacts(View view) {
Intent intent = new Intent(Intent.ACTION_PICK);
intent.setType(ContactsContract.Contacts.CONTENT_TYPE);
if (intent.resolveActivity(getPackageManager()) != null) {
startActivityForResult(intent, REQUEST_SELECT_CONTACT);
}
}
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
if (requestCode == REQUEST_SELECT_CONTACT && resultCode == RESULT_OK) {
Uri contactUri = data.getData();
String contactName;
String contactId;
int hasPhoneNum;
String phoneNumber = "";
Cursor cursor = getContentResolver().query(contactUri, null, null, null, null);
if (cursor.moveToFirst()) {
contactName = cursor.getString(cursor.getColumnIndex(ContactsContract.Contacts.DISPLAY_NAME));
contactId = cursor.getString(cursor.getColumnIndex(ContactsContract.Contacts._ID));
hasPhoneNum = Integer.valueOf(cursor.getString(cursor.getColumnIndex(ContactsContract.Contacts.HAS_PHONE_NUMBER)));
if (hasPhoneNum != 0) {
Cursor pCur = getContentResolver().query(ContactsContract.CommonDataKinds.Phone.CONTENT_URI,
null,
ContactsContract.CommonDataKinds.Phone.CONTACT_ID + " = ?", new String[]{contactId}, null);
while (pCur.moveToNext())
{
String phone = pCur.getString(pCur.getColumnIndex(ContactsContract.CommonDataKinds.Phone.NUMBER));
phoneNumber = phone;
}
pCur.close();
SquadMember squadMember = new SquadMember(contactName, phoneNumber);
squadList.add(squadMember);
squadAdapter.appendToSquad(squadMember);
SharedPrefsUtils.saveToSharedPrefs(squadMember, c);
}
}else{
UIUtils.toastShort("Contact has no number", c);
}
cursor.close();
}
}
public void sendText(String conNumber, String conName, int requestCode)
{
Intent sentIntent = new Intent(Globals.SENT);
Intent deliveredIntent = new Intent(Globals.DELIVERED);
sentIntent.putExtra(EXTRA_NUMBER, conNumber);
sentIntent.putExtra(EXTRA_NAME, conName);
PendingIntent sentPI = PendingIntent.getBroadcast(this, requestCode, sentIntent, 0);
PendingIntent deliveredPI = PendingIntent.getBroadcast(this, requestCode, deliveredIntent, 0);
smsMgr.sendTextMessage(conNumber, null, placeSpinner.getSelectedItem().toString()
+ Globals.TONIGHT, sentPI, deliveredPI);
}
private BroadcastReceiver receiver = SMSUtils.generateBroadcastReceiver();
@Override
protected void onPause() {
super.onPause();
unregisterReceiver(receiver);
}
@Override
protected void onResume(){
super.onResume();
registerReceiver(receiver, intentFilter);
}
public void deleteUser(int position){
squadList.remove(position);
}
private void setTonightText(){
philsButton.setText(placeSpinner.getSelectedItem().toString() + " tonight?");
}
}
| Phil's Tonight?
| PhilsTonight/app/src/main/java/com/philstonight/MainActivity.java | Phil's Tonight? | <ide><path>hilsTonight/app/src/main/java/com/philstonight/MainActivity.java
<ide> import android.telephony.SmsManager;
<ide> import android.view.Menu;
<ide> import android.view.MenuItem;
<add>import android.view.MotionEvent;
<ide> import android.view.View;
<ide> import android.widget.AdapterView;
<ide> import android.widget.ArrayAdapter;
<ide> private Spinner placeSpinner;
<ide> private ListView squadListView;
<ide> private TextView tonight;
<add> private int counter = 0;
<ide>
<ide> @Override
<ide> protected void onCreate(Bundle savedInstanceState) {
<ide> philsButton.setOnClickListener(new View.OnClickListener() {
<ide> @Override
<ide> public void onClick(View view) {
<del> for (int i = 0; i < squadList.size(); i++) {
<del> SquadMember squadMember = squadList.get(i);
<del> sendText(squadMember.getNumber(), squadMember.getName(), i);
<del> }
<del> UIUtils.toastShort("Squad Alerted", c);
<del>
<add> alertSquad(null,"Squad Alerted");
<ide> }
<ide> });
<ide>
<del> tonight.setOnClickListener(new View.OnClickListener() {
<del> @Override
<del> public void onClick(View view) {
<del>
<del> }
<del> });
<add>// tonight.setOnTouchListener(new View.OnTouchListener() {
<add>// @Override
<add>// public boolean onTouch(View view, MotionEvent motionEvent) {
<add>// if(motionEvent.getAction() == MotionEvent.ACTION_UP){
<add>// if (counter % 10 == 0) {
<add>// alertSquad("Philled Tonight", "Philled Tonight");
<add>// }
<add>// counter++;
<add>// }
<add>// return false;
<add>// }
<add>// });
<ide>
<ide> /**
<ide> * Load from contacts
<ide> }
<ide> }
<ide>
<del> public void sendText(String conNumber, String conName, int requestCode)
<add> public void sendText(String conNumber, String conName, int requestCode){
<add> sendText(conNumber, conName, requestCode, null);
<add> }
<add>
<add> public void sendText(String conNumber, String conName, int requestCode, String message)
<ide> {
<ide> Intent sentIntent = new Intent(Globals.SENT);
<ide> Intent deliveredIntent = new Intent(Globals.DELIVERED);
<ide> sentIntent.putExtra(EXTRA_NAME, conName);
<ide> PendingIntent sentPI = PendingIntent.getBroadcast(this, requestCode, sentIntent, 0);
<ide> PendingIntent deliveredPI = PendingIntent.getBroadcast(this, requestCode, deliveredIntent, 0);
<del> smsMgr.sendTextMessage(conNumber, null, placeSpinner.getSelectedItem().toString()
<del> + Globals.TONIGHT, sentPI, deliveredPI);
<add> message = (message == null) ? placeSpinner.getSelectedItem().toString() + Globals.TONIGHT : message;
<add> smsMgr.sendTextMessage(conNumber, null, message, sentPI, deliveredPI);
<ide> }
<ide>
<ide> private BroadcastReceiver receiver = SMSUtils.generateBroadcastReceiver();
<ide> private void setTonightText(){
<ide> philsButton.setText(placeSpinner.getSelectedItem().toString() + " tonight?");
<ide> }
<add>
<add> private void alertSquad(String message, String toast){
<add> for (int i = 0; i < squadList.size(); i++) {
<add> SquadMember squadMember = squadList.get(i);
<add> sendText(squadMember.getNumber(), squadMember.getName(), i, message);
<add> }
<add> UIUtils.toastShort(toast, c);
<add> }
<ide> } |
|
Java | mit | a1d5ef428e696fa2cd8c3ea4ba834a73567f8db2 | 0 | 0xd4d/iced,0xd4d/iced,0xd4d/iced,0xd4d/iced,0xd4d/iced | // SPDX-License-Identifier: MIT
// Copyright (C) 2018-present iced project and contributors
package com.github.icedland.iced.x86.enc;
import com.github.icedland.iced.x86.Code;
import com.github.icedland.iced.x86.Instruction;
import com.github.icedland.iced.x86.MvexEHBit;
import com.github.icedland.iced.x86.MvexRegMemConv;
import com.github.icedland.iced.x86.OpKind;
import com.github.icedland.iced.x86.RoundingControl;
import com.github.icedland.iced.x86.internal.MandatoryPrefixByte;
import com.github.icedland.iced.x86.internal.MvexInfo;
import com.github.icedland.iced.x86.internal.MvexTupleTypeLut;
import com.github.icedland.iced.x86.internal.TupleTypeTable;
import com.github.icedland.iced.x86.internal.enc.EncFlags1;
import com.github.icedland.iced.x86.internal.enc.EncFlags2;
import com.github.icedland.iced.x86.internal.enc.EncFlags3;
import com.github.icedland.iced.x86.internal.enc.EncoderFlags;
import com.github.icedland.iced.x86.internal.enc.ImmSize;
import com.github.icedland.iced.x86.internal.enc.LBit;
import com.github.icedland.iced.x86.internal.enc.LegacyOpCodeTable;
import com.github.icedland.iced.x86.internal.enc.VexOpCodeTable;
import com.github.icedland.iced.x86.internal.enc.WBit;
/** DO NOT USE: INTERNAL API */
public final class InternalOpCodeHandlers {
private InternalOpCodeHandlers() {
}
/**
* DO NOT USE: INTERNAL API
*
* @deprecated Not part of the public API
*/
@Deprecated
public static final class InvalidHandler extends OpCodeHandler {
/** DO NOT USE: INTERNAL API */
public static final String ERROR_MESSAGE = "Can't encode an invalid instruction";
/** DO NOT USE: INTERNAL API */
public InvalidHandler() {
super(EncFlags2.NONE, EncFlags3.BIT16OR32 | EncFlags3.BIT64, false, null, new Op[0]);
}
@Override
void encode(Encoder encoder, Instruction instruction) {
encoder.setErrorMessage(ERROR_MESSAGE);
}
}
/**
* DO NOT USE: INTERNAL API
*
* @deprecated Not part of the public API
*/
@Deprecated
public static final class DeclareDataHandler extends OpCodeHandler {
final int elemLength;
final int maxLength;
/** DO NOT USE: INTERNAL API */
public DeclareDataHandler(int code) {
super(EncFlags2.NONE, EncFlags3.BIT16OR32 | EncFlags3.BIT64, true, null, new Op[0]);
switch (code) {
case Code.DECLAREBYTE:
elemLength = 1;
break;
case Code.DECLAREWORD:
elemLength = 2;
break;
case Code.DECLAREDWORD:
elemLength = 4;
break;
case Code.DECLAREQWORD:
elemLength = 8;
break;
default:
throw new UnsupportedOperationException();
}
maxLength = 16 / elemLength;
}
@Override
void encode(Encoder encoder, Instruction instruction) {
int declDataCount = instruction.getDeclareDataCount();
if (declDataCount < 1 || declDataCount > maxLength) {
encoder.setErrorMessage(String.format("Invalid db/dw/dd/dq data count. Count = %d, max count = %d", declDataCount, maxLength));
return;
}
int length = declDataCount * elemLength;
for (int i = 0; i < length; i++)
encoder.writeByteInternal(instruction.getDeclareByteValue(i));
}
}
/**
* DO NOT USE: INTERNAL API
*
* @deprecated Not part of the public API
*/
@Deprecated
public static final class ZeroBytesHandler extends OpCodeHandler {
/** DO NOT USE: INTERNAL API */
public ZeroBytesHandler(int code) {
super(EncFlags2.NONE, EncFlags3.BIT16OR32 | EncFlags3.BIT64, true, null, new Op[0]);
}
@Override
void encode(Encoder encoder, Instruction instruction) {
}
}
/**
* DO NOT USE: INTERNAL API
*
* @deprecated Not part of the public API
*/
@Deprecated
public static final class LegacyHandler extends OpCodeHandler {
private final int tableByte1, tableByte2;
private final int mandatoryPrefix;
private static Op[] createOps(int encFlags1) {
int op0 = (encFlags1 >>> EncFlags1.LEGACY_OP0_SHIFT) & EncFlags1.LEGACY_OP_MASK;
int op1 = (encFlags1 >>> EncFlags1.LEGACY_OP1_SHIFT) & EncFlags1.LEGACY_OP_MASK;
int op2 = (encFlags1 >>> EncFlags1.LEGACY_OP2_SHIFT) & EncFlags1.LEGACY_OP_MASK;
int op3 = (encFlags1 >>> EncFlags1.LEGACY_OP3_SHIFT) & EncFlags1.LEGACY_OP_MASK;
if (op3 != 0) {
assert op0 != 0 && op1 != 0 && op2 != 0;
return new Op[] { OpTables.legacyOps[op0 - 1], OpTables.legacyOps[op1 - 1], OpTables.legacyOps[op2 - 1],
OpTables.legacyOps[op3 - 1] };
}
if (op2 != 0) {
assert op0 != 0 && op1 != 0;
return new Op[] { OpTables.legacyOps[op0 - 1], OpTables.legacyOps[op1 - 1], OpTables.legacyOps[op2 - 1] };
}
if (op1 != 0) {
assert op0 != 0;
return new Op[] { OpTables.legacyOps[op0 - 1], OpTables.legacyOps[op1 - 1] };
}
if (op0 != 0)
return new Op[] { OpTables.legacyOps[op0 - 1] };
return new Op[0];
}
/** DO NOT USE: INTERNAL API */
public LegacyHandler(int encFlags1, int encFlags2, int encFlags3) {
super(encFlags2, encFlags3, false, null, createOps(encFlags1));
switch ((encFlags2 >>> EncFlags2.TABLE_SHIFT) & EncFlags2.TABLE_MASK) {
case LegacyOpCodeTable.MAP0:
tableByte1 = 0;
tableByte2 = 0;
break;
case LegacyOpCodeTable.MAP0F:
tableByte1 = 0x0F;
tableByte2 = 0;
break;
case LegacyOpCodeTable.MAP0F38:
tableByte1 = 0x0F;
tableByte2 = 0x38;
break;
case LegacyOpCodeTable.MAP0F3A:
tableByte1 = 0x0F;
tableByte2 = 0x3A;
break;
default:
throw new UnsupportedOperationException();
}
switch ((encFlags2 >>> EncFlags2.MANDATORY_PREFIX_SHIFT) & EncFlags2.MANDATORY_PREFIX_MASK) {
case MandatoryPrefixByte.NONE:
mandatoryPrefix = 0x00;
break;
case MandatoryPrefixByte.P66:
mandatoryPrefix = 0x66;
break;
case MandatoryPrefixByte.PF3:
mandatoryPrefix = 0xF3;
break;
case MandatoryPrefixByte.PF2:
mandatoryPrefix = 0xF2;
break;
default:
throw new UnsupportedOperationException();
}
}
@Override
void encode(Encoder encoder, Instruction instruction) {
int b = mandatoryPrefix;
encoder.writePrefixes(instruction, b != 0xF3);
if (b != 0)
encoder.writeByteInternal(b);
b = encoder.encoderFlags;
b &= 0x4F;
if (b != 0) {
if ((encoder.encoderFlags & EncoderFlags.HIGH_LEGACY_8_BIT_REGS) != 0)
encoder.setErrorMessage(
"Registers AH, CH, DH, BH can't be used if there's a REX prefix. Use AL, CL, DL, BL, SPL, BPL, SIL, DIL, R8L-R15L instead.");
b |= 0x40;
encoder.writeByteInternal(b);
}
if ((b = tableByte1) != 0) {
encoder.writeByteInternal(b);
if ((b = tableByte2) != 0)
encoder.writeByteInternal(b);
}
}
}
/**
* DO NOT USE: INTERNAL API
*
* @deprecated Not part of the public API
*/
@Deprecated
public static final class VexHandler extends OpCodeHandler {
private final int table;
private final int lastByte;
private final int mask_W_L;
private final int mask_L;
private final int W1;
private static Op[] createOps(int encFlags1) {
int op0 = (encFlags1 >>> EncFlags1.VEX_OP0_SHIFT) & EncFlags1.VEX_OP_MASK;
int op1 = (encFlags1 >>> EncFlags1.VEX_OP1_SHIFT) & EncFlags1.VEX_OP_MASK;
int op2 = (encFlags1 >>> EncFlags1.VEX_OP2_SHIFT) & EncFlags1.VEX_OP_MASK;
int op3 = (encFlags1 >>> EncFlags1.VEX_OP3_SHIFT) & EncFlags1.VEX_OP_MASK;
int op4 = (encFlags1 >>> EncFlags1.VEX_OP4_SHIFT) & EncFlags1.VEX_OP_MASK;
if (op4 != 0) {
assert op0 != 0 && op1 != 0 && op2 != 0 && op3 != 0;
return new Op[] { OpTables.vexOps[op0 - 1], OpTables.vexOps[op1 - 1], OpTables.vexOps[op2 - 1], OpTables.vexOps[op3 - 1],
OpTables.vexOps[op4 - 1] };
}
if (op3 != 0) {
assert op0 != 0 && op1 != 0 && op2 != 0;
return new Op[] { OpTables.vexOps[op0 - 1], OpTables.vexOps[op1 - 1], OpTables.vexOps[op2 - 1], OpTables.vexOps[op3 - 1] };
}
if (op2 != 0) {
assert op0 != 0 && op1 != 0;
return new Op[] { OpTables.vexOps[op0 - 1], OpTables.vexOps[op1 - 1], OpTables.vexOps[op2 - 1] };
}
if (op1 != 0) {
assert op0 != 0;
return new Op[] { OpTables.vexOps[op0 - 1], OpTables.vexOps[op1 - 1] };
}
if (op0 != 0)
return new Op[] { OpTables.vexOps[op0 - 1] };
return new Op[0];
}
/** DO NOT USE: INTERNAL API */
public VexHandler(int encFlags1, int encFlags2, int encFlags3) {
super(encFlags2, encFlags3, false, null, createOps(encFlags1));
int lastByteTmp = 0;
int mask_W_L_tmp = 0;
int mask_L_tmp = 0;
table = (encFlags2 >>> EncFlags2.TABLE_SHIFT) & EncFlags2.TABLE_MASK;
int wbit = (encFlags2 >>> EncFlags2.WBIT_SHIFT) & EncFlags2.WBIT_MASK;
W1 = wbit == WBit.W1 ? 0xFFFF_FFFF : 0;
int lbit = (encFlags2 >>> EncFlags2.LBIT_SHIFT) & EncFlags2.LBIT_MASK;
switch (lbit) {
case LBit.L1:
case LBit.L256:
lastByteTmp = 4;
break;
}
if (W1 != 0)
lastByteTmp |= 0x80;
lastByteTmp |= (encFlags2 >>> EncFlags2.MANDATORY_PREFIX_SHIFT) & EncFlags2.MANDATORY_PREFIX_MASK;
if (wbit == WBit.WIG)
mask_W_L_tmp |= 0x80;
if (lbit == LBit.LIG) {
mask_W_L_tmp |= 4;
mask_L_tmp |= 4;
}
lastByte = lastByteTmp;
mask_W_L = mask_W_L_tmp;
mask_L = mask_L_tmp;
}
@Override
void encode(Encoder encoder, Instruction instruction) {
encoder.writePrefixes(instruction);
int encoderFlags = encoder.encoderFlags;
int b = lastByte;
b |= (~encoderFlags >>> (EncoderFlags.VVVVV_SHIFT - 3)) & 0x78;
if ((encoder.internal_PreventVEX2 | W1 | (table - VexOpCodeTable.MAP0F)
| (encoderFlags & (EncoderFlags.X | EncoderFlags.B | EncoderFlags.W))) != 0) {
encoder.writeByteInternal(0xC4);
int b2 = table;
b2 |= (~encoderFlags & 7) << 5;
encoder.writeByteInternal(b2);
b |= mask_W_L & encoder.internal_VEX_WIG_LIG;
encoder.writeByteInternal(b);
}
else {
encoder.writeByteInternal(0xC5);
b |= (~encoderFlags & 4) << 5;
b |= mask_L & encoder.internal_VEX_LIG;
encoder.writeByteInternal(b);
}
}
}
/**
* DO NOT USE: INTERNAL API
*
* @deprecated Not part of the public API
*/
@Deprecated
public static final class XopHandler extends OpCodeHandler {
private final int table;
private final int lastByte;
private static Op[] createOps(int encFlags1) {
int op0 = (encFlags1 >>> EncFlags1.XOP_OP0_SHIFT) & EncFlags1.XOP_OP_MASK;
int op1 = (encFlags1 >>> EncFlags1.XOP_OP1_SHIFT) & EncFlags1.XOP_OP_MASK;
int op2 = (encFlags1 >>> EncFlags1.XOP_OP2_SHIFT) & EncFlags1.XOP_OP_MASK;
int op3 = (encFlags1 >>> EncFlags1.XOP_OP3_SHIFT) & EncFlags1.XOP_OP_MASK;
if (op3 != 0) {
assert op0 != 0 && op1 != 0 && op2 != 0;
return new Op[] { OpTables.xopOps[op0 - 1], OpTables.xopOps[op1 - 1], OpTables.xopOps[op2 - 1], OpTables.xopOps[op3 - 1] };
}
if (op2 != 0) {
assert op0 != 0 && op1 != 0;
return new Op[] { OpTables.xopOps[op0 - 1], OpTables.xopOps[op1 - 1], OpTables.xopOps[op2 - 1] };
}
if (op1 != 0) {
assert op0 != 0;
return new Op[] { OpTables.xopOps[op0 - 1], OpTables.xopOps[op1 - 1] };
}
if (op0 != 0)
return new Op[] { OpTables.xopOps[op0 - 1] };
return new Op[0];
}
/** DO NOT USE: INTERNAL API */
public XopHandler(int encFlags1, int encFlags2, int encFlags3) {
super(encFlags2, encFlags3, false, null, createOps(encFlags1));
int lastByteTmp = 0;
table = 8 + ((encFlags2 >>> EncFlags2.TABLE_SHIFT) & EncFlags2.TABLE_MASK);
assert table == 8 || table == 9 || table == 10 : table;
switch ((encFlags2 >>> EncFlags2.LBIT_SHIFT) & EncFlags2.LBIT_MASK) {
case LBit.L1:
case LBit.L256:
lastByteTmp = 4;
break;
}
int wbit = (encFlags2 >>> EncFlags2.WBIT_SHIFT) & EncFlags2.WBIT_MASK;
if (wbit == WBit.W1)
lastByteTmp |= 0x80;
lastByteTmp |= (encFlags2 >>> EncFlags2.MANDATORY_PREFIX_SHIFT) & EncFlags2.MANDATORY_PREFIX_MASK;
lastByte = lastByteTmp;
}
@Override
void encode(Encoder encoder, Instruction instruction) {
encoder.writePrefixes(instruction);
encoder.writeByteInternal(0x8F);
int encoderFlags = encoder.encoderFlags;
int b = table;
b |= (~encoderFlags & 7) << 5;
encoder.writeByteInternal(b);
b = lastByte;
b |= (~encoderFlags >>> (EncoderFlags.VVVVV_SHIFT - 3)) & 0x78;
encoder.writeByteInternal(b);
}
}
/**
* DO NOT USE: INTERNAL API
*
* @deprecated Not part of the public API
*/
@Deprecated
public static final class EvexHandler extends OpCodeHandler {
private final int wbit;
private final int tupleType;
private final int table;
private final int p1Bits;
private final int llBits;
private final int mask_W;
private final int mask_LL;
private static Op[] createOps(int encFlags1) {
int op0 = (encFlags1 >>> EncFlags1.EVEX_OP0_SHIFT) & EncFlags1.EVEX_OP_MASK;
int op1 = (encFlags1 >>> EncFlags1.EVEX_OP1_SHIFT) & EncFlags1.EVEX_OP_MASK;
int op2 = (encFlags1 >>> EncFlags1.EVEX_OP2_SHIFT) & EncFlags1.EVEX_OP_MASK;
int op3 = (encFlags1 >>> EncFlags1.EVEX_OP3_SHIFT) & EncFlags1.EVEX_OP_MASK;
if (op3 != 0) {
assert op0 != 0 && op1 != 0 && op2 != 0;
return new Op[] { OpTables.evexOps[op0 - 1], OpTables.evexOps[op1 - 1], OpTables.evexOps[op2 - 1], OpTables.evexOps[op3 - 1] };
}
if (op2 != 0) {
assert op0 != 0 && op1 != 0;
return new Op[] { OpTables.evexOps[op0 - 1], OpTables.evexOps[op1 - 1], OpTables.evexOps[op2 - 1] };
}
if (op1 != 0) {
assert op0 != 0;
return new Op[] { OpTables.evexOps[op0 - 1], OpTables.evexOps[op1 - 1] };
}
if (op0 != 0)
return new Op[] { OpTables.evexOps[op0 - 1] };
return new Op[0];
}
static final TryConvertToDisp8N tryConvertToDisp8N = new TryConvertToDisp8NImpl();
/** DO NOT USE: INTERNAL API */
public EvexHandler(int encFlags1, int encFlags2, int encFlags3) {
super(encFlags2, encFlags3, false, tryConvertToDisp8N, createOps(encFlags1));
int mask_LL_tmp = 0;
int p1BitsTmp = 0;
int mask_W_tmp = 0;
tupleType = (encFlags3 >>> EncFlags3.TUPLE_TYPE_SHIFT) & EncFlags3.TUPLE_TYPE_MASK;
table = (encFlags2 >>> EncFlags2.TABLE_SHIFT) & EncFlags2.TABLE_MASK;
p1BitsTmp = 4 | ((encFlags2 >>> EncFlags2.MANDATORY_PREFIX_SHIFT) & EncFlags2.MANDATORY_PREFIX_MASK);
wbit = (encFlags2 >>> EncFlags2.WBIT_SHIFT) & EncFlags2.WBIT_MASK;
if (wbit == WBit.W1)
p1BitsTmp |= 0x80;
switch ((encFlags2 >>> EncFlags2.LBIT_SHIFT) & EncFlags2.LBIT_MASK) {
case LBit.LIG:
llBits = 0 << 5;
mask_LL_tmp = 3 << 5;
break;
case LBit.L0:
case LBit.LZ:
case LBit.L128:
llBits = 0 << 5;
break;
case LBit.L1:
case LBit.L256:
llBits = 1 << 5;
break;
case LBit.L512:
llBits = 2 << 5;
break;
default:
throw new UnsupportedOperationException();
}
if (wbit == WBit.WIG)
mask_W_tmp |= 0x80;
mask_LL = mask_LL_tmp;
p1Bits = p1BitsTmp;
mask_W = mask_W_tmp;
}
static final class TryConvertToDisp8NImpl extends TryConvertToDisp8N {
@Override
Integer convert(Encoder encoder, OpCodeHandler handler, Instruction instruction, int displ) {
EvexHandler evexHandler = (EvexHandler)handler;
int n = TupleTypeTable.getDisp8N(evexHandler.tupleType, (encoder.encoderFlags & EncoderFlags.BROADCAST) != 0);
int res = displ / n;
if (res * n == displ && -0x80 <= res && res <= 0x7F)
return res;
return null;
}
}
@Override
void encode(Encoder encoder, Instruction instruction) {
encoder.writePrefixes(instruction);
int encoderFlags = encoder.encoderFlags;
encoder.writeByteInternal(0x62);
int b = table;
b |= (encoderFlags & 7) << 5;
b |= (encoderFlags >>> (9 - 4)) & 0x10;
b ^= ~0xF;
encoder.writeByteInternal(b);
b = p1Bits;
b |= (~encoderFlags >>> (EncoderFlags.VVVVV_SHIFT - 3)) & 0x78;
b |= mask_W & encoder.internal_EVEX_WIG;
encoder.writeByteInternal(b);
b = instruction.getOpMask();
if (b != 0) {
if ((encFlags3 & EncFlags3.OP_MASK_REGISTER) == 0)
encoder.setErrorMessage("The instruction doesn't support opmask registers");
}
else {
if ((encFlags3 & EncFlags3.REQUIRE_OP_MASK_REGISTER) != 0)
encoder.setErrorMessage("The instruction must use an opmask register");
}
b |= (encoderFlags >>> (EncoderFlags.VVVVV_SHIFT + 4 - 3)) & 8;
if (instruction.getSuppressAllExceptions()) {
if ((encFlags3 & EncFlags3.SUPPRESS_ALL_EXCEPTIONS) == 0)
encoder.setErrorMessage("The instruction doesn't support suppress-all-exceptions");
b |= 0x10;
}
int rc = instruction.getRoundingControl();
if (rc != RoundingControl.NONE) {
if ((encFlags3 & EncFlags3.ROUNDING_CONTROL) == 0)
encoder.setErrorMessage("The instruction doesn't support rounding control");
b |= 0x10;
b |= (rc - RoundingControl.ROUND_TO_NEAREST) << 5;
}
else if ((encFlags3 & EncFlags3.SUPPRESS_ALL_EXCEPTIONS) == 0 || !instruction.getSuppressAllExceptions())
b |= llBits;
if ((encoderFlags & EncoderFlags.BROADCAST) != 0)
b |= 0x10;
else if (instruction.getBroadcast())
encoder.setErrorMessage("The instruction doesn't support broadcasting");
if (instruction.getZeroingMasking()) {
if ((encFlags3 & EncFlags3.ZEROING_MASKING) == 0)
encoder.setErrorMessage("The instruction doesn't support zeroing masking");
b |= 0x80;
}
b ^= 8;
b |= mask_LL & encoder.internal_EVEX_LIG;
encoder.writeByteInternal(b);
}
}
/**
* DO NOT USE: INTERNAL API
*
* @deprecated Not part of the public API
*/
@Deprecated
public static final class MvexHandler extends OpCodeHandler {
private final int wbit;
private final int table;
private final int p1Bits;
private final int mask_W;
private static Op[] createOps(int encFlags1) {
int op0 = (encFlags1 >>> EncFlags1.MVEX_OP0_SHIFT) & EncFlags1.MVEX_OP_MASK;
int op1 = (encFlags1 >>> EncFlags1.MVEX_OP1_SHIFT) & EncFlags1.MVEX_OP_MASK;
int op2 = (encFlags1 >>> EncFlags1.MVEX_OP2_SHIFT) & EncFlags1.MVEX_OP_MASK;
int op3 = (encFlags1 >>> EncFlags1.MVEX_OP3_SHIFT) & EncFlags1.MVEX_OP_MASK;
if (op3 != 0) {
assert op0 != 0 && op1 != 0 && op2 != 0;
return new Op[] { OpTables.mvexOps[op0 - 1], OpTables.mvexOps[op1 - 1], OpTables.mvexOps[op2 - 1], OpTables.mvexOps[op3 - 1] };
}
if (op2 != 0) {
assert op0 != 0 && op1 != 0;
return new Op[] { OpTables.mvexOps[op0 - 1], OpTables.mvexOps[op1 - 1], OpTables.mvexOps[op2 - 1] };
}
if (op1 != 0) {
assert op0 != 0;
return new Op[] { OpTables.mvexOps[op0 - 1], OpTables.mvexOps[op1 - 1] };
}
if (op0 != 0)
return new Op[] { OpTables.mvexOps[op0 - 1] };
return new Op[0];
}
static final TryConvertToDisp8N tryConvertToDisp8N = new TryConvertToDisp8NImpl();
/** DO NOT USE: INTERNAL API */
public MvexHandler(int encFlags1, int encFlags2, int encFlags3) {
super(encFlags2, encFlags3, false, tryConvertToDisp8N, createOps(encFlags1));
int p1BitsTmp = 0;
int mask_W_tmp = 0;
table = (encFlags2 >>> EncFlags2.TABLE_SHIFT) & EncFlags2.TABLE_MASK;
p1BitsTmp = (encFlags2 >>> EncFlags2.MANDATORY_PREFIX_SHIFT) & EncFlags2.MANDATORY_PREFIX_MASK;
wbit = ((encFlags2 >>> EncFlags2.WBIT_SHIFT) & EncFlags2.WBIT_MASK);
if (wbit == WBit.W1)
p1BitsTmp |= 0x80;
if (wbit == WBit.WIG)
mask_W_tmp |= 0x80;
p1Bits = p1BitsTmp;
mask_W = mask_W_tmp;
}
static final class TryConvertToDisp8NImpl extends TryConvertToDisp8N {
@Override
Integer convert(Encoder encoder, OpCodeHandler handler, Instruction instruction, int displ) {
int sss = (instruction.getMvexRegMemConv() - MvexRegMemConv.MEM_CONV_NONE) & 7;
int tupleType = MvexTupleTypeLut.data[MvexInfo.getTupleTypeLutKind(instruction.getCode()) * 8 + sss];
int n = TupleTypeTable.getDisp8N(tupleType, false);
int res = displ / n;
if (res * n == displ && -0x80 <= res && res <= 0x7F)
return res;
return null;
}
}
	@Override
	void encode(Encoder encoder, Instruction instruction) {
		encoder.writePrefixes(instruction);
		int encoderFlags = encoder.encoderFlags;
		// MVEX uses a 4-byte prefix starting with the 0x62 lead byte (same as EVEX).
		encoder.writeByteInternal(0x62);
		// Prefix byte 1: table selector in the low bits plus register-extension
		// bits taken from encoderFlags; the XOR stores the extension bits in
		// inverted (one's-complement) form as the prefix format requires.
		int b = table;
		b |= (encoderFlags & 7) << 5;
		b |= (encoderFlags >>> (9 - 4)) & 0x10;
		b ^= ~0xF;
		encoder.writeByteInternal(b);
		// Prefix byte 2: mandatory prefix + W bit (p1Bits), the inverted vvvv
		// register bits, and the W bit overridden for WIG opcodes.
		b = p1Bits;
		b |= (~encoderFlags >>> (EncoderFlags.VVVVV_SHIFT - 3)) & 0x78;
		b |= mask_W & encoder.internal_MVEX_WIG;
		encoder.writeByteInternal(b);
		// Prefix byte 3 starts with the opmask register number; validate that
		// the opcode allows (or requires) an opmask.
		b = instruction.getOpMask();
		if (b != 0) {
			if ((encFlags3 & EncFlags3.OP_MASK_REGISTER) == 0)
				encoder.setErrorMessage("The instruction doesn't support opmask registers");
		}
		else {
			if ((encFlags3 & EncFlags3.REQUIRE_OP_MASK_REGISTER) != 0)
				encoder.setErrorMessage("The instruction must use an opmask register");
		}
		// High bit of the vvvv register field (bit 3 here).
		b |= (encoderFlags >>> (EncoderFlags.VVVVV_SHIFT + 4 - 3)) & 8;
		int conv = instruction.getMvexRegMemConv();
		// Memory ops can only be op0-op2, never op3 (imm8)
		if (instruction.getOp0Kind() == OpKind.MEMORY || instruction.getOp1Kind() == OpKind.MEMORY || instruction.getOp2Kind() == OpKind.MEMORY) {
			// Memory form: bits 4-6 select the memory up/down conversion,
			// bit 7 is the eviction hint.
			if (conv >= MvexRegMemConv.MEM_CONV_NONE && conv <= MvexRegMemConv.MEM_CONV_SINT16)
				b |= (conv - MvexRegMemConv.MEM_CONV_NONE) << 4;
			else if (conv == MvexRegMemConv.NONE) {
				// Nothing, treat it as MvexRegMemConv.MEM_CONV_NONE
			}
			else
				encoder.setErrorMessage("Memory operands must use a valid MvexRegMemConv variant, eg. MvexRegMemConv.MEM_CONV_NONE");
			if (instruction.getMvexEvictionHint()) {
				if (MvexInfo.canUseEvictionHint(instruction.getCode()))
					b |= 0x80;
				else
					encoder.setErrorMessage("This instruction doesn't support eviction hint (`{eh}`)");
			}
		}
		else {
			// Register form: bits 4-6 hold either SAE/rounding control or a
			// register swizzle, depending on the MvexRegMemConv variant.
			if (instruction.getMvexEvictionHint())
				encoder.setErrorMessage("Only memory operands can enable eviction hint (`{eh}`)");
			if (conv == MvexRegMemConv.NONE) {
				b |= 0x80;
				if (instruction.getSuppressAllExceptions()) {
					b |= 0x40;
					if ((encFlags3 & EncFlags3.SUPPRESS_ALL_EXCEPTIONS) == 0)
						encoder.setErrorMessage("The instruction doesn't support suppress-all-exceptions");
				}
				int rc = instruction.getRoundingControl();
				if (rc == RoundingControl.NONE) {
					// Nothing
				}
				else {
					if ((encFlags3 & EncFlags3.ROUNDING_CONTROL) == 0)
						encoder.setErrorMessage("The instruction doesn't support rounding control");
					else {
						b |= (rc - RoundingControl.ROUND_TO_NEAREST) << 4;
					}
				}
			}
			else if (conv >= MvexRegMemConv.REG_SWIZZLE_NONE && conv <= MvexRegMemConv.REG_SWIZZLE_DDDD) {
				if (instruction.getSuppressAllExceptions())
					encoder.setErrorMessage("Can't use {sae} with register swizzles");
				else if (instruction.getRoundingControl() != RoundingControl.NONE)
					encoder.setErrorMessage("Can't use rounding control with register swizzles");
				b |= ((conv - MvexRegMemConv.REG_SWIZZLE_NONE) & 7) << 4;
			}
			else
				encoder.setErrorMessage("Register operands can't use memory up/down conversions");
		}
		// Opcodes whose EH bit is hard-wired to 1 force bit 7 on.
		if (MvexInfo.getEHBit(instruction.getCode()) == MvexEHBit.EH1)
			b |= 0x80;
		// Bit 3 (high vvvv bit) is stored inverted, like the other register bits.
		b ^= 8;
		encoder.writeByteInternal(b);
	}
}
	/**
	 * DO NOT USE: INTERNAL API
	 *
	 * <p>Handler for AMD 3DNow! instructions. Their real opcode byte trails the
	 * operands, so it is saved here and emitted as a one-byte "immediate"
	 * ({@code ImmSize.SIZE1_OP_CODE}) while the opcode field passed to the base
	 * class is patched to the {@code 0x0F} escape byte.
	 *
	 * @deprecated Not part of the public API
	 */
	@Deprecated
	public static final class D3nowHandler extends OpCodeHandler {
		// The instruction's real opcode, written last as a one-byte immediate.
		final int immediate;
		/** DO NOT USE: INTERNAL API */
		public D3nowHandler(int encFlags2, int encFlags3) {
			// Replace the 16-bit opcode field with 0x000F and save the original
			// opcode; encode() writes the other 0x0F escape byte itself.
			super((encFlags2 & ~(0xFFFF << EncFlags2.OP_CODE_SHIFT)) | (0x000F << EncFlags2.OP_CODE_SHIFT), encFlags3, false, null,
					Op.operands_3dnow);
			immediate = getOpCode(encFlags2);
			// 3DNow! opcodes are a single byte.
			assert Integer.compareUnsigned(immediate, 0xFF) <= 0 : immediate;
		}
		@Override
		void encode(Encoder encoder, Instruction instruction) {
			encoder.writePrefixes(instruction);
			encoder.writeByteInternal(0x0F);
			// The trailing opcode byte is emitted via the immediate machinery.
			encoder.immSize = ImmSize.SIZE1_OP_CODE;
			encoder.immediate = immediate;
		}
	}
}
| src/java/iced-x86/src/main/java/com/github/icedland/iced/x86/enc/InternalOpCodeHandlers.java | // SPDX-License-Identifier: MIT
// Copyright (C) 2018-present iced project and contributors
package com.github.icedland.iced.x86.enc;
import com.github.icedland.iced.x86.Code;
import com.github.icedland.iced.x86.Instruction;
import com.github.icedland.iced.x86.MvexEHBit;
import com.github.icedland.iced.x86.MvexRegMemConv;
import com.github.icedland.iced.x86.OpKind;
import com.github.icedland.iced.x86.RoundingControl;
import com.github.icedland.iced.x86.internal.MandatoryPrefixByte;
import com.github.icedland.iced.x86.internal.MvexInfo;
import com.github.icedland.iced.x86.internal.MvexTupleTypeLut;
import com.github.icedland.iced.x86.internal.TupleTypeTable;
import com.github.icedland.iced.x86.internal.enc.EncFlags1;
import com.github.icedland.iced.x86.internal.enc.EncFlags2;
import com.github.icedland.iced.x86.internal.enc.EncFlags3;
import com.github.icedland.iced.x86.internal.enc.EncoderFlags;
import com.github.icedland.iced.x86.internal.enc.ImmSize;
import com.github.icedland.iced.x86.internal.enc.LBit;
import com.github.icedland.iced.x86.internal.enc.LegacyOpCodeTable;
import com.github.icedland.iced.x86.internal.enc.VexOpCodeTable;
import com.github.icedland.iced.x86.internal.enc.WBit;
/** DO NOT USE: INTERNAL API */
public final class InternalOpCodeHandlers {
private InternalOpCodeHandlers() {
}
/**
* DO NOT USE: INTERNAL API
*
* @deprecated Not part of the public API
*/
@Deprecated
public static final class InvalidHandler extends OpCodeHandler {
/** DO NOT USE: INTERNAL API */
public static final String ERROR_MESSAGE = "Can't encode an invalid instruction";
/** DO NOT USE: INTERNAL API */
public InvalidHandler() {
super(EncFlags2.NONE, EncFlags3.BIT16OR32 | EncFlags3.BIT64, false, null, new Op[0]);
}
@Override
void encode(Encoder encoder, Instruction instruction) {
encoder.setErrorMessage(ERROR_MESSAGE);
}
}
/**
* DO NOT USE: INTERNAL API
*
* @deprecated Not part of the public API
*/
@Deprecated
public static final class DeclareDataHandler extends OpCodeHandler {
final int elemLength;
final int maxLength;
/** DO NOT USE: INTERNAL API */
public DeclareDataHandler(int code) {
super(EncFlags2.NONE, EncFlags3.BIT16OR32 | EncFlags3.BIT64, true, null, new Op[0]);
switch (code) {
case Code.DECLAREBYTE:
elemLength = 1;
break;
case Code.DECLAREWORD:
elemLength = 2;
break;
case Code.DECLAREDWORD:
elemLength = 4;
break;
case Code.DECLAREQWORD:
elemLength = 8;
break;
default:
throw new UnsupportedOperationException();
}
maxLength = 16 / elemLength;
}
@Override
void encode(Encoder encoder, Instruction instruction) {
int declDataCount = instruction.getDeclareDataCount();
if (declDataCount < 1 || declDataCount > maxLength) {
encoder.setErrorMessage(String.format("Invalid db/dw/dd/dq data count. Count = %d, max count = %d", declDataCount, maxLength));
return;
}
int length = declDataCount * elemLength;
for (int i = 0; i < length; i++)
encoder.writeByteInternal(instruction.getDeclareByteValue(i));
}
}
/**
* DO NOT USE: INTERNAL API
*
* @deprecated Not part of the public API
*/
@Deprecated
public static final class ZeroBytesHandler extends OpCodeHandler {
/** DO NOT USE: INTERNAL API */
public ZeroBytesHandler(int code) {
super(EncFlags2.NONE, EncFlags3.BIT16OR32 | EncFlags3.BIT64, true, null, new Op[0]);
}
@Override
void encode(Encoder encoder, Instruction instruction) {
}
}
/**
* DO NOT USE: INTERNAL API
*
* @deprecated Not part of the public API
*/
@Deprecated
public static final class LegacyHandler extends OpCodeHandler {
private final int tableByte1, tableByte2;
private final int mandatoryPrefix;
private static Op[] createOps(int encFlags1) {
int op0 = (encFlags1 >>> EncFlags1.LEGACY_OP0_SHIFT) & EncFlags1.LEGACY_OP_MASK;
int op1 = (encFlags1 >>> EncFlags1.LEGACY_OP1_SHIFT) & EncFlags1.LEGACY_OP_MASK;
int op2 = (encFlags1 >>> EncFlags1.LEGACY_OP2_SHIFT) & EncFlags1.LEGACY_OP_MASK;
int op3 = (encFlags1 >>> EncFlags1.LEGACY_OP3_SHIFT) & EncFlags1.LEGACY_OP_MASK;
if (op3 != 0) {
assert op0 != 0 && op1 != 0 && op2 != 0;
return new Op[] { OpTables.legacyOps[op0 - 1], OpTables.legacyOps[op1 - 1], OpTables.legacyOps[op2 - 1],
OpTables.legacyOps[op3 - 1] };
}
if (op2 != 0) {
assert op0 != 0 && op1 != 0;
return new Op[] { OpTables.legacyOps[op0 - 1], OpTables.legacyOps[op1 - 1], OpTables.legacyOps[op2 - 1] };
}
if (op1 != 0) {
assert op0 != 0;
return new Op[] { OpTables.legacyOps[op0 - 1], OpTables.legacyOps[op1 - 1] };
}
if (op0 != 0)
return new Op[] { OpTables.legacyOps[op0 - 1] };
return new Op[0];
}
/** DO NOT USE: INTERNAL API */
public LegacyHandler(int encFlags1, int encFlags2, int encFlags3) {
super(encFlags2, encFlags3, false, null, createOps(encFlags1));
switch ((encFlags2 >>> EncFlags2.TABLE_SHIFT) & EncFlags2.TABLE_MASK) {
case LegacyOpCodeTable.MAP0:
tableByte1 = 0;
tableByte2 = 0;
break;
case LegacyOpCodeTable.MAP0F:
tableByte1 = 0x0F;
tableByte2 = 0;
break;
case LegacyOpCodeTable.MAP0F38:
tableByte1 = 0x0F;
tableByte2 = 0x38;
break;
case LegacyOpCodeTable.MAP0F3A:
tableByte1 = 0x0F;
tableByte2 = 0x3A;
break;
default:
throw new UnsupportedOperationException();
}
switch ((encFlags2 >>> EncFlags2.MANDATORY_PREFIX_SHIFT) & EncFlags2.MANDATORY_PREFIX_MASK) {
case MandatoryPrefixByte.NONE:
mandatoryPrefix = MandatoryPrefixByte.NONE;
break;
case MandatoryPrefixByte.P66:
mandatoryPrefix = MandatoryPrefixByte.P66;
break;
case MandatoryPrefixByte.PF3:
mandatoryPrefix = MandatoryPrefixByte.PF3;
break;
case MandatoryPrefixByte.PF2:
mandatoryPrefix = MandatoryPrefixByte.PF2;
break;
default:
throw new UnsupportedOperationException();
}
}
@Override
void encode(Encoder encoder, Instruction instruction) {
int b = mandatoryPrefix;
encoder.writePrefixes(instruction, b != 0xF3);
if (b != 0)
encoder.writeByteInternal(b);
b = encoder.encoderFlags;
b &= 0x4F;
if (b != 0) {
if ((encoder.encoderFlags & EncoderFlags.HIGH_LEGACY_8_BIT_REGS) != 0)
encoder.setErrorMessage(
"Registers AH, CH, DH, BH can't be used if there's a REX prefix. Use AL, CL, DL, BL, SPL, BPL, SIL, DIL, R8L-R15L instead.");
b |= 0x40;
encoder.writeByteInternal(b);
}
if ((b = tableByte1) != 0) {
encoder.writeByteInternal(b);
if ((b = tableByte2) != 0)
encoder.writeByteInternal(b);
}
}
}
/**
* DO NOT USE: INTERNAL API
*
* @deprecated Not part of the public API
*/
@Deprecated
public static final class VexHandler extends OpCodeHandler {
private final int table;
private final int lastByte;
private final int mask_W_L;
private final int mask_L;
private final int W1;
private static Op[] createOps(int encFlags1) {
int op0 = (encFlags1 >>> EncFlags1.VEX_OP0_SHIFT) & EncFlags1.VEX_OP_MASK;
int op1 = (encFlags1 >>> EncFlags1.VEX_OP1_SHIFT) & EncFlags1.VEX_OP_MASK;
int op2 = (encFlags1 >>> EncFlags1.VEX_OP2_SHIFT) & EncFlags1.VEX_OP_MASK;
int op3 = (encFlags1 >>> EncFlags1.VEX_OP3_SHIFT) & EncFlags1.VEX_OP_MASK;
int op4 = (encFlags1 >>> EncFlags1.VEX_OP4_SHIFT) & EncFlags1.VEX_OP_MASK;
if (op4 != 0) {
assert op0 != 0 && op1 != 0 && op2 != 0 && op3 != 0;
return new Op[] { OpTables.vexOps[op0 - 1], OpTables.vexOps[op1 - 1], OpTables.vexOps[op2 - 1], OpTables.vexOps[op3 - 1],
OpTables.vexOps[op4 - 1] };
}
if (op3 != 0) {
assert op0 != 0 && op1 != 0 && op2 != 0;
return new Op[] { OpTables.vexOps[op0 - 1], OpTables.vexOps[op1 - 1], OpTables.vexOps[op2 - 1], OpTables.vexOps[op3 - 1] };
}
if (op2 != 0) {
assert op0 != 0 && op1 != 0;
return new Op[] { OpTables.vexOps[op0 - 1], OpTables.vexOps[op1 - 1], OpTables.vexOps[op2 - 1] };
}
if (op1 != 0) {
assert op0 != 0;
return new Op[] { OpTables.vexOps[op0 - 1], OpTables.vexOps[op1 - 1] };
}
if (op0 != 0)
return new Op[] { OpTables.vexOps[op0 - 1] };
return new Op[0];
}
/** DO NOT USE: INTERNAL API */
public VexHandler(int encFlags1, int encFlags2, int encFlags3) {
super(encFlags2, encFlags3, false, null, createOps(encFlags1));
int lastByteTmp = 0;
int mask_W_L_tmp = 0;
int mask_L_tmp = 0;
table = (encFlags2 >>> EncFlags2.TABLE_SHIFT) & EncFlags2.TABLE_MASK;
int wbit = (encFlags2 >>> EncFlags2.WBIT_SHIFT) & EncFlags2.WBIT_MASK;
W1 = wbit == WBit.W1 ? 0xFFFF_FFFF : 0;
int lbit = (encFlags2 >>> EncFlags2.LBIT_SHIFT) & EncFlags2.LBIT_MASK;
switch (lbit) {
case LBit.L1:
case LBit.L256:
lastByteTmp = 4;
break;
}
if (W1 != 0)
lastByteTmp |= 0x80;
lastByteTmp |= (encFlags2 >>> EncFlags2.MANDATORY_PREFIX_SHIFT) & EncFlags2.MANDATORY_PREFIX_MASK;
if (wbit == WBit.WIG)
mask_W_L_tmp |= 0x80;
if (lbit == LBit.LIG) {
mask_W_L_tmp |= 4;
mask_L_tmp |= 4;
}
lastByte = lastByteTmp;
mask_W_L = mask_W_L_tmp;
mask_L = mask_L_tmp;
}
@Override
void encode(Encoder encoder, Instruction instruction) {
encoder.writePrefixes(instruction);
int encoderFlags = encoder.encoderFlags;
int b = lastByte;
b |= (~encoderFlags >>> (EncoderFlags.VVVVV_SHIFT - 3)) & 0x78;
if ((encoder.internal_PreventVEX2 | W1 | (table - VexOpCodeTable.MAP0F)
| (encoderFlags & (EncoderFlags.X | EncoderFlags.B | EncoderFlags.W))) != 0) {
encoder.writeByteInternal(0xC4);
int b2 = table;
b2 |= (~encoderFlags & 7) << 5;
encoder.writeByteInternal(b2);
b |= mask_W_L & encoder.internal_VEX_WIG_LIG;
encoder.writeByteInternal(b);
}
else {
encoder.writeByteInternal(0xC5);
b |= (~encoderFlags & 4) << 5;
b |= mask_L & encoder.internal_VEX_LIG;
encoder.writeByteInternal(b);
}
}
}
/**
* DO NOT USE: INTERNAL API
*
* @deprecated Not part of the public API
*/
@Deprecated
public static final class XopHandler extends OpCodeHandler {
private final int table;
private final int lastByte;
private static Op[] createOps(int encFlags1) {
int op0 = (encFlags1 >>> EncFlags1.XOP_OP0_SHIFT) & EncFlags1.XOP_OP_MASK;
int op1 = (encFlags1 >>> EncFlags1.XOP_OP1_SHIFT) & EncFlags1.XOP_OP_MASK;
int op2 = (encFlags1 >>> EncFlags1.XOP_OP2_SHIFT) & EncFlags1.XOP_OP_MASK;
int op3 = (encFlags1 >>> EncFlags1.XOP_OP3_SHIFT) & EncFlags1.XOP_OP_MASK;
if (op3 != 0) {
assert op0 != 0 && op1 != 0 && op2 != 0;
return new Op[] { OpTables.xopOps[op0 - 1], OpTables.xopOps[op1 - 1], OpTables.xopOps[op2 - 1], OpTables.xopOps[op3 - 1] };
}
if (op2 != 0) {
assert op0 != 0 && op1 != 0;
return new Op[] { OpTables.xopOps[op0 - 1], OpTables.xopOps[op1 - 1], OpTables.xopOps[op2 - 1] };
}
if (op1 != 0) {
assert op0 != 0;
return new Op[] { OpTables.xopOps[op0 - 1], OpTables.xopOps[op1 - 1] };
}
if (op0 != 0)
return new Op[] { OpTables.xopOps[op0 - 1] };
return new Op[0];
}
/** DO NOT USE: INTERNAL API */
public XopHandler(int encFlags1, int encFlags2, int encFlags3) {
super(encFlags2, encFlags3, false, null, createOps(encFlags1));
int lastByteTmp = 0;
table = 8 + ((encFlags2 >>> EncFlags2.TABLE_SHIFT) & EncFlags2.TABLE_MASK);
assert table == 8 || table == 9 || table == 10 : table;
switch ((encFlags2 >>> EncFlags2.LBIT_SHIFT) & EncFlags2.LBIT_MASK) {
case LBit.L1:
case LBit.L256:
lastByteTmp = 4;
break;
}
int wbit = (encFlags2 >>> EncFlags2.WBIT_SHIFT) & EncFlags2.WBIT_MASK;
if (wbit == WBit.W1)
lastByteTmp |= 0x80;
lastByteTmp |= (encFlags2 >>> EncFlags2.MANDATORY_PREFIX_SHIFT) & EncFlags2.MANDATORY_PREFIX_MASK;
lastByte = lastByteTmp;
}
@Override
void encode(Encoder encoder, Instruction instruction) {
encoder.writePrefixes(instruction);
encoder.writeByteInternal(0x8F);
int encoderFlags = encoder.encoderFlags;
int b = table;
b |= (~encoderFlags & 7) << 5;
encoder.writeByteInternal(b);
b = lastByte;
b |= (~encoderFlags >>> (EncoderFlags.VVVVV_SHIFT - 3)) & 0x78;
encoder.writeByteInternal(b);
}
}
/**
* DO NOT USE: INTERNAL API
*
* @deprecated Not part of the public API
*/
@Deprecated
public static final class EvexHandler extends OpCodeHandler {
private final int wbit;
private final int tupleType;
private final int table;
private final int p1Bits;
private final int llBits;
private final int mask_W;
private final int mask_LL;
private static Op[] createOps(int encFlags1) {
int op0 = (encFlags1 >>> EncFlags1.EVEX_OP0_SHIFT) & EncFlags1.EVEX_OP_MASK;
int op1 = (encFlags1 >>> EncFlags1.EVEX_OP1_SHIFT) & EncFlags1.EVEX_OP_MASK;
int op2 = (encFlags1 >>> EncFlags1.EVEX_OP2_SHIFT) & EncFlags1.EVEX_OP_MASK;
int op3 = (encFlags1 >>> EncFlags1.EVEX_OP3_SHIFT) & EncFlags1.EVEX_OP_MASK;
if (op3 != 0) {
assert op0 != 0 && op1 != 0 && op2 != 0;
return new Op[] { OpTables.evexOps[op0 - 1], OpTables.evexOps[op1 - 1], OpTables.evexOps[op2 - 1], OpTables.evexOps[op3 - 1] };
}
if (op2 != 0) {
assert op0 != 0 && op1 != 0;
return new Op[] { OpTables.evexOps[op0 - 1], OpTables.evexOps[op1 - 1], OpTables.evexOps[op2 - 1] };
}
if (op1 != 0) {
assert op0 != 0;
return new Op[] { OpTables.evexOps[op0 - 1], OpTables.evexOps[op1 - 1] };
}
if (op0 != 0)
return new Op[] { OpTables.evexOps[op0 - 1] };
return new Op[0];
}
static final TryConvertToDisp8N tryConvertToDisp8N = new TryConvertToDisp8NImpl();
/** DO NOT USE: INTERNAL API */
public EvexHandler(int encFlags1, int encFlags2, int encFlags3) {
super(encFlags2, encFlags3, false, tryConvertToDisp8N, createOps(encFlags1));
int mask_LL_tmp = 0;
int p1BitsTmp = 0;
int mask_W_tmp = 0;
tupleType = (encFlags3 >>> EncFlags3.TUPLE_TYPE_SHIFT) & EncFlags3.TUPLE_TYPE_MASK;
table = (encFlags2 >>> EncFlags2.TABLE_SHIFT) & EncFlags2.TABLE_MASK;
p1BitsTmp = 4 | ((encFlags2 >>> EncFlags2.MANDATORY_PREFIX_SHIFT) & EncFlags2.MANDATORY_PREFIX_MASK);
wbit = (encFlags2 >>> EncFlags2.WBIT_SHIFT) & EncFlags2.WBIT_MASK;
if (wbit == WBit.W1)
p1BitsTmp |= 0x80;
switch ((encFlags2 >>> EncFlags2.LBIT_SHIFT) & EncFlags2.LBIT_MASK) {
case LBit.LIG:
llBits = 0 << 5;
mask_LL_tmp = 3 << 5;
break;
case LBit.L0:
case LBit.LZ:
case LBit.L128:
llBits = 0 << 5;
break;
case LBit.L1:
case LBit.L256:
llBits = 1 << 5;
break;
case LBit.L512:
llBits = 2 << 5;
break;
default:
throw new UnsupportedOperationException();
}
if (wbit == WBit.WIG)
mask_W_tmp |= 0x80;
mask_LL = mask_LL_tmp;
p1Bits = p1BitsTmp;
mask_W = mask_W_tmp;
}
static final class TryConvertToDisp8NImpl extends TryConvertToDisp8N {
@Override
Integer convert(Encoder encoder, OpCodeHandler handler, Instruction instruction, int displ) {
EvexHandler evexHandler = (EvexHandler)handler;
int n = TupleTypeTable.getDisp8N(evexHandler.tupleType, (encoder.encoderFlags & EncoderFlags.BROADCAST) != 0);
int res = displ / n;
if (res * n == displ && -0x80 <= res && res <= 0x7F)
return res;
return null;
}
}
@Override
void encode(Encoder encoder, Instruction instruction) {
encoder.writePrefixes(instruction);
int encoderFlags = encoder.encoderFlags;
encoder.writeByteInternal(0x62);
int b = table;
b |= (encoderFlags & 7) << 5;
b |= (encoderFlags >>> (9 - 4)) & 0x10;
b ^= ~0xF;
encoder.writeByteInternal(b);
b = p1Bits;
b |= (~encoderFlags >>> (EncoderFlags.VVVVV_SHIFT - 3)) & 0x78;
b |= mask_W & encoder.internal_EVEX_WIG;
encoder.writeByteInternal(b);
b = instruction.getOpMask();
if (b != 0) {
if ((encFlags3 & EncFlags3.OP_MASK_REGISTER) == 0)
encoder.setErrorMessage("The instruction doesn't support opmask registers");
}
else {
if ((encFlags3 & EncFlags3.REQUIRE_OP_MASK_REGISTER) != 0)
encoder.setErrorMessage("The instruction must use an opmask register");
}
b |= (encoderFlags >>> (EncoderFlags.VVVVV_SHIFT + 4 - 3)) & 8;
if (instruction.getSuppressAllExceptions()) {
if ((encFlags3 & EncFlags3.SUPPRESS_ALL_EXCEPTIONS) == 0)
encoder.setErrorMessage("The instruction doesn't support suppress-all-exceptions");
b |= 0x10;
}
int rc = instruction.getRoundingControl();
if (rc != RoundingControl.NONE) {
if ((encFlags3 & EncFlags3.ROUNDING_CONTROL) == 0)
encoder.setErrorMessage("The instruction doesn't support rounding control");
b |= 0x10;
b |= (rc - RoundingControl.ROUND_TO_NEAREST) << 5;
}
else if ((encFlags3 & EncFlags3.SUPPRESS_ALL_EXCEPTIONS) == 0 || !instruction.getSuppressAllExceptions())
b |= llBits;
if ((encoderFlags & EncoderFlags.BROADCAST) != 0)
b |= 0x10;
else if (instruction.getBroadcast())
encoder.setErrorMessage("The instruction doesn't support broadcasting");
if (instruction.getZeroingMasking()) {
if ((encFlags3 & EncFlags3.ZEROING_MASKING) == 0)
encoder.setErrorMessage("The instruction doesn't support zeroing masking");
b |= 0x80;
}
b ^= 8;
b |= mask_LL & encoder.internal_EVEX_LIG;
encoder.writeByteInternal(b);
}
}
/**
* DO NOT USE: INTERNAL API
*
* @deprecated Not part of the public API
*/
@Deprecated
public static final class MvexHandler extends OpCodeHandler {
private final int wbit;
private final int table;
private final int p1Bits;
private final int mask_W;
private static Op[] createOps(int encFlags1) {
int op0 = (encFlags1 >>> EncFlags1.MVEX_OP0_SHIFT) & EncFlags1.MVEX_OP_MASK;
int op1 = (encFlags1 >>> EncFlags1.MVEX_OP1_SHIFT) & EncFlags1.MVEX_OP_MASK;
int op2 = (encFlags1 >>> EncFlags1.MVEX_OP2_SHIFT) & EncFlags1.MVEX_OP_MASK;
int op3 = (encFlags1 >>> EncFlags1.MVEX_OP3_SHIFT) & EncFlags1.MVEX_OP_MASK;
if (op3 != 0) {
assert op0 != 0 && op1 != 0 && op2 != 0;
return new Op[] { OpTables.mvexOps[op0 - 1], OpTables.mvexOps[op1 - 1], OpTables.mvexOps[op2 - 1], OpTables.mvexOps[op3 - 1] };
}
if (op2 != 0) {
assert op0 != 0 && op1 != 0;
return new Op[] { OpTables.mvexOps[op0 - 1], OpTables.mvexOps[op1 - 1], OpTables.mvexOps[op2 - 1] };
}
if (op1 != 0) {
assert op0 != 0;
return new Op[] { OpTables.mvexOps[op0 - 1], OpTables.mvexOps[op1 - 1] };
}
if (op0 != 0)
return new Op[] { OpTables.mvexOps[op0 - 1] };
return new Op[0];
}
static final TryConvertToDisp8N tryConvertToDisp8N = new TryConvertToDisp8NImpl();
/** DO NOT USE: INTERNAL API */
public MvexHandler(int encFlags1, int encFlags2, int encFlags3) {
super(encFlags2, encFlags3, false, tryConvertToDisp8N, createOps(encFlags1));
int p1BitsTmp = 0;
int mask_W_tmp = 0;
table = (encFlags2 >>> EncFlags2.TABLE_SHIFT) & EncFlags2.TABLE_MASK;
p1BitsTmp = (encFlags2 >>> EncFlags2.MANDATORY_PREFIX_SHIFT) & EncFlags2.MANDATORY_PREFIX_MASK;
wbit = ((encFlags2 >>> EncFlags2.WBIT_SHIFT) & EncFlags2.WBIT_MASK);
if (wbit == WBit.W1)
p1BitsTmp |= 0x80;
if (wbit == WBit.WIG)
mask_W_tmp |= 0x80;
p1Bits = p1BitsTmp;
mask_W = mask_W_tmp;
}
static final class TryConvertToDisp8NImpl extends TryConvertToDisp8N {
@Override
Integer convert(Encoder encoder, OpCodeHandler handler, Instruction instruction, int displ) {
int sss = (instruction.getMvexRegMemConv() - MvexRegMemConv.MEM_CONV_NONE) & 7;
int tupleType = MvexTupleTypeLut.data[MvexInfo.getTupleTypeLutKind(instruction.getCode()) * 8 + sss];
int n = TupleTypeTable.getDisp8N(tupleType, false);
int res = displ / n;
if (res * n == displ && -0x80 <= res && res <= 0x7F)
return res;
return null;
}
}
@Override
void encode(Encoder encoder, Instruction instruction) {
encoder.writePrefixes(instruction);
int encoderFlags = encoder.encoderFlags;
encoder.writeByteInternal(0x62);
int b = table;
b |= (encoderFlags & 7) << 5;
b |= (encoderFlags >>> (9 - 4)) & 0x10;
b ^= ~0xF;
encoder.writeByteInternal(b);
b = p1Bits;
b |= (~encoderFlags >>> (EncoderFlags.VVVVV_SHIFT - 3)) & 0x78;
b |= mask_W & encoder.internal_MVEX_WIG;
encoder.writeByteInternal(b);
b = instruction.getOpMask();
if (b != 0) {
if ((encFlags3 & EncFlags3.OP_MASK_REGISTER) == 0)
encoder.setErrorMessage("The instruction doesn't support opmask registers");
}
else {
if ((encFlags3 & EncFlags3.REQUIRE_OP_MASK_REGISTER) != 0)
encoder.setErrorMessage("The instruction must use an opmask register");
}
b |= (encoderFlags >>> (EncoderFlags.VVVVV_SHIFT + 4 - 3)) & 8;
int conv = instruction.getMvexRegMemConv();
// Memory ops can only be op0-op2, never op3 (imm8)
if (instruction.getOp0Kind() == OpKind.MEMORY || instruction.getOp1Kind() == OpKind.MEMORY || instruction.getOp2Kind() == OpKind.MEMORY) {
if (conv >= MvexRegMemConv.MEM_CONV_NONE && conv <= MvexRegMemConv.MEM_CONV_SINT16)
b |= (conv - MvexRegMemConv.MEM_CONV_NONE) << 4;
else if (conv == MvexRegMemConv.NONE) {
// Nothing, treat it as MvexRegMemConv.MEM_CONV_NONE
}
else
encoder.setErrorMessage("Memory operands must use a valid MvexRegMemConv variant, eg. MvexRegMemConv.MEM_CONV_NONE");
if (instruction.getMvexEvictionHint()) {
if (MvexInfo.canUseEvictionHint(instruction.getCode()))
b |= 0x80;
else
encoder.setErrorMessage("This instruction doesn't support eviction hint (`{eh}`)");
}
}
else {
if (instruction.getMvexEvictionHint())
encoder.setErrorMessage("Only memory operands can enable eviction hint (`{eh}`)");
if (conv == MvexRegMemConv.NONE) {
b |= 0x80;
if (instruction.getSuppressAllExceptions()) {
b |= 0x40;
if ((encFlags3 & EncFlags3.SUPPRESS_ALL_EXCEPTIONS) == 0)
encoder.setErrorMessage("The instruction doesn't support suppress-all-exceptions");
}
int rc = instruction.getRoundingControl();
if (rc == RoundingControl.NONE) {
// Nothing
}
else {
if ((encFlags3 & EncFlags3.ROUNDING_CONTROL) == 0)
encoder.setErrorMessage("The instruction doesn't support rounding control");
else {
b |= (rc - RoundingControl.ROUND_TO_NEAREST) << 4;
}
}
}
else if (conv >= MvexRegMemConv.REG_SWIZZLE_NONE && conv <= MvexRegMemConv.REG_SWIZZLE_DDDD) {
if (instruction.getSuppressAllExceptions())
encoder.setErrorMessage("Can't use {sae} with register swizzles");
else if (instruction.getRoundingControl() != RoundingControl.NONE)
encoder.setErrorMessage("Can't use rounding control with register swizzles");
b |= ((conv - MvexRegMemConv.REG_SWIZZLE_NONE) & 7) << 4;
}
else
encoder.setErrorMessage("Register operands can't use memory up/down conversions");
}
if (MvexInfo.getEHBit(instruction.getCode()) == MvexEHBit.EH1)
b |= 0x80;
b ^= 8;
encoder.writeByteInternal(b);
}
}
/**
* DO NOT USE: INTERNAL API
*
* @deprecated Not part of the public API
*/
@Deprecated
public static final class D3nowHandler extends OpCodeHandler {
final int immediate;
/** DO NOT USE: INTERNAL API */
public D3nowHandler(int encFlags2, int encFlags3) {
super((encFlags2 & ~(0xFFFF << EncFlags2.OP_CODE_SHIFT)) | (0x000F << EncFlags2.OP_CODE_SHIFT), encFlags3, false, null,
Op.operands_3dnow);
immediate = getOpCode(encFlags2);
assert Integer.compareUnsigned(immediate, 0xFF) <= 0 : immediate;
}
@Override
void encode(Encoder encoder, Instruction instruction) {
encoder.writePrefixes(instruction);
encoder.writeByteInternal(0x0F);
encoder.immSize = ImmSize.SIZE1_OP_CODE;
encoder.immediate = immediate;
}
}
}
| Init `mandatoryPrefix` with correct values
| src/java/iced-x86/src/main/java/com/github/icedland/iced/x86/enc/InternalOpCodeHandlers.java | Init `mandatoryPrefix` with correct values | <ide><path>rc/java/iced-x86/src/main/java/com/github/icedland/iced/x86/enc/InternalOpCodeHandlers.java
<ide>
<ide> switch ((encFlags2 >>> EncFlags2.MANDATORY_PREFIX_SHIFT) & EncFlags2.MANDATORY_PREFIX_MASK) {
<ide> case MandatoryPrefixByte.NONE:
<del> mandatoryPrefix = MandatoryPrefixByte.NONE;
<add> mandatoryPrefix = 0x00;
<ide> break;
<ide> case MandatoryPrefixByte.P66:
<del> mandatoryPrefix = MandatoryPrefixByte.P66;
<add> mandatoryPrefix = 0x66;
<ide> break;
<ide> case MandatoryPrefixByte.PF3:
<del> mandatoryPrefix = MandatoryPrefixByte.PF3;
<add> mandatoryPrefix = 0xF3;
<ide> break;
<ide> case MandatoryPrefixByte.PF2:
<del> mandatoryPrefix = MandatoryPrefixByte.PF2;
<add> mandatoryPrefix = 0xF2;
<ide> break;
<ide> default:
<ide> throw new UnsupportedOperationException(); |
|
Java | apache-2.0 | 35142424861cf26262abaed3d0aee63cf4a01ba4 | 0 | spring-projects/spring-framework,spring-projects/spring-framework,spring-projects/spring-framework,spring-projects/spring-framework,spring-projects/spring-framework,spring-projects/spring-framework,spring-projects/spring-framework | /*
* Copyright 2002-2013 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.http.converter.xml;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.HashSet;
import java.util.Set;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.stream.XMLInputFactory;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamReader;
import javax.xml.transform.Result;
import javax.xml.transform.Source;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.sax.SAXSource;
import javax.xml.transform.stax.StAXSource;
import javax.xml.transform.stream.StreamResult;
import javax.xml.transform.stream.StreamSource;
import org.w3c.dom.Document;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;
import org.xml.sax.helpers.XMLReaderFactory;
import org.springframework.http.HttpInputMessage;
import org.springframework.http.HttpOutputMessage;
import org.springframework.http.MediaType;
import org.springframework.http.converter.AbstractHttpMessageConverter;
import org.springframework.http.converter.HttpMessageConversionException;
import org.springframework.http.converter.HttpMessageNotReadableException;
import org.springframework.http.converter.HttpMessageNotWritableException;
import org.springframework.util.StreamUtils;
/**
* Implementation of {@link org.springframework.http.converter.HttpMessageConverter}
* that can read and write {@link Source} objects.
*
* @author Arjen Poutsma
* @since 3.0
*/
public class SourceHttpMessageConverter<T extends Source> extends AbstractHttpMessageConverter<T> {
	/** The exact {@link Source} types this converter supports (matched by identity, not assignability). */
	private static final Set<Class<?>> SUPPORTED_CLASSES = new HashSet<Class<?>>(5);

	static {
		SUPPORTED_CLASSES.add(DOMSource.class);
		SUPPORTED_CLASSES.add(SAXSource.class);
		SUPPORTED_CLASSES.add(StAXSource.class);
		SUPPORTED_CLASSES.add(StreamSource.class);
		SUPPORTED_CLASSES.add(Source.class);
	}

	// Used by transform(Source, Result); a fresh Transformer is created per call.
	// NOTE(review): the factory itself is shared across threads — confirm the
	// target JAXP implementation allows concurrent newTransformer() calls.
	private final TransformerFactory transformerFactory = TransformerFactory.newInstance();

	// Whether external XML entities are resolved while reading; false by default
	// (see setProcessExternalEntities).
	private boolean processExternalEntities = false;
	/**
	 * Sets the {@link #setSupportedMediaTypes(java.util.List) supportedMediaTypes}
	 * to {@code text/xml}, {@code application/xml}, and {@code application/*+xml}.
	 */
	public SourceHttpMessageConverter() {
		super(MediaType.APPLICATION_XML, MediaType.TEXT_XML, new MediaType("application", "*+xml"));
	}
	/**
	 * Set whether external XML entities are processed when converting to a Source.
	 * <p>Default is {@code false}, meaning that external entities are not resolved,
	 * which protects against XML External Entity (XXE) attacks on untrusted input.
	 * @param processExternalEntities {@code true} to resolve external entities
	 */
	public void setProcessExternalEntities(boolean processExternalEntities) {
		this.processExternalEntities = processExternalEntities;
	}
	@Override
	public boolean supports(Class<?> clazz) {
		// Exact-match lookup: a subclass of a supported Source type is rejected.
		return SUPPORTED_CLASSES.contains(clazz);
	}
@Override
@SuppressWarnings("unchecked")
protected T readInternal(Class<? extends T> clazz, HttpInputMessage inputMessage)
throws IOException, HttpMessageNotReadableException {
InputStream body = inputMessage.getBody();
if (DOMSource.class.equals(clazz)) {
return (T) readDOMSource(body);
}
else if (SAXSource.class.equals(clazz)) {
return (T) readSAXSource(body);
}
else if (StAXSource.class.equals(clazz)) {
return (T) readStAXSource(body);
}
else if (StreamSource.class.equals(clazz) || Source.class.equals(clazz)) {
return (T) readStreamSource(body);
}
else {
throw new HttpMessageConversionException("Could not read class [" + clazz +
"]. Only DOMSource, SAXSource, StAXSource, and StreamSource are supported.");
}
}
private DOMSource readDOMSource(InputStream body) throws IOException {
try {
DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance();
documentBuilderFactory.setNamespaceAware(true);
documentBuilderFactory.setFeature(
"http://xml.org/sax/features/external-general-entities", this.processExternalEntities);
DocumentBuilder documentBuilder = documentBuilderFactory.newDocumentBuilder();
Document document = documentBuilder.parse(body);
return new DOMSource(document);
}
catch (ParserConfigurationException ex) {
throw new HttpMessageNotReadableException("Could not set feature: " + ex.getMessage(), ex);
}
catch (SAXException ex) {
throw new HttpMessageNotReadableException("Could not parse document: " + ex.getMessage(), ex);
}
}
private SAXSource readSAXSource(InputStream body) throws IOException {
try {
XMLReader reader = XMLReaderFactory.createXMLReader();
reader.setFeature(
"http://xml.org/sax/features/external-general-entities", this.processExternalEntities);
byte[] bytes = StreamUtils.copyToByteArray(body);
return new SAXSource(reader, new InputSource(new ByteArrayInputStream(bytes)));
}
catch (SAXException ex) {
throw new HttpMessageNotReadableException("Could not parse document: " + ex.getMessage(), ex);
}
}
private Source readStAXSource(InputStream body) {
try {
XMLInputFactory inputFactory = XMLInputFactory.newFactory();
inputFactory.setProperty(
"javax.xml.stream.isSupportingExternalEntities", this.processExternalEntities);
XMLStreamReader streamReader = inputFactory.createXMLStreamReader(body);
return new StAXSource(streamReader);
}
catch (XMLStreamException ex) {
throw new HttpMessageNotReadableException("Could not parse document: " + ex.getMessage(), ex);
}
}
private StreamSource readStreamSource(InputStream body) throws IOException {
byte[] bytes = StreamUtils.copyToByteArray(body);
return new StreamSource(new ByteArrayInputStream(bytes));
}
@Override
protected Long getContentLength(T t, MediaType contentType) {
if (t instanceof DOMSource) {
try {
CountingOutputStream os = new CountingOutputStream();
transform(t, new StreamResult(os));
return os.count;
}
catch (TransformerException ex) {
// ignore
}
}
return null;
}
@Override
protected void writeInternal(T t, HttpOutputMessage outputMessage)
throws IOException, HttpMessageNotWritableException {
try {
Result result = new StreamResult(outputMessage.getBody());
transform(t, result);
}
catch (TransformerException ex) {
throw new HttpMessageNotWritableException("Could not transform [" + t + "] to output message", ex);
}
}
private void transform(Source source, Result result) throws TransformerException {
this.transformerFactory.newTransformer().transform(source, result);
}
private static class CountingOutputStream extends OutputStream {
long count = 0;
@Override
public void write(int b) throws IOException {
this.count++;
}
@Override
public void write(byte[] b) throws IOException {
this.count += b.length;
}
@Override
public void write(byte[] b, int off, int len) throws IOException {
this.count += len;
}
}
}
| spring-web/src/main/java/org/springframework/http/converter/xml/SourceHttpMessageConverter.java | /*
* Copyright 2002-2013 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.http.converter.xml;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.stream.XMLInputFactory;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamReader;
import javax.xml.transform.Result;
import javax.xml.transform.Source;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.sax.SAXSource;
import javax.xml.transform.stax.StAXSource;
import javax.xml.transform.stream.StreamResult;
import javax.xml.transform.stream.StreamSource;
import org.w3c.dom.Document;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;
import org.xml.sax.helpers.XMLReaderFactory;
import org.springframework.http.HttpInputMessage;
import org.springframework.http.HttpOutputMessage;
import org.springframework.http.MediaType;
import org.springframework.http.converter.AbstractHttpMessageConverter;
import org.springframework.http.converter.HttpMessageConversionException;
import org.springframework.http.converter.HttpMessageNotReadableException;
import org.springframework.http.converter.HttpMessageNotWritableException;
import org.springframework.util.StreamUtils;
/**
* Implementation of {@link org.springframework.http.converter.HttpMessageConverter}
* that can read and write {@link Source} objects.
*
* @author Arjen Poutsma
* @since 3.0
*/
public class SourceHttpMessageConverter<T extends Source> extends AbstractHttpMessageConverter<T> {
private final TransformerFactory transformerFactory = TransformerFactory.newInstance();
private boolean processExternalEntities = false;
/**
* Sets the {@link #setSupportedMediaTypes(java.util.List) supportedMediaTypes}
* to {@code text/xml} and {@code application/xml}, and {@code application/*-xml}.
*/
public SourceHttpMessageConverter() {
super(MediaType.APPLICATION_XML, MediaType.TEXT_XML, new MediaType("application", "*+xml"));
}
/**
* Indicates whether external XML entities are processed when converting
* to a Source.
* <p>Default is {@code false}, meaning that external entities are not resolved.
*/
public void setProcessExternalEntities(boolean processExternalEntities) {
this.processExternalEntities = processExternalEntities;
}
@Override
public boolean supports(Class<?> clazz) {
return DOMSource.class.equals(clazz) || SAXSource.class.equals(clazz)
|| StreamSource.class.equals(clazz) || Source.class.equals(clazz);
}
@Override
@SuppressWarnings("unchecked")
protected T readInternal(Class<? extends T> clazz, HttpInputMessage inputMessage)
throws IOException, HttpMessageNotReadableException {
InputStream body = inputMessage.getBody();
if (DOMSource.class.equals(clazz)) {
return (T) readDOMSource(body);
}
else if (SAXSource.class.equals(clazz)) {
return (T) readSAXSource(body);
}
else if (StAXSource.class.equals(clazz)) {
return (T) readStAXSource(body);
}
else if (StreamSource.class.equals(clazz) || Source.class.equals(clazz)) {
return (T) readStreamSource(body);
}
else {
throw new HttpMessageConversionException("Could not read class [" + clazz +
"]. Only DOMSource, SAXSource, and StreamSource are supported.");
}
}
private DOMSource readDOMSource(InputStream body) throws IOException {
try {
DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance();
documentBuilderFactory.setNamespaceAware(true);
documentBuilderFactory.setFeature("http://xml.org/sax/features/external-general-entities", processExternalEntities);
DocumentBuilder documentBuilder = documentBuilderFactory.newDocumentBuilder();
Document document = documentBuilder.parse(body);
return new DOMSource(document);
}
catch (ParserConfigurationException ex) {
throw new HttpMessageNotReadableException("Could not set feature: " + ex.getMessage(), ex);
}
catch (SAXException ex) {
throw new HttpMessageNotReadableException("Could not parse document: " + ex.getMessage(), ex);
}
}
private SAXSource readSAXSource(InputStream body) throws IOException {
try {
XMLReader reader = XMLReaderFactory.createXMLReader();
reader.setFeature("http://xml.org/sax/features/external-general-entities", processExternalEntities);
byte[] bytes = StreamUtils.copyToByteArray(body);
return new SAXSource(reader, new InputSource(new ByteArrayInputStream(bytes)));
}
catch (SAXException ex) {
throw new HttpMessageNotReadableException("Could not parse document: " + ex.getMessage(), ex);
}
}
private Source readStAXSource(InputStream body) {
try {
XMLInputFactory inputFactory = XMLInputFactory.newFactory();
inputFactory.setProperty("javax.xml.stream.isSupportingExternalEntities", processExternalEntities);
XMLStreamReader streamReader = inputFactory.createXMLStreamReader(body);
return new StAXSource(streamReader);
}
catch (XMLStreamException ex) {
throw new HttpMessageNotReadableException("Could not parse document: " + ex.getMessage(), ex);
}
}
private StreamSource readStreamSource(InputStream body) throws IOException {
byte[] bytes = StreamUtils.copyToByteArray(body);
return new StreamSource(new ByteArrayInputStream(bytes));
}
@Override
protected Long getContentLength(T t, MediaType contentType) {
if (t instanceof DOMSource) {
try {
CountingOutputStream os = new CountingOutputStream();
transform(t, new StreamResult(os));
return os.count;
}
catch (TransformerException ex) {
// ignore
}
}
return null;
}
@Override
protected void writeInternal(T t, HttpOutputMessage outputMessage)
throws IOException, HttpMessageNotWritableException {
try {
Result result = new StreamResult(outputMessage.getBody());
transform(t, result);
}
catch (TransformerException ex) {
throw new HttpMessageNotWritableException("Could not transform [" + t + "] to output message", ex);
}
}
private void transform(Source source, Result result) throws TransformerException {
this.transformerFactory.newTransformer().transform(source, result);
}
private static class CountingOutputStream extends OutputStream {
private long count = 0;
@Override
public void write(int b) throws IOException {
count++;
}
@Override
public void write(byte[] b) throws IOException {
count += b.length;
}
@Override
public void write(byte[] b, int off, int len) throws IOException {
count += len;
}
}
}
| SourceHttpMessageConverter's supports implementation needs to check for StAXSource
Issue: SPR-11341
| spring-web/src/main/java/org/springframework/http/converter/xml/SourceHttpMessageConverter.java | SourceHttpMessageConverter's supports implementation needs to check for StAXSource | <ide><path>pring-web/src/main/java/org/springframework/http/converter/xml/SourceHttpMessageConverter.java
<ide> import java.io.IOException;
<ide> import java.io.InputStream;
<ide> import java.io.OutputStream;
<del>
<add>import java.util.HashSet;
<add>import java.util.Set;
<ide> import javax.xml.parsers.DocumentBuilder;
<ide> import javax.xml.parsers.DocumentBuilderFactory;
<ide> import javax.xml.parsers.ParserConfigurationException;
<ide> import org.xml.sax.SAXException;
<ide> import org.xml.sax.XMLReader;
<ide> import org.xml.sax.helpers.XMLReaderFactory;
<add>
<ide> import org.springframework.http.HttpInputMessage;
<ide> import org.springframework.http.HttpOutputMessage;
<ide> import org.springframework.http.MediaType;
<ide> */
<ide> public class SourceHttpMessageConverter<T extends Source> extends AbstractHttpMessageConverter<T> {
<ide>
<add> private static final Set<Class<?>> SUPPORTED_CLASSES = new HashSet<Class<?>>(5);
<add>
<add> static {
<add> SUPPORTED_CLASSES.add(DOMSource.class);
<add> SUPPORTED_CLASSES.add(SAXSource.class);
<add> SUPPORTED_CLASSES.add(StAXSource.class);
<add> SUPPORTED_CLASSES.add(StreamSource.class);
<add> SUPPORTED_CLASSES.add(Source.class);
<add> }
<add>
<add>
<ide> private final TransformerFactory transformerFactory = TransformerFactory.newInstance();
<ide>
<ide> private boolean processExternalEntities = false;
<add>
<ide>
<ide> /**
<ide> * Sets the {@link #setSupportedMediaTypes(java.util.List) supportedMediaTypes}
<ide>
<ide>
<ide> /**
<del> * Indicates whether external XML entities are processed when converting
<del> * to a Source.
<add> * Indicates whether external XML entities are processed when converting to a Source.
<ide> * <p>Default is {@code false}, meaning that external entities are not resolved.
<ide> */
<ide> public void setProcessExternalEntities(boolean processExternalEntities) {
<ide> this.processExternalEntities = processExternalEntities;
<ide> }
<ide>
<add>
<ide> @Override
<ide> public boolean supports(Class<?> clazz) {
<del> return DOMSource.class.equals(clazz) || SAXSource.class.equals(clazz)
<del> || StreamSource.class.equals(clazz) || Source.class.equals(clazz);
<add> return SUPPORTED_CLASSES.contains(clazz);
<ide> }
<ide>
<ide> @Override
<ide> }
<ide> else {
<ide> throw new HttpMessageConversionException("Could not read class [" + clazz +
<del> "]. Only DOMSource, SAXSource, and StreamSource are supported.");
<add> "]. Only DOMSource, SAXSource, StAXSource, and StreamSource are supported.");
<ide> }
<ide> }
<ide>
<ide> try {
<ide> DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance();
<ide> documentBuilderFactory.setNamespaceAware(true);
<del> documentBuilderFactory.setFeature("http://xml.org/sax/features/external-general-entities", processExternalEntities);
<add> documentBuilderFactory.setFeature(
<add> "http://xml.org/sax/features/external-general-entities", this.processExternalEntities);
<ide> DocumentBuilder documentBuilder = documentBuilderFactory.newDocumentBuilder();
<ide> Document document = documentBuilder.parse(body);
<ide> return new DOMSource(document);
<ide> private SAXSource readSAXSource(InputStream body) throws IOException {
<ide> try {
<ide> XMLReader reader = XMLReaderFactory.createXMLReader();
<del> reader.setFeature("http://xml.org/sax/features/external-general-entities", processExternalEntities);
<add> reader.setFeature(
<add> "http://xml.org/sax/features/external-general-entities", this.processExternalEntities);
<ide> byte[] bytes = StreamUtils.copyToByteArray(body);
<ide> return new SAXSource(reader, new InputSource(new ByteArrayInputStream(bytes)));
<ide> }
<ide> private Source readStAXSource(InputStream body) {
<ide> try {
<ide> XMLInputFactory inputFactory = XMLInputFactory.newFactory();
<del> inputFactory.setProperty("javax.xml.stream.isSupportingExternalEntities", processExternalEntities);
<add> inputFactory.setProperty(
<add> "javax.xml.stream.isSupportingExternalEntities", this.processExternalEntities);
<ide> XMLStreamReader streamReader = inputFactory.createXMLStreamReader(body);
<ide> return new StAXSource(streamReader);
<ide> }
<ide>
<ide> private static class CountingOutputStream extends OutputStream {
<ide>
<del> private long count = 0;
<add> long count = 0;
<ide>
<ide> @Override
<ide> public void write(int b) throws IOException {
<del> count++;
<add> this.count++;
<ide> }
<ide>
<ide> @Override
<ide> public void write(byte[] b) throws IOException {
<del> count += b.length;
<add> this.count += b.length;
<ide> }
<ide>
<ide> @Override
<ide> public void write(byte[] b, int off, int len) throws IOException {
<del> count += len;
<add> this.count += len;
<ide> }
<ide> }
<ide> |
|
Java | mit | d3ee3c9ae5750065f89b62bc21333392fbad0304 | 0 | Szewek/Minecraft-Flux,Szewek/Minecraft-Flux | package szewek.mcflux;
import org.apache.logging.log4j.Logger;
import net.minecraftforge.common.MinecraftForge;
import net.minecraftforge.fml.common.Mod;
import net.minecraftforge.fml.common.event.FMLInitializationEvent;
import net.minecraftforge.fml.common.event.FMLPostInitializationEvent;
import net.minecraftforge.fml.common.event.FMLPreInitializationEvent;
import szewek.mcflux.api.CapabilityEnergy;
import szewek.mcflux.api.flavor.CapabilityFlavorEnergy;
import szewek.mcflux.wrapper.InjectWrappers;
@Mod(modid = R.MCFLUX_NAME, version = R.MCFLUX_VERSION)
public class MCFluxMod {
private Logger log;
@Mod.EventHandler
public void preInit(FMLPreInitializationEvent e) {
log = e.getModLog();
if (R.MCFLUX_VERSION.charAt(0) == '@')
log.warn("You are running Minecraft-Flux with an unknown version (development maybe?)");
else
log.info("Minecraft-Flux version " + R.MCFLUX_VERSION);
CapabilityEnergy.register();
CapabilityFlavorEnergy.register();
MinecraftForge.EVENT_BUS.register(new InjectWrappers());
}
public void init(FMLInitializationEvent e) {
}
public void postInit(FMLPostInitializationEvent e) {
}
}
| src/main/java/szewek/mcflux/MCFluxMod.java | package szewek.mcflux;
import org.apache.logging.log4j.Logger;
import net.minecraftforge.common.MinecraftForge;
import net.minecraftforge.fml.common.Mod;
import net.minecraftforge.fml.common.event.FMLInitializationEvent;
import net.minecraftforge.fml.common.event.FMLPostInitializationEvent;
import net.minecraftforge.fml.common.event.FMLPreInitializationEvent;
import szewek.mcflux.api.CapabilityEnergy;
import szewek.mcflux.api.flavor.CapabilityFlavorEnergy;
import szewek.mcflux.wrapper.InjectWrappers;
@Mod(modid = R.MCFLUX_NAME, version = R.MCFLUX_VERSION)
public class MCFluxMod {
private Logger log;
@Mod.EventHandler
public void preInit(FMLPreInitializationEvent e) {
CapabilityEnergy.register();
CapabilityFlavorEnergy.register();
log = e.getModLog();
if (R.MCFLUX_VERSION.charAt(0) == '$')
log.warn("You are running Minecraft-Flux with an unknown version");
MinecraftForge.EVENT_BUS.register(new InjectWrappers());
}
public void init(FMLInitializationEvent e) {
}
public void postInit(FMLPostInitializationEvent e) {
}
}
| Fixed version check | src/main/java/szewek/mcflux/MCFluxMod.java | Fixed version check | <ide><path>rc/main/java/szewek/mcflux/MCFluxMod.java
<ide>
<ide> @Mod.EventHandler
<ide> public void preInit(FMLPreInitializationEvent e) {
<add> log = e.getModLog();
<add> if (R.MCFLUX_VERSION.charAt(0) == '@')
<add> log.warn("You are running Minecraft-Flux with an unknown version (development maybe?)");
<add> else
<add> log.info("Minecraft-Flux version " + R.MCFLUX_VERSION);
<ide> CapabilityEnergy.register();
<ide> CapabilityFlavorEnergy.register();
<del> log = e.getModLog();
<del> if (R.MCFLUX_VERSION.charAt(0) == '$')
<del> log.warn("You are running Minecraft-Flux with an unknown version");
<ide> MinecraftForge.EVENT_BUS.register(new InjectWrappers());
<ide> }
<ide> |
|
Java | mit | error: pathspec 'src/test/java/fr/lille1/maven_data_extraction/core/graph/MavenMultigraphLabeledTest.java' did not match any file(s) known to git
| 3f30ee5a8fb0e6cc4146ed9886721cb68ce46f36 | 1 | blckshrk/MavenDataExtraction,blckshrk/MavenDataExtraction | package fr.lille1.maven_data_extraction.core.graph;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.io.File;
import java.util.List;
import org.junit.Before;
import org.junit.Test;
import fr.lille1.maven_data_extraction.core.Project;
import fr.lille1.maven_data_extraction.core.Version;
import fr.lille1.maven_data_extraction.core.graph.MavenLabeledEdge;
import fr.lille1.maven_data_extraction.core.graph.MavenMultigraph;
import fr.lille1.maven_data_extraction.core.graph.MavenMultigraphLabeled;
/**
*
* @author Alexandre Bonhomme
*
*/
public class MavenMultigraphLabeledTest {
private MavenMultigraph<MavenLabeledEdge> graph;
private Project p1;
private Project p2;
private Project p3;
@Before
public void setUp() throws Exception {
graph = new MavenMultigraphLabeled();
p1 = new Project("org.apache.abdera", "abdera");
p1.addVersion(new Version("1.0", new File("")));
p1.addVersion(new Version("1.1.1", new File("")));
p2 = new Project("org.apache.accumulo", "accumulo-core");
p2.addVersion(new Version("1.3.6", new File("")));
p3 = new Project("org.apache.ace", "ace-pom");
p3.addVersion(new Version("0.8.0-incubator", new File("")));
p3.addVersion(new Version("0.8.1-incubator", new File("")));
// add the vertices
graph.addVertex(p1);
graph.addVertex(p2);
graph.addVertex(p3);
// p1.v1 -> p2.v1
graph.addEdge(p1, p2, p1.getVersion("1.0").getVersionNumber(), p2
.getVersion("1.3.6").getVersionNumber());
// p1.v1 -> p3.v1
graph.addEdge(p1, p3, p1.getVersion("1.0").getVersionNumber(), p3
.getVersion("0.8.0-incubator").getVersionNumber());
// p2.v1 -> p3.v2
graph.addEdge(p2, p3, p2.getVersion("1.3.6").getVersionNumber(), p3
.getVersion("0.8.1-incubator").getVersionNumber());
}
@Test
public void testGetVertex() {
Project project = graph.getVertex("org.apache.abdera", "abdera");
assertEquals(p1, project);
}
@Test
public void testGetDependencies() {
List<Project> dependencies = graph.getDependencies(p1);
assertEquals(2, dependencies.size());
assertTrue(dependencies.contains(p2));
assertTrue(dependencies.contains(p3));
}
@Test
public void testGetUsages() {
List<Project> usages = graph.getUsages(p3);
assertEquals(2, usages.size());
assertTrue(usages.contains(p1));
assertTrue(usages.contains(p2));
}
}
| src/test/java/fr/lille1/maven_data_extraction/core/graph/MavenMultigraphLabeledTest.java | Adding a 'graph' package in test/ to match with the apps structure
| src/test/java/fr/lille1/maven_data_extraction/core/graph/MavenMultigraphLabeledTest.java | Adding a 'graph' package in test/ to match with the apps structure | <ide><path>rc/test/java/fr/lille1/maven_data_extraction/core/graph/MavenMultigraphLabeledTest.java
<add>package fr.lille1.maven_data_extraction.core.graph;
<add>
<add>import static org.junit.Assert.assertEquals;
<add>import static org.junit.Assert.assertTrue;
<add>
<add>import java.io.File;
<add>import java.util.List;
<add>
<add>import org.junit.Before;
<add>import org.junit.Test;
<add>
<add>import fr.lille1.maven_data_extraction.core.Project;
<add>import fr.lille1.maven_data_extraction.core.Version;
<add>import fr.lille1.maven_data_extraction.core.graph.MavenLabeledEdge;
<add>import fr.lille1.maven_data_extraction.core.graph.MavenMultigraph;
<add>import fr.lille1.maven_data_extraction.core.graph.MavenMultigraphLabeled;
<add>
<add>/**
<add> *
<add> * @author Alexandre Bonhomme
<add> *
<add> */
<add>public class MavenMultigraphLabeledTest {
<add>
<add> private MavenMultigraph<MavenLabeledEdge> graph;
<add>
<add> private Project p1;
<add> private Project p2;
<add> private Project p3;
<add>
<add> @Before
<add> public void setUp() throws Exception {
<add> graph = new MavenMultigraphLabeled();
<add>
<add> p1 = new Project("org.apache.abdera", "abdera");
<add> p1.addVersion(new Version("1.0", new File("")));
<add> p1.addVersion(new Version("1.1.1", new File("")));
<add>
<add> p2 = new Project("org.apache.accumulo", "accumulo-core");
<add> p2.addVersion(new Version("1.3.6", new File("")));
<add>
<add> p3 = new Project("org.apache.ace", "ace-pom");
<add> p3.addVersion(new Version("0.8.0-incubator", new File("")));
<add> p3.addVersion(new Version("0.8.1-incubator", new File("")));
<add>
<add> // add the vertices
<add> graph.addVertex(p1);
<add> graph.addVertex(p2);
<add> graph.addVertex(p3);
<add>
<add> // p1.v1 -> p2.v1
<add> graph.addEdge(p1, p2, p1.getVersion("1.0").getVersionNumber(), p2
<add> .getVersion("1.3.6").getVersionNumber());
<add> // p1.v1 -> p3.v1
<add> graph.addEdge(p1, p3, p1.getVersion("1.0").getVersionNumber(), p3
<add> .getVersion("0.8.0-incubator").getVersionNumber());
<add> // p2.v1 -> p3.v2
<add> graph.addEdge(p2, p3, p2.getVersion("1.3.6").getVersionNumber(), p3
<add> .getVersion("0.8.1-incubator").getVersionNumber());
<add> }
<add>
<add> @Test
<add> public void testGetVertex() {
<add> Project project = graph.getVertex("org.apache.abdera", "abdera");
<add> assertEquals(p1, project);
<add> }
<add>
<add> @Test
<add> public void testGetDependencies() {
<add> List<Project> dependencies = graph.getDependencies(p1);
<add> assertEquals(2, dependencies.size());
<add> assertTrue(dependencies.contains(p2));
<add> assertTrue(dependencies.contains(p3));
<add> }
<add>
<add> @Test
<add> public void testGetUsages() {
<add> List<Project> usages = graph.getUsages(p3);
<add> assertEquals(2, usages.size());
<add> assertTrue(usages.contains(p1));
<add> assertTrue(usages.contains(p2));
<add> }
<add>
<add>} |
|
Java | apache-2.0 | b47ba6494a4dcaf9b9abd6a8dac49f128099c9fb | 0 | cbeams-archive/spring-framework-2.5.x,cbeams-archive/spring-framework-2.5.x,cbeams-archive/spring-framework-2.5.x,cbeams-archive/spring-framework-2.5.x | /*
* The Spring Framework is published under the terms
* of the Apache Software License.
*/
package org.springframework.jndi;
import java.util.Properties;
import javax.naming.NamingException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.beans.factory.InitializingBean;
/**
* Convenient superclass for JNDI-based Service Locators. Subclasses are
* JavaBeans, exposing a jndiName property. This may or may not include
* the "java:comp/env/" prefix expected by J2EE applications when accessing
* a locally mapped (ENC - Environmental Naming Context) resource. If it
* doesn't, the "java:comp/env/" prefix will be prepended if the "resourceRef"
* property is true (the default is <strong>false</strong>) and no other scheme
* like "java:" is given.
*
* <p>Subclasses must implement the located() method to cache the results
* of the JNDI lookup. They don't need to worry about error handling.</p>
*
* <p><b>Assumptions:</b> The resource obtained from JNDI can be cached.
*
* <p>Subclasses will often be used as singletons in a bean container. This
* sometiems presents a problem if that bean container pre-instantiates singletons,
* since this class does the JNDI lookup in its init method, but the resource being
* pointed to may not exist at that time, even though it may exist at the time of
* first usage. The solution is to tell the bean container not to pre-instantiate
* this class (i.e. lazy load it instead).<p>
*
* @author Rod Johnson
* @version $Id: AbstractJndiLocator.java,v 1.8 2004-03-04 18:34:08 jhoeller Exp $
* @see #setJndiTemplate
* @see #setJndiEnvironment
* @see #setResourceRef
*/
public abstract class AbstractJndiLocator implements InitializingBean {
/** JNDI prefix used in a J2EE container */
public static String CONTAINER_PREFIX = "java:comp/env/";
protected final Log logger = LogFactory.getLog(getClass());
private JndiTemplate jndiTemplate = new JndiTemplate();
private String jndiName;
private boolean resourceRef = false;
/**
* Create a new JNDI locator. The jndiName property must be set,
* and afterPropertiesSet be called to perform the JNDI lookup.
* <p>Obviously, this class is typically used via a BeanFactory.
*/
public AbstractJndiLocator() {
}
/**
* Create a new JNDI locator, specifying the JNDI name. If the name
* doesn't include a java:comp/env/ prefix, it will be prepended.
* <p>As this is a shortcut, it calls afterPropertiesSet to perform
* the JNDI lookup immediately.
* @param jndiName JNDI name.
*/
public AbstractJndiLocator(String jndiName) throws NamingException, IllegalArgumentException {
setJndiName(jndiName);
afterPropertiesSet();
}
/**
* Set the JNDI template to use for the JNDI lookup.
* You can also specify JNDI environment settings via setJndiEnvironment.
* @see #setJndiEnvironment
*/
public final void setJndiTemplate(JndiTemplate jndiTemplate) {
this.jndiTemplate = jndiTemplate;
}
/**
* Return the JNDI template to use for the JNDI lookup.
*/
public final JndiTemplate getJndiTemplate() {
return jndiTemplate;
}
/**
* Set the JNDI environment to use for the JNDI lookup.
* Creates a JndiTemplate with the given environment settings.
* @see #setJndiTemplate
*/
public final void setJndiEnvironment(Properties jndiEnvironment) {
this.jndiTemplate = new JndiTemplate(jndiEnvironment);
}
/**
* Return the JNDI enviromment to use for the JNDI lookup.
*/
public final Properties getJndiEnvironment() {
return jndiTemplate.getEnvironment();
}
/**
* Set the JNDI name. If it doesn't begin "java:comp/env/"
* we add this prefix if resourceRef is set to True.
* @param jndiName JNDI name of bean to look up
* @see #setResourceRef
*/
public final void setJndiName(String jndiName) {
this.jndiName = jndiName;
}
/**
* Return the JNDI name to look up.
*/
public final String getJndiName() {
return jndiName;
}
/**
* Set if the lookup occurs in a J2EE container, i.e. if the prefix
* "java:comp/env/" needs to be added if the JNDI name doesn't already
* contain it. Default is false.
* <p>Note: Will only get applied if no other scheme like "java:" is given.
*/
public void setResourceRef(boolean resourceRef) {
this.resourceRef = resourceRef;
}
/**
* Return if the lookup occurs in a J2EE container.
*/
public final boolean isResourceRef() {
return resourceRef;
}
public final void afterPropertiesSet() throws NamingException, IllegalArgumentException {
if (this.jndiName == null || this.jndiName.equals("")) {
throw new IllegalArgumentException("Property 'jndiName' must be set on " + getClass().getName());
}
// prepend container prefix if not already specified and no other scheme given
if (this.resourceRef && !this.jndiName.startsWith(CONTAINER_PREFIX) && this.jndiName.indexOf(':') == -1) {
this.jndiName = CONTAINER_PREFIX + this.jndiName;
}
Object o = lookup(this.jndiName);
located(o);
}
private Object lookup(String jndiName) throws NamingException {
Object o = this.jndiTemplate.lookup(jndiName);
logger.debug("Successfully looked up object with jndiName '" + jndiName + "': value=[" + o + "]");
return o;
}
/**
* Subclasses must implement this to cache the object this class has obtained from JNDI.
* @param jndiObject object successfully retrieved from JNDI
*/
protected abstract void located(Object jndiObject);
}
| src/org/springframework/jndi/AbstractJndiLocator.java | /*
* The Spring Framework is published under the terms
* of the Apache Software License.
*/
package org.springframework.jndi;
import javax.naming.NamingException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.beans.factory.InitializingBean;
/**
* Convenient superclass for JNDI-based Service Locators. Subclasses are
* JavaBeans, exposing a jndiName property. This may or may not include
* the "java:comp/env/" prefix expected by J2EE applications when accessing
* a locally mapped (ENC - Environmental Naming Context) resource. If it
* doesn't, the "java:comp/env/" prefix will be prepended if the "resourceRef"
* property is true (the default is <strong>false</strong>) and no other scheme
* like "java:" is given.
*
* <p>Subclasses must implement the located() method to cache the results
* of the JNDI lookup. They don't need to worry about error handling.</p>
*
* <p><b>Assumptions:</b> The resource obtained from JNDI can be cached.
*
* <p>Subclasses will often be used as singletons in a bean container. This
* sometiems presents a problem if that bean container pre-instantiates singletons,
* since this class does the JNDI lookup in its init method, but the resource being
* pointed to may not exist at that time, even though it may exist at the time of
* first usage. The solution is to tell the bean container not to pre-instantiate
* this class (i.e. lazy load it instead).<p>
*
* @author Rod Johnson
* @version $Id: AbstractJndiLocator.java,v 1.7 2004-02-27 14:59:00 luke_t Exp $
* @see #setResourceRef
*/
public abstract class AbstractJndiLocator implements InitializingBean {
/** JNDI prefix used in a J2EE container */
public static String CONTAINER_PREFIX = "java:comp/env/";
protected final Log logger = LogFactory.getLog(getClass());
private JndiTemplate jndiTemplate = new JndiTemplate();
private String jndiName;
private boolean resourceRef = false;
/**
* Create a new JNDI locator. The jndiName property must be set,
* and afterPropertiesSet be called to perform the JNDI lookup.
* <p>Obviously, this class is typically used via a BeanFactory.
*/
public AbstractJndiLocator() {
}
/**
* Create a new JNDI locator, specifying the JNDI name. If the name
* doesn't include a java:comp/env/ prefix, it will be prepended.
* <p>As this is a shortcut, it calls afterPropertiesSet to perform
* the JNDI lookup immediately.
* @param jndiName JNDI name.
*/
public AbstractJndiLocator(String jndiName) throws NamingException, IllegalArgumentException {
setJndiName(jndiName);
afterPropertiesSet();
}
/**
* Set the JNDI template to use for the JNDI lookup.
*/
public final void setJndiTemplate(JndiTemplate template) {
jndiTemplate = template;
}
/**
* Return the JNDI template to use for the JNDI lookup.
*/
public final JndiTemplate getJndiTemplate() {
return jndiTemplate;
}
/**
* Set the JNDI name. If it doesn't begin "java:comp/env/"
* we add this prefix if resourceRef is set to True.
* @param jndiName JNDI name of bean to look up
* @see #setResourceRef
*/
public final void setJndiName(String jndiName) {
this.jndiName = jndiName;
}
/**
* Return the JNDI name to look up.
*/
public final String getJndiName() {
return jndiName;
}
/**
* Set if the lookup occurs in a J2EE container, i.e. if the prefix
* "java:comp/env/" needs to be added if the JNDI name doesn't already
* contain it. Default is false.
* <p>Note: Will only get applied if no other scheme like "java:" is given.
*/
public void setResourceRef(boolean resourceRef) {
this.resourceRef = resourceRef;
}
/**
* Return if the lookup occurs in a J2EE container.
*/
public final boolean isResourceRef() {
return resourceRef;
}
public final void afterPropertiesSet() throws NamingException, IllegalArgumentException {
if (this.jndiName == null || this.jndiName.equals("")) {
throw new IllegalArgumentException("Property 'jndiName' must be set on " + getClass().getName());
}
// prepend container prefix if not already specified and no other scheme given
if (this.resourceRef && !this.jndiName.startsWith(CONTAINER_PREFIX) && this.jndiName.indexOf(':') == -1) {
this.jndiName = CONTAINER_PREFIX + this.jndiName;
}
Object o = lookup(this.jndiName);
located(o);
}
private Object lookup(String jndiName) throws NamingException {
Object o = this.jndiTemplate.lookup(jndiName);
logger.debug("Successfully looked up object with jndiName '" + jndiName + "': value=[" + o + "]");
return o;
}
/**
* Subclasses must implement this to cache the object this class has obtained from JNDI.
* @param jndiObject object successfully retrieved from JNDI
*/
protected abstract void located(Object jndiObject);
}
| added setJndiEnvironment
git-svn-id: b619a0c99665f88f1afe72824344cefe9a1c8c90@1753 fd5a2b45-1f63-4059-99e9-3c7cb7fd75c8
| src/org/springframework/jndi/AbstractJndiLocator.java | added setJndiEnvironment | <ide><path>rc/org/springframework/jndi/AbstractJndiLocator.java
<ide> */
<ide>
<ide> package org.springframework.jndi;
<add>
<add>import java.util.Properties;
<ide>
<ide> import javax.naming.NamingException;
<ide>
<ide> * this class (i.e. lazy load it instead).<p>
<ide> *
<ide> * @author Rod Johnson
<del> * @version $Id: AbstractJndiLocator.java,v 1.7 2004-02-27 14:59:00 luke_t Exp $
<add> * @version $Id: AbstractJndiLocator.java,v 1.8 2004-03-04 18:34:08 jhoeller Exp $
<add> * @see #setJndiTemplate
<add> * @see #setJndiEnvironment
<ide> * @see #setResourceRef
<ide> */
<ide> public abstract class AbstractJndiLocator implements InitializingBean {
<ide> private String jndiName;
<ide>
<ide> private boolean resourceRef = false;
<add>
<ide>
<ide> /**
<ide> * Create a new JNDI locator. The jndiName property must be set,
<ide>
<ide> /**
<ide> * Set the JNDI template to use for the JNDI lookup.
<add> * You can also specify JNDI environment settings via setJndiEnvironment.
<add> * @see #setJndiEnvironment
<ide> */
<del> public final void setJndiTemplate(JndiTemplate template) {
<del> jndiTemplate = template;
<add> public final void setJndiTemplate(JndiTemplate jndiTemplate) {
<add> this.jndiTemplate = jndiTemplate;
<ide> }
<ide>
<ide> /**
<ide> */
<ide> public final JndiTemplate getJndiTemplate() {
<ide> return jndiTemplate;
<add> }
<add>
<add> /**
<add> * Set the JNDI environment to use for the JNDI lookup.
<add> * Creates a JndiTemplate with the given environment settings.
<add> * @see #setJndiTemplate
<add> */
<add> public final void setJndiEnvironment(Properties jndiEnvironment) {
<add> this.jndiTemplate = new JndiTemplate(jndiEnvironment);
<add> }
<add>
<add> /**
<add> * Return the JNDI enviromment to use for the JNDI lookup.
<add> */
<add> public final Properties getJndiEnvironment() {
<add> return jndiTemplate.getEnvironment();
<ide> }
<ide>
<ide> /**
<ide> return resourceRef;
<ide> }
<ide>
<add>
<ide> public final void afterPropertiesSet() throws NamingException, IllegalArgumentException {
<ide> if (this.jndiName == null || this.jndiName.equals("")) {
<ide> throw new IllegalArgumentException("Property 'jndiName' must be set on " + getClass().getName()); |
|
Java | mit | 96dd9e51f250fe58d61ee13de995cbe00e781364 | 0 | 4a2e532e/RealisticSwimming,ClassicTowny/RealisticSwimming | /*
Copyright (c) 2016-2017 4a2e532e
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package realisticSwimming;
import fr.neatmonster.nocheatplus.checks.CheckType;
import fr.neatmonster.nocheatplus.checks.access.IViolationInfo;
import fr.neatmonster.nocheatplus.hooks.APIUtils;
import fr.neatmonster.nocheatplus.hooks.NCPHook;
import fr.neatmonster.nocheatplus.hooks.NCPHookManager;
import org.bukkit.entity.Player;
public class NoCheatPlusListener implements NCPHook {
public NoCheatPlusListener(){
NCPHookManager.addHook(CheckType.ALL, this);
}
@Override
public String getHookName() {
return "RealisticSwimming_ExperimentalQuickFix";
}
@Override
public String getHookVersion() {
return "0.1";
}
@Override
public boolean onCheckFailure(CheckType checkType, Player player, IViolationInfo iViolationInfo) {
if(!Config.noCheatPlusCompatibilityMode || APIUtils.needsSynchronization(checkType)){
return false;
}
if((checkType == CheckType.MOVING || checkType == CheckType.FIGHT_FASTHEAL) && (player.hasMetadata("swimming") || player.hasMetadata("falling"))){
return true;
}
return false;
}
}
| src/realisticSwimming/NoCheatPlusListener.java | /*
Copyright (c) 2016-2017 4a2e532e
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package realisticSwimming;
import fr.neatmonster.nocheatplus.checks.CheckType;
import fr.neatmonster.nocheatplus.checks.access.IViolationInfo;
import fr.neatmonster.nocheatplus.hooks.APIUtils;
import fr.neatmonster.nocheatplus.hooks.NCPHook;
import fr.neatmonster.nocheatplus.hooks.NCPHookManager;
import org.bukkit.entity.Player;
public class NoCheatPlusListener implements NCPHook {
public NoCheatPlusListener(){
NCPHookManager.addHook(CheckType.ALL, this);
}
@Override
public String getHookName() {
return "RealisticSwimming_ExperimentalQuickFix";
}
@Override
public String getHookVersion() {
return "0.1";
}
@Override
public boolean onCheckFailure(CheckType checkType, Player player, IViolationInfo iViolationInfo) {
if(!Config.noCheatPlusCompatibilityMode || APIUtils.needsSynchronization(checkType)){
return false;
}
if(player.hasMetadata("swimming") || player.hasMetadata("falling")){
return true;
}
return false;
}
}
| Only cancel specific NCP checks
| src/realisticSwimming/NoCheatPlusListener.java | Only cancel specific NCP checks | <ide><path>rc/realisticSwimming/NoCheatPlusListener.java
<ide> if(!Config.noCheatPlusCompatibilityMode || APIUtils.needsSynchronization(checkType)){
<ide> return false;
<ide> }
<del> if(player.hasMetadata("swimming") || player.hasMetadata("falling")){
<add> if((checkType == CheckType.MOVING || checkType == CheckType.FIGHT_FASTHEAL) && (player.hasMetadata("swimming") || player.hasMetadata("falling"))){
<ide> return true;
<ide> }
<ide> return false; |
|
JavaScript | agpl-3.0 | 7cb761c0cc20e21c9743282345edac296c1a5e64 | 0 | schul-cloud/schulcloud-server,schul-cloud/schulcloud-server,schul-cloud/schulcloud-server | const { expect } = require('chai');
const { makeStringRCConform } = require('../../../src/services/rocketChat/helpers');
describe('makeStringRCConform', () => {
it('replaces german umlaute', () => {
expect(makeStringRCConform('öÖüÜäÄß')).to.equal('oeOeueUeaeAess');
});
it('replaces spaces', () => {
expect(makeStringRCConform('two words')).to.equal('two-words');
});
it('replaces special characters', () => {
expect(makeStringRCConform('?!"\'\\[]()´*+´§$%&/=,;:^°')).to.equal('________________________');
});
it('does not replace normal chars or numbers', () => {
expect(makeStringRCConform('LoremIpsum0815')).to.equal('LoremIpsum0815');
});
it('does not replace allowed sepcial characters', () => {
expect(makeStringRCConform('.-_')).to.equal('.-_');
});
});
| test/services/rocketChat/helpers.test.js | const { expect } = require('chai');
const { makeStringRCConform } = require('../../../src/services/rocketChat/helpers');
describe.only('makeStringRCConform', () => {
it('replaces german umlaute', () => {
expect(makeStringRCConform('öÖüÜäÄß')).to.equal('oeOeueUeaeAess');
});
it('replaces spaces', () => {
expect(makeStringRCConform('two words')).to.equal('two-words');
});
it('replaces special characters', () => {
expect(makeStringRCConform('?!"\'\\[]()´*+´§$%&/=,;:^°')).to.equal('________________________');
});
it('does not replace normal chars or numbers', () => {
expect(makeStringRCConform('LoremIpsum0815')).to.equal('LoremIpsum0815');
});
it('does not replace allowed sepcial characters', () => {
expect(makeStringRCConform('.-_')).to.equal('.-_');
});
});
| remove.only
| test/services/rocketChat/helpers.test.js | remove.only | <ide><path>est/services/rocketChat/helpers.test.js
<ide> const { expect } = require('chai');
<ide> const { makeStringRCConform } = require('../../../src/services/rocketChat/helpers');
<ide>
<del>describe.only('makeStringRCConform', () => {
<add>describe('makeStringRCConform', () => {
<ide> it('replaces german umlaute', () => {
<ide> expect(makeStringRCConform('öÖüÜäÄß')).to.equal('oeOeueUeaeAess');
<ide> }); |
|
JavaScript | mit | 27e7a1d2c0ddeaf08df796cbd35e4871854118b1 | 0 | pzw224/ui-grid,xiaojie123/ng-grid,edmondpr/ng-grid,ingshtrom/ui-grid,hahn-kev/ui-grid,Lukasz-Wisniewski/ui-grid,ppossanzini/ui-grid,alex87/ui-grid,Julius1985/ui-grid,kamikasky/ui-grid,PaulL1/ng-grid,reupen/ui-grid,AdverseEvents/ui-grid,cell-one/ui-grid,benoror/ui-grid,brucebetts/ui-grid,suryasingh/ng-grid,btesser/ng-grid,edmondpr/ng-grid,rightscale/ng-grid,jmptrader/ui-grid,ntmuigrid/ntmuigrid,kennypowers1987/ui-grid,Julius1985/ui-grid,c1240539157/ui-grid,b2io/ng-grid,machinemetrics/ui-grid,solvebio/ng-grid,vidakovic/ui-grid,PeopleNet/ng-grid-fork,hahn-kev/ui-grid,kennypowers1987/ui-grid,seafoam6/ui-grid,jmptrader/ui-grid,zen4s/ng-grid,masterpowers/ui-grid,robpurcell/ui-grid,dbckr/ui-grid,ciccio86/ui-grid,JayHuang/ng-grid,luisdesig/ui-grid,louwie17/ui-grid,280455936/ng-grid,dlgski/ui-grid,angular-ui/ng-grid-legacy,triersistemas/tr-grid,zamboney/ui-grid,SomeKittens/ui-grid,seafoam6/ui-grid,nsartor/ui-grid,JLLeitschuh/ui-grid,edivancamargo/ui-grid,ppossanzini/ui-grid,ciccio86/ui-grid,mirik123/ng-grid,ingshtrom/ui-grid,vidakovic/ui-grid,kamikasky/ui-grid,jbarrus/ng-grid,scottlepp/ui-grid,machinemetrics/ui-grid,dlgski/ui-grid,FernCreek/ng-grid,vidakovic/ui-grid,JayHuang/ng-grid,DmitryEfimenko/ui-grid,wesleycho/ui-grid,zuohaocheng/ui-grid,como-quesito/ui-grid,eeiswerth/ng-grid,kenwilcox/ui-grid,mportuga/ui-grid,c1240539157/ui-grid,bjossi86/ui-grid,edivancamargo/ui-grid,zhaokunfay/ui-grid,thomsonreuters/ng-grid,mirik123/ng-grid,swalters/ng-grid,alex87/ui-grid,AgDude/ng-grid,salarmehr/ng-grid,therealtomas/ui-grid,klieber/ui-grid,DmitryEfimenko/ui-grid,wesleycho/ui-grid,alex87/ui-grid,thomsonreuters/ng-grid,likaiwalkman/ui-grid,frantisekjandos/ui-grid,jiangzhixiao/ui-grid,zmon/ui-grid,louwie17/ui-grid,btesser/ng-grid,AgDude/ng-grid,DexStr/ng-grid,cityglobal/ui-grid,kenwilcox/ui-grid,mboriani/ClearFilters,mage-eag/ui-grid,suryasingh/ng-grid,Stiv/ui-grid,Servoy/ng-grid,aw2basc/ui-grid,bjossi86/ui-grid,zmon/ui-g
rid,jbarrus/ng-grid,sandwich99/ng-grid,bartkiewicz/ui-grid,machinemetrics/ui-grid,ingshtrom/ui-grid,solvebio/ng-grid,wesleycho/ui-grid,htettwin/ui-grid,bartkiewicz/ui-grid,angular-ui/ng-grid,amyboyd/ng-grid,zhaokunfay/ui-grid,Cosium/ui-grid,DexStr/ng-grid,ntgn81/ui-grid,suryasingh/ng-grid,Stiv/ui-grid,seafoam6/ui-grid,zamboney/ui-grid,PeopleNet/ng-grid-fork,Khamull/ui-grid,domakas/ui-grid,zuohaocheng/ui-grid,scottlepp/ui-grid,FugleMonkey/ng-grid,benoror/ui-grid,SomeKittens/ui-grid,amyboyd/ng-grid,masterpowers/ui-grid,cityglobal/ui-grid,Servoy/ng-grid,aw2basc/ui-grid,lookfirst/ui-grid,jintoppy/ui-grid,robpurcell/ui-grid,angular-ui/ng-grid,triersistemas/tr-grid,dietergoetelen/ui-grid,como-quesito/ui-grid,swalters/ng-grid,salarmehr/ng-grid,likaiwalkman/ui-grid,dbckr/ui-grid,xiaojie123/ng-grid,cell-one/ui-grid,mportuga/ui-grid,edivancamargo/ui-grid,FugleMonkey/ng-grid,kenwilcox/ui-grid,Cosium/ui-grid,klieber/ui-grid,JayHuang/ng-grid,angular-ui/ui-grid,Namek/ui-grid,b2io/ng-grid,solvebio/ng-grid,imbalind/ui-grid,zen4s/ng-grid,reupen/ui-grid,pkdevbox/ui-grid,angular-ui/ng-grid-legacy,c1240539157/ui-grid,Cosium/ui-grid,Phalynx/ng-grid,Stiv/ui-grid,DmitryEfimenko/ui-grid,angular-ui/ng-grid,zuohaocheng/ui-grid,JLLeitschuh/ui-grid,luisdesig/ui-grid,ntmuigrid/ntmuigrid,nishant8BITS/ng-grid,Namek/ui-grid,therealtomas/ui-grid,nishant8BITS/ng-grid,amyboyd/ng-grid,zhaokunfay/ui-grid,abgaryanharutyun/ng-grid,htettwin/ui-grid,bjossi86/ui-grid,Khamull/ui-grid,ppossanzini/ui-grid,kennypowers1987/ui-grid,500tech/ng-grid,brucebetts/ui-grid,pzw224/ui-grid,Lukasz-Wisniewski/ui-grid,mage-eag/ui-grid,Julius1985/ui-grid,benoror/ui-grid,SomeKittens/ui-grid,eeiswerth/ng-grid,ntgn81/ui-grid,yccteam/ui-grid,angular-ui/ng-grid-legacy,InteractiveIntelligence/ng-grid,frantisekjandos/ui-grid,dietergoetelen/ui-grid,hahn-kev/ui-grid,cell-one/ui-grid,jiangzhixiao/ui-grid,ntmuigrid/ntmuigrid,YonatanKra/ui-grid,xiaojie123/ng-grid,como-quesito/ui-grid,mportuga/ui-grid,mboriani/ClearFilters,sandwich99/ng-g
rid,masterpowers/ui-grid,pzw224/ui-grid,luisdesig/ui-grid,JLLeitschuh/ui-grid,triersistemas/tr-grid,bartkiewicz/ui-grid,FugleMonkey/ng-grid,YonatanKra/ui-grid,scottlepp/ui-grid,InteractiveIntelligence/ng-grid,aw2basc/ui-grid,jmptrader/ui-grid,domakas/ui-grid,AdverseEvents/ui-grid,Servoy/ng-grid,brucebetts/ui-grid,dietergoetelen/ui-grid,pkdevbox/ui-grid,ciccio86/ui-grid,dpinho17/ui-grid,Khamull/ui-grid,jintoppy/ui-grid,Namek/ui-grid,AdverseEvents/ui-grid,btesser/ng-grid,dpinho17/ui-grid,dpinho17/ui-grid,rightscale/ng-grid,sandwich99/ng-grid,mirik123/ng-grid,zen4s/ng-grid,nsartor/ui-grid,cityglobal/ui-grid,zmon/ui-grid,klieber/ui-grid,imbalind/ui-grid,angular-ui/ui-grid,zuzusik/ui-grid,frantisekjandos/ui-grid,reupen/ui-grid,b2io/ng-grid,pkdevbox/ui-grid,PaulL1/ng-grid,lookfirst/ui-grid,likaiwalkman/ui-grid,zuzusik/ui-grid,nishant8BITS/ng-grid,ntgn81/ui-grid,280455936/ng-grid,lookfirst/ui-grid,yccteam/ui-grid,mage-eag/ui-grid,dlgski/ui-grid,Phalynx/ng-grid,salarmehr/ng-grid,abgaryanharutyun/ng-grid,jiangzhixiao/ui-grid,500tech/ng-grid,jintoppy/ui-grid,FernCreek/ng-grid,zamboney/ui-grid,domakas/ui-grid,eeiswerth/ng-grid,500tech/ng-grid,therealtomas/ui-grid,280455936/ng-grid,edmondpr/ng-grid,imbalind/ui-grid,louwie17/ui-grid,nsartor/ui-grid,dbckr/ui-grid,mboriani/ClearFilters,htettwin/ui-grid,angular-ui/ui-grid,YonatanKra/ui-grid,jbarrus/ng-grid,yccteam/ui-grid,AgDude/ng-grid,Phalynx/ng-grid,robpurcell/ui-grid,zuzusik/ui-grid,abgaryanharutyun/ng-grid,kamikasky/ui-grid,Lukasz-Wisniewski/ui-grid,DexStr/ng-grid,FernCreek/ng-grid,InteractiveIntelligence/ng-grid | (function() {
"use strict";
/* jasmine specs for services go here */
describe('Dom Utility Service', function () {
var $dUtils;
var $scope;
var $linker;
var $cache;
beforeEach(module('ngGrid'));
beforeEach(inject(function ($rootScope, $domUtilityService, $templateCache, $compile) {
$scope = $rootScope.$new();
$dUtils = $domUtilityService;
$linker = $compile;
$cache = $templateCache;
}));
// AssignGridContainers
describe('AssignGridContainers', function () {
var domsizesCalled,
grid,
root;
beforeEach(function() {
grid = {
elementDims: {},
refreshDomSizes: function () {
domsizesCalled = true;
}
};
$scope.adjustScrollTop = function(top) {
expect(top).toEqual(grid.$canvas.scrollTop());
};
root = angular.element('<div class="ng-scope ngGrid"></div>');
root.append(angular.element($cache.get('gridTemplate.html')));
});
it('should should find the correct elements and assign them in the grid properly', function () {
$dUtils.AssignGridContainers($scope, root, grid);
expect(grid.$root.is(".ngGrid")).toEqual(true);
expect(grid.$root.length).toEqual(1);
expect(grid.$topPanel.is(".ngTopPanel")).toEqual(true);
expect(grid.$topPanel.length).toEqual(1);
expect(grid.$groupPanel.is(".ngGroupPanel")).toEqual(true);
expect(grid.$groupPanel.length).toEqual(1);
expect(grid.$headerContainer.is(".ngHeaderContainer")).toEqual(true);
expect(grid.$headerContainer.length).toEqual(1);
expect(grid.$headerScroller.is(".ngHeaderScroller")).toEqual(true);
expect(grid.$headerScroller.length).toEqual(1);
expect(grid.$viewport.is(".ngViewport")).toEqual(true);
expect(grid.$viewport.length).toEqual(1);
expect(grid.$canvas.is(".ngCanvas")).toEqual(true);
expect(grid.$canvas.length).toEqual(1);
// Removed footer tests as it is in its own directive, and not available in the grid template dom
expect(grid.elementDims.rootMaxH).toEqual(grid.$root.height());
expect(domsizesCalled).toEqual(true);
});
});
// BuildStyles
describe('BuildStyles', function () {
it('should set the styleSheet object of the grid to be a stylesheet object with CSS', function () {
var domsizesCalled;
var scrollLeftCalled;
var scrollTopCalled;
$scope.columns = [
{ visible: true, pinned: false, width: 100, },
{ visible: true, pinned: false, width: 100, },
{ visible: true, pinned: false, width: 100, },
{ visible: true, pinned: false, width: 100, }];
$scope.totalRowWidth = function() {
return 400;
};
$scope.adjustScrollLeft = function () {
scrollLeftCalled = true;
};
$scope.adjustScrollTop = function () {
scrollTopCalled = true;
};
var root = angular.element('<div class="ng-scope ngGrid"></div>');
root.append(angular.element($cache.get('gridTemplate.html')));
var grid = {
config: {
rowHeight: 30
},
gridId: 1,
elementDims: {},
refreshDomSizes: function () {
domsizesCalled = true;
}
};
$dUtils.AssignGridContainers($scope, root, grid);
$dUtils.BuildStyles($scope, grid, true);
var gs = grid.styleSheet;
var temp = gs && (gs.innerHTML || (gs.styleSheet && gs.styleSheet.cssText));
expect(domsizesCalled).toEqual(true);
expect(scrollLeftCalled).toEqual(true);
expect(scrollTopCalled).toEqual(true);
expect(temp).toMatch(/.1 .ngCanvas { width: 400px; }.1 .ngRow { width: 400px; }.1 .ngCanvas { width: 400px; }.1 .ngHeaderScroller { width: 4\d\dpx}.1 .col0 { width: 100px; left: 0px; height: 30px }.1 .colt0 { width: 100px; }.1 .col1 { width: 100px; left: 100px; height: 30px }.1 .colt1 { width: 100px; }.1 .col2 { width: 100px; left: 200px; height: 30px }.1 .colt2 { width: 100px; }.1 .col3 { width: 100px; left: 300px; height: 30px }.1 .colt3 { width: 100px; }/);
});
});
// setColLeft
describe('setColLeft', function () {
it('should set the left positioning of the specified column to the given integer', function () {
$scope.columns = [
{ visible: true, pinned: false, width: 100, index: 0 },
{ visible: true, pinned: false, width: 100, index: 1 },
{ visible: true, pinned: false, width: 100, index: 2 },
{ visible: true, pinned: false, width: 100, index: 3 }];
$scope.totalRowWidth = function () {return 400;};
$scope.adjustScrollLeft = function () {};
$scope.adjustScrollTop = function () {};
var root = angular.element('<div class="ng-scope ngGrid"></div>');
root.append(angular.element($cache.get('gridTemplate.html')));
var grid = {
config: {
rowHeight: 30
},
gridId: 1,
elementDims: {},
refreshDomSizes: function () {}
};
$dUtils.AssignGridContainers($scope, root, grid);
$dUtils.BuildStyles($scope, grid, true);
$dUtils.setColLeft($scope.columns[0], 300, grid);
var gs = grid.styleSheet;
var temp = gs && (gs.innerHTML || (gs.styleSheet && gs.styleSheet.cssText));
expect(temp).toMatch(/.1 .ngCanvas { width: 400px; }.1 .ngRow { width: 400px; }.1 .ngCanvas { width: 400px; }.1 .ngHeaderScroller { width: 4\d\dpx}.1 .col0 { width: 100px; left: 300px; height: 30px }.1 .colt0 { width: 100px; }.1 .col1 { width: 100px; left: 100px; height: 30px }.1 .colt1 { width: 100px; }.1 .col2 { width: 100px; left: 200px; height: 30px }.1 .colt2 { width: 100px; }.1 .col3 { width: 100px; left: 300px; height: 30px }.1 .colt3 { width: 100px; }/);
});
});
});
describe('Sort Service', function () {
var $sort;
beforeEach(module('ngGrid'));
beforeEach(inject(function ($sortService) {
$sort = $sortService;
}));
describe('guessing the sort function', function() {
var foo = {};
it('should return the correct function for the type', function () {
expect($sort.guessSortFn(true)).toEqual($sort.sortBool);
expect($sort.guessSortFn(false)).toEqual($sort.sortBool);
expect($sort.guessSortFn(-0.13)).toEqual($sort.sortNumber);
expect($sort.guessSortFn("-0.13")).toEqual($sort.sortNumberStr);
expect($sort.guessSortFn("0.13")).toEqual($sort.sortNumberStr);
expect($sort.guessSortFn("+0.13")).toEqual($sort.sortNumberStr);
expect($sort.guessSortFn(new Date())).toEqual($sort.sortDate);
expect($sort.guessSortFn("foo")).toEqual($sort.sortAlpha);
expect($sort.guessSortFn(foo)).toEqual($sort.basicSort);
});
});
});
describe('Utility Service', function () {
var $utils;
beforeEach(module('ngGrid'));
beforeEach(inject(function ($utilityService) {
$utils = $utilityService;
}));
// evalProperty
describe('evalProperty should find the right property given a heirarchy.', function () {
// foundme
it('returns foundme', function() {
var obj = { foo: { bar: { hello: { world: "foundme" } } } };
expect($utils.evalProperty(obj, "foo.bar.hello.world")).toEqual("foundme");
});
// undefined
it('returns undefined', function () {
var obj = { foo: { bar: { hello: { world: "foundme" } } } };
expect($utils.evalProperty(obj, "foo.bar.omg")).toEqual(undefined);
});
});
// visualLength
describe('visualLength should return the correct visual length of text.', function () {
it('returns integer', function() {
var div = '<div style="line-height: 1; margin: 0; padding: 0; border: 0; vertical-align: baseline; width: 30px; font-family: Arial; font-size: 12pt">The quick brown fox jumped over the lazy dog.</div></body></html>';
var visualLength = $utils.visualLength(div);
// Was .toEqual(286) but was inconsistent across Browsers and operating systems. Firefox is 329, Chromium, Chrome, and PhantomJS are 286, Travis CI is 367!
// CSS reset and forcing font family and physical font sizes does not help
expect(visualLength).toBeGreaterThan(285);
expect(visualLength).toBeLessThan(368);
});
});
// forIn
describe('forIn should execute the function for each key in an object.', function() {
it('executes some code', function () {
var obj = {
foo: "foo",
bar: "bar",
hello: "hello",
world: "world"
};
$utils.forIn(obj, function (val, key) {
obj[key] = "foundme";
});
expect(obj.foo).toEqual("foundme");
expect(obj.bar).toEqual("foundme");
expect(obj.hello).toEqual("foundme");
expect(obj.world).toEqual("foundme");
});
});
// endsWith
describe('endsWith should return true or false based on the last character in a string', function () {
var str = "Peter Piper picked a peck of pickeled peppers";
it('returns true', function() {
expect($utils.endsWith(str, "peppers")).toEqual(true);
});
it('returns false', function () {
expect($utils.endsWith(str, "peter")).toEqual(false);
});
});
// isNullOrUndefined
describe('isNullOrUndefined return true or false based on wherer or not a given reference is explucitly null or undefined', function () {
it('returns true', function () {
var hello;
expect($utils.isNullOrUndefined(hello)).toEqual(true);
hello = null;
expect($utils.isNullOrUndefined(hello)).toEqual(true);
hello = undefined;
expect($utils.isNullOrUndefined(hello)).toEqual(true);
expect($utils.isNullOrUndefined(null)).toEqual(true);
expect($utils.isNullOrUndefined(undefined)).toEqual(true);
});
it('returns false', function () {
expect($utils.isNullOrUndefined("foundme")).toEqual(false);
expect($utils.isNullOrUndefined("")).toEqual(false);
expect($utils.isNullOrUndefined(0)).toEqual(false);
});
});
});
})();
| test/unit/servicesSpec.js | (function() {
"use strict";
/* jasmine specs for services go here */
describe('Dom Utility Service', function () {
var $dUtils;
var $scope;
var $linker;
var $cache;
beforeEach(module('ngGrid'));
beforeEach(inject(function ($rootScope, $domUtilityService, $templateCache, $compile) {
$scope = $rootScope.$new();
$dUtils = $domUtilityService;
$linker = $compile;
$cache = $templateCache;
}));
// AssignGridContainers
describe('AssignGridContainers', function () {
var domsizesCalled,
grid,
root;
beforeEach(function() {
grid = {
elementDims: {},
refreshDomSizes: function () {
domsizesCalled = true;
}
};
$scope.adjustScrollTop = function(top) {
expect(top).toEqual(grid.$canvas.scrollTop());
};
root = angular.element('<div class="ng-scope ngGrid"></div>');
root.append(angular.element($cache.get('gridTemplate.html')));
});
it('should should find the correct elements and assign them in the grid properly', function () {
$dUtils.AssignGridContainers($scope, root, grid);
expect(grid.$root.is(".ngGrid")).toEqual(true);
expect(grid.$root.length).toEqual(1);
expect(grid.$topPanel.is(".ngTopPanel")).toEqual(true);
expect(grid.$topPanel.length).toEqual(1);
expect(grid.$groupPanel.is(".ngGroupPanel")).toEqual(true);
expect(grid.$groupPanel.length).toEqual(1);
expect(grid.$headerContainer.is(".ngHeaderContainer")).toEqual(true);
expect(grid.$headerContainer.length).toEqual(1);
expect(grid.$headerScroller.is(".ngHeaderScroller")).toEqual(true);
expect(grid.$headerScroller.length).toEqual(1);
expect(grid.$viewport.is(".ngViewport")).toEqual(true);
expect(grid.$viewport.length).toEqual(1);
expect(grid.$canvas.is(".ngCanvas")).toEqual(true);
expect(grid.$canvas.length).toEqual(1);
// Removed footer tests as it is in its own directive, and not available in the grid template dom
expect(grid.elementDims.rootMaxH).toEqual(grid.$root.height());
expect(domsizesCalled).toEqual(true);
});
});
// BuildStyles
describe('BuildStyles', function () {
it('should set the $styleSheet object of the grid to be a stylesheet object with CSS', function () {
var domsizesCalled;
var scrollLeftCalled;
var scrollTopCalled;
$scope.columns = [
{ visible: true, pinned: false, width: 100, },
{ visible: true, pinned: false, width: 100, },
{ visible: true, pinned: false, width: 100, },
{ visible: true, pinned: false, width: 100, }];
$scope.totalRowWidth = function() {
return 400;
};
$scope.adjustScrollLeft = function () {
scrollLeftCalled = true;
};
$scope.adjustScrollTop = function () {
scrollTopCalled = true;
};
var root = angular.element('<div class="ng-scope ngGrid"></div>');
root.append(angular.element($cache.get('gridTemplate.html')));
var grid = {
config: {
rowHeight: 30
},
gridId: 1,
elementDims: {},
refreshDomSizes: function () {
domsizesCalled = true;
}
};
$dUtils.AssignGridContainers($scope, root, grid);
$dUtils.BuildStyles($scope, grid, true);
var temp = grid.$styleSheet.html();
expect(domsizesCalled).toEqual(true);
expect(scrollLeftCalled).toEqual(true);
expect(scrollTopCalled).toEqual(true);
expect(temp).toMatch(/.1 .ngCanvas { width: 400px; }.1 .ngRow { width: 400px; }.1 .ngCanvas { width: 400px; }.1 .ngHeaderScroller { width: 4\d\dpx}.1 .col0 { width: 100px; left: 0px; height: 30px }.1 .colt0 { width: 100px; }.1 .col1 { width: 100px; left: 100px; height: 30px }.1 .colt1 { width: 100px; }.1 .col2 { width: 100px; left: 200px; height: 30px }.1 .colt2 { width: 100px; }.1 .col3 { width: 100px; left: 300px; height: 30px }.1 .colt3 { width: 100px; }/);
});
});
// setColLeft
describe('setColLeft', function () {
it('should set the left positioning of the specified column to the given integer', function () {
$scope.columns = [
{ visible: true, pinned: false, width: 100, index: 0 },
{ visible: true, pinned: false, width: 100, index: 1 },
{ visible: true, pinned: false, width: 100, index: 2 },
{ visible: true, pinned: false, width: 100, index: 3 }];
$scope.totalRowWidth = function () {return 400;};
$scope.adjustScrollLeft = function () {};
$scope.adjustScrollTop = function () {};
var root = angular.element('<div class="ng-scope ngGrid"></div>');
root.append(angular.element($cache.get('gridTemplate.html')));
var grid = {
config: {
rowHeight: 30
},
gridId: 1,
elementDims: {},
refreshDomSizes: function () {}
};
$dUtils.AssignGridContainers($scope, root, grid);
$dUtils.BuildStyles($scope, grid, true);
$dUtils.setColLeft($scope.columns[0], 300, grid);
var temp = grid.$styleSheet.html();
expect(temp).toMatch(/.1 .ngCanvas { width: 400px; }.1 .ngRow { width: 400px; }.1 .ngCanvas { width: 400px; }.1 .ngHeaderScroller { width: 4\d\dpx}.1 .col0 { width: 100px; left: 300px; height: 30px }.1 .colt0 { width: 100px; }.1 .col1 { width: 100px; left: 100px; height: 30px }.1 .colt1 { width: 100px; }.1 .col2 { width: 100px; left: 200px; height: 30px }.1 .colt2 { width: 100px; }.1 .col3 { width: 100px; left: 300px; height: 30px }.1 .colt3 { width: 100px; }/);
});
});
});
describe('Sort Service', function () {
var $sort;
beforeEach(module('ngGrid'));
beforeEach(inject(function ($sortService) {
$sort = $sortService;
}));
describe('guessing the sort function', function() {
var foo = {};
it('should return the correct function for the type', function () {
expect($sort.guessSortFn(true)).toEqual($sort.sortBool);
expect($sort.guessSortFn(false)).toEqual($sort.sortBool);
expect($sort.guessSortFn(-0.13)).toEqual($sort.sortNumber);
expect($sort.guessSortFn("-0.13")).toEqual($sort.sortNumberStr);
expect($sort.guessSortFn("0.13")).toEqual($sort.sortNumberStr);
expect($sort.guessSortFn("+0.13")).toEqual($sort.sortNumberStr);
expect($sort.guessSortFn(new Date())).toEqual($sort.sortDate);
expect($sort.guessSortFn("foo")).toEqual($sort.sortAlpha);
expect($sort.guessSortFn(foo)).toEqual($sort.basicSort);
});
});
});
describe('Utility Service', function () {
    var $utils;
    beforeEach(module('ngGrid'));
    beforeEach(inject(function ($utilityService) {
        $utils = $utilityService;
    }));
    // evalProperty: resolves a dotted path ("a.b.c") against a nested object.
    describe('evalProperty should find the right property given a heirarchy.', function () {
        // Happy path: the full path exists.
        it('returns foundme', function() {
            var obj = { foo: { bar: { hello: { world: "foundme" } } } };
            expect($utils.evalProperty(obj, "foo.bar.hello.world")).toEqual("foundme");
        });
        // Missing leaf: lookup must yield undefined, not throw.
        it('returns undefined', function () {
            var obj = { foo: { bar: { hello: { world: "foundme" } } } };
            expect($utils.evalProperty(obj, "foo.bar.omg")).toEqual(undefined);
        });
    });
    // visualLength: measures rendered pixel width of markup.
    describe('visualLength should return the correct visual length of text.', function () {
        it('returns integer', function() {
            var div = '<div style="line-height: 1; margin: 0; padding: 0; border: 0; vertical-align: baseline; width: 30px; font-family: Arial; font-size: 12pt">The quick brown fox jumped over the lazy dog.</div></body></html>';
            var visualLength = $utils.visualLength(div);
            // Was .toEqual(286) but was inconsistent across Browsers and operating systems. Firefox is 329, Chromium, Chrome, and PhantomJS are 286, Travis CI is 367!
            // CSS reset and forcing font family and physical font sizes does not help
            // so only a plausible range is asserted here.
            expect(visualLength).toBeGreaterThan(285);
            expect(visualLength).toBeLessThan(368);
        });
    });
    // forIn: iterates own keys, passing (value, key) to the callback.
    describe('forIn should execute the function for each key in an object.', function() {
        it('executes some code', function () {
            var obj = {
                foo: "foo",
                bar: "bar",
                hello: "hello",
                world: "world"
            };
            // Overwrite every value; afterwards all keys must carry the sentinel.
            $utils.forIn(obj, function (val, key) {
                obj[key] = "foundme";
            });
            expect(obj.foo).toEqual("foundme");
            expect(obj.bar).toEqual("foundme");
            expect(obj.hello).toEqual("foundme");
            expect(obj.world).toEqual("foundme");
        });
    });
    // endsWith: suffix test on strings.
    describe('endsWith should return true or false based on the last character in a string', function () {
        var str = "Peter Piper picked a peck of pickeled peppers";
        it('returns true', function() {
            expect($utils.endsWith(str, "peppers")).toEqual(true);
        });
        it('returns false', function () {
            expect($utils.endsWith(str, "peter")).toEqual(false);
        });
    });
    // isNullOrUndefined: strictly null/undefined — falsy values like '' and 0 do not count.
    describe('isNullOrUndefined return true or false based on wherer or not a given reference is explucitly null or undefined', function () {
        it('returns true', function () {
            var hello;
            expect($utils.isNullOrUndefined(hello)).toEqual(true);
            hello = null;
            expect($utils.isNullOrUndefined(hello)).toEqual(true);
            hello = undefined;
            expect($utils.isNullOrUndefined(hello)).toEqual(true);
            expect($utils.isNullOrUndefined(null)).toEqual(true);
            expect($utils.isNullOrUndefined(undefined)).toEqual(true);
        });
        it('returns false', function () {
            // Falsy-but-defined values must be reported as present.
            expect($utils.isNullOrUndefined("foundme")).toEqual(false);
            expect($utils.isNullOrUndefined("")).toEqual(false);
            expect($utils.isNullOrUndefined(0)).toEqual(false);
        });
    });
});
})();
| fix tests
| test/unit/servicesSpec.js | fix tests | <ide><path>est/unit/servicesSpec.js
<ide>
<ide> it('should should find the correct elements and assign them in the grid properly', function () {
<ide> $dUtils.AssignGridContainers($scope, root, grid);
<del>
<add>
<ide> expect(grid.$root.is(".ngGrid")).toEqual(true);
<ide> expect(grid.$root.length).toEqual(1);
<ide>
<ide> expect(grid.$viewport.length).toEqual(1);
<ide> expect(grid.$canvas.is(".ngCanvas")).toEqual(true);
<ide> expect(grid.$canvas.length).toEqual(1);
<del>
<add>
<ide> // Removed footer tests as it is in its own directive, and not available in the grid template dom
<ide>
<ide> expect(grid.elementDims.rootMaxH).toEqual(grid.$root.height());
<ide> });
<ide> // BuildStyles
<ide> describe('BuildStyles', function () {
<del> it('should set the $styleSheet object of the grid to be a stylesheet object with CSS', function () {
<add> it('should set the styleSheet object of the grid to be a stylesheet object with CSS', function () {
<ide> var domsizesCalled;
<ide> var scrollLeftCalled;
<ide> var scrollTopCalled;
<ide> };
<ide> $dUtils.AssignGridContainers($scope, root, grid);
<ide> $dUtils.BuildStyles($scope, grid, true);
<del> var temp = grid.$styleSheet.html();
<add> var gs = grid.styleSheet;
<add> var temp = gs && (gs.innerHTML || (gs.styleSheet && gs.styleSheet.cssText));
<ide> expect(domsizesCalled).toEqual(true);
<ide> expect(scrollLeftCalled).toEqual(true);
<ide> expect(scrollTopCalled).toEqual(true);
<ide> $dUtils.AssignGridContainers($scope, root, grid);
<ide> $dUtils.BuildStyles($scope, grid, true);
<ide> $dUtils.setColLeft($scope.columns[0], 300, grid);
<del> var temp = grid.$styleSheet.html();
<add> var gs = grid.styleSheet;
<add> var temp = gs && (gs.innerHTML || (gs.styleSheet && gs.styleSheet.cssText));
<ide> expect(temp).toMatch(/.1 .ngCanvas { width: 400px; }.1 .ngRow { width: 400px; }.1 .ngCanvas { width: 400px; }.1 .ngHeaderScroller { width: 4\d\dpx}.1 .col0 { width: 100px; left: 300px; height: 30px }.1 .colt0 { width: 100px; }.1 .col1 { width: 100px; left: 100px; height: 30px }.1 .colt1 { width: 100px; }.1 .col2 { width: 100px; left: 200px; height: 30px }.1 .colt2 { width: 100px; }.1 .col3 { width: 100px; left: 300px; height: 30px }.1 .colt3 { width: 100px; }/);
<ide> });
<ide> });
<ide> var visualLength = $utils.visualLength(div);
<ide> // Was .toEqual(286) but was inconsistent across Browsers and operating systems. Firefox is 329, Chromium, Chrome, and PhantomJS are 286, Travis CI is 367!
<ide> // CSS reset and forcing font family and physical font sizes does not help
<del> expect(visualLength).toBeGreaterThan(285);
<del> expect(visualLength).toBeLessThan(368);
<add> expect(visualLength).toBeGreaterThan(285);
<add> expect(visualLength).toBeLessThan(368);
<ide> });
<ide> });
<ide> // forIn
<ide> describe('endsWith should return true or false based on the last character in a string', function () {
<ide> var str = "Peter Piper picked a peck of pickeled peppers";
<ide> it('returns true', function() {
<del>
<add>
<ide> expect($utils.endsWith(str, "peppers")).toEqual(true);
<ide> });
<ide> it('returns false', function () {
<ide> // isNullOrUndefined
<ide> describe('isNullOrUndefined return true or false based on wherer or not a given reference is explucitly null or undefined', function () {
<ide> it('returns true', function () {
<del> var hello;
<add> var hello;
<ide> expect($utils.isNullOrUndefined(hello)).toEqual(true);
<ide>
<ide> hello = null;
<ide> expect($utils.isNullOrUndefined(hello)).toEqual(true);
<del>
<add>
<ide> hello = undefined;
<ide> expect($utils.isNullOrUndefined(hello)).toEqual(true);
<ide> expect($utils.isNullOrUndefined(null)).toEqual(true); |
|
Java | apache-2.0 | 221fc83ec0053a228a762adbb088d55062cede9d | 0 | atcult/mod-cataloging,atcult/mod-cataloging,atcult/mod-cataloging | package org.folio.cataloging.dao;
import net.sf.hibernate.Hibernate;
import net.sf.hibernate.HibernateException;
import net.sf.hibernate.Session;
import net.sf.hibernate.type.Type;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.folio.cataloging.Global;
import org.folio.cataloging.business.codetable.Avp;
import org.folio.cataloging.business.common.DataAccessException;
import org.folio.cataloging.business.common.Defaults;
import org.folio.cataloging.dao.persistence.*;
import org.folio.cataloging.log.MessageCatalog;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.*;
import static java.util.stream.Collectors.toList;
/**
* Manages access to table S_BIB_MARC_IND_DB_CRLTN -- the correlation between AMICUS
* database encoding and MARC21 encoding
* @author paulm
* @author natasciab
* @version $Revision: 1.12 $, $Date: 2007/02/13 09:17:59 $
*/
@SuppressWarnings("unchecked")
public class DAOBibliographicCorrelation extends DAOCorrelation {
	// Shared logger for this DAO.
	private static final Log logger =
		LogFactory.getLog(DAOBibliographicCorrelation.class);
	// HQL "order by" fragments appended to code-table queries.
	private static final String ALPHABETICAL_ORDER = " order by ct.longText ";
	private static final String SEQUENCE_ORDER = " order by ct.sequence ";
	// Preferred list ordering, driven by the "labels.alphabetical.order" configuration flag.
	// NOTE(review): not referenced by any method visible in this file — confirm before removing.
	private String defaultListOrder = Defaults.getBoolean("labels.alphabetical.order", true)?ALPHABETICAL_ORDER:SEQUENCE_ORDER;
/**
* Returns the BibliographicCorrelation from BibliographicCorrelationKey
* @param bibliographicCorrelationKey -- the database bibliographicCorrelationKey
* @return a BibliographicCorrelation object containing or null when none found
*
*/
public Correlation getBibliographicCorrelation(CorrelationKey bibliographicCorrelationKey)
throws DataAccessException {
return getBibliographicCorrelation(
bibliographicCorrelationKey.getMarcTag(),
bibliographicCorrelationKey.getMarcFirstIndicator(),
bibliographicCorrelationKey.getMarcSecondIndicator(),
bibliographicCorrelationKey.getMarcTagCategoryCode());
}
/**
* Returns the BibliographicCorrelation based on MARC encoding and category code
* @param tag -- marc tag
* @param firstIndicator -- marc first indicator
* @param secondIndicator -- marc second indicator
* @param categoryCode -- category code
* @return a BibliographicCorrelation object or null when none found
*
*/
public Correlation getBibliographicCorrelation(
String tag,
char firstIndicator,
char secondIndicator,
short categoryCode)
throws DataAccessException {
List l=null;
if(categoryCode!=0){
l =
find(
"from BibliographicCorrelation as bc "
+ "where bc.key.marcTag = ? and "
+ "(bc.key.marcFirstIndicator = ? or bc.key.marcFirstIndicator='S' )and "
+ "bc.key.marcFirstIndicator <> '@' and "
+ "(bc.key.marcSecondIndicator = ? or bc.key.marcSecondIndicator='S')and "
+ "bc.key.marcSecondIndicator <> '@' and "
+ "bc.key.marcTagCategoryCode = ?",
new Object[] {
new String(tag),
new Character(firstIndicator),
new Character(secondIndicator),
new Short(categoryCode)},
new Type[] {
Hibernate.STRING,
Hibernate.CHARACTER,
Hibernate.CHARACTER,
Hibernate.SHORT });
}
else {
l =
find(
"from BibliographicCorrelation as bc "
+ "where bc.key.marcTag = ? and "
+ "(bc.key.marcFirstIndicator = ? or bc.key.marcFirstIndicator='S' )and "
+ "bc.key.marcFirstIndicator <> '@' and "
+ "(bc.key.marcSecondIndicator = ? or bc.key.marcSecondIndicator='S')and "
+ "bc.key.marcSecondIndicator <> '@' order by bc.key.marcTagCategoryCode asc",
new Object[] {
new String(tag),
new Character(firstIndicator),
new Character(secondIndicator)},
new Type[] {
Hibernate.STRING,
Hibernate.CHARACTER,
Hibernate.CHARACTER});
}
//if (l.size() == 1) {
if (l.size() >=1) {
return (Correlation) l.get(0);
}
else
return null;
}
/**
* Returns the MARC encoding based on the input database encodings
* @param category -- the database category (1-name, etc...)
* @param value1 -- the first database code
* @param value2 -- the second database code
* @param value3 -- the third database code
* @return a BibliographicCorrelationKey object containing
* the MARC encoding (tag and indicators) or null when none found
*
*/
public CorrelationKey getMarcEncoding(
short category,
short value1,
short value2,
short value3)
throws DataAccessException {
List l =
find(
"from BibliographicCorrelation as bc "
+ "where bc.key.marcTagCategoryCode = ? and "
+ "bc.databaseFirstValue = ? and "
+ "bc.databaseSecondValue = ? and "
+ "bc.databaseThirdValue = ?",
new Object[] {
new Short(category),
new Short(value1),
new Short(value2),
new Short(value3)},
new Type[] {
Hibernate.SHORT,
Hibernate.SHORT,
Hibernate.SHORT,
Hibernate.SHORT });
if (l.size() == 1) {
return ((Correlation) l.get(0)).getKey();
} else {
return null;
}
}
	/**
	 * Loads the second-correlation code-table values for a category/first-value pair.
	 *
	 * @deprecated use {@link #getSecondCorrelationList(Session, short, short, Class, Locale)},
	 * which is session- and locale-aware.
	 */
	@Deprecated
	public List getSecondCorrelationList(short category,short value1,Class codeTable) throws DataAccessException
	{
		// Joins the given code table with the correlation table; '@' indicators mark
		// rows that must never be offered for selection.
		return find("Select distinct ct from "
			+ codeTable.getName()
			+ " as ct, BibliographicCorrelation as bc "
			+ " where bc.key.marcTagCategoryCode = ? and "
			+ " bc.key.marcFirstIndicator <> '@' and "
			+ " bc.key.marcSecondIndicator <> '@' and "
			+ " bc.databaseFirstValue = ? and "
			+ " bc.databaseSecondValue = ct.code and "
			+ "ct.obsoleteIndicator = '0' order by ct.sequence ",
			new Object[] { new Short(category), new Short(value1)},
			new Type[] { Hibernate.SHORT, Hibernate.SHORT});
	}
	/**
	 * Get second correlation values by marc category and first correlation.
	 *
	 * @param session the hibernate session
	 * @param category the marc category used as filter criterion
	 * @param value1 the first correlation value used as filter criterion
	 * @param classTable the mapped class in the hibernate configuration
	 * @param locale the locale associated to language used as filter criterion
	 * @return the matching code-table entries as code/label pairs; empty on failure.
	 * @throws DataAccessException
	 */
	public List<Avp<String>> getSecondCorrelationList(final Session session,
                                                      final short category,
                                                      final short value1,
                                                      final Class classTable,
                                                      final Locale locale) throws DataAccessException
	{
		try {
			// '@' indicators and obsolete rows are excluded from the selection.
			final List<CodeTable> codeTables = session.find("Select distinct ct from "
					+ classTable.getName()
					+ " as ct, BibliographicCorrelation as bc "
					+ " where bc.key.marcTagCategoryCode = ? and "
					+ " bc.key.marcFirstIndicator <> '@' and "
					+ " bc.key.marcSecondIndicator <> '@' and "
					+ " bc.databaseFirstValue = ? and "
					+ " bc.databaseSecondValue = ct.code and "
					+ " ct.obsoleteIndicator = '0' and "
					+ " ct.language = ? "
					+ " order by ct.sequence ",
					new Object[]{new Short(category), new Short(value1), locale.getISO3Language()},
					new Type[]{Hibernate.SHORT, Hibernate.SHORT, Hibernate.STRING});
			// Trim the stored code and pair it with its display text.
			return codeTables
					.stream()
					.map(codeTable -> (Avp<String>) new Avp(codeTable.getCodeString().trim(), codeTable.getLongText()))
					.collect(toList());
		} catch (final HibernateException exception) {
			// Failures are logged and reported to callers as an empty list.
			logger.error(MessageCatalog._00010_DATA_ACCESS_FAILURE, exception);
			return Collections.emptyList();
		}
	}
	// Native SQL: classification type/function labels joined with the MARC
	// correlation table. Parameter 1 is the MARC tag category code, parameter 2
	// the classification type code; obsolete rows are excluded.
	public static final String SELECT_CLASSIFICATION_TAG_LABELS =
		"SELECT AA.TBL_SEQ_NBR, AA.TYP_VLU_CDE, aa.FNCTN_VLU_CDE, AA.TBL_VLU_OBSLT_IND, AA.SHORT_STRING_TEXT, AA.STRING_TEXT, AA.LANGID"
			+ " FROM "
			+ System.getProperty(Global.SCHEMA_SUITE_KEY) +".S_BIB_CLSTN_TYP_FNCTN AA,"
			+ System.getProperty(Global.SCHEMA_SUITE_KEY) +".S_BIB_MARC_IND_DB_CRLTN BC"
			+ " WHERE BC.MARC_TAG_CAT_CDE = ?"
			+ " AND BC.MARC_TAG_1ST_IND <> '@'"
			+ " AND BC.MARC_TAG_2ND_IND <> '@'"
			+ " AND BC.MARC_TAG_IND_VLU_1_CDE = ?"
			+ " AND BC.MARC_TAG_IND_VLU_2_CDE = AA.FNCTN_VLU_CDE"
			+ " AND BC.MARC_TAG_IND_VLU_1_CDE = AA.TYP_VLU_CDE"
			+ " AND AA.TBL_VLU_OBSLT_IND = 0"
			+ " ORDER BY AA.TBL_SEQ_NBR";
public List<ClassificationFunction> getClassificationTagLabels(short category, short value1) throws DataAccessException
{
Connection connection = null;
PreparedStatement stmt = null;
ResultSet rs = null;
Session session = currentSession();
List<ClassificationFunction> list = new ArrayList<ClassificationFunction>();
ClassificationFunction item = null;
try {
connection = session.connection();
stmt = connection.prepareStatement(SELECT_CLASSIFICATION_TAG_LABELS);
stmt.setInt(1, category);
stmt.setInt(2, value1);
rs = stmt.executeQuery();
while (rs.next()) {
item = new ClassificationFunction();
item.setSequence(rs.getInt("TBL_SEQ_NBR"));
// item.setCode(rs.getShort("TYP_VLU_CDE"));
item.setCode(rs.getShort("FNCTN_VLU_CDE"));
item.setObsoleteIndicator((rs.getString("TBL_VLU_OBSLT_IND")).charAt(0));
item.setLanguage(rs.getString("LANGID"));
item.setShortText(rs.getString("SHORT_STRING_TEXT"));
item.setLongText(rs.getString("STRING_TEXT"));
list.add(item);
}
} catch (HibernateException e) {
logAndWrap(e);
} catch (SQLException e) {
logAndWrap(e);
} finally {
try{ rs.close(); } catch(Exception ex){}
try{ stmt.close(); } catch(Exception ex){}
}
return list;
}
	/**
	 * Loads the third-correlation code-table values for a category/value1/value2 triple.
	 *
	 * @deprecated use {@link #getThirdCorrelationList(Session, short, short, short, Class, Locale)},
	 * which is session- and locale-aware.
	 */
	@Deprecated
	public List getThirdCorrelationList(
		short category,
		short value1,
		short value2,
		Class codeTable)
		throws DataAccessException {
		// Joins the given code table with the correlation table; '@' indicators mark
		// rows that must never be offered for selection.
		return find(
			" select distinct ct from "
				+ codeTable.getName()
				+ " as ct, BibliographicCorrelation as bc "
				+ " where bc.key.marcTagCategoryCode = ? and "
				+ " bc.key.marcFirstIndicator <> '@' and "
				+ " bc.key.marcSecondIndicator <> '@' and "
				+ " bc.databaseFirstValue = ? and "
				+ " bc.databaseSecondValue = ? and "
				+ " bc.databaseThirdValue = ct.code and "
				+ " ct.obsoleteIndicator = 0 order by ct.sequence ",
			new Object[] {
				new Short(category),
				new Short(value1),
				new Short(value2)},
			new Type[] { Hibernate.SHORT, Hibernate.SHORT, Hibernate.SHORT });
	}
	/**
	 * Gets third correlation values by marc category, first and second correlations.
	 *
	 * @param session the hibernate session
	 * @param category the marc category used as filter criterion
	 * @param value1 the first correlation value used as filter criterion
	 * @param value2 the second correlation value used as filter criterion
	 * @param classTable the mapped class in the hibernate configuration
	 * @param locale the locale associated to language used as filter criterion
	 * @return the matching code-table entries as code/label pairs; empty on failure.
	 * @throws DataAccessException
	 */
	public List<Avp<String>> getThirdCorrelationList(final Session session,
                                                     final short category,
                                                     final short value1,
                                                     final short value2,
                                                     final Class classTable,
                                                     final Locale locale) throws DataAccessException {
		try{
			// '@' indicators and obsolete rows are excluded from the selection.
			final List<CodeTable> codeTables = session.find(" select distinct ct from "
					+ classTable.getName()
					+ " as ct, BibliographicCorrelation as bc "
					+ " where bc.key.marcTagCategoryCode = ? and "
					+ " bc.key.marcFirstIndicator <> '@' and "
					+ " bc.key.marcSecondIndicator <> '@' and "
					+ " bc.databaseFirstValue = ? and "
					+ " bc.databaseSecondValue = ? and "
					+ " bc.databaseThirdValue = ct.code and "
					+ " ct.obsoleteIndicator = '0' and "
					+ " ct.language = ? "
					+ " order by ct.sequence ",
					new Object[] {new Short(category), new Short(value1), new Short(value2), locale.getISO3Language()},
					new Type[] { Hibernate.SHORT, Hibernate.SHORT, Hibernate.SHORT, Hibernate.STRING });
			// Trim the stored code and pair it with its display text.
			return codeTables
					.stream()
					.map(codeTable -> (Avp<String>) new Avp(codeTable.getCodeString().trim(), codeTable.getLongText()))
					.collect(toList());
		} catch (final HibernateException exception) {
			// Failures are logged and reported to callers as an empty list.
			logger.error(MessageCatalog._00010_DATA_ACCESS_FAILURE, exception);
			return Collections.emptyList();
		}
	}
public short getFirstAllowedValue2(
short category,
short value1,
short value3)
throws DataAccessException {
List l = find(
" from BibliographicCorrelation as bc "
+ " where bc.key.marcTagCategoryCode = ? and "
+ " bc.key.marcFirstIndicator <> '@' and "
+ " bc.key.marcSecondIndicator <> '@' and "
+ " bc.databaseFirstValue = ? and "
+ " bc.databaseThirdValue = ? ",
new Object[] {
new Short(category),
new Short(value1),
new Short(value3)},
new Type[] { Hibernate.SHORT, Hibernate.SHORT, Hibernate.SHORT });
if (l.size() > 0) {
return ((BibliographicCorrelation)l.get(0)).getDatabaseSecondValue();
}
else {
return -1;
}
}
public String getClassificationIndexByShelfType(short shelfType) throws DataAccessException
{
List l = find("from BibliographicCorrelation as bc "
+ " where bc.key.marcTagCategoryCode = 13 and "
+ " bc.databaseFirstValue = ? ",
new Object[] { new Short(shelfType)},
new Type[] { Hibernate.SHORT });
if (l.size() == 1) {
String s = ((Correlation) l.get(0)).getSearchIndexTypeCode();
return new DAOIndexList().getIndexByEnglishAbreviation(s);
} else {
return null;
}
}
/*modifica barbara 13/04/2007 PRN 127 - nuova intestazione su lista vuota default maschera inserimento intestazione nome*/
public CorrelationKey getMarcTagCodeBySelectedIndex(String selectedIndex)
throws DataAccessException {
if(selectedIndex == null)
{
return null;
}
List l =
find(
"from BibliographicCorrelation as bc "
+ " where bc.searchIndexTypeCode = ?"
+" or bc.searchIndexTypeCode = ?" ,
new Object[] { new String(selectedIndex.substring(0, 2)),
new String(selectedIndex.substring(0, 2).toLowerCase())},
new Type[] { Hibernate.STRING, Hibernate.STRING});
if(l.size()>0)
return ((Correlation) l.get(0)).getKey();
else
return null;
}
public CorrelationKey getMarcTagCodeBySelectedIndex(String selectedIndex, String tagNumber) throws DataAccessException
{
List l =
find(
"from BibliographicCorrelation as bc "
+ " where (bc.searchIndexTypeCode = ?"
+" or bc.searchIndexTypeCode = ?)"
+" and bc.key.marcTag = ?",
new Object[] { new String(selectedIndex.substring(0, 2)),
new String(selectedIndex.substring(0, 2).toLowerCase()),
new String(tagNumber)},
new Type[] { Hibernate.STRING, Hibernate.STRING, Hibernate.STRING});
if(l.size()>0)
return ((Correlation) l.get(0)).getKey();
else
return null;
}
/**
* Return the label for the tag to display
* @return MarcTagDisplay
* @throws DataAccessException
*/
public List<LabelTagDisplay> getMarcTagDisplay(String language){
List l = new ArrayList<MarcTagDisplay>();
try {
l = find(
"from MarcTagDisplay as bc "
+ "where bc.language = ? ",
new Object[] {
new String(language)
},
new Type[] {
Hibernate.STRING});
} catch (DataAccessException e) {
logger.debug("DataAccessException for list of MarcTagDisplay");
}
if (l.size()> 0) {
return l;
}
return l;
}
/* Bug 4775 inizio */
public List<RdaMarcTagDisplay> getRdaMarcTagDisplay(String language)
{
List l = new ArrayList<MarcTagDisplay>();
try {
l = find(
"from RdaMarcTagDisplay as bc where bc.language = ? ",
new Object[] {new String(language)}, new Type[] {Hibernate.STRING});
} catch (DataAccessException e) {
logger.debug("DataAccessException for list of RdaMarcTagDisplay");
}
if (l.size()> 0) {
return l;
}
return l;
}
/* Bug 4775 fine */
/**
* Label per authority
* @param language
* @return
*/
public List<LabelTagDisplay> getAutorityMarcTagDisplay(String language)
{
List l = new ArrayList<AutMarcTagDisplay>();
try {
l = find("from AutMarcTagDisplay as bc where bc.language = ? ",
new Object[] {new String(language)},new Type[] {Hibernate.STRING});
} catch (DataAccessException e) {
logger.debug("DataAccessException for list of MarcTagDisplay");
}
if (l.size()> 0) {
return l;
}
return l;
}
public List<BibliographicCorrelation> getFirstAllowedValue2List(short category, short value1, short value3) throws DataAccessException
{
List l = find(" from BibliographicCorrelation as bc "
+ " where bc.key.marcTagCategoryCode = ? and "
+ " bc.key.marcFirstIndicator <> '@' and "
+ " bc.key.marcSecondIndicator <> '@' and "
+ " bc.databaseFirstValue = ? and "
+ " bc.databaseThirdValue = ? ",
new Object[] {
new Short(category),
new Short(value1),
new Short(value3)},
new Type[] { Hibernate.SHORT, Hibernate.SHORT, Hibernate.SHORT });
return l;
}
/**
* Checking validity of the second correlation
* @param l
* @param value2
* @return
* @throws DataAccessException
*/
public boolean isPresentSecondCorrelation(List l, short value2) throws DataAccessException
{
boolean isPresent = false;
if (l.size() > 0) {
Iterator<BibliographicCorrelation> ite = l.iterator();
while (ite.hasNext()) {
short secondCorr = ite.next().getDatabaseSecondValue();
if (secondCorr == value2) {
isPresent = true;
}
}
}
return isPresent;
}
	/**
	 * Loads first correlation list using note group code for tag range values.
	 *
	 * @param session the session of hibernate
	 * @param noteGroupTypeCode the note group code used as filter criterion.
	 * @param locale the Locale, used here as a filter criterion.
	 * @return the note-type entries whose tag falls in the group's range.
	 * @throws DataAccessException
	 */
	public List<Avp<String>> getFirstCorrelationByNoteGroupCode(final Session session, final String noteGroupTypeCode, final Locale locale) throws DataAccessException {
		// NOTE(review): assumes noteGroupTypeCode is numeric — Short.parseShort below
		// throws NumberFormatException otherwise; confirm with callers.
		// Two-character codes are zero-padded to align with three-character MARC tags.
		final String fromTag = (noteGroupTypeCode.length() == 2 ? "0"+noteGroupTypeCode : noteGroupTypeCode);
		// The group covers a 100-tag window: [code, code + 99].
		final String toTag = String.valueOf(Short.parseShort(noteGroupTypeCode) + 99);
		// Extra filter fragment handed to getCorrelatedList: category 7 (notes),
		// non-'@' indicators, tag restricted to the group's range.
		final StringBuilder sqlFilter = new StringBuilder(" and bc.key.marcSecondIndicator <> '@' ")
				.append(" and bc.databaseFirstValue = ct.code ")
				.append(" and bc.key.marcTagCategoryCode = 7 ")
				.append(" and bc.key.marcTag between '").append(fromTag)
				.append("' and '").append(toTag).append("' ");
		return getCorrelatedList(session, BibliographicNoteType.class, true, sqlFilter.toString(), locale);
	}
	/**
	 * Generic method that gets first correlation using filter sql as criterion.
	 *
	 * @param session the session of hibernate
	 * @param clazz the mapped class in the hibernate configuration.
	 * @param alphabeticOrder true if alphabetical order
	 * @param sqlFilter the sql filter added to query
	 * @param locale the Locale, used here as a filter criterion.
	 * @return the matching code-table entries as code/label pairs; empty on failure.
	 * @throws DataAccessException in case of SQL exception.
	 */
	public List<Avp<String>> getCorrelatedList(final Session session,
                                               final Class clazz,
                                               final boolean alphabeticOrder,
                                               final String sqlFilter,
                                               final Locale locale) throws DataAccessException
	{
		// Order clause is chosen per call rather than from the instance default.
		final String sqlOrder = ( alphabeticOrder ? ALPHABETICAL_ORDER : SEQUENCE_ORDER );
		try {
			// The caller-supplied fragment is appended verbatim: it must start with
			// " and ..." and reference the "ct"/"bc" aliases used here.
			final List<CodeTable> codeTables = session.find("select distinct ct from "
					+ clazz.getName()
					+ " as ct, BibliographicCorrelation as bc "
					+ " where ct.obsoleteIndicator = '0' "
					+ " and ct.language = ? "
					+ sqlFilter
					+ sqlOrder,
					new Object[] { locale.getISO3Language()},
					new Type[] { Hibernate.STRING });
			// Trim the stored code and pair it with its display text.
			return codeTables
					.stream()
					.map(codeTable -> (Avp<String>) new Avp(codeTable.getCodeString().trim(), codeTable.getLongText()))
					.collect(toList());
		} catch (final HibernateException exception) {
			// Failures are logged and reported to callers as an empty list.
			logger.error(MessageCatalog._00010_DATA_ACCESS_FAILURE, exception);
			return Collections.emptyList();
		}
	}
} | src/main/java/org/folio/cataloging/dao/DAOBibliographicCorrelation.java | package org.folio.cataloging.dao;
import net.sf.hibernate.Hibernate;
import net.sf.hibernate.HibernateException;
import net.sf.hibernate.Session;
import net.sf.hibernate.type.Type;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.folio.cataloging.Global;
import org.folio.cataloging.business.codetable.Avp;
import org.folio.cataloging.business.common.DataAccessException;
import org.folio.cataloging.business.common.Defaults;
import org.folio.cataloging.dao.persistence.*;
import org.folio.cataloging.log.MessageCatalog;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.*;
import static java.util.stream.Collectors.toList;
/**
* Manages access to table S_BIB_MARC_IND_DB_CRLTN -- the correlation between AMICUS
* database encoding and MARC21 encoding
* @author paulm
* @author natasciab
* @version $Revision: 1.12 $, $Date: 2007/02/13 09:17:59 $
*/
@SuppressWarnings("unchecked")
public class DAOBibliographicCorrelation extends DAOCorrelation {
private static final Log logger =
LogFactory.getLog(DAOBibliographicCorrelation.class);
private static final String ALPHABETICAL_ORDER = " order by ct.longText ";
private static final String SEQUENCE_ORDER = " order by ct.sequence ";
private String defaultListOrder = Defaults.getBoolean("labels.alphabetical.order", true)?ALPHABETICAL_ORDER:SEQUENCE_ORDER;
/**
* Returns the BibliographicCorrelation from BibliographicCorrelationKey
* @param bibliographicCorrelationKey -- the database bibliographicCorrelationKey
* @return a BibliographicCorrelation object containing or null when none found
*
*/
public Correlation getBibliographicCorrelation(CorrelationKey bibliographicCorrelationKey)
throws DataAccessException {
return getBibliographicCorrelation(
bibliographicCorrelationKey.getMarcTag(),
bibliographicCorrelationKey.getMarcFirstIndicator(),
bibliographicCorrelationKey.getMarcSecondIndicator(),
bibliographicCorrelationKey.getMarcTagCategoryCode());
}
/**
* Returns the BibliographicCorrelation based on MARC encoding and category code
* @param tag -- marc tag
* @param firstIndicator -- marc first indicator
* @param secondIndicator -- marc second indicator
* @param categoryCode -- category code
* @return a BibliographicCorrelation object or null when none found
*
*/
public Correlation getBibliographicCorrelation(
String tag,
char firstIndicator,
char secondIndicator,
short categoryCode)
throws DataAccessException {
List l=null;
if(categoryCode!=0){
l =
find(
"from BibliographicCorrelation as bc "
+ "where bc.key.marcTag = ? and "
+ "(bc.key.marcFirstIndicator = ? or bc.key.marcFirstIndicator='S' )and "
+ "bc.key.marcFirstIndicator <> '@' and "
+ "(bc.key.marcSecondIndicator = ? or bc.key.marcSecondIndicator='S')and "
+ "bc.key.marcSecondIndicator <> '@' and "
+ "bc.key.marcTagCategoryCode = ?",
new Object[] {
new String(tag),
new Character(firstIndicator),
new Character(secondIndicator),
new Short(categoryCode)},
new Type[] {
Hibernate.STRING,
Hibernate.CHARACTER,
Hibernate.CHARACTER,
Hibernate.SHORT });
}
else {
l =
find(
"from BibliographicCorrelation as bc "
+ "where bc.key.marcTag = ? and "
+ "(bc.key.marcFirstIndicator = ? or bc.key.marcFirstIndicator='S' )and "
+ "bc.key.marcFirstIndicator <> '@' and "
+ "(bc.key.marcSecondIndicator = ? or bc.key.marcSecondIndicator='S')and "
+ "bc.key.marcSecondIndicator <> '@' order by bc.key.marcTagCategoryCode asc",
new Object[] {
new String(tag),
new Character(firstIndicator),
new Character(secondIndicator)},
new Type[] {
Hibernate.STRING,
Hibernate.CHARACTER,
Hibernate.CHARACTER});
}
//if (l.size() == 1) {
if (l.size() >=1) {
return (Correlation) l.get(0);
}
else
return null;
}
/**
* Returns the MARC encoding based on the input database encodings
* @param category -- the database category (1-name, etc...)
* @param value1 -- the first database code
* @param value2 -- the second database code
* @param value3 -- the third database code
* @return a BibliographicCorrelationKey object containing
* the MARC encoding (tag and indicators) or null when none found
*
*/
public CorrelationKey getMarcEncoding(
short category,
short value1,
short value2,
short value3)
throws DataAccessException {
List l =
find(
"from BibliographicCorrelation as bc "
+ "where bc.key.marcTagCategoryCode = ? and "
+ "bc.databaseFirstValue = ? and "
+ "bc.databaseSecondValue = ? and "
+ "bc.databaseThirdValue = ?",
new Object[] {
new Short(category),
new Short(value1),
new Short(value2),
new Short(value3)},
new Type[] {
Hibernate.SHORT,
Hibernate.SHORT,
Hibernate.SHORT,
Hibernate.SHORT });
if (l.size() == 1) {
return ((Correlation) l.get(0)).getKey();
} else {
return null;
}
}
@Deprecated
public List getSecondCorrelationList(short category,short value1,Class codeTable) throws DataAccessException
{
return find("Select distinct ct from "
+ codeTable.getName()
+ " as ct, BibliographicCorrelation as bc "
+ " where bc.key.marcTagCategoryCode = ? and "
+ " bc.key.marcFirstIndicator <> '@' and "
+ " bc.key.marcSecondIndicator <> '@' and "
+ " bc.databaseFirstValue = ? and "
+ " bc.databaseSecondValue = ct.code and "
+ "ct.obsoleteIndicator = '0' order by ct.sequence ",
new Object[] { new Short(category), new Short(value1)},
new Type[] { Hibernate.SHORT, Hibernate.SHORT});
}
/**
* Get second correlation values by marc category and first correlation.
*
* @param session the hibernate session
* @param category the marc category used as filter criterion
* @param value1 the first correlation value used as filter criterion
* @param classTable the mapped class in the hibernate configuration
* @param locale the locale associated to language used as filter criterion
* @return
* @throws DataAccessException
*/
public List<Avp<String>> getSecondCorrelationList(final Session session,
final short category,
final short value1,
final Class classTable,
final Locale locale) throws DataAccessException
{
try {
final List<CodeTable> codeTables = session.find("Select distinct ct from "
+ classTable.getName()
+ " as ct, BibliographicCorrelation as bc "
+ " where bc.key.marcTagCategoryCode = ? and "
+ " bc.key.marcFirstIndicator <> '@' and "
+ " bc.key.marcSecondIndicator <> '@' and "
+ " bc.databaseFirstValue = ? and "
+ " bc.databaseSecondValue = ct.code and "
+ " ct.obsoleteIndicator = '0' and "
+ " ct.language = ? "
+ " order by ct.sequence ",
new Object[]{new Short(category), new Short(value1), locale.getISO3Language()},
new Type[]{Hibernate.SHORT, Hibernate.SHORT, Hibernate.STRING});
return codeTables
.stream()
.map(codeTable -> (Avp<String>) new Avp(codeTable.getCodeString().trim(), codeTable.getLongText()))
.collect(toList());
} catch (final HibernateException exception) {
logger.error(MessageCatalog._00010_DATA_ACCESS_FAILURE, exception);
return Collections.emptyList();
}
}
public static final String SELECT_CLASSIFICATION_TAG_LABELS =
"SELECT AA.TBL_SEQ_NBR, AA.TYP_VLU_CDE, aa.FNCTN_VLU_CDE, AA.TBL_VLU_OBSLT_IND, AA.SHORT_STRING_TEXT, AA.STRING_TEXT, AA.LANGID"
+ " FROM "
+ System.getProperty(Global.SCHEMA_SUITE_KEY) +".S_BIB_CLSTN_TYP_FNCTN AA,"
+ System.getProperty(Global.SCHEMA_SUITE_KEY) +".S_BIB_MARC_IND_DB_CRLTN BC"
+ " WHERE BC.MARC_TAG_CAT_CDE = ?"
+ " AND BC.MARC_TAG_1ST_IND <> '@'"
+ " AND BC.MARC_TAG_2ND_IND <> '@'"
+ " AND BC.MARC_TAG_IND_VLU_1_CDE = ?"
+ " AND BC.MARC_TAG_IND_VLU_2_CDE = AA.FNCTN_VLU_CDE"
+ " AND BC.MARC_TAG_IND_VLU_1_CDE = AA.TYP_VLU_CDE"
+ " AND AA.TBL_VLU_OBSLT_IND = 0"
+ " ORDER BY AA.TBL_SEQ_NBR";
public List<ClassificationFunction> getClassificationTagLabels(short category, short value1) throws DataAccessException
{
Connection connection = null;
PreparedStatement stmt = null;
ResultSet rs = null;
Session session = currentSession();
List<ClassificationFunction> list = new ArrayList<ClassificationFunction>();
ClassificationFunction item = null;
try {
connection = session.connection();
stmt = connection.prepareStatement(SELECT_CLASSIFICATION_TAG_LABELS);
stmt.setInt(1, category);
stmt.setInt(2, value1);
rs = stmt.executeQuery();
while (rs.next()) {
item = new ClassificationFunction();
item.setSequence(rs.getInt("TBL_SEQ_NBR"));
// item.setCode(rs.getShort("TYP_VLU_CDE"));
item.setCode(rs.getShort("FNCTN_VLU_CDE"));
item.setObsoleteIndicator(rs.getString("TBL_VLU_OBSLT_IND").equals("1"));
item.setLanguage(rs.getString("LANGID"));
item.setShortText(rs.getString("SHORT_STRING_TEXT"));
item.setLongText(rs.getString("STRING_TEXT"));
list.add(item);
}
} catch (HibernateException e) {
logAndWrap(e);
} catch (SQLException e) {
logAndWrap(e);
} finally {
try{ rs.close(); } catch(Exception ex){}
try{ stmt.close(); } catch(Exception ex){}
}
return list;
}
@Deprecated
public List getThirdCorrelationList(
short category,
short value1,
short value2,
Class codeTable)
throws DataAccessException {
return find(
" select distinct ct from "
+ codeTable.getName()
+ " as ct, BibliographicCorrelation as bc "
+ " where bc.key.marcTagCategoryCode = ? and "
+ " bc.key.marcFirstIndicator <> '@' and "
+ " bc.key.marcSecondIndicator <> '@' and "
+ " bc.databaseFirstValue = ? and "
+ " bc.databaseSecondValue = ? and "
+ " bc.databaseThirdValue = ct.code and "
+ " ct.obsoleteIndicator = 0 order by ct.sequence ",
new Object[] {
new Short(category),
new Short(value1),
new Short(value2)},
new Type[] { Hibernate.SHORT, Hibernate.SHORT, Hibernate.SHORT });
}
/**
* Gets third correlation values by marc category, first and second correlations.
*
* @param session the hibernate session
* @param category the marc category used as filter criterion
* @param value1 the first correlation value used as filter criterion
* @param value2 the second correlation value used as filter criterion
* @param classTable the mapped class in the hibernate configuration
* @param locale the locale associated to language used as filter criterion
* @return
* @throws DataAccessException
*/
public List<Avp<String>> getThirdCorrelationList(final Session session,
final short category,
final short value1,
final short value2,
final Class classTable,
final Locale locale) throws DataAccessException {
try{
final List<CodeTable> codeTables = session.find(" select distinct ct from "
+ classTable.getName()
+ " as ct, BibliographicCorrelation as bc "
+ " where bc.key.marcTagCategoryCode = ? and "
+ " bc.key.marcFirstIndicator <> '@' and "
+ " bc.key.marcSecondIndicator <> '@' and "
+ " bc.databaseFirstValue = ? and "
+ " bc.databaseSecondValue = ? and "
+ " bc.databaseThirdValue = ct.code and "
+ " ct.obsoleteIndicator = '0' and "
+ " ct.language = ? "
+ " order by ct.sequence ",
new Object[] {new Short(category), new Short(value1), new Short(value2), locale.getISO3Language()},
new Type[] { Hibernate.SHORT, Hibernate.SHORT, Hibernate.SHORT, Hibernate.STRING });
return codeTables
.stream()
.map(codeTable -> (Avp<String>) new Avp(codeTable.getCodeString().trim(), codeTable.getLongText()))
.collect(toList());
} catch (final HibernateException exception) {
logger.error(MessageCatalog._00010_DATA_ACCESS_FAILURE, exception);
return Collections.emptyList();
}
}
public short getFirstAllowedValue2(
short category,
short value1,
short value3)
throws DataAccessException {
List l = find(
" from BibliographicCorrelation as bc "
+ " where bc.key.marcTagCategoryCode = ? and "
+ " bc.key.marcFirstIndicator <> '@' and "
+ " bc.key.marcSecondIndicator <> '@' and "
+ " bc.databaseFirstValue = ? and "
+ " bc.databaseThirdValue = ? ",
new Object[] {
new Short(category),
new Short(value1),
new Short(value3)},
new Type[] { Hibernate.SHORT, Hibernate.SHORT, Hibernate.SHORT });
if (l.size() > 0) {
return ((BibliographicCorrelation)l.get(0)).getDatabaseSecondValue();
}
else {
return -1;
}
}
public String getClassificationIndexByShelfType(short shelfType) throws DataAccessException
{
List l = find("from BibliographicCorrelation as bc "
+ " where bc.key.marcTagCategoryCode = 13 and "
+ " bc.databaseFirstValue = ? ",
new Object[] { new Short(shelfType)},
new Type[] { Hibernate.SHORT });
if (l.size() == 1) {
String s = ((Correlation) l.get(0)).getSearchIndexTypeCode();
return new DAOIndexList().getIndexByEnglishAbreviation(s);
} else {
return null;
}
}
/*modifica barbara 13/04/2007 PRN 127 - nuova intestazione su lista vuota default maschera inserimento intestazione nome*/
public CorrelationKey getMarcTagCodeBySelectedIndex(String selectedIndex)
throws DataAccessException {
if(selectedIndex == null)
{
return null;
}
List l =
find(
"from BibliographicCorrelation as bc "
+ " where bc.searchIndexTypeCode = ?"
+" or bc.searchIndexTypeCode = ?" ,
new Object[] { new String(selectedIndex.substring(0, 2)),
new String(selectedIndex.substring(0, 2).toLowerCase())},
new Type[] { Hibernate.STRING, Hibernate.STRING});
if(l.size()>0)
return ((Correlation) l.get(0)).getKey();
else
return null;
}
public CorrelationKey getMarcTagCodeBySelectedIndex(String selectedIndex, String tagNumber) throws DataAccessException
{
List l =
find(
"from BibliographicCorrelation as bc "
+ " where (bc.searchIndexTypeCode = ?"
+" or bc.searchIndexTypeCode = ?)"
+" and bc.key.marcTag = ?",
new Object[] { new String(selectedIndex.substring(0, 2)),
new String(selectedIndex.substring(0, 2).toLowerCase()),
new String(tagNumber)},
new Type[] { Hibernate.STRING, Hibernate.STRING, Hibernate.STRING});
if(l.size()>0)
return ((Correlation) l.get(0)).getKey();
else
return null;
}
/**
* Return the label for the tag to display
* @return MarcTagDisplay
* @throws DataAccessException
*/
public List<LabelTagDisplay> getMarcTagDisplay(String language){
List l = new ArrayList<MarcTagDisplay>();
try {
l = find(
"from MarcTagDisplay as bc "
+ "where bc.language = ? ",
new Object[] {
new String(language)
},
new Type[] {
Hibernate.STRING});
} catch (DataAccessException e) {
logger.debug("DataAccessException for list of MarcTagDisplay");
}
if (l.size()> 0) {
return l;
}
return l;
}
/* Bug 4775 inizio */
public List<RdaMarcTagDisplay> getRdaMarcTagDisplay(String language)
{
List l = new ArrayList<MarcTagDisplay>();
try {
l = find(
"from RdaMarcTagDisplay as bc where bc.language = ? ",
new Object[] {new String(language)}, new Type[] {Hibernate.STRING});
} catch (DataAccessException e) {
logger.debug("DataAccessException for list of RdaMarcTagDisplay");
}
if (l.size()> 0) {
return l;
}
return l;
}
/* Bug 4775 fine */
/**
* Label per authority
* @param language
* @return
*/
public List<LabelTagDisplay> getAutorityMarcTagDisplay(String language)
{
List l = new ArrayList<AutMarcTagDisplay>();
try {
l = find("from AutMarcTagDisplay as bc where bc.language = ? ",
new Object[] {new String(language)},new Type[] {Hibernate.STRING});
} catch (DataAccessException e) {
logger.debug("DataAccessException for list of MarcTagDisplay");
}
if (l.size()> 0) {
return l;
}
return l;
}
public List<BibliographicCorrelation> getFirstAllowedValue2List(short category, short value1, short value3) throws DataAccessException
{
List l = find(" from BibliographicCorrelation as bc "
+ " where bc.key.marcTagCategoryCode = ? and "
+ " bc.key.marcFirstIndicator <> '@' and "
+ " bc.key.marcSecondIndicator <> '@' and "
+ " bc.databaseFirstValue = ? and "
+ " bc.databaseThirdValue = ? ",
new Object[] {
new Short(category),
new Short(value1),
new Short(value3)},
new Type[] { Hibernate.SHORT, Hibernate.SHORT, Hibernate.SHORT });
return l;
}
/**
* Checking validity of the second correlation
* @param l
* @param value2
* @return
* @throws DataAccessException
*/
public boolean isPresentSecondCorrelation(List l, short value2) throws DataAccessException
{
boolean isPresent = false;
if (l.size() > 0) {
Iterator<BibliographicCorrelation> ite = l.iterator();
while (ite.hasNext()) {
short secondCorr = ite.next().getDatabaseSecondValue();
if (secondCorr == value2) {
isPresent = true;
}
}
}
return isPresent;
}
/**
* Loads first correlation list using note group code for tag range values.
*
* @param session the session of hibernate
* @param noteGroupTypeCode the note group code used as filter criterion.
* @param locale the Locale, used here as a filter criterion.
* @return
* @throws DataAccessException
*/
public List<Avp<String>> getFirstCorrelationByNoteGroupCode(final Session session, final String noteGroupTypeCode, final Locale locale) throws DataAccessException {
final String fromTag = (noteGroupTypeCode.length() == 2 ? "0"+noteGroupTypeCode : noteGroupTypeCode);
final String toTag = String.valueOf(Short.parseShort(noteGroupTypeCode) + 99);
final StringBuilder sqlFilter = new StringBuilder(" and bc.key.marcSecondIndicator <> '@' ")
.append(" and bc.databaseFirstValue = ct.code ")
.append(" and bc.key.marcTagCategoryCode = 7 ")
.append(" and bc.key.marcTag between '").append(fromTag)
.append("' and '").append(toTag).append("' ");
return getCorrelatedList(session, BibliographicNoteType.class, true, sqlFilter.toString(), locale);
}
/**
* Generic method that gets first correlation using filter sql as criterion.
*
* @param session the session of hibernate
* @param clazz the mapped class in the hibernate configuration.
* @param alphabeticOrder true if alphabetical order
* @param sqlFilter the sql filter added to query
* @param locale the Locale, used here as a filter criterion.
* @return
* @throws DataAccessException in case of SQL exception.
*/
public List<Avp<String>> getCorrelatedList(final Session session,
final Class clazz,
final boolean alphabeticOrder,
final String sqlFilter,
final Locale locale) throws DataAccessException
{
final String sqlOrder = ( alphabeticOrder ? ALPHABETICAL_ORDER : SEQUENCE_ORDER );
try {
final List<CodeTable> codeTables = session.find("select distinct ct from "
+ clazz.getName()
+ " as ct, BibliographicCorrelation as bc "
+ " where ct.obsoleteIndicator = '0' "
+ " and ct.language = ? "
+ sqlFilter
+ sqlOrder,
new Object[] { locale.getISO3Language()},
new Type[] { Hibernate.STRING });
return codeTables
.stream()
.map(codeTable -> (Avp<String>) new Avp(codeTable.getCodeString().trim(), codeTable.getLongText()))
.collect(toList());
} catch (final HibernateException exception) {
logger.error(MessageCatalog._00010_DATA_ACCESS_FAILURE, exception);
return Collections.emptyList();
}
}
} | MODCAT-2:Error for type
| src/main/java/org/folio/cataloging/dao/DAOBibliographicCorrelation.java | MODCAT-2:Error for type | <ide><path>rc/main/java/org/folio/cataloging/dao/DAOBibliographicCorrelation.java
<ide> item.setSequence(rs.getInt("TBL_SEQ_NBR"));
<ide> // item.setCode(rs.getShort("TYP_VLU_CDE"));
<ide> item.setCode(rs.getShort("FNCTN_VLU_CDE"));
<del> item.setObsoleteIndicator(rs.getString("TBL_VLU_OBSLT_IND").equals("1"));
<add> item.setObsoleteIndicator((rs.getString("TBL_VLU_OBSLT_IND")).charAt(0));
<ide> item.setLanguage(rs.getString("LANGID"));
<ide> item.setShortText(rs.getString("SHORT_STRING_TEXT"));
<ide> item.setLongText(rs.getString("STRING_TEXT")); |
|
Java | apache-2.0 | ed0b8565729388c36904245b6b6f6a4aae4b81db | 0 | Ks89/AndroidDataBinding_Examples | package it.stefanocappa.databindingexample;
import android.databinding.DataBindingUtil;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import java.util.Random;
import butterknife.Bind;
import butterknife.ButterKnife;
import butterknife.OnClick;
import it.stefanocappa.databindingexample.databinding.Tab2FragmentBinding;
/**
* A placeholder fragment containing a simple view.
*/
public class Tab2Fragment extends Fragment {
@Bind(R.id.randomButton)
Button randomButton;
private User2 user;
public static Tab2Fragment newInstance() {
return new Tab2Fragment();
}
public Tab2Fragment() {
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
View view = inflater.inflate(R.layout.tab2_fragment, container, false);
ButterKnife.bind(this, view);
//FragmentMainBinding is the layout name without "-" and in Pascal Style. With at the end the word "Binding"
Tab2FragmentBinding binding = DataBindingUtil.bind(view);
user = new User2("FirstName", "LastName", false);
user.professor.set(new Random().nextBoolean());
binding.setUser(user);
return view;
}
@Override
public void onDestroyView() {
super.onDestroyView();
ButterKnife.unbind(this);
}
@OnClick(R.id.randomButton)
public void onClickRandom(View v) {
boolean newBool = new Random().nextBoolean();
Log.d("Tab2", "newBool is " + newBool);
user.professor.set(newBool);
//here it isn't necessary to bind the updated object (binding.setUser(user)), because i'm using an
//ObservableBoolean in the model
}
}
| app/src/main/java/it/stefanocappa/databindingexample/Tab2Fragment.java | package it.stefanocappa.databindingexample;
import android.databinding.DataBindingUtil;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import java.util.Random;
import butterknife.Bind;
import butterknife.ButterKnife;
import butterknife.OnClick;
import it.stefanocappa.databindingexample.databinding.Tab2FragmentBinding;
/**
* A placeholder fragment containing a simple view.
*/
public class Tab2Fragment extends Fragment {
@Bind(R.id.randomButton)
Button randomButton;
private User2 user;
private Tab2FragmentBinding binding;
public static Tab2Fragment newInstance() {
return new Tab2Fragment();
}
public Tab2Fragment() {
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
View view = inflater.inflate(R.layout.tab2_fragment, container, false);
ButterKnife.bind(this, view);
//FragmentMainBinding is the layout name without "-" and in Pascal Style. With at the end the word "Binding"
binding = DataBindingUtil.bind(view);
user = new User2("FirstName", "LastName", false);
user.professor.set(new Random().nextBoolean());
binding.setUser(user);
return view;
}
@Override
public void onDestroyView() {
super.onDestroyView();
ButterKnife.unbind(this);
}
@OnClick(R.id.randomButton)
public void onClickRandom(View v) {
boolean newBool = new Random().nextBoolean();
Log.d("Tab2", "newBool is " + newBool);
user.professor.set(newBool);
//here it isn't necessary to bind the updated object (binding.setUser(user)), because i'm using an
//ObservableBoolean in the model
}
}
| removed useless private field
| app/src/main/java/it/stefanocappa/databindingexample/Tab2Fragment.java | removed useless private field | <ide><path>pp/src/main/java/it/stefanocappa/databindingexample/Tab2Fragment.java
<ide> Button randomButton;
<ide>
<ide> private User2 user;
<del> private Tab2FragmentBinding binding;
<ide>
<ide> public static Tab2Fragment newInstance() {
<ide> return new Tab2Fragment();
<ide> ButterKnife.bind(this, view);
<ide>
<ide> //FragmentMainBinding is the layout name without "-" and in Pascal Style. With at the end the word "Binding"
<del> binding = DataBindingUtil.bind(view);
<add> Tab2FragmentBinding binding = DataBindingUtil.bind(view);
<ide>
<ide> user = new User2("FirstName", "LastName", false);
<ide> user.professor.set(new Random().nextBoolean()); |
|
Java | mit | error: pathspec 'software/CLO/src/java/ie/ucd/clops/runtime/options/ArgOption.java' did not match any file(s) known to git
| 447c0f71d62e3caab7d59bcc9fab4277375177ef | 1 | GaloisInc/CLOPS,GaloisInc/CLOPS,GaloisInc/CLOPS,GaloisInc/CLOPS,GaloisInc/CLOPS | package ie.ucd.clops.runtime.options;
import java.util.*;
import java.util.regex.*;
import ie.ucd.clops.runtime.parser.ProcessingResult;
/**
* This class represents an argument option.
* An option whose value is the string on which it matched.
*
* @author Mikolas Janota
*
*/
public class ArgOption implements Option<String> {
private Pattern form;
private String val = null;
private final String identifier;
/**
* Construct a new {@code ArgOption}.
* @param regexStr a regular expression {@link java.util.regex}
*/
public ArgOption(String identifier, String regexStr) {
this.identifier = identifier;
this.form = Pattern.compile(regexStr);
}
public String getIdentifier() { return identifier; }
public Option<?> getMatchingOption(String argument) {
return matches(argument) ? this : null;
}
public ProcessingResult process(/*@non_null*/String[] args, int offset) {
String argument = args[offset];
assert getMatchingOption(argument) == this;
this.val = argument;
return new ProcessingResult(1, false, null);
}
public /*@pure*/boolean hasValue() { return val == null; }
public /*@pure*/String getValue() { return val; }
public void unset() { val = null; }
public void set(Object value) throws InvalidOptionValueException {
}
public void setFromString(String value) throws InvalidOptionValueException {
if (!matches(value)) {
String msg = "The given string (\"" + value + "\") does not correspond to the format of this argument";
throw new InvalidOptionValueException(msg);
}
this.val = value;
}
public void addAlias(String alias) {
assert false;
//TODO
}
public Set<String> getAliases() {
assert false;
//TODO
return null;
}
public void setProperty(/*@non_null*/String propertyName, String propertyValue)
throws InvalidOptionPropertyValueException {
throw new InvalidOptionPropertyValueException("");
}
public /*@pure*/boolean acceptsProperty(/*@non_null*/String propertyName) {
return false;
}
/**
* Determines whether the given string matches this argument.
*/
private boolean matches(String s) {
Matcher m = form.matcher(s);
return m.matches();
}
}
| software/CLO/src/java/ie/ucd/clops/runtime/options/ArgOption.java | Adding an ArgOption class.
| software/CLO/src/java/ie/ucd/clops/runtime/options/ArgOption.java | Adding an ArgOption class. | <ide><path>oftware/CLO/src/java/ie/ucd/clops/runtime/options/ArgOption.java
<add>package ie.ucd.clops.runtime.options;
<add>
<add>import java.util.*;
<add>import java.util.regex.*;
<add>
<add>import ie.ucd.clops.runtime.parser.ProcessingResult;
<add>
<add>/**
<add> * This class represents an argument option.
<add> * An option whose value is the string on which it matched.
<add> *
<add> * @author Mikolas Janota
<add> *
<add> */
<add>public class ArgOption implements Option<String> {
<add> private Pattern form;
<add> private String val = null;
<add> private final String identifier;
<add>
<add> /**
<add> * Construct a new {@code ArgOption}.
<add> * @param regexStr a regular expression {@link java.util.regex}
<add> */
<add> public ArgOption(String identifier, String regexStr) {
<add> this.identifier = identifier;
<add> this.form = Pattern.compile(regexStr);
<add> }
<add>
<add> public String getIdentifier() { return identifier; }
<add>
<add> public Option<?> getMatchingOption(String argument) {
<add> return matches(argument) ? this : null;
<add> }
<add>
<add> public ProcessingResult process(/*@non_null*/String[] args, int offset) {
<add> String argument = args[offset];
<add> assert getMatchingOption(argument) == this;
<add> this.val = argument;
<add> return new ProcessingResult(1, false, null);
<add> }
<add>
<add>
<add> public /*@pure*/boolean hasValue() { return val == null; }
<add>
<add> public /*@pure*/String getValue() { return val; }
<add>
<add> public void unset() { val = null; }
<add>
<add> public void set(Object value) throws InvalidOptionValueException {
<add> }
<add>
<add> public void setFromString(String value) throws InvalidOptionValueException {
<add> if (!matches(value)) {
<add> String msg = "The given string (\"" + value + "\") does not correspond to the format of this argument";
<add> throw new InvalidOptionValueException(msg);
<add> }
<add> this.val = value;
<add> }
<add>
<add> public void addAlias(String alias) {
<add> assert false;
<add> //TODO
<add> }
<add>
<add> public Set<String> getAliases() {
<add> assert false;
<add> //TODO
<add> return null;
<add> }
<add>
<add> public void setProperty(/*@non_null*/String propertyName, String propertyValue)
<add> throws InvalidOptionPropertyValueException {
<add> throw new InvalidOptionPropertyValueException("");
<add> }
<add>
<add> public /*@pure*/boolean acceptsProperty(/*@non_null*/String propertyName) {
<add> return false;
<add> }
<add>
<add>
<add> /**
<add> * Determines whether the given string matches this argument.
<add> */
<add> private boolean matches(String s) {
<add> Matcher m = form.matcher(s);
<add> return m.matches();
<add> }
<add>
<add>
<add>} |
|
Java | apache-2.0 | 328bdbcd980d025bfbbc2bbf678bfb4067ff1cd0 | 0 | profesorfalken/jHardware | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jutils.jhardware.info.memory.unix;
import java.util.HashMap;
import java.util.Map;
import java.util.stream.Stream;
import org.jutils.jhardware.info.memory.AbstractMemoryInfo;
import org.jutils.jhardware.util.HardwareInfoUtils;
/**
* Information related to Memory
*
* @author Javier Garcia Alonso
*/
public final class UnixMemoryInfo extends AbstractMemoryInfo {
private static final String MEMINFO = "/proc/meminfo";
private static String getMemoryData() {
final StringBuilder buffer = new StringBuilder();
try (Stream<String> streamMemoryInfo = HardwareInfoUtils.readFile(MEMINFO)) {
streamMemoryInfo.forEach(line -> buffer.append(line).append("\r\n"));
}
return buffer.toString();
}
@Override
protected Map<String, String> parseInfo() {
Map<String, String> memoryDataMap = new HashMap<>();
String[] dataStringLines = getMemoryData().split("\\r?\\n");
for (final String dataLine : dataStringLines) {
String[] dataStringInfo = dataLine.split(":");
memoryDataMap.put(dataStringInfo[0].trim(), (dataStringInfo.length == 2) ? dataStringInfo[1].trim() : "");
}
return memoryDataMap;
}
}
| src/main/java/org/jutils/jhardware/info/memory/unix/UnixMemoryInfo.java | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jutils.jhardware.info.memory.unix;
import java.util.HashMap;
import java.util.Map;
import java.util.stream.Stream;
import org.jutils.jhardware.info.memory.AbstractMemoryInfo;
import org.jutils.jhardware.util.HardwareInfoUtils;
/**
* Information related to Memory
*
* @author Javier Garcia Alonso
*/
public final class UnixMemoryInfo extends AbstractMemoryInfo {
private static final String MEMINFO = "/proc/meminfo";
private static String getMemoryData(){
Stream<String> streamMemoryInfo = HardwareInfoUtils.readFile(MEMINFO);
final StringBuilder buffer = new StringBuilder();
streamMemoryInfo.forEach((String line) ->
buffer.append(line).append("\r\n")
);
return buffer.toString();
}
@Override
protected Map<String, String> parseInfo() {
Map<String, String> memoryDataMap = new HashMap<>();
String[] dataStringLines = getMemoryData().split("\\r?\\n");
for (final String dataLine : dataStringLines) {
String[] dataStringInfo = dataLine.split(":");
memoryDataMap.put(dataStringInfo[0].trim(), (dataStringInfo.length == 2) ? dataStringInfo[1].trim() : "");
}
return memoryDataMap;
}
}
| Close missing meminfo stream (Related #12)
| src/main/java/org/jutils/jhardware/info/memory/unix/UnixMemoryInfo.java | Close missing meminfo stream (Related #12) | <ide><path>rc/main/java/org/jutils/jhardware/info/memory/unix/UnixMemoryInfo.java
<ide> import java.util.HashMap;
<ide> import java.util.Map;
<ide> import java.util.stream.Stream;
<add>
<ide> import org.jutils.jhardware.info.memory.AbstractMemoryInfo;
<ide> import org.jutils.jhardware.util.HardwareInfoUtils;
<ide>
<ide> * @author Javier Garcia Alonso
<ide> */
<ide> public final class UnixMemoryInfo extends AbstractMemoryInfo {
<add>
<ide> private static final String MEMINFO = "/proc/meminfo";
<del>
<del> private static String getMemoryData(){
<del> Stream<String> streamMemoryInfo = HardwareInfoUtils.readFile(MEMINFO);
<add>
<add> private static String getMemoryData() {
<ide> final StringBuilder buffer = new StringBuilder();
<del>
<del> streamMemoryInfo.forEach((String line) ->
<del> buffer.append(line).append("\r\n")
<del> );
<del>
<add> try (Stream<String> streamMemoryInfo = HardwareInfoUtils.readFile(MEMINFO)) {
<add> streamMemoryInfo.forEach(line -> buffer.append(line).append("\r\n"));
<add> }
<add>
<ide> return buffer.toString();
<ide> }
<ide> |
|
Java | apache-2.0 | aa2df403d3d8976a777b427a70213a39c8448f6c | 0 | superkonduktr/closure-compiler,superkonduktr/closure-compiler,GerHobbelt/closure-compiler,jimmytuc/closure-compiler,shantanusharma/closure-compiler,mprobst/closure-compiler,tdelmas/closure-compiler,mediogre/closure-compiler,rockyzh/closure-compiler,GerHobbelt/closure-compiler,mbrukman/closure-compiler,TomK/closure-compiler,selkhateeb/closure-compiler,selkhateeb/closure-compiler,MatrixFrog/closure-compiler,jimmytuc/closure-compiler,nawawi/closure-compiler,anomaly/closure-compiler,mprobst/closure-compiler,neraliu/closure-compiler,mcanthony/closure-compiler,tiobe/closure-compiler,nawawi/closure-compiler,jaen/closure-compiler,LorenzoDV/closure-compiler,Yannic/closure-compiler,neraliu/closure-compiler,ralic/closure-compiler,brad4d/closure-compiler,GerHobbelt/closure-compiler,google/closure-compiler,savelichalex/closure-compiler,vobruba-martin/closure-compiler,vobruba-martin/closure-compiler,ralic/closure-compiler,jaen/closure-compiler,monetate/closure-compiler,BladeRunnerJS/closure-compiler,Pimm/closure-compiler,monetate/closure-compiler,ochafik/closure-compiler,nawawi/closure-compiler,TomK/closure-compiler,mweissbacher/closure-compiler,superkonduktr/closure-compiler,BladeRunnerJS/closure-compiler,savelichalex/closure-compiler,mweissbacher/closure-compiler,MatrixFrog/closure-compiler,Pimm/closure-compiler,superkonduktr/closure-compiler,mbrukman/closure-compiler,savelichalex/closure-compiler,monetate/closure-compiler,shantanusharma/closure-compiler,brad4d/closure-compiler,mcanthony/closure-compiler,Yannic/closure-compiler,tiobe/closure-compiler,jaen/closure-compiler,dlochrie/closure-compiler,tsdl2013/closure-compiler,mediogre/closure-compiler,visokio/closure-compiler,mweissbacher/closure-compiler,visokio/closure-compiler,redforks/closure-compiler,mprobst/closure-compiler,tiobe/closure-compiler,mprobst/closure-compiler,dlochrie/closure-compiler,anomaly/closure-compiler,Dominator008/closure-compiler,redforks
/closure-compiler,mweissbacher/closure-compiler,Dominator008/closure-compiler,ralic/closure-compiler,ChadKillingsworth/closure-compiler,tsdl2013/closure-compiler,mcanthony/closure-compiler,neraliu/closure-compiler,google/closure-compiler,mediogre/closure-compiler,MatrixFrog/closure-compiler,rockyzh/closure-compiler,nawawi/closure-compiler,TomK/closure-compiler,mbrukman/closure-compiler,lgeorgieff/closure-compiler,vobruba-martin/closure-compiler,Dominator008/closure-compiler,selkhateeb/closure-compiler,ochafik/closure-compiler,mneise/closure-compiler,anomaly/closure-compiler,lgeorgieff/closure-compiler,brad4d/closure-compiler,mneise/closure-compiler,dlochrie/closure-compiler,Yannic/closure-compiler,tdelmas/closure-compiler,Yannic/closure-compiler,shantanusharma/closure-compiler,mneise/closure-compiler,tiobe/closure-compiler,tdelmas/closure-compiler,ChadKillingsworth/closure-compiler,LorenzoDV/closure-compiler,jimmytuc/closure-compiler,Pimm/closure-compiler,lgeorgieff/closure-compiler,redforks/closure-compiler,google/closure-compiler,tsdl2013/closure-compiler,ChadKillingsworth/closure-compiler,GerHobbelt/closure-compiler,rintaro/closure-compiler,ChadKillingsworth/closure-compiler,lgeorgieff/closure-compiler,anomaly/closure-compiler,tdelmas/closure-compiler,rintaro/closure-compiler,vobruba-martin/closure-compiler,shantanusharma/closure-compiler,ochafik/closure-compiler,Medium/closure-compiler,rintaro/closure-compiler,BladeRunnerJS/closure-compiler,MatrixFrog/closure-compiler,LorenzoDV/closure-compiler,rockyzh/closure-compiler,google/closure-compiler,Medium/closure-compiler,monetate/closure-compiler,visokio/closure-compiler,Medium/closure-compiler | /*
* Copyright 2004 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import com.google.common.base.Preconditions;
import com.google.javascript.jscomp.NodeTraversal.AbstractPostOrderCallback;
import com.google.javascript.jscomp.SyntacticScopeCreator.RedeclarationHandler;
import com.google.javascript.rhino.IR;
import com.google.javascript.rhino.JSDocInfo;
import com.google.javascript.rhino.JSDocInfoBuilder;
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.Token;
import java.util.HashSet;
import java.util.Set;
/**
* Checks that all variables are declared, that file-private variables are
* accessed only in the file that declares them, and that any var references
* that cross module boundaries respect declared module dependencies.
*
*/
class VarCheck extends AbstractPostOrderCallback implements
HotSwapCompilerPass {
static final DiagnosticType UNDEFINED_VAR_ERROR = DiagnosticType.error(
"JSC_UNDEFINED_VARIABLE",
"variable {0} is undeclared");
// Cross-module reference diagnostics.
static final DiagnosticType VIOLATED_MODULE_DEP_ERROR = DiagnosticType.error(
    "JSC_VIOLATED_MODULE_DEPENDENCY",
    "module {0} cannot reference {2}, defined in " +
    "module {1}, since {1} loads after {0}");

static final DiagnosticType MISSING_MODULE_DEP_ERROR = DiagnosticType.warning(
    "JSC_MISSING_MODULE_DEPENDENCY",
    "missing module dependency; module {0} should depend " +
    "on module {1} because it references {2}");

// Disabled by default; reported for local-scope references between
// unrelated modules (see visit()).
static final DiagnosticType STRICT_MODULE_DEP_ERROR = DiagnosticType.disabled(
    "JSC_STRICT_MODULE_DEPENDENCY",
    // The newline below causes the JS compiler not to complain when the
    // referenced module's name changes because, for example, it's a
    // synthetic module.
    "cannot reference {2} because of a missing module dependency\n"
    + "defined in module {1}, referenced from module {0}");

// Externs diagnostics (reported by NameRefInExternsCheck).
static final DiagnosticType NAME_REFERENCE_IN_EXTERNS_ERROR =
    DiagnosticType.warning(
        "JSC_NAME_REFERENCE_IN_EXTERNS",
        "accessing name {0} in externs has no effect. " +
        "Perhaps you forgot to add a var keyword?");

static final DiagnosticType UNDEFINED_EXTERN_VAR_ERROR =
    DiagnosticType.warning(
        "JSC_UNDEFINED_EXTERN_VAR_ERROR",
        "name {0} is not defined in the externs.");

// Redeclaration diagnostics (reported by RedeclarationCheckHandler).
static final DiagnosticType VAR_MULTIPLY_DECLARED_ERROR =
    DiagnosticType.error(
        "JSC_VAR_MULTIPLY_DECLARED_ERROR",
        "Variable {0} first declared in {1}");

static final DiagnosticType VAR_ARGUMENTS_SHADOWED_ERROR =
    DiagnosticType.error(
        "JSC_VAR_ARGUMENTS_SHADOWED_ERROR",
        "Shadowing \"arguments\" is not allowed");

static final DiagnosticType LET_CONST_MULTIPLY_DECLARED_ERROR =
    DiagnosticType.error(
        "JSC_LET_CONST_MULTIPLY_DECLARED_ERROR",
        "Duplicate let / const declaration in the same scope is not allowed.");

// The arguments variable is special, in that it's declared in every local
// scope, but not explicitly declared.
private static final String ARGUMENTS = "arguments";

// Vars that still need to be declared in externs. These will be declared
// at the end of the pass, or when we see the equivalent var declared
// in the normal code.
private final Set<String> varsToDeclareInExterns = new HashSet<>();

private final AbstractCompiler compiler;

// Whether this is the post-processing sanity check.
private final boolean sanityCheck;

// Whether extern checks emit error.
private final boolean strictExternCheck;
/** Creates a VarCheck that is not the post-normalization sanity check. */
VarCheck(AbstractCompiler compiler) {
  this(compiler, false);
}

/**
 * @param compiler the compiler
 * @param sanityCheck whether this instance is the post-processing sanity
 *     check; in that mode an undeclared variable throws instead of being
 *     synthesized into the externs (see {@code visit})
 */
VarCheck(AbstractCompiler compiler, boolean sanityCheck) {
  this.compiler = compiler;
  // Extern checks are "strict" when the UNDEFINED_EXTERN_VAR_ERROR
  // diagnostic has been promoted to an error by the compiler options.
  this.strictExternCheck = compiler.getErrorLevel(
      JSError.make("", 0, 0, UNDEFINED_EXTERN_VAR_ERROR)) == CheckLevel.ERROR;
  this.sanityCheck = sanityCheck;
}
/**
 * Builds the scope creator used by this pass. Outside of sanity-check mode
 * a {@link RedeclarationCheckHandler} is attached so that var
 * redeclarations are reported as the scopes are built.
 *
 * @return a new Es6SyntacticScopeCreator
 */
private ScopeCreator createScopeCreator() {
  return sanityCheck
      ? new Es6SyntacticScopeCreator(compiler)
      : new Es6SyntacticScopeCreator(compiler, new RedeclarationCheckHandler());
}
/**
 * Runs the variable check over the externs tree and then both roots, and
 * finally synthesizes extern declarations for any names that were
 * referenced in the externs but never declared anywhere.
 */
@Override
public void process(Node externs, Node root) {
  ScopeCreator scopeCreator = createScopeCreator();
  // Don't run externs-checking in sanity check mode. Normalization will
  // remove duplicate VAR declarations, which will make
  // externs look like they have assigns.
  if (!sanityCheck) {
    NodeTraversal traversal = new NodeTraversal(
        compiler, new NameRefInExternsCheck(), scopeCreator);
    traversal.traverse(externs);
  }

  NodeTraversal t = new NodeTraversal(compiler, this, scopeCreator);
  t.traverseRoots(externs, root);
  // Declare whatever NameRefInExternsCheck queued but the main traversal
  // never saw declared.
  for (String varName : varsToDeclareInExterns) {
    createSynthesizedExternVar(varName);
  }
}
/**
 * Re-checks a single (hot-swapped) script. The top scope is built from the
 * full compiler root so names defined in other JS files are not reported
 * as undefined.
 */
@Override
public void hotSwapScript(Node scriptRoot, Node originalRoot) {
  Preconditions.checkState(scriptRoot.isScript());
  ScopeCreator scopeCreator = createScopeCreator();
  NodeTraversal t = new NodeTraversal(compiler, this, scopeCreator);
  // Note we use the global scope to prevent wrong "undefined-var errors" on
  // variables that are defined in other JS files.
  Scope topScope = scopeCreator.createScope(compiler.getRoot(), null);
  t.traverseWithScope(scriptRoot, topScope);
  // TODO(bashir) Check if we need to createSynthesizedExternVar like process.
}
/**
 * Checks a single NAME reference: reports undeclared variables (optionally
 * synthesizing an extern declaration for them) and validates that
 * cross-module references respect the declared module dependency graph.
 */
@Override
public void visit(NodeTraversal t, Node n, Node parent) {
  if (!n.isName()) {
    return;
  }

  String varName = n.getString();

  // Only a function can have an empty name.
  if (varName.isEmpty()) {
    Preconditions.checkState(parent.isFunction());
    Preconditions.checkState(NodeUtil.isFunctionExpression(parent));
    return;
  }

  // Check if this is a declaration for a var that has been declared
  // elsewhere. If so, mark it as a duplicate.
  if ((parent.isVar() ||
      NodeUtil.isFunctionDeclaration(parent)) &&
      varsToDeclareInExterns.contains(varName)) {
    createSynthesizedExternVar(varName);

    // Suppress the duplicate-declaration warning that the synthesized
    // extern var would otherwise trigger for this declaration.
    JSDocInfoBuilder builder = JSDocInfoBuilder.maybeCopyFrom(n.getJSDocInfo());
    builder.addSuppression("duplicate");
    n.setJSDocInfo(builder.build());
  }

  // Check that the var has been declared.
  Scope scope = t.getScope();
  Var var = scope.getVar(varName);
  if (var == null) {
    if (NodeUtil.isFunctionExpression(parent)) {
      // e.g. [ function foo() {} ], it's okay if "foo" isn't defined in the
      // current scope.
    } else {
      boolean isArguments = scope.isLocal() && ARGUMENTS.equals(varName);
      // The extern checks are stricter, don't report a second error.
      if (!isArguments && !(strictExternCheck && t.getInput().isExtern())) {
        t.report(n, UNDEFINED_VAR_ERROR, varName);
      }

      if (sanityCheck) {
        // In sanity-check mode every name must already be declared.
        throw new IllegalStateException("Unexpected variable " + varName);
      } else {
        // Synthesize a declaration so later passes do not crash on it.
        createSynthesizedExternVar(varName);
        scope.getGlobalScope().declare(varName, n, compiler.getSynthesizedExternsInput());
      }
    }
    return;
  }

  CompilerInput currInput = t.getInput();
  CompilerInput varInput = var.input;
  if (currInput == varInput || currInput == null || varInput == null) {
    // The variable was defined in the same file. This is fine.
    return;
  }

  // Check module dependencies.
  JSModule currModule = currInput.getModule();
  JSModule varModule = varInput.getModule();
  JSModuleGraph moduleGraph = compiler.getModuleGraph();
  if (!sanityCheck &&
      varModule != currModule && varModule != null && currModule != null) {
    if (moduleGraph.dependsOn(currModule, varModule)) {
      // The module dependency was properly declared.
    } else {
      if (scope.isGlobal()) {
        if (moduleGraph.dependsOn(varModule, currModule)) {
          // The variable reference violates a declared module dependency.
          t.report(n, VIOLATED_MODULE_DEP_ERROR,
              currModule.getName(), varModule.getName(), varName);
        } else {
          // The variable reference is between two modules that have no
          // dependency relationship. This should probably be considered an
          // error, but just issue a warning for now.
          t.report(n, MISSING_MODULE_DEP_ERROR,
              currModule.getName(), varModule.getName(), varName);
        }
      } else {
        // Local-scope cross-module reference: only the (default-disabled)
        // strict module dependency diagnostic applies.
        t.report(n, STRICT_MODULE_DEP_ERROR,
            currModule.getName(), varModule.getName(), varName);
      }
    }
  }
}
/**
 * Declares {@code varName} in the synthesized externs script so that
 * subsequent compiler passes do not crash on the undeclared name, and
 * removes it from the still-to-declare set.
 */
private void createSynthesizedExternVar(String varName) {
  Node name = IR.name(varName);

  // Honor the coding convention for constants: names that look constant
  // are flagged as such on the synthesized node.
  // NOTE(nicksantos): honestly, I'm not sure how much this matters.
  // AFAIK, all people who use the CONST coding convention also
  // compile with undeclaredVars as errors. We have some test
  // cases for this configuration though, and it makes them happier.
  if (compiler.getCodingConvention().isConstant(varName)) {
    name.putBooleanProp(Node.IS_CONSTANT_NAME, true);
  }

  getSynthesizedExternsRoot().addChildToBack(IR.var(name));
  varsToDeclareInExterns.remove(varName);
  compiler.reportCodeChange();
}
/**
 * A check for name references in the externs inputs. These used to prevent
 * a variable from getting renamed, but no longer have any effect.
 */
private class NameRefInExternsCheck extends AbstractPostOrderCallback {
  /**
   * Warns on extern name references that have no effect, and queues any
   * undeclared names so a synthesized extern var can be created for them.
   */
  @Override
  public void visit(NodeTraversal t, Node n, Node parent) {
    if (n.isName()) {
      switch (parent.getType()) {
        case Token.VAR:
        case Token.LET:
        case Token.CONST:
        case Token.FUNCTION:
        case Token.CLASS:
        case Token.PARAM_LIST:
          // These are okay.
          break;
        case Token.GETPROP:
          // For "foo.bar" only the base name "foo" needs to be declared.
          if (n == parent.getFirstChild()) {
            Scope scope = t.getScope();
            Var var = scope.getVar(n.getString());
            if (var == null) {
              t.report(n, UNDEFINED_EXTERN_VAR_ERROR, n.getString());
              varsToDeclareInExterns.add(n.getString());
            }
          }
          break;
        case Token.ASSIGN:
          // Don't warn for the "window.foo = foo;" nodes added by
          // DeclaredGlobalExternsOnWindow.
          if (n == parent.getLastChild() && parent.getFirstChild().isGetProp()
              && parent.getFirstChild().getLastChild().getString().equals(n.getString())) {
            break;
          }
          // fall through
        default:
          // Don't warn for simple var assignments "/** @const */ var foo = bar;"
          // They are used to infer the types of namespace aliases.
          if (parent.getType() != Token.NAME || parent.getParent() == null ||
              !NodeUtil.isNameDeclaration(parent.getParent())) {
            t.report(n, NAME_REFERENCE_IN_EXTERNS_ERROR, n.getString());
          }

          // Still queue the name so a synthesized extern var gets created.
          Scope scope = t.getScope();
          Var var = scope.getVar(n.getString());
          if (var == null) {
            varsToDeclareInExterns.add(n.getString());
          }
          break;
      }
    }
  }
}
/**
 * Decides whether duplicate-declaration warnings should be suppressed for
 * the given name node: a {@code @suppress {duplicate}} annotation on either
 * the new declaration or the original one silences the warning.
 *
 * @param n The name node to check.
 * @param origVar The associated Var.
 * @return Whether duplicated declarations warnings should be suppressed
 *     for the given node.
 */
static boolean hasDuplicateDeclarationSuppression(Node n, Var origVar) {
  Preconditions.checkState(n.isName() || n.isRest() || n.isStringKey());
  for (Node declaration : new Node[] {n.getParent(), origVar.getParentNode()}) {
    JSDocInfo info = declaration.getJSDocInfo();
    if (info != null && info.getSuppressions().contains("duplicate")) {
      return true;
    }
  }
  return false;
}
/**
 * The handler for duplicate declarations.
 */
private class RedeclarationCheckHandler implements RedeclarationHandler {
  /**
   * Called by the scope creator when {@code name} is declared a second
   * time in scope {@code s}; reports the appropriate diagnostic.
   */
  @Override
  public void onRedeclaration(
      Scope s, String name, Node n, CompilerInput input) {
    Node parent = n.getParent();

    // Don't allow multiple variables to be declared at the top-level scope
    if (s.isGlobal()) {
      Var origVar = s.getVar(name);
      Node origParent = origVar.getParentNode();
      if (origParent.isCatch() &&
          parent.isCatch()) {
        // Okay, both are 'catch(x)' variables.
        return;
      }

      // Any let/const involvement (either declaration) is a hard error.
      if (parent.isLet() || parent.isConst() ||
          origParent.isLet() || origParent.isConst()) {
        compiler.report(
            JSError.make(n, LET_CONST_MULTIPLY_DECLARED_ERROR));
        return;
      }

      // A @suppress {duplicate} on either declaration silences the warning.
      boolean allowDupe = hasDuplicateDeclarationSuppression(n, origVar);

      if (!allowDupe) {
        compiler.report(
            JSError.make(n,
                VAR_MULTIPLY_DECLARED_ERROR,
                name,
                (origVar.input != null
                    ? origVar.input.getName()
                    : "??")));
      }
    } else if (name.equals(ARGUMENTS) && !NodeUtil.isVarDeclaration(n)) {
      // Disallow shadowing "arguments" as we can't handle with our current
      // scope modeling.
      compiler.report(
          JSError.make(n, VAR_ARGUMENTS_SHADOWED_ERROR));
    }
  }
}
/**
 * Lazily create a "new" externs root for undeclared variables; synthesized
 * extern vars (see {@code createSynthesizedExternVar}) are appended here.
 */
private Node getSynthesizedExternsRoot() {
  return compiler.getSynthesizedExternsInput().getAstRoot(compiler);
}
}
| src/com/google/javascript/jscomp/VarCheck.java | /*
* Copyright 2004 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableSet;
import com.google.javascript.jscomp.NodeTraversal.AbstractPostOrderCallback;
import com.google.javascript.jscomp.SyntacticScopeCreator.RedeclarationHandler;
import com.google.javascript.rhino.IR;
import com.google.javascript.rhino.JSDocInfo;
import com.google.javascript.rhino.JSDocInfoBuilder;
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.Token;
import java.util.HashSet;
import java.util.Set;
/**
 * Checks that all variables are declared, that file-private variables are
 * accessed only in the file that declares them, and that any var references
 * that cross module boundaries respect declared module dependencies.
 */
class VarCheck extends AbstractPostOrderCallback implements
    HotSwapCompilerPass {

  static final DiagnosticType UNDEFINED_VAR_ERROR = DiagnosticType.error(
      "JSC_UNDEFINED_VARIABLE",
      "variable {0} is undeclared");

  static final DiagnosticType VIOLATED_MODULE_DEP_ERROR = DiagnosticType.error(
      "JSC_VIOLATED_MODULE_DEPENDENCY",
      "module {0} cannot reference {2}, defined in " +
      "module {1}, since {1} loads after {0}");

  static final DiagnosticType MISSING_MODULE_DEP_ERROR = DiagnosticType.warning(
      "JSC_MISSING_MODULE_DEPENDENCY",
      "missing module dependency; module {0} should depend " +
      "on module {1} because it references {2}");

  static final DiagnosticType STRICT_MODULE_DEP_ERROR = DiagnosticType.disabled(
      "JSC_STRICT_MODULE_DEPENDENCY",
      // The newline below causes the JS compiler not to complain when the
      // referenced module's name changes because, for example, it's a
      // synthetic module.
      "cannot reference {2} because of a missing module dependency\n"
      + "defined in module {1}, referenced from module {0}");

  static final DiagnosticType NAME_REFERENCE_IN_EXTERNS_ERROR =
      DiagnosticType.warning(
          "JSC_NAME_REFERENCE_IN_EXTERNS",
          "accessing name {0} in externs has no effect. " +
          "Perhaps you forgot to add a var keyword?");

  static final DiagnosticType UNDEFINED_EXTERN_VAR_ERROR =
      DiagnosticType.warning(
          "JSC_UNDEFINED_EXTERN_VAR_ERROR",
          "name {0} is not defined in the externs.");

  static final DiagnosticType VAR_MULTIPLY_DECLARED_ERROR =
      DiagnosticType.error(
          "JSC_VAR_MULTIPLY_DECLARED_ERROR",
          "Variable {0} first declared in {1}");

  static final DiagnosticType VAR_ARGUMENTS_SHADOWED_ERROR =
      DiagnosticType.error(
          "JSC_VAR_ARGUMENTS_SHADOWED_ERROR",
          "Shadowing \"arguments\" is not allowed");

  static final DiagnosticType LET_CONST_MULTIPLY_DECLARED_ERROR =
      DiagnosticType.error(
          "JSC_LET_CONST_MULTIPLY_DECLARED_ERROR",
          "Duplicate let / const declaration in the same scope is not allowed.");

  // The arguments variable is special, in that it's declared in every local
  // scope, but not explicitly declared.
  private static final String ARGUMENTS = "arguments";

  // Vars that still need to be declared in externs. These will be declared
  // at the end of the pass, or when we see the equivalent var declared
  // in the normal code.
  private final Set<String> varsToDeclareInExterns = new HashSet<>();

  private final AbstractCompiler compiler;

  // Whether this is the post-processing sanity check.
  private final boolean sanityCheck;

  // Whether extern checks emit error.
  private final boolean strictExternCheck;

  // NOTE: the raw-typed allowedExternAssignmentTypes set was removed;
  // NodeUtil.isNameDeclaration covers the same VAR/LET/CONST test without
  // an unchecked raw java.util.Set.

  /** Creates a VarCheck that is not the post-normalization sanity check. */
  VarCheck(AbstractCompiler compiler) {
    this(compiler, false);
  }

  VarCheck(AbstractCompiler compiler, boolean sanityCheck) {
    this.compiler = compiler;
    // Extern checks are "strict" when UNDEFINED_EXTERN_VAR_ERROR has been
    // promoted to an error by the compiler options.
    this.strictExternCheck = compiler.getErrorLevel(
        JSError.make("", 0, 0, UNDEFINED_EXTERN_VAR_ERROR)) == CheckLevel.ERROR;
    this.sanityCheck = sanityCheck;
  }

  /**
   * Create a SyntacticScopeCreator. If not in sanity check mode, use a
   * {@link RedeclarationCheckHandler} to check var redeclarations.
   * @return the SyntacticScopeCreator
   */
  private ScopeCreator createScopeCreator() {
    if (sanityCheck) {
      return new Es6SyntacticScopeCreator(compiler);
    } else {
      return new Es6SyntacticScopeCreator(compiler, new RedeclarationCheckHandler());
    }
  }

  @Override
  public void process(Node externs, Node root) {
    ScopeCreator scopeCreator = createScopeCreator();
    // Don't run externs-checking in sanity check mode. Normalization will
    // remove duplicate VAR declarations, which will make
    // externs look like they have assigns.
    if (!sanityCheck) {
      NodeTraversal traversal = new NodeTraversal(
          compiler, new NameRefInExternsCheck(), scopeCreator);
      traversal.traverse(externs);
    }

    NodeTraversal t = new NodeTraversal(compiler, this, scopeCreator);
    t.traverseRoots(externs, root);
    for (String varName : varsToDeclareInExterns) {
      createSynthesizedExternVar(varName);
    }
  }

  @Override
  public void hotSwapScript(Node scriptRoot, Node originalRoot) {
    Preconditions.checkState(scriptRoot.isScript());
    ScopeCreator scopeCreator = createScopeCreator();
    NodeTraversal t = new NodeTraversal(compiler, this, scopeCreator);
    // Note we use the global scope to prevent wrong "undefined-var errors" on
    // variables that are defined in other JS files.
    Scope topScope = scopeCreator.createScope(compiler.getRoot(), null);
    t.traverseWithScope(scriptRoot, topScope);
    // TODO(bashir) Check if we need to createSynthesizedExternVar like process.
  }

  @Override
  public void visit(NodeTraversal t, Node n, Node parent) {
    if (!n.isName()) {
      return;
    }

    String varName = n.getString();

    // Only a function can have an empty name.
    if (varName.isEmpty()) {
      Preconditions.checkState(parent.isFunction());
      Preconditions.checkState(NodeUtil.isFunctionExpression(parent));
      return;
    }

    // Check if this is a declaration for a var that has been declared
    // elsewhere. If so, mark it as a duplicate.
    if ((parent.isVar() ||
        NodeUtil.isFunctionDeclaration(parent)) &&
        varsToDeclareInExterns.contains(varName)) {
      createSynthesizedExternVar(varName);

      JSDocInfoBuilder builder = JSDocInfoBuilder.maybeCopyFrom(n.getJSDocInfo());
      builder.addSuppression("duplicate");
      n.setJSDocInfo(builder.build());
    }

    // Check that the var has been declared.
    Scope scope = t.getScope();
    Var var = scope.getVar(varName);
    if (var == null) {
      if (NodeUtil.isFunctionExpression(parent)) {
        // e.g. [ function foo() {} ], it's okay if "foo" isn't defined in the
        // current scope.
      } else {
        boolean isArguments = scope.isLocal() && ARGUMENTS.equals(varName);
        // The extern checks are stricter, don't report a second error.
        if (!isArguments && !(strictExternCheck && t.getInput().isExtern())) {
          t.report(n, UNDEFINED_VAR_ERROR, varName);
        }

        if (sanityCheck) {
          throw new IllegalStateException("Unexpected variable " + varName);
        } else {
          createSynthesizedExternVar(varName);
          scope.getGlobalScope().declare(varName, n, compiler.getSynthesizedExternsInput());
        }
      }
      return;
    }

    CompilerInput currInput = t.getInput();
    CompilerInput varInput = var.input;
    if (currInput == varInput || currInput == null || varInput == null) {
      // The variable was defined in the same file. This is fine.
      return;
    }

    // Check module dependencies.
    JSModule currModule = currInput.getModule();
    JSModule varModule = varInput.getModule();
    JSModuleGraph moduleGraph = compiler.getModuleGraph();
    if (!sanityCheck &&
        varModule != currModule && varModule != null && currModule != null) {
      if (moduleGraph.dependsOn(currModule, varModule)) {
        // The module dependency was properly declared.
      } else {
        if (scope.isGlobal()) {
          if (moduleGraph.dependsOn(varModule, currModule)) {
            // The variable reference violates a declared module dependency.
            t.report(n, VIOLATED_MODULE_DEP_ERROR,
                currModule.getName(), varModule.getName(), varName);
          } else {
            // The variable reference is between two modules that have no
            // dependency relationship. This should probably be considered an
            // error, but just issue a warning for now.
            t.report(n, MISSING_MODULE_DEP_ERROR,
                currModule.getName(), varModule.getName(), varName);
          }
        } else {
          t.report(n, STRICT_MODULE_DEP_ERROR,
              currModule.getName(), varModule.getName(), varName);
        }
      }
    }
  }

  /**
   * Create a new variable in a synthetic script. This will prevent
   * subsequent compiler passes from crashing.
   */
  private void createSynthesizedExternVar(String varName) {
    Node nameNode = IR.name(varName);

    // Mark the variable as constant if it matches the coding convention
    // for constant vars.
    // NOTE(nicksantos): honestly, I'm not sure how much this matters.
    // AFAIK, all people who use the CONST coding convention also
    // compile with undeclaredVars as errors. We have some test
    // cases for this configuration though, and it makes them happier.
    if (compiler.getCodingConvention().isConstant(varName)) {
      nameNode.putBooleanProp(Node.IS_CONSTANT_NAME, true);
    }

    getSynthesizedExternsRoot().addChildToBack(
        IR.var(nameNode));
    varsToDeclareInExterns.remove(varName);
    compiler.reportCodeChange();
  }

  /**
   * A check for name references in the externs inputs. These used to prevent
   * a variable from getting renamed, but no longer have any effect.
   */
  private class NameRefInExternsCheck extends AbstractPostOrderCallback {
    @Override
    public void visit(NodeTraversal t, Node n, Node parent) {
      if (n.isName()) {
        switch (parent.getType()) {
          case Token.VAR:
          case Token.LET:
          case Token.CONST:
          case Token.FUNCTION:
          case Token.CLASS:
          case Token.PARAM_LIST:
            // These are okay.
            break;
          case Token.GETPROP:
            // For "foo.bar" only the base name "foo" needs to be declared.
            if (n == parent.getFirstChild()) {
              Scope scope = t.getScope();
              Var var = scope.getVar(n.getString());
              if (var == null) {
                t.report(n, UNDEFINED_EXTERN_VAR_ERROR, n.getString());
                varsToDeclareInExterns.add(n.getString());
              }
            }
            break;
          case Token.ASSIGN:
            // Don't warn for the "window.foo = foo;" nodes added by
            // DeclaredGlobalExternsOnWindow.
            if (n == parent.getLastChild() && parent.getFirstChild().isGetProp()
                && parent.getFirstChild().getLastChild().getString().equals(n.getString())) {
              break;
            }
            // fall through
          default:
            // Don't warn for simple var assignments "/** @const */ var foo = bar;"
            // They are used to infer the types of namespace aliases.
            // Uses NodeUtil.isNameDeclaration instead of a raw-typed Set of
            // VAR/LET/CONST token ids.
            if (parent.getType() != Token.NAME || parent.getParent() == null ||
                !NodeUtil.isNameDeclaration(parent.getParent())) {
              t.report(n, NAME_REFERENCE_IN_EXTERNS_ERROR, n.getString());
            }

            Scope scope = t.getScope();
            Var var = scope.getVar(n.getString());
            if (var == null) {
              varsToDeclareInExterns.add(n.getString());
            }
            break;
        }
      }
    }
  }

  /**
   * @param n The name node to check.
   * @param origVar The associated Var.
   * @return Whether duplicated declarations warnings should be suppressed
   *     for the given node.
   */
  static boolean hasDuplicateDeclarationSuppression(Node n, Var origVar) {
    Preconditions.checkState(n.isName() || n.isRest() || n.isStringKey());
    Node parent = n.getParent();
    Node origParent = origVar.getParentNode();

    JSDocInfo info = parent.getJSDocInfo();
    if (info != null && info.getSuppressions().contains("duplicate")) {
      return true;
    }

    info = origParent.getJSDocInfo();
    return (info != null && info.getSuppressions().contains("duplicate"));
  }

  /**
   * The handler for duplicate declarations.
   */
  private class RedeclarationCheckHandler implements RedeclarationHandler {
    @Override
    public void onRedeclaration(
        Scope s, String name, Node n, CompilerInput input) {
      Node parent = n.getParent();

      // Don't allow multiple variables to be declared at the top-level scope
      if (s.isGlobal()) {
        Var origVar = s.getVar(name);
        Node origParent = origVar.getParentNode();
        if (origParent.isCatch() &&
            parent.isCatch()) {
          // Okay, both are 'catch(x)' variables.
          return;
        }

        if (parent.isLet() || parent.isConst() ||
            origParent.isLet() || origParent.isConst()) {
          compiler.report(
              JSError.make(n, LET_CONST_MULTIPLY_DECLARED_ERROR));
          return;
        }

        boolean allowDupe = hasDuplicateDeclarationSuppression(n, origVar);

        if (!allowDupe) {
          compiler.report(
              JSError.make(n,
                  VAR_MULTIPLY_DECLARED_ERROR,
                  name,
                  (origVar.input != null
                      ? origVar.input.getName()
                      : "??")));
        }
      } else if (name.equals(ARGUMENTS) && !NodeUtil.isVarDeclaration(n)) {
        // Disallow shadowing "arguments" as we can't handle with our current
        // scope modeling.
        compiler.report(
            JSError.make(n, VAR_ARGUMENTS_SHADOWED_ERROR));
      }
    }
  }

  /** Lazily create a "new" externs root for undeclared variables. */
  private Node getSynthesizedExternsRoot() {
    return compiler.getSynthesizedExternsInput().getAstRoot(compiler);
  }
}
| Use NodeUtil.isNameDeclaration instead of manual checks for var, const or let.
| src/com/google/javascript/jscomp/VarCheck.java | Use NodeUtil.isNameDeclaration instead of manual checks for var, const or let. | <ide><path>rc/com/google/javascript/jscomp/VarCheck.java
<ide> package com.google.javascript.jscomp;
<ide>
<ide> import com.google.common.base.Preconditions;
<del>import com.google.common.collect.ImmutableSet;
<ide> import com.google.javascript.jscomp.NodeTraversal.AbstractPostOrderCallback;
<ide> import com.google.javascript.jscomp.SyntacticScopeCreator.RedeclarationHandler;
<ide> import com.google.javascript.rhino.IR;
<ide>
<ide> // Whether extern checks emit error.
<ide> private final boolean strictExternCheck;
<del>
<del> private final Set allowedExternAssignmentTypes = ImmutableSet.of(
<del> Token.VAR, Token.CONST, Token.LET);
<ide>
<ide> VarCheck(AbstractCompiler compiler) {
<ide> this(compiler, false);
<ide> // Don't warn for simple var assignments "/** @const */ var foo = bar;"
<ide> // They are used to infer the types of namespace aliases.
<ide> if (parent.getType() != Token.NAME || parent.getParent() == null ||
<del> !allowedExternAssignmentTypes.contains(parent.getParent().getType())) {
<add> !NodeUtil.isNameDeclaration(parent.getParent())) {
<ide> t.report(n, NAME_REFERENCE_IN_EXTERNS_ERROR, n.getString());
<ide> }
<ide> |
|
JavaScript | mit | 11f1e7fba4858db958e99dfcc98d081e209b340d | 0 | schlotg/node-aws-deploy | /*///////////////////////////////////////////////////////////////////////////
For Best results use the an 'app-config.json' file. This File should be
located in the same directory as this file. If you choose not to use it
then your application must run out of the same directory as this file
and you entry point must be a file named 'start.js' located in this
directory.
'app-config.json' allows you to configure following properties for your
application:
"awsUpdates:" <Check for and get this latest AWS updates on a pull>
"applicationName:" <name of the application>
"applicationDirectory": <set this to the directory you application lives in>
"applicationPath": <the path to the application directory>
"appEntry": <set this to the name of the 'js' file that is your entry point>
"commandArguments": <command line arguments you would like pass to the application>
"appEnvironmentVariables": <{<key>:<pair>}, key pair environment variables that need to be se for the application >
"appURL": <https://myapp> used for manual webhooks
"dependencies" : <dependencies within the package.json that need to be pulled in addition to the application directory>
"pullPort": <set this to the port for a pull requests> - defaults to 8000
// The key and cert files are only necessary if you want to listen for a pull
// request securely. If they are omitted and HTTP server is start instead. Beware
// as someone could be snooping and then start sending your servers pull requests
"pullKey": <path to a ssh key file for the HTTPS Server>
"pullCert": <path to a ssh cert file for the HTTPS Server>
"pullCa": <array of paths to the certificate authority files> (optional)
"pullPassphrase" : <string - phrase that the certificate was generated with> (optional if certificate was not generated with a passphrase)
// This is a secret key that is configured here and passed in via a webhook in
// response to a pull request. This is to prevent unauthorized requests from causing
    // pulls. If no pull secret is configured then all pull requests are valid
"pullSecret": <secret phrase>
"pullBranch": <git branch to use for the pull>
"pullField": <field that contains the branch information on a post by the web hook (defaults to 'ref')>
    // In theory you can put any cloud-vendor-specific params in here. You just have to have support in cloud.js for them.
    // Currently AWS is the only cloud platform supported
// Put your AWS config params in here. Example:
"accessKeyId": "XXXXXXXXXXXXXXXXXXXX",
"secretAccessKey": "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX",
"region": "us-east-1"
"sudo": <Mac and Linux only, should we prefix all shell commands with sudo?> - defaults to no sudo
    // This file must conform to the following interface: it must export a start function that accepts a callback
    // as a parameter and calls the callback when complete to start the application.
"preLaunch": <a javascript file to execute relative to the application directory before starting the app>
// To Trigger a pull and restart
*////////////////////////////////////////////////////////////////////////////
(function (){
var fs = require ('fs');
var exec = require('child_process').exec;
var path = require('path');
var cluster = require ('cluster');
var async = require ('async');
var config = require ('./config');
var server = require ('./server');
var configData = config.data;
/*if (configData.data){
configData = configData.data;
config.update ();
}*/
var error, pull_error = "";
var package_json, package_copy, parsed_package, parsed_copy;
var restart = false;
var need_restart = false;
// This an abstraction layer for different cloud services
// all AWS specific code or other cloud vendor stuff should go in here
var cloud = require ("./cloud.js");
var updating_on = false;
var sudo;
var instance_data;
var secure_post;
var pull_list;
var homePath;
var appDir;
var conditionString = config.conditionString;
// POST `body` (a plain object, sent form-encoded) to <_url>:<port><path>.
// `secure` selects HTTPS vs HTTP; `cb(err, responseBody)` is optional.
// NOTE(review): relies on `http` / `https` being required elsewhere in this
// module -- confirm they are in scope.
function post (_url, body, port, secure, path, cb){
    var qs = require('querystring');
    body = qs.stringify (body);
    var options = {
        host: _url,
        port: port,
        path: path,
        method: 'POST',
        headers: {
            'Content-Type': 'application/x-www-form-urlencoded',
            // Use the byte length, not the string length, so multi-byte
            // characters in the form data don't truncate the request body.
            'Content-Length': Buffer.byteLength (body)
        },
        // NOTE(security): disables certificate validation so posts succeed
        // against instances using self-signed certificates; this allows MITM.
        rejectUnauthorized:false
    };
    var server = (secure) ? https : http;
    var req = server.request(options, function(res) {
        var result = "";
        res.setEncoding('utf8');
        res.on('data', function (chunk) {result += chunk;});
        res.on('end', function (){
            cb && cb (null, result);
        });
    });
    req.on('error', function(e) {
        console.log('problem with request: ' + e.message);
        cb && cb (e.message);
    });
    // write data to request body
    req.write(body);
    req.end();
}
// Pull the latest source from the remote repository.
//   cb      - called when the pull (and any instance fan-out) has finished
//   master  - when truthy (boolean true or the string "true"), this instance
//             re-posts the pull request to every other cloud instance before
//             pulling itself
//   req/res - optional webhook request/response objects
// Updates the module-level pull_error / need_restart flags; when new code
// arrived and `restart` is set, exits the process so the supervisor
// restarts the app on the fresh code. Outside the cloud (local dev) it is
// a no-op and the developer pulls manually.
function pull(cb, master, req, res){
    master = (master === true || master === "true");
    pull_error = "";
    need_restart = false;
    // Lazily build the list of directories to pull: the app directory plus
    // any configured dependency checkouts under the home path.
    if (!pull_list){
        pull_list = [appDir];
        if (configData.dependencies){
            configData.dependencies.forEach (function (proj){
                pull_list.push (homePath + "/" + proj + "/");
            });
        }
    }
    // Run "git pull" in every listed directory, accumulating errors into
    // pull_error and setting need_restart when any repo actually changed.
    function _pull (cb){
        console.log ("Pulling the latest code from remote repository");
        async.eachSeries (pull_list, function (proj, cb){
            // get the latest code
            console.log ("\tPulling " + proj);
            var child = exec ("cd " + proj + " ; " + sudo + "git pull", function (err, std, ster){
                if (err){
                    console.log ("\t\tError pulling repository. Error" + ster);
                    pull_error += "\t\tError pulling repository. Error" + ster;
                }
                else{
                    console.log ("\t\t" + std);
                    if (std && std.search ("Already up-to-date") === -1){
                        need_restart = true;
                    }
                }
                cb ();
            });
        }, function (err){
            cb && cb ();
        });
    }
    // only pull the latest if in the cloud. For local development don't do anything. The developer must manually pull
    if (cloud.isCloud ()){
        if (!master){
            _pull (function (){
                if (need_restart && restart){
                    if (pull_error){
                        res && res.send('They were errors pulling. Errors:' + pull_error);
                    }
                    else{
                        res && res.send('Pull Successful!');
                    }
                    // Exit so the supervisor restarts us on the new code.
                    process.exit(0);
                }
                cb && cb ();
            });
        }
        else { // else we are the master so find all the other AWS instances to pull from
            // get other instances that our are same type and already running
            req.query.master = false;
            cloud.getInstances (function (err, instances){
                if (instances && instances.length){
                    console.log ("Found " + instances.length + " instances, re-posting.");
                    instances.forEach (function (instance){
                        if (instance.dns && instance.id !== cloud.getInstanceId ()){ // don't signal ourselves
                            // NOTE(review): uses the `url` module -- presumably
                            // required elsewhere in this file; verify.
                            post (instance.dns, req.body, configData.pullPort, secure_post,
                                url.format ({pathname:"/pull", query:req.query}));
                        }
                    });
                    // now pull and restart ourselves
                    _pull (function (){
                        if (need_restart && restart){
                            if (pull_error){
                                res && res.send('They were errors pulling. Errors:' + pull_error);
                            }
                            else{
                                res && res.send('Pull Successful!');
                            }
                            // NOTE(review): unlike the non-master path above,
                            // no process.exit here -- confirm this is intended.
                        }
                        cb && cb ();
                    });
                }
                else{
                    console.log ("No instances found");
                    cb && cb ();
                }
            });
        }
    }
    else {cb && cb ();}
}
// Check whether the Node runtime itself needs to change (NOT the npm
// packages -- see checkNPMDependencies below). Re-reads package.json /
// package.copy into the module-level parsed copies, compares package.json's
// "nodeVersion" to the running process version and, when they differ,
// upgrades Node via 'n' and exits so the supervisor restarts us on the new
// runtime. Calls cb() when no upgrade (or a failed upgrade) occurred.
function checkNodeDependencies (cb){
    // read in our files
    console.log ("\nChecking for Node version changes");
    try { package_copy = fs.readFileSync ("package.copy");}
    catch (e) { package_copy = null;}
    try {package_json = fs.readFileSync ("package.json");}
    catch (e) { package_json = null;}
    parsed_package = (package_json) ? JSON.parse (package_json) : null;
    parsed_copy = (package_copy) ? JSON.parse (package_copy) : null;
    // see if our node versions match
    if (parsed_package && parsed_package.nodeVersion){
        var version = parsed_package.nodeVersion.replace ('v', "");
        var node_version = process.version.replace ('v', "");
        if (version !== node_version){
            console.log ("    Upgrading Node");
            console.log ("    current version:" + node_version);
            console.log ("    requested version:" + version);
            // upgrade node using 'n'
            var child = exec (sudo + "n " + version, function (err, std, ster){
                if (err){
                    console.log ("    Node upgrade failed. Error:" + ster);
                    pull_error += "\nNode upgrade failed. Error:" + ster;
                }
                else{
                    console.log ("    Node upgrade success, restarting");
                    process.exit (0); // exit so we get restarted
                }
                // BUG FIX: was `cb & cb ()` (bitwise AND), which evaluates
                // cb() unconditionally and throws when no callback is given.
                cb && cb ();
            });
        }
        else{
            console.log ("    Node is up to date");
            console.log ("    current version:" + node_version);
            cb && cb ();
        }
    }
    else{
        cb && cb ();
    }
}
// Run "yum update" to pick up AWS package updates, but only when not in
// local mode and the instance metadata requests deployment updates.
// Calls cb() on success or when skipped, cb(stderr) on failure.
function checkAWSDependencies (cb){
    var wantsUpdate = !configData.local && instance_data.deploy;
    if (!wantsUpdate){
        cb ();
        return;
    }
    exec ("sudo yum -y update", function (err, std, ster){
        console.log ("\nChecking for AWS Updates\n" + std);
        err ? cb (ster) : cb ();
    });
}
// Return the base names of all symbolic links that resolve to directories
// under itemPath (searched recursively). Symlinked directories are not
// descended into; symlinks to plain files are ignored, and broken symlinks
// are skipped because fs.existsSync follows the link target.
function getSymbolicLinks (itemPath){
    var symbolicLinks = [];
    function walkRecursive (itemPath) {
        if (fs.existsSync(itemPath)){
            // if a directory but not a symbolic link, descend; collect
            // symbolic links that point at directories.
            var symbolicLink = fs.lstatSync(itemPath).isSymbolicLink ();
            if (fs.statSync(itemPath).isDirectory()){
                if (!symbolicLink) {
                    fs.readdirSync(itemPath).forEach(function(childItemName) {
                        walkRecursive (path.join (itemPath, childItemName));
                    });
                }
                else {
                    // path.basename handles both '/' and the platform
                    // separator, unlike the previous manual
                    // lastIndexOf('/') slice.
                    symbolicLinks.push (path.basename (itemPath));
                }
            }
        }
    }
    walkRecursive (itemPath);
    return symbolicLinks;
}
// Detect NPM dependency changes by diffing package.json against the saved
// package.copy snapshot (NPM itself doesn't track this reliably). Modules whose
// pinned version changed, appeared, or disappeared are deleted from
// node_modules, then "npm install" runs and the snapshot is refreshed.
// projPath is optional; without it the module-level app-dir package data read
// by checkNodeDependencies is reused.
function checkNPMDependencies (cb, projPath){
// Recursively delete a directory tree, leaving symbolic links untouched so
// npm-linked sibling projects survive the cleanup.
function deleteRecursiveSync (itemPath){
function walkRecursive (itemPath) {
if (fs.existsSync(itemPath)){
// if a directory but not a a symbolic link. (Leave those)
var symbolicLink = fs.lstatSync(itemPath).isSymbolicLink ();
if (fs.statSync(itemPath).isDirectory() && !symbolicLink) {
fs.readdirSync(itemPath).forEach(function(childItemName) {
walkRecursive (path.join (itemPath, childItemName));
});
if (!symbolicLink){
try {fs.rmdirSync(itemPath);}
catch (e){}
}
} else {
fs.unlinkSync(itemPath);
}
}
}
walkRecursive (itemPath);
}
var _package_json, _parsed_copy, _parsed_package, _package_copy;
if (!projPath){
// No explicit project: reuse the files already read by checkNodeDependencies.
_package_json = package_json;
_parsed_copy = parsed_copy;
_parsed_package = parsed_package;
_package_copy = package_copy;
projPath = appDir + '/';
}
else{
try { _package_copy = fs.readFileSync (projPath + "package.copy");}
catch (e) { _package_copy = null;}
try {_package_json = fs.readFileSync (projPath + "package.json");}
catch (e) { _package_json = null;}
_parsed_package = (_package_json) ? JSON.parse (_package_json) : null;
_parsed_copy = (_package_copy) ? JSON.parse (_package_copy) : null;
}
console.log ("\nChecking for Node Module dependency changes for:" + projPath);
if (!_package_json){
// NOTE(review): cb is never invoked on this path, which appears to stall the
// async chain when package.json is missing — confirm callers expect that.
console.log ("WARNING Your Application has no 'package.json' . It is highly" +
" recommended that you use one to manage your NPM dependencies");
}
else{ // delete the modules that have changed and re-install with new versions
if (!_package_copy || _package_copy.toString() !== _package_json.toString ()){
console.log ("\tNPM dependency changes detected");
// Remove modules whose version changed or that are new in package.json.
if (_parsed_package && _parsed_package.dependencies){
for (var package_name in _parsed_package.dependencies){
var copy_version = (_parsed_copy && _parsed_copy.dependencies) ? _parsed_copy.dependencies[package_name] : "";
if (copy_version !== _parsed_package.dependencies[package_name]){
deleteRecursiveSync (projPath + "node_modules/" + package_name);
}
}
}
// Also remove modules that were dropped from package.json entirely.
if (_parsed_copy && _parsed_copy.dependencies){
for (package_name in _parsed_copy.dependencies){
copy_version = (_parsed_package && _parsed_package.dependencies) ? _parsed_package.dependencies[package_name] : "";
if (copy_version !== _parsed_copy.dependencies[package_name]){
deleteRecursiveSync (projPath + "node_modules/" + package_name);
}
}
}
console.log ("\tInstalling new Node Modules");
var cmd_str = (projPath) ? "cd " + projPath + " ; " + sudo + "npm install -d" : sudo + "npm install -d";
var child = exec (cmd_str, function (err, std, ster){
if (err){
console.log ("\t\tError installing Node modules. Error:" + ster + " :" + err);
pull_error += "\nError installing Node modules. Error:" + ster + " :" + err;
// Snapshot is refreshed even on failure so we don't loop on the same diff.
fs.writeFileSync (projPath + "package.copy", _package_json);
}
else{
console.log ("\t\tSuccessfully updated Node Modules: " + std);
fs.writeFileSync (projPath + "package.copy", _package_json);
}
cb && cb ();
});
}
else{
console.log ("\tNo node module changes detected");
cb && cb ();
}
}
}
function checkAllNPMDependencies (cb){
var symbolicLinks = getSymbolicLinks (appDir + "/node_modules/");
// first check the local ones
var dependencies = (configData && configData.dependencies) || [];
console.log ("creating NPM Links");
async.eachSeries (dependencies, function (proj, cb){
var cmd_str = " cd " + appDir + " ; " + sudo + " npm unlink " + proj;
var child = exec (cmd_str, function (err, std, ster){
if (err){
console.log ("\tError unlinking " + proj + " to " + appDir);
if (ster) { console.log ("\t\t" + ster); }
}
else{
console.log ("\tunlinking " + proj + " to " + appDir);
if (std) { console.log ("\t\t" + std); }
}
// give us a couple seconds before moving onto the next one. Seems to be some issue with
// not letting a few cycles elapse before trying it again.
cb ();
});
}, function (){
console.log ("Unlinking complete");
checkNPMDependencies (function (){
// now check the dependencies of any dependent projects
async.eachSeries (dependencies, function (dependency, done){
var projPath = homePath + "/" + dependency + "/";
checkNPMDependencies (function (){
done ();
}, projPath);
// other dependency directories are linked in using symbolic links
// If we deleted them, add them back in
}, function createNPMLinks (){
console.log ("creating NPM Links");
var links = (dependencies && dependencies.length) ? dependencies : symbolicLinks;
async.eachSeries (links, function (proj, cb){
var cmd_str = " cd " + appDir + " ; " + sudo + " npm link " + proj;
var child = exec (cmd_str, function (err, std, ster){
if (err){
console.log ("\tError linking " + proj + " to " + appDir);
if (ster) {console.log ("\t\t" + ster);}
}
else{
console.log ("\tlinking " + proj + " to " + appDir);
if (std) {console.log ("\t\t" + std);}
}
cb ();
});
}, function (){
console.log ("Linking complete");
cb && cb ();
});
});
});
});
}
// Pull the latest code and re-check every dependency layer in sequence
// (AWS packages, Node version, NPM modules). 'master' is forwarded to pull()
// so a master instance can fan the update out to its peer instances.
// NOTE(review): when updating is off and configData.remote !== 'n' the
// callback is never invoked — confirm callers expect that.
function checkAndUpdateEnvironment (cb, master){
if (updating_on || configData.remote === 'n'){
// get the latest code
pull (function (){
// check for dependency changes
checkAWSDependencies (function (){
checkNodeDependencies (function (){
checkAllNPMDependencies (function (){
cb && cb ();
});
});
});
}, master);
}
}
// start the application: push configured command-line args onto process.argv,
// apply configured environment variables, run the optional preLaunch hook
// (master process only), then require() the app's entry-point file.
function startApp (){
// set command line args
if (configData.commandArguments || configData.pullArgs){
if (configData.commandArguments){
var args = configData.commandArguments.split (" ");
if (cluster.isMaster){ // only output this info once
console.log ("Set the following Command Line Arguments:\n\t" + configData.commandArguments);
}
args && args.forEach (function (arg){
process.argv.push (arg);
});
}
// The pullArgs block below appears deliberately disabled by the
// "Fixed an issue with the pull args getting cleared out" change —
// kept for reference.
/*
var pullArgs = configData.pullArgs;
if (pullArgs){
if (cluster.isMaster){ // only output this info once
console.log ("Set the following Pull Arguments:\n\t" + pullArgs);
}
pullArgs && pullArgs.forEach (function (arg){
process.argv.push (arg);
});
}*/
}
else if (cluster.isMaster) {
console.log ("No Command Line Arguments set!");
}
// set environment variables
if (configData.appEnvironmentVariables){
var env_vars;
try {env_vars = JSON.parse (conditionString(configData.appEnvironmentVariables));}
catch (err) {console.log ("Error parsing the environment variables JSON:" + err);}
if (env_vars){
if (cluster.isMaster){ // only output this info once
console.log ("Set the following Environment Variables:");
console.log (env_vars);
}
for (var k in env_vars){
process.env[k] = env_vars[k];
}
}
}
else if (cluster.isMaster){
console.log ("No Environment Variables set!");
}
// call the prelaunch file if available
var workingDirectory = configData.applicationDirectory || process.cwd();
if (cluster.isMaster && configData.preLaunch){
console.log ("Processing preLaunch File: " + configData.preLaunch);
var pre_launch = require (workingDirectory + '/' + configData.preLaunch);
pre_launch.start (function (){
_start ();
});
}
else{ // no prelaunch file or not master
_start ();
}
function _start (){
// enter the application
var appEntry = configData.appEntry || "start.js", date;
if (cluster.isMaster){
date = new Date ();
console.log ("\n\n********************************************************************************");
console.log ("\tSTARTING APPLICATION %s", configData.applicationName);
console.log ("\tCALLING: %s", appEntry);
console.log ("\t\tDate:" + date.toUTCString ());
console.log ("********************************************************************************\n\n");
}
// actually launch the app!!!
require (workingDirectory + '/' + appEntry);
// From here on a pull is allowed to restart the process (see pull()).
restart = true;
}
}
/////////////////// CODE EXECUTION STARTS HERE ///////////////////////////
function run (){
if (cluster.isMaster){
console.log ("********** Node-Deploy Started *********");
var date = new Date ();
console.log (date.toString ());
console.log ("working directory:" + process.cwd ());
}
// get the app path and the home path
appDir = (configData && configData.applicationDirectory) || "";
homePath = appDir.slice (0, appDir.lastIndexOf ('/'));
// if nor configured this does nothing
sudo = (configData.sudo) ? "sudo " : "";
if (cluster.isMaster){
// init the cloud code
cloud.init (function (){
instance_data = cloud.getInstanceData ();
console.log ("\nGetting Cloud Data")
console.log (" Instance ID:" + cloud.getInstanceId ());
console.log (" Instance Data:%j", instance_data);
// see if updating should be on or off
if (instance_data && instance_data.deploy === true){
updating_on = true;
}
// change directory to the app working directory. Default to the current directory
var workingDirectory = configData.applicationDirectory || process.cwd();
console.log ("\nWorking Directory is:" + process.cwd());
process.chdir (workingDirectory);
console.log ("Setting Working Directory to:" + process.cwd());
// determine if we are in the could or not and set an environment variable
// in case other code needs to know this
console.log ("\nServer in cloud: " + cloud.isCloud ());
process.env['CLOUD'] = cloud.isCloud ();
process.env['INSTANCE_ID'] = cloud.getInstanceId ();
process.env['INSTANCE_DATA'] = JSON.stringify (cloud.getInstanceData ());
checkAndUpdateEnvironment (function (){
if (updating_on){
server.startServer (instance_data, checkAndUpdateEnvironment, function (){
startApp ();
});
}
else{
console.log ("NO SERVER STARTED");
startApp ();
}
}, false);
});
}
else{
startApp ();
}
}
run ();
})();
| _start.js | /*///////////////////////////////////////////////////////////////////////////
For Best results use the an 'app-config.json' file. This File should be
located in the same directory as this file. If you choose not to use it
then your application must run out of the same directory as this file
and you entry point must be a file named 'start.js' located in this
directory.
'app-config.json' allows you to configure following properties for your
application:
"awsUpdates:" <Check for and get this latest AWS updates on a pull>
"applicationName:" <name of the application>
"applicationDirectory": <set this to the directory you application lives in>
"applicationPath": <the path to the application directory>
"appEntry": <set this to the name of the 'js' file that is your entry point>
"commandArguments": <command line arguments you would like pass to the application>
"appEnvironmentVariables": <{<key>:<pair>}, key pair environment variables that need to be se for the application >
"appURL": <https://myapp> used for manual webhooks
"dependencies" : <dependencies within the package.json that need to be pulled in addition to the application directory>
"pullPort": <set this to the port for a pull requests> - defaults to 8000
// The key and cert files are only necessary if you want to listen for a pull
// request securely. If they are omitted and HTTP server is start instead. Beware
// as someone could be snooping and then start sending your servers pull requests
"pullKey": <path to a ssh key file for the HTTPS Server>
"pullCert": <path to a ssh cert file for the HTTPS Server>
"pullCa": <array of paths to the certificate authority files> (optional)
"pullPassphrase" : <string - phrase that the certificate was generated with> (optional if certificate was not generated with a passphrase)
// This is a secret key that is configured here and passed in via a webhook in
// response to a pull request. This is to prevent unauthorized requests from causing
// pulls. If no pull secret is configure then all pull request are valid
"pullSecret": <secret phrase>
"pullBranch": <git branch to use for the pull>
"pullField": <field that contains the branch information on a post by the web hook (defaults to 'ref')>
// In theory you can put an cloud vendor specific params in here. You just have to have support in cloud.js for them.
 // Currently AWS is the only cloud platform supported
// Put your AWS config params in here. Example:
"accessKeyId": "XXXXXXXXXXXXXXXXXXXX",
"secretAccessKey": "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX",
"region": "us-east-1"
"sudo": <Mac and Linux only, should we prefix all shell commands with sudo?> - defaults to no sudo
 // This file must conform to the following interface: it must export a start function that accepts a callback
 // as a parameter and calls the callback when complete to start the application.
"preLaunch": <a javascript file to execute relative to the application directory before starting the app>
// To Trigger a pull and restart
*////////////////////////////////////////////////////////////////////////////
(function (){
var fs = require ('fs');
var exec = require('child_process').exec;
var path = require('path');
var cluster = require ('cluster');
var async = require ('async');
var config = require ('./config');
var server = require ('./server');
var configData = config.data;
/*if (configData.data){
configData = configData.data;
config.update ();
}*/
var error, pull_error = "";
var package_json, package_copy, parsed_package, parsed_copy;
var restart = false;
var need_restart = false;
// This an abstraction layer for different cloud services
// all AWS specific code or other cloud vendor stuff should go in here
var cloud = require ("./cloud.js");
var updating_on = false;
var sudo;
var instance_data;
var secure_post;
var pull_list;
var homePath;
var appDir;
var conditionString = config.conditionString;
// post a command out: send 'body' (form-encoded) to _url:port at 'path' over
// HTTP or HTTPS. cb(err, responseText) is invoked when the response finishes
// or the request errors.
function post (_url, body, port, secure, path, cb){
    var qs = require('querystring');
    // BUG FIX: http/https were never required at module level, so selecting the
    // transport below threw a ReferenceError on the first post.
    var http = require('http');
    var https = require('https');
    body = qs.stringify (body);
    var options = {
        host: _url,
        port: port,
        path: path,
        method: 'POST',
        headers: {
            'Content-Type': 'application/x-www-form-urlencoded',
            'Content-Length': body.length
        },
        rejectUnauthorized:false // add this so if someone uses a local certificate on the machine the post will still go through
    };
    var server = (secure) ? https : http;
    var req = server.request(options, function(res) {
        var result = "";
        res.setEncoding('utf8');
        res.on('data', function (chunk) {result += chunk;});
        res.on('end', function (){
            cb && cb (null, result);
        });
    });
    req.on('error', function(e) {
        console.log('problem with request: ' + e.message);
        cb && cb (e.message);
    });
    // write data to request body
    req.write(body);
    req.end();
}
// pull the latest source: "git pull" the app directory plus every configured
// dependency directory. On a non-master instance the process exits afterwards
// (when changes arrived and the app already started) so a supervisor restarts
// it; the master instead re-posts the pull request to its peer instances first.
// req/res are the optional webhook request/response being serviced.
function pull(cb, master, req, res){
master = (master === true || master === "true");
pull_error = "";
need_restart = false;
// Build the list of directories to pull once and cache it.
if (!pull_list){
pull_list = [appDir];
if (configData.dependencies){
configData.dependencies.forEach (function (proj){
pull_list.push (homePath + "/" + proj + "/");
});
}
}
function _pull (cb){
console.log ("Pulling the latest code from remote repository");
async.eachSeries (pull_list, function (proj, cb){
// get the latest code
console.log ("\tPulling " + proj);
var child = exec ("cd " + proj + " ; " + sudo + "git pull", function (err, std, ster){
if (err){
console.log ("\t\tError pulling repository. Error" + ster);
pull_error += "\t\tError pulling repository. Error" + ster;
}
else{
console.log ("\t\t" + std);
// Any output other than "Already up-to-date" means new code arrived.
if (std && std.search ("Already up-to-date") === -1){
need_restart = true;
}
}
cb ();
});
}, function (err){
cb && cb ();
});
}
// only pull the latest if in the cloud. For local development don't do anything. The developer must manually pull
if (cloud.isCloud ()){
if (!master){
_pull (function (){
if (need_restart && restart){
// NOTE(review): "They were errors" looks like a typo for "There were
// errors" in this user-facing response text.
if (pull_error){
res && res.send('They were errors pulling. Errors:' + pull_error);
}
else{
res && res.send('Pull Successful!');
}
process.exit(0);
}
cb && cb ();
});
}
else { // else we are the master so find all the other AWS instances to pull from
// get other instances that our are same type and already running
req.query.master = false;
cloud.getInstances (function (err, instances){
if (instances && instances.length){
console.log ("Found " + instances.length + " instances, re-posting.");
instances.forEach (function (instance){
if (instance.dns && instance.id !== cloud.getInstanceId ()){ // don't signal ourselves
// NOTE(review): 'url' is not required anywhere visible in this file —
// url.format here would throw a ReferenceError; verify.
post (instance.dns, req.body, configData.pullPort, secure_post,
url.format ({pathname:"/pull", query:req.query}));
}
});
// now pull and restart ourselves
_pull (function (){
if (need_restart && restart){
if (pull_error){
res && res.send('They were errors pulling. Errors:' + pull_error);
}
else{
res && res.send('Pull Successful!');
}
}
cb && cb ();
});
}
else{
console.log ("No instances found");
cb && cb ();
}
});
}
}
else {cb && cb ();}
}
// check if the Node version requested in package.json ("nodeVersion") differs
// from the running process version; if so, upgrade via the 'n' tool. Also reads
// package.json / package.copy into module-level state that checkNPMDependencies
// reuses. Calls cb() when done; a successful upgrade exits the process so a
// supervisor can restart us on the new Node version.
function checkNodeDependencies (cb){
    // read in our files
    console.log ("\nChecking for Node version changes");
    try { package_copy = fs.readFileSync ("package.copy");}
    catch (e) { package_copy = null;}
    try {package_json = fs.readFileSync ("package.json");}
    catch (e) { package_json = null;}
    parsed_package = (package_json) ? JSON.parse (package_json) : null;
    parsed_copy = (package_copy) ? JSON.parse (package_copy) : null;
    // see if our node versions match
    if (parsed_package && parsed_package.nodeVersion){
        var version = parsed_package.nodeVersion.replace ('v', "");
        var node_version = process.version.replace ('v', "");
        if (version !== node_version){
            console.log (" Upgrading Node");
            console.log (" current version:" + node_version);
            console.log (" requested version:" + version);
            // upgrade node using 'n'
            var child = exec (sudo + "n " + version, function (err, std, ster){
                if (err){
                    console.log (" Node upgrade failed. Error:" + ster);
                    pull_error += "\nNode upgrade failed. Error:" + ster;
                }
                else{
                    console.log (" Node upgrade success, restarting");
                    process.exit (0); // exit so we get restarted
                }
                // BUG FIX: was 'cb & cb ()' — the bitwise AND invoked cb
                // unconditionally and threw a TypeError when cb was omitted.
                cb && cb ();
            });
        }
        else{
            console.log (" Node is up to date");
            console.log (" current version:" + node_version);
            // BUG FIX: was 'cb & cb ()' (bitwise AND); use the guarded call.
            cb && cb ();
        }
    }
    else{
        // No nodeVersion requested: nothing to do.
        // BUG FIX: was 'cb & cb ()' (bitwise AND); use the guarded call.
        cb && cb ();
    }
}
// Run "yum update" on cloud deploy instances; local setups skip straight to cb.
// On failure cb receives the stderr text, on success it is called with nothing.
function checkAWSDependencies (cb){
    var shouldUpdate = !configData.local && instance_data.deploy;
    if (!shouldUpdate) {
        cb ();
        return;
    }
    exec ("sudo yum -y update", function (err, std, ster){
        console.log ("\nChecking for AWS Updates\n" + std);
        if (err) {
            cb (ster);
        } else {
            cb ();
        }
    });
}
// Walk a directory tree and collect the base names of every directory that is a
// symbolic link. Regular directories are recursed into; symlinked directories
// are recorded by name (text after the last '/') and are not followed.
function getSymbolicLinks (itemPath){
    var symbolicLinks = [];
    function walkRecursive (currentPath) {
        if (!fs.existsSync (currentPath)) {
            return;
        }
        // lstat so we can tell a symlink apart; stat follows it for the
        // directory check. (Leave symlinks in place — just record them.)
        var isLink = fs.lstatSync (currentPath).isSymbolicLink ();
        if (!fs.statSync (currentPath).isDirectory ()) {
            return;
        }
        if (isLink) {
            var slashIndex = currentPath.lastIndexOf ("/");
            if (slashIndex > 0) { slashIndex++; }
            symbolicLinks.push (currentPath.slice (slashIndex, currentPath.length));
        } else {
            fs.readdirSync (currentPath).forEach (function (childItemName) {
                walkRecursive (path.join (currentPath, childItemName));
            });
        }
    }
    walkRecursive (itemPath);
    return symbolicLinks;
}
// Detect NPM dependency changes by diffing package.json against the saved
// package.copy snapshot (NPM itself doesn't track this reliably). Modules whose
// pinned version changed, appeared, or disappeared are deleted from
// node_modules, then "npm install" runs and the snapshot is refreshed.
// projPath is optional; without it the module-level app-dir package data read
// by checkNodeDependencies is reused.
function checkNPMDependencies (cb, projPath){
// Recursively delete a directory tree, leaving symbolic links untouched so
// npm-linked sibling projects survive the cleanup.
function deleteRecursiveSync (itemPath){
function walkRecursive (itemPath) {
if (fs.existsSync(itemPath)){
// if a directory but not a a symbolic link. (Leave those)
var symbolicLink = fs.lstatSync(itemPath).isSymbolicLink ();
if (fs.statSync(itemPath).isDirectory() && !symbolicLink) {
fs.readdirSync(itemPath).forEach(function(childItemName) {
walkRecursive (path.join (itemPath, childItemName));
});
if (!symbolicLink){
try {fs.rmdirSync(itemPath);}
catch (e){}
}
} else {
fs.unlinkSync(itemPath);
}
}
}
walkRecursive (itemPath);
}
var _package_json, _parsed_copy, _parsed_package, _package_copy;
if (!projPath){
// No explicit project: reuse the files already read by checkNodeDependencies.
_package_json = package_json;
_parsed_copy = parsed_copy;
_parsed_package = parsed_package;
_package_copy = package_copy;
projPath = appDir + '/';
}
else{
try { _package_copy = fs.readFileSync (projPath + "package.copy");}
catch (e) { _package_copy = null;}
try {_package_json = fs.readFileSync (projPath + "package.json");}
catch (e) { _package_json = null;}
_parsed_package = (_package_json) ? JSON.parse (_package_json) : null;
_parsed_copy = (_package_copy) ? JSON.parse (_package_copy) : null;
}
console.log ("\nChecking for Node Module dependency changes for:" + projPath);
if (!_package_json){
// NOTE(review): cb is never invoked on this path, which appears to stall the
// async chain when package.json is missing — confirm callers expect that.
console.log ("WARNING Your Application has no 'package.json' . It is highly" +
" recommended that you use one to manage your NPM dependencies");
}
else{ // delete the modules that have changed and re-install with new versions
if (!_package_copy || _package_copy.toString() !== _package_json.toString ()){
console.log ("\tNPM dependency changes detected");
// Remove modules whose version changed or that are new in package.json.
if (_parsed_package && _parsed_package.dependencies){
for (var package_name in _parsed_package.dependencies){
var copy_version = (_parsed_copy && _parsed_copy.dependencies) ? _parsed_copy.dependencies[package_name] : "";
if (copy_version !== _parsed_package.dependencies[package_name]){
deleteRecursiveSync (projPath + "node_modules/" + package_name);
}
}
}
// Also remove modules that were dropped from package.json entirely.
if (_parsed_copy && _parsed_copy.dependencies){
for (package_name in _parsed_copy.dependencies){
copy_version = (_parsed_package && _parsed_package.dependencies) ? _parsed_package.dependencies[package_name] : "";
if (copy_version !== _parsed_copy.dependencies[package_name]){
deleteRecursiveSync (projPath + "node_modules/" + package_name);
}
}
}
console.log ("\tInstalling new Node Modules");
var cmd_str = (projPath) ? "cd " + projPath + " ; " + sudo + "npm install -d" : sudo + "npm install -d";
var child = exec (cmd_str, function (err, std, ster){
if (err){
console.log ("\t\tError installing Node modules. Error:" + ster + " :" + err);
pull_error += "\nError installing Node modules. Error:" + ster + " :" + err;
// Snapshot is refreshed even on failure so we don't loop on the same diff.
fs.writeFileSync (projPath + "package.copy", _package_json);
}
else{
console.log ("\t\tSuccessfully updated Node Modules: " + std);
fs.writeFileSync (projPath + "package.copy", _package_json);
}
cb && cb ();
});
}
else{
console.log ("\tNo node module changes detected");
cb && cb ();
}
}
}
// Refresh NPM state for the app and its configured sibling projects:
// 1) "npm unlink" each configured dependency, 2) re-check NPM packages for the
// app and then for each dependency project, 3) "npm link" the dependencies (or
// whatever symbolic links were previously present under node_modules) back in.
function checkAllNPMDependencies (cb){
// Snapshot existing symlinks first — checkNPMDependencies may remove modules
// and we need to know what to re-link afterwards.
var symbolicLinks = getSymbolicLinks (appDir + "/node_modules/");
// first check the local ones
var dependencies = (configData && configData.dependencies) || [];
// NOTE(review): this log says "creating" but the loop below unlinks.
console.log ("creating NPM Links");
async.eachSeries (dependencies, function (proj, cb){
var cmd_str = " cd " + appDir + " ; " + sudo + " npm unlink " + proj;
var child = exec (cmd_str, function (err, std, ster){
if (err){
console.log ("\tError unlinking " + proj + " to " + appDir);
if (ster) { console.log ("\t\t" + ster); }
}
else{
console.log ("\tunlinking " + proj + " to " + appDir);
if (std) { console.log ("\t\t" + std); }
}
// give us a couple seconds before moving onto the next one. Seems to be some issue with
// not letting a few cycles elapse before trying it again.
cb ();
});
}, function (){
console.log ("Unlinking complete");
checkNPMDependencies (function (){
// now check the dependencies of any dependent projects
async.eachSeries (dependencies, function (dependency, done){
var projPath = homePath + "/" + dependency + "/";
checkNPMDependencies (function (){
done ();
}, projPath);
// other dependency directories are linked in using symbolic links
// If we deleted them, add them back in
}, function createNPMLinks (){
console.log ("creating NPM Links");
var links = (dependencies && dependencies.length) ? dependencies : symbolicLinks;
async.eachSeries (links, function (proj, cb){
var cmd_str = " cd " + appDir + " ; " + sudo + " npm link " + proj;
var child = exec (cmd_str, function (err, std, ster){
if (err){
console.log ("\tError linking " + proj + " to " + appDir);
if (ster) {console.log ("\t\t" + ster);}
}
else{
console.log ("\tlinking " + proj + " to " + appDir);
if (std) {console.log ("\t\t" + std);}
}
cb ();
});
}, function (){
console.log ("Linking complete");
cb && cb ();
});
});
});
});
}
// Pull the latest code and re-check every dependency layer in sequence
// (AWS packages, Node version, NPM modules). 'master' is forwarded to pull()
// so a master instance can fan the update out to its peer instances.
// NOTE(review): when updating is off and configData.remote !== 'n' the
// callback is never invoked — confirm callers expect that.
function checkAndUpdateEnvironment (cb, master){
if (updating_on || configData.remote === 'n'){
// get the latest code
pull (function (){
// check for dependency changes
checkAWSDependencies (function (){
checkNodeDependencies (function (){
checkAllNPMDependencies (function (){
cb && cb ();
});
});
});
}, master);
}
}
// start the application: push configured command-line args onto process.argv,
// apply configured environment variables, run the optional preLaunch hook
// (master process only), then require() the app's entry-point file.
function startApp (){
// set command line args
if (configData.commandArguments || configData.pullArgs){
if (configData.commandArguments){
var args = configData.commandArguments.split (" ");
if (cluster.isMaster){ // only output this info once
console.log ("Set the following Command Line Arguments:\n\t" + configData.commandArguments);
}
args && args.forEach (function (arg){
process.argv.push (arg);
});
}
// NOTE(review): this assumes configData.pullArgs is an array — forEach on a
// string value would throw. Confirm the config shape.
var pullArgs = configData.pullArgs;
if (pullArgs){
if (cluster.isMaster){ // only output this info once
console.log ("Set the following Pull Arguments:\n\t" + pullArgs);
}
pullArgs && pullArgs.forEach (function (arg){
process.argv.push (arg);
});
}
}
else if (cluster.isMaster) {
console.log ("No Command Line Arguments set!");
}
// set environment variables
if (configData.appEnvironmentVariables){
var env_vars;
try {env_vars = JSON.parse (conditionString(configData.appEnvironmentVariables));}
catch (err) {console.log ("Error parsing the environment variables JSON:" + err);}
if (env_vars){
if (cluster.isMaster){ // only output this info once
console.log ("Set the following Environment Variables:");
console.log (env_vars);
}
for (var k in env_vars){
process.env[k] = env_vars[k];
}
}
}
else if (cluster.isMaster){
console.log ("No Environment Variables set!");
}
// call the prelaunch file if available
var workingDirectory = configData.applicationDirectory || process.cwd();
if (cluster.isMaster && configData.preLaunch){
console.log ("Processing preLaunch File: " + configData.preLaunch);
var pre_launch = require (workingDirectory + '/' + configData.preLaunch);
pre_launch.start (function (){
_start ();
});
}
else{ // no prelaunch file or not master
_start ();
}
function _start (){
// enter the application
var appEntry = configData.appEntry || "start.js", date;
if (cluster.isMaster){
date = new Date ();
console.log ("\n\n********************************************************************************");
console.log ("\tSTARTING APPLICATION %s", configData.applicationName);
console.log ("\tCALLING: %s", appEntry);
console.log ("\t\tDate:" + date.toUTCString ());
console.log ("********************************************************************************\n\n");
}
// actually launch the app!!!
require (workingDirectory + '/' + appEntry);
// From here on a pull is allowed to restart the process (see pull()).
restart = true;
}
}
/////////////////// CODE EXECUTION STARTS HERE ///////////////////////////
function run (){
if (cluster.isMaster){
console.log ("********** Node-Deploy Started *********");
var date = new Date ();
console.log (date.toString ());
console.log ("working directory:" + process.cwd ());
}
// get the app path and the home path
appDir = (configData && configData.applicationDirectory) || "";
homePath = appDir.slice (0, appDir.lastIndexOf ('/'));
// if nor configured this does nothing
sudo = (configData.sudo) ? "sudo " : "";
if (cluster.isMaster){
// init the cloud code
cloud.init (function (){
instance_data = cloud.getInstanceData ();
console.log ("\nGetting Cloud Data")
console.log (" Instance ID:" + cloud.getInstanceId ());
console.log (" Instance Data:%j", instance_data);
// see if updating should be on or off
if (instance_data && instance_data.deploy === true){
updating_on = true;
}
// change directory to the app working directory. Default to the current directory
var workingDirectory = configData.applicationDirectory || process.cwd();
console.log ("\nWorking Directory is:" + process.cwd());
process.chdir (workingDirectory);
console.log ("Setting Working Directory to:" + process.cwd());
// determine if we are in the could or not and set an environment variable
// in case other code needs to know this
console.log ("\nServer in cloud: " + cloud.isCloud ());
process.env['CLOUD'] = cloud.isCloud ();
process.env['INSTANCE_ID'] = cloud.getInstanceId ();
process.env['INSTANCE_DATA'] = JSON.stringify (cloud.getInstanceData ());
checkAndUpdateEnvironment (function (){
if (updating_on){
server.startServer (instance_data, checkAndUpdateEnvironment, function (){
startApp ();
});
}
else{
console.log ("NO SERVER STARTED");
startApp ();
}
}, false);
});
}
else{
startApp ();
}
}
run ();
})();
| Fixed an issue with the pull args getting cleared out
| _start.js | Fixed an issue with the pull args getting cleared out | <ide><path>start.js
<ide> process.argv.push (arg);
<ide> });
<ide> }
<add> /*
<ide> var pullArgs = configData.pullArgs;
<ide> if (pullArgs){
<ide> if (cluster.isMaster){ // only output this info once
<ide> pullArgs && pullArgs.forEach (function (arg){
<ide> process.argv.push (arg);
<ide> });
<del> }
<add> }*/
<ide> }
<ide> else if (cluster.isMaster) {
<ide> console.log ("No Command Line Arguments set!"); |
|
Java | apache-2.0 | error: pathspec 'Chapter_005_Coll.Pro/List/src/main/java/ru/sbulygin/ReplicationLinkedList.java' did not match any file(s) known to git
| d9c452ce6872d3f284acc7513655625645ea1de5 | 1 | sergeyBulygin/Java_a_to_z | package ru.sbulygin;
import java.util.Iterator;
/**
* Class ReplicationArrayList.
*
* @author sbulygin.
* @since 30.05.2017.
* @version 1.0.
* @param <E> type.
*/
public class ReplicationLinkedList<E> implements SimpleContainer<E> {

    /**
     * Size of list.
     */
    private int size = 0;

    /**
     * First entry in list (null when the list is empty).
     */
    private Entry<E> firstElement;

    /**
     * Last entry in list (null when the list is empty).
     */
    private Entry<E> lastElement;

    @Override
    public void add(E e) {
        Entry<E> newEntry = new Entry<E>(e, null, this.lastElement);
        if (lastElement == null) {
            this.firstElement = newEntry;
        } else {
            // BUG FIX: previously "newEntry.next = newEntry" created a
            // self-loop and never linked the old tail forward to the new entry.
            this.lastElement.next = newEntry;
        }
        this.lastElement = newEntry;
        size++;
    }

    @Override
    public E get(int index) {
        return finder(index).element;
    }

    /**
     * Method getting size of list.
     * @return size of list.
     */
    public int size() {
        return this.size;
    }

    @Override
    public Iterator<E> iterator() {
        return new ReplicationLinkedIterator<>();
    }

    /**
     * Iterator over the list in insertion order.
     * @param <E> type.
     */
    private class ReplicationLinkedIterator<E> implements Iterator<E> {

        /**
         * Position of iterator.
         */
        private int position = 0;

        @Override
        public boolean hasNext() {
            return this.position != size;
        }

        @Override
        public E next() {
            position++;
            return (E) get(position - 1);
        }
    }

    /**
     * Method looks up the element at index in list, walking from whichever
     * end (head or tail) is closer.
     * @param index index of element.
     * @return the element is found.
     */
    private Entry<E> finder(int index) {
        if (index < 0 || index >= size) {
            throw new IndexOutOfBoundsException("The element with the given index was not found.");
        }
        Entry<E> searchElement;
        if (index < (size >> 1)) {
            // BUG FIX: walk exactly 'index' steps from the head; the previous
            // loop (count <= index) advanced one entry too far.
            searchElement = this.firstElement;
            for (int count = 0; count < index; count++) {
                searchElement = searchElement.next;
            }
        } else {
            // BUG FIX: the tail already sits at index size - 1; the previous
            // loop started counting at 'size' and stepped back one too many.
            searchElement = this.lastElement;
            for (int count = size - 1; count > index; count--) {
                searchElement = searchElement.prev;
            }
        }
        return searchElement;
    }

    /**
     * Private static class for creating double-linked entries.
     * @param <E> The type of the parameter.
     */
    private static class Entry<E> {

        /**
         * The element that contains the data type E in the entry.
         */
        private E element;

        /**
         * Link of next element in list.
         */
        private Entry<E> next;

        /**
         * Link of previous element in list.
         */
        private Entry<E> prev;

        /**
         * Constructor of class Entry.
         * @param element data to save in entry.
         * @param next link of next element.
         * @param prev link of previous element.
         */
        Entry(E element, Entry<E> next, Entry<E> prev) {
            this.element = element;
            this.next = next;
            this.prev = prev;
        }
    }
}
| Chapter_005_Coll.Pro/List/src/main/java/ru/sbulygin/ReplicationLinkedList.java | implementation ReplicationLinkedList #16
| Chapter_005_Coll.Pro/List/src/main/java/ru/sbulygin/ReplicationLinkedList.java | implementation ReplicationLinkedList #16 | <ide><path>hapter_005_Coll.Pro/List/src/main/java/ru/sbulygin/ReplicationLinkedList.java
<add>package ru.sbulygin;
<add>
<add>import java.util.Iterator;
<add>
<add>/**
<add> * Class ReplicationArrayList.
<add> *
<add> * @author sbulygin.
<add> * @since 30.05.2017.
<add> * @version 1.0.
<add> * @param <E> type.
<add> */
<add>
<add>public class ReplicationLinkedList<E> implements SimpleContainer<E> {
<add>
<add> /**
<add> * Size of list.
<add> */
<add> private int size = 0;
<add>
<add> /**
<add> * First entry in list.
<add> */
<add> private Entry<E> firstElement;
<add>
<add> /**
<add> * Last entry in list.
<add> */
<add> private Entry<E> lastElement;
<add>
<add> @Override
<add> public void add(E e) {
<add> Entry<E> newEntry = new Entry<E>(e, null, this.lastElement);
<add> if (lastElement == null) {
<add> this.firstElement = newEntry;
<add> } else {
<add> newEntry.next = newEntry;
<add> }
<add> this.lastElement = newEntry;
<add> size++;
<add>
<add> }
<add>
<add> @Override
<add> public E get(int index) {
<add> return finder(index).element;
<add> }
<add>
<add> /**
<add> * Method getting size of list.
<add> * @return size of list.
<add> */
<add> public int size() {
<add> return this.size;
<add> }
<add>
<add> @Override
<add> public Iterator<E> iterator() {
<add> return new ReplicationLinkedIterator<>();
<add> }
<add>
<add> /**
<add> * Class ReplicationLinkedIterator.
<add> *
<add> * @author sbulygin.
<add> * @since 30.05.2017.
<add> * @version 1.0.
<add> * @param <E> type.
<add> */
<add> private class ReplicationLinkedIterator<E> implements Iterator<E> {
<add>
<add> /**
<add> * Position of iterator.
<add> */
<add> private int position = 0;
<add>
<add> @Override
<add> public boolean hasNext() {
<add> return this.position != size;
<add> }
<add>
<add> @Override
<add> public E next() {
<add> position++;
<add> return (E) get(position - 1);
<add> }
<add>
<add> }
<add>
<add> /**
<add> * Method looks up the element at index in list.
<add> * @param index index of element.
<add> * @return the element is found.
<add> */
<add> private Entry<E> finder(int index) {
<add> if (index < 0 || index >= size) {
<add> throw new IndexOutOfBoundsException("The element with the given index was not found.");
<add> }
<add> Entry<E> searchElement;
<add> if (index < (size >> 1)) {
<add> searchElement = this.firstElement;
<add> for (int count = 0; count <= index; count++) {
<add> searchElement = searchElement.next;
<add> }
<add>
<add> } else {
<add> searchElement = this.lastElement;
<add> for (int count = size; count > index; count--) {
<add> searchElement = searchElement.prev;
<add> }
<add> }
<add> return searchElement;
<add>
<add> }
<add>
<add> /**
<add> * Private static class for creating Double-linked entries in ReplicationLinkedList.
<add> * @param <E> The type of the parameter.
<add> */
<add> private static class Entry<E> {
<add>
<add> /**
<add> * The element that contains the data type E in the entry.
<add> */
<add> private E element;
<add>
<add> /**
<add> * Link of next element in list.
<add> */
<add> private Entry<E> next;
<add>
<add> /**
<add> * Link of previous element in list.
<add> */
<add> private Entry<E> prev;
<add>
<add> /**
<add> * Constructor of class Entry.
<add> * @param element data to save in entry.
<add> * @param next link of next element.
<add> * @param prev link of previous element.
<add> */
<add> Entry(E element, Entry<E> next, Entry<E> prev) {
<add> this.element = element;
<add> this.next = next;
<add> this.prev = prev;
<add> }
<add> }
<add>
<add>
<add>}
<add> |
|
JavaScript | mit | 07863174af205f17961f3d45d78ebef6c7dae315 | 0 | globexdesigns/react-dnd,nagaozen/react-dnd,RallySoftware/react-dnd,wagonhq/react-dnd,colbyr/react-dnd,cesarandreu/react-dnd,Reggino/react-dnd,tomulin1/react-dnd,konce/react-dnd,tylercollier/react-dnd-demo,gaearon/react-dnd,prometheusresearch/react-dnd,gaearon/react-dnd,globexdesigns/react-dnd,jgable/react-dnd,jgable/react-dnd,longlho/react-dnd,konce/react-dnd,jowcy/react-dnd,nickknw/react-dnd,craigklem/react-dnd,tomulin1/react-dnd,randrianov/react-dnd,zetkin/react-dnd,srajko/react-dnd,arnif/react-dnd,wagonhq/react-dnd,jowcy/react-dnd,nickknw/react-dnd,Reggino/react-dnd,ntdb/react-dnd,prometheusresearch/react-dnd,RallySoftware/react-dnd,colbyr/react-dnd,pairyo/react-dnd,numso/react-dnd,zetkin/react-dnd,hiddentao/react-dnd,craigklem/react-dnd,pairyo/react-dnd,arnif/react-dnd,tylercollier/react-dnd-demo,numso/react-dnd,longlho/react-dnd,randrianov/react-dnd,prometheusresearch/react-dnd,cesarandreu/react-dnd,hiddentao/react-dnd,srajko/react-dnd,ntdb/react-dnd,nagaozen/react-dnd | 'use strict';
var DragDropActionCreators = require('../actions/DragDropActionCreators'),
NativeDragItemTypes = require('../constants/NativeDragItemTypes'),
DropEffects = require('../constants/DropEffects'),
EnterLeaveMonitor = require('../utils/EnterLeaveMonitor'),
isFileDragDropEvent = require('./isFileDragDropEvent'),
shallowEqual = require('react/lib/shallowEqual'),
union = require('lodash-node/modern/arrays/union'),
without = require('lodash-node/modern/arrays/without'),
isWebkit = require('./isWebkit'),
isFirefox = require('./isFirefox');
// Store global state for browser-specific fixes and workarounds
var _monitor = new EnterLeaveMonitor(),
_currentDragTarget,
_initialDragTargetRect,
_imitateCurrentDragEnd,
_dragTargetRectDidChange,
_currentDropEffect;
function getElementRect(el) {
var rect = el.getBoundingClientRect();
// Copy so object doesn't get reused
return { top: rect.top, left: rect.left, width: rect.width, height: rect.height };
}
function checkIfCurrentDragTargetRectChanged() {
if (!_dragTargetRectDidChange) {
var currentRect = getElementRect(_currentDragTarget);
_dragTargetRectDidChange = !shallowEqual(_initialDragTargetRect, currentRect);
}
return _dragTargetRectDidChange;
}
function triggerDragEndIfDragSourceWasRemovedFromDOM() {
if (_currentDragTarget &&
_imitateCurrentDragEnd &&
!document.body.contains(_currentDragTarget)) {
_imitateCurrentDragEnd();
}
}
function preventDefaultFileDropAction(e) {
if (isFileDragDropEvent(e)) {
e.preventDefault();
}
}
if (typeof window !== 'undefined') {
window.addEventListener('dragenter', function (e) {
preventDefaultFileDropAction(e);
var isFirstEnter = _monitor.enter(e.target);
if (isFirstEnter && isFileDragDropEvent(e)) {
DragDropActionCreators.startDragging(NativeDragItemTypes.FILE, null);
}
});
window.addEventListener('dragover', function (e) {
preventDefaultFileDropAction(e);
// At the top level of event bubbling, use previously set drop effect and reset it.
if (_currentDropEffect) {
e.dataTransfer.dropEffect = _currentDropEffect;
_currentDropEffect = null;
}
if (!_currentDragTarget) {
return;
}
if (isWebkit() && checkIfCurrentDragTargetRectChanged()) {
// Prevent animating to incorrect position
e.preventDefault();
}
});
window.addEventListener('dragleave', function (e) {
preventDefaultFileDropAction(e);
var isLastLeave = _monitor.leave(e.target);
if (isLastLeave && isFileDragDropEvent(e)) {
DragDropActionCreators.endDragging();
}
});
window.addEventListener('drop', function (e) {
preventDefaultFileDropAction(e);
_monitor.reset();
if (isFileDragDropEvent(e)) {
DragDropActionCreators.endDragging();
}
triggerDragEndIfDragSourceWasRemovedFromDOM();
});
// Mouse events tell us that dragging has ended but `dragend` didn't dire.
// This may happen if source DOM was removed while dragging.
window.addEventListener('mousein', triggerDragEndIfDragSourceWasRemovedFromDOM);
window.addEventListener('mouseout', triggerDragEndIfDragSourceWasRemovedFromDOM);
window.addEventListener('mouseenter', triggerDragEndIfDragSourceWasRemovedFromDOM);
window.addEventListener('mouseleave', triggerDragEndIfDragSourceWasRemovedFromDOM);
window.addEventListener('mousemove', triggerDragEndIfDragSourceWasRemovedFromDOM);
}
var NativeDragDropSupport = {
handleDragStart(dragTarget, imitateDragEnd) {
_currentDragTarget = dragTarget;
_initialDragTargetRect = getElementRect(dragTarget);
_dragTargetRectDidChange = false;
_imitateCurrentDragEnd = imitateDragEnd;
},
handleDragEnd() {
_currentDragTarget = null;
_initialDragTargetRect = null;
_dragTargetRectDidChange = false;
_imitateCurrentDragEnd = null;
},
handleDragOver(e, dropEffect) {
// As event bubbles top-down, first specified effect will be used
if (!_currentDropEffect) {
_currentDropEffect = dropEffect;
}
}
};
module.exports = NativeDragDropSupport; | modules/utils/NativeDragDropSupport.js | 'use strict';
var DragDropActionCreators = require('../actions/DragDropActionCreators'),
NativeDragItemTypes = require('../constants/NativeDragItemTypes'),
DropEffects = require('../constants/DropEffects'),
EnterLeaveMonitor = require('../utils/EnterLeaveMonitor'),
isFileDragDropEvent = require('./isFileDragDropEvent'),
shallowEqual = require('react/lib/shallowEqual'),
union = require('lodash-node/modern/arrays/union'),
without = require('lodash-node/modern/arrays/without'),
isWebkit = require('./isWebkit'),
isFirefox = require('./isFirefox');
// Store global state for browser-specific fixes and workarounds
var _monitor = new EnterLeaveMonitor(),
_currentDragTarget,
_initialDragTargetRect,
_imitateCurrentDragEnd,
_dragTargetRectDidChange,
_lastDragSourceCheckTimeout,
_currentDropEffect;
function getElementRect(el) {
var rect = el.getBoundingClientRect();
// Copy so object doesn't get reused
return { top: rect.top, left: rect.left, width: rect.width, height: rect.height };
}
function checkIfCurrentDragTargetRectChanged() {
if (!_dragTargetRectDidChange) {
var currentRect = getElementRect(_currentDragTarget);
_dragTargetRectDidChange = !shallowEqual(_initialDragTargetRect, currentRect);
}
return _dragTargetRectDidChange;
}
function triggerDragEndIfDragSourceWasRemovedFromDOM() {
if (_currentDragTarget &&
_imitateCurrentDragEnd &&
!document.body.contains(_currentDragTarget)) {
_imitateCurrentDragEnd();
}
}
function preventDefaultFileDropAction(e) {
if (isFileDragDropEvent(e)) {
e.preventDefault();
}
}
if (typeof window !== 'undefined') {
window.addEventListener('dragenter', function (e) {
preventDefaultFileDropAction(e);
var isFirstEnter = _monitor.enter(e.target);
if (isFirstEnter && isFileDragDropEvent(e)) {
DragDropActionCreators.startDragging(NativeDragItemTypes.FILE, null);
}
});
window.addEventListener('dragover', function (e) {
preventDefaultFileDropAction(e);
// At the top level of event bubbling, use previously set drop effect and reset it.
if (_currentDropEffect) {
e.dataTransfer.dropEffect = _currentDropEffect;
_currentDropEffect = null;
}
if (!_currentDragTarget) {
return;
}
if (isWebkit() && checkIfCurrentDragTargetRectChanged()) {
// Prevent animating to incorrect position
e.preventDefault();
} else if (isFirefox()) {
// Firefox won't trigger a global `drop` if source node was removed.
// It won't trigger `mouseup` either. It *will* however trigger `dragover`
// continually during drag, so our strategy is to simply wait until `dragover`
// has stopped firing.
clearTimeout(_lastDragSourceCheckTimeout);
_lastDragSourceCheckTimeout = setTimeout(
triggerDragEndIfDragSourceWasRemovedFromDOM,
140 // 70 seems enough on OS X with FF33, double it to be sure
);
}
});
window.addEventListener('dragleave', function (e) {
preventDefaultFileDropAction(e);
var isLastLeave = _monitor.leave(e.target);
if (isLastLeave && isFileDragDropEvent(e)) {
DragDropActionCreators.endDragging();
}
});
window.addEventListener('drop', function (e) {
preventDefaultFileDropAction(e);
_monitor.reset();
if (isFileDragDropEvent(e)) {
DragDropActionCreators.endDragging();
} else if (!isFirefox()) {
triggerDragEndIfDragSourceWasRemovedFromDOM();
}
});
}
var NativeDragDropSupport = {
handleDragStart(dragTarget, imitateDragEnd) {
_currentDragTarget = dragTarget;
_initialDragTargetRect = getElementRect(dragTarget);
_dragTargetRectDidChange = false;
_imitateCurrentDragEnd = imitateDragEnd;
},
handleDragEnd() {
_currentDragTarget = null;
_initialDragTargetRect = null;
_dragTargetRectDidChange = false;
_imitateCurrentDragEnd = null;
},
handleDragOver(e, dropEffect) {
// As event bubbles top-down, first specified effect will be used
if (!_currentDropEffect) {
_currentDropEffect = dropEffect;
}
}
};
module.exports = NativeDragDropSupport; | Use mouse events to unambiguously know that dragging has ended
| modules/utils/NativeDragDropSupport.js | Use mouse events to unambiguously know that dragging has ended | <ide><path>odules/utils/NativeDragDropSupport.js
<ide> _initialDragTargetRect,
<ide> _imitateCurrentDragEnd,
<ide> _dragTargetRectDidChange,
<del> _lastDragSourceCheckTimeout,
<ide> _currentDropEffect;
<ide>
<ide> function getElementRect(el) {
<ide> if (isWebkit() && checkIfCurrentDragTargetRectChanged()) {
<ide> // Prevent animating to incorrect position
<ide> e.preventDefault();
<del> } else if (isFirefox()) {
<del>
<del> // Firefox won't trigger a global `drop` if source node was removed.
<del> // It won't trigger `mouseup` either. It *will* however trigger `dragover`
<del> // continually during drag, so our strategy is to simply wait until `dragover`
<del> // has stopped firing.
<del>
<del> clearTimeout(_lastDragSourceCheckTimeout);
<del> _lastDragSourceCheckTimeout = setTimeout(
<del> triggerDragEndIfDragSourceWasRemovedFromDOM,
<del> 140 // 70 seems enough on OS X with FF33, double it to be sure
<del> );
<ide> }
<ide> });
<ide>
<ide>
<ide> if (isFileDragDropEvent(e)) {
<ide> DragDropActionCreators.endDragging();
<del> } else if (!isFirefox()) {
<del> triggerDragEndIfDragSourceWasRemovedFromDOM();
<ide> }
<add>
<add> triggerDragEndIfDragSourceWasRemovedFromDOM();
<ide> });
<add>
<add> // Mouse events tell us that dragging has ended but `dragend` didn't dire.
<add> // This may happen if source DOM was removed while dragging.
<add>
<add> window.addEventListener('mousein', triggerDragEndIfDragSourceWasRemovedFromDOM);
<add> window.addEventListener('mouseout', triggerDragEndIfDragSourceWasRemovedFromDOM);
<add> window.addEventListener('mouseenter', triggerDragEndIfDragSourceWasRemovedFromDOM);
<add> window.addEventListener('mouseleave', triggerDragEndIfDragSourceWasRemovedFromDOM);
<add> window.addEventListener('mousemove', triggerDragEndIfDragSourceWasRemovedFromDOM);
<ide> }
<ide>
<ide> var NativeDragDropSupport = { |
|
Java | apache-2.0 | 616d1c4200b70661d9299a78a754cf136aa332ff | 0 | openintents/filemanager,imoblife/filemanager,pacificIT/filemanager,imoblife/filemanager,veniosg/Dir,pacificIT/filemanager | package org.openintents.filemanager.view;
import java.io.File;
import java.util.HashMap;
import org.openintents.filemanager.R;
import org.openintents.filemanager.view.PathBar.Mode;
import android.content.Context;
import android.util.AttributeSet;
import android.util.TypedValue;
import android.view.View;
import android.view.ViewGroup;
import android.view.View.OnLongClickListener;
import android.widget.Button;
import android.widget.ImageButton;
import android.widget.LinearLayout;
/**
* This class handles the displaying of children in {@link Mode.STANDARD_INPUT}, including choosing which children to display, how, and where. It automatically uses the {@link PathBar#mCurrentDirectory} field. <b>Note: </b> Never use this with
* a width of WRAP_CONTENT.
*/
class PathButtonLayout extends LinearLayout implements OnLongClickListener {
private PathBar mPathBar = null;
/** <absolute path, R.drawable id of image to use> */
public static HashMap<String, Integer> mPathDrawables = new HashMap<String, Integer>();
public PathButtonLayout(Context context) {
super(context);
init();
}
public PathButtonLayout(Context context, AttributeSet attrs) {
super(context, attrs);
init();
}
private void init() {
this.setOrientation(LinearLayout.HORIZONTAL);
this.setOnLongClickListener(this);
mPathDrawables.put("/sdcard", R.drawable.ic_navbar_sdcard);
mPathDrawables.put("/mnt/sdcard", R.drawable.ic_navbar_sdcard);
mPathDrawables.put("/mnt/sdcard-ext", R.drawable.ic_navbar_sdcard);
mPathDrawables.put("/mnt/sdcard0", R.drawable.ic_navbar_sdcard);
mPathDrawables.put("/mnt/sdcard1", R.drawable.ic_navbar_sdcard);
mPathDrawables.put("/mnt/sdcard2", R.drawable.ic_navbar_sdcard);
mPathDrawables.put("/", R.drawable.ic_navbar_home);
}
public void setNavigationBar(PathBar pathbar) {
mPathBar = pathbar;
}
/**
* Call to properly refresh this {@link PathButtonLayout}'s contents based on the fPath parameter.
*/
public void refresh(File fPath) {
// Reload buttons.
this.removeAllViews();
addPathButtons(fPath);
// Redraw.
invalidate();
}
/**
* Adds the proper buttons according to the fPath parameter.
*/
private void addPathButtons(File fPath) {
StringBuilder cPath = new StringBuilder();
char cChar;
String path = fPath.getAbsolutePath();
for (int i = 0; i < path.length(); i++) {
cChar = path.charAt(i);
cPath.append(cChar);
if ((cChar == '/' || i == path.length() - 1)) { // if folder name ended, or path string ended but not if we 're on root
// add a button
this.addView(PathButtonFactory.newButton(cPath.toString(),
mPathBar));
}
}
}
/**
* Provides a modified implementation of the layoutHorizontal() method of LinearLayout. Removes all children that don't fully fit in this {@link PathButtonLayout}.
*/
@Override
protected void onLayout(boolean changed, int l, int t, int r, int b) {
final boolean isLayoutRtl = false;
final int paddingTop = getPaddingTop();
if (this.getChildCount() > 0)
keepFittingChildren();
int childTop;
int childLeft;
// Where bottom of child should go
final int height = getBottom() - getTop();
// Space available for child
int childSpace = height - paddingTop - getPaddingBottom();
final int count = getChildCount();
childLeft = getPaddingLeft();
int start = 0;
int dir = 1;
// In case of RTL, start drawing from the last child.
if (isLayoutRtl) {
start = count - 1;
dir = -1;
}
for (int i = 0; i < count; i++) {
int childIndex = start + dir * i;
final View child = getChildAt(childIndex);
if (child == null) {
childLeft += 0;
} else if (child.getVisibility() != GONE) {
final int childWidth = child.getMeasuredWidth();
final int childHeight = child.getMeasuredHeight();
final LinearLayout.LayoutParams lp = (LinearLayout.LayoutParams) child
.getLayoutParams();
childTop = paddingTop + ((childSpace - childHeight) / 2)
+ lp.topMargin - lp.bottomMargin;
childLeft += lp.leftMargin;
setChildFrame(child, childLeft, // originally childLeft + getLocationOffset(child)
childTop, childWidth, childHeight);
childLeft += childWidth + lp.rightMargin; // originally childLeft += childWidth + lp.rightMargin + getNextLocationOffset(child);
i += 0; // originally getChildrenSkipCount(child, childIndex);
}
}
}
@Override
protected void measureChildWithMargins(View child,
int parentWidthMeasureSpec, int widthUsed,
int parentHeightMeasureSpec, int heightUsed) {
final MarginLayoutParams lp = (MarginLayoutParams) child
.getLayoutParams();
final int childWidthMeasureSpec = getChildMeasureSpec(
parentWidthMeasureSpec, getPaddingLeft() + getPaddingRight()
+ lp.leftMargin + lp.rightMargin, lp.width);
final int childHeightMeasureSpec = getChildMeasureSpec(
parentHeightMeasureSpec, getPaddingTop() + getPaddingBottom()
+ lp.topMargin + lp.bottomMargin, lp.height);
child.measure(childWidthMeasureSpec, childHeightMeasureSpec);
}
private void setChildFrame(View child, int left, int top, int width,
int height) {
child.layout(left, top, left + width, top + height);
}
/**
* Checks this {@link ViewGroup}'s children and keeps the ones that fit inside it.
*/
private void keepFittingChildren() {
View child = null;
int childrenToDraw = 0;
int sumWidth = 0;
int index = this.getChildCount() - 1;
do {
child = this.getChildAt(index);
sumWidth += child.getMeasuredWidth();
childrenToDraw++;
index--;
} while (sumWidth <= this.getMeasuredWidth() && index >= 0);
if (sumWidth > this.getMeasuredWidth()) { // if the view width has been passed
// keep one child less
childrenToDraw--;
}
int i = 0;
int childrenCount = this.getChildCount();
for (i = 0; i < childrenCount - childrenToDraw; i++) {
this.removeViewAt(0);
}
}
/**
* Add an icon to be shown instead of a the directory name.
*
* @param path
* The path on which to display the icon.
* @param drawableResourceId
* The icon' resource id.
*/
public void addPathDrawable(String path, int drawableResourceId) {
mPathDrawables.put(path, drawableResourceId);
}
public static HashMap<String, Integer> getPathDrawables() {
return mPathDrawables;
}
private static class PathButtonFactory {
/**
* Creates a Button or ImageButton according to the path. e.g. {@code if(file.getAbsolutePath() == '/')}, it should return an ImageButton with the home drawable on it.
*
* @param file
* The directory this button will represent.
* @param navbar
* The {@link PathBar} which will contain the created buttons.
* @return An {@link ImageButton} or a {@link Button}.
*/
private static View newButton(File file, final PathBar navbar) {
View btn = null;
if (mPathDrawables.containsKey(file.getAbsolutePath())) {
btn = new ImageButton(navbar.getContext());
((ImageButton) btn).setImageResource(mPathDrawables.get(file
.getAbsolutePath()));
((ImageButton) btn).setAdjustViewBounds(true);
} else {
btn = new Button(navbar.getContext());
((Button) btn).setText(file.getName());
((Button) btn).setMaxLines(1);
((Button) btn).setTextColor(navbar.getResources().getColor(
R.color.navbar_details));
((Button) btn).setTextSize(TypedValue.COMPLEX_UNIT_SP, 18);
}
android.widget.LinearLayout.LayoutParams params = new android.widget.LinearLayout.LayoutParams(
LayoutParams.WRAP_CONTENT, LayoutParams.MATCH_PARENT);
params.rightMargin = (int) TypedValue.applyDimension(
TypedValue.COMPLEX_UNIT_DIP, 4, navbar.getResources()
.getDisplayMetrics());
btn.setLayoutParams(params);
btn.setTag(file);
btn.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
navbar.cd((File) v.getTag());
}
});
btn.setOnLongClickListener(navbar.getPathButtonLayout());
btn.setBackgroundResource(R.drawable.bg_navbar_btn_standard);
return btn;
}
/**
* @see {@link #newButton(File)}
*/
private static View newButton(String path, PathBar navbar) {
return newButton(new File(path), navbar);
}
}
@Override
public boolean onLongClick(View v) {
mPathBar.switchToManualInput();
return true;
}
} | FileManager/src/org/openintents/filemanager/view/PathButtonLayout.java | package org.openintents.filemanager.view;
import java.io.File;
import java.util.HashMap;
import org.openintents.filemanager.R;
import org.openintents.filemanager.view.PathBar.Mode;
import android.content.Context;
import android.util.AttributeSet;
import android.util.TypedValue;
import android.view.View;
import android.view.ViewGroup;
import android.view.View.OnLongClickListener;
import android.widget.Button;
import android.widget.ImageButton;
import android.widget.LinearLayout;
/**
* This class handles the displaying of children in {@link Mode.STANDARD_INPUT}, including choosing which children to display, how, and where. It automatically uses the {@link PathBar#mCurrentDirectory} field. <b>Note: </b> Never use this with
* a width of WRAP_CONTENT.
*/
class PathButtonLayout extends LinearLayout implements OnLongClickListener {
private PathBar mPathBar = null;
/** <absolute path, R.drawable id of image to use> */
public static HashMap<String, Integer> mPathDrawables = new HashMap<String, Integer>();
public PathButtonLayout(Context context) {
super(context);
init();
}
public PathButtonLayout(Context context, AttributeSet attrs) {
super(context, attrs);
init();
}
private void init() {
this.setOrientation(LinearLayout.HORIZONTAL);
this.setOnLongClickListener(this);
mPathDrawables.put("/sdcard", R.drawable.ic_navbar_sdcard);
mPathDrawables.put("/mnt/sdcard", R.drawable.ic_navbar_sdcard);
mPathDrawables.put("/mnt/sdcard-ext", R.drawable.ic_navbar_sdcard);
mPathDrawables.put("/mnt/sdcard2", R.drawable.ic_navbar_sdcard);
mPathDrawables.put("/", R.drawable.ic_navbar_home);
}
public void setNavigationBar(PathBar pathbar) {
mPathBar = pathbar;
}
/**
* Call to properly refresh this {@link PathButtonLayout}'s contents based on the fPath parameter.
*/
public void refresh(File fPath) {
// Reload buttons.
this.removeAllViews();
addPathButtons(fPath);
// Redraw.
invalidate();
}
/**
* Adds the proper buttons according to the fPath parameter.
*/
private void addPathButtons(File fPath) {
StringBuilder cPath = new StringBuilder();
char cChar;
String path = fPath.getAbsolutePath();
for (int i = 0; i < path.length(); i++) {
cChar = path.charAt(i);
cPath.append(cChar);
if ((cChar == '/' || i == path.length() - 1)) { // if folder name ended, or path string ended but not if we 're on root
// add a button
this.addView(PathButtonFactory.newButton(cPath.toString(),
mPathBar));
}
}
}
/**
* Provides a modified implementation of the layoutHorizontal() method of LinearLayout. Removes all children that don't fully fit in this {@link PathButtonLayout}.
*/
@Override
protected void onLayout(boolean changed, int l, int t, int r, int b) {
final boolean isLayoutRtl = false;
final int paddingTop = getPaddingTop();
if (this.getChildCount() > 0)
keepFittingChildren();
int childTop;
int childLeft;
// Where bottom of child should go
final int height = getBottom() - getTop();
// Space available for child
int childSpace = height - paddingTop - getPaddingBottom();
final int count = getChildCount();
childLeft = getPaddingLeft();
int start = 0;
int dir = 1;
// In case of RTL, start drawing from the last child.
if (isLayoutRtl) {
start = count - 1;
dir = -1;
}
for (int i = 0; i < count; i++) {
int childIndex = start + dir * i;
final View child = getChildAt(childIndex);
if (child == null) {
childLeft += 0;
} else if (child.getVisibility() != GONE) {
final int childWidth = child.getMeasuredWidth();
final int childHeight = child.getMeasuredHeight();
final LinearLayout.LayoutParams lp = (LinearLayout.LayoutParams) child
.getLayoutParams();
childTop = paddingTop + ((childSpace - childHeight) / 2)
+ lp.topMargin - lp.bottomMargin;
childLeft += lp.leftMargin;
setChildFrame(child, childLeft, // originally childLeft + getLocationOffset(child)
childTop, childWidth, childHeight);
childLeft += childWidth + lp.rightMargin; // originally childLeft += childWidth + lp.rightMargin + getNextLocationOffset(child);
i += 0; // originally getChildrenSkipCount(child, childIndex);
}
}
}
@Override
protected void measureChildWithMargins(View child,
int parentWidthMeasureSpec, int widthUsed,
int parentHeightMeasureSpec, int heightUsed) {
final MarginLayoutParams lp = (MarginLayoutParams) child
.getLayoutParams();
final int childWidthMeasureSpec = getChildMeasureSpec(
parentWidthMeasureSpec, getPaddingLeft() + getPaddingRight()
+ lp.leftMargin + lp.rightMargin, lp.width);
final int childHeightMeasureSpec = getChildMeasureSpec(
parentHeightMeasureSpec, getPaddingTop() + getPaddingBottom()
+ lp.topMargin + lp.bottomMargin, lp.height);
child.measure(childWidthMeasureSpec, childHeightMeasureSpec);
}
private void setChildFrame(View child, int left, int top, int width,
int height) {
child.layout(left, top, left + width, top + height);
}
/**
* Checks this {@link ViewGroup}'s children and keeps the ones that fit inside it.
*/
private void keepFittingChildren() {
View child = null;
int childrenToDraw = 0;
int sumWidth = 0;
int index = this.getChildCount() - 1;
do {
child = this.getChildAt(index);
sumWidth += child.getMeasuredWidth();
childrenToDraw++;
index--;
} while (sumWidth <= this.getMeasuredWidth() && index >= 0);
if (sumWidth > this.getMeasuredWidth()) { // if the view width has been passed
// keep one child less
childrenToDraw--;
}
int i = 0;
int childrenCount = this.getChildCount();
for (i = 0; i < childrenCount - childrenToDraw; i++) {
this.removeViewAt(0);
}
}
/**
* Add an icon to be shown instead of a the directory name.
*
* @param path
* The path on which to display the icon.
* @param drawableResourceId
* The icon' resource id.
*/
public void addPathDrawable(String path, int drawableResourceId) {
mPathDrawables.put(path, drawableResourceId);
}
public static HashMap<String, Integer> getPathDrawables() {
return mPathDrawables;
}
private static class PathButtonFactory {
/**
* Creates a Button or ImageButton according to the path. e.g. {@code if(file.getAbsolutePath() == '/')}, it should return an ImageButton with the home drawable on it.
*
* @param file
* The directory this button will represent.
* @param navbar
* The {@link PathBar} which will contain the created buttons.
* @return An {@link ImageButton} or a {@link Button}.
*/
private static View newButton(File file, final PathBar navbar) {
View btn = null;
if (mPathDrawables.containsKey(file.getAbsolutePath())) {
btn = new ImageButton(navbar.getContext());
((ImageButton) btn).setImageResource(mPathDrawables.get(file
.getAbsolutePath()));
((ImageButton) btn).setAdjustViewBounds(true);
} else {
btn = new Button(navbar.getContext());
((Button) btn).setText(file.getName());
((Button) btn).setMaxLines(1);
((Button) btn).setTextColor(navbar.getResources().getColor(
R.color.navbar_details));
((Button) btn).setTextSize(TypedValue.COMPLEX_UNIT_SP, 18);
}
android.widget.LinearLayout.LayoutParams params = new android.widget.LinearLayout.LayoutParams(
LayoutParams.WRAP_CONTENT, LayoutParams.MATCH_PARENT);
params.rightMargin = (int) TypedValue.applyDimension(
TypedValue.COMPLEX_UNIT_DIP, 4, navbar.getResources()
.getDisplayMetrics());
btn.setLayoutParams(params);
btn.setTag(file);
btn.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
navbar.cd((File) v.getTag());
}
});
btn.setOnLongClickListener(navbar.getPathButtonLayout());
btn.setBackgroundResource(R.drawable.bg_navbar_btn_standard);
return btn;
}
/**
* @see {@link #newButton(File)}
*/
private static View newButton(String path, PathBar navbar) {
return newButton(new File(path), navbar);
}
}
@Override
public boolean onLongClick(View v) {
mPathBar.switchToManualInput();
return true;
}
} | Added sdcard icon for some more paths
| FileManager/src/org/openintents/filemanager/view/PathButtonLayout.java | Added sdcard icon for some more paths | <ide><path>ileManager/src/org/openintents/filemanager/view/PathButtonLayout.java
<ide> mPathDrawables.put("/sdcard", R.drawable.ic_navbar_sdcard);
<ide> mPathDrawables.put("/mnt/sdcard", R.drawable.ic_navbar_sdcard);
<ide> mPathDrawables.put("/mnt/sdcard-ext", R.drawable.ic_navbar_sdcard);
<add> mPathDrawables.put("/mnt/sdcard0", R.drawable.ic_navbar_sdcard);
<add> mPathDrawables.put("/mnt/sdcard1", R.drawable.ic_navbar_sdcard);
<ide> mPathDrawables.put("/mnt/sdcard2", R.drawable.ic_navbar_sdcard);
<ide> mPathDrawables.put("/", R.drawable.ic_navbar_home);
<ide> } |
|
Java | apache-2.0 | eac43d38d22bd59d7ee70c04f684b4848073b946 | 0 | meetdestiny/geronimo-trader,apache/geronimo,vibe13/geronimo,apache/geronimo,apache/geronimo,vibe13/geronimo,meetdestiny/geronimo-trader,vibe13/geronimo,meetdestiny/geronimo-trader,apache/geronimo,vibe13/geronimo | /**
*
* Copyright 2004, 2005 The Apache Software Foundation or its licensors, as applicable.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.console.util;
public interface ObjectNameConstants {
// Security object names
public static final String SE_REALM_MBEAN_NAME = "geronimo.server:J2EEApplication=org/apache/geronimo/Console,J2EEModule=null,J2EEServer=geronimo,j2eeType=GBean,name=PropertiesLoginManager";
public static final String SE_REALM_IMMUTABLE_MBEAN_NAME = "geronimo.server:name=PropertiesLoginManager,J2EEServer=geronimo,J2EEApplication=null,j2eeType=GBean,J2EEModule=null";
public static final String SECURITY_REALM = "geronimo.security:type=SecurityRealm,*";
public static final String ROOT_LOGGER_OBJECT_NAME = "geronimo.server:name=Logger,J2EEServer=geronimo,J2EEApplication=null,j2eeType=GBean,J2EEModule=org/apache/geronimo/System";
public static final String REQUEST_LOGGER_OBJECT_NAME = "geronimo.server:J2EEApplication=null,J2EEModule=org/apache/geronimo/Server,J2EEServer=geronimo,j2eeType=GBean,name=JettyRequestLog";
public static final String DERBY_OBJECT_NAME = "geronimo.server:name=DerbySystem,J2EEServer=geronimo,J2EEApplication=null,j2eeType=GBean,J2EEModule=null";
public static final String REPO_OBJECT_NAME = "geronimo.server:name=Repository,J2EEServer=geronimo,J2EEApplication=null,j2eeType=GBean,J2EEModule=org/apache/geronimo/System";
public static final String WEBCONTAINER_OBJECT_NAME = "geronimo.server:J2EEApplication=null,J2EEModule=org/apache/geronimo/Server,J2EEServer=geronimo,j2eeType=GBean,name=WebContainer";
public static final String SERVER_INFO_OBJECT_NAME = "geronimo.server:name=ServerInfo,J2EEServer=geronimo,J2EEApplication=null,j2eeType=GBean,J2EEModule=org/apache/geronimo/System";
public static final String JVM_IMPL_NAME = "geronimo.server:j2eeType=JVM,J2EEServer=geronimo,name=JVM";
public static final String DEPLOYER_OBJECT_NAME = "geronimo.server:J2EEApplication=null,J2EEModule=org/apache/geronimo/RuntimeDeployer,J2EEServer=geronimo,j2eeType=Deployer,name=Deployer";
public static final String JETTY_HTTP_CONNECTOR_NAME = "geronimo.server:J2EEServer=geronimo,j2eeType=GBean,name=JettyWebConnector,*";
// TODO: Put the appropriate name here
public static final String JETTY_HTTPS_CONNECTOR_NAME = "geronimo.server:name=JettyWebConnector.HTTPS.8443,J2EEServer=geronimo,j2eeType=GBean,*";
public static final String JCA_MANAGED_CF_QUERY = "*:j2eeType=JCAManagedConnectionFactory,*";
public static final String CONFIG_GBEAN_PREFIX = "geronimo.config:name=";
}
| sandbox/console-standard/src/java/org/apache/geronimo/console/util/ObjectNameConstants.java | /**
*
* Copyright 2004, 2005 The Apache Software Foundation or its licensors, as applicable.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.console.util;
public interface ObjectNameConstants {
// Security object names
public static final String SE_REALM_MBEAN_NAME = "geronimo.server:J2EEApplication=org/apache/geronimo/Console,J2EEModule=null,J2EEServer=geronimo,j2eeType=GBean,name=PropertiesLoginManager";
public static final String SE_REALM_IMMUTABLE_MBEAN_NAME = "geronimo.server:name=PropertiesLoginManager,J2EEServer=geronimo,J2EEApplication=null,j2eeType=GBean,J2EEModule=null";
public static final String SECURITY_REALM = "geronimo.security:type=SecurityRealm,*";
public static final String ROOT_LOGGER_OBJECT_NAME = "geronimo.server:name=Logger,J2EEServer=geronimo,J2EEApplication=null,j2eeType=GBean,J2EEModule=org/apache/geronimo/System";
public static final String REQUEST_LOGGER_OBJECT_NAME = "geronimo.server:J2EEApplication=null,J2EEModule=org/apache/geronimo/Server,J2EEServer=geronimo,j2eeType=GBean,name=JettyRequestLog";
public static final String DERBY_OBJECT_NAME = "geronimo.server:name=DerbySystem,J2EEServer=geronimo,J2EEApplication=null,j2eeType=GBean,J2EEModule=null";
public static final String REPO_OBJECT_NAME = "geronimo.server:name=Repository,J2EEServer=geronimo,J2EEApplication=null,j2eeType=GBean,J2EEModule=org/apache/geronimo/System";
public static final String WEBCONTAINER_OBJECT_NAME = "geronimo.server:J2EEApplication=null,J2EEModule=org/apache/geronimo/Server,J2EEServer=geronimo,j2eeType=GBean,name=JettyWebContainer";
public static final String SERVER_INFO_OBJECT_NAME = "geronimo.server:name=ServerInfo,J2EEServer=geronimo,J2EEApplication=null,j2eeType=GBean,J2EEModule=org/apache/geronimo/System";
public static final String JVM_IMPL_NAME = "geronimo.server:j2eeType=JVM,J2EEServer=geronimo,name=JVM";
public static final String DEPLOYER_OBJECT_NAME = "geronimo.server:J2EEApplication=null,J2EEModule=org/apache/geronimo/RuntimeDeployer,J2EEServer=geronimo,j2eeType=Deployer,name=Deployer";
public static final String JETTY_HTTP_CONNECTOR_NAME = "geronimo.server:J2EEServer=geronimo,j2eeType=GBean,name=JettyWebConnector,*";
// TODO: Put the appropriate name here
public static final String JETTY_HTTPS_CONNECTOR_NAME = "geronimo.server:name=JettyWebConnector.HTTPS.8443,J2EEServer=geronimo,j2eeType=GBean,*";
public static final String JCA_MANAGED_CF_QUERY = "*:j2eeType=JCAManagedConnectionFactory,*";
public static final String CONFIG_GBEAN_PREFIX = "geronimo.config:name=";
}
| Change suggested by GERONIMO-796. This should be short-lived, as I
expect all the web features will break for Tomcat, and I hope
to move web containers/connectors to the management API shortly.
git-svn-id: d69ffe4ccc4861bf06065bd0072b85c931fba7ed@225114 13f79535-47bb-0310-9956-ffa450edef68
| sandbox/console-standard/src/java/org/apache/geronimo/console/util/ObjectNameConstants.java | Change suggested by GERONIMO-796. This should be short-lived, as I expect all the web features will break for Tomcat, and I hope to move web containers/connectors to the management API shortly. | <ide><path>andbox/console-standard/src/java/org/apache/geronimo/console/util/ObjectNameConstants.java
<ide>
<ide> public static final String REPO_OBJECT_NAME = "geronimo.server:name=Repository,J2EEServer=geronimo,J2EEApplication=null,j2eeType=GBean,J2EEModule=org/apache/geronimo/System";
<ide>
<del> public static final String WEBCONTAINER_OBJECT_NAME = "geronimo.server:J2EEApplication=null,J2EEModule=org/apache/geronimo/Server,J2EEServer=geronimo,j2eeType=GBean,name=JettyWebContainer";
<add> public static final String WEBCONTAINER_OBJECT_NAME = "geronimo.server:J2EEApplication=null,J2EEModule=org/apache/geronimo/Server,J2EEServer=geronimo,j2eeType=GBean,name=WebContainer";
<ide>
<ide> public static final String SERVER_INFO_OBJECT_NAME = "geronimo.server:name=ServerInfo,J2EEServer=geronimo,J2EEApplication=null,j2eeType=GBean,J2EEModule=org/apache/geronimo/System";
<ide> |
|
JavaScript | mit | 243103912a582954f307149a51c1787e17744740 | 0 | alexbooker/memento | var moment = require("moment");
var sinon = require("sinon");
var chai = require("chai");
var should = chai.should();
var PasswordReboot = require("../lib/index");
describe("createToken", function() {
it("should return a token", function() {
var sut = new PasswordReboot("t9m0HLkdEyWQ6XN");
var user = {
username: "[email protected]"
};
var actual = sut.createToken(user);
console.log(actual);
should.exist(actual);
});
});
describe("verifyToken", function() {
it("should succeed if token is correct", function() {
var sut = new PasswordReboot("t9m0HLkdEyWQ6XN");
var user = {
username: "[email protected]"
};
var token = sut.createToken(user);
var actual = sut.verifyToken(user, token);
actual.should.equal(true);
});
it("should fail if username changes", function() {
var sut = new PasswordReboot("t9m0HLkdEyWQ6XN");
var user = {
username: "[email protected]"
};
var token = sut.createToken(user);
user.username = "[email protected]";
var actual = sut.verifyToken(user, token);
actual.should.equal(false);
});
it("should fail if any user property changes", function() {
var sut = new PasswordReboot("t9m0HLkdEyWQ6XN");
var user = {
username: "[email protected]",
salt: "23423412341233"
};
var token = sut.createToken(user);
user.salt = "8888888";
var actual = sut.verifyToken(user, token);
actual.should.equal(false);
});
it("should fail if token has changed", function() {
var passwordReboot = new PasswordReboot("t9m0HLkdEyWQ6XN");
var user = {
username: "[email protected]"
};
var token = passwordReboot.createToken(user);
var sut = new PasswordReboot("0qNR4pkBYA");
var actual = sut.verifyToken(user, token);
actual.should.equal(false);
});
it("should fail if token has been tampered with", function() {
var sut = new PasswordReboot("t9m0HLkdEyWQ6XN");
var user = {
username: "[email protected]"
};
var actual = sut.verifyToken(user, "some invalid token...");
actual.should.equal(false);
});
it("should fail if token has expired", function() {
var sut = new PasswordReboot("t9m0HLkdEyWQ6XN");
var user = {
username: "[email protected]"
};
var token = sut.createToken(user);
var clock = sinon.useFakeTimers(moment().add({
minutes: 20
}).valueOf());
var actual = sut.verifyToken(user, token);
actual.should.equal(false);
clock.reset();
});
it("should fail if token has expired custom minutes until expiration",
function() {
var sut = new PasswordReboot("t9m0HLkdEyWQ6XN");
var user = {
username: "[email protected]"
};
var MINUTES_UNTIL_EXPIRATION = 10;
var token = sut.createToken(user, MINUTES_UNTIL_EXPIRATION);
var clock = sinon.useFakeTimers(moment().add({
minutes: MINUTES_UNTIL_EXPIRATION
}).valueOf());
var actual = sut.verifyToken(user, token);
actual.should.equal(false);
clock.reset();
});
it("should succeed if token has not expired custom minutes until expiration",
function() {
var sut = new PasswordReboot("t9m0HLkdEyWQ6XN");
var user = {
username: "[email protected]"
};
var MINUTES_UNTIL_EXPIRATION = 10;
var token = sut.createToken(user, MINUTES_UNTIL_EXPIRATION);
var actual = sut.verifyToken(user, token);
actual.should.equal(true);
});
it("should fail if expiration timestamp has been tampered with", function() {
var sut = new PasswordReboot("t9m0HLkdEyWQ6XN");
var user = {
username: "[email protected]"
};
var token = sut.createToken(user);
var parts = token.split(sut._DELIMITER);
var hmac = parts[1];
var expirationTime = Number.MAX_VALUE;
token = expirationTime + sut._DELIMITER + hmac;
var actual = sut.verifyToken(user, token);
actual.should.equal(false);
});
});
| test/test.js | var should = require('chai').should();
var sinon = require("sinon");
var moment = require("moment");
var PasswordReboot = require("../lib/index");
describe("PasswordReboot", function() {
describe("createToken", function() {
it("should return a token", function() {
var sut = new PasswordReboot("t9m0HLkdEyWQ6XN");
sut.DELIMITER = "cheese";
var user = {
username: "[email protected]"
};
var actual = sut.createToken(user);
console.log(actual);
should.exist(actual);
});
});
describe("verifyToken", function() {
it("should succeed if token is correct", function() {
var sut = new PasswordReboot("t9m0HLkdEyWQ6XN");
var user = {
username: "[email protected]"
};
var token = sut.createToken(user);
var actual = sut.verifyToken(user, token);
actual.should.equal(true);
});
it("should fail if username changes", function() {
var sut = new PasswordReboot("t9m0HLkdEyWQ6XN");
var user = {
username: "[email protected]"
};
var token = sut.createToken(user);
user.username = "[email protected]";
var actual = sut.verifyToken(user, token);
actual.should.equal(false);
});
it("should fail if any user property changes", function() {
var sut = new PasswordReboot("t9m0HLkdEyWQ6XN");
var user = {
username: "[email protected]",
salt: "23423412341233"
};
var token = sut.createToken(user);
user.salt = "8888888";
var actual = sut.verifyToken(user, token);
actual.should.equal(false);
});
it("should fail if token has changed", function() {
var passwordReboot = new PasswordReboot("t9m0HLkdEyWQ6XN");
var user = {
username: "[email protected]"
};
var token = passwordReboot.createToken(user);
var sut = new PasswordReboot("0qNR4pkBYA");
var actual = sut.verifyToken(user, token);
actual.should.equal(false);
});
it("should fail if token has been tampered with", function() {
var sut = new PasswordReboot("t9m0HLkdEyWQ6XN");
var user = {
username: "[email protected]"
};
var actual = sut.verifyToken(user, "some invalid token...");
actual.should.equal(false);
});
it("should fail if token has expired", function() {
var sut = new PasswordReboot("t9m0HLkdEyWQ6XN");
var user = {
username: "[email protected]"
};
var token = sut.createToken(user);
var clock = sinon.useFakeTimers(moment().add({ minutes: 20 }).valueOf());
var actual = sut.verifyToken(user, token);
actual.should.equal(false);
clock.reset();
});
it("should fail if token has expired custom minutes until expiration", function() {
var sut = new PasswordReboot("t9m0HLkdEyWQ6XN");
var user = {
username: "[email protected]"
};
var MINUTES_UNTIL_EXPIRATION = 10;
var token = sut.createToken(user, MINUTES_UNTIL_EXPIRATION);
var clock = sinon.useFakeTimers(moment().add({ minutes: MINUTES_UNTIL_EXPIRATION }).valueOf());
var actual = sut.verifyToken(user, token);
actual.should.equal(false);
clock.reset();
});
it("should succeed if token has not expired custom minutes until expiration", function() {
var sut = new PasswordReboot("t9m0HLkdEyWQ6XN");
var user = {
username: "[email protected]"
};
var MINUTES_UNTIL_EXPIRATION = 10;
var token = sut.createToken(user, MINUTES_UNTIL_EXPIRATION);
var actual = sut.verifyToken(user, token);
actual.should.equal(true);
});
it("should fail if expiration timestamp has been tampered with", function() {
var sut = new PasswordReboot("t9m0HLkdEyWQ6XN");
var user = {
username: "[email protected]"
};
var token = sut.createToken(user);
var parts = token.split(sut._DELIMITER);
var hmac = parts[1];
var expirationTime = Number.MAX_VALUE;
token = expirationTime + sut._DELIMITER + hmac;
var actual = sut.verifyToken(user, token);
actual.should.equal(false);
});
});
});
| reduced line width to 80 characters maximum
| test/test.js | reduced line width to 80 characters maximum | <ide><path>est/test.js
<del>var should = require('chai').should();
<del>var sinon = require("sinon");
<ide> var moment = require("moment");
<add>var sinon = require("sinon");
<add>var chai = require("chai");
<add>var should = chai.should();
<ide>
<ide> var PasswordReboot = require("../lib/index");
<ide>
<del>describe("PasswordReboot", function() {
<add>describe("createToken", function() {
<add> it("should return a token", function() {
<add> var sut = new PasswordReboot("t9m0HLkdEyWQ6XN");
<add> var user = {
<add> username: "[email protected]"
<add> };
<ide>
<del> describe("createToken", function() {
<del> it("should return a token", function() {
<del> var sut = new PasswordReboot("t9m0HLkdEyWQ6XN");
<del> sut.DELIMITER = "cheese";
<del> var user = {
<del> username: "[email protected]"
<del> };
<add> var actual = sut.createToken(user);
<add> console.log(actual);
<ide>
<del> var actual = sut.createToken(user);
<del> console.log(actual);
<add> should.exist(actual);
<add> });
<add>});
<ide>
<del> should.exist(actual);
<del> });
<add>describe("verifyToken", function() {
<add> it("should succeed if token is correct", function() {
<add> var sut = new PasswordReboot("t9m0HLkdEyWQ6XN");
<add> var user = {
<add> username: "[email protected]"
<add> };
<add> var token = sut.createToken(user);
<add>
<add> var actual = sut.verifyToken(user, token);
<add>
<add> actual.should.equal(true);
<ide> });
<ide>
<del> describe("verifyToken", function() {
<del> it("should succeed if token is correct", function() {
<del> var sut = new PasswordReboot("t9m0HLkdEyWQ6XN");
<del> var user = {
<del> username: "[email protected]"
<del> };
<del> var token = sut.createToken(user);
<add> it("should fail if username changes", function() {
<add> var sut = new PasswordReboot("t9m0HLkdEyWQ6XN");
<add> var user = {
<add> username: "[email protected]"
<add> };
<add> var token = sut.createToken(user);
<add> user.username = "[email protected]";
<ide>
<del> var actual = sut.verifyToken(user, token);
<add> var actual = sut.verifyToken(user, token);
<ide>
<del> actual.should.equal(true);
<del> });
<add> actual.should.equal(false);
<add> });
<ide>
<del> it("should fail if username changes", function() {
<del> var sut = new PasswordReboot("t9m0HLkdEyWQ6XN");
<del> var user = {
<del> username: "[email protected]"
<del> };
<del> var token = sut.createToken(user);
<del> user.username = "[email protected]";
<add> it("should fail if any user property changes", function() {
<add> var sut = new PasswordReboot("t9m0HLkdEyWQ6XN");
<add> var user = {
<add> username: "[email protected]",
<add> salt: "23423412341233"
<add> };
<add> var token = sut.createToken(user);
<add> user.salt = "8888888";
<ide>
<del> var actual = sut.verifyToken(user, token);
<add> var actual = sut.verifyToken(user, token);
<ide>
<del> actual.should.equal(false);
<del> });
<add> actual.should.equal(false);
<add> });
<ide>
<del> it("should fail if any user property changes", function() {
<del> var sut = new PasswordReboot("t9m0HLkdEyWQ6XN");
<del> var user = {
<del> username: "[email protected]",
<del> salt: "23423412341233"
<del> };
<del> var token = sut.createToken(user);
<del> user.salt = "8888888";
<add> it("should fail if token has changed", function() {
<add> var passwordReboot = new PasswordReboot("t9m0HLkdEyWQ6XN");
<add> var user = {
<add> username: "[email protected]"
<add> };
<add> var token = passwordReboot.createToken(user);
<add> var sut = new PasswordReboot("0qNR4pkBYA");
<ide>
<del> var actual = sut.verifyToken(user, token);
<add> var actual = sut.verifyToken(user, token);
<ide>
<del> actual.should.equal(false);
<del> });
<add> actual.should.equal(false);
<add> });
<ide>
<del> it("should fail if token has changed", function() {
<del> var passwordReboot = new PasswordReboot("t9m0HLkdEyWQ6XN");
<del> var user = {
<del> username: "[email protected]"
<del> };
<del> var token = passwordReboot.createToken(user);
<del> var sut = new PasswordReboot("0qNR4pkBYA");
<add> it("should fail if token has been tampered with", function() {
<add> var sut = new PasswordReboot("t9m0HLkdEyWQ6XN");
<add> var user = {
<add> username: "[email protected]"
<add> };
<ide>
<del> var actual = sut.verifyToken(user, token);
<add> var actual = sut.verifyToken(user, "some invalid token...");
<ide>
<del> actual.should.equal(false);
<del> });
<add> actual.should.equal(false);
<add> });
<ide>
<del> it("should fail if token has been tampered with", function() {
<del> var sut = new PasswordReboot("t9m0HLkdEyWQ6XN");
<del> var user = {
<del> username: "[email protected]"
<del> };
<add> it("should fail if token has expired", function() {
<add> var sut = new PasswordReboot("t9m0HLkdEyWQ6XN");
<add> var user = {
<add> username: "[email protected]"
<add> };
<add> var token = sut.createToken(user);
<add> var clock = sinon.useFakeTimers(moment().add({
<add> minutes: 20
<add> }).valueOf());
<ide>
<del> var actual = sut.verifyToken(user, "some invalid token...");
<add> var actual = sut.verifyToken(user, token);
<ide>
<del> actual.should.equal(false);
<del> });
<add> actual.should.equal(false);
<ide>
<del> it("should fail if token has expired", function() {
<del> var sut = new PasswordReboot("t9m0HLkdEyWQ6XN");
<del> var user = {
<del> username: "[email protected]"
<del> };
<del> var token = sut.createToken(user);
<del> var clock = sinon.useFakeTimers(moment().add({ minutes: 20 }).valueOf());
<add> clock.reset();
<add> });
<ide>
<del> var actual = sut.verifyToken(user, token);
<del>
<del> actual.should.equal(false);
<del>
<del> clock.reset();
<del> });
<del>
<del> it("should fail if token has expired custom minutes until expiration", function() {
<add> it("should fail if token has expired custom minutes until expiration",
<add> function() {
<ide> var sut = new PasswordReboot("t9m0HLkdEyWQ6XN");
<ide> var user = {
<ide> username: "[email protected]"
<ide> };
<ide> var MINUTES_UNTIL_EXPIRATION = 10;
<ide> var token = sut.createToken(user, MINUTES_UNTIL_EXPIRATION);
<del> var clock = sinon.useFakeTimers(moment().add({ minutes: MINUTES_UNTIL_EXPIRATION }).valueOf());
<add> var clock = sinon.useFakeTimers(moment().add({
<add> minutes: MINUTES_UNTIL_EXPIRATION
<add> }).valueOf());
<ide>
<ide> var actual = sut.verifyToken(user, token);
<ide>
<ide> });
<ide>
<ide>
<del> it("should succeed if token has not expired custom minutes until expiration", function() {
<add> it("should succeed if token has not expired custom minutes until expiration",
<add> function() {
<ide> var sut = new PasswordReboot("t9m0HLkdEyWQ6XN");
<ide> var user = {
<ide> username: "[email protected]"
<ide> actual.should.equal(true);
<ide> });
<ide>
<del> it("should fail if expiration timestamp has been tampered with", function() {
<del> var sut = new PasswordReboot("t9m0HLkdEyWQ6XN");
<del> var user = {
<del> username: "[email protected]"
<del> };
<del> var token = sut.createToken(user);
<del> var parts = token.split(sut._DELIMITER);
<del> var hmac = parts[1];
<del> var expirationTime = Number.MAX_VALUE;
<del> token = expirationTime + sut._DELIMITER + hmac;
<add> it("should fail if expiration timestamp has been tampered with", function() {
<add> var sut = new PasswordReboot("t9m0HLkdEyWQ6XN");
<add> var user = {
<add> username: "[email protected]"
<add> };
<add> var token = sut.createToken(user);
<add> var parts = token.split(sut._DELIMITER);
<add> var hmac = parts[1];
<add> var expirationTime = Number.MAX_VALUE;
<add> token = expirationTime + sut._DELIMITER + hmac;
<ide>
<del> var actual = sut.verifyToken(user, token);
<add> var actual = sut.verifyToken(user, token);
<ide>
<del> actual.should.equal(false);
<del> });
<del>
<del>
<add> actual.should.equal(false);
<ide> });
<del>
<ide> }); |
|
Java | apache-2.0 | 389d5fee750f6dff40fa7c6033fd32192fb25b61 | 0 | apache/geronimo,apache/geronimo,apache/geronimo,apache/geronimo | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.geronimo.system.plugin;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URL;
import java.net.URLConnection;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
import javax.security.auth.login.FailedLoginException;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.ParserConfigurationException;
import org.apache.geronimo.kernel.repository.Artifact;
import org.apache.geronimo.kernel.repository.FileWriteMonitor;
import org.apache.geronimo.kernel.repository.Version;
import org.apache.geronimo.kernel.repository.WriteableRepository;
import org.apache.geronimo.kernel.util.XmlUtil;
import org.apache.geronimo.system.plugin.model.PluginListType;
import org.apache.geronimo.crypto.encoders.Base64;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
/**
* @version $Rev$ $Date$
*/
public class RemoteSourceRepository implements SourceRepository {
private final URI base;
private final String username;
private final String password;
public RemoteSourceRepository(URI base, String username, String password) {
if (!base.getPath().endsWith("/")) {
throw new IllegalArgumentException("base uri must end with '/', not " + base);
}
this.base = base;
this.username = username;
this.password = password;
}
public PluginListType getPluginList() {
try {
URL uri = base.resolve("geronimo-plugins.xml").toURL();
InputStream in = openStream(uri);
if (in != null) {
try {
return PluginXmlUtil.loadPluginList(in);
} finally {
in.close();
}
}
} catch (Exception e) {
// TODO: log it?
}
return null;
}
public OpenResult open(final Artifact artifact, final FileWriteMonitor monitor) throws IOException, FailedLoginException {
// If the artifact version is resolved then look for the artifact in the repo
if (artifact.isResolved()) {
URL location = getURL(artifact);
OpenResult result = open(artifact, location);
if (result != null) {
return result;
}
Version version = artifact.getVersion();
// Snapshot artifacts can have a special filename in an online maven repo.
// The version number is replaced with a timestmap and build number.
// The maven-metadata file contains this extra information.
if (version.toString().indexOf("SNAPSHOT") >= 0 && !(version instanceof SnapshotVersion)) {
// base path for the artifact version in a maven repo
URI basePath = base.resolve(artifact.getGroupId().replace('.', '/') + "/" + artifact.getArtifactId() + "/" + version + "/");
// get the maven-metadata file
Document metadata = getMavenMetadata(basePath);
// determine the snapshot qualifier from the maven-metadata file
if (metadata != null) {
NodeList snapshots = metadata.getDocumentElement().getElementsByTagName("snapshot");
if (snapshots.getLength() >= 1) {
Element snapshot = (Element) snapshots.item(0);
List<String> timestamp = getChildrenText(snapshot, "timestamp");
List<String> buildNumber = getChildrenText(snapshot, "buildNumber");
if (timestamp.size() >= 1 && buildNumber.size() >= 1) {
try {
// recurse back into this method using a SnapshotVersion
SnapshotVersion snapshotVersion = new SnapshotVersion(version);
snapshotVersion.setBuildNumber(Integer.parseInt(buildNumber.get(0)));
snapshotVersion.setTimestamp(timestamp.get(0));
Artifact newQuery = new Artifact(artifact.getGroupId(), artifact.getArtifactId(), snapshotVersion, artifact.getType());
location = getURL(newQuery);
return open(artifact, location);
} catch (NumberFormatException nfe) {
// log.error("Could not create snapshot version for " + artifact, nfe);
}
} else {
// log.error("Could not create snapshot version for " + artifact);
}
}
}
}
return null;
}
// Version is not resolved. Look in maven-metadata.xml and maven-metadata-local.xml for
// the available version numbers. If found then recurse into the enclosing method with
// a resolved version number
else {
// base path for the artifact version in a maven repo
URI basePath = base.resolve(artifact.getGroupId().replace('.', '/') + "/" + artifact.getArtifactId() + "/");
// get the maven-metadata file
Document metadata = getMavenMetadata(basePath);
// determine the available versions from the maven-metadata file
if (metadata != null) {
Element root = metadata.getDocumentElement();
NodeList list = root.getElementsByTagName("versions");
list = ((Element) list.item(0)).getElementsByTagName("version");
Version[] available = new Version[list.getLength()];
for (int i = 0; i < available.length; i++) {
available[i] = new Version(getText(list.item(i)));
}
// desc sort
Arrays.sort(available, new Comparator<Version>() {
public int compare(Version o1, Version o2) {
return o2.toString().compareTo(o1.toString());
}
});
for (Version version : available) {
Artifact versionedArtifact = new Artifact(artifact.getGroupId(), artifact.getArtifactId(), version, artifact.getType());
URL location = getURL(versionedArtifact);
OpenResult result = open(versionedArtifact, location);
if (result != null) {
return result;
}
}
}
}
return null;
}
private OpenResult open(Artifact artifact, URL location) throws IOException, FailedLoginException {
InputStream in = openStream(location);
return (in == null) ? null : new RemoteOpenResult(artifact, in);
}
private InputStream openStream(URL location) throws IOException, FailedLoginException {
URLConnection con = location.openConnection();
if (con instanceof HttpURLConnection) {
HttpURLConnection http = (HttpURLConnection) con;
try {
http.connect();
} catch (IOException e) {
throw (IOException) new IOException("Cannot connect to "+location).initCause(e);
}
if (http.getResponseCode() == 401) { // need to authenticate
if (username == null || username.equals("")) {
throw new FailedLoginException("Server returned 401 " + http.getResponseMessage());
}
//TODO is it necessary to keep getting new http's ?
http = (HttpURLConnection) location.openConnection();
http.setRequestProperty("Authorization",
"Basic " + new String(Base64.encode((username + ":" + password).getBytes())));
http.connect();
if (http.getResponseCode() == 401) {
throw new FailedLoginException("Server returned 401 " + http.getResponseMessage());
} else if (http.getResponseCode() == 404) {
return null; // Not found at this repository
}
} else if (http.getResponseCode() == 404) {
return null; // Not found at this repository
}
return http.getInputStream();
}
return null;
}
private Document getMavenMetadata(URI base) throws IOException, FailedLoginException {
Document doc = null;
InputStream in = null;
try {
URL metaURL = base.resolve( "maven-metadata.xml").toURL();
in = openStream(metaURL);
if (in == null) { // check for local maven metadata
metaURL = base.resolve("maven-metadata-local.xml").toURL();
in = openStream(metaURL);
}
if (in != null) {
DocumentBuilder builder = XmlUtil.newDocumentBuilderFactory().newDocumentBuilder();
doc = builder.parse(in);
}
} catch (ParserConfigurationException e) {
throw (IOException)new IOException().initCause(e);
} catch (SAXException e) {
throw (IOException)new IOException().initCause(e);
} finally {
if (in == null) {
// log.info("No maven metadata available at " + base);
} else {
in.close();
}
}
return doc;
}
private URL getURL(Artifact configId) throws MalformedURLException {
String qualifiedVersion = configId.getVersion().toString();
if (configId.getVersion() instanceof SnapshotVersion) {
SnapshotVersion ssVersion = (SnapshotVersion) configId.getVersion();
String timestamp = ssVersion.getTimestamp();
int buildNumber = ssVersion.getBuildNumber();
if (timestamp != null && buildNumber != 0) {
qualifiedVersion = qualifiedVersion.replaceAll("SNAPSHOT", timestamp + "-" + buildNumber);
}
}
return base.resolve(configId.getGroupId().replace('.', '/') + "/"
+ configId.getArtifactId() + "/" + configId.getVersion()
+ "/" + configId.getArtifactId() + "-"
+ qualifiedVersion + "." + configId.getType()).toURL();
}
/**
* Gets all the text contents of the specified DOM node.
*/
private static String getText(Node target) {
NodeList nodes = target.getChildNodes();
StringBuffer buf = null;
for (int j = 0; j < nodes.getLength(); j++) {
Node node = nodes.item(j);
if (node.getNodeType() == Node.TEXT_NODE) {
if (buf == null) {
buf = new StringBuffer();
}
buf.append(node.getNodeValue());
}
}
return buf == null ? null : buf.toString();
}
/**
* Gets the text out of all the child nodes of a certain type. The result
* array has one element for each child of the specified DOM element that
* has the specified name.
*
* @param root The parent DOM element
* @param property The name of the child elements that hold the text
*/
private static List<String> getChildrenText(Element root, String property) {
NodeList children = root.getChildNodes();
List<String> results = new ArrayList<String>();
for (int i = 0; i < children.getLength(); i++) {
Node check = children.item(i);
if (check.getNodeType() == Node.ELEMENT_NODE && check.getNodeName().equals(property)) {
NodeList nodes = check.getChildNodes();
StringBuffer buf = null;
for (int j = 0; j < nodes.getLength(); j++) {
Node node = nodes.item(j);
if (node.getNodeType() == Node.TEXT_NODE) {
if (buf == null) {
buf = new StringBuffer();
}
buf.append(node.getNodeValue());
}
}
results.add(buf == null ? null : buf.toString());
}
}
return results;
}
private static class RemoteOpenResult implements OpenResult {
private final Artifact artifact;
private final InputStream in;
private File file;
private RemoteOpenResult(Artifact artifact, InputStream in) {
if (!artifact.isResolved()) {
throw new IllegalStateException("Artifact is not resolved: " + artifact);
}
this.artifact = artifact;
this.in = in;
}
public Artifact getArtifact() {
return artifact;
}
public File getFile() throws IOException {
if (file == null) {
file = downloadFile(in);
}
return file;
}
public void install(WriteableRepository repo, FileWriteMonitor monitor) throws IOException {
File file = getFile();
repo.copyToRepository(file, artifact, monitor);
if (!file.delete()) {
// log.warn("Unable to delete temporary download file " + tempFile.getAbsolutePath());
file.deleteOnExit();
}
}
public void close() {
if (in != null) {
try {
in.close();
} catch (IOException e) {
//ignore
}
}
}
/**
* Downloads to a temporary file so we can validate the download before
* installing into the repository.
*
* @param in source of download
// * @param monitor monitor to report results of download
* @return downloaded file
* @throws IOException if input cannot be read or file cannot be written
*/
private File downloadFile(InputStream in/*, ResultsFileWriteMonitor monitor*/) throws IOException {
if (in == null) {
throw new IllegalStateException();
}
FileOutputStream out = null;
byte[] buf;
try {
// monitor.writeStarted(result.getArtifact().toString(), result.getFileSize());
File file = File.createTempFile("geronimo-plugin-download-", ".tmp");
out = new FileOutputStream(file);
buf = new byte[65536];
int count, total = 0;
while ((count = in.read(buf)) > -1) {
out.write(buf, 0, count);
// monitor.writeProgress(total += count);
}
// monitor.writeComplete(total);
in.close();
in = null;
out.close();
out = null;
return file;
} finally {
if (in != null) {
try {
in.close();
} catch (IOException ignored) {
//ignore
}
}
if (out != null) {
try {
out.close();
} catch (IOException ignored) {
//ignore
}
}
}
}
}
public String toString() {
return getClass().getName() + ":" + base;
}
}
| framework/modules/geronimo-plugin/src/main/java/org/apache/geronimo/system/plugin/RemoteSourceRepository.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.geronimo.system.plugin;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URL;
import java.net.URLConnection;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
import javax.security.auth.login.FailedLoginException;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.ParserConfigurationException;
import org.apache.geronimo.kernel.repository.Artifact;
import org.apache.geronimo.kernel.repository.FileWriteMonitor;
import org.apache.geronimo.kernel.repository.Version;
import org.apache.geronimo.kernel.repository.WriteableRepository;
import org.apache.geronimo.kernel.util.XmlUtil;
import org.apache.geronimo.system.plugin.model.PluginListType;
import org.apache.geronimo.crypto.encoders.Base64;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
/**
* @version $Rev$ $Date$
*/
public class RemoteSourceRepository implements SourceRepository {
private final URI base;
private final String username;
private final String password;
public RemoteSourceRepository(URI base, String username, String password) {
if (!base.getPath().endsWith("/")) {
throw new IllegalArgumentException("base uri must end with '/', not " + base);
}
this.base = base;
this.username = username;
this.password = password;
}
public PluginListType getPluginList() {
try {
URL uri = base.resolve("geronimo-plugins.xml").toURL();
InputStream in = openStream(uri);
if (in != null) {
try {
return PluginXmlUtil.loadPluginList(in);
} finally {
in.close();
}
}
} catch (Exception e) {
// TODO: log it?
}
return null;
}
public OpenResult open(final Artifact artifact, final FileWriteMonitor monitor) throws IOException, FailedLoginException {
// If the artifact version is resolved then look for the artifact in the repo
if (artifact.isResolved()) {
URL location = getURL(artifact);
OpenResult result = open(artifact, location);
if (result != null) {
return result;
}
Version version = artifact.getVersion();
// Snapshot artifacts can have a special filename in an online maven repo.
// The version number is replaced with a timestmap and build number.
// The maven-metadata file contains this extra information.
if (version.toString().indexOf("SNAPSHOT") >= 0 && !(version instanceof SnapshotVersion)) {
// base path for the artifact version in a maven repo
URI basePath = base.resolve(artifact.getGroupId().replace('.', '/') + "/" + artifact.getArtifactId() + "/" + version + "/");
// get the maven-metadata file
Document metadata = getMavenMetadata(basePath);
// determine the snapshot qualifier from the maven-metadata file
if (metadata != null) {
NodeList snapshots = metadata.getDocumentElement().getElementsByTagName("snapshot");
if (snapshots.getLength() >= 1) {
Element snapshot = (Element) snapshots.item(0);
List<String> timestamp = getChildrenText(snapshot, "timestamp");
List<String> buildNumber = getChildrenText(snapshot, "buildNumber");
if (timestamp.size() >= 1 && buildNumber.size() >= 1) {
try {
// recurse back into this method using a SnapshotVersion
SnapshotVersion snapshotVersion = new SnapshotVersion(version);
snapshotVersion.setBuildNumber(Integer.parseInt(buildNumber.get(0)));
snapshotVersion.setTimestamp(timestamp.get(0));
Artifact newQuery = new Artifact(artifact.getGroupId(), artifact.getArtifactId(), snapshotVersion, artifact.getType());
location = getURL(newQuery);
return open(artifact, location);
} catch (NumberFormatException nfe) {
// log.error("Could not create snapshot version for " + artifact, nfe);
}
} else {
// log.error("Could not create snapshot version for " + artifact);
}
}
}
}
return null;
}
// Version is not resolved. Look in maven-metadata.xml and maven-metadata-local.xml for
// the available version numbers. If found then recurse into the enclosing method with
// a resolved version number
else {
// base path for the artifact version in a maven repo
URI basePath = base.resolve(artifact.getGroupId().replace('.', '/') + "/" + artifact.getArtifactId() + "/");
// get the maven-metadata file
Document metadata = getMavenMetadata(basePath);
// determine the available versions from the maven-metadata file
if (metadata != null) {
Element root = metadata.getDocumentElement();
NodeList list = root.getElementsByTagName("versions");
list = ((Element) list.item(0)).getElementsByTagName("version");
Version[] available = new Version[list.getLength()];
for (int i = 0; i < available.length; i++) {
available[i] = new Version(getText(list.item(i)));
}
// desc sort
Arrays.sort(available, new Comparator<Version>() {
public int compare(Version o1, Version o2) {
return o2.toString().compareTo(o1.toString());
}
});
for (Version version : available) {
Artifact versionedArtifact = new Artifact(artifact.getGroupId(), artifact.getArtifactId(), version, artifact.getType());
URL location = getURL(versionedArtifact);
OpenResult result = open(versionedArtifact, location);
if (result != null) {
return result;
}
}
}
}
return null;
}
private OpenResult open(Artifact artifact, URL location) throws IOException, FailedLoginException {
InputStream in = openStream(location);
return (in == null) ? null : new RemoteOpenResult(artifact, in);
}
private InputStream openStream(URL location) throws IOException, FailedLoginException {
URLConnection con = location.openConnection();
if (con instanceof HttpURLConnection) {
HttpURLConnection http = (HttpURLConnection) con;
http.connect();
if (http.getResponseCode() == 401) { // need to authenticate
if (username == null || username.equals("")) {
throw new FailedLoginException("Server returned 401 " + http.getResponseMessage());
}
//TODO is it necessary to keep getting new http's ?
http = (HttpURLConnection) location.openConnection();
http.setRequestProperty("Authorization",
"Basic " + new String(Base64.encode((username + ":" + password).getBytes())));
http.connect();
if (http.getResponseCode() == 401) {
throw new FailedLoginException("Server returned 401 " + http.getResponseMessage());
} else if (http.getResponseCode() == 404) {
return null; // Not found at this repository
}
} else if (http.getResponseCode() == 404) {
return null; // Not found at this repository
}
return http.getInputStream();
}
return null;
}
private Document getMavenMetadata(URI base) throws IOException, FailedLoginException {
Document doc = null;
InputStream in = null;
try {
URL metaURL = base.resolve( "maven-metadata.xml").toURL();
in = openStream(metaURL);
if (in == null) { // check for local maven metadata
metaURL = base.resolve("maven-metadata-local.xml").toURL();
in = openStream(metaURL);
}
if (in != null) {
DocumentBuilder builder = XmlUtil.newDocumentBuilderFactory().newDocumentBuilder();
doc = builder.parse(in);
}
} catch (ParserConfigurationException e) {
throw (IOException)new IOException().initCause(e);
} catch (SAXException e) {
throw (IOException)new IOException().initCause(e);
} finally {
if (in == null) {
// log.info("No maven metadata available at " + base);
} else {
in.close();
}
}
return doc;
}
private URL getURL(Artifact configId) throws MalformedURLException {
String qualifiedVersion = configId.getVersion().toString();
if (configId.getVersion() instanceof SnapshotVersion) {
SnapshotVersion ssVersion = (SnapshotVersion) configId.getVersion();
String timestamp = ssVersion.getTimestamp();
int buildNumber = ssVersion.getBuildNumber();
if (timestamp != null && buildNumber != 0) {
qualifiedVersion = qualifiedVersion.replaceAll("SNAPSHOT", timestamp + "-" + buildNumber);
}
}
return base.resolve(configId.getGroupId().replace('.', '/') + "/"
+ configId.getArtifactId() + "/" + configId.getVersion()
+ "/" + configId.getArtifactId() + "-"
+ qualifiedVersion + "." + configId.getType()).toURL();
}
/**
* Gets all the text contents of the specified DOM node.
*/
private static String getText(Node target) {
NodeList nodes = target.getChildNodes();
StringBuffer buf = null;
for (int j = 0; j < nodes.getLength(); j++) {
Node node = nodes.item(j);
if (node.getNodeType() == Node.TEXT_NODE) {
if (buf == null) {
buf = new StringBuffer();
}
buf.append(node.getNodeValue());
}
}
return buf == null ? null : buf.toString();
}
/**
* Gets the text out of all the child nodes of a certain type. The result
* array has one element for each child of the specified DOM element that
* has the specified name.
*
* @param root The parent DOM element
* @param property The name of the child elements that hold the text
*/
private static List<String> getChildrenText(Element root, String property) {
NodeList children = root.getChildNodes();
List<String> results = new ArrayList<String>();
for (int i = 0; i < children.getLength(); i++) {
Node check = children.item(i);
if (check.getNodeType() == Node.ELEMENT_NODE && check.getNodeName().equals(property)) {
NodeList nodes = check.getChildNodes();
StringBuffer buf = null;
for (int j = 0; j < nodes.getLength(); j++) {
Node node = nodes.item(j);
if (node.getNodeType() == Node.TEXT_NODE) {
if (buf == null) {
buf = new StringBuffer();
}
buf.append(node.getNodeValue());
}
}
results.add(buf == null ? null : buf.toString());
}
}
return results;
}
private static class RemoteOpenResult implements OpenResult {
private final Artifact artifact;
private final InputStream in;
private File file;
private RemoteOpenResult(Artifact artifact, InputStream in) {
if (!artifact.isResolved()) {
throw new IllegalStateException("Artifact is not resolved: " + artifact);
}
this.artifact = artifact;
this.in = in;
}
public Artifact getArtifact() {
return artifact;
}
public File getFile() throws IOException {
if (file == null) {
file = downloadFile(in);
}
return file;
}
public void install(WriteableRepository repo, FileWriteMonitor monitor) throws IOException {
File file = getFile();
repo.copyToRepository(file, artifact, monitor);
if (!file.delete()) {
// log.warn("Unable to delete temporary download file " + tempFile.getAbsolutePath());
file.deleteOnExit();
}
}
public void close() {
if (in != null) {
try {
in.close();
} catch (IOException e) {
//ignore
}
}
}
/**
* Downloads to a temporary file so we can validate the download before
* installing into the repository.
*
* @param in source of download
// * @param monitor monitor to report results of download
* @return downloaded file
* @throws IOException if input cannot be read or file cannot be written
*/
private File downloadFile(InputStream in/*, ResultsFileWriteMonitor monitor*/) throws IOException {
if (in == null) {
throw new IllegalStateException();
}
FileOutputStream out = null;
byte[] buf;
try {
// monitor.writeStarted(result.getArtifact().toString(), result.getFileSize());
File file = File.createTempFile("geronimo-plugin-download-", ".tmp");
out = new FileOutputStream(file);
buf = new byte[65536];
int count, total = 0;
while ((count = in.read(buf)) > -1) {
out.write(buf, 0, count);
// monitor.writeProgress(total += count);
}
// monitor.writeComplete(total);
in.close();
in = null;
out.close();
out = null;
return file;
} finally {
if (in != null) {
try {
in.close();
} catch (IOException ignored) {
//ignore
}
}
if (out != null) {
try {
out.close();
} catch (IOException ignored) {
//ignore
}
}
}
}
}
public String toString() {
return getClass().getName() + ":" + base;
}
}
| Print the repo URI when no connection can be made
git-svn-id: 0d16bf2c240b8111500ec482b35765e5042f5526@702555 13f79535-47bb-0310-9956-ffa450edef68
| framework/modules/geronimo-plugin/src/main/java/org/apache/geronimo/system/plugin/RemoteSourceRepository.java | Print the repo URI when no connection can be made | <ide><path>ramework/modules/geronimo-plugin/src/main/java/org/apache/geronimo/system/plugin/RemoteSourceRepository.java
<ide> URLConnection con = location.openConnection();
<ide> if (con instanceof HttpURLConnection) {
<ide> HttpURLConnection http = (HttpURLConnection) con;
<del> http.connect();
<add>
<add> try {
<add> http.connect();
<add> } catch (IOException e) {
<add> throw (IOException) new IOException("Cannot connect to "+location).initCause(e);
<add> }
<add>
<ide> if (http.getResponseCode() == 401) { // need to authenticate
<ide> if (username == null || username.equals("")) {
<ide> throw new FailedLoginException("Server returned 401 " + http.getResponseMessage()); |
|
JavaScript | mit | bab7f281067be01d61485478f0ef8fe6002260c2 | 0 | jkpluta/jkpluta.github.io,jkpluta/jkpluta.github.io | var base_url = "https://jkpluta.github.io";
function start(sel, spnr, href, func) {
if (spnr != null)
$(spnr).html('<img src="../img/spinner.gif">');
$.ajax({
url: base_url + href,
cache: false,
success: function (html) {
$(spnr).html('');
func(sel, html);
},
error: function (xhr, status, error) {
if (spnr != null)
$(spnr).html('<img src="../img/error.png"> <b>' + status + '</b> <i>' + error + '</i>');
}
});
}
function updateMainInfo(sel, html) {
$('#info').html(html);
}
function updateMainBookmarks(sel, html) {
$(sel).html('');
var listy = $('dl', html);
for (var i = 0; i < listy.length; i++) {
$(sel).append('<div id="bk' + i + '" class="col-sm-6 col-md-4 col-lg-3"></div>');
var bieżąca_lista = listy.eq(i);
var bieżące_pozycje = bieżąca_lista.children('dt');
$('#bk' + i).append('<h4>' + bieżąca_lista.prev().html() + '</h4>');
$('#bk' + i).append('<p><dl>');
for (var j = 0; j < bieżące_pozycje.length; j++) {
var bieżąca_pozycja = bieżące_pozycje.eq(j);
var bieżące_linki = bieżąca_pozycja.children('a');
if (bieżące_linki.length > 0) {
var link = bieżące_linki.first();
if (link.attr('ICON_URI') != null)
$('#bk' + i).append('<dt><a href="' + link.attr('href') + '"><img src="' + link.attr('ICON_URI') + '" alt="" title="' + link.text() + '" width="16" height="16"> ' + link.text() + '</a></dt>');
else
$('#bk' + i).append('<dt>' + link[0].outerHTML + '</dt>');
}
}
$('#bk' + i).append('</dl></p>');
}
$(sel).find('a').attr('target', '_blank');
}
function updateMainIcons(sel, html) {
$(sel).html('');
var links = $('a[icon], a[icon_uri]', html);
$(sel).append('<p>');
for (var i = 0; i < links.length; i++) {
var link = links.eq(i);
if (link.attr('ICON_URI') != null)
$(sel).append('<a href="' + link.attr('href') + '"><img src="' + link.attr('ICON_URI') + '" alt="' + link.text() + '" title="' + link.text() + '" width="32" height="32"></a> ');
else
$(sel).append('<a href="' + link.attr('href') + '"><img src="' + link.attr('ICON') + '" alt="' + link.text() + '" title="' + link.text() + '" width="32" height="32"></a> ');
}
$(sel).append('</p>');
$(sel).find('a').attr('target', '_blank');
}
function startJson(sel, spnr, href, func) {
if (spnr != null)
$(spnr).html('<img src="../img/spinner.gif">');
$.ajax({
url: href,
dataType: "json",
method: "GET",
cache: false,
success: function (html) {
$(spnr).html('');
func(sel, html);
},
error: function (xhr, status, error) {
if (spnr != null)
$(spnr).html('<img src="../img/error.png"> <b>' + status + '</b> <i>' + error + '</i>');
}
});
}
function updateMainGists(sel, data) {
var gists = data;
/*
if (gists.length == 0)
$(sel).prev().hide();
else
$(sel).prev().append('<div class="col-12"><h4>Zapiski</h4></div>');
*/
var token = localStorage.getItem('token');
if(token) {
$(sel).prev().append('<div class="col-12"><h4>Zapiski <i id="add-gist" class="fa fa-plus"></i></h4></div>');
$('#add-gist').click(function() {
alert("+");
})
} else {
$(sel).prev().append('<div class="col-12"><h4>Zapiski</h4></div>');
}
for (var idx in gists) {
var gist = gists[idx];
if (gist.description === 'Jan K. Pluta')
startJson(sel, null, gist.files['bookmark.json'].raw_url, updateMainGist);
}
}
function updateMainGist(sel, data) {
if (data.type === "jkpluta.bookmark") {
var token = localStorage.getItem('token');
var link = null
if(token) {
link = $('<div class="col-sm-12 col-md-6 col-lg-4"><a target="_blank"></a> <i id="del-gist-X" class="fa fa-times"></i></div>').appendTo($(sel)).children('a:first');
} else {
link = $('<div class="col-sm-12 col-md-6 col-lg-4"><a target="_blank"></a></div>').appendTo($(sel)).children('a:first');
}
link.attr('href', data.url);
link.text(data.title);
if (data.description != null)
link.parent().after('<div class="col-sm-12 col-md-6 col-lg-8">' + data.description + '</div>');
else
link.parent().after('<div class="col-sm-12 col-md-6 col-lg-8"><i>Proponowana zakładka</i></div>');
if(token) {
$('#del-gist-X').click(function() {
alert("-");
})
}
}
}
function startMain(href) {
start('#info', '#info', '/info.html', updateMainInfo);
start('#bks', '#bke', '/bookmarks.html', updateMainBookmarks);
start('#icns', '#icns', '/icons.html', updateMainIcons);
start('#icns', '#icns', '/icons.html', updateMainIcons);
startJson('#gists', '#gsts', 'https://api.github.com/users/jkpluta/gists', updateMainGists);
$('#google').focus();
}
$(document).ready(function() {
startMain();
})
| js/script.js | var base_url = "https://jkpluta.github.io";
function start(sel, spnr, href, func) {
if (spnr != null)
$(spnr).html('<img src="../img/spinner.gif">');
$.ajax({
url: base_url + href,
cache: false,
success: function (html) {
$(spnr).html('');
func(sel, html);
},
error: function (xhr, status, error) {
if (spnr != null)
$(spnr).html('<img src="../img/error.png"> <b>' + status + '</b> <i>' + error + '</i>');
}
});
}
function updateMainInfo(sel, html) {
$('#info').html(html);
}
function updateMainBookmarks(sel, html) {
$(sel).html('');
var listy = $('dl', html);
for (var i = 0; i < listy.length; i++) {
$(sel).append('<div id="bk' + i + '" class="col-sm-6 col-md-4 col-lg-3"></div>');
var bieżąca_lista = listy.eq(i);
var bieżące_pozycje = bieżąca_lista.children('dt');
$('#bk' + i).append('<h4>' + bieżąca_lista.prev().html() + '</h4>');
$('#bk' + i).append('<p><dl>');
for (var j = 0; j < bieżące_pozycje.length; j++) {
var bieżąca_pozycja = bieżące_pozycje.eq(j);
var bieżące_linki = bieżąca_pozycja.children('a');
if (bieżące_linki.length > 0) {
var link = bieżące_linki.first();
if (link.attr('ICON_URI') != null)
$('#bk' + i).append('<dt><a href="' + link.attr('href') + '"><img src="' + link.attr('ICON_URI') + '" alt="" title="' + link.text() + '" width="16" height="16"> ' + link.text() + '</a></dt>');
else
$('#bk' + i).append('<dt>' + link[0].outerHTML + '</dt>');
}
}
$('#bk' + i).append('</dl></p>');
}
$(sel).find('a').attr('target', '_blank');
}
function updateMainIcons(sel, html) {
$(sel).html('');
var links = $('a[icon], a[icon_uri]', html);
$(sel).append('<p>');
for (var i = 0; i < links.length; i++) {
var link = links.eq(i);
if (link.attr('ICON_URI') != null)
$(sel).append('<a href="' + link.attr('href') + '"><img src="' + link.attr('ICON_URI') + '" alt="' + link.text() + '" title="' + link.text() + '" width="32" height="32"></a> ');
else
$(sel).append('<a href="' + link.attr('href') + '"><img src="' + link.attr('ICON') + '" alt="' + link.text() + '" title="' + link.text() + '" width="32" height="32"></a> ');
}
$(sel).append('</p>');
$(sel).find('a').attr('target', '_blank');
}
function startJson(sel, spnr, href, func) {
if (spnr != null)
$(spnr).html('<img src="../img/spinner.gif">');
$.ajax({
url: href,
dataType: "json",
method: "GET",
cache: false,
success: function (html) {
$(spnr).html('');
func(sel, html);
},
error: function (xhr, status, error) {
if (spnr != null)
$(spnr).html('<img src="../img/error.png"> <b>' + status + '</b> <i>' + error + '</i>');
}
});
}
function updateMainGists(sel, data) {
var gists = data;
/*
if (gists.length == 0)
$(sel).prev().hide();
else
$(sel).prev().append('<div class="col-12"><h4>Zapiski</h4></div>');
*/
var token = localStorage.getItem('token');
if(token) {
$(sel).prev().append('<div class="col-12"><h4>Zapiski <i id="add-gist" class="fa fa-plus"></i></h4></div>');
$('#add-gist').click(function() {
alert("+");
})
} else {
$(sel).prev().append('<div class="col-12"><h4>Zapiski</h4></div>');
}
for (var idx in gists) {
var gist = gists[idx];
if (gist.description === 'Jan K. Pluta')
startJson(sel, null, gist.files['bookmark.json'].raw_url, updateMainGist);
}
}
function updateMainGist(sel, data) {
if (data.type === "jkpluta.bookmark") {
var token = localStorage.getItem('token');
var link = null
if(token) {
link = $('<div class="col-sm-12 col-md-6 col-lg-4"><a target="_blank"></a> <i id="del-gist-X" class="fa fa-times"></i></div>').appendTo($(sel)).children('a:first');
$('#del-gist-X').click(function() {
alert("-");
})
} else {
link = $('<div class="col-sm-12 col-md-6 col-lg-4"><a target="_blank"></a></div>').appendTo($(sel)).children('a:first');
}
link.attr('href', data.url);
link.text(data.title);
if (data.description != null)
link.parent().after('<div class="col-sm-12 col-md-6 col-lg-8">' + data.description + '</div>');
else
link.parent().after('<div class="col-sm-12 col-md-6 col-lg-8"><i>Proponowana zakładka</i></div>');
}
}
function startMain(href) {
start('#info', '#info', '/info.html', updateMainInfo);
start('#bks', '#bke', '/bookmarks.html', updateMainBookmarks);
start('#icns', '#icns', '/icons.html', updateMainIcons);
start('#icns', '#icns', '/icons.html', updateMainIcons);
startJson('#gists', '#gsts', 'https://api.github.com/users/jkpluta/gists', updateMainGists);
$('#google').focus();
}
$(document).ready(function() {
startMain();
})
| Edycja zapisków
| js/script.js | Edycja zapisków | <ide><path>s/script.js
<ide> var link = null
<ide> if(token) {
<ide> link = $('<div class="col-sm-12 col-md-6 col-lg-4"><a target="_blank"></a> <i id="del-gist-X" class="fa fa-times"></i></div>').appendTo($(sel)).children('a:first');
<del> $('#del-gist-X').click(function() {
<del> alert("-");
<del> })
<ide> } else {
<ide> link = $('<div class="col-sm-12 col-md-6 col-lg-4"><a target="_blank"></a></div>').appendTo($(sel)).children('a:first');
<ide> }
<ide> link.parent().after('<div class="col-sm-12 col-md-6 col-lg-8">' + data.description + '</div>');
<ide> else
<ide> link.parent().after('<div class="col-sm-12 col-md-6 col-lg-8"><i>Proponowana zakładka</i></div>');
<add> if(token) {
<add> $('#del-gist-X').click(function() {
<add> alert("-");
<add> })
<add> }
<ide> }
<ide> }
<ide> function startMain(href) { |
|
Java | apache-2.0 | 267b86c57f53dd79ca4c50563b69cca14fc8cd98 | 0 | GlenRSmith/elasticsearch,scorpionvicky/elasticsearch,nknize/elasticsearch,nknize/elasticsearch,gingerwizard/elasticsearch,gfyoung/elasticsearch,vroyer/elassandra,robin13/elasticsearch,uschindler/elasticsearch,scorpionvicky/elasticsearch,nknize/elasticsearch,GlenRSmith/elasticsearch,GlenRSmith/elasticsearch,HonzaKral/elasticsearch,strapdata/elassandra,gfyoung/elasticsearch,coding0011/elasticsearch,uschindler/elasticsearch,robin13/elasticsearch,scorpionvicky/elasticsearch,gingerwizard/elasticsearch,gfyoung/elasticsearch,strapdata/elassandra,gingerwizard/elasticsearch,uschindler/elasticsearch,HonzaKral/elasticsearch,GlenRSmith/elasticsearch,nknize/elasticsearch,uschindler/elasticsearch,coding0011/elasticsearch,robin13/elasticsearch,gfyoung/elasticsearch,scorpionvicky/elasticsearch,strapdata/elassandra,vroyer/elassandra,gingerwizard/elasticsearch,HonzaKral/elasticsearch,robin13/elasticsearch,vroyer/elassandra,GlenRSmith/elasticsearch,gingerwizard/elasticsearch,HonzaKral/elasticsearch,coding0011/elasticsearch,uschindler/elasticsearch,gingerwizard/elasticsearch,coding0011/elasticsearch,gfyoung/elasticsearch,scorpionvicky/elasticsearch,coding0011/elasticsearch,nknize/elasticsearch,gingerwizard/elasticsearch,robin13/elasticsearch,strapdata/elassandra,strapdata/elassandra | /*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.marvel.shield;
import org.elasticsearch.action.*;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthAction;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequestBuilder;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.action.admin.cluster.node.hotthreads.NodesHotThreadsAction;
import org.elasticsearch.action.admin.cluster.node.hotthreads.NodesHotThreadsRequest;
import org.elasticsearch.action.admin.cluster.node.hotthreads.NodesHotThreadsRequestBuilder;
import org.elasticsearch.action.admin.cluster.node.hotthreads.NodesHotThreadsResponse;
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoAction;
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest;
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequestBuilder;
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse;
import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsAction;
import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequest;
import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequestBuilder;
import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse;
import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryAction;
import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryRequest;
import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryRequestBuilder;
import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryResponse;
import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesAction;
import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesRequest;
import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesRequestBuilder;
import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesResponse;
import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryAction;
import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequest;
import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequestBuilder;
import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryResponse;
import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryAction;
import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequest;
import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequestBuilder;
import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryResponse;
import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteAction;
import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteRequest;
import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteRequestBuilder;
import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteResponse;
import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsAction;
import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest;
import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequestBuilder;
import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsResponse;
import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsAction;
import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsRequest;
import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsRequestBuilder;
import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsResponse;
import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotAction;
import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequest;
import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequestBuilder;
import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse;
import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotAction;
import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest;
import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequestBuilder;
import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotResponse;
import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsAction;
import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest;
import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequestBuilder;
import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsResponse;
import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotAction;
import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequest;
import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequestBuilder;
import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse;
import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusAction;
import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusRequest;
import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusRequestBuilder;
import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusResponse;
import org.elasticsearch.action.admin.cluster.state.ClusterStateAction;
import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest;
import org.elasticsearch.action.admin.cluster.state.ClusterStateRequestBuilder;
import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse;
import org.elasticsearch.action.admin.cluster.stats.ClusterStatsAction;
import org.elasticsearch.action.admin.cluster.stats.ClusterStatsRequest;
import org.elasticsearch.action.admin.cluster.stats.ClusterStatsRequestBuilder;
import org.elasticsearch.action.admin.cluster.stats.ClusterStatsResponse;
import org.elasticsearch.action.admin.cluster.tasks.PendingClusterTasksAction;
import org.elasticsearch.action.admin.cluster.tasks.PendingClusterTasksRequest;
import org.elasticsearch.action.admin.cluster.tasks.PendingClusterTasksRequestBuilder;
import org.elasticsearch.action.admin.cluster.tasks.PendingClusterTasksResponse;
import org.elasticsearch.action.admin.cluster.validate.template.RenderSearchTemplateAction;
import org.elasticsearch.action.admin.cluster.validate.template.RenderSearchTemplateRequest;
import org.elasticsearch.action.admin.cluster.validate.template.RenderSearchTemplateRequestBuilder;
import org.elasticsearch.action.admin.cluster.validate.template.RenderSearchTemplateResponse;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesAction;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequestBuilder;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse;
import org.elasticsearch.action.admin.indices.alias.exists.AliasesExistAction;
import org.elasticsearch.action.admin.indices.alias.exists.AliasesExistRequestBuilder;
import org.elasticsearch.action.admin.indices.alias.exists.AliasesExistResponse;
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesAction;
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest;
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequestBuilder;
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesResponse;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequestBuilder;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse;
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheAction;
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest;
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequestBuilder;
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheResponse;
import org.elasticsearch.action.admin.indices.close.CloseIndexAction;
import org.elasticsearch.action.admin.indices.close.CloseIndexRequest;
import org.elasticsearch.action.admin.indices.close.CloseIndexRequestBuilder;
import org.elasticsearch.action.admin.indices.close.CloseIndexResponse;
import org.elasticsearch.action.admin.indices.create.CreateIndexAction;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder;
import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
import org.elasticsearch.action.admin.indices.delete.DeleteIndexAction;
import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;
import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequestBuilder;
import org.elasticsearch.action.admin.indices.delete.DeleteIndexResponse;
import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsAction;
import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsRequest;
import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsRequestBuilder;
import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsResponse;
import org.elasticsearch.action.admin.indices.exists.types.TypesExistsAction;
import org.elasticsearch.action.admin.indices.exists.types.TypesExistsRequest;
import org.elasticsearch.action.admin.indices.exists.types.TypesExistsRequestBuilder;
import org.elasticsearch.action.admin.indices.exists.types.TypesExistsResponse;
import org.elasticsearch.action.admin.indices.flush.FlushAction;
import org.elasticsearch.action.admin.indices.flush.FlushRequest;
import org.elasticsearch.action.admin.indices.flush.FlushRequestBuilder;
import org.elasticsearch.action.admin.indices.flush.FlushResponse;
import org.elasticsearch.action.admin.indices.flush.SyncedFlushAction;
import org.elasticsearch.action.admin.indices.flush.SyncedFlushRequest;
import org.elasticsearch.action.admin.indices.flush.SyncedFlushRequestBuilder;
import org.elasticsearch.action.admin.indices.flush.SyncedFlushResponse;
import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeAction;
import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest;
import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequestBuilder;
import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse;
import org.elasticsearch.action.admin.indices.get.GetIndexAction;
import org.elasticsearch.action.admin.indices.get.GetIndexRequest;
import org.elasticsearch.action.admin.indices.get.GetIndexRequestBuilder;
import org.elasticsearch.action.admin.indices.get.GetIndexResponse;
import org.elasticsearch.action.admin.indices.mapping.get.*;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingAction;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequestBuilder;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse;
import org.elasticsearch.action.admin.indices.open.OpenIndexAction;
import org.elasticsearch.action.admin.indices.open.OpenIndexRequest;
import org.elasticsearch.action.admin.indices.open.OpenIndexRequestBuilder;
import org.elasticsearch.action.admin.indices.open.OpenIndexResponse;
import org.elasticsearch.action.admin.indices.recovery.RecoveryAction;
import org.elasticsearch.action.admin.indices.recovery.RecoveryRequest;
import org.elasticsearch.action.admin.indices.recovery.RecoveryRequestBuilder;
import org.elasticsearch.action.admin.indices.recovery.RecoveryResponse;
import org.elasticsearch.action.admin.indices.refresh.RefreshAction;
import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
import org.elasticsearch.action.admin.indices.refresh.RefreshRequestBuilder;
import org.elasticsearch.action.admin.indices.refresh.RefreshResponse;
import org.elasticsearch.action.admin.indices.segments.IndicesSegmentResponse;
import org.elasticsearch.action.admin.indices.segments.IndicesSegmentsAction;
import org.elasticsearch.action.admin.indices.segments.IndicesSegmentsRequest;
import org.elasticsearch.action.admin.indices.segments.IndicesSegmentsRequestBuilder;
import org.elasticsearch.action.admin.indices.settings.get.GetSettingsAction;
import org.elasticsearch.action.admin.indices.settings.get.GetSettingsRequest;
import org.elasticsearch.action.admin.indices.settings.get.GetSettingsRequestBuilder;
import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse;
import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsAction;
import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest;
import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequestBuilder;
import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsResponse;
import org.elasticsearch.action.admin.indices.shards.IndicesShardStoreRequestBuilder;
import org.elasticsearch.action.admin.indices.shards.IndicesShardStoresAction;
import org.elasticsearch.action.admin.indices.shards.IndicesShardStoresRequest;
import org.elasticsearch.action.admin.indices.shards.IndicesShardStoresResponse;
import org.elasticsearch.action.admin.indices.stats.IndicesStatsAction;
import org.elasticsearch.action.admin.indices.stats.IndicesStatsRequest;
import org.elasticsearch.action.admin.indices.stats.IndicesStatsRequestBuilder;
import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse;
import org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateAction;
import org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateRequest;
import org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateRequestBuilder;
import org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateResponse;
import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesAction;
import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesRequest;
import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesRequestBuilder;
import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesResponse;
import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateAction;
import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest;
import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequestBuilder;
import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateResponse;
import org.elasticsearch.action.admin.indices.upgrade.get.UpgradeStatusAction;
import org.elasticsearch.action.admin.indices.upgrade.get.UpgradeStatusRequest;
import org.elasticsearch.action.admin.indices.upgrade.get.UpgradeStatusRequestBuilder;
import org.elasticsearch.action.admin.indices.upgrade.get.UpgradeStatusResponse;
import org.elasticsearch.action.admin.indices.upgrade.post.UpgradeAction;
import org.elasticsearch.action.admin.indices.upgrade.post.UpgradeRequest;
import org.elasticsearch.action.admin.indices.upgrade.post.UpgradeRequestBuilder;
import org.elasticsearch.action.admin.indices.upgrade.post.UpgradeResponse;
import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryAction;
import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequest;
import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequestBuilder;
import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryResponse;
import org.elasticsearch.action.admin.indices.warmer.delete.DeleteWarmerAction;
import org.elasticsearch.action.admin.indices.warmer.delete.DeleteWarmerRequest;
import org.elasticsearch.action.admin.indices.warmer.delete.DeleteWarmerRequestBuilder;
import org.elasticsearch.action.admin.indices.warmer.delete.DeleteWarmerResponse;
import org.elasticsearch.action.admin.indices.warmer.get.GetWarmersAction;
import org.elasticsearch.action.admin.indices.warmer.get.GetWarmersRequest;
import org.elasticsearch.action.admin.indices.warmer.get.GetWarmersRequestBuilder;
import org.elasticsearch.action.admin.indices.warmer.get.GetWarmersResponse;
import org.elasticsearch.action.admin.indices.warmer.put.PutWarmerAction;
import org.elasticsearch.action.admin.indices.warmer.put.PutWarmerRequest;
import org.elasticsearch.action.admin.indices.warmer.put.PutWarmerRequestBuilder;
import org.elasticsearch.action.admin.indices.warmer.put.PutWarmerResponse;
import org.elasticsearch.action.bulk.BulkAction;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.delete.DeleteAction;
import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.action.delete.DeleteRequestBuilder;
import org.elasticsearch.action.delete.DeleteResponse;
import org.elasticsearch.action.explain.ExplainAction;
import org.elasticsearch.action.explain.ExplainRequest;
import org.elasticsearch.action.explain.ExplainRequestBuilder;
import org.elasticsearch.action.explain.ExplainResponse;
import org.elasticsearch.action.fieldstats.FieldStatsAction;
import org.elasticsearch.action.fieldstats.FieldStatsRequest;
import org.elasticsearch.action.fieldstats.FieldStatsRequestBuilder;
import org.elasticsearch.action.fieldstats.FieldStatsResponse;
import org.elasticsearch.action.get.*;
import org.elasticsearch.action.index.IndexAction;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.indexedscripts.delete.DeleteIndexedScriptAction;
import org.elasticsearch.action.indexedscripts.delete.DeleteIndexedScriptRequest;
import org.elasticsearch.action.indexedscripts.delete.DeleteIndexedScriptRequestBuilder;
import org.elasticsearch.action.indexedscripts.delete.DeleteIndexedScriptResponse;
import org.elasticsearch.action.indexedscripts.get.GetIndexedScriptAction;
import org.elasticsearch.action.indexedscripts.get.GetIndexedScriptRequest;
import org.elasticsearch.action.indexedscripts.get.GetIndexedScriptRequestBuilder;
import org.elasticsearch.action.indexedscripts.get.GetIndexedScriptResponse;
import org.elasticsearch.action.indexedscripts.put.PutIndexedScriptAction;
import org.elasticsearch.action.indexedscripts.put.PutIndexedScriptRequest;
import org.elasticsearch.action.indexedscripts.put.PutIndexedScriptRequestBuilder;
import org.elasticsearch.action.indexedscripts.put.PutIndexedScriptResponse;
import org.elasticsearch.action.percolate.*;
import org.elasticsearch.action.search.*;
import org.elasticsearch.action.suggest.SuggestAction;
import org.elasticsearch.action.suggest.SuggestRequest;
import org.elasticsearch.action.suggest.SuggestRequestBuilder;
import org.elasticsearch.action.suggest.SuggestResponse;
import org.elasticsearch.action.termvectors.*;
import org.elasticsearch.action.update.UpdateAction;
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.action.update.UpdateRequestBuilder;
import org.elasticsearch.action.update.UpdateResponse;
import org.elasticsearch.client.*;
import org.elasticsearch.client.support.Headers;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.indices.flush.SyncedFlushService;
import org.elasticsearch.threadpool.ThreadPool;
/**
 * A {@link Client} wrapper that binds the internal Marvel user to every
 * outgoing request before delegating to the wrapped client, so that
 * Marvel's own requests pass Shield authorization.
 */
public class SecuredClient implements Client {
// the real client every call is delegated to
private final Client client;
// admin-facing view that applies the same Marvel-user binding
private final Admin admin;
// hook used to attach the internal Marvel user to outgoing requests
private MarvelShieldIntegration shieldIntegration;
/**
 * Wraps the given client; all requests issued through this instance are
 * tagged with the internal Marvel user via the Shield integration before
 * being executed.
 */
@Inject
public SecuredClient(Client client, MarvelShieldIntegration shieldIntegration) {
this.client = client;
this.shieldIntegration = shieldIntegration;
this.admin = new Admin(this.client, this.shieldIntegration);
}
/** Returns the admin view; it applies the same Marvel-user binding. */
@Override
public AdminClient admin() {
    return this.admin;
}

/** Closes the wrapped client. */
@Override
public void close() {
    this.client.close();
}

/** Exposes the thread pool of the wrapped client. */
@Override
public ThreadPool threadPool() {
    return this.client.threadPool();
}

/** Exposes the headers of the wrapped client. */
@Override
public Headers headers() {
    return this.client.headers();
}

/** Exposes the settings of the wrapped client. */
@Override
public Settings settings() {
    return this.client.settings();
}
/**
 * Binds the internal Marvel user to the request, then executes the action
 * on the wrapped client (blocking future).
 */
@Override
public <Request extends ActionRequest, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> ActionFuture<Response> execute(Action<Request, Response, RequestBuilder> action, Request request) {
    this.shieldIntegration.bindInternalMarvelUser(request);
    return this.client.execute(action, request);
}

/**
 * Async variant: binds the internal Marvel user to the request before
 * delegating execution to the wrapped client.
 */
@Override
public <Request extends ActionRequest, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> void execute(Action<Request, Response, RequestBuilder> action, Request request, ActionListener<Response> listener) {
    this.shieldIntegration.bindInternalMarvelUser(request);
    this.client.execute(action, request, listener);
}

/**
 * Creates a request builder bound to this secured client, so that the
 * user binding happens when the builder eventually calls execute(...).
 */
@Override
public <Request extends ActionRequest, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> RequestBuilder prepareExecute(Action<Request, Response, RequestBuilder> action) {
    return action.newRequestBuilder(this);
}
/** Indexes a document (blocking future). */
public ActionFuture<IndexResponse> index(IndexRequest request) {
    return execute(IndexAction.INSTANCE, request);
}

/** Indexes a document, notifying the listener on completion. */
public void index(IndexRequest request, ActionListener<IndexResponse> listener) {
    execute(IndexAction.INSTANCE, request, listener);
}

/** Creates an empty index request builder. */
public IndexRequestBuilder prepareIndex() {
    return new IndexRequestBuilder(this, IndexAction.INSTANCE, null);
}

/** Creates an index request builder for the given index and type. */
public IndexRequestBuilder prepareIndex(String index, String type) {
    return prepareIndex(index, type, null);
}

/** Creates an index request builder for the given document coordinates. */
public IndexRequestBuilder prepareIndex(String index, String type, @Nullable String id) {
    IndexRequestBuilder builder = prepareIndex();
    return builder.setIndex(index).setType(type).setId(id);
}
/** Updates a document (blocking future). */
public ActionFuture<UpdateResponse> update(UpdateRequest request) {
    return execute(UpdateAction.INSTANCE, request);
}

/** Updates a document, notifying the listener on completion. */
public void update(UpdateRequest request, ActionListener<UpdateResponse> listener) {
    execute(UpdateAction.INSTANCE, request, listener);
}

/** Creates an empty update request builder. */
public UpdateRequestBuilder prepareUpdate() {
    return new UpdateRequestBuilder(this, UpdateAction.INSTANCE, null, null, null);
}

/** Creates an update request builder for the given document coordinates. */
public UpdateRequestBuilder prepareUpdate(String index, String type, String id) {
    return new UpdateRequestBuilder(this, UpdateAction.INSTANCE, index, type, id);
}
/** Deletes a document (blocking future). */
public ActionFuture<DeleteResponse> delete(DeleteRequest request) {
    return execute(DeleteAction.INSTANCE, request);
}

/** Deletes a document, notifying the listener on completion. */
public void delete(DeleteRequest request, ActionListener<DeleteResponse> listener) {
    execute(DeleteAction.INSTANCE, request, listener);
}

/** Creates an empty delete request builder. */
public DeleteRequestBuilder prepareDelete() {
    return new DeleteRequestBuilder(this, DeleteAction.INSTANCE, null);
}

/** Creates a delete request builder for the given document coordinates. */
public DeleteRequestBuilder prepareDelete(String index, String type, String id) {
    DeleteRequestBuilder builder = prepareDelete();
    return builder.setIndex(index).setType(type).setId(id);
}
/** Executes a bulk request (blocking future). */
public ActionFuture<BulkResponse> bulk(BulkRequest request) {
    return execute(BulkAction.INSTANCE, request);
}

/** Executes a bulk request, notifying the listener on completion. */
public void bulk(BulkRequest request, ActionListener<BulkResponse> listener) {
    execute(BulkAction.INSTANCE, request, listener);
}

/** Creates an empty bulk request builder. */
public BulkRequestBuilder prepareBulk() {
    return new BulkRequestBuilder(this, BulkAction.INSTANCE);
}
/** Fetches a document (blocking future). */
public ActionFuture<GetResponse> get(GetRequest request) {
    return execute(GetAction.INSTANCE, request);
}

/** Fetches a document, notifying the listener on completion. */
public void get(GetRequest request, ActionListener<GetResponse> listener) {
    execute(GetAction.INSTANCE, request, listener);
}

/** Creates an empty get request builder. */
public GetRequestBuilder prepareGet() {
    return new GetRequestBuilder(this, GetAction.INSTANCE, null);
}

/** Creates a get request builder for the given document coordinates. */
public GetRequestBuilder prepareGet(String index, String type, String id) {
    GetRequestBuilder builder = prepareGet();
    return builder.setIndex(index).setType(type).setId(id);
}
/** Fetches an indexed script (blocking future). */
public ActionFuture<GetIndexedScriptResponse> getIndexedScript(GetIndexedScriptRequest request) {
    return execute(GetIndexedScriptAction.INSTANCE, request);
}

/** Fetches an indexed script, notifying the listener on completion. */
public void getIndexedScript(GetIndexedScriptRequest request, ActionListener<GetIndexedScriptResponse> listener) {
    execute(GetIndexedScriptAction.INSTANCE, request, listener);
}

/** Creates an empty get-indexed-script request builder. */
public GetIndexedScriptRequestBuilder prepareGetIndexedScript() {
    return new GetIndexedScriptRequestBuilder(this, GetIndexedScriptAction.INSTANCE);
}

/** Creates a get-indexed-script request builder for the given language and id. */
public GetIndexedScriptRequestBuilder prepareGetIndexedScript(String scriptLang, String id) {
    GetIndexedScriptRequestBuilder builder = prepareGetIndexedScript();
    return builder.setScriptLang(scriptLang).setId(id);
}

/** Creates an empty put-indexed-script request builder. */
public PutIndexedScriptRequestBuilder preparePutIndexedScript() {
    return new PutIndexedScriptRequestBuilder(this, PutIndexedScriptAction.INSTANCE);
}

/** Creates a put-indexed-script request builder populated with the given script. */
public PutIndexedScriptRequestBuilder preparePutIndexedScript(@Nullable String scriptLang, String id, String source) {
    PutIndexedScriptRequestBuilder builder = PutIndexedScriptAction.INSTANCE.newRequestBuilder(this);
    return builder.setScriptLang(scriptLang).setId(id).setSource(source);
}

/** Stores an indexed script, notifying the listener on completion. */
public void putIndexedScript(PutIndexedScriptRequest request, ActionListener<PutIndexedScriptResponse> listener) {
    execute(PutIndexedScriptAction.INSTANCE, request, listener);
}

/** Stores an indexed script (blocking future). */
public ActionFuture<PutIndexedScriptResponse> putIndexedScript(PutIndexedScriptRequest request) {
    return execute(PutIndexedScriptAction.INSTANCE, request);
}

/** Deletes an indexed script, notifying the listener on completion. */
public void deleteIndexedScript(DeleteIndexedScriptRequest request, ActionListener<DeleteIndexedScriptResponse> listener) {
    execute(DeleteIndexedScriptAction.INSTANCE, request, listener);
}

/** Deletes an indexed script (blocking future). */
public ActionFuture<DeleteIndexedScriptResponse> deleteIndexedScript(DeleteIndexedScriptRequest request) {
    return execute(DeleteIndexedScriptAction.INSTANCE, request);
}

/** Creates an empty delete-indexed-script request builder. */
public DeleteIndexedScriptRequestBuilder prepareDeleteIndexedScript() {
    return DeleteIndexedScriptAction.INSTANCE.newRequestBuilder(this);
}

/** Creates a delete-indexed-script request builder for the given language and id. */
public DeleteIndexedScriptRequestBuilder prepareDeleteIndexedScript(@Nullable String scriptLang, String id) {
    DeleteIndexedScriptRequestBuilder builder = prepareDeleteIndexedScript();
    return builder.setScriptLang(scriptLang).setId(id);
}
/** Fetches several documents at once (blocking future). */
public ActionFuture<MultiGetResponse> multiGet(MultiGetRequest request) {
    return execute(MultiGetAction.INSTANCE, request);
}

/** Fetches several documents at once, notifying the listener on completion. */
public void multiGet(MultiGetRequest request, ActionListener<MultiGetResponse> listener) {
    execute(MultiGetAction.INSTANCE, request, listener);
}

/** Creates an empty multi-get request builder. */
public MultiGetRequestBuilder prepareMultiGet() {
    return new MultiGetRequestBuilder(this, MultiGetAction.INSTANCE);
}

/** Runs a search (blocking future). */
public ActionFuture<SearchResponse> search(SearchRequest request) {
    return execute(SearchAction.INSTANCE, request);
}

/** Runs a search, notifying the listener on completion. */
public void search(SearchRequest request, ActionListener<SearchResponse> listener) {
    execute(SearchAction.INSTANCE, request, listener);
}

/** Creates a search request builder targeting the given indices. */
public SearchRequestBuilder prepareSearch(String... indices) {
    SearchRequestBuilder builder = new SearchRequestBuilder(this, SearchAction.INSTANCE);
    return builder.setIndices(indices);
}

/** Continues a scroll search (blocking future). */
public ActionFuture<SearchResponse> searchScroll(SearchScrollRequest request) {
    return execute(SearchScrollAction.INSTANCE, request);
}

/** Continues a scroll search, notifying the listener on completion. */
public void searchScroll(SearchScrollRequest request, ActionListener<SearchResponse> listener) {
    execute(SearchScrollAction.INSTANCE, request, listener);
}

/** Creates a scroll request builder for the given scroll id. */
public SearchScrollRequestBuilder prepareSearchScroll(String scrollId) {
    return new SearchScrollRequestBuilder(this, SearchScrollAction.INSTANCE, scrollId);
}

/** Runs several searches at once (blocking future). */
public ActionFuture<MultiSearchResponse> multiSearch(MultiSearchRequest request) {
    return execute(MultiSearchAction.INSTANCE, request);
}

/** Runs several searches at once, notifying the listener on completion. */
public void multiSearch(MultiSearchRequest request, ActionListener<MultiSearchResponse> listener) {
    execute(MultiSearchAction.INSTANCE, request, listener);
}

/** Creates an empty multi-search request builder. */
public MultiSearchRequestBuilder prepareMultiSearch() {
    return new MultiSearchRequestBuilder(this, MultiSearchAction.INSTANCE);
}
/** Runs a suggest request (blocking future). */
public ActionFuture<SuggestResponse> suggest(SuggestRequest request) {
    return execute(SuggestAction.INSTANCE, request);
}

/** Runs a suggest request, notifying the listener on completion. */
public void suggest(SuggestRequest request, ActionListener<SuggestResponse> listener) {
    execute(SuggestAction.INSTANCE, request, listener);
}

/** Creates a suggest request builder targeting the given indices. */
public SuggestRequestBuilder prepareSuggest(String... indices) {
    SuggestRequestBuilder builder = new SuggestRequestBuilder(this, SuggestAction.INSTANCE);
    return builder.setIndices(indices);
}

/** Computes term vectors for a document (blocking future). */
public ActionFuture<TermVectorsResponse> termVectors(TermVectorsRequest request) {
    return execute(TermVectorsAction.INSTANCE, request);
}

/** Computes term vectors for a document, notifying the listener on completion. */
public void termVectors(TermVectorsRequest request, ActionListener<TermVectorsResponse> listener) {
    execute(TermVectorsAction.INSTANCE, request, listener);
}

/** Creates an empty term-vectors request builder. */
public TermVectorsRequestBuilder prepareTermVectors() {
    return new TermVectorsRequestBuilder(this, TermVectorsAction.INSTANCE);
}

/** Creates a term-vectors request builder for the given document coordinates. */
public TermVectorsRequestBuilder prepareTermVectors(String index, String type, String id) {
    return new TermVectorsRequestBuilder(this, TermVectorsAction.INSTANCE, index, type, id);
}

/** @deprecated use {@link #termVectors(TermVectorsRequest)} */
@Deprecated
public ActionFuture<TermVectorsResponse> termVector(TermVectorsRequest request) {
    return termVectors(request);
}

/** @deprecated use {@link #termVectors(TermVectorsRequest, ActionListener)} */
@Deprecated
public void termVector(TermVectorsRequest request, ActionListener<TermVectorsResponse> listener) {
    termVectors(request, listener);
}

/** @deprecated use {@link #prepareTermVectors()} */
@Deprecated
public TermVectorsRequestBuilder prepareTermVector() {
    return prepareTermVectors();
}

/** @deprecated use {@link #prepareTermVectors(String, String, String)} */
@Deprecated
public TermVectorsRequestBuilder prepareTermVector(String index, String type, String id) {
    return prepareTermVectors(index, type, id);
}

/** Computes term vectors for several documents (blocking future). */
public ActionFuture<MultiTermVectorsResponse> multiTermVectors(MultiTermVectorsRequest request) {
    return execute(MultiTermVectorsAction.INSTANCE, request);
}

/** Computes term vectors for several documents, notifying the listener. */
public void multiTermVectors(MultiTermVectorsRequest request, ActionListener<MultiTermVectorsResponse> listener) {
    execute(MultiTermVectorsAction.INSTANCE, request, listener);
}

/** Creates an empty multi-term-vectors request builder. */
public MultiTermVectorsRequestBuilder prepareMultiTermVectors() {
    return new MultiTermVectorsRequestBuilder(this, MultiTermVectorsAction.INSTANCE);
}
/** Runs a percolate request (blocking future). */
public ActionFuture<PercolateResponse> percolate(PercolateRequest request) {
    return execute(PercolateAction.INSTANCE, request);
}

/** Runs a percolate request, notifying the listener on completion. */
public void percolate(PercolateRequest request, ActionListener<PercolateResponse> listener) {
    execute(PercolateAction.INSTANCE, request, listener);
}

/** Creates an empty percolate request builder. */
public PercolateRequestBuilder preparePercolate() {
    return new PercolateRequestBuilder(this, PercolateAction.INSTANCE);
}

/** Creates an empty multi-percolate request builder. */
public MultiPercolateRequestBuilder prepareMultiPercolate() {
    return new MultiPercolateRequestBuilder(this, MultiPercolateAction.INSTANCE);
}

/** Runs several percolate requests, notifying the listener on completion. */
public void multiPercolate(MultiPercolateRequest request, ActionListener<MultiPercolateResponse> listener) {
    execute(MultiPercolateAction.INSTANCE, request, listener);
}

/** Runs several percolate requests (blocking future). */
public ActionFuture<MultiPercolateResponse> multiPercolate(MultiPercolateRequest request) {
    return execute(MultiPercolateAction.INSTANCE, request);
}

/** Creates an explain request builder for the given document coordinates. */
public ExplainRequestBuilder prepareExplain(String index, String type, String id) {
    return new ExplainRequestBuilder(this, ExplainAction.INSTANCE, index, type, id);
}

/** Explains a document's score (blocking future). */
public ActionFuture<ExplainResponse> explain(ExplainRequest request) {
    return execute(ExplainAction.INSTANCE, request);
}

/** Explains a document's score, notifying the listener on completion. */
public void explain(ExplainRequest request, ActionListener<ExplainResponse> listener) {
    execute(ExplainAction.INSTANCE, request, listener);
}

/** Clears scroll contexts, notifying the listener on completion. */
public void clearScroll(ClearScrollRequest request, ActionListener<ClearScrollResponse> listener) {
    execute(ClearScrollAction.INSTANCE, request, listener);
}

/** Clears scroll contexts (blocking future). */
public ActionFuture<ClearScrollResponse> clearScroll(ClearScrollRequest request) {
    return execute(ClearScrollAction.INSTANCE, request);
}

/** Creates an empty clear-scroll request builder. */
public ClearScrollRequestBuilder prepareClearScroll() {
    return new ClearScrollRequestBuilder(this, ClearScrollAction.INSTANCE);
}

/** Collects field statistics, notifying the listener on completion. */
public void fieldStats(FieldStatsRequest request, ActionListener<FieldStatsResponse> listener) {
    execute(FieldStatsAction.INSTANCE, request, listener);
}

/** Collects field statistics (blocking future). */
public ActionFuture<FieldStatsResponse> fieldStats(FieldStatsRequest request) {
    return execute(FieldStatsAction.INSTANCE, request);
}

/** Creates an empty field-stats request builder. */
public FieldStatsRequestBuilder prepareFieldStats() {
    return new FieldStatsRequestBuilder(this, FieldStatsAction.INSTANCE);
}
// Indices-admin view that applies the same Marvel-user binding as the
// enclosing SecuredClient before delegating each request.
static class IndicesAdmin implements IndicesAdminClient {
// client every indices-admin call is delegated to
private final ElasticsearchClient client;
// hook used to attach the internal Marvel user to outgoing requests
private final MarvelShieldIntegration shieldIntegration;
/** Wraps the given client with the Shield/Marvel user-binding behavior. */
public IndicesAdmin(ElasticsearchClient client, MarvelShieldIntegration shieldIntegration) {
this.client = client;
this.shieldIntegration = shieldIntegration;
}
/**
 * Binds the internal Marvel user to the request, then executes the action
 * on the wrapped client (blocking future).
 */
public <Request extends ActionRequest, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> ActionFuture<Response> execute(Action<Request, Response, RequestBuilder> action, Request request) {
    shieldIntegration.bindInternalMarvelUser(request);
    return this.client.execute(action, request);
}

/**
 * Async variant: binds the internal Marvel user to the request before
 * delegating execution to the wrapped client.
 */
public <Request extends ActionRequest, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> void execute(Action<Request, Response, RequestBuilder> action, Request request, ActionListener<Response> listener) {
    shieldIntegration.bindInternalMarvelUser(request);
    this.client.execute(action, request, listener);
}

/**
 * Creates a request builder bound to THIS secured wrapper rather than to
 * the raw inner client. The original delegated to
 * {@code this.client.prepareExecute(action)}, which bound the builder to
 * the unsecured client and therefore skipped the Marvel-user binding when
 * the builder executed; binding to {@code this} matches the enclosing
 * SecuredClient's prepareExecute and routes execution through the
 * binding execute(...) overloads above.
 */
public <Request extends ActionRequest, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> RequestBuilder prepareExecute(Action<Request, Response, RequestBuilder> action) {
    return action.newRequestBuilder(this);
}

/** Exposes the thread pool of the wrapped client. */
public ThreadPool threadPool() {
    return this.client.threadPool();
}
/** Checks whether the given indices exist (blocking future). */
public ActionFuture<IndicesExistsResponse> exists(IndicesExistsRequest request) {
    return execute(IndicesExistsAction.INSTANCE, request);
}

/** Checks whether the given indices exist, notifying the listener. */
public void exists(IndicesExistsRequest request, ActionListener<IndicesExistsResponse> listener) {
    execute(IndicesExistsAction.INSTANCE, request, listener);
}

/** Creates an indices-exists request builder for the given indices. */
public IndicesExistsRequestBuilder prepareExists(String... indices) {
    return new IndicesExistsRequestBuilder(this, IndicesExistsAction.INSTANCE, indices);
}

/** Checks whether the given types exist (blocking future). */
public ActionFuture<TypesExistsResponse> typesExists(TypesExistsRequest request) {
    return execute(TypesExistsAction.INSTANCE, request);
}

/** Checks whether the given types exist, notifying the listener. */
public void typesExists(TypesExistsRequest request, ActionListener<TypesExistsResponse> listener) {
    execute(TypesExistsAction.INSTANCE, request, listener);
}

/** Creates a types-exists request builder for the given indices. */
public TypesExistsRequestBuilder prepareTypesExists(String... index) {
    return new TypesExistsRequestBuilder(this, TypesExistsAction.INSTANCE, index);
}

/** Applies alias additions/removals (blocking future). */
public ActionFuture<IndicesAliasesResponse> aliases(IndicesAliasesRequest request) {
    return execute(IndicesAliasesAction.INSTANCE, request);
}

/** Applies alias additions/removals, notifying the listener. */
public void aliases(IndicesAliasesRequest request, ActionListener<IndicesAliasesResponse> listener) {
    execute(IndicesAliasesAction.INSTANCE, request, listener);
}

/** Creates an empty aliases request builder. */
public IndicesAliasesRequestBuilder prepareAliases() {
    return new IndicesAliasesRequestBuilder(this, IndicesAliasesAction.INSTANCE);
}

/** Fetches alias definitions (blocking future). */
public ActionFuture<GetAliasesResponse> getAliases(GetAliasesRequest request) {
    return execute(GetAliasesAction.INSTANCE, request);
}

/** Fetches alias definitions, notifying the listener. */
public void getAliases(GetAliasesRequest request, ActionListener<GetAliasesResponse> listener) {
    execute(GetAliasesAction.INSTANCE, request, listener);
}

/** Creates a get-aliases request builder for the given aliases. */
public GetAliasesRequestBuilder prepareGetAliases(String... aliases) {
    return new GetAliasesRequestBuilder(this, GetAliasesAction.INSTANCE, aliases);
}
/** Clears index caches (blocking future). */
public ActionFuture<ClearIndicesCacheResponse> clearCache(ClearIndicesCacheRequest request) {
    return execute(ClearIndicesCacheAction.INSTANCE, request);
}

/** Checks whether the requested aliases exist, notifying the listener. */
public void aliasesExist(GetAliasesRequest request, ActionListener<AliasesExistResponse> listener) {
    execute(AliasesExistAction.INSTANCE, request, listener);
}

/** Checks whether the requested aliases exist (blocking future). */
public ActionFuture<AliasesExistResponse> aliasesExist(GetAliasesRequest request) {
    return execute(AliasesExistAction.INSTANCE, request);
}

/** Creates an aliases-exist request builder for the given aliases. */
public AliasesExistRequestBuilder prepareAliasesExist(String... aliases) {
    return new AliasesExistRequestBuilder(this, AliasesExistAction.INSTANCE, aliases);
}

/** Fetches index metadata (blocking future). */
public ActionFuture<GetIndexResponse> getIndex(GetIndexRequest request) {
    return execute(GetIndexAction.INSTANCE, request);
}

/** Fetches index metadata, notifying the listener. */
public void getIndex(GetIndexRequest request, ActionListener<GetIndexResponse> listener) {
    execute(GetIndexAction.INSTANCE, request, listener);
}

/** Creates an empty get-index request builder. */
public GetIndexRequestBuilder prepareGetIndex() {
    return new GetIndexRequestBuilder(this, GetIndexAction.INSTANCE, Strings.EMPTY_ARRAY);
}

/** Clears index caches, notifying the listener. */
public void clearCache(ClearIndicesCacheRequest request, ActionListener<ClearIndicesCacheResponse> listener) {
    execute(ClearIndicesCacheAction.INSTANCE, request, listener);
}

/** Creates a clear-cache request builder targeting the given indices. */
public ClearIndicesCacheRequestBuilder prepareClearCache(String... indices) {
    ClearIndicesCacheRequestBuilder builder = new ClearIndicesCacheRequestBuilder(this, ClearIndicesCacheAction.INSTANCE);
    return builder.setIndices(indices);
}
// --- Index lifecycle: create / delete / close / open / flush ----------
// All variants delegate through this class's execute(...) overloads.

// Create an index.
public ActionFuture<CreateIndexResponse> create(CreateIndexRequest request) {
    return this.execute(CreateIndexAction.INSTANCE, request);
}
public void create(CreateIndexRequest request, ActionListener<CreateIndexResponse> listener) {
    this.execute(CreateIndexAction.INSTANCE, request, listener);
}
public CreateIndexRequestBuilder prepareCreate(String index) {
    return new CreateIndexRequestBuilder(this, CreateIndexAction.INSTANCE, index);
}
// Delete one or more indices.
public ActionFuture<DeleteIndexResponse> delete(DeleteIndexRequest request) {
    return this.execute(DeleteIndexAction.INSTANCE, request);
}
public void delete(DeleteIndexRequest request, ActionListener<DeleteIndexResponse> listener) {
    this.execute(DeleteIndexAction.INSTANCE, request, listener);
}
public DeleteIndexRequestBuilder prepareDelete(String... indices) {
    return new DeleteIndexRequestBuilder(this, DeleteIndexAction.INSTANCE, indices);
}
// Close indices (keep metadata, stop serving).
public ActionFuture<CloseIndexResponse> close(CloseIndexRequest request) {
    return this.execute(CloseIndexAction.INSTANCE, request);
}
public void close(CloseIndexRequest request, ActionListener<CloseIndexResponse> listener) {
    this.execute(CloseIndexAction.INSTANCE, request, listener);
}
public CloseIndexRequestBuilder prepareClose(String... indices) {
    return new CloseIndexRequestBuilder(this, CloseIndexAction.INSTANCE, indices);
}
// Re-open previously closed indices.
public ActionFuture<OpenIndexResponse> open(OpenIndexRequest request) {
    return this.execute(OpenIndexAction.INSTANCE, request);
}
public void open(OpenIndexRequest request, ActionListener<OpenIndexResponse> listener) {
    this.execute(OpenIndexAction.INSTANCE, request, listener);
}
public OpenIndexRequestBuilder prepareOpen(String... indices) {
    return new OpenIndexRequestBuilder(this, OpenIndexAction.INSTANCE, indices);
}
// Flush (commit the transaction log).
public ActionFuture<FlushResponse> flush(FlushRequest request) {
    return this.execute(FlushAction.INSTANCE, request);
}
public void flush(FlushRequest request, ActionListener<FlushResponse> listener) {
    this.execute(FlushAction.INSTANCE, request, listener);
}
public FlushRequestBuilder prepareFlush(String... indices) {
    return (new FlushRequestBuilder(this, FlushAction.INSTANCE)).setIndices(indices);
}
// Synced flush. These three carry @Override while most siblings do not;
// presumably all of these methods implement IndicesAdminClient -- the
// annotations could be applied uniformly in a follow-up.
@Override
public ActionFuture<SyncedFlushResponse> syncedFlush(SyncedFlushRequest syncedFlushRequest) {
    return this.execute(SyncedFlushAction.INSTANCE, syncedFlushRequest);
}
@Override
public void syncedFlush(SyncedFlushRequest syncedFlushRequest, ActionListener<SyncedFlushResponse> actionListener) {
    this.execute(SyncedFlushAction.INSTANCE, syncedFlushRequest, actionListener);
}
@Override
public SyncedFlushRequestBuilder prepareSyncedFlush(String... indices) {
    return (new SyncedFlushRequestBuilder(this, SyncedFlushAction.INSTANCE)).setIndices(indices);
}
// --- Mappings, force-merge, upgrade -----------------------------------
// All variants delegate through this class's execute(...) overloads.

// Read type mappings (the listener variants precede the future/builder
// variants here; ordering is historical, behavior is unaffected).
public void getMappings(GetMappingsRequest request, ActionListener<GetMappingsResponse> listener) {
    this.execute(GetMappingsAction.INSTANCE, request, listener);
}
public void getFieldMappings(GetFieldMappingsRequest request, ActionListener<GetFieldMappingsResponse> listener) {
    this.execute(GetFieldMappingsAction.INSTANCE, request, listener);
}
public GetMappingsRequestBuilder prepareGetMappings(String... indices) {
    return new GetMappingsRequestBuilder(this, GetMappingsAction.INSTANCE, indices);
}
public ActionFuture<GetMappingsResponse> getMappings(GetMappingsRequest request) {
    return this.execute(GetMappingsAction.INSTANCE, request);
}
public GetFieldMappingsRequestBuilder prepareGetFieldMappings(String... indices) {
    return new GetFieldMappingsRequestBuilder(this, GetFieldMappingsAction.INSTANCE, indices);
}
public ActionFuture<GetFieldMappingsResponse> getFieldMappings(GetFieldMappingsRequest request) {
    return this.execute(GetFieldMappingsAction.INSTANCE, request);
}
// Install/update a type mapping.
public ActionFuture<PutMappingResponse> putMapping(PutMappingRequest request) {
    return this.execute(PutMappingAction.INSTANCE, request);
}
public void putMapping(PutMappingRequest request, ActionListener<PutMappingResponse> listener) {
    this.execute(PutMappingAction.INSTANCE, request, listener);
}
public PutMappingRequestBuilder preparePutMapping(String... indices) {
    return new PutMappingRequestBuilder(this, PutMappingAction.INSTANCE).setIndices(indices);
}
// Force-merge segments.
@Override
public ActionFuture<ForceMergeResponse> forceMerge(ForceMergeRequest request) {
    return this.execute(ForceMergeAction.INSTANCE, request);
}
@Override
public void forceMerge(ForceMergeRequest request, ActionListener<ForceMergeResponse> listener) {
    this.execute(ForceMergeAction.INSTANCE, request, listener);
}
@Override
public ForceMergeRequestBuilder prepareForceMerge(String... indices) {
    return (new ForceMergeRequestBuilder(this, ForceMergeAction.INSTANCE)).setIndices(indices);
}
// Upgrade on-disk segments to the current format, and query its progress.
public ActionFuture<UpgradeResponse> upgrade(UpgradeRequest request) {
    return this.execute(UpgradeAction.INSTANCE, request);
}
public void upgrade(UpgradeRequest request, ActionListener<UpgradeResponse> listener) {
    this.execute(UpgradeAction.INSTANCE, request, listener);
}
public UpgradeRequestBuilder prepareUpgrade(String... indices) {
    return new UpgradeRequestBuilder(this, UpgradeAction.INSTANCE).setIndices(indices);
}
public ActionFuture<UpgradeStatusResponse> upgradeStatus(UpgradeStatusRequest request) {
    return this.execute(UpgradeStatusAction.INSTANCE, request);
}
public void upgradeStatus(UpgradeStatusRequest request, ActionListener<UpgradeStatusResponse> listener) {
    this.execute(UpgradeStatusAction.INSTANCE, request, listener);
}
public UpgradeStatusRequestBuilder prepareUpgradeStatus(String... indices) {
    return new UpgradeStatusRequestBuilder(this, UpgradeStatusAction.INSTANCE).setIndices(indices);
}
// --- Refresh, stats, recovery, segments, settings, analyze ------------
// All variants delegate through this class's execute(...) overloads.

// Refresh (make recent writes searchable).
public ActionFuture<RefreshResponse> refresh(RefreshRequest request) {
    return this.execute(RefreshAction.INSTANCE, request);
}
public void refresh(RefreshRequest request, ActionListener<RefreshResponse> listener) {
    this.execute(RefreshAction.INSTANCE, request, listener);
}
public RefreshRequestBuilder prepareRefresh(String... indices) {
    return new RefreshRequestBuilder(this, RefreshAction.INSTANCE).setIndices(indices);
}
// Index statistics.
public ActionFuture<IndicesStatsResponse> stats(IndicesStatsRequest request) {
    return this.execute(IndicesStatsAction.INSTANCE, request);
}
public void stats(IndicesStatsRequest request, ActionListener<IndicesStatsResponse> listener) {
    this.execute(IndicesStatsAction.INSTANCE, request, listener);
}
public IndicesStatsRequestBuilder prepareStats(String... indices) {
    return new IndicesStatsRequestBuilder(this, IndicesStatsAction.INSTANCE).setIndices(indices);
}
// Shard recovery status.
public ActionFuture<RecoveryResponse> recoveries(RecoveryRequest request) {
    return this.execute(RecoveryAction.INSTANCE, request);
}
public void recoveries(RecoveryRequest request, ActionListener<RecoveryResponse> listener) {
    this.execute(RecoveryAction.INSTANCE, request, listener);
}
public RecoveryRequestBuilder prepareRecoveries(String... indices) {
    return new RecoveryRequestBuilder(this, RecoveryAction.INSTANCE).setIndices(indices);
}
// Low-level segment listing.
public ActionFuture<IndicesSegmentResponse> segments(IndicesSegmentsRequest request) {
    return this.execute(IndicesSegmentsAction.INSTANCE, request);
}
public void segments(IndicesSegmentsRequest request, ActionListener<IndicesSegmentResponse> listener) {
    this.execute(IndicesSegmentsAction.INSTANCE, request, listener);
}
public IndicesSegmentsRequestBuilder prepareSegments(String... indices) {
    return new IndicesSegmentsRequestBuilder(this, IndicesSegmentsAction.INSTANCE).setIndices(indices);
}
// Shard-store metadata (used e.g. for allocation decisions).
public ActionFuture<IndicesShardStoresResponse> shardStores(IndicesShardStoresRequest request) {
    return this.execute(IndicesShardStoresAction.INSTANCE, request);
}
public void shardStores(IndicesShardStoresRequest request, ActionListener<IndicesShardStoresResponse> listener) {
    this.execute(IndicesShardStoresAction.INSTANCE, request, listener);
}
public IndicesShardStoreRequestBuilder prepareShardStores(String... indices) {
    return new IndicesShardStoreRequestBuilder(this, IndicesShardStoresAction.INSTANCE, indices);
}
// Dynamic index-settings updates.
public ActionFuture<UpdateSettingsResponse> updateSettings(UpdateSettingsRequest request) {
    return this.execute(UpdateSettingsAction.INSTANCE, request);
}
public void updateSettings(UpdateSettingsRequest request, ActionListener<UpdateSettingsResponse> listener) {
    this.execute(UpdateSettingsAction.INSTANCE, request, listener);
}
public UpdateSettingsRequestBuilder prepareUpdateSettings(String... indices) {
    return new UpdateSettingsRequestBuilder(this, UpdateSettingsAction.INSTANCE, Strings.EMPTY_ARRAY).setIndices(indices);
}
// Run text through an analyzer; the prepare overloads cover
// (index, text), (text) with no index, and fully-unconfigured builders.
public ActionFuture<AnalyzeResponse> analyze(AnalyzeRequest request) {
    return this.execute(AnalyzeAction.INSTANCE, request);
}
public void analyze(AnalyzeRequest request, ActionListener<AnalyzeResponse> listener) {
    this.execute(AnalyzeAction.INSTANCE, request, listener);
}
public AnalyzeRequestBuilder prepareAnalyze(@Nullable String index, String text) {
    return new AnalyzeRequestBuilder(this, AnalyzeAction.INSTANCE, index, text);
}
public AnalyzeRequestBuilder prepareAnalyze(String text) {
    return new AnalyzeRequestBuilder(this, AnalyzeAction.INSTANCE, null, text);
}
public AnalyzeRequestBuilder prepareAnalyze() {
    return new AnalyzeRequestBuilder(this, AnalyzeAction.INSTANCE);
}
// --- Templates, query validation, warmers, settings reads -------------
// All variants delegate through this class's execute(...) overloads.

// Index templates: put / get / delete.
public ActionFuture<PutIndexTemplateResponse> putTemplate(PutIndexTemplateRequest request) {
    return this.execute(PutIndexTemplateAction.INSTANCE, request);
}
public void putTemplate(PutIndexTemplateRequest request, ActionListener<PutIndexTemplateResponse> listener) {
    this.execute(PutIndexTemplateAction.INSTANCE, request, listener);
}
public PutIndexTemplateRequestBuilder preparePutTemplate(String name) {
    return new PutIndexTemplateRequestBuilder(this, PutIndexTemplateAction.INSTANCE, name);
}
public ActionFuture<GetIndexTemplatesResponse> getTemplates(GetIndexTemplatesRequest request) {
    return this.execute(GetIndexTemplatesAction.INSTANCE, request);
}
public void getTemplates(GetIndexTemplatesRequest request, ActionListener<GetIndexTemplatesResponse> listener) {
    this.execute(GetIndexTemplatesAction.INSTANCE, request, listener);
}
public GetIndexTemplatesRequestBuilder prepareGetTemplates(String... names) {
    return new GetIndexTemplatesRequestBuilder(this, GetIndexTemplatesAction.INSTANCE, names);
}
public ActionFuture<DeleteIndexTemplateResponse> deleteTemplate(DeleteIndexTemplateRequest request) {
    return this.execute(DeleteIndexTemplateAction.INSTANCE, request);
}
public void deleteTemplate(DeleteIndexTemplateRequest request, ActionListener<DeleteIndexTemplateResponse> listener) {
    this.execute(DeleteIndexTemplateAction.INSTANCE, request, listener);
}
public DeleteIndexTemplateRequestBuilder prepareDeleteTemplate(String name) {
    return new DeleteIndexTemplateRequestBuilder(this, DeleteIndexTemplateAction.INSTANCE, name);
}
// Validate a query without executing it.
public ActionFuture<ValidateQueryResponse> validateQuery(ValidateQueryRequest request) {
    return this.execute(ValidateQueryAction.INSTANCE, request);
}
public void validateQuery(ValidateQueryRequest request, ActionListener<ValidateQueryResponse> listener) {
    this.execute(ValidateQueryAction.INSTANCE, request, listener);
}
public ValidateQueryRequestBuilder prepareValidateQuery(String... indices) {
    return new ValidateQueryRequestBuilder(this, ValidateQueryAction.INSTANCE).setIndices(indices);
}
// Index warmers (legacy 2.x feature): put / delete / get.
public ActionFuture<PutWarmerResponse> putWarmer(PutWarmerRequest request) {
    return this.execute(PutWarmerAction.INSTANCE, request);
}
public void putWarmer(PutWarmerRequest request, ActionListener<PutWarmerResponse> listener) {
    this.execute(PutWarmerAction.INSTANCE, request, listener);
}
public PutWarmerRequestBuilder preparePutWarmer(String name) {
    return new PutWarmerRequestBuilder(this, PutWarmerAction.INSTANCE, name);
}
public ActionFuture<DeleteWarmerResponse> deleteWarmer(DeleteWarmerRequest request) {
    return this.execute(DeleteWarmerAction.INSTANCE, request);
}
public void deleteWarmer(DeleteWarmerRequest request, ActionListener<DeleteWarmerResponse> listener) {
    this.execute(DeleteWarmerAction.INSTANCE, request, listener);
}
public DeleteWarmerRequestBuilder prepareDeleteWarmer() {
    return new DeleteWarmerRequestBuilder(this, DeleteWarmerAction.INSTANCE);
}
public GetWarmersRequestBuilder prepareGetWarmers(String... indices) {
    return new GetWarmersRequestBuilder(this, GetWarmersAction.INSTANCE, indices);
}
public ActionFuture<GetWarmersResponse> getWarmers(GetWarmersRequest request) {
    return this.execute(GetWarmersAction.INSTANCE, request);
}
public void getWarmers(GetWarmersRequest request, ActionListener<GetWarmersResponse> listener) {
    this.execute(GetWarmersAction.INSTANCE, request, listener);
}
// Read index settings.
public GetSettingsRequestBuilder prepareGetSettings(String... indices) {
    return new GetSettingsRequestBuilder(this, GetSettingsAction.INSTANCE, indices);
}
public ActionFuture<GetSettingsResponse> getSettings(GetSettingsRequest request) {
    return this.execute(GetSettingsAction.INSTANCE, request);
}
public void getSettings(GetSettingsRequest request, ActionListener<GetSettingsResponse> listener) {
    this.execute(GetSettingsAction.INSTANCE, request, listener);
}
}
/**
 * Cluster-level admin client that wraps a raw {@link ElasticsearchClient}
 * and, via {@link MarvelShieldIntegration#bindInternalMarvelUser}, attaches
 * the internal Marvel user to every request before it is executed. The two
 * {@code execute(...)} overloads are the single funnel: every delegating
 * method in this class routes through them, so the bind-then-execute
 * ordering must be preserved.
 */
static class ClusterAdmin implements ClusterAdminClient {
    private final ElasticsearchClient client;
    private final MarvelShieldIntegration shieldIntegration;
    public ClusterAdmin(ElasticsearchClient client, MarvelShieldIntegration shieldIntegration) {
        this.client = client;
        this.shieldIntegration = shieldIntegration;
    }
    // Funnel: bind the internal Marvel user, then delegate. Do not reorder.
    public <Request extends ActionRequest, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> ActionFuture<Response> execute(Action<Request, Response, RequestBuilder> action, Request request) {
        shieldIntegration.bindInternalMarvelUser(request);
        return this.client.execute(action, request);
    }
    // Async funnel: same bind-before-delegate contract as above.
    public <Request extends ActionRequest, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> void execute(Action<Request, Response, RequestBuilder> action, Request request, ActionListener<Response> listener) {
        shieldIntegration.bindInternalMarvelUser(request);
        this.client.execute(action, request, listener);
    }
    // NOTE(review): unlike the prepare* methods below (which pass `this` so
    // the builder executes through the binding funnel), this returns a
    // builder bound to the raw client -- requests executed through it would
    // skip bindInternalMarvelUser. Confirm this is intended.
    public <Request extends ActionRequest, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> RequestBuilder prepareExecute(Action<Request, Response, RequestBuilder> action) {
        return this.client.prepareExecute(action);
    }
    public ThreadPool threadPool() {
        return this.client.threadPool();
    }
    // Cluster health.
    public ActionFuture<ClusterHealthResponse> health(ClusterHealthRequest request) {
        return this.execute(ClusterHealthAction.INSTANCE, request);
    }
    public void health(ClusterHealthRequest request, ActionListener<ClusterHealthResponse> listener) {
        this.execute(ClusterHealthAction.INSTANCE, request, listener);
    }
    public ClusterHealthRequestBuilder prepareHealth(String... indices) {
        return new ClusterHealthRequestBuilder(this, ClusterHealthAction.INSTANCE).setIndices(indices);
    }
    // Cluster state.
    public ActionFuture<ClusterStateResponse> state(ClusterStateRequest request) {
        return this.execute(ClusterStateAction.INSTANCE, request);
    }
    public void state(ClusterStateRequest request, ActionListener<ClusterStateResponse> listener) {
        this.execute(ClusterStateAction.INSTANCE, request, listener);
    }
    public ClusterStateRequestBuilder prepareState() {
        return new ClusterStateRequestBuilder(this, ClusterStateAction.INSTANCE);
    }
    // Shard reroute.
    public ActionFuture<ClusterRerouteResponse> reroute(ClusterRerouteRequest request) {
        return this.execute(ClusterRerouteAction.INSTANCE, request);
    }
    public void reroute(ClusterRerouteRequest request, ActionListener<ClusterRerouteResponse> listener) {
        this.execute(ClusterRerouteAction.INSTANCE, request, listener);
    }
    public ClusterRerouteRequestBuilder prepareReroute() {
        return new ClusterRerouteRequestBuilder(this, ClusterRerouteAction.INSTANCE);
    }
    // Cluster-wide settings updates.
    public ActionFuture<ClusterUpdateSettingsResponse> updateSettings(ClusterUpdateSettingsRequest request) {
        return this.execute(ClusterUpdateSettingsAction.INSTANCE, request);
    }
    public void updateSettings(ClusterUpdateSettingsRequest request, ActionListener<ClusterUpdateSettingsResponse> listener) {
        this.execute(ClusterUpdateSettingsAction.INSTANCE, request, listener);
    }
    public ClusterUpdateSettingsRequestBuilder prepareUpdateSettings() {
        return new ClusterUpdateSettingsRequestBuilder(this, ClusterUpdateSettingsAction.INSTANCE);
    }
    // Node info / stats.
    public ActionFuture<NodesInfoResponse> nodesInfo(NodesInfoRequest request) {
        return this.execute(NodesInfoAction.INSTANCE, request);
    }
    public void nodesInfo(NodesInfoRequest request, ActionListener<NodesInfoResponse> listener) {
        this.execute(NodesInfoAction.INSTANCE, request, listener);
    }
    public NodesInfoRequestBuilder prepareNodesInfo(String... nodesIds) {
        return new NodesInfoRequestBuilder(this, NodesInfoAction.INSTANCE).setNodesIds(nodesIds);
    }
    public ActionFuture<NodesStatsResponse> nodesStats(NodesStatsRequest request) {
        return this.execute(NodesStatsAction.INSTANCE, request);
    }
    public void nodesStats(NodesStatsRequest request, ActionListener<NodesStatsResponse> listener) {
        this.execute(NodesStatsAction.INSTANCE, request, listener);
    }
    public NodesStatsRequestBuilder prepareNodesStats(String... nodesIds) {
        return new NodesStatsRequestBuilder(this, NodesStatsAction.INSTANCE).setNodesIds(nodesIds);
    }
    // Cluster-wide stats and hot threads.
    public ActionFuture<ClusterStatsResponse> clusterStats(ClusterStatsRequest request) {
        return this.execute(ClusterStatsAction.INSTANCE, request);
    }
    public void clusterStats(ClusterStatsRequest request, ActionListener<ClusterStatsResponse> listener) {
        this.execute(ClusterStatsAction.INSTANCE, request, listener);
    }
    public ClusterStatsRequestBuilder prepareClusterStats() {
        return new ClusterStatsRequestBuilder(this, ClusterStatsAction.INSTANCE);
    }
    public ActionFuture<NodesHotThreadsResponse> nodesHotThreads(NodesHotThreadsRequest request) {
        return this.execute(NodesHotThreadsAction.INSTANCE, request);
    }
    public void nodesHotThreads(NodesHotThreadsRequest request, ActionListener<NodesHotThreadsResponse> listener) {
        this.execute(NodesHotThreadsAction.INSTANCE, request, listener);
    }
    public NodesHotThreadsRequestBuilder prepareNodesHotThreads(String... nodesIds) {
        return new NodesHotThreadsRequestBuilder(this, NodesHotThreadsAction.INSTANCE).setNodesIds(nodesIds);
    }
    // Search-shard routing.
    public ActionFuture<ClusterSearchShardsResponse> searchShards(ClusterSearchShardsRequest request) {
        return this.execute(ClusterSearchShardsAction.INSTANCE, request);
    }
    public void searchShards(ClusterSearchShardsRequest request, ActionListener<ClusterSearchShardsResponse> listener) {
        this.execute(ClusterSearchShardsAction.INSTANCE, request, listener);
    }
    public ClusterSearchShardsRequestBuilder prepareSearchShards() {
        return new ClusterSearchShardsRequestBuilder(this, ClusterSearchShardsAction.INSTANCE);
    }
    public ClusterSearchShardsRequestBuilder prepareSearchShards(String... indices) {
        return new ClusterSearchShardsRequestBuilder(this, ClusterSearchShardsAction.INSTANCE).setIndices(indices);
    }
    // Pending cluster tasks.
    public PendingClusterTasksRequestBuilder preparePendingClusterTasks() {
        return new PendingClusterTasksRequestBuilder(this, PendingClusterTasksAction.INSTANCE);
    }
    public ActionFuture<PendingClusterTasksResponse> pendingClusterTasks(PendingClusterTasksRequest request) {
        return this.execute(PendingClusterTasksAction.INSTANCE, request);
    }
    public void pendingClusterTasks(PendingClusterTasksRequest request, ActionListener<PendingClusterTasksResponse> listener) {
        this.execute(PendingClusterTasksAction.INSTANCE, request, listener);
    }
    // Snapshot repositories: put / delete / verify / get.
    public ActionFuture<PutRepositoryResponse> putRepository(PutRepositoryRequest request) {
        return this.execute(PutRepositoryAction.INSTANCE, request);
    }
    public void putRepository(PutRepositoryRequest request, ActionListener<PutRepositoryResponse> listener) {
        this.execute(PutRepositoryAction.INSTANCE, request, listener);
    }
    public PutRepositoryRequestBuilder preparePutRepository(String name) {
        return new PutRepositoryRequestBuilder(this, PutRepositoryAction.INSTANCE, name);
    }
    // Snapshots: create / get / delete / restore / status.
    public ActionFuture<CreateSnapshotResponse> createSnapshot(CreateSnapshotRequest request) {
        return this.execute(CreateSnapshotAction.INSTANCE, request);
    }
    public void createSnapshot(CreateSnapshotRequest request, ActionListener<CreateSnapshotResponse> listener) {
        this.execute(CreateSnapshotAction.INSTANCE, request, listener);
    }
    public CreateSnapshotRequestBuilder prepareCreateSnapshot(String repository, String name) {
        return new CreateSnapshotRequestBuilder(this, CreateSnapshotAction.INSTANCE, repository, name);
    }
    public ActionFuture<GetSnapshotsResponse> getSnapshots(GetSnapshotsRequest request) {
        return this.execute(GetSnapshotsAction.INSTANCE, request);
    }
    public void getSnapshots(GetSnapshotsRequest request, ActionListener<GetSnapshotsResponse> listener) {
        this.execute(GetSnapshotsAction.INSTANCE, request, listener);
    }
    public GetSnapshotsRequestBuilder prepareGetSnapshots(String repository) {
        return new GetSnapshotsRequestBuilder(this, GetSnapshotsAction.INSTANCE, repository);
    }
    public ActionFuture<DeleteSnapshotResponse> deleteSnapshot(DeleteSnapshotRequest request) {
        return this.execute(DeleteSnapshotAction.INSTANCE, request);
    }
    public void deleteSnapshot(DeleteSnapshotRequest request, ActionListener<DeleteSnapshotResponse> listener) {
        this.execute(DeleteSnapshotAction.INSTANCE, request, listener);
    }
    public DeleteSnapshotRequestBuilder prepareDeleteSnapshot(String repository, String name) {
        return new DeleteSnapshotRequestBuilder(this, DeleteSnapshotAction.INSTANCE, repository, name);
    }
    public ActionFuture<DeleteRepositoryResponse> deleteRepository(DeleteRepositoryRequest request) {
        return this.execute(DeleteRepositoryAction.INSTANCE, request);
    }
    public void deleteRepository(DeleteRepositoryRequest request, ActionListener<DeleteRepositoryResponse> listener) {
        this.execute(DeleteRepositoryAction.INSTANCE, request, listener);
    }
    public DeleteRepositoryRequestBuilder prepareDeleteRepository(String name) {
        return new DeleteRepositoryRequestBuilder(this, DeleteRepositoryAction.INSTANCE, name);
    }
    public ActionFuture<VerifyRepositoryResponse> verifyRepository(VerifyRepositoryRequest request) {
        return this.execute(VerifyRepositoryAction.INSTANCE, request);
    }
    public void verifyRepository(VerifyRepositoryRequest request, ActionListener<VerifyRepositoryResponse> listener) {
        this.execute(VerifyRepositoryAction.INSTANCE, request, listener);
    }
    public VerifyRepositoryRequestBuilder prepareVerifyRepository(String name) {
        return new VerifyRepositoryRequestBuilder(this, VerifyRepositoryAction.INSTANCE, name);
    }
    public ActionFuture<GetRepositoriesResponse> getRepositories(GetRepositoriesRequest request) {
        return this.execute(GetRepositoriesAction.INSTANCE, request);
    }
    public void getRepositories(GetRepositoriesRequest request, ActionListener<GetRepositoriesResponse> listener) {
        this.execute(GetRepositoriesAction.INSTANCE, request, listener);
    }
    public GetRepositoriesRequestBuilder prepareGetRepositories(String... name) {
        return new GetRepositoriesRequestBuilder(this, GetRepositoriesAction.INSTANCE, name);
    }
    public ActionFuture<RestoreSnapshotResponse> restoreSnapshot(RestoreSnapshotRequest request) {
        return this.execute(RestoreSnapshotAction.INSTANCE, request);
    }
    public void restoreSnapshot(RestoreSnapshotRequest request, ActionListener<RestoreSnapshotResponse> listener) {
        this.execute(RestoreSnapshotAction.INSTANCE, request, listener);
    }
    public RestoreSnapshotRequestBuilder prepareRestoreSnapshot(String repository, String snapshot) {
        return new RestoreSnapshotRequestBuilder(this, RestoreSnapshotAction.INSTANCE, repository, snapshot);
    }
    public ActionFuture<SnapshotsStatusResponse> snapshotsStatus(SnapshotsStatusRequest request) {
        return this.execute(SnapshotsStatusAction.INSTANCE, request);
    }
    public void snapshotsStatus(SnapshotsStatusRequest request, ActionListener<SnapshotsStatusResponse> listener) {
        this.execute(SnapshotsStatusAction.INSTANCE, request, listener);
    }
    public SnapshotsStatusRequestBuilder prepareSnapshotStatus(String repository) {
        return new SnapshotsStatusRequestBuilder(this, SnapshotsStatusAction.INSTANCE, repository);
    }
    public SnapshotsStatusRequestBuilder prepareSnapshotStatus() {
        return new SnapshotsStatusRequestBuilder(this, SnapshotsStatusAction.INSTANCE);
    }
    // Render (but do not run) a search template.
    public ActionFuture<RenderSearchTemplateResponse> renderSearchTemplate(RenderSearchTemplateRequest request) {
        return this.execute(RenderSearchTemplateAction.INSTANCE, request);
    }
    public void renderSearchTemplate(RenderSearchTemplateRequest request, ActionListener<RenderSearchTemplateResponse> listener) {
        this.execute(RenderSearchTemplateAction.INSTANCE, request, listener);
    }
    public RenderSearchTemplateRequestBuilder prepareRenderSearchTemplate() {
        return new RenderSearchTemplateRequestBuilder(this, RenderSearchTemplateAction.INSTANCE);
    }
}
static class Admin implements AdminClient {
private final ClusterAdmin clusterAdmin;
private final IndicesAdmin indicesAdmin;
public Admin(ElasticsearchClient client, MarvelShieldIntegration shieldIntegration) {
this.clusterAdmin = new ClusterAdmin(client, shieldIntegration);
this.indicesAdmin = new IndicesAdmin(client, shieldIntegration);
}
public ClusterAdminClient cluster() {
return this.clusterAdmin;
}
public IndicesAdminClient indices() {
return this.indicesAdmin;
}
}
}
| elasticsearch/x-pack/marvel/src/main/java/org/elasticsearch/marvel/shield/SecuredClient.java | /*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.marvel.shield;
import org.elasticsearch.action.*;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthAction;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequestBuilder;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.action.admin.cluster.node.hotthreads.NodesHotThreadsAction;
import org.elasticsearch.action.admin.cluster.node.hotthreads.NodesHotThreadsRequest;
import org.elasticsearch.action.admin.cluster.node.hotthreads.NodesHotThreadsRequestBuilder;
import org.elasticsearch.action.admin.cluster.node.hotthreads.NodesHotThreadsResponse;
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoAction;
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest;
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequestBuilder;
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse;
import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsAction;
import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequest;
import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequestBuilder;
import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse;
import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryAction;
import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryRequest;
import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryRequestBuilder;
import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryResponse;
import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesAction;
import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesRequest;
import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesRequestBuilder;
import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesResponse;
import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryAction;
import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequest;
import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequestBuilder;
import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryResponse;
import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryAction;
import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequest;
import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequestBuilder;
import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryResponse;
import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteAction;
import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteRequest;
import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteRequestBuilder;
import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteResponse;
import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsAction;
import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest;
import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequestBuilder;
import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsResponse;
import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsAction;
import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsRequest;
import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsRequestBuilder;
import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsResponse;
import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotAction;
import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequest;
import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequestBuilder;
import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse;
import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotAction;
import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest;
import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequestBuilder;
import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotResponse;
import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsAction;
import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest;
import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequestBuilder;
import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsResponse;
import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotAction;
import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequest;
import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequestBuilder;
import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse;
import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusAction;
import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusRequest;
import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusRequestBuilder;
import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusResponse;
import org.elasticsearch.action.admin.cluster.state.ClusterStateAction;
import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest;
import org.elasticsearch.action.admin.cluster.state.ClusterStateRequestBuilder;
import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse;
import org.elasticsearch.action.admin.cluster.stats.ClusterStatsAction;
import org.elasticsearch.action.admin.cluster.stats.ClusterStatsRequest;
import org.elasticsearch.action.admin.cluster.stats.ClusterStatsRequestBuilder;
import org.elasticsearch.action.admin.cluster.stats.ClusterStatsResponse;
import org.elasticsearch.action.admin.cluster.tasks.PendingClusterTasksAction;
import org.elasticsearch.action.admin.cluster.tasks.PendingClusterTasksRequest;
import org.elasticsearch.action.admin.cluster.tasks.PendingClusterTasksRequestBuilder;
import org.elasticsearch.action.admin.cluster.tasks.PendingClusterTasksResponse;
import org.elasticsearch.action.admin.cluster.validate.template.RenderSearchTemplateAction;
import org.elasticsearch.action.admin.cluster.validate.template.RenderSearchTemplateRequest;
import org.elasticsearch.action.admin.cluster.validate.template.RenderSearchTemplateRequestBuilder;
import org.elasticsearch.action.admin.cluster.validate.template.RenderSearchTemplateResponse;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesAction;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequestBuilder;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse;
import org.elasticsearch.action.admin.indices.alias.exists.AliasesExistAction;
import org.elasticsearch.action.admin.indices.alias.exists.AliasesExistRequestBuilder;
import org.elasticsearch.action.admin.indices.alias.exists.AliasesExistResponse;
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesAction;
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest;
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequestBuilder;
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesResponse;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequestBuilder;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse;
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheAction;
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest;
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequestBuilder;
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheResponse;
import org.elasticsearch.action.admin.indices.close.CloseIndexAction;
import org.elasticsearch.action.admin.indices.close.CloseIndexRequest;
import org.elasticsearch.action.admin.indices.close.CloseIndexRequestBuilder;
import org.elasticsearch.action.admin.indices.close.CloseIndexResponse;
import org.elasticsearch.action.admin.indices.create.CreateIndexAction;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder;
import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
import org.elasticsearch.action.admin.indices.delete.DeleteIndexAction;
import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;
import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequestBuilder;
import org.elasticsearch.action.admin.indices.delete.DeleteIndexResponse;
import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsAction;
import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsRequest;
import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsRequestBuilder;
import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsResponse;
import org.elasticsearch.action.admin.indices.exists.types.TypesExistsAction;
import org.elasticsearch.action.admin.indices.exists.types.TypesExistsRequest;
import org.elasticsearch.action.admin.indices.exists.types.TypesExistsRequestBuilder;
import org.elasticsearch.action.admin.indices.exists.types.TypesExistsResponse;
import org.elasticsearch.action.admin.indices.flush.FlushAction;
import org.elasticsearch.action.admin.indices.flush.FlushRequest;
import org.elasticsearch.action.admin.indices.flush.FlushRequestBuilder;
import org.elasticsearch.action.admin.indices.flush.FlushResponse;
import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeAction;
import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest;
import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequestBuilder;
import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse;
import org.elasticsearch.action.admin.indices.get.GetIndexAction;
import org.elasticsearch.action.admin.indices.get.GetIndexRequest;
import org.elasticsearch.action.admin.indices.get.GetIndexRequestBuilder;
import org.elasticsearch.action.admin.indices.get.GetIndexResponse;
import org.elasticsearch.action.admin.indices.mapping.get.*;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingAction;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequestBuilder;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse;
import org.elasticsearch.action.admin.indices.open.OpenIndexAction;
import org.elasticsearch.action.admin.indices.open.OpenIndexRequest;
import org.elasticsearch.action.admin.indices.open.OpenIndexRequestBuilder;
import org.elasticsearch.action.admin.indices.open.OpenIndexResponse;
import org.elasticsearch.action.admin.indices.recovery.RecoveryAction;
import org.elasticsearch.action.admin.indices.recovery.RecoveryRequest;
import org.elasticsearch.action.admin.indices.recovery.RecoveryRequestBuilder;
import org.elasticsearch.action.admin.indices.recovery.RecoveryResponse;
import org.elasticsearch.action.admin.indices.refresh.RefreshAction;
import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
import org.elasticsearch.action.admin.indices.refresh.RefreshRequestBuilder;
import org.elasticsearch.action.admin.indices.refresh.RefreshResponse;
import org.elasticsearch.action.admin.indices.segments.IndicesSegmentResponse;
import org.elasticsearch.action.admin.indices.segments.IndicesSegmentsAction;
import org.elasticsearch.action.admin.indices.segments.IndicesSegmentsRequest;
import org.elasticsearch.action.admin.indices.segments.IndicesSegmentsRequestBuilder;
import org.elasticsearch.action.admin.indices.settings.get.GetSettingsAction;
import org.elasticsearch.action.admin.indices.settings.get.GetSettingsRequest;
import org.elasticsearch.action.admin.indices.settings.get.GetSettingsRequestBuilder;
import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse;
import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsAction;
import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest;
import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequestBuilder;
import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsResponse;
import org.elasticsearch.action.admin.indices.shards.IndicesShardStoreRequestBuilder;
import org.elasticsearch.action.admin.indices.shards.IndicesShardStoresAction;
import org.elasticsearch.action.admin.indices.shards.IndicesShardStoresRequest;
import org.elasticsearch.action.admin.indices.shards.IndicesShardStoresResponse;
import org.elasticsearch.action.admin.indices.stats.IndicesStatsAction;
import org.elasticsearch.action.admin.indices.stats.IndicesStatsRequest;
import org.elasticsearch.action.admin.indices.stats.IndicesStatsRequestBuilder;
import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse;
import org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateAction;
import org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateRequest;
import org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateRequestBuilder;
import org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateResponse;
import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesAction;
import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesRequest;
import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesRequestBuilder;
import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesResponse;
import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateAction;
import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest;
import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequestBuilder;
import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateResponse;
import org.elasticsearch.action.admin.indices.upgrade.get.UpgradeStatusAction;
import org.elasticsearch.action.admin.indices.upgrade.get.UpgradeStatusRequest;
import org.elasticsearch.action.admin.indices.upgrade.get.UpgradeStatusRequestBuilder;
import org.elasticsearch.action.admin.indices.upgrade.get.UpgradeStatusResponse;
import org.elasticsearch.action.admin.indices.upgrade.post.UpgradeAction;
import org.elasticsearch.action.admin.indices.upgrade.post.UpgradeRequest;
import org.elasticsearch.action.admin.indices.upgrade.post.UpgradeRequestBuilder;
import org.elasticsearch.action.admin.indices.upgrade.post.UpgradeResponse;
import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryAction;
import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequest;
import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequestBuilder;
import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryResponse;
import org.elasticsearch.action.admin.indices.warmer.delete.DeleteWarmerAction;
import org.elasticsearch.action.admin.indices.warmer.delete.DeleteWarmerRequest;
import org.elasticsearch.action.admin.indices.warmer.delete.DeleteWarmerRequestBuilder;
import org.elasticsearch.action.admin.indices.warmer.delete.DeleteWarmerResponse;
import org.elasticsearch.action.admin.indices.warmer.get.GetWarmersAction;
import org.elasticsearch.action.admin.indices.warmer.get.GetWarmersRequest;
import org.elasticsearch.action.admin.indices.warmer.get.GetWarmersRequestBuilder;
import org.elasticsearch.action.admin.indices.warmer.get.GetWarmersResponse;
import org.elasticsearch.action.admin.indices.warmer.put.PutWarmerAction;
import org.elasticsearch.action.admin.indices.warmer.put.PutWarmerRequest;
import org.elasticsearch.action.admin.indices.warmer.put.PutWarmerRequestBuilder;
import org.elasticsearch.action.admin.indices.warmer.put.PutWarmerResponse;
import org.elasticsearch.action.bulk.BulkAction;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.delete.DeleteAction;
import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.action.delete.DeleteRequestBuilder;
import org.elasticsearch.action.delete.DeleteResponse;
import org.elasticsearch.action.explain.ExplainAction;
import org.elasticsearch.action.explain.ExplainRequest;
import org.elasticsearch.action.explain.ExplainRequestBuilder;
import org.elasticsearch.action.explain.ExplainResponse;
import org.elasticsearch.action.fieldstats.FieldStatsAction;
import org.elasticsearch.action.fieldstats.FieldStatsRequest;
import org.elasticsearch.action.fieldstats.FieldStatsRequestBuilder;
import org.elasticsearch.action.fieldstats.FieldStatsResponse;
import org.elasticsearch.action.get.*;
import org.elasticsearch.action.index.IndexAction;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.indexedscripts.delete.DeleteIndexedScriptAction;
import org.elasticsearch.action.indexedscripts.delete.DeleteIndexedScriptRequest;
import org.elasticsearch.action.indexedscripts.delete.DeleteIndexedScriptRequestBuilder;
import org.elasticsearch.action.indexedscripts.delete.DeleteIndexedScriptResponse;
import org.elasticsearch.action.indexedscripts.get.GetIndexedScriptAction;
import org.elasticsearch.action.indexedscripts.get.GetIndexedScriptRequest;
import org.elasticsearch.action.indexedscripts.get.GetIndexedScriptRequestBuilder;
import org.elasticsearch.action.indexedscripts.get.GetIndexedScriptResponse;
import org.elasticsearch.action.indexedscripts.put.PutIndexedScriptAction;
import org.elasticsearch.action.indexedscripts.put.PutIndexedScriptRequest;
import org.elasticsearch.action.indexedscripts.put.PutIndexedScriptRequestBuilder;
import org.elasticsearch.action.indexedscripts.put.PutIndexedScriptResponse;
import org.elasticsearch.action.percolate.*;
import org.elasticsearch.action.search.*;
import org.elasticsearch.action.suggest.SuggestAction;
import org.elasticsearch.action.suggest.SuggestRequest;
import org.elasticsearch.action.suggest.SuggestRequestBuilder;
import org.elasticsearch.action.suggest.SuggestResponse;
import org.elasticsearch.action.termvectors.*;
import org.elasticsearch.action.update.UpdateAction;
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.action.update.UpdateRequestBuilder;
import org.elasticsearch.action.update.UpdateResponse;
import org.elasticsearch.client.*;
import org.elasticsearch.client.support.Headers;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.threadpool.ThreadPool;
/**
 * A {@link Client} wrapper that attaches the internal Marvel user to every
 * request (via {@link MarvelShieldIntegration#bindInternalMarvelUser}) before
 * delegating to the wrapped client. Admin operations are wrapped the same way.
 */
public class SecuredClient implements Client {
// The wrapped client that actually performs every operation.
private final Client client;
// Admin view; constructed over the same wrapped client so admin requests are secured too.
private final Admin admin;
// Attaches the internal Marvel user to each outgoing request.
private MarvelShieldIntegration shieldIntegration;
/**
 * @param client            the client to wrap; all calls delegate to it
 * @param shieldIntegration binds the internal Marvel user to each request
 */
@Inject
public SecuredClient(Client client, MarvelShieldIntegration shieldIntegration) {
this.client = client;
this.shieldIntegration = shieldIntegration;
this.admin = new Admin(this.client, this.shieldIntegration);
}
/** Returns the admin API view, itself wrapped so admin requests are secured. */
@Override
public AdminClient admin() {
    return this.admin;
}

/** Closes the wrapped client. */
@Override
public void close() {
    this.client.close();
}

/** Thread pool of the wrapped client. */
@Override
public ThreadPool threadPool() {
    return this.client.threadPool();
}

/** Headers of the wrapped client. */
@Override
public Headers headers() {
    return this.client.headers();
}

/** Settings of the wrapped client. */
@Override
public Settings settings() {
    return this.client.settings();
}
/**
 * Attaches the internal Marvel user to {@code request}, then delegates the
 * action to the wrapped client and returns its future.
 */
@Override
public <Request extends ActionRequest, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> ActionFuture<Response> execute(Action<Request, Response, RequestBuilder> action, Request request) {
    this.shieldIntegration.bindInternalMarvelUser(request);
    return this.client.execute(action, request);
}

/**
 * Listener variant: attaches the internal Marvel user to {@code request}
 * before delegating to the wrapped client.
 */
@Override
public <Request extends ActionRequest, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> void execute(Action<Request, Response, RequestBuilder> action, Request request, ActionListener<Response> actionListener) {
    this.shieldIntegration.bindInternalMarvelUser(request);
    this.client.execute(action, request, actionListener);
}

/**
 * Creates a request builder bound to this secured client, so that executing
 * the builder routes through {@link #execute} and gets the user attached.
 */
@Override
public <Request extends ActionRequest, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> RequestBuilder prepareExecute(Action<Request, Response, RequestBuilder> action) {
    return action.newRequestBuilder(this);
}
/** Runs an index request through the secured execute path. */
public ActionFuture<IndexResponse> index(IndexRequest request) {
    return execute(IndexAction.INSTANCE, request);
}

/** Async variant of {@link #index(IndexRequest)}. */
public void index(IndexRequest request, ActionListener<IndexResponse> listener) {
    execute(IndexAction.INSTANCE, request, listener);
}

/** Builder for an index request with no index/type/id set yet. */
public IndexRequestBuilder prepareIndex() {
    return new IndexRequestBuilder(this, IndexAction.INSTANCE, null);
}

/** Builder for an index request against {@code index}/{@code type}; id left unset. */
public IndexRequestBuilder prepareIndex(String index, String type) {
    return prepareIndex(index, type, null);
}

/** Builder for an index request against {@code index}/{@code type} with an optional explicit id. */
public IndexRequestBuilder prepareIndex(String index, String type, @Nullable String id) {
    return prepareIndex().setIndex(index).setType(type).setId(id);
}
/** Runs an update request through the secured execute path. */
public ActionFuture<UpdateResponse> update(UpdateRequest request) {
    return execute(UpdateAction.INSTANCE, request);
}

/** Async variant of {@link #update(UpdateRequest)}. */
public void update(UpdateRequest request, ActionListener<UpdateResponse> listener) {
    execute(UpdateAction.INSTANCE, request, listener);
}

/** Builder for an update request with no coordinates set yet. */
public UpdateRequestBuilder prepareUpdate() {
    return new UpdateRequestBuilder(this, UpdateAction.INSTANCE, null, null, null);
}

/** Builder for an update request targeting a concrete document. */
public UpdateRequestBuilder prepareUpdate(String index, String type, String id) {
    return new UpdateRequestBuilder(this, UpdateAction.INSTANCE, index, type, id);
}
/** Runs a delete request through the secured execute path. */
public ActionFuture<DeleteResponse> delete(DeleteRequest request) {
    return execute(DeleteAction.INSTANCE, request);
}

/** Async variant of {@link #delete(DeleteRequest)}. */
public void delete(DeleteRequest request, ActionListener<DeleteResponse> listener) {
    execute(DeleteAction.INSTANCE, request, listener);
}

/** Builder for a delete request with no coordinates set yet. */
public DeleteRequestBuilder prepareDelete() {
    return new DeleteRequestBuilder(this, DeleteAction.INSTANCE, null);
}

/** Builder for a delete request targeting a concrete document. */
public DeleteRequestBuilder prepareDelete(String index, String type, String id) {
    return prepareDelete().setIndex(index).setType(type).setId(id);
}

/** Runs a bulk request through the secured execute path. */
public ActionFuture<BulkResponse> bulk(BulkRequest request) {
    return execute(BulkAction.INSTANCE, request);
}

/** Async variant of {@link #bulk(BulkRequest)}. */
public void bulk(BulkRequest request, ActionListener<BulkResponse> listener) {
    execute(BulkAction.INSTANCE, request, listener);
}

/** Builder for a bulk request. */
public BulkRequestBuilder prepareBulk() {
    return new BulkRequestBuilder(this, BulkAction.INSTANCE);
}
/** Runs a get request through the secured execute path. */
public ActionFuture<GetResponse> get(GetRequest request) {
    return execute(GetAction.INSTANCE, request);
}

/** Async variant of {@link #get(GetRequest)}. */
public void get(GetRequest request, ActionListener<GetResponse> listener) {
    execute(GetAction.INSTANCE, request, listener);
}

/** Builder for a get request with no coordinates set yet. */
public GetRequestBuilder prepareGet() {
    return new GetRequestBuilder(this, GetAction.INSTANCE, null);
}

/** Builder for a get request targeting a concrete document. */
public GetRequestBuilder prepareGet(String index, String type, String id) {
    return prepareGet().setIndex(index).setType(type).setId(id);
}
/** Fetches an indexed script through the secured execute path. */
public ActionFuture<GetIndexedScriptResponse> getIndexedScript(GetIndexedScriptRequest request) {
    return execute(GetIndexedScriptAction.INSTANCE, request);
}

/** Async variant of {@link #getIndexedScript(GetIndexedScriptRequest)}. */
public void getIndexedScript(GetIndexedScriptRequest request, ActionListener<GetIndexedScriptResponse> listener) {
    execute(GetIndexedScriptAction.INSTANCE, request, listener);
}

/** Builder for a get-indexed-script request. */
public GetIndexedScriptRequestBuilder prepareGetIndexedScript() {
    return new GetIndexedScriptRequestBuilder(this, GetIndexedScriptAction.INSTANCE);
}

/** Builder for a get-indexed-script request for the given language and id. */
public GetIndexedScriptRequestBuilder prepareGetIndexedScript(String scriptLang, String id) {
    return prepareGetIndexedScript().setScriptLang(scriptLang).setId(id);
}

/** Builder for a put-indexed-script request. */
public PutIndexedScriptRequestBuilder preparePutIndexedScript() {
    return new PutIndexedScriptRequestBuilder(this, PutIndexedScriptAction.INSTANCE);
}

/** Builder for a put-indexed-script request, pre-populated with language, id and source. */
public PutIndexedScriptRequestBuilder preparePutIndexedScript(@Nullable String scriptLang, String id, String source) {
    PutIndexedScriptRequestBuilder builder = PutIndexedScriptAction.INSTANCE.newRequestBuilder(this);
    return builder.setScriptLang(scriptLang).setId(id).setSource(source);
}

/** Async store of an indexed script through the secured execute path. */
public void putIndexedScript(PutIndexedScriptRequest request, ActionListener<PutIndexedScriptResponse> listener) {
    execute(PutIndexedScriptAction.INSTANCE, request, listener);
}

/** Stores an indexed script through the secured execute path. */
public ActionFuture<PutIndexedScriptResponse> putIndexedScript(PutIndexedScriptRequest request) {
    return execute(PutIndexedScriptAction.INSTANCE, request);
}

/** Async deletion of an indexed script through the secured execute path. */
public void deleteIndexedScript(DeleteIndexedScriptRequest request, ActionListener<DeleteIndexedScriptResponse> listener) {
    execute(DeleteIndexedScriptAction.INSTANCE, request, listener);
}

/** Deletes an indexed script through the secured execute path. */
public ActionFuture<DeleteIndexedScriptResponse> deleteIndexedScript(DeleteIndexedScriptRequest request) {
    return execute(DeleteIndexedScriptAction.INSTANCE, request);
}

/** Builder for a delete-indexed-script request. */
public DeleteIndexedScriptRequestBuilder prepareDeleteIndexedScript() {
    DeleteIndexedScriptRequestBuilder builder = DeleteIndexedScriptAction.INSTANCE.newRequestBuilder(this);
    return builder;
}

/** Builder for a delete-indexed-script request for the given language and id. */
public DeleteIndexedScriptRequestBuilder prepareDeleteIndexedScript(@Nullable String scriptLang, String id) {
    return prepareDeleteIndexedScript().setScriptLang(scriptLang).setId(id);
}
/** Runs a multi-get request through the secured execute path. */
public ActionFuture<MultiGetResponse> multiGet(MultiGetRequest request) {
    return execute(MultiGetAction.INSTANCE, request);
}

/** Async variant of {@link #multiGet(MultiGetRequest)}. */
public void multiGet(MultiGetRequest request, ActionListener<MultiGetResponse> listener) {
    execute(MultiGetAction.INSTANCE, request, listener);
}

/** Builder for a multi-get request. */
public MultiGetRequestBuilder prepareMultiGet() {
    return new MultiGetRequestBuilder(this, MultiGetAction.INSTANCE);
}

/** Runs a search request through the secured execute path. */
public ActionFuture<SearchResponse> search(SearchRequest request) {
    return execute(SearchAction.INSTANCE, request);
}

/** Async variant of {@link #search(SearchRequest)}. */
public void search(SearchRequest request, ActionListener<SearchResponse> listener) {
    execute(SearchAction.INSTANCE, request, listener);
}

/** Builder for a search over the given indices. */
public SearchRequestBuilder prepareSearch(String... indices) {
    return new SearchRequestBuilder(this, SearchAction.INSTANCE).setIndices(indices);
}

/** Continues a scroll through the secured execute path. */
public ActionFuture<SearchResponse> searchScroll(SearchScrollRequest request) {
    return execute(SearchScrollAction.INSTANCE, request);
}

/** Async variant of {@link #searchScroll(SearchScrollRequest)}. */
public void searchScroll(SearchScrollRequest request, ActionListener<SearchResponse> listener) {
    execute(SearchScrollAction.INSTANCE, request, listener);
}

/** Builder for a scroll continuation for {@code scrollId}. */
public SearchScrollRequestBuilder prepareSearchScroll(String scrollId) {
    return new SearchScrollRequestBuilder(this, SearchScrollAction.INSTANCE, scrollId);
}

/** Runs a multi-search request through the secured execute path. */
public ActionFuture<MultiSearchResponse> multiSearch(MultiSearchRequest request) {
    return execute(MultiSearchAction.INSTANCE, request);
}

/** Async variant of {@link #multiSearch(MultiSearchRequest)}. */
public void multiSearch(MultiSearchRequest request, ActionListener<MultiSearchResponse> listener) {
    execute(MultiSearchAction.INSTANCE, request, listener);
}

/** Builder for a multi-search request. */
public MultiSearchRequestBuilder prepareMultiSearch() {
    return new MultiSearchRequestBuilder(this, MultiSearchAction.INSTANCE);
}
/** Runs a suggest request through the secured execute path. */
public ActionFuture<SuggestResponse> suggest(SuggestRequest request) {
    return execute(SuggestAction.INSTANCE, request);
}

/** Async variant of {@link #suggest(SuggestRequest)}. */
public void suggest(SuggestRequest request, ActionListener<SuggestResponse> listener) {
    execute(SuggestAction.INSTANCE, request, listener);
}

/** Builder for a suggest request over the given indices. */
public SuggestRequestBuilder prepareSuggest(String... indices) {
    return new SuggestRequestBuilder(this, SuggestAction.INSTANCE).setIndices(indices);
}

/** Runs a term-vectors request through the secured execute path. */
public ActionFuture<TermVectorsResponse> termVectors(TermVectorsRequest request) {
    return execute(TermVectorsAction.INSTANCE, request);
}

/** Async variant of {@link #termVectors(TermVectorsRequest)}. */
public void termVectors(TermVectorsRequest request, ActionListener<TermVectorsResponse> listener) {
    execute(TermVectorsAction.INSTANCE, request, listener);
}

/** Builder for a term-vectors request with no coordinates set yet. */
public TermVectorsRequestBuilder prepareTermVectors() {
    return new TermVectorsRequestBuilder(this, TermVectorsAction.INSTANCE);
}

/** Builder for a term-vectors request targeting a concrete document. */
public TermVectorsRequestBuilder prepareTermVectors(String index, String type, String id) {
    return new TermVectorsRequestBuilder(this, TermVectorsAction.INSTANCE, index, type, id);
}
/** @deprecated use {@link #termVectors(TermVectorsRequest)} instead */
@Deprecated
public ActionFuture<TermVectorsResponse> termVector(TermVectorsRequest request) {
    return termVectors(request);
}

/** @deprecated use {@link #termVectors(TermVectorsRequest, ActionListener)} instead */
@Deprecated
public void termVector(TermVectorsRequest request, ActionListener<TermVectorsResponse> listener) {
    termVectors(request, listener);
}

/** @deprecated use {@link #prepareTermVectors()} instead */
@Deprecated
public TermVectorsRequestBuilder prepareTermVector() {
    return prepareTermVectors();
}

/** @deprecated use {@link #prepareTermVectors(String, String, String)} instead */
@Deprecated
public TermVectorsRequestBuilder prepareTermVector(String index, String type, String id) {
    return prepareTermVectors(index, type, id);
}
/** Runs a multi-term-vectors request through the secured execute path. */
public ActionFuture<MultiTermVectorsResponse> multiTermVectors(MultiTermVectorsRequest request) {
    return execute(MultiTermVectorsAction.INSTANCE, request);
}

/** Async variant of {@link #multiTermVectors(MultiTermVectorsRequest)}. */
public void multiTermVectors(MultiTermVectorsRequest request, ActionListener<MultiTermVectorsResponse> listener) {
    execute(MultiTermVectorsAction.INSTANCE, request, listener);
}

/** Builder for a multi-term-vectors request. */
public MultiTermVectorsRequestBuilder prepareMultiTermVectors() {
    return new MultiTermVectorsRequestBuilder(this, MultiTermVectorsAction.INSTANCE);
}

/** Runs a percolate request through the secured execute path. */
public ActionFuture<PercolateResponse> percolate(PercolateRequest request) {
    return execute(PercolateAction.INSTANCE, request);
}

/** Async variant of {@link #percolate(PercolateRequest)}. */
public void percolate(PercolateRequest request, ActionListener<PercolateResponse> listener) {
    execute(PercolateAction.INSTANCE, request, listener);
}

/** Builder for a percolate request. */
public PercolateRequestBuilder preparePercolate() {
    return new PercolateRequestBuilder(this, PercolateAction.INSTANCE);
}

/** Builder for a multi-percolate request. */
public MultiPercolateRequestBuilder prepareMultiPercolate() {
    return new MultiPercolateRequestBuilder(this, MultiPercolateAction.INSTANCE);
}

/** Async variant of {@link #multiPercolate(MultiPercolateRequest)}. */
public void multiPercolate(MultiPercolateRequest request, ActionListener<MultiPercolateResponse> listener) {
    execute(MultiPercolateAction.INSTANCE, request, listener);
}

/** Runs a multi-percolate request through the secured execute path. */
public ActionFuture<MultiPercolateResponse> multiPercolate(MultiPercolateRequest request) {
    return execute(MultiPercolateAction.INSTANCE, request);
}
/** Builder for an explain request targeting a concrete document. */
public ExplainRequestBuilder prepareExplain(String index, String type, String id) {
    return new ExplainRequestBuilder(this, ExplainAction.INSTANCE, index, type, id);
}

/** Runs an explain request through the secured execute path. */
public ActionFuture<ExplainResponse> explain(ExplainRequest request) {
    return execute(ExplainAction.INSTANCE, request);
}

/** Async variant of {@link #explain(ExplainRequest)}. */
public void explain(ExplainRequest request, ActionListener<ExplainResponse> listener) {
    execute(ExplainAction.INSTANCE, request, listener);
}

/** Async variant of {@link #clearScroll(ClearScrollRequest)}. */
public void clearScroll(ClearScrollRequest request, ActionListener<ClearScrollResponse> listener) {
    execute(ClearScrollAction.INSTANCE, request, listener);
}

/** Clears scroll contexts through the secured execute path. */
public ActionFuture<ClearScrollResponse> clearScroll(ClearScrollRequest request) {
    return execute(ClearScrollAction.INSTANCE, request);
}

/** Builder for a clear-scroll request. */
public ClearScrollRequestBuilder prepareClearScroll() {
    return new ClearScrollRequestBuilder(this, ClearScrollAction.INSTANCE);
}

/** Async variant of {@link #fieldStats(FieldStatsRequest)}. */
public void fieldStats(FieldStatsRequest request, ActionListener<FieldStatsResponse> listener) {
    execute(FieldStatsAction.INSTANCE, request, listener);
}

/** Runs a field-stats request through the secured execute path. */
public ActionFuture<FieldStatsResponse> fieldStats(FieldStatsRequest request) {
    return execute(FieldStatsAction.INSTANCE, request);
}

/** Builder for a field-stats request. */
public FieldStatsRequestBuilder prepareFieldStats() {
    return new FieldStatsRequestBuilder(this, FieldStatsAction.INSTANCE);
}
static class IndicesAdmin implements IndicesAdminClient {
// Underlying client used for all delegated indices-admin calls.
private final ElasticsearchClient client;
// Attaches the internal Marvel user to each outgoing request.
private final MarvelShieldIntegration shieldIntegration;
/**
 * @param client            client to delegate indices-admin operations to
 * @param shieldIntegration binds the internal Marvel user to each request
 */
public IndicesAdmin(ElasticsearchClient client, MarvelShieldIntegration shieldIntegration) {
this.client = client;
this.shieldIntegration = shieldIntegration;
}
/**
 * Attaches the internal Marvel user to {@code request}, then delegates the
 * action to the underlying client and returns its future.
 */
public <Request extends ActionRequest, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> ActionFuture<Response> execute(Action<Request, Response, RequestBuilder> action, Request request) {
    shieldIntegration.bindInternalMarvelUser(request);
    return client.execute(action, request);
}

/**
 * Listener variant: attaches the internal Marvel user to {@code request}
 * before delegating to the underlying client.
 */
public <Request extends ActionRequest, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> void execute(Action<Request, Response, RequestBuilder> action, Request request, ActionListener<Response> listener) {
    shieldIntegration.bindInternalMarvelUser(request);
    client.execute(action, request, listener);
}
/**
 * Creates a request builder bound to this secured wrapper so that executing
 * the builder routes through {@link #execute} and gets the internal Marvel
 * user attached. The previous implementation delegated to
 * {@code this.client.prepareExecute(action)}, which bound the builder to the
 * underlying client and therefore bypassed
 * {@code bindInternalMarvelUser} for builder-driven requests — unlike every
 * other {@code prepare*} method in this class, which binds builders to
 * {@code this}.
 */
public <Request extends ActionRequest, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> RequestBuilder prepareExecute(Action<Request, Response, RequestBuilder> action) {
    return action.newRequestBuilder(this);
}
/** Thread pool of the underlying client. */
public ThreadPool threadPool() {
    return client.threadPool();
}

/** Checks whether the given indices exist, via the secured execute path. */
public ActionFuture<IndicesExistsResponse> exists(IndicesExistsRequest request) {
    return execute(IndicesExistsAction.INSTANCE, request);
}

/** Async variant of {@link #exists(IndicesExistsRequest)}. */
public void exists(IndicesExistsRequest request, ActionListener<IndicesExistsResponse> listener) {
    execute(IndicesExistsAction.INSTANCE, request, listener);
}

/** Builder for an indices-exists check over the given indices. */
public IndicesExistsRequestBuilder prepareExists(String... indices) {
    return new IndicesExistsRequestBuilder(this, IndicesExistsAction.INSTANCE, indices);
}
/** Checks whether the given types exist, via the secured execute path. */
public ActionFuture<TypesExistsResponse> typesExists(TypesExistsRequest request) {
    return execute(TypesExistsAction.INSTANCE, request);
}

/** Async variant of {@link #typesExists(TypesExistsRequest)}. */
public void typesExists(TypesExistsRequest request, ActionListener<TypesExistsResponse> listener) {
    execute(TypesExistsAction.INSTANCE, request, listener);
}

/** Builder for a types-exists check over the given indices. */
public TypesExistsRequestBuilder prepareTypesExists(String... index) {
    return new TypesExistsRequestBuilder(this, TypesExistsAction.INSTANCE, index);
}

/** Applies alias changes via the secured execute path. */
public ActionFuture<IndicesAliasesResponse> aliases(IndicesAliasesRequest request) {
    return execute(IndicesAliasesAction.INSTANCE, request);
}

/** Async variant of {@link #aliases(IndicesAliasesRequest)}. */
public void aliases(IndicesAliasesRequest request, ActionListener<IndicesAliasesResponse> listener) {
    execute(IndicesAliasesAction.INSTANCE, request, listener);
}

/** Builder for an indices-aliases change request. */
public IndicesAliasesRequestBuilder prepareAliases() {
    return new IndicesAliasesRequestBuilder(this, IndicesAliasesAction.INSTANCE);
}
/** Fetches aliases via the secured execute path. */
public ActionFuture<GetAliasesResponse> getAliases(GetAliasesRequest request) {
    return execute(GetAliasesAction.INSTANCE, request);
}

/** Async variant of {@link #getAliases(GetAliasesRequest)}. */
public void getAliases(GetAliasesRequest request, ActionListener<GetAliasesResponse> listener) {
    execute(GetAliasesAction.INSTANCE, request, listener);
}

/** Builder for a get-aliases request for the given alias names. */
public GetAliasesRequestBuilder prepareGetAliases(String... aliases) {
    return new GetAliasesRequestBuilder(this, GetAliasesAction.INSTANCE, aliases);
}

// NOTE: the listener variant of clearCache lives further down in this class.
/** Clears index caches via the secured execute path. */
public ActionFuture<ClearIndicesCacheResponse> clearCache(ClearIndicesCacheRequest request) {
    return execute(ClearIndicesCacheAction.INSTANCE, request);
}

/** Async check whether the given aliases exist. */
public void aliasesExist(GetAliasesRequest request, ActionListener<AliasesExistResponse> listener) {
    execute(AliasesExistAction.INSTANCE, request, listener);
}

/** Checks whether the given aliases exist, via the secured execute path. */
public ActionFuture<AliasesExistResponse> aliasesExist(GetAliasesRequest request) {
    return execute(AliasesExistAction.INSTANCE, request);
}

/** Builder for an aliases-exist check for the given alias names. */
public AliasesExistRequestBuilder prepareAliasesExist(String... aliases) {
    return new AliasesExistRequestBuilder(this, AliasesExistAction.INSTANCE, aliases);
}
/** Fetches index metadata via the secured execute path. */
public ActionFuture<GetIndexResponse> getIndex(GetIndexRequest request) {
    return execute(GetIndexAction.INSTANCE, request);
}

/** Async variant of {@link #getIndex(GetIndexRequest)}. */
public void getIndex(GetIndexRequest request, ActionListener<GetIndexResponse> listener) {
    execute(GetIndexAction.INSTANCE, request, listener);
}

/** Builder for a get-index request with no indices selected yet. */
public GetIndexRequestBuilder prepareGetIndex() {
    return new GetIndexRequestBuilder(this, GetIndexAction.INSTANCE, Strings.EMPTY_ARRAY);
}

/** Async variant of {@link #clearCache(ClearIndicesCacheRequest)}. */
public void clearCache(ClearIndicesCacheRequest request, ActionListener<ClearIndicesCacheResponse> listener) {
    execute(ClearIndicesCacheAction.INSTANCE, request, listener);
}

/** Builder for a clear-cache request over the given indices. */
public ClearIndicesCacheRequestBuilder prepareClearCache(String... indices) {
    return new ClearIndicesCacheRequestBuilder(this, ClearIndicesCacheAction.INSTANCE).setIndices(indices);
}
/** Creates an index via the secured execute path. */
public ActionFuture<CreateIndexResponse> create(CreateIndexRequest request) {
    return execute(CreateIndexAction.INSTANCE, request);
}

/** Async variant of {@link #create(CreateIndexRequest)}. */
public void create(CreateIndexRequest request, ActionListener<CreateIndexResponse> listener) {
    execute(CreateIndexAction.INSTANCE, request, listener);
}

/** Builder for a create-index request for {@code index}. */
public CreateIndexRequestBuilder prepareCreate(String index) {
    return new CreateIndexRequestBuilder(this, CreateIndexAction.INSTANCE, index);
}

/** Deletes indices via the secured execute path. */
public ActionFuture<DeleteIndexResponse> delete(DeleteIndexRequest request) {
    return execute(DeleteIndexAction.INSTANCE, request);
}

/** Async variant of {@link #delete(DeleteIndexRequest)}. */
public void delete(DeleteIndexRequest request, ActionListener<DeleteIndexResponse> listener) {
    execute(DeleteIndexAction.INSTANCE, request, listener);
}

/** Builder for a delete-index request over the given indices. */
public DeleteIndexRequestBuilder prepareDelete(String... indices) {
    return new DeleteIndexRequestBuilder(this, DeleteIndexAction.INSTANCE, indices);
}

/** Closes indices via the secured execute path. */
public ActionFuture<CloseIndexResponse> close(CloseIndexRequest request) {
    return execute(CloseIndexAction.INSTANCE, request);
}

/** Async variant of {@link #close(CloseIndexRequest)}. */
public void close(CloseIndexRequest request, ActionListener<CloseIndexResponse> listener) {
    execute(CloseIndexAction.INSTANCE, request, listener);
}

/** Builder for a close-index request over the given indices. */
public CloseIndexRequestBuilder prepareClose(String... indices) {
    return new CloseIndexRequestBuilder(this, CloseIndexAction.INSTANCE, indices);
}

/** Opens indices via the secured execute path. */
public ActionFuture<OpenIndexResponse> open(OpenIndexRequest request) {
    return execute(OpenIndexAction.INSTANCE, request);
}

/** Async variant of {@link #open(OpenIndexRequest)}. */
public void open(OpenIndexRequest request, ActionListener<OpenIndexResponse> listener) {
    execute(OpenIndexAction.INSTANCE, request, listener);
}

/** Builder for an open-index request over the given indices. */
public OpenIndexRequestBuilder prepareOpen(String... indices) {
    return new OpenIndexRequestBuilder(this, OpenIndexAction.INSTANCE, indices);
}
public ActionFuture<FlushResponse> flush(FlushRequest request) {
return this.execute(FlushAction.INSTANCE, request);
}
public void flush(FlushRequest request, ActionListener<FlushResponse> listener) {
this.execute(FlushAction.INSTANCE, request, listener);
}
public FlushRequestBuilder prepareFlush(String... indices) {
return (new FlushRequestBuilder(this, FlushAction.INSTANCE)).setIndices(indices);
}
// --- Mapping and force-merge delegators -----------------------------------

/** Fetches index mappings asynchronously. */
public void getMappings(GetMappingsRequest request, ActionListener<GetMappingsResponse> listener) {
    this.execute(GetMappingsAction.INSTANCE, request, listener);
}
/** Fetches per-field mappings asynchronously. */
public void getFieldMappings(GetFieldMappingsRequest request, ActionListener<GetFieldMappingsResponse> listener) {
    this.execute(GetFieldMappingsAction.INSTANCE, request, listener);
}
/** Builder form of {@link #getMappings}. */
public GetMappingsRequestBuilder prepareGetMappings(String... indices) {
    return new GetMappingsRequestBuilder(this, GetMappingsAction.INSTANCE, indices);
}
/** Fetches index mappings; returns a future with the response. */
public ActionFuture<GetMappingsResponse> getMappings(GetMappingsRequest request) {
    return this.execute(GetMappingsAction.INSTANCE, request);
}
/** Builder form of {@link #getFieldMappings}. */
public GetFieldMappingsRequestBuilder prepareGetFieldMappings(String... indices) {
    return new GetFieldMappingsRequestBuilder(this, GetFieldMappingsAction.INSTANCE, indices);
}
/** Fetches per-field mappings; returns a future with the response. */
public ActionFuture<GetFieldMappingsResponse> getFieldMappings(GetFieldMappingsRequest request) {
    return this.execute(GetFieldMappingsAction.INSTANCE, request);
}
/** Adds/updates a mapping; returns a future with the response. */
public ActionFuture<PutMappingResponse> putMapping(PutMappingRequest request) {
    return this.execute(PutMappingAction.INSTANCE, request);
}
/** Adds/updates a mapping asynchronously. */
public void putMapping(PutMappingRequest request, ActionListener<PutMappingResponse> listener) {
    this.execute(PutMappingAction.INSTANCE, request, listener);
}
/** Builder form of {@link #putMapping}. */
public PutMappingRequestBuilder preparePutMapping(String... indices) {
    return new PutMappingRequestBuilder(this, PutMappingAction.INSTANCE).setIndices(indices);
}
// NOTE(review): only the forceMerge group carries @Override in this class;
// the siblings implement the same interface but omit it -- harmless, but
// inconsistent.
@Override
public ActionFuture<ForceMergeResponse> forceMerge(ForceMergeRequest request) {
    return this.execute(ForceMergeAction.INSTANCE, request);
}
@Override
public void forceMerge(ForceMergeRequest request, ActionListener<ForceMergeResponse> listener) {
    this.execute(ForceMergeAction.INSTANCE, request, listener);
}
@Override
public ForceMergeRequestBuilder prepareForceMerge(String... indices) {
    return (new ForceMergeRequestBuilder(this, ForceMergeAction.INSTANCE)).setIndices(indices);
}
// --- Upgrade and refresh delegators ---------------------------------------

/** Upgrades index segments; returns a future with the response. */
public ActionFuture<UpgradeResponse> upgrade(UpgradeRequest request) {
    return this.execute(UpgradeAction.INSTANCE, request);
}
/** Upgrades index segments asynchronously. */
public void upgrade(UpgradeRequest request, ActionListener<UpgradeResponse> listener) {
    this.execute(UpgradeAction.INSTANCE, request, listener);
}
/** Builder form of {@link #upgrade}. */
public UpgradeRequestBuilder prepareUpgrade(String... indices) {
    return new UpgradeRequestBuilder(this, UpgradeAction.INSTANCE).setIndices(indices);
}
/** Queries upgrade status; returns a future with the response. */
public ActionFuture<UpgradeStatusResponse> upgradeStatus(UpgradeStatusRequest request) {
    return this.execute(UpgradeStatusAction.INSTANCE, request);
}
/** Queries upgrade status asynchronously. */
public void upgradeStatus(UpgradeStatusRequest request, ActionListener<UpgradeStatusResponse> listener) {
    this.execute(UpgradeStatusAction.INSTANCE, request, listener);
}
/** Builder form of {@link #upgradeStatus}. */
public UpgradeStatusRequestBuilder prepareUpgradeStatus(String... indices) {
    return new UpgradeStatusRequestBuilder(this, UpgradeStatusAction.INSTANCE).setIndices(indices);
}
/** Refreshes one or more indices; returns a future with the response. */
public ActionFuture<RefreshResponse> refresh(RefreshRequest request) {
    return this.execute(RefreshAction.INSTANCE, request);
}
/** Refreshes one or more indices asynchronously. */
public void refresh(RefreshRequest request, ActionListener<RefreshResponse> listener) {
    this.execute(RefreshAction.INSTANCE, request, listener);
}
/** Builder form of {@link #refresh}. */
public RefreshRequestBuilder prepareRefresh(String... indices) {
    return new RefreshRequestBuilder(this, RefreshAction.INSTANCE).setIndices(indices);
}
// --- Stats, recovery, segment, shard-store and settings delegators --------

/** Fetches index statistics; returns a future with the response. */
public ActionFuture<IndicesStatsResponse> stats(IndicesStatsRequest request) {
    return this.execute(IndicesStatsAction.INSTANCE, request);
}
/** Fetches index statistics asynchronously. */
public void stats(IndicesStatsRequest request, ActionListener<IndicesStatsResponse> listener) {
    this.execute(IndicesStatsAction.INSTANCE, request, listener);
}
/** Builder form of {@link #stats}. */
public IndicesStatsRequestBuilder prepareStats(String... indices) {
    return new IndicesStatsRequestBuilder(this, IndicesStatsAction.INSTANCE).setIndices(indices);
}
/** Fetches shard recovery information; returns a future with the response. */
public ActionFuture<RecoveryResponse> recoveries(RecoveryRequest request) {
    return this.execute(RecoveryAction.INSTANCE, request);
}
/** Fetches shard recovery information asynchronously. */
public void recoveries(RecoveryRequest request, ActionListener<RecoveryResponse> listener) {
    this.execute(RecoveryAction.INSTANCE, request, listener);
}
/** Builder form of {@link #recoveries}. */
public RecoveryRequestBuilder prepareRecoveries(String... indices) {
    return new RecoveryRequestBuilder(this, RecoveryAction.INSTANCE).setIndices(indices);
}
/** Fetches low-level segment information; returns a future with the response. */
public ActionFuture<IndicesSegmentResponse> segments(IndicesSegmentsRequest request) {
    return this.execute(IndicesSegmentsAction.INSTANCE, request);
}
/** Fetches low-level segment information asynchronously. */
public void segments(IndicesSegmentsRequest request, ActionListener<IndicesSegmentResponse> listener) {
    this.execute(IndicesSegmentsAction.INSTANCE, request, listener);
}
/** Builder form of {@link #segments}. */
public IndicesSegmentsRequestBuilder prepareSegments(String... indices) {
    return new IndicesSegmentsRequestBuilder(this, IndicesSegmentsAction.INSTANCE).setIndices(indices);
}
/** Fetches shard store information; returns a future with the response. */
public ActionFuture<IndicesShardStoresResponse> shardStores(IndicesShardStoresRequest request) {
    return this.execute(IndicesShardStoresAction.INSTANCE, request);
}
/** Fetches shard store information asynchronously. */
public void shardStores(IndicesShardStoresRequest request, ActionListener<IndicesShardStoresResponse> listener) {
    this.execute(IndicesShardStoresAction.INSTANCE, request, listener);
}
/** Builder form of {@link #shardStores}. */
public IndicesShardStoreRequestBuilder prepareShardStores(String... indices) {
    return new IndicesShardStoreRequestBuilder(this, IndicesShardStoresAction.INSTANCE, indices);
}
/** Updates index settings; returns a future with the response. */
public ActionFuture<UpdateSettingsResponse> updateSettings(UpdateSettingsRequest request) {
    return this.execute(UpdateSettingsAction.INSTANCE, request);
}
/** Updates index settings asynchronously. */
public void updateSettings(UpdateSettingsRequest request, ActionListener<UpdateSettingsResponse> listener) {
    this.execute(UpdateSettingsAction.INSTANCE, request, listener);
}
/** Builder form of {@link #updateSettings}; starts from an empty index list. */
public UpdateSettingsRequestBuilder prepareUpdateSettings(String... indices) {
    return new UpdateSettingsRequestBuilder(this, UpdateSettingsAction.INSTANCE, Strings.EMPTY_ARRAY).setIndices(indices);
}
// --- Analyze, template and validate-query delegators ----------------------

/** Runs text analysis; returns a future with the response. */
public ActionFuture<AnalyzeResponse> analyze(AnalyzeRequest request) {
    return this.execute(AnalyzeAction.INSTANCE, request);
}
/** Runs text analysis asynchronously. */
public void analyze(AnalyzeRequest request, ActionListener<AnalyzeResponse> listener) {
    this.execute(AnalyzeAction.INSTANCE, request, listener);
}
/** Builder form of {@link #analyze} for a given index (may be null) and text. */
public AnalyzeRequestBuilder prepareAnalyze(@Nullable String index, String text) {
    return new AnalyzeRequestBuilder(this, AnalyzeAction.INSTANCE, index, text);
}
/** Builder form of {@link #analyze} with no target index. */
public AnalyzeRequestBuilder prepareAnalyze(String text) {
    return new AnalyzeRequestBuilder(this, AnalyzeAction.INSTANCE, null, text);
}
/** Builder form of {@link #analyze} with neither index nor text preset. */
public AnalyzeRequestBuilder prepareAnalyze() {
    return new AnalyzeRequestBuilder(this, AnalyzeAction.INSTANCE);
}
/** Stores an index template; returns a future with the response. */
public ActionFuture<PutIndexTemplateResponse> putTemplate(PutIndexTemplateRequest request) {
    return this.execute(PutIndexTemplateAction.INSTANCE, request);
}
/** Stores an index template asynchronously. */
public void putTemplate(PutIndexTemplateRequest request, ActionListener<PutIndexTemplateResponse> listener) {
    this.execute(PutIndexTemplateAction.INSTANCE, request, listener);
}
/** Builder form of {@link #putTemplate}. */
public PutIndexTemplateRequestBuilder preparePutTemplate(String name) {
    return new PutIndexTemplateRequestBuilder(this, PutIndexTemplateAction.INSTANCE, name);
}
/** Fetches index templates; returns a future with the response. */
public ActionFuture<GetIndexTemplatesResponse> getTemplates(GetIndexTemplatesRequest request) {
    return this.execute(GetIndexTemplatesAction.INSTANCE, request);
}
/** Fetches index templates asynchronously. */
public void getTemplates(GetIndexTemplatesRequest request, ActionListener<GetIndexTemplatesResponse> listener) {
    this.execute(GetIndexTemplatesAction.INSTANCE, request, listener);
}
/** Builder form of {@link #getTemplates}. */
public GetIndexTemplatesRequestBuilder prepareGetTemplates(String... names) {
    return new GetIndexTemplatesRequestBuilder(this, GetIndexTemplatesAction.INSTANCE, names);
}
/** Deletes an index template; returns a future with the response. */
public ActionFuture<DeleteIndexTemplateResponse> deleteTemplate(DeleteIndexTemplateRequest request) {
    return this.execute(DeleteIndexTemplateAction.INSTANCE, request);
}
/** Deletes an index template asynchronously. */
public void deleteTemplate(DeleteIndexTemplateRequest request, ActionListener<DeleteIndexTemplateResponse> listener) {
    this.execute(DeleteIndexTemplateAction.INSTANCE, request, listener);
}
/** Builder form of {@link #deleteTemplate}. */
public DeleteIndexTemplateRequestBuilder prepareDeleteTemplate(String name) {
    return new DeleteIndexTemplateRequestBuilder(this, DeleteIndexTemplateAction.INSTANCE, name);
}
/** Validates a query without executing it; returns a future with the response. */
public ActionFuture<ValidateQueryResponse> validateQuery(ValidateQueryRequest request) {
    return this.execute(ValidateQueryAction.INSTANCE, request);
}
/** Validates a query without executing it, asynchronously. */
public void validateQuery(ValidateQueryRequest request, ActionListener<ValidateQueryResponse> listener) {
    this.execute(ValidateQueryAction.INSTANCE, request, listener);
}
/** Builder form of {@link #validateQuery}. */
public ValidateQueryRequestBuilder prepareValidateQuery(String... indices) {
    return new ValidateQueryRequestBuilder(this, ValidateQueryAction.INSTANCE).setIndices(indices);
}
// --- Warmer and get-settings delegators -----------------------------------

/** Registers a search warmer; returns a future with the response. */
public ActionFuture<PutWarmerResponse> putWarmer(PutWarmerRequest request) {
    return this.execute(PutWarmerAction.INSTANCE, request);
}
/** Registers a search warmer asynchronously. */
public void putWarmer(PutWarmerRequest request, ActionListener<PutWarmerResponse> listener) {
    this.execute(PutWarmerAction.INSTANCE, request, listener);
}
/** Builder form of {@link #putWarmer}. */
public PutWarmerRequestBuilder preparePutWarmer(String name) {
    return new PutWarmerRequestBuilder(this, PutWarmerAction.INSTANCE, name);
}
/** Deletes a search warmer; returns a future with the response. */
public ActionFuture<DeleteWarmerResponse> deleteWarmer(DeleteWarmerRequest request) {
    return this.execute(DeleteWarmerAction.INSTANCE, request);
}
/** Deletes a search warmer asynchronously. */
public void deleteWarmer(DeleteWarmerRequest request, ActionListener<DeleteWarmerResponse> listener) {
    this.execute(DeleteWarmerAction.INSTANCE, request, listener);
}
/** Builder form of {@link #deleteWarmer}. */
public DeleteWarmerRequestBuilder prepareDeleteWarmer() {
    return new DeleteWarmerRequestBuilder(this, DeleteWarmerAction.INSTANCE);
}
/** Builder form of {@link #getWarmers}. */
public GetWarmersRequestBuilder prepareGetWarmers(String... indices) {
    return new GetWarmersRequestBuilder(this, GetWarmersAction.INSTANCE, indices);
}
/** Fetches registered warmers; returns a future with the response. */
public ActionFuture<GetWarmersResponse> getWarmers(GetWarmersRequest request) {
    return this.execute(GetWarmersAction.INSTANCE, request);
}
/** Fetches registered warmers asynchronously. */
public void getWarmers(GetWarmersRequest request, ActionListener<GetWarmersResponse> listener) {
    this.execute(GetWarmersAction.INSTANCE, request, listener);
}
/** Builder form of {@link #getSettings}. */
public GetSettingsRequestBuilder prepareGetSettings(String... indices) {
    return new GetSettingsRequestBuilder(this, GetSettingsAction.INSTANCE, indices);
}
/** Fetches index settings; returns a future with the response. */
public ActionFuture<GetSettingsResponse> getSettings(GetSettingsRequest request) {
    return this.execute(GetSettingsAction.INSTANCE, request);
}
/** Fetches index settings asynchronously. */
public void getSettings(GetSettingsRequest request, ActionListener<GetSettingsResponse> listener) {
    this.execute(GetSettingsAction.INSTANCE, request, listener);
}
}
/**
 * Cluster-level admin client wrapper. Every request funnels through the two
 * {@code execute} overloads below, which first attach the internal Marvel
 * user via {@code shieldIntegration.bindInternalMarvelUser(request)} and then
 * delegate to the wrapped {@code client}. All other methods are thin
 * delegators to {@code execute} (or request-builder factories).
 */
static class ClusterAdmin implements ClusterAdminClient {
    private final ElasticsearchClient client;
    private final MarvelShieldIntegration shieldIntegration;
    public ClusterAdmin(ElasticsearchClient client, MarvelShieldIntegration shieldIntegration) {
        this.client = client;
        this.shieldIntegration = shieldIntegration;
    }
    /** Binds the internal Marvel user to the request, then executes it (future form). */
    public <Request extends ActionRequest, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> ActionFuture<Response> execute(Action<Request, Response, RequestBuilder> action, Request request) {
        shieldIntegration.bindInternalMarvelUser(request);
        return this.client.execute(action, request);
    }
    /** Binds the internal Marvel user to the request, then executes it (listener form). */
    public <Request extends ActionRequest, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> void execute(Action<Request, Response, RequestBuilder> action, Request request, ActionListener<Response> listener) {
        shieldIntegration.bindInternalMarvelUser(request);
        this.client.execute(action, request, listener);
    }
    // NOTE(review): prepareExecute delegates straight to the wrapped client,
    // so the builder's eventual execute() does not pass through the
    // user-binding overloads above -- confirm that is intended.
    public <Request extends ActionRequest, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> RequestBuilder prepareExecute(Action<Request, Response, RequestBuilder> action) {
        return this.client.prepareExecute(action);
    }
    public ThreadPool threadPool() {
        return this.client.threadPool();
    }
    // --- cluster health / state / routing ---------------------------------
    public ActionFuture<ClusterHealthResponse> health(ClusterHealthRequest request) {
        return this.execute(ClusterHealthAction.INSTANCE, request);
    }
    public void health(ClusterHealthRequest request, ActionListener<ClusterHealthResponse> listener) {
        this.execute(ClusterHealthAction.INSTANCE, request, listener);
    }
    public ClusterHealthRequestBuilder prepareHealth(String... indices) {
        return new ClusterHealthRequestBuilder(this, ClusterHealthAction.INSTANCE).setIndices(indices);
    }
    public ActionFuture<ClusterStateResponse> state(ClusterStateRequest request) {
        return this.execute(ClusterStateAction.INSTANCE, request);
    }
    public void state(ClusterStateRequest request, ActionListener<ClusterStateResponse> listener) {
        this.execute(ClusterStateAction.INSTANCE, request, listener);
    }
    public ClusterStateRequestBuilder prepareState() {
        return new ClusterStateRequestBuilder(this, ClusterStateAction.INSTANCE);
    }
    public ActionFuture<ClusterRerouteResponse> reroute(ClusterRerouteRequest request) {
        return this.execute(ClusterRerouteAction.INSTANCE, request);
    }
    public void reroute(ClusterRerouteRequest request, ActionListener<ClusterRerouteResponse> listener) {
        this.execute(ClusterRerouteAction.INSTANCE, request, listener);
    }
    public ClusterRerouteRequestBuilder prepareReroute() {
        return new ClusterRerouteRequestBuilder(this, ClusterRerouteAction.INSTANCE);
    }
    public ActionFuture<ClusterUpdateSettingsResponse> updateSettings(ClusterUpdateSettingsRequest request) {
        return this.execute(ClusterUpdateSettingsAction.INSTANCE, request);
    }
    public void updateSettings(ClusterUpdateSettingsRequest request, ActionListener<ClusterUpdateSettingsResponse> listener) {
        this.execute(ClusterUpdateSettingsAction.INSTANCE, request, listener);
    }
    public ClusterUpdateSettingsRequestBuilder prepareUpdateSettings() {
        return new ClusterUpdateSettingsRequestBuilder(this, ClusterUpdateSettingsAction.INSTANCE);
    }
    // --- node / cluster introspection -------------------------------------
    public ActionFuture<NodesInfoResponse> nodesInfo(NodesInfoRequest request) {
        return this.execute(NodesInfoAction.INSTANCE, request);
    }
    public void nodesInfo(NodesInfoRequest request, ActionListener<NodesInfoResponse> listener) {
        this.execute(NodesInfoAction.INSTANCE, request, listener);
    }
    public NodesInfoRequestBuilder prepareNodesInfo(String... nodesIds) {
        return new NodesInfoRequestBuilder(this, NodesInfoAction.INSTANCE).setNodesIds(nodesIds);
    }
    public ActionFuture<NodesStatsResponse> nodesStats(NodesStatsRequest request) {
        return this.execute(NodesStatsAction.INSTANCE, request);
    }
    public void nodesStats(NodesStatsRequest request, ActionListener<NodesStatsResponse> listener) {
        this.execute(NodesStatsAction.INSTANCE, request, listener);
    }
    public NodesStatsRequestBuilder prepareNodesStats(String... nodesIds) {
        return new NodesStatsRequestBuilder(this, NodesStatsAction.INSTANCE).setNodesIds(nodesIds);
    }
    public ActionFuture<ClusterStatsResponse> clusterStats(ClusterStatsRequest request) {
        return this.execute(ClusterStatsAction.INSTANCE, request);
    }
    public void clusterStats(ClusterStatsRequest request, ActionListener<ClusterStatsResponse> listener) {
        this.execute(ClusterStatsAction.INSTANCE, request, listener);
    }
    public ClusterStatsRequestBuilder prepareClusterStats() {
        return new ClusterStatsRequestBuilder(this, ClusterStatsAction.INSTANCE);
    }
    public ActionFuture<NodesHotThreadsResponse> nodesHotThreads(NodesHotThreadsRequest request) {
        return this.execute(NodesHotThreadsAction.INSTANCE, request);
    }
    public void nodesHotThreads(NodesHotThreadsRequest request, ActionListener<NodesHotThreadsResponse> listener) {
        this.execute(NodesHotThreadsAction.INSTANCE, request, listener);
    }
    public NodesHotThreadsRequestBuilder prepareNodesHotThreads(String... nodesIds) {
        return new NodesHotThreadsRequestBuilder(this, NodesHotThreadsAction.INSTANCE).setNodesIds(nodesIds);
    }
    public ActionFuture<ClusterSearchShardsResponse> searchShards(ClusterSearchShardsRequest request) {
        return this.execute(ClusterSearchShardsAction.INSTANCE, request);
    }
    public void searchShards(ClusterSearchShardsRequest request, ActionListener<ClusterSearchShardsResponse> listener) {
        this.execute(ClusterSearchShardsAction.INSTANCE, request, listener);
    }
    public ClusterSearchShardsRequestBuilder prepareSearchShards() {
        return new ClusterSearchShardsRequestBuilder(this, ClusterSearchShardsAction.INSTANCE);
    }
    public ClusterSearchShardsRequestBuilder prepareSearchShards(String... indices) {
        return new ClusterSearchShardsRequestBuilder(this, ClusterSearchShardsAction.INSTANCE).setIndices(indices);
    }
    public PendingClusterTasksRequestBuilder preparePendingClusterTasks() {
        return new PendingClusterTasksRequestBuilder(this, PendingClusterTasksAction.INSTANCE);
    }
    public ActionFuture<PendingClusterTasksResponse> pendingClusterTasks(PendingClusterTasksRequest request) {
        return this.execute(PendingClusterTasksAction.INSTANCE, request);
    }
    public void pendingClusterTasks(PendingClusterTasksRequest request, ActionListener<PendingClusterTasksResponse> listener) {
        this.execute(PendingClusterTasksAction.INSTANCE, request, listener);
    }
    // --- snapshot / repository management ----------------------------------
    public ActionFuture<PutRepositoryResponse> putRepository(PutRepositoryRequest request) {
        return this.execute(PutRepositoryAction.INSTANCE, request);
    }
    public void putRepository(PutRepositoryRequest request, ActionListener<PutRepositoryResponse> listener) {
        this.execute(PutRepositoryAction.INSTANCE, request, listener);
    }
    public PutRepositoryRequestBuilder preparePutRepository(String name) {
        return new PutRepositoryRequestBuilder(this, PutRepositoryAction.INSTANCE, name);
    }
    public ActionFuture<CreateSnapshotResponse> createSnapshot(CreateSnapshotRequest request) {
        return this.execute(CreateSnapshotAction.INSTANCE, request);
    }
    public void createSnapshot(CreateSnapshotRequest request, ActionListener<CreateSnapshotResponse> listener) {
        this.execute(CreateSnapshotAction.INSTANCE, request, listener);
    }
    public CreateSnapshotRequestBuilder prepareCreateSnapshot(String repository, String name) {
        return new CreateSnapshotRequestBuilder(this, CreateSnapshotAction.INSTANCE, repository, name);
    }
    public ActionFuture<GetSnapshotsResponse> getSnapshots(GetSnapshotsRequest request) {
        return this.execute(GetSnapshotsAction.INSTANCE, request);
    }
    public void getSnapshots(GetSnapshotsRequest request, ActionListener<GetSnapshotsResponse> listener) {
        this.execute(GetSnapshotsAction.INSTANCE, request, listener);
    }
    public GetSnapshotsRequestBuilder prepareGetSnapshots(String repository) {
        return new GetSnapshotsRequestBuilder(this, GetSnapshotsAction.INSTANCE, repository);
    }
    public ActionFuture<DeleteSnapshotResponse> deleteSnapshot(DeleteSnapshotRequest request) {
        return this.execute(DeleteSnapshotAction.INSTANCE, request);
    }
    public void deleteSnapshot(DeleteSnapshotRequest request, ActionListener<DeleteSnapshotResponse> listener) {
        this.execute(DeleteSnapshotAction.INSTANCE, request, listener);
    }
    public DeleteSnapshotRequestBuilder prepareDeleteSnapshot(String repository, String name) {
        return new DeleteSnapshotRequestBuilder(this, DeleteSnapshotAction.INSTANCE, repository, name);
    }
    public ActionFuture<DeleteRepositoryResponse> deleteRepository(DeleteRepositoryRequest request) {
        return this.execute(DeleteRepositoryAction.INSTANCE, request);
    }
    public void deleteRepository(DeleteRepositoryRequest request, ActionListener<DeleteRepositoryResponse> listener) {
        this.execute(DeleteRepositoryAction.INSTANCE, request, listener);
    }
    public DeleteRepositoryRequestBuilder prepareDeleteRepository(String name) {
        return new DeleteRepositoryRequestBuilder(this, DeleteRepositoryAction.INSTANCE, name);
    }
    public ActionFuture<VerifyRepositoryResponse> verifyRepository(VerifyRepositoryRequest request) {
        return this.execute(VerifyRepositoryAction.INSTANCE, request);
    }
    public void verifyRepository(VerifyRepositoryRequest request, ActionListener<VerifyRepositoryResponse> listener) {
        this.execute(VerifyRepositoryAction.INSTANCE, request, listener);
    }
    public VerifyRepositoryRequestBuilder prepareVerifyRepository(String name) {
        return new VerifyRepositoryRequestBuilder(this, VerifyRepositoryAction.INSTANCE, name);
    }
    public ActionFuture<GetRepositoriesResponse> getRepositories(GetRepositoriesRequest request) {
        return this.execute(GetRepositoriesAction.INSTANCE, request);
    }
    public void getRepositories(GetRepositoriesRequest request, ActionListener<GetRepositoriesResponse> listener) {
        this.execute(GetRepositoriesAction.INSTANCE, request, listener);
    }
    public GetRepositoriesRequestBuilder prepareGetRepositories(String... name) {
        return new GetRepositoriesRequestBuilder(this, GetRepositoriesAction.INSTANCE, name);
    }
    public ActionFuture<RestoreSnapshotResponse> restoreSnapshot(RestoreSnapshotRequest request) {
        return this.execute(RestoreSnapshotAction.INSTANCE, request);
    }
    public void restoreSnapshot(RestoreSnapshotRequest request, ActionListener<RestoreSnapshotResponse> listener) {
        this.execute(RestoreSnapshotAction.INSTANCE, request, listener);
    }
    public RestoreSnapshotRequestBuilder prepareRestoreSnapshot(String repository, String snapshot) {
        return new RestoreSnapshotRequestBuilder(this, RestoreSnapshotAction.INSTANCE, repository, snapshot);
    }
    public ActionFuture<SnapshotsStatusResponse> snapshotsStatus(SnapshotsStatusRequest request) {
        return this.execute(SnapshotsStatusAction.INSTANCE, request);
    }
    public void snapshotsStatus(SnapshotsStatusRequest request, ActionListener<SnapshotsStatusResponse> listener) {
        this.execute(SnapshotsStatusAction.INSTANCE, request, listener);
    }
    public SnapshotsStatusRequestBuilder prepareSnapshotStatus(String repository) {
        return new SnapshotsStatusRequestBuilder(this, SnapshotsStatusAction.INSTANCE, repository);
    }
    public SnapshotsStatusRequestBuilder prepareSnapshotStatus() {
        return new SnapshotsStatusRequestBuilder(this, SnapshotsStatusAction.INSTANCE);
    }
    // --- search template rendering -----------------------------------------
    public ActionFuture<RenderSearchTemplateResponse> renderSearchTemplate(RenderSearchTemplateRequest request) {
        return this.execute(RenderSearchTemplateAction.INSTANCE, request);
    }
    public void renderSearchTemplate(RenderSearchTemplateRequest request, ActionListener<RenderSearchTemplateResponse> listener) {
        this.execute(RenderSearchTemplateAction.INSTANCE, request, listener);
    }
    public RenderSearchTemplateRequestBuilder prepareRenderSearchTemplate() {
        return new RenderSearchTemplateRequestBuilder(this, RenderSearchTemplateAction.INSTANCE);
    }
}
/**
 * Composite admin client: pairs a {@link ClusterAdmin} with an
 * {@link IndicesAdmin}, both built over the same underlying client and
 * shield integration, and exposes them through the {@code AdminClient}
 * interface.
 */
static class Admin implements AdminClient {

    /** Cluster-level admin facade. */
    private final ClusterAdmin cluster;
    /** Indices-level admin facade. */
    private final IndicesAdmin indices;

    public Admin(ElasticsearchClient client, MarvelShieldIntegration shieldIntegration) {
        cluster = new ClusterAdmin(client, shieldIntegration);
        indices = new IndicesAdmin(client, shieldIntegration);
    }

    public ClusterAdminClient cluster() {
        return cluster;
    }

    public IndicesAdminClient indices() {
        return indices;
    }
}
}
| Marvel: add implementation for synced flush to SecuredClient
see elastic/elasticsearch#15396
Original commit: elastic/x-pack-elasticsearch@c60c2717382b67b20c564e0cdf655cf61235d69c
| elasticsearch/x-pack/marvel/src/main/java/org/elasticsearch/marvel/shield/SecuredClient.java | Marvel: add implementation for synced flush to SecuredClient | <ide><path>lasticsearch/x-pack/marvel/src/main/java/org/elasticsearch/marvel/shield/SecuredClient.java
<ide> import org.elasticsearch.action.admin.indices.flush.FlushRequest;
<ide> import org.elasticsearch.action.admin.indices.flush.FlushRequestBuilder;
<ide> import org.elasticsearch.action.admin.indices.flush.FlushResponse;
<add>import org.elasticsearch.action.admin.indices.flush.SyncedFlushAction;
<add>import org.elasticsearch.action.admin.indices.flush.SyncedFlushRequest;
<add>import org.elasticsearch.action.admin.indices.flush.SyncedFlushRequestBuilder;
<add>import org.elasticsearch.action.admin.indices.flush.SyncedFlushResponse;
<ide> import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeAction;
<ide> import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest;
<ide> import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequestBuilder;
<ide> import org.elasticsearch.common.Strings;
<ide> import org.elasticsearch.common.inject.Inject;
<ide> import org.elasticsearch.common.settings.Settings;
<add>import org.elasticsearch.indices.flush.SyncedFlushService;
<ide> import org.elasticsearch.threadpool.ThreadPool;
<ide>
<ide> /**
<ide> return (new FlushRequestBuilder(this, FlushAction.INSTANCE)).setIndices(indices);
<ide> }
<ide>
<add> @Override
<add> public ActionFuture<SyncedFlushResponse> syncedFlush(SyncedFlushRequest syncedFlushRequest) {
<add> return this.execute(SyncedFlushAction.INSTANCE, syncedFlushRequest);
<add> }
<add>
<add> @Override
<add> public void syncedFlush(SyncedFlushRequest syncedFlushRequest, ActionListener<SyncedFlushResponse> actionListener) {
<add> this.execute(SyncedFlushAction.INSTANCE, syncedFlushRequest, actionListener);
<add> }
<add>
<add> @Override
<add> public SyncedFlushRequestBuilder prepareSyncedFlush(String... indices) {
<add> return (new SyncedFlushRequestBuilder(this, SyncedFlushAction.INSTANCE)).setIndices(indices);
<add> }
<add>
<ide> public void getMappings(GetMappingsRequest request, ActionListener<GetMappingsResponse> listener) {
<ide> this.execute(GetMappingsAction.INSTANCE, request, listener);
<ide> } |
|
Java | lgpl-2.1 | 1dcbf1822d311164af6131cc392937c6b5f12e3d | 0 | codelibs/jcifs,AgNO3/jcifs-ng | /* jcifs smb client library in Java
* Copyright (C) 2000 "Michael B. Allen" <jcifs at samba dot org>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package jcifs.internal.smb1.trans;
import java.util.Enumeration;
import jcifs.Configuration;
import jcifs.internal.SMBProtocolDecodingException;
import jcifs.internal.smb1.ServerMessageBlock;
import jcifs.internal.util.SMBUtil;
import jcifs.smb.FileEntry;
/**
*
*/
public abstract class SmbComTransactionResponse extends ServerMessageBlock implements Enumeration<SmbComTransactionResponse> {
// relative to headerStart
static final int SETUP_OFFSET = 61;
static final int DISCONNECT_TID = 0x01;
static final int ONE_WAY_TRANSACTION = 0x02;
private int pad;
private int pad1;
private boolean parametersDone, dataDone;
protected int totalParameterCount;
protected int totalDataCount;
protected int parameterCount;
protected int parameterOffset;
protected int parameterDisplacement;
protected int dataOffset;
protected int dataDisplacement;
protected int setupCount;
protected int bufParameterStart;
protected int bufDataStart;
int dataCount;
byte subCommand;
volatile boolean hasMore = true;
volatile boolean isPrimary = true;
byte[] txn_buf;
/* for doNetEnum and doFindFirstNext */
private int status;
private int numEntries;
private FileEntry[] results;
protected SmbComTransactionResponse ( Configuration config ) {
super(config);
}
protected SmbComTransactionResponse ( Configuration config, byte command, byte subcommand ) {
super(config, command);
this.subCommand = subcommand;
}
/**
* @return the dataCount
*/
protected final int getDataCount () {
return this.dataCount;
}
/**
* @param dataCount
* the dataCount to set
*/
public final void setDataCount ( int dataCount ) {
this.dataCount = dataCount;
}
/**
* @param buffer
*/
public void setBuffer ( byte[] buffer ) {
this.txn_buf = buffer;
}
/**
* @return the txn_buf
*/
public byte[] releaseBuffer () {
byte[] buf = this.txn_buf;
this.txn_buf = null;
return buf;
}
/**
* @return the subCommand
*/
public final byte getSubCommand () {
return this.subCommand;
}
/**
* @param subCommand
* the subCommand to set
*/
public final void setSubCommand ( byte subCommand ) {
this.subCommand = subCommand;
}
/**
* @return the status
*/
public final int getStatus () {
return this.status;
}
/**
* @param status
* the status to set
*/
protected final void setStatus ( int status ) {
this.status = status;
}
/**
* @return the numEntries
*/
public final int getNumEntries () {
return this.numEntries;
}
/**
* @param numEntries
* the numEntries to set
*/
protected final void setNumEntries ( int numEntries ) {
this.numEntries = numEntries;
}
/**
* @return the results
*/
public final FileEntry[] getResults () {
return this.results;
}
/**
* @param results
* the results to set
*/
protected final void setResults ( FileEntry[] results ) {
this.results = results;
}
@Override
public void reset () {
super.reset();
this.bufDataStart = 0;
this.isPrimary = this.hasMore = true;
this.parametersDone = this.dataDone = false;
}
@Override
public boolean hasMoreElements () {
return this.errorCode == 0 && this.hasMore;
}
@Override
public SmbComTransactionResponse nextElement () {
if ( this.isPrimary ) {
this.isPrimary = false;
}
return this;
}
@Override
protected int writeParameterWordsWireFormat ( byte[] dst, int dstIndex ) {
return 0;
}
@Override
protected int writeBytesWireFormat ( byte[] dst, int dstIndex ) {
return 0;
}
/**
* {@inheritDoc}
*
* @see jcifs.internal.smb1.ServerMessageBlock#decode(byte[], int)
*/
@Override
public int decode ( byte[] buffer, int bufferIndex ) throws SMBProtocolDecodingException {
int len = super.decode(buffer, bufferIndex);
if ( this.byteCount == 0 ) {
// otherwise hasMore may not be correctly set
readBytesWireFormat(buffer, len + bufferIndex);
}
nextElement();
return len;
}
@Override
protected int readParameterWordsWireFormat ( byte[] buffer, int bufferIndex ) {
int start = bufferIndex;
this.totalParameterCount = SMBUtil.readInt2(buffer, bufferIndex);
if ( this.bufDataStart == 0 ) {
this.bufDataStart = this.totalParameterCount;
}
bufferIndex += 2;
this.totalDataCount = SMBUtil.readInt2(buffer, bufferIndex);
bufferIndex += 4; // Reserved
this.parameterCount = SMBUtil.readInt2(buffer, bufferIndex);
bufferIndex += 2;
this.parameterOffset = SMBUtil.readInt2(buffer, bufferIndex);
bufferIndex += 2;
this.parameterDisplacement = SMBUtil.readInt2(buffer, bufferIndex);
bufferIndex += 2;
this.dataCount = SMBUtil.readInt2(buffer, bufferIndex);
bufferIndex += 2;
this.dataOffset = SMBUtil.readInt2(buffer, bufferIndex);
bufferIndex += 2;
this.dataDisplacement = SMBUtil.readInt2(buffer, bufferIndex);
bufferIndex += 2;
this.setupCount = buffer[ bufferIndex ] & 0xFF;
bufferIndex += 2;
return bufferIndex - start;
}
@Override
protected int readBytesWireFormat ( byte[] buffer, int bufferIndex ) throws SMBProtocolDecodingException {
this.pad = this.pad1 = 0;
if ( this.parameterCount > 0 ) {
bufferIndex += this.pad = this.parameterOffset - ( bufferIndex - this.headerStart );
System.arraycopy(buffer, bufferIndex, this.txn_buf, this.bufParameterStart + this.parameterDisplacement, this.parameterCount);
bufferIndex += this.parameterCount;
}
if ( this.dataCount > 0 ) {
bufferIndex += this.pad1 = this.dataOffset - ( bufferIndex - this.headerStart );
System.arraycopy(buffer, bufferIndex, this.txn_buf, this.bufDataStart + this.dataDisplacement, this.dataCount);
bufferIndex += this.dataCount;
}
/*
* Check to see if the entire transaction has been
* read. If so call the read methods.
*/
if ( !this.parametersDone && ( this.parameterDisplacement + this.parameterCount ) == this.totalParameterCount ) {
this.parametersDone = true;
}
if ( !this.dataDone && ( this.dataDisplacement + this.dataCount ) == this.totalDataCount ) {
this.dataDone = true;
}
if ( this.parametersDone && this.dataDone ) {
readParametersWireFormat(this.txn_buf, this.bufParameterStart, this.totalParameterCount);
readDataWireFormat(this.txn_buf, this.bufDataStart, this.totalDataCount);
this.hasMore = false;
}
return this.pad + this.parameterCount + this.pad1 + this.dataCount;
}
protected abstract int writeSetupWireFormat ( byte[] dst, int dstIndex );
protected abstract int writeParametersWireFormat ( byte[] dst, int dstIndex );
protected abstract int writeDataWireFormat ( byte[] dst, int dstIndex );
protected abstract int readSetupWireFormat ( byte[] buffer, int bufferIndex, int len );
protected abstract int readParametersWireFormat ( byte[] buffer, int bufferIndex, int len ) throws SMBProtocolDecodingException;
protected abstract int readDataWireFormat ( byte[] buffer, int bufferIndex, int len ) throws SMBProtocolDecodingException;
/**
 * Diagnostic dump of the transaction response bookkeeping fields,
 * appended to the base message representation.
 * (Dropped the redundant {@code new String(...)} wrapper: string
 * concatenation already produces a fresh String instance.)
 */
@Override
public String toString () {
    return super.toString() + ",totalParameterCount=" + this.totalParameterCount + ",totalDataCount=" + this.totalDataCount + ",parameterCount="
            + this.parameterCount + ",parameterOffset=" + this.parameterOffset + ",parameterDisplacement=" + this.parameterDisplacement
            + ",dataCount=" + this.dataCount + ",dataOffset=" + this.dataOffset + ",dataDisplacement=" + this.dataDisplacement
            + ",setupCount=" + this.setupCount + ",pad=" + this.pad + ",pad1=" + this.pad1;
}
}
| src/main/java/jcifs/internal/smb1/trans/SmbComTransactionResponse.java | /* jcifs smb client library in Java
* Copyright (C) 2000 "Michael B. Allen" <jcifs at samba dot org>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package jcifs.internal.smb1.trans;
import java.util.Enumeration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import jcifs.Configuration;
import jcifs.internal.SMBProtocolDecodingException;
import jcifs.internal.smb1.ServerMessageBlock;
import jcifs.internal.smb1.trans.nt.SmbComNtTransactionResponse;
import jcifs.internal.util.SMBUtil;
import jcifs.smb.FileEntry;
/**
*
*/
public abstract class SmbComTransactionResponse extends ServerMessageBlock implements Enumeration<SmbComTransactionResponse> {
private static final Logger log = LoggerFactory.getLogger(SmbComNtTransactionResponse.class);
// relative to headerStart
static final int SETUP_OFFSET = 61;
static final int DISCONNECT_TID = 0x01;
static final int ONE_WAY_TRANSACTION = 0x02;
private int pad;
private int pad1;
private boolean parametersDone, dataDone;
protected int totalParameterCount;
protected int totalDataCount;
protected int parameterCount;
protected int parameterOffset;
protected int parameterDisplacement;
protected int dataOffset;
protected int dataDisplacement;
protected int setupCount;
protected int bufParameterStart;
protected int bufDataStart;
int dataCount;
byte subCommand;
volatile boolean hasMore = true;
volatile boolean isPrimary = true;
byte[] txn_buf;
/* for doNetEnum and doFindFirstNext */
private int status;
private int numEntries;
private FileEntry[] results;
protected SmbComTransactionResponse ( Configuration config ) {
super(config);
}
protected SmbComTransactionResponse ( Configuration config, byte command, byte subcommand ) {
super(config, command);
this.subCommand = subcommand;
}
/**
* @return the dataCount
*/
protected final int getDataCount () {
return this.dataCount;
}
/**
* @param dataCount
* the dataCount to set
*/
public final void setDataCount ( int dataCount ) {
this.dataCount = dataCount;
}
/**
* @param buffer
*/
public void setBuffer ( byte[] buffer ) {
this.txn_buf = buffer;
}
/**
* @return the txn_buf
*/
public byte[] releaseBuffer () {
byte[] buf = this.txn_buf;
this.txn_buf = null;
return buf;
}
/**
* @return the subCommand
*/
public final byte getSubCommand () {
return this.subCommand;
}
/**
* @param subCommand
* the subCommand to set
*/
public final void setSubCommand ( byte subCommand ) {
this.subCommand = subCommand;
}
/**
* @return the status
*/
public final int getStatus () {
return this.status;
}
/**
* @param status
* the status to set
*/
protected final void setStatus ( int status ) {
this.status = status;
}
/**
* @return the numEntries
*/
public final int getNumEntries () {
return this.numEntries;
}
/**
* @param numEntries
* the numEntries to set
*/
protected final void setNumEntries ( int numEntries ) {
this.numEntries = numEntries;
}
/**
* @return the results
*/
public final FileEntry[] getResults () {
return this.results;
}
/**
* @param results
* the results to set
*/
protected final void setResults ( FileEntry[] results ) {
this.results = results;
}
@Override
public void reset () {
super.reset();
this.bufDataStart = 0;
this.isPrimary = this.hasMore = true;
this.parametersDone = this.dataDone = false;
}
@Override
public boolean hasMoreElements () {
return this.errorCode == 0 && this.hasMore;
}
@Override
public SmbComTransactionResponse nextElement () {
if ( this.isPrimary ) {
this.isPrimary = false;
}
return this;
}
@Override
protected int writeParameterWordsWireFormat ( byte[] dst, int dstIndex ) {
return 0;
}
@Override
protected int writeBytesWireFormat ( byte[] dst, int dstIndex ) {
return 0;
}
/**
 * {@inheritDoc}
 *
 * Also invokes {@link #readBytesWireFormat(byte[], int)} when the byte
 * block is empty, because the hasMore/parametersDone/dataDone flags are
 * only updated there; finally advances the enumeration via nextElement().
 *
 * @see jcifs.internal.smb1.ServerMessageBlock#decode(byte[], int)
 */
@Override
public int decode ( byte[] buffer, int bufferIndex ) throws SMBProtocolDecodingException {
    int len = super.decode(buffer, bufferIndex);
    if ( this.byteCount == 0 ) {
        // otherwise hasMore may not be correctly set
        readBytesWireFormat(buffer, len + bufferIndex);
    }
    nextElement();
    return len;
}
@Override
protected int readParameterWordsWireFormat ( byte[] buffer, int bufferIndex ) {
    // Decodes the SMB_COM_TRANSACTION response parameter words
    // (counts, offsets and displacements of this fragment).
    int start = bufferIndex;
    this.totalParameterCount = SMBUtil.readInt2(buffer, bufferIndex);
    if ( this.bufDataStart == 0 ) {
        // first fragment: data bytes follow the parameter bytes in txn_buf
        this.bufDataStart = this.totalParameterCount;
    }
    bufferIndex += 2;
    this.totalDataCount = SMBUtil.readInt2(buffer, bufferIndex);
    bufferIndex += 4; // 2 bytes TotalDataCount + 2 bytes Reserved
    this.parameterCount = SMBUtil.readInt2(buffer, bufferIndex);
    bufferIndex += 2;
    this.parameterOffset = SMBUtil.readInt2(buffer, bufferIndex);
    bufferIndex += 2;
    this.parameterDisplacement = SMBUtil.readInt2(buffer, bufferIndex);
    bufferIndex += 2;
    this.dataCount = SMBUtil.readInt2(buffer, bufferIndex);
    bufferIndex += 2;
    this.dataOffset = SMBUtil.readInt2(buffer, bufferIndex);
    bufferIndex += 2;
    this.dataDisplacement = SMBUtil.readInt2(buffer, bufferIndex);
    bufferIndex += 2;
    this.setupCount = buffer[ bufferIndex ] & 0xFF;
    bufferIndex += 2; // 1 byte SetupCount + 1 reserved byte
    return bufferIndex - start;
}
@Override
protected int readBytesWireFormat ( byte[] buffer, int bufferIndex ) throws SMBProtocolDecodingException {
this.pad = this.pad1 = 0;
if ( this.parameterCount > 0 ) {
bufferIndex += this.pad = this.parameterOffset - ( bufferIndex - this.headerStart );
System.arraycopy(buffer, bufferIndex, this.txn_buf, this.bufParameterStart + this.parameterDisplacement, this.parameterCount);
bufferIndex += this.parameterCount;
}
if ( this.dataCount > 0 ) {
bufferIndex += this.pad1 = this.dataOffset - ( bufferIndex - this.headerStart );
System.arraycopy(buffer, bufferIndex, this.txn_buf, this.bufDataStart + this.dataDisplacement, this.dataCount);
bufferIndex += this.dataCount;
}
/*
* Check to see if the entire transaction has been
* read. If so call the read methods.
*/
if ( !this.parametersDone && ( this.parameterDisplacement + this.parameterCount ) == this.totalParameterCount ) {
this.parametersDone = true;
}
if ( !this.dataDone && ( this.dataDisplacement + this.dataCount ) == this.totalDataCount ) {
this.dataDone = true;
}
if ( this.parametersDone && this.dataDone ) {
readParametersWireFormat(this.txn_buf, this.bufParameterStart, this.totalParameterCount);
readDataWireFormat(this.txn_buf, this.bufDataStart, this.totalDataCount);
this.hasMore = false;
}
return this.pad + this.parameterCount + this.pad1 + this.dataCount;
}
protected abstract int writeSetupWireFormat ( byte[] dst, int dstIndex );
protected abstract int writeParametersWireFormat ( byte[] dst, int dstIndex );
protected abstract int writeDataWireFormat ( byte[] dst, int dstIndex );
protected abstract int readSetupWireFormat ( byte[] buffer, int bufferIndex, int len );
protected abstract int readParametersWireFormat ( byte[] buffer, int bufferIndex, int len ) throws SMBProtocolDecodingException;
protected abstract int readDataWireFormat ( byte[] buffer, int bufferIndex, int len ) throws SMBProtocolDecodingException;
@Override
public String toString () {
return new String(
super.toString() + ",totalParameterCount=" + this.totalParameterCount + ",totalDataCount=" + this.totalDataCount + ",parameterCount="
+ this.parameterCount + ",parameterOffset=" + this.parameterOffset + ",parameterDisplacement=" + this.parameterDisplacement
+ ",dataCount=" + this.dataCount + ",dataOffset=" + this.dataOffset + ",dataDisplacement=" + this.dataDisplacement
+ ",setupCount=" + this.setupCount + ",pad=" + this.pad + ",pad1=" + this.pad1);
}
}
| Remove unused logger
| src/main/java/jcifs/internal/smb1/trans/SmbComTransactionResponse.java | Remove unused logger | <ide><path>rc/main/java/jcifs/internal/smb1/trans/SmbComTransactionResponse.java
<ide>
<ide> import java.util.Enumeration;
<ide>
<del>import org.slf4j.Logger;
<del>import org.slf4j.LoggerFactory;
<del>
<ide> import jcifs.Configuration;
<ide> import jcifs.internal.SMBProtocolDecodingException;
<ide> import jcifs.internal.smb1.ServerMessageBlock;
<del>import jcifs.internal.smb1.trans.nt.SmbComNtTransactionResponse;
<ide> import jcifs.internal.util.SMBUtil;
<ide> import jcifs.smb.FileEntry;
<ide>
<ide> *
<ide> */
<ide> public abstract class SmbComTransactionResponse extends ServerMessageBlock implements Enumeration<SmbComTransactionResponse> {
<del>
<del> private static final Logger log = LoggerFactory.getLogger(SmbComNtTransactionResponse.class);
<ide>
<ide> // relative to headerStart
<ide> static final int SETUP_OFFSET = 61; |
|
Java | apache-2.0 | 116648186b9b0e0acacaadcafa9d20988c5affb2 | 0 | akarnokd/reactive4java | /*
* Copyright 2011-2012 David Karnok
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package hu.akarnokd.reactive4java.reactive;
import static hu.akarnokd.reactive4java.base.Functions.and;
import hu.akarnokd.reactive4java.base.Action0;
import hu.akarnokd.reactive4java.base.Action1;
import hu.akarnokd.reactive4java.base.Actions;
import hu.akarnokd.reactive4java.base.CircularBuffer;
import hu.akarnokd.reactive4java.base.CloseableIterable;
import hu.akarnokd.reactive4java.base.CloseableIterator;
import hu.akarnokd.reactive4java.base.Closeables;
import hu.akarnokd.reactive4java.base.Func0;
import hu.akarnokd.reactive4java.base.Func1;
import hu.akarnokd.reactive4java.base.Func2;
import hu.akarnokd.reactive4java.base.Functions;
import hu.akarnokd.reactive4java.base.Option;
import hu.akarnokd.reactive4java.base.Pair;
import hu.akarnokd.reactive4java.base.Scheduler;
import hu.akarnokd.reactive4java.base.TooManyElementsException;
import hu.akarnokd.reactive4java.interactive.SingleContainer;
import hu.akarnokd.reactive4java.util.DefaultScheduler;
import hu.akarnokd.reactive4java.util.SingleLaneExecutor;
import java.io.Closeable;
import java.io.IOException;
import java.lang.reflect.Array;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.math.RoundingMode;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.IdentityHashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Queue;
import java.util.Set;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import javax.annotation.Nonnull;
import javax.annotation.concurrent.GuardedBy;
/**
* Utility class with operators and helper methods for reactive programming with <code>Observable</code>s and <code>Observer</code>s.
* Guidances were taken from
* <ul>
* <li><a href='http://theburningmonk.com/tags/rx/'>http://theburningmonk.com/tags/rx/</a></li>
* <li><a href='http://blogs.bartdesmet.net/blogs/bart/archive/2010/01/01/the-essence-of-linq-minlinq.aspx'>http://blogs.bartdesmet.net/blogs/bart/archive/2010/01/01/the-essence-of-linq-minlinq.aspx</a></li>
* <li><a href='http://reactive4java.googlecode.com/svn/trunk/Reactive4Java/docs/javadoc/hu/akarnokd/reactive4java/reactive/Reactive.html'>http://reactive4java.googlecode.com/svn/trunk/Reactive4Java/docs/javadoc/hu/akarnokd/reactive4java/reactive/Reactive.html</a></li>
* <li><a href='http://rxwiki.wikidot.com/101samples#toc3'>http://rxwiki.wikidot.com/101samples#toc3</a></li>
* <li><a href='http://channel9.msdn.com/Tags/rx'>http://channel9.msdn.com/Tags/rx</a></li>
* </ul>
*
* @author akarnokd, 2011.01.26
* @see hu.akarnokd.reactive4java.interactive.Interactive
*/
public final class Reactive {
/**
* A variant of the registering observable which stores a group key.
* @author akarnokd, 2011.01.29.
* @param <Key> the type of the key
* @param <Value> the value type
*/
static class GroupedRegisteringObservable<Key, Value> extends DefaultObservable<Value> implements GroupedObservable<Key, Value> {
    // Registration and value dispatch are inherited from DefaultObservable;
    // this subclass only adds the immutable key identifying the group.
    /** The group key. */
    private final Key key;
    /**
     * Constructor.
     * @param key the group key
     */
    public GroupedRegisteringObservable(Key key) {
        this.key = key;
    }
    @Override
    public Key key() {
        return key;
    }
}
/** The diagnostic states of the current runnable. */
public enum ObserverState { OBSERVER_ERROR, OBSERVER_FINISHED, OBSERVER_RUNNING }
/** The common observable pool where the Observer methods get invoked by default. */
static final AtomicReference<Scheduler> DEFAULT_SCHEDULER = new AtomicReference<Scheduler>(new DefaultScheduler());
/**
* Returns an observable which provides a TimeInterval of Ts which
* records the elapsed time between successive elements.
* The time interval is evaluated using the System.nanoTime() differences
* as nanoseconds
* The first element contains the time elapsed since the registration occurred.
* @param <T> the time source
* @param source the source of Ts
* @return the new observable
*/
@Nonnull
public static <T> Observable<TimeInterval<T>> addTimeInterval(
@Nonnull final Observable<? extends T> source) {
return new Observable<TimeInterval<T>>() {
@Override
public Closeable register(final Observer<? super TimeInterval<T>> observer) {
return source.register(new Observer<T>() {
long lastTime = System.nanoTime();
@Override
public void error(Throwable ex) {
observer.error(ex);
}
@Override
public void finish() {
observer.finish();
}
@Override
public void next(T value) {
long t2 = System.nanoTime();
observer.next(TimeInterval.of(value, t2 - lastTime));
lastTime = t2;
}
});
}
};
}
/**
* Wrap the values within a observable to a timestamped value having always
* the System.currentTimeMillis() value.
* @param <T> the element type
* @param source the source which has its elements in a timestamped way.
* @return the raw observables of Ts
*/
@Nonnull
public static <T> Observable<Timestamped<T>> addTimestamped(
@Nonnull Observable<? extends T> source) {
return select(source, Reactive.<T>wrapTimestamped());
}
/**
* Apply an accumulator function over the observable source and submit the accumulated value to the returned observable.
* @param <T> the element type
* @param source the source observable
* @param accumulator the accumulator function where the first parameter is the current accumulated value and the second is the now received value.
* @return the observable for the result of the accumulation
*/
@Nonnull
public static <T> Observable<T> aggregate(
        @Nonnull final Observable<? extends T> source,
        @Nonnull final Func2<? super T, ? super T, ? extends T> accumulator) {
    // Seedless reduce/fold: the first element becomes the initial
    // accumulator; the final value is emitted on finish() only when at
    // least one element was received (an empty source just finishes).
    return new Observable<T>() {
        @Override
        public Closeable register(final Observer<? super T> observer) {
            return source.register(new Observer<T>() {
                /** The current aggregation result. */
                T result;
                /** How many items did we get: 0 = none, 1 = one, 2 = more than one. */
                int phase;
                @Override
                public void error(Throwable ex) {
                    observer.error(ex);
                };
                @Override
                public void finish() {
                    if (phase >= 1) { // at least one element arrived, so a result exists
                        observer.next(result);
                    }
                    observer.finish();
                }
                @Override
                public void next(T value) {
                    if (phase == 0) {
                        // first element seeds the accumulator
                        result = value;
                        phase++;
                    } else {
                        result = accumulator.invoke(result, value);
                        phase = 2;
                    }
                }
            });
        }
    };
}
/**
* Computes an aggregated value of the source Ts by applying a sum function and applying the divide function when the source
* finishes, sending the result to the output.
* @param <T> the type of the values
* @param <U> the type of the intermediate sum value
* @param <V> the type of the final average value
* @param source the source of BigDecimals to aggregate.
* @param sum the function which sums the input Ts. The first received T will be acompanied by a null U.
* @param divide the function which perform the final division based on the number of elements
* @return the observable for the average value
*/
@Nonnull
public static <T, U, V> Observable<V> aggregate(
@Nonnull final Observable<? extends T> source,
@Nonnull final Func2<? super U, ? super T, ? extends U> sum,
@Nonnull final Func2<? super U, ? super Integer, ? extends V> divide) {
return new Observable<V>() {
@Override
public Closeable register(final Observer<? super V> observer) {
return source.register(new Observer<T>() {
/** The number of values. */
int count;
/** The sum of the values thus far. */
U temp;
@Override
public void error(Throwable ex) {
observer.error(ex);
}
@Override
public void finish() {
if (count > 0) {
observer.next(divide.invoke(temp, count));
}
observer.finish();
}
@Override
public void next(T value) {
temp = sum.invoke(temp, value);
count++;
}
});
}
};
}
/**
* Apply an accumulator function over the observable source and submit the accumulated value to the returned observable.
* @param <T> the input element type
* @param <U> the ouput element type
* @param source the source observable
* @param seed the initial value of the accumulator
* @param accumulator the accumulator function where the first parameter is the current accumulated value and the second is the now received value.
* @return the observable for the result of the accumulation
*/
@Nonnull
public static <T, U> Observable<U> aggregate(
@Nonnull final Observable<? extends T> source,
final U seed,
@Nonnull final Func2<? super U, ? super T, ? extends U> accumulator) {
return new Observable<U>() {
@Override
public Closeable register(final Observer<? super U> observer) {
return source.register(new Observer<T>() {
/** The current aggregation result. */
U result = seed;
@Override
public void error(Throwable ex) {
observer.error(ex);
};
@Override
public void finish() {
observer.next(result);
observer.finish();
}
@Override
public void next(T value) {
result = accumulator.invoke(result, value);
}
});
}
};
}
/**
* Signals a single true or false if all elements of the observable match the predicate.
* It may return early with a result of false if the predicate simply does not match the current element.
* For a true result, it waits for all elements of the source observable.
* @param <T> the type of the source data
* @param source the source observable
* @param predicate the predicate to setisfy
* @return the observable resulting in a single result
*/
/**
 * Signals a single true or false if all elements of the observable match the predicate.
 * It may return early with a result of false if the predicate simply does not match the current element.
 * For a true result, it waits for all elements of the source observable.
 * @param <T> the type of the source data
 * @param source the source observable
 * @param predicate the predicate to satisfy
 * @return the observable resulting in a single result
 */
@Nonnull
public static <T> Observable<Boolean> all(
        @Nonnull final Observable<? extends T> source,
        @Nonnull final Func1<? super T, Boolean> predicate) {
    return new Observable<Boolean>() {
        @Override
        public Closeable register(final Observer<? super Boolean> observer) {
            DefaultObserverEx<T> o = new DefaultObserverEx<T>(true) {
                {
                    add("source", source);
                }
                /** Indicate if we returned early. */
                boolean done;
                @Override
                public void onError(Throwable ex) {
                    observer.error(ex);
                }
                @Override
                public void onFinish() {
                    if (!done) {
                        done = true;
                        observer.next(true);
                        observer.finish();
                    }
                }
                @Override
                public void onNext(T value) {
                    // BUGFIX: guard with the done flag so that once a
                    // non-matching element produced the early false+finish,
                    // further incoming values cannot emit a second terminal
                    // sequence to the downstream observer.
                    if (!done && !predicate.invoke(value)) {
                        done = true;
                        observer.next(false);
                        observer.finish();
                    }
                }
            };
            return o;
        }
    };
}
/**
* Channels the values of the first observable who fires first from the given set of observables.
* E.g., <code>O3 = Amb(O1, O2)</code> if O1 starts to submit events first, O3 will relay these events and events of O2 will be completely ignored
* @param <T> the type of the observed element
* @param sources the iterable list of source observables.
* @return the observable which reacted first
*/
/**
 * Channels the values of the first observable who fires first from the given set of observables.
 * E.g., <code>O3 = Amb(O1, O2)</code> if O1 starts to submit events first, O3 will relay these events
 * and events of O2 will be completely ignored.
 * @param <T> the type of the observed element
 * @param sources the iterable list of source observables.
 * @return the observable which reacted first
 */
@Nonnull
public static <T> Observable<T> amb(
        @Nonnull final Iterable<? extends Observable<? extends T>> sources) {
    return new Observable<T>() {
        @Override
        public Closeable register(final Observer<? super T> observer) {
            final List<DefaultObserver<T>> observers = new ArrayList<DefaultObserver<T>>();
            List<Observable<? extends T>> observables = new ArrayList<Observable<? extends T>>();
            // holds the winning observer; set exactly once via compareAndSet
            final AtomicReference<Object> first = new AtomicReference<Object>();
            int i = 0;
            for (final Observable<? extends T> os : sources) {
                observables.add(os);
                final int thisIndex = i;
                DefaultObserver<T> obs = new DefaultObserver<T>(true) {
                    /** We won the race. */
                    boolean weWon;
                    /** Cancel everyone else. */
                    void cancelRest() {
                        for (int j = 0; j < observers.size(); j++) {
                            if (j != thisIndex) {
                                observers.get(j).close();
                            }
                        }
                    }
                    /** @return Check if we won the race. */
                    boolean didWeWon() {
                        if (!weWon) {
                            if (first.compareAndSet(null, this)) {
                                weWon = true;
                                cancelRest();
                            } else {
                                close();
                            }
                        }
                        return weWon;
                    }
                    @Override
                    public void onError(Throwable ex) {
                        if (didWeWon()) {
                            observer.error(ex);
                        }
                    }
                    @Override
                    public void onFinish() {
                        if (didWeWon()) {
                            observer.finish();
                        }
                    }
                    @Override
                    public void onNext(T value) {
                        if (didWeWon()) {
                            observer.next(value);
                        } else {
                            close();
                        }
                    }
                };
                observers.add(obs);
                // BUGFIX: the index must advance for each source; previously i
                // was never incremented here, so every racer captured
                // thisIndex == 0 and cancelRest() closed the wrong
                // registrations (possibly including the actual winner).
                i++;
            }
            i = 0;
            List<Closeable> closers = new ArrayList<Closeable>(observables.size() * 2 + 1);
            for (final Observable<? extends T> os : observables) {
                DefaultObserver<T> dob = observers.get(i);
                closers.add(dob);
                closers.add(os.register(dob));
                i++;
            }
            return Closeables.closeAll(closers);
        }
    };
}
/**
* Signals a single true if the source observable contains any element.
* It might return early for a non-empty source but waits for the entire observable to return false.
* @param <T> the element type
* @param source the source
* @return the observable
*/
@Nonnull
public static <T> Observable<Boolean> any(
@Nonnull final Observable<T> source) {
return any(source, Functions.alwaysTrue1());
}
/**
* Signals a single TRUE if the source ever signals next() and any of the values matches the predicate before it signals a finish().
* It signals a false otherwise.
* @param <T> the source element type.
* @param source the source observable
* @param predicate the predicate to test the values
* @return the observable.
*/
@Nonnull
public static <T> Observable<Boolean> any(
        @Nonnull final Observable<T> source,
        @Nonnull final Func1<? super T, Boolean> predicate) {
    // Emits exactly one Boolean and closes the registration as soon as the
    // outcome is decided (first matching element, or finish without match).
    return new Observable<Boolean>() {
        @Override
        public Closeable register(final Observer<? super Boolean> observer) {
            DefaultObserver<T> obs = new DefaultObserver<T>(true) {
                @Override
                public void onError(Throwable ex) {
                    // propagate the failure and stop listening
                    observer.error(ex);
                    close();
                }
                @Override
                public void onFinish() {
                    // source completed without a match
                    observer.next(false);
                    observer.finish();
                    close();
                }
                @Override
                public void onNext(T value) {
                    if (predicate.invoke(value)) {
                        // first match decides the outcome
                        observer.next(true);
                        observer.finish();
                        close();
                    }
                }
            };
            return Closeables.close(obs, source.register(obs));
        }
    };
}
/**
* Wraps the given observer into an action object which then dispatches
* various incoming Option values to next(), finish() and error().
* @param <T> the element type
* @param observer the observer to wrap
* @return the wrapper action
*/
/**
 * Adapts an observer into an action over {@link Option} values: each
 * incoming some/none/error option is routed to the matching
 * next()/finish()/error() method through {@code dispatch}.
 * @param <T> the element type
 * @param observer the observer to adapt
 * @return the adapting action
 */
@Nonnull
public static <T> Action1<Option<T>> asAction(
        @Nonnull final Observer<? super T> observer) {
    return new Action1<Option<T>>() {
        @Override
        public void invoke(Option<T> option) {
            dispatch(observer, option);
        }
    };
}
/**
* Convert the Observable instance into a functional-observable object.
* @param <T> the type of the elements
* @param source the source observable
* @return the action to action to option of T
*/
@Nonnull
public static <T> Action1<Action1<Option<T>>> asFObservable(
        @Nonnull final Observable<? extends T> source) {
    // Converts the Observable into its functional-observable form.
    // NOTE(review): the Closeable returned by source.register() is discarded
    // here, so a registration made through the returned action cannot be
    // individually cancelled — confirm this is intentional.
    return new Action1<Action1<Option<T>>>() {
        @Override
        public void invoke(final Action1<Option<T>> o) {
            source.register(asObserver(o));
        }
    };
}
/**
* Convert the functional observable into a normal observable object.
* @param <T> the type of the elements to observe.
* @param source the source of the functional-observable elements
* @return the observable object
*/
@Nonnull
public static <T> Observable<T> asObservable(
@Nonnull final Action1<Action1<Option<T>>> source) {
return Reactive.create(new Func1<Observer<? super T>, Action0>() {
@Override
public Action0 invoke(final Observer<? super T> o) {
source.invoke(asAction(o));
return Actions.noAction0();
}
});
}
/**
* Transform the given action to an observer.
* The wrapper observer converts its next() messages to Option.some(),
* the finish() to Option.none() and error() to Option.error().
* @param <T> the element type to observe
* @param action the action to wrap
* @return the observer
*/
/**
 * Adapts an action over {@link Option} values into an observer: next()
 * becomes Option.some(), finish() becomes Option.none() and error()
 * becomes Option.error().
 * @param <T> the element type to observe
 * @param action the action to adapt
 * @return the adapting observer
 */
@Nonnull
public static <T> Observer<T> asObserver(
        @Nonnull final Action1<? super Option<T>> action) {
    return new Observer<T>() {
        @Override
        public void next(T value) {
            action.invoke(Option.some(value));
        }
        @Override
        public void finish() {
            action.invoke(Option.<T>none());
        }
        @Override
        public void error(Throwable ex) {
            action.invoke(Option.<T>error(ex));
        }
    };
}
/**
* Computes and signals the average value of the BigDecimal source.
* The source may not send nulls.
* @param source the source of BigDecimals to aggregate.
* @return the observable for the average value
*/
@Nonnull
public static Observable<BigDecimal> averageBigDecimal(
@Nonnull final Observable<BigDecimal> source) {
return aggregate(source,
Functions.sumBigDecimal(),
new Func2<BigDecimal, Integer, BigDecimal>() {
@Override
public BigDecimal invoke(BigDecimal param1, Integer param2) {
return param1.divide(BigDecimal.valueOf(param2.longValue()), RoundingMode.HALF_UP);
}
}
);
}
/**
* Computes and signals the average value of the BigInteger source.
* The source may not send nulls.
* @param source the source of BigIntegers to aggregate.
* @return the observable for the average value
*/
@Nonnull
public static Observable<BigDecimal> averageBigInteger(
@Nonnull final Observable<BigInteger> source) {
return aggregate(source,
Functions.sumBigInteger(),
new Func2<BigInteger, Integer, BigDecimal>() {
@Override
public BigDecimal invoke(BigInteger param1, Integer param2) {
return new BigDecimal(param1).divide(BigDecimal.valueOf(param2.longValue()), RoundingMode.HALF_UP);
}
}
);
}
/**
* Computes and signals the average value of the Double source.
* The source may not send nulls.
* @param source the source of Doubles to aggregate.
* @return the observable for the average value
*/
/**
 * Computes and signals the average value of the Double source.
 * The source may not send nulls. The running sum is produced by
 * {@code Functions.sumDouble()} and divided by the element count when
 * the source finishes.
 * @param source the source of Doubles to aggregate.
 * @return the observable for the average value
 */
@Nonnull
public static Observable<Double> averageDouble(
        @Nonnull final Observable<Double> source) {
    return aggregate(
            source,
            Functions.sumDouble(),
            new Func2<Double, Integer, Double>() {
                @Override
                public Double invoke(Double sum, Integer count) {
                    return sum / count;
                }
            });
}
/**
* Computes and signals the average value of the Float source.
* The source may not send nulls.
* @param source the source of Floats to aggregate.
* @return the observable for the average value
*/
@Nonnull
public static Observable<Float> averageFloat(
@Nonnull final Observable<Float> source) {
return aggregate(source,
Functions.sumFloat(),
new Func2<Float, Integer, Float>() {
@Override
public Float invoke(Float param1, Integer param2) {
return param1 / param2;
}
}
);
}
/**
* Computes and signals the average value of the integer source.
* The source may not send nulls.
* The intermediate aggregation used double values.
* @param source the source of integers to aggregate.
* @return the observable for the average value
*/
@Nonnull
public static Observable<Double> averageInt(
        @Nonnull final Observable<Integer> source) {
    // The running sum is held as a Double; it starts as null and is seeded
    // by the first element, then divided by the element count on finish.
    return aggregate(source,
        new Func2<Double, Integer, Double>() {
            @Override
            public Double invoke(Double param1, Integer param2) {
                if (param1 != null) {
                    return param1 + param2;
                }
                // first element: start the sum
                return param2.doubleValue();
            }
        },
        new Func2<Double, Integer, Double>() {
            @Override
            public Double invoke(Double param1, Integer param2) {
                // param1 = sum of elements, param2 = element count
                return param1 / param2;
            }
        }
    );
}
/**
* Computes and signals the average value of the Long source.
* The source may not send nulls.
* The intermediate aggregation used double values.
* @param source the source of longs to aggregate.
* @return the observable for the average value
*/
@Nonnull
public static Observable<Double> averageLong(
@Nonnull final Observable<Long> source) {
return aggregate(source,
new Func2<Double, Long, Double>() {
@Override
public Double invoke(Double param1, Long param2) {
if (param1 != null) {
return param1 + param2;
}
return param2.doubleValue();
}
},
new Func2<Double, Integer, Double>() {
@Override
public Double invoke(Double param1, Integer param2) {
return param1 / param2;
}
}
);
}
/**
* Buffer the nodes as they become available and send them out in bufferSize chunks.
* The observers return a new and modifiable list of T on every next() call.
* @param <T> the type of the elements
* @param source the source observable
* @param bufferSize the target buffer size
* @return the observable of the list
*/
@Nonnull
public static <T> Observable<List<T>> buffer(
        @Nonnull final Observable<? extends T> source,
        final int bufferSize) {
    // Collects elements into chunks of bufferSize; a trailing, partially
    // filled chunk is emitted on finish(), and an empty source emits no
    // list at all. Each emitted list is a fresh, modifiable ArrayList.
    return new Observable<List<T>>() {
        @Override
        public Closeable register(final Observer<? super List<T>> observer) {
            return source.register(new Observer<T>() {
                /** The current buffer; lazily created on the first element. */
                List<T> buffer;
                @Override
                public void error(Throwable ex) {
                    observer.error(ex);
                }
                @Override
                public void finish() {
                    if (buffer != null && buffer.size() > 0) {
                        // flush the incomplete last chunk
                        observer.next(buffer);
                    }
                    observer.finish();
                }
                @Override
                public void next(T value) {
                    if (buffer == null) {
                        buffer = new ArrayList<T>(bufferSize);
                    }
                    buffer.add(value);
                    if (buffer.size() == bufferSize) {
                        // hand off the full chunk and start a fresh one
                        observer.next(buffer);
                        buffer = new ArrayList<T>(bufferSize);
                    }
                }
            });
        }
    };
}
/**
* Buffer the Ts of the source until the buffer reaches its capacity or the current time unit runs out.
* Might result in empty list of Ts and might complete early when the source finishes before the time runs out.
* It uses the default scheduler pool.
* @param <T> the type of the values
* @param source the source observable
* @param bufferSize the allowed buffer size
* @param time the time value to wait betveen buffer fills
* @param unit the time unit
* @return the observable of list of Ts
*/
@Nonnull
public static <T> Observable<List<T>> buffer(
@Nonnull final Observable<? extends T> source,
final int bufferSize,
final long time,
@Nonnull final TimeUnit unit) {
return buffer(source, bufferSize, time, unit, DEFAULT_SCHEDULER.get());
}
	/**
	 * Buffer the Ts of the source until the buffer reaches its capacity or the current time unit runs out.
	 * Might result in empty list of Ts and might complete early when the source finishes before the time runs out.
	 * @param <T> the type of the values
	 * @param source the source observable
	 * @param bufferSize the allowed buffer size
	 * @param time the time value to wait between buffer fills
	 * @param unit the time unit
	 * @param pool the pool where to schedule the buffer splits
	 * @return the observable of list of Ts
	 */
	@Nonnull
	public static <T> Observable<List<T>> buffer(
			@Nonnull final Observable<? extends T> source,
			final int bufferSize,
			final long time,
			@Nonnull final TimeUnit unit,
			@Nonnull final Scheduler pool) {
		return new Observable<List<T>>() {
			@Override
			public Closeable register(final Observer<? super List<T>> observer) {
				// elements queue up here; drained either by the periodic task or on size/finish
				final BlockingQueue<T> buffer = new LinkedBlockingQueue<T>();
				// tracked separately because LinkedBlockingQueue.size() is O(1) but not atomic with add/drain
				final AtomicInteger bufferLength = new AtomicInteger();
				final Lock lock = new ReentrantLock(true);
				// periodic task: drain whatever accumulated and emit it (possibly an empty list)
				final DefaultRunnable r = new DefaultRunnable(lock) {
					@Override
					public void onRun() {
						List<T> curr = new ArrayList<T>();
						buffer.drainTo(curr);
						bufferLength.addAndGet(-curr.size());
						observer.next(curr);
					}
				};
				// shares the same lock with the timer task so emissions don't interleave
				DefaultObserver<T> s = new DefaultObserver<T>(lock, true) {
					/** The timer companion. */
					Closeable timer = pool.schedule(r, time, time, unit);
					@Override
					protected void onClose() {
						Closeables.close0(timer);
					}
					@Override
					public void onError(Throwable ex) {
						observer.error(ex);
					}
					@Override
					public void onFinish() {
						// flush the remaining elements before completing
						List<T> curr = new ArrayList<T>();
						buffer.drainTo(curr);
						bufferLength.addAndGet(-curr.size());
						observer.next(curr);
						observer.finish();
					}
					/** The buffer to fill in. */
					@Override
					public void onNext(T value) {
						buffer.add(value);
						// emit early once the capacity is reached
						if (bufferLength.incrementAndGet() == bufferSize) {
							List<T> curr = new ArrayList<T>();
							buffer.drainTo(curr);
							bufferLength.addAndGet(-curr.size());
							observer.next(curr);
						}
					}
				};
				return Closeables.close(s, source.register(s));
			}
		};
	}
/**
* Buffers the source observable Ts into a list of Ts periodically and submits them to the returned observable.
* Each next() invocation contains a new and modifiable list of Ts. The signaled List of Ts might be empty if
* no Ts appeared from the original source within the current timespan.
* The last T of the original source triggers an early submission to the output.
* The scheduling is done on the default Scheduler.
* @param <T> the type of elements to observe
* @param source the source of Ts.
* @param time the time value to split the buffer contents.
* @param unit the time unit of the time
* @return the observable of list of Ts
*/
@Nonnull
public static <T> Observable<List<T>> buffer(
@Nonnull final Observable<? extends T> source,
final long time,
@Nonnull final TimeUnit unit) {
return buffer(source, time, unit, DEFAULT_SCHEDULER.get());
}
	/**
	 * Buffers the source observable Ts into a list of Ts periodically and submits them to the returned observable.
	 * Each next() invocation contains a new and modifiable list of Ts. The signaled List of Ts might be empty if
	 * no Ts appeared from the original source within the current timespan.
	 * The last T of the original source triggers an early submission to the output.
	 * @param <T> the type of elements to observe
	 * @param source the source of Ts.
	 * @param time the time value to split the buffer contents.
	 * @param unit the time unit of the time
	 * @param pool the scheduled execution pool to use
	 * @return the observable of list of Ts
	 */
	@Nonnull
	public static <T> Observable<List<T>> buffer(
			@Nonnull final Observable<? extends T> source,
			final long time,
			@Nonnull final TimeUnit unit,
			@Nonnull final Scheduler pool) {
		return new Observable<List<T>>() {
			@Override
			public Closeable register(final Observer<? super List<T>> observer) {
				// incoming elements are queued and drained only by the timer or on finish
				final BlockingQueue<T> buffer = new LinkedBlockingQueue<T>();
				final Lock lock = new ReentrantLock(true);
				// periodic task: drain and emit the current contents (possibly an empty list)
				final DefaultRunnable r = new DefaultRunnable(lock) {
					@Override
					public void onRun() {
						List<T> curr = new ArrayList<T>();
						buffer.drainTo(curr);
						observer.next(curr);
					}
				};
				// same lock as the timer so buffer emissions do not interleave
				DefaultObserver<T> o = new DefaultObserver<T>(lock, true) {
					Closeable timer = pool.schedule(r, time, time, unit);
					@Override
					protected void onClose() {
						Closeables.close0(timer);
					}
					@Override
					public void onError(Throwable ex) {
						observer.error(ex);
					}
					@Override
					public void onFinish() {
						// flush the remainder before completing
						List<T> curr = new ArrayList<T>();
						buffer.drainTo(curr);
						observer.next(curr);
						observer.finish();
					}
					/** The buffer to fill in. */
					@Override
					public void onNext(T value) {
						buffer.add(value);
					}
				};
				return Closeables.close(o, source.register(o));
			}
		};
	}
	/**
	 * Returns an observable which combines the latest values of
	 * both streams whenever one sends a new value.
	 * <p><b>Exception semantics:</b> if any stream throws an exception, the output stream
	 * throws an exception and all subscriptions are terminated.</p>
	 * <p><b>Completion semantics:</b> The output stream terminates
	 * after both streams terminate.</p>
	 * <p>Note that at the beginning, when the left or right fires first, the selector function
	 * will receive (value, null) or (null, value). If you want to react only in cases when both have sent
	 * a value, use the {@link #combineLatestSent(Observable, Observable, Func2)} method.</p>
	 * @param <T> the left element type
	 * @param <U> the right element type
	 * @param <V> the result element type
	 * @param left the left stream
	 * @param right the right stream
	 * @param selector the function which combines values from both streams and returns a new value
	 * @return the new observable.
	 */
	public static <T, U, V> Observable<V> combineLatest(
			final Observable<? extends T> left,
			final Observable<? extends U> right,
			final Func2<? super T, ? super U, ? extends V> selector
	) {
		return new Observable<V>() {
			@Override
			public Closeable register(final Observer<? super V> observer) {
				// both inner observers share this lock so their callbacks are serialized
				final Lock lock = new ReentrantLock(true);
				// closes both registrations at once when either side fails
				final AtomicReference<Closeable> closeBoth = new AtomicReference<Closeable>();
				final AtomicReference<T> leftRef = new AtomicReference<T>();
				final AtomicReference<U> rightRef = new AtomicReference<U>();
				// counts the two sources still running; finish() fires when it reaches zero
				final AtomicInteger wip = new AtomicInteger(2);
				DefaultObserverEx<T> obs1 = new DefaultObserverEx<T>(lock, false) {
					@Override
					protected void onError(Throwable ex) {
						observer.error(ex);
						Closeables.close0(closeBoth.get());
					}
					@Override
					protected void onFinish() {
						if (wip.decrementAndGet() == 0) {
							observer.finish();
						}
						close();
					}
					@Override
					protected void onNext(T value) {
						// store the latest left value and combine with whatever the right side sent so far
						leftRef.set(value);
						observer.next(selector.invoke(value, rightRef.get()));
					}
				};
				DefaultObserverEx<U> obs2 = new DefaultObserverEx<U>(lock, false) {
					@Override
					protected void onError(Throwable ex) {
						observer.error(ex);
						Closeables.close0(closeBoth.get());
					}
					@Override
					protected void onFinish() {
						if (wip.decrementAndGet() == 0) {
							observer.finish();
						}
						close();
					}
					@Override
					protected void onNext(U value) {
						rightRef.set(value);
						observer.next(selector.invoke(leftRef.get(), value));
					}
				};
				closeBoth.set(Closeables.close(obs1, obs2));
				obs1.add(new Object(), left);
				obs2.add(new Object(), right);
				return closeBoth.get();
			}
		};
	}
	/**
	 * Returns an observable which combines the latest values of
	 * both streams whenever one sends a new value, but only after both sent a value.
	 * <p><b>Exception semantics:</b> if any stream throws an exception, the output stream
	 * throws an exception and all subscriptions are terminated.</p>
	 * <p><b>Completion semantics:</b> The output stream terminates
	 * after both streams terminate.</p>
	 * <p>The function will start combining the values only when both sides have already sent
	 * a value.</p>
	 * @param <T> the left element type
	 * @param <U> the right element type
	 * @param <V> the result element type
	 * @param left the left stream
	 * @param right the right stream
	 * @param selector the function which combines values from both streams and returns a new value
	 * @return the new observable.
	 */
	public static <T, U, V> Observable<V> combineLatestSent(
			final Observable<? extends T> left,
			final Observable<? extends U> right,
			final Func2<? super T, ? super U, ? extends V> selector
	) {
		return new Observable<V>() {
			@Override
			public Closeable register(final Observer<? super V> observer) {
				// both inner observers share this lock so their callbacks are serialized
				final Lock lock = new ReentrantLock(true);
				final AtomicReference<Closeable> closeBoth = new AtomicReference<Closeable>();
				final AtomicReference<T> leftRef = new AtomicReference<T>();
				// set to true once the left side has produced its first value
				final AtomicBoolean leftFirst = new AtomicBoolean();
				final AtomicReference<U> rightRef = new AtomicReference<U>();
				// set to true once the right side has produced its first value
				final AtomicBoolean rightFirst = new AtomicBoolean();
				// counts the two sources still running; finish() fires when it reaches zero
				final AtomicInteger wip = new AtomicInteger(2);
				DefaultObserverEx<T> obs1 = new DefaultObserverEx<T>(lock, false) {
					@Override
					protected void onError(Throwable ex) {
						observer.error(ex);
						Closeables.close0(closeBoth.get());
					}
					@Override
					protected void onFinish() {
						if (wip.decrementAndGet() == 0) {
							observer.finish();
						}
						close();
					}
					@Override
					protected void onNext(T value) {
						leftRef.set(value);
						leftFirst.set(true);
						// only combine once the opposite side has sent at least one value
						if (rightFirst.get()) {
							observer.next(selector.invoke(value, rightRef.get()));
						}
					}
				};
				DefaultObserverEx<U> obs2 = new DefaultObserverEx<U>(lock, false) {
					@Override
					protected void onError(Throwable ex) {
						observer.error(ex);
						Closeables.close0(closeBoth.get());
					}
					@Override
					protected void onFinish() {
						if (wip.decrementAndGet() == 0) {
							observer.finish();
						}
						close();
					}
					@Override
					protected void onNext(U value) {
						rightRef.set(value);
						rightFirst.set(true);
						if (leftFirst.get()) {
							observer.next(selector.invoke(leftRef.get(), value));
						}
					}
				};
				closeBoth.set(Closeables.close(obs1, obs2));
				obs1.add(new Object(), left);
				obs2.add(new Object(), right);
				return closeBoth.get();
			}
		};
	}
	/**
	 * Concatenates the source observables in a way that when the first finish(), the
	 * second gets registered and continued, and so on.
	 * FIXME not sure how it should handle closability
	 * @param <T> the type of the values to observe
	 * @param sources the source list of subsequent observables
	 * @return the concatenated observable
	 */
	@Nonnull
	public static <T> Observable<T> concat(
			@Nonnull final Iterable<? extends Observable<? extends T>> sources) {
		return new Observable<T>() {
			@Override
			public Closeable register(final Observer<? super T> observer) {
				final Iterator<? extends Observable<? extends T>> it = sources.iterator();
				if (it.hasNext()) {
					DefaultObserver<T> obs = new DefaultObserver<T>(false) {
						/** The current registration. */
						@GuardedBy("lock")
						Closeable current;
						// instance initializer: register on the first source under the lock,
						// so events can't race with the assignment of 'current'
						{
							lock.lock();
							try {
								current = it.next().register(this);
							} finally {
								lock.unlock();
							}
						}
						@Override
						protected void onClose() {
							Closeables.close0(current);
						}
						@Override
						public void onError(Throwable ex) {
							observer.error(ex);
							close();
						}
						@Override
						public void onFinish() {
							// hop to the next source, or complete when exhausted
							if (it.hasNext()) {
								Closeables.close0(current);
								current = it.next().register(this);
							} else {
								observer.finish();
								close();
							}
						}
						@Override
						public void onNext(T value) {
							observer.next(value);
						}
					};
					return obs;
				}
				// no sources at all: behave like empty()
				return Reactive.<T>empty().register(observer);
			}
		};
	}
	/**
	 * Concatenate the the multiple sources of T one after another.
	 * <p><b>Exception semantics:</b> if the sources or any inner observer signals an
	 * error, the outer observable will signal that error and the sequence is terminated.</p>
	 * @param <T> the element type
	 * @param sources the observable sequence of the observable sequence of Ts.
	 * @return the new observable
	 */
	public static <T> Observable<T> concat(
			final Observable<? extends Observable<T>> sources
	) {
		return new Observable<T>() {
			@Override
			public Closeable register(final Observer<? super T> observer) {
				// inner observables that arrived while another one is still running
				final LinkedBlockingQueue<Observable<T>> sourceQueue = new LinkedBlockingQueue<Observable<T>>();
				// 1 for the outer source itself plus 1 per active inner registration
				final AtomicInteger wip = new AtomicInteger(1);
				DefaultObserverEx<Observable<T>> o = new DefaultObserverEx<Observable<T>>(true) {
					/** The first value arrived? */
					@GuardedBy("lock")
					boolean first;
					{
						add("sources", sources);
					}
					/**
					 * The inner exception to forward.
					 * @param ex the exception
					 */
					void innerError(Throwable ex) {
						error(ex);
					}
					@Override
					protected void onError(Throwable ex) {
						observer.error(ex);
					}
					@Override
					protected void onFinish() {
						if (wip.decrementAndGet() == 0) {
							observer.finish();
						}
					}
					@Override
					protected void onNext(Observable<T> value) {
						// run the very first inner immediately; queue the rest
						if (!first) {
							first = true;
							registerOn(value);
						} else {
							sourceQueue.add(value);
						}
					}
					void registerOn(Observable<T> value) {
						wip.incrementAndGet();
						replace("source", "source", value.register(new DefaultObserver<T>(lock, true) {
							@Override
							public void onError(Throwable ex) {
								innerError(ex);
							}
							@Override
							public void onFinish() {
								// continue with the next queued inner, if any
								Observable<T> nextO = sourceQueue.poll();
								if (nextO != null) {
									registerOn(nextO);
								} else {
									if (wip.decrementAndGet() == 0) {
										observer.finish();
										remove("source");
									} else {
										// NOTE(review): resetting 'first' lets a later outer next()
										// start immediately again — presumably intentional; verify
										first = true;
									}
								}
							}
							@Override
							public void onNext(T value) {
								observer.next(value);
							}
						}));
					}
				};
				return o;
			}
		};
	}
/**
* Concatenate two observables in a way when the first finish() the second is registered
* and continued with.
* @param <T> the type of the elements
* @param first the first observable
* @param second the second observable
* @return the concatenated observable
*/
@Nonnull
public static <T> Observable<T> concat(
@Nonnull Observable<? extends T> first,
@Nonnull Observable<? extends T> second) {
List<Observable<? extends T>> list = new ArrayList<Observable<? extends T>>();
list.add(first);
list.add(second);
return concat(list);
}
/**
* Signals a single TRUE if the source observable signals a value equals() with the source value.
* Both the source and the test value might be null. The signal goes after the first encounter of
* the given value.
* @param <T> the type of the observed values
* @param source the source observable
* @param value the value to look for
* @return the observer for contains
*/
@Nonnull
public static <T> Observable<Boolean> contains(
@Nonnull final Observable<? extends T> source,
final T value) {
return any(source, new Func1<T, Boolean>() {
@Override
public Boolean invoke(T param1) {
return param1 == value || (param1 != null && param1.equals(value));
};
});
}
/**
* Counts the number of elements in the observable source.
* @param <T> the element type
* @param source the source observable
* @return the count signal
*/
@Nonnull
public static <T> Observable<Integer> count(
@Nonnull final Observable<T> source) {
return new Observable<Integer>() {
@Override
public Closeable register(final Observer<? super Integer> observer) {
//FIXME sequence guaranties?
return source.register(new Observer<T>() {
/** The counter. */
int count;
@Override
public void error(Throwable ex) {
observer.error(ex);
}
@Override
public void finish() {
observer.next(count);
observer.finish();
}
@Override
public void next(T value) {
count++;
}
});
}
};
}
/**
* Counts the number of elements in the observable source as a long.
* @param <T> the element type
* @param source the source observable
* @return the count signal
*/
@Nonnull
public static <T> Observable<Long> countLong(
@Nonnull final Observable<T> source) {
return new Observable<Long>() {
@Override
public Closeable register(final Observer<? super Long> observer) {
//FIXME sequence guaranties?
return source.register(new Observer<T>() {
/** The counter. */
long count;
@Override
public void error(Throwable ex) {
observer.error(ex);
}
@Override
public void finish() {
observer.next(count);
observer.finish();
}
@Override
public void next(T value) {
count++;
}
});
}
};
}
/**
* Create an observable instance by submitting a function which takes responsibility
* for registering observers.
* @param <T> the type of the value to observe
* @param subscribe the function to manage new subscriptions
* @return the observable instance
*/
@Nonnull
public static <T> Observable<T> create(
@Nonnull final Func1<Observer<? super T>, ? extends Action0> subscribe) {
return new Observable<T>() {
@Override
public Closeable register(Observer<? super T> observer) {
final Action0 a = subscribe.invoke(observer);
return new Closeable() {
@Override
public void close() {
a.invoke();
}
};
}
};
}
/**
* Create an observable instance by submitting a function which takes responsibility
* for registering observers and returns a custom Closeable to terminate the registration.
* @param <T> the type of the value to observe
* @param subscribe the function to manage new subscriptions
* @return the observable instance
*/
@Nonnull
public static <T> Observable<T> createWithCloseable(
@Nonnull final Func1<Observer<? super T>, ? extends Closeable> subscribe) {
return new Observable<T>() {
@Override
public Closeable register(Observer<? super T> observer) {
return subscribe.invoke(observer);
}
};
}
/**
* Constructs an observer which logs errors in case next(), finish() or error() is called
* and the observer is not in running state anymore due an earlier finish() or error() call.
* @param <T> the element type.
* @param source the source observable
* @return the augmented observable
*/
@Nonnull
public static <T> Observable<T> debugState(
@Nonnull final Observable<? extends T> source) {
return new Observable<T>() {
@Override
public Closeable register(final Observer<? super T> observer) {
return source.register(new Observer<T>() {
ObserverState state = ObserverState.OBSERVER_RUNNING;
@Override
public void error(Throwable ex) {
if (state != ObserverState.OBSERVER_RUNNING) {
new IllegalStateException(state.toString()).printStackTrace();
}
state = ObserverState.OBSERVER_ERROR;
observer.error(ex);
}
@Override
public void finish() {
if (state != ObserverState.OBSERVER_RUNNING) {
new IllegalStateException(state.toString()).printStackTrace();
}
state = ObserverState.OBSERVER_FINISHED;
observer.finish();
}
@Override
public void next(T value) {
if (state != ObserverState.OBSERVER_RUNNING) {
new IllegalStateException(state.toString()).printStackTrace();
}
observer.next(value);
}
});
}
};
}
/**
* The returned observable invokes the <code>observableFactory</code> whenever an observer
* tries to subscribe to it.
* @param <T> the type of elements to observer
* @param observableFactory the factory which is responsivle to create a source observable.
* @return the result observable
*/
@Nonnull
public static <T> Observable<T> defer(
@Nonnull final Func0<? extends Observable<? extends T>> observableFactory) {
return new Observable<T>() {
@Override
public Closeable register(Observer<? super T> observer) {
return observableFactory.invoke().register(observer);
}
};
}
/**
* Delays the propagation of events of the source by the given amount. It uses the pool for the scheduled waits.
* The delay preserves the relative time difference between subsequent notifiactions.
* It uses the default scheduler pool when submitting the delayed values
* @param <T> the type of elements
* @param source the source of Ts
* @param time the time value
* @param unit the time unit
* @return the delayed observable of Ts
*/
@Nonnull
public static <T> Observable<T> delay(
@Nonnull final Observable<? extends T> source,
final long time,
@Nonnull final TimeUnit unit) {
return delay(source, time, unit, DEFAULT_SCHEDULER.get());
}
	/**
	 * Delays the propagation of events of the source by the given amount. It uses the pool for the scheduled waits.
	 * The delay preserves the relative time difference between subsequent notifications
	 * @param <T> the type of elements
	 * @param source the source of Ts
	 * @param time the time value
	 * @param unit the time unit
	 * @param pool the pool to use for scheduling
	 * @return the delayed observable of Ts
	 */
	@Nonnull
	public static <T> Observable<T> delay(
			@Nonnull final Observable<? extends T> source,
			final long time,
			@Nonnull final TimeUnit unit,
			@Nonnull final Scheduler pool) {
		return new Observable<T>() {
			@Override
			public Closeable register(final Observer<? super T> observer) {
				DefaultObserver<T> obs = new DefaultObserver<T>(true) {
					/** The outstanding requests. */
					final BlockingQueue<Closeable> outstanding = new LinkedBlockingQueue<Closeable>();
					@Override
					public void onClose() {
						// cancel every still-pending delayed delivery
						List<Closeable> list = new LinkedList<Closeable>();
						outstanding.drainTo(list);
						for (Closeable c : list) {
							Closeables.close0(c);
						}
						// NOTE(review): calling super.close() from within the close hook looks
						// suspicious (possible re-entry) — confirm against DefaultObserver.close()
						super.close();
					}
					@Override
					public void onError(final Throwable ex) {
						// the error itself is also delivered after the delay
						Runnable r = new Runnable() {
							@Override
							public void run() {
								try {
									observer.error(ex);
									close();
								} finally {
									outstanding.poll();
								}
							}
						};
						outstanding.add(pool.schedule(r, time, unit));
					}
					@Override
					public void onFinish() {
						Runnable r = new Runnable() {
							@Override
							public void run() {
								try {
									observer.finish();
									close();
								} finally {
									outstanding.poll();
								}
							}
						};
						outstanding.add(pool.schedule(r, time, unit));
					}
					@Override
					public void onNext(final T value) {
						Runnable r = new Runnable() {
							@Override
							public void run() {
								try {
									observer.next(value);
								} finally {
									// NOTE(review): poll() removes the oldest handle, not necessarily
									// this task's own Closeable — assumed FIFO completion; verify
									outstanding.poll();
								}
							}
						};
						outstanding.add(pool.schedule(r, time, unit));
					}
				};
				return obs;
			}
		};
	}
/**
* Returns an observable which converts all option messages
* back to regular next(), error() and finish() messages.
* The returned observable adheres to the <code>next* (error|finish)?</code> pattern,
* which ensures that no further options are relayed after an error or finish.
* @param <T> the source element type
* @param source the source of Ts
* @return the new observable
* @see #materialize(Observable)
*/
@Nonnull
public static <T> Observable<T> dematerialize(
@Nonnull final Observable<? extends Option<T>> source) {
return new Observable<T>() {
@Override
public Closeable register(final Observer<? super T> observer) {
return source.register(new Observer<Option<T>>() {
/** Keeps track of the observer's state. */
final AtomicBoolean done = new AtomicBoolean();
@Override
public void error(Throwable ex) {
if (!done.get()) {
done.set(true);
observer.error(ex);
}
}
@Override
public void finish() {
if (!done.get()) {
done.set(true);
observer.finish();
}
}
@Override
public void next(Option<T> value) {
if (!done.get()) {
if (Option.isNone(value)) {
done.set(true);
observer.finish();
} else
if (Option.isSome(value)) {
observer.next(value.value());
} else {
done.set(true);
observer.error(Option.getError(value));
}
}
}
});
}
};
}
/**
* Dispatches the option to the various Observer methods.
* @param <T> the value type
* @param observer the observer
* @param value the value to dispatch
*/
@Nonnull
public static <T> void dispatch(
@Nonnull Observer<? super T> observer,
@Nonnull Option<T> value) {
if (value == Option.none()) {
observer.finish();
} else
if (Option.isError(value)) {
observer.error(((Option.Error<?>)value).error());
} else {
observer.next(value.value());
}
}
/**
* Returns an observable which fires next() events only when the subsequent values differ
* in terms of Object.equals().
* @param <T> the type of the values
* @param source the source observable
* @return the observable
*/
@Nonnull
public static <T> Observable<T> distinct(
@Nonnull final Observable<? extends T> source) {
return distinct(source, Functions.<T>identity());
}
/**
* Returns Ts from the source observable if the subsequent keys extracted by <code>keyExtractor</code> are different.
* @param <T> the type of the values to observe
* @param <U> the key type check for distinction
* @param source the source of Ts
* @param keyExtractor the etractor for the keys
* @return the new filtered observable
*/
@Nonnull
public static <T, U> Observable<T> distinct(
@Nonnull final Observable<? extends T> source,
@Nonnull final Func1<T, U> keyExtractor) {
return new Observable<T>() {
@Override
public Closeable register(final Observer<? super T> observer) {
return source.register(new Observer<T>() {
/** Indication as the first. */
boolean first = true;
/** The last value. */
U lastKey;
@Override
public void error(Throwable ex) {
observer.error(ex);
}
@Override
public void finish() {
observer.finish();
}
@Override
public void next(T value) {
U key = keyExtractor.invoke(value);
if (first) {
first = false;
observer.next(value);
} else
if (lastKey != value && (lastKey == null || !lastKey.equals(key))) {
observer.next(value);
}
lastKey = key;
}
});
}
};
}
/**
* Maintains a queue of Ts which is then drained by the pump. Uses the default pool.
* FIXME not sure what this method should do and how.
* @param <T> the type of the values
* @param source the source of Ts
* @param pump the pump that drains the queue
* @return the new observable
*/
@Nonnull
public static <T> Observable<Void> drain(
@Nonnull final Observable<? extends T> source,
@Nonnull final Func1<? super T, ? extends Observable<Void>> pump) {
return drain(source, pump, DEFAULT_SCHEDULER.get());
}
	/**
	 * Maintains a queue of Ts which is then drained by the pump.
	 * FIXME not sure what this method should do and how.
	 * @param <T> the type of the values
	 * @param source the source of Ts
	 * @param pump the pump that drains the queue
	 * @param pool the pool for the drain
	 * @return the new observable
	 */
	@Nonnull
	public static <T> Observable<Void> drain(
			@Nonnull final Observable<? extends T> source,
			@Nonnull final Func1<? super T, ? extends Observable<Void>> pump,
			@Nonnull final Scheduler pool) {
		return new Observable<Void>() {
			@Override
			public Closeable register(final Observer<? super Void> observer) {
				// keep track of the forked observers so the last should invoke finish() on the observer
				DefaultObserver<T> obs = new DefaultObserver<T>(true) {
					/** The work in progress counter. */
					final AtomicInteger wip = new AtomicInteger(1);
					/** The executor which ensures the sequence. */
					final SingleLaneExecutor<T> exec = new SingleLaneExecutor<T>(pool, new Action1<T>() {
						@Override
						public void invoke(T value) {
							// each queued value is pushed through the pump; its completion
							// decrements wip so the last one can finish the outer observer
							pump.invoke(value).register(
								new Observer<Void>() {
									@Override
									public void error(Throwable ex) {
										lock.lock();
										try {
											observer.error(ex);
											close();
										} finally {
											lock.unlock();
										}
									}
									@Override
									public void finish() {
										if (wip.decrementAndGet() == 0) {
											observer.finish();
											close();
										}
									}
									@Override
									public void next(Void value) {
										// a pump observable must not emit values
										throw new AssertionError();
									}
								}
							);
						};
					});
					@Override
					public void onClose() {
						// exec.close(); FIXME should not cancel the pool?!
					}
					@Override
					public void onError(Throwable ex) {
						observer.error(ex);
						close();
					}
					@Override
					public void onFinish() {
						// drop the initial "source still running" token
						if (wip.decrementAndGet() == 0) {
							observer.finish();
						}
					}
					@Override
					public void onNext(T value) {
						wip.incrementAndGet();
						exec.add(value);
					}
				};
				return Closeables.close(obs, source.register(obs));
			}
		};
	}
/**
* @param <T> the type of the values to observe (irrelevant)
* @return Returns an empty observable which signals only finish() on the default observer pool.
*/
@Nonnull
public static <T> Observable<T> empty() {
return empty(DEFAULT_SCHEDULER.get());
}
/**
* Returns an empty observable which signals only finish() on the given pool.
* @param <T> the expected type, (irrelevant)
* @param pool the pool to invoke the the finish()
* @return the observable
*/
@Nonnull
public static <T> Observable<T> empty(
@Nonnull final Scheduler pool) {
return new Observable<T>() {
@Override
public Closeable register(final Observer<? super T> observer) {
return pool.schedule(new Runnable() {
@Override
public void run() {
observer.finish();
}
});
}
};
}
/**
* Invokes the given action when the source signals a finish() or error().
* @param <T> the type of the observed values
* @param source the source of Ts
* @param action the action to invoke on finish() or error()
* @return the new observable
*/
@Nonnull
public static <T> Observable<T> finish(
@Nonnull final Observable<? extends T> source,
@Nonnull final Action0 action) {
return new Observable<T>() {
@Override
public Closeable register(final Observer<? super T> observer) {
return source.register(new Observer<T>() {
@Override
public void error(Throwable ex) {
action.invoke();
observer.error(ex);
}
@Override
public void finish() {
action.invoke();
observer.finish();
}
@Override
public void next(T value) {
observer.next(value);
}
});
}
};
}
/**
* Blocks until the first element of the observable becomes availabel and returns that element.
* Might block forever.
* Might throw a NoSuchElementException when the observable doesn't produce any more elements
* @param <T> the type of the elements
* @param source the source of Ts
* @return the first element
*/
public static <T> T first(
@Nonnull final Observable<? extends T> source) {
CloseableIterator<T> it = toIterable(source).iterator();
try {
if (it.hasNext()) {
return it.next();
}
throw new NoSuchElementException();
} finally {
Closeables.close0(it);
}
}
/**
* Creates a concatenated sequence of Observables based on the decision function of <code>selector</code> keyed by the source iterable.
* @param <T> the type of the source values
* @param <U> the type of the observable elements.
* @param source the source of keys
* @param selector the selector of keys which returns a new observable
* @return the concatenated observable.
*/
public static <T, U> Observable<U> forEach(
@Nonnull final Iterable<? extends T> source,
@Nonnull final Func1<? super T, ? extends Observable<? extends U>> selector) {
List<Observable<? extends U>> list = new ArrayList<Observable<? extends U>>();
for (T t : source) {
list.add(selector.invoke(t));
}
return concat(list);
}
/**
* Runs the observables in parallel and joins their last values whenever one fires.
* FIXME not sure what this method should do in case of error.
* @param <T> the type of the source values
* @param sources the list of sources
* @return the observable
*/
@Nonnull
public static <T> Observable<List<T>> forkJoin(
@Nonnull final Iterable<? extends Observable<? extends T>> sources) {
return new Observable<List<T>>() {
@Override
public Closeable register(final Observer<? super List<T>> observer) {
final List<AtomicReference<T>> lastValues = new ArrayList<AtomicReference<T>>();
final List<Observable<? extends T>> observableList = new ArrayList<Observable<? extends T>>();
final List<Observer<T>> observers = new ArrayList<Observer<T>>();
final AtomicInteger wip = new AtomicInteger(observableList.size() + 1);
int i = 0;
for (Observable<? extends T> o : sources) {
final int j = i;
observableList.add(o);
lastValues.add(new AtomicReference<T>());
observers.add(new Observer<T>() {
/** The last value. */
T last;
@Override
public void error(Throwable ex) {
// TODO Auto-generated method stub
}
@Override
public void finish() {
lastValues.get(j).set(last);
runIfComplete(observer, lastValues, wip);
}
@Override
public void next(T value) {
last = value;
}
});
}
List<Closeable> closeables = new ArrayList<Closeable>();
i = 0;
for (Observable<? extends T> o : observableList) {
closeables.add(o.register(observers.get(i)));
i++;
}
runIfComplete(observer, lastValues, wip);
return Closeables.closeAll(closeables);
}
/**
* Runs the completion sequence once the WIP drops to zero.
* @param observer the observer who will receive the values
* @param lastValues the array of last values
* @param wip the work in progress counter
*/
public void runIfComplete(
final Observer<? super List<T>> observer,
final List<AtomicReference<T>> lastValues,
final AtomicInteger wip) {
if (wip.decrementAndGet() == 0) {
List<T> values = new ArrayList<T>();
for (AtomicReference<T> r : lastValues) {
values.add(r.get());
}
observer.next(values);
observer.finish();
}
}
};
}
/**
* Generates a stream of Us by using a value T stream using the default pool fo the generator loop.
* If T = int and U is double, this would be seen as for (int i = 0; i < 10; i++) { yield return i / 2.0; }
* @param <T> the type of the generator values
* @param <U> the type of the observed values
* @param initial the initial generator value
* @param condition the condition that must hold to continue generating Ts
* @param next the function that computes the next value of T
* @param selector the selector which turns Ts into Us.
* @return the observable
*/
@Nonnull
public static <T, U> Observable<U> generate(
final T initial,
@Nonnull final Func1<? super T, Boolean> condition,
@Nonnull final Func1<? super T, ? extends T> next,
@Nonnull final Func1<? super T, ? extends U> selector) {
return generate(initial, condition, next, selector, DEFAULT_SCHEDULER.get());
}
/**
* Generates a stream of Us by using a value T stream.
* If T = int and U is double, this would be seen as for (int i = 0; i < 10; i++) { yield return i / 2.0; }
* @param <T> the type of the generator values
* @param <U> the type of the observed values
* @param initial the initial generator value
* @param condition the condition that must hold to continue generating Ts
* @param next the function that computes the next value of T
* @param selector the selector which turns Ts into Us.
* @param pool the thread pool where the generation loop should run.
* @return the observable
*/
@Nonnull
public static <T, U> Observable<U> generate(
final T initial,
@Nonnull final Func1<? super T, Boolean> condition,
@Nonnull final Func1<? super T, ? extends T> next,
@Nonnull final Func1<? super T, ? extends U> selector,
@Nonnull final Scheduler pool) {
return new Observable<U>() {
@Override
public Closeable register(final Observer<? super U> observer) {
DefaultRunnable s = new DefaultRunnable() {
@Override
public void onRun() {
T t = initial;
while (condition.invoke(t) && !cancelled()) {
observer.next(selector.invoke(t));
t = next.invoke(t);
}
if (!cancelled()) {
observer.finish();
}
}
};
return pool.schedule(s);
}
};
}
/**
* Generates a stream of Us by using a value T stream.
* If T = int and U is double, this would be seen as for (int i = 0; i < 10; i++) { sleep(time); yield return i / 2.0; }
* @param <T> the type of the generator values
* @param <U> the type of the observed values
* @param initial the initial generator value
* @param condition the condition that must hold to continue generating Ts
* @param next the function that computes the next value of T
* @param selector the selector which turns Ts into Us.
* @param delay the selector which tells how much to wait before releasing the next U
* @return the observable
*/
@Nonnull
public static <T, U> Observable<Timestamped<U>> generateTimed(
final T initial,
@Nonnull final Func1<? super T, Boolean> condition,
@Nonnull final Func1<? super T, ? extends T> next,
@Nonnull final Func1<? super T, ? extends U> selector,
@Nonnull final Func1<? super T, Long> delay) {
return generateTimed(initial, condition, next, selector, delay, DEFAULT_SCHEDULER.get());
}
/**
* Generates a stream of Us by using a value T stream.
* If T = int and U is double, this would be seen as for (int i = 0; i < 10; i++) { sleep(time); yield return i / 2.0; }
* FIXME timeunit for the delay function!
* @param <T> the type of the generator values
* @param <U> the type of the observed values
* @param initial the initial generator value
* @param condition the condition that must hold to continue generating Ts
* @param next the function that computes the next value of T
* @param selector the selector which turns Ts into Us.
* @param delay the selector which tells how much to wait (in milliseconds) before releasing the next U
* @param pool the scheduled pool where the generation loop should run.
* @return the observable
*/
@Nonnull
public static <T, U> Observable<Timestamped<U>> generateTimed(
final T initial,
@Nonnull final Func1<? super T, Boolean> condition,
@Nonnull final Func1<? super T, ? extends T> next,
@Nonnull final Func1<? super T, ? extends U> selector,
@Nonnull final Func1<? super T, Long> delay,
@Nonnull final Scheduler pool) {
return new Observable<Timestamped<U>>() {
@Override
public Closeable register(final Observer<? super Timestamped<U>> observer) {
// the cancellation indicator
DefaultRunnable s = new DefaultRunnable() {
T current = initial;
@Override
public void onRun() {
U invoke = selector.invoke(current);
Timestamped<U> of = Timestamped.of(invoke, System.currentTimeMillis());
observer.next(of);
final T tn = next.invoke(current);
current = tn;
if (condition.invoke(tn) && !cancelled()) {
pool.schedule(this, delay.invoke(tn), TimeUnit.MILLISECONDS);
} else {
if (!cancelled()) {
observer.finish();
}
}
}
};
if (condition.invoke(initial)) {
return pool.schedule(s, delay.invoke(initial), TimeUnit.MILLISECONDS);
}
return Functions.EMPTY_CLOSEABLE;
}
};
}
	/**
	 * Returns the scheduler used by the overloads in this class which do not
	 * take an explicit {@code Scheduler} parameter.
	 * @return the current default pool used by the Observables methods
	 */
	@Nonnull
	public static Scheduler getDefaultScheduler() {
		return DEFAULT_SCHEDULER.get();
	}
/**
* Group the specified source accoring to the keys provided by the extractor function.
* The resulting observable gets notified once a new group is encountered.
* Each previously encountered group by itself receives updates along the way.
* If the source finish(), all encountered group will finish().
* FIXME not sure how this should work.
* @param <T> the type of the source element
* @param <Key> the key type of the group
* @param source the source of Ts
* @param keyExtractor the key extractor which creates Keys from Ts
* @return the observable
*/
@Nonnull
public static <T, Key> Observable<GroupedObservable<Key, T>> groupBy(
@Nonnull final Observable<? extends T> source,
@Nonnull final Func1<? super T, ? extends Key> keyExtractor) {
return groupBy(source, keyExtractor, Functions.<T>identity());
}
/**
* Group the specified source accoring to the keys provided by the extractor function.
* The resulting observable gets notified once a new group is encountered.
* Each previously encountered group by itself receives updates along the way.
* If the source finish(), all encountered group will finish().
* FIXME not sure how this should work
* @param <T> the type of the source element
* @param <U> the type of the output element
* @param <Key> the key type of the group
* @param source the source of Ts
* @param keyExtractor the key extractor which creates Keys from Ts
* @param valueExtractor the extractor which makes Us from Ts
* @return the observable
*/
@Nonnull
public static <T, U, Key> Observable<GroupedObservable<Key, U>> groupBy(
@Nonnull final Observable<? extends T> source,
@Nonnull final Func1<? super T, ? extends Key> keyExtractor,
@Nonnull final Func1<? super T, ? extends U> valueExtractor) {
return new Observable<GroupedObservable<Key, U>>() {
@Override
public Closeable register(
final Observer<? super GroupedObservable<Key, U>> observer) {
final ConcurrentMap<Key, GroupedRegisteringObservable<Key, U>> knownGroups = new ConcurrentHashMap<Key, GroupedRegisteringObservable<Key, U>>();
return source.register(new Observer<T>() {
@Override
public void error(Throwable ex) {
observer.error(ex);
}
@Override
public void finish() {
for (GroupedRegisteringObservable<Key, U> group : knownGroups.values()) {
group.finish();
}
observer.finish();
}
@Override
public void next(T value) {
final Key key = keyExtractor.invoke(value);
GroupedRegisteringObservable<Key, U> group = knownGroups.get(key);
if (group == null) {
group = new GroupedRegisteringObservable<Key, U>(key);
GroupedRegisteringObservable<Key, U> group2 = knownGroups.putIfAbsent(key, group);
if (group2 != null) {
group = group2;
}
observer.next(group);
}
group.next(valueExtractor.invoke(value));
}
});
}
};
}
/**
* Groups the source sequence of Ts until the specified duration for that group fires.
* <p>The key comparison is done by the <code>Object.equals()</code> semantics of the <code>HashMap</code>.</p>
* <p><b>Exception semantics:</b> if the source throws an exception, all active groups will receive
* the exception followed by the outer observer of the groups.</p>
* <p><b>Completion semantics:</b> if the source finishes, all active groups will receive a finish
* signal followed by the outer observer.</p>
* @param <T> the source element type
* @param <K> the key type
* @param <D> the duration element type, ignored
* @param source the source of Ts
* @param keySelector the key extractor
* @param durationSelector the observable for a particular group termination
* @return the new observable
*/
public static <T, K, D> Observable<GroupedObservable<K, T>> groupByUntil(
final Observable<? extends T> source,
final Func1<? super T, ? extends K> keySelector,
final Func1<? super GroupedObservable<K, T>, ? extends Observable<D>> durationSelector
) {
return groupByUntil(source, keySelector, Functions.<T>identity(), durationSelector);
}
/**
* Groups the source sequence of Ts until the specified duration for that group fires.
* <p><b>Exception semantics:</b> if the source throws an exception, all active groups will receive
* the exception followed by the outer observer of the groups.</p>
* <p><b>Completion semantics:</b> if the source finishes, all active groups will receive a finish
* signal followed by the outer observer.</p>
* @param <T> the source element type
* @param <K> the key type
* @param <D> the duration element type, ignored
* @param source the source of Ts
* @param keySelector the key extractor
* @param durationSelector the observable for a particular group termination
* @param keyComparer the key comparer for the grouping
* @return the new observable
*/
public static <T, K, D> Observable<GroupedObservable<K, T>> groupByUntil(
final Observable<? extends T> source,
final Func1<? super T, ? extends K> keySelector,
final Func1<? super GroupedObservable<K, T>, ? extends Observable<D>> durationSelector,
final Func2<? super K, ? super K, Boolean> keyComparer
) {
return groupByUntil(source, keySelector, Functions.<T>identity(), durationSelector, keyComparer);
}
/**
* Groups the source sequence of Ts until the specified duration for that group fires.
* <p>The key comparison is done by the <code>Object.equals()</code> semantics of the <code>HashMap</code>.</p>
* <p><b>Exception semantics:</b> if the source throws an exception, all active groups will receive
* the exception followed by the outer observer of the groups.</p>
* <p><b>Completion semantics:</b> if the source finishes, all active groups will receive a finish
* signal followed by the outer observer.</p>
* @param <T> the source element type
* @param <K> the key type
* @param <V> the value type
* @param <D> the duration element type, ignored
* @param source the source of Ts
* @param keySelector the key extractor
* @param valueSelector the value extractor
* @param durationSelector the observable for a particular group termination
* @return the new observable
*/
public static <T, K, V, D> Observable<GroupedObservable<K, V>> groupByUntil(
final Observable<? extends T> source,
final Func1<? super T, ? extends K> keySelector,
final Func1<? super T, ? extends V> valueSelector,
final Func1<? super GroupedObservable<K, V>, ? extends Observable<D>> durationSelector
) {
return new Observable<GroupedObservable<K, V>>() {
@Override
public Closeable register(
final Observer<? super GroupedObservable<K, V>> observer) {
DefaultObserverEx<T> o = new DefaultObserverEx<T>(true) {
/** The active groups. */
final Map<K, GroupedRegisteringObservable<K, V>> groups = new HashMap<K, GroupedRegisteringObservable<K, V>>();
{
add("source", source);
}
@Override
protected void onError(Throwable ex) {
for (Observer<V> o : groups.values()) {
o.error(ex);
}
observer.error(ex);
}
@Override
protected void onFinish() {
for (Observer<V> o : groups.values()) {
o.finish();
}
observer.finish();
}
@Override
protected void onNext(T value) {
final K k = keySelector.invoke(value);
final V v = valueSelector.invoke(value);
GroupedRegisteringObservable<K, V> gr = groups.get(k);
if (gr != null) {
gr = new GroupedRegisteringObservable<K, V>(k);
final GroupedRegisteringObservable<K, V> fgr = gr;
groups.put(k, gr);
add(fgr, durationSelector.invoke(gr).register(new DefaultObserver<D>(lock, true) {
@Override
protected void onError(Throwable ex) {
fgr.error(ex); // FIXME error propagation
groups.remove(k);
remove(fgr);
}
@Override
protected void onFinish() {
fgr.finish();
groups.remove(k);
remove(fgr);
}
@Override
protected void onNext(D value) {
fgr.finish();
groups.remove(k);
remove(fgr);
}
}));
observer.next(gr);
}
gr.next(v);
}
};
return o;
}
};
}
/**
* Groups the source sequence of Ts until the specified duration for that group fires.
* <p><b>Exception semantics:</b> if the source throws an exception, all active groups will receive
* the exception followed by the outer observer of the groups.</p>
* <p><b>Completion semantics:</b> if the source finishes, all active groups will receive a finish
* signal followed by the outer observer.</p>
* @param <T> the source element type
* @param <K> the key type
* @param <V> the value type
* @param <D> the duration element type, ignored
* @param source the source of Ts
* @param keySelector the key extractor
* @param valueSelector the value extractor
* @param durationSelector the observable for a particular group termination
* @param keyComparer the key comparer for the grouping
* @return the new observable
*/
public static <T, K, V, D> Observable<GroupedObservable<K, V>> groupByUntil(
final Observable<? extends T> source,
final Func1<? super T, ? extends K> keySelector,
final Func1<? super T, ? extends V> valueSelector,
final Func1<? super GroupedObservable<K, V>, ? extends Observable<D>> durationSelector,
final Func2<? super K, ? super K, Boolean> keyComparer
) {
return new Observable<GroupedObservable<K, V>>() {
@Override
public Closeable register(
final Observer<? super GroupedObservable<K, V>> observer) {
DefaultObserverEx<T> o = new DefaultObserverEx<T>(true) {
/** The key class with custom equality comparer. */
class Key {
/** The key value. */
final K key;
/**
* Constructor.
* @param key the key
*/
Key(K key) {
this.key = key;
}
@Override
public boolean equals(Object obj) {
if (obj instanceof Key) {
return keyComparer.invoke(key, ((Key)obj).key);
}
return false;
}
@Override
public int hashCode() {
return key != null ? key.hashCode() : 0;
}
}
/** The active groups. */
final Map<Key, GroupedRegisteringObservable<K, V>> groups = new HashMap<Key, GroupedRegisteringObservable<K, V>>();
{
add("source", source);
}
@Override
protected void onError(Throwable ex) {
for (Observer<V> o : groups.values()) {
o.error(ex);
}
observer.error(ex);
}
@Override
protected void onFinish() {
for (Observer<V> o : groups.values()) {
o.finish();
}
observer.finish();
}
@Override
protected void onNext(T value) {
final K kv = keySelector.invoke(value);
final Key k = new Key(kv);
final V v = valueSelector.invoke(value);
GroupedRegisteringObservable<K, V> gr = groups.get(k);
if (gr != null) {
gr = new GroupedRegisteringObservable<K, V>(kv);
final GroupedRegisteringObservable<K, V> fgr = gr;
groups.put(k, gr);
add(fgr, durationSelector.invoke(gr).register(new DefaultObserver<D>(lock, true) {
@Override
protected void onError(Throwable ex) {
fgr.error(ex); // FIXME error propagation
groups.remove(k);
remove(fgr);
}
@Override
protected void onFinish() {
fgr.finish();
groups.remove(k);
remove(fgr);
}
@Override
protected void onNext(D value) {
fgr.finish();
groups.remove(k);
remove(fgr);
}
}));
observer.next(gr);
}
gr.next(v);
}
};
return o;
}
};
}
	/**
	 * Returns an observable which correlates two streams of values based on
	 * their time when they overlapped and groups the results.
	 * <p>For each left value, the result selector receives an observable of the
	 * right values whose duration windows overlap that left value's window.</p>
	 * FIXME not sure how to implement it
	 * @param <Left> the element type of the left stream
	 * @param <Right> the element type of the right stream
	 * @param <LeftDuration> the overlapping duration indicator for the left stream (e.g., the event when it leaves)
	 * @param <RightDuration> the overlapping duration indicator for the right stream (e.g., the event when it leaves)
	 * @param <Result> the type of the grouping based on the coincidence.
	 * @param left the left source of elements
	 * @param right the right source of elements
	 * @param leftDurationSelector the duration selector for a left element
	 * @param rightDurationSelector the duration selector for a right element
	 * @param resultSelector the selector which will produce the output value
	 * @return the new observable
	 * @see #join(Observable, Observable, Func1, Func1, Func2)
	 */
	public static <Left, Right, LeftDuration, RightDuration, Result> Observable<Result> groupJoin(
			final Observable<? extends Left> left,
			final Observable<? extends Right> right,
			final Func1<? super Left, ? extends Observable<LeftDuration>> leftDurationSelector,
			final Func1<? super Right, ? extends Observable<RightDuration>> rightDurationSelector,
			final Func2<? super Left, ? super Observable<? extends Right>, ? extends Result> resultSelector
	) {
		return new Observable<Result>() {
			@Override
			public Closeable register(final Observer<? super Result> observer) {
				// single fair lock shared by both sides so their callbacks never interleave
				final Lock lock = new ReentrantLock(true);
				// the left/right values whose duration window is still open
				final HashSet<Left> leftActive = new HashSet<Left>();
				final HashSet<Right> rightActive = new HashSet<Right>();
				// per-right-value group observable handed to the result selector;
				// identity map, so duplicate right values keep distinct groups
				final Map<Right, DefaultObservable<Right>> rightGroups = new IdentityHashMap<Right, DefaultObservable<Right>>();
				final AtomicReference<Closeable> closeBoth = new AtomicReference<Closeable>();
				DefaultObserverEx<Left> o1 = new DefaultObserverEx<Left>(lock, true) {
					/** Relay the inner error to the outer. */
					void innerError(Throwable ex) {
						error(ex);
					}
					@Override
					protected void onClose() {
						super.onClose();
						// closing one side tears down both registrations
						Closeables.close0(closeBoth.get());
					}
					@Override
					protected void onError(Throwable ex) {
						observer.error(ex);
					}
					@Override
					protected void onFinish() {
						observer.finish();
					}
					@Override
					protected void onNext(final Left value) {
						leftActive.add(value);
						Observable<LeftDuration> completion = leftDurationSelector.invoke(value);
						final Object token = new Object();
						// the duration observable decides how long this left value stays active
						add(token, completion.register(new DefaultObserver<LeftDuration>(lock, true) {
							@Override
							protected void onClose() {
								remove(token);
							}
							@Override
							protected void onError(Throwable ex) {
								innerError(ex);
							}
							@Override
							protected void onFinish() {
								leftActive.remove(value);
							}
							@Override
							protected void onNext(LeftDuration value) {
								// NO OP?
							}
						}));
						// pair the new left value with every currently-open right group
						for (Right r : rightActive) {
							observer.next(resultSelector.invoke(value, rightGroups.get(r)));
						}
					}
				};
				DefaultObserverEx<Right> o2 = new DefaultObserverEx<Right>(lock, true) {
					/** Relay the inner error to the outer. */
					void innerError(Throwable ex) {
						error(ex);
					}
					@Override
					protected void onClose() {
						super.onClose();
						Closeables.close0(closeBoth.get());
					}
					@Override
					protected void onError(Throwable ex) {
						observer.error(ex);
					}
					@Override
					protected void onFinish() {
						observer.finish();
					}
					@Override
					protected void onNext(final Right value) {
						rightActive.add(value);
						Observable<RightDuration> completion = rightDurationSelector.invoke(value);
						final Object token = new Object();
						add(token, completion.register(new DefaultObserver<RightDuration>(lock, true) {
							@Override
							protected void onClose() {
								remove(token);
								// finishing the duration also terminates this right value's group
								DefaultObservable<Right> rg = rightGroups.remove(value);
								if (rg != null) {
									rg.finish();
								}
							}
							@Override
							protected void onError(Throwable ex) {
								innerError(ex);
							}
							@Override
							protected void onFinish() {
								rightActive.remove(value);
							}
							@Override
							protected void onNext(RightDuration value) {
								// NO OP?!
							}
						}));
						// lazily create the group for this right value
						DefaultObservable<Right> r = rightGroups.get(value);
						if (r == null) {
							r = new DefaultObservable<Right>();
							rightGroups.put(value, r);
						}
						// announce the group to every open left value, then feed the value into it
						for (Left left : leftActive) {
							observer.next(resultSelector.invoke(left, r));
						}
						r.next(value);
					}
				};
				Closeable c = Closeables.close(o1, o2);
				closeBoth.set(c);
				// register to the sources last, once all bookkeeping is wired up
				o1.add(new Object(), left);
				o2.add(new Object(), right);
				return c;
			}
		};
	}
/**
* Returns an observable where the submitted condition decides whether the <code>then</code> source is allowed to submit values.
* @param <T> the type of the values to observe
* @param condition the condition function
* @param then the source to use when the condition is true
* @return the observable
*/
@Nonnull
public static <T> Observable<T> ifThen(
@Nonnull final Func0<Boolean> condition,
@Nonnull final Observable<? extends T> then) {
return ifThen(condition, then, Reactive.<T>never());
}
/**
* Returns an observable where the submitted condition decides whether the <code>then</code> or <code>orElse</code>
* source is allowed to submit values.
* FIXME not sure how it should work
* @param <T> the type of the values to observe
* @param condition the condition function
* @param then the source to use when the condition is true
* @param orElse the source to use when the condition is false
* @return the observable
*/
@Nonnull
public static <T> Observable<T> ifThen(
@Nonnull final Func0<Boolean> condition,
@Nonnull final Observable<? extends T> then,
@Nonnull final Observable<? extends T> orElse) {
return new Observable<T>() {
@Override
public Closeable register(final Observer<? super T> observer) {
final Closeable s1 = then.register(new Observer<T>() {
@Override
public void error(Throwable ex) {
if (condition.invoke()) {
observer.error(ex);
}
}
@Override
public void finish() {
if (condition.invoke()) {
observer.finish();
}
}
@Override
public void next(T value) {
if (condition.invoke()) {
observer.next(value);
}
}
});
final Closeable s2 = orElse.register(new Observer<T>() {
@Override
public void error(Throwable ex) {
if (!condition.invoke()) {
observer.error(ex);
}
}
@Override
public void finish() {
if (!condition.invoke()) {
observer.finish();
}
}
@Override
public void next(T value) {
if (!condition.invoke()) {
observer.next(value);
}
}
});
return Closeables.close(s1, s2);
}
};
}
/**
* Ignores the next() messages of the source and forwards only the error() and
* finish() messages.
* @param <T> the source element type
* @param source the source of Ts
* @return the new observable
*/
public static <T> Observable<T> ignoreValues(final Observable<? extends T> source) {
return new Observable<T>() {
@Override
public Closeable register(final Observer<? super T> observer) {
return source.register(new Observer<T>() {
@Override
public void error(Throwable ex) {
observer.error(ex);
}
@Override
public void finish() {
observer.finish();
}
@Override
public void next(T value) {
// ignored
}
});
}
};
}
/**
* Invoke a specific action before relaying the Ts to the observable. The <code>action</code> might
* have some effect on each individual Ts passing through this filter.
* @param <T> the type of the values observed
* @param source the source of Ts
* @param action the action to invoke on every T
* @return the new observable
*/
@Nonnull
public static <T> Observable<T> invoke(
@Nonnull final Observable<? extends T> source,
@Nonnull final Action1<? super T> action) {
return new Observable<T>() {
@Override
public Closeable register(final Observer<? super T> observer) {
return source.register(new Observer<T>() {
@Override
public void error(Throwable ex) {
observer.error(ex);
}
@Override
public void finish() {
observer.finish();
}
@Override
public void next(T value) {
action.invoke(value);
observer.next(value);
}
});
}
};
}
/**
* Invoke a specific observer before relaying the Ts, finish() and error() to the observable. The <code>action</code> might
* have some effect on each individual Ts passing through this filter.
* @param <T> the type of the values observed
* @param source the source of Ts
* @param observer the observer to invoke before any registered observers are called
* @return the new observable
*/
@Nonnull
public static <T> Observable<T> invoke(
@Nonnull final Observable<? extends T> source,
@Nonnull final Observer<? super T> observer) {
return new Observable<T>() {
@Override
public Closeable register(final Observer<? super T> o) {
return source.register(new Observer<T>() {
@Override
public void error(Throwable ex) {
observer.error(ex);
o.error(ex);
}
@Override
public void finish() {
observer.finish();
o.finish();
}
@Override
public void next(T value) {
observer.next(value);
o.next(value);
}
});
}
};
}
/**
* Invoke the given callable on the default pool and observe its result via the returned observable.
* Any exception thrown by the callable is relayed via the error() message.
* @param <T> the return type
* @param call the callable
* @return the observable
*/
@Nonnull
public static <T> Observable<T> invokeAsync(
@Nonnull final Callable<? extends T> call) {
return invokeAsync(call, DEFAULT_SCHEDULER.get());
}
/**
* Invoke the given callable on the given pool and observe its result via the returned observable.
* Any exception thrown by the callable is relayed via the error() message.
* @param <T> the return type
* @param call the callable
* @param pool the thread pool
* @return the observable
*/
@Nonnull
public static <T> Observable<T> invokeAsync(
@Nonnull final Callable<? extends T> call,
@Nonnull final Scheduler pool) {
return new Observable<T>() {
@Override
public Closeable register(final Observer<? super T> observer) {
return pool.schedule(new Runnable() {
@Override
public void run() {
try {
observer.next(call.call());
observer.finish();
} catch (Throwable ex) {
observer.error(ex);
}
}
});
}
};
}
/**
* Invoke the given callable on the given pool and observe its result via the returned observable.
* Any exception thrown by the callable is relayed via the error() message.
* @param <T> the return type
* @param run the runnable
* @return the observable
*/
@Nonnull
public static <T> Observable<T> invokeAsync(
@Nonnull final Runnable run) {
return invokeAsync(run, DEFAULT_SCHEDULER.get());
}
/**
* Invoke the given callable on the given pool and observe its result via the returned observable.
* Any exception thrown by the callable is relayed via the error() message.
* @param <T> the return type
* @param run the runnable
* @param pool the thread pool
* @return the observable
*/
@Nonnull
public static <T> Observable<T> invokeAsync(
@Nonnull final Runnable run,
@Nonnull final Scheduler pool) {
return new Observable<T>() {
@Override
public Closeable register(final Observer<? super T> observer) {
return pool.schedule(new Runnable() {
@Override
public void run() {
try {
run.run();
observer.finish();
} catch (Throwable ex) {
observer.error(ex);
}
}
});
}
};
}
/**
* Invoke the given callable on the given pool and observe its result via the returned observable.
* Any exception thrown by the callable is relayed via the error() message.
* @param <T> the return type
* @param run the runnable
* @param defaultValue the value to return when the Runnable completes
* @return the observable
*/
@Nonnull
public static <T> Observable<T> invokeAsync(
@Nonnull final Runnable run,
final T defaultValue) {
return invokeAsync(run, defaultValue, DEFAULT_SCHEDULER.get());
}
/**
* Invoke the given callable on the given pool and observe its result via the returned observable.
* Any exception thrown by the callable is relayed via the error() message.
* @param <T> the return type
* @param run the runnable
* @param pool the thread pool
* @param defaultValue the value to return by default
* @return the observable
*/
@Nonnull
public static <T> Observable<T> invokeAsync(
@Nonnull final Runnable run,
final T defaultValue,
@Nonnull final Scheduler pool) {
return new Observable<T>() {
@Override
public Closeable register(final Observer<? super T> observer) {
return pool.schedule(new Runnable() {
@Override
public void run() {
try {
run.run();
observer.next(defaultValue);
observer.finish();
} catch (Throwable ex) {
observer.error(ex);
}
}
});
}
};
}
/**
* Signals true if the source observable fires finish() without ever firing next().
* This means once the next() is fired, the resulting observer will return early.
* @param source the source observable of any type
* @return the observer
*/
@Nonnull
public static Observable<Boolean> isEmpty(
@Nonnull final Observable<?> source) {
return new Observable<Boolean>() {
@Override
public Closeable register(final Observer<? super Boolean> observer) {
return source.register(new Observer<Object>() {
/** We already determined the answer? */
boolean done;
@Override
public void error(Throwable ex) {
if (!done) {
observer.error(ex);
}
}
@Override
public void finish() {
if (!done) {
done = true;
observer.next(false);
observer.finish();
}
}
@Override
public void next(Object value) {
if (!done) {
done = true;
observer.next(true);
observer.finish();
}
}
});
}
};
}
	/**
	 * Returns an observable which correlates two streams of values based on
	 * their time when they overlapped.
	 * <p>The difference between this operator and the groupJoin operator
	 * is that in this case, the result selector takes the concrete left and
	 * right elements, whereas the groupJoin associates an observable of rights
	 * for each left.</p>
	 * FIXME not sure how to implement it
	 * @param <Left> the element type of the left stream
	 * @param <Right> the element type of the right stream
	 * @param <LeftDuration> the overlapping duration indicator for the left stream (e.g., the event when it leaves)
	 * @param <RightDuration> the overlapping duration indicator for the right stream (e.g., the event when it leaves)
	 * @param <Result> the type of the grouping based on the coincidence.
	 * @param left the left source of elements
	 * @param right the right source of elements
	 * @param leftDurationSelector the duration selector for a left element
	 * @param rightDurationSelector the duration selector for a right element
	 * @param resultSelector the selector which will produce the output value
	 * @return the new observable
	 * @see #groupJoin(Observable, Observable, Func1, Func1, Func2)
	 */
	public static <Left, Right, LeftDuration, RightDuration, Result> Observable<Result> join(
			final Observable<? extends Left> left,
			final Observable<? extends Right> right,
			final Func1<? super Left, ? extends Observable<LeftDuration>> leftDurationSelector,
			final Func1<? super Right, ? extends Observable<RightDuration>> rightDurationSelector,
			final Func2<? super Left, ? super Right, ? extends Result> resultSelector
	) {
		return new Observable<Result>() {
			@Override
			public Closeable register(final Observer<? super Result> observer) {
				// single fair lock shared by both sides so their callbacks never interleave
				final Lock lock = new ReentrantLock(true);
				// the left/right values whose duration window is still open
				final HashSet<Left> leftActive = new HashSet<Left>();
				final HashSet<Right> rightActive = new HashSet<Right>();
				final AtomicReference<Closeable> closeBoth = new AtomicReference<Closeable>();
				DefaultObserverEx<Left> o1 = new DefaultObserverEx<Left>(lock, true) {
					/** Relay the inner error to the outer. */
					void innerError(Throwable ex) {
						error(ex);
					}
					@Override
					protected void onClose() {
						super.onClose();
						// closing one side tears down both registrations
						Closeables.close0(closeBoth.get());
					}
					@Override
					protected void onError(Throwable ex) {
						observer.error(ex);
					}
					@Override
					protected void onFinish() {
						observer.finish();
					}
					@Override
					protected void onNext(final Left value) {
						leftActive.add(value);
						Observable<LeftDuration> completion = leftDurationSelector.invoke(value);
						final Object token = new Object();
						// the duration observable decides how long this left value stays active
						add(token, completion.register(new DefaultObserver<LeftDuration>(lock, true) {
							@Override
							protected void onClose() {
								remove(token);
							}
							@Override
							protected void onError(Throwable ex) {
								innerError(ex);
							}
							@Override
							protected void onFinish() {
								leftActive.remove(value);
							}
							@Override
							protected void onNext(LeftDuration value) {
								// NO OP?
							}
						}));
						// pair the new left value with every currently-open right value
						for (Right r : rightActive) {
							observer.next(resultSelector.invoke(value, r));
						}
					}
				};
				DefaultObserverEx<Right> o2 = new DefaultObserverEx<Right>(lock, true) {
					/** Relay the inner error to the outer. */
					void innerError(Throwable ex) {
						error(ex);
					}
					@Override
					protected void onClose() {
						super.onClose();
						Closeables.close0(closeBoth.get());
					}
					@Override
					protected void onError(Throwable ex) {
						observer.error(ex);
					}
					@Override
					protected void onFinish() {
						observer.finish();
					}
					@Override
					protected void onNext(final Right value) {
						rightActive.add(value);
						Observable<RightDuration> completion = rightDurationSelector.invoke(value);
						final Object token = new Object();
						add(token, completion.register(new DefaultObserver<RightDuration>(lock, true) {
							@Override
							protected void onClose() {
								remove(token);
							}
							@Override
							protected void onError(Throwable ex) {
								innerError(ex);
							}
							@Override
							protected void onFinish() {
								rightActive.remove(value);
							}
							@Override
							protected void onNext(RightDuration value) {
								// NO OP?!
							}
						}));
						// pair the new right value with every currently-open left value
						for (Left left : leftActive) {
							observer.next(resultSelector.invoke(left, value));
						}
					}
				};
				Closeable c = Closeables.close(o1, o2);
				closeBoth.set(c);
				// register to the sources last, once all bookkeeping is wired up
				o1.add(new Object(), left);
				o2.add(new Object(), right);
				return c;
			}
		};
	}
	/**
	 * Returns the last element of the source observable or throws
	 * NoSuchElementException if the source is empty.
	 * <p>Blocks the calling thread until the source terminates.</p>
	 * <p>NOTE(review): an upstream error() is swallowed here and surfaces as a
	 * NoSuchElementException instead of the original exception — confirm this is
	 * intended.</p>
	 * @param <T> the type of the elements
	 * @param source the source of Ts
	 * @return the last element
	 */
	@Nonnull
	public static <T> T last(
			@Nonnull final Observable<? extends T> source) {
		// one-shot handoff between the observer callback and the blocked caller
		final LinkedBlockingQueue<Option<T>> queue = new LinkedBlockingQueue<Option<T>>();
		Closeable c = source.register(new Observer<T>() {
			/** The current value. */
			T current;
			/** Are we the first? */
			boolean first = true;
			@Override
			public void error(Throwable ex) {
				// signal "no result"; the original exception is not preserved
				queue.add(Option.<T>none());
			}
			@Override
			public void finish() {
				if (first) {
					// finished without any next(): empty source
					queue.add(Option.<T>none());
				} else {
					queue.add(Option.some(current));
				}
			}
			@Override
			public void next(T value) {
				first = false;
				current = value;
			}
		});
		try {
			Option<T> value = queue.take();
			c.close();
			// identity comparison — assumes Option.none() returns a singleton; TODO confirm
			if (value == Option.none()) {
				throw new NoSuchElementException();
			}
			return value.value();
		} catch (InterruptedException e) {
			throw new RuntimeException(e);
		} catch (IOException ex) {
			throw new RuntimeException(ex);
		}
	}
	/**
	 * Returns an iterable which returns values on a momentary basis from the
	 * source. Useful when source produces values at different rate than the consumer takes it.
	 * The iterable.next() call might block until the first value becomes available or something else happens in the observable
	 * FIXME not sure where the observer should run
	 * @param <T> the type of the values
	 * @param source the source
	 * @return the iterable
	 */
	@Nonnull
	public static <T> Iterable<T> latest(
			@Nonnull final Observable<? extends T> source) {
		return new Iterable<T>() {
			@Override
			public Iterator<T> iterator() {
				// set once the source terminated (finish or error; the error detail is discarded)
				final AtomicBoolean complete = new AtomicBoolean();
				// released on the very first notification of any kind
				final CountDownLatch first = new CountDownLatch(1);
				// set once at least one value has been observed
				final AtomicBoolean hasValue = new AtomicBoolean();
				// the most recent value seen from the source
				final AtomicReference<T> current = new AtomicReference<T>();
				final Closeable c = source.register(new Observer<T>() {
					/** Set the has value once. */
					boolean once = true;
					@Override
					public void error(Throwable ex) {
						complete.set(true);
						first.countDown();
					}
					@Override
					public void finish() {
						complete.set(true);
						first.countDown();
					}
					@Override
					public void next(T value) {
						if (once) {
							once = false;
							hasValue.set(true);
						}
						current.set(value);
						first.countDown();
					}
				});
				return new Iterator<T>() {
					@Override
					protected void finalize() throws Throwable {
						// NOTE(review): unregistration relies on GC finalization only
						c.close();
					}
					@Override
					public boolean hasNext() {
						try {
							// block until the first notification arrived
							first.await();
						} catch (InterruptedException e) {
							throw new RuntimeException(e);
						}
						// NOTE(review): once the source completes, hasNext() turns false even
						// though a last value may still be buffered — confirm this is intended
						return !complete.get() && hasValue.get();
					}
					@Override
					public T next() {
						if (hasValue.get()) {
							// may repeat the same value if the consumer outpaces the producer
							return current.get();
						}
						throw new NoSuchElementException();
					}
					@Override
					public void remove() {
						throw new UnsupportedOperationException();
					}
				};
			}
		};
	}
/**
* Returns an observable which calls the given selector with the given value
* when a client wants to register with it. The client then
* gets registered with the observable returned by the function.
* E.g., <code>return selector.invoke(value).register(observer)</code> in the outer register method.
* @param <T> the selection key type
* @param <U> the result type
* @param value the value to pass to the selector function
* @param selector the selector function
* @return a new observable
*/
public static <T, U> Observable<U> let(
final T value,
final Func1<? super T, ? extends Observable<U>> selector) {
return new Observable<U>() {
@Override
public Closeable register(Observer<? super U> observer) {
return selector.invoke(value).register(observer);
}
};
}
/**
* Uses the selector function on the given source observable to extract a single
* value and send this value to the registered observer.
* It is sometimes called the comonadic bind operator and compared to the ContinueWith
* semantics.
* The default scheduler is used to emit the output value
* FIXME not sure what it should do
* @param <T> the source element type
* @param <U> the result element type
* @param source the source of Ts
* @param selector the selector that extracts an U from the series of Ts.
* @return the new observable.
*/
public static <T, U> Observable<U> manySelect0(
final Observable<? extends T> source,
final Func1<? super Observable<T>, ? extends U> selector) {
return manySelect(source, selector, DEFAULT_SCHEDULER.get());
}
	/**
	 * For each value of the source observable, it creates a view starting from that value into the source
	 * and calls the given selector function asynchronously on the given scheduler.
	 * The result of that computation is then transmitted to the observer.
	 * <p>It is sometimes called the comonadic bind operator and compared to the ContinueWith
	 * semantics.</p>
	 * @param <T> the source element type
	 * @param <U> the result element type
	 * @param source the source of Ts
	 * @param selector the selector that extracts an U from the series of Ts.
	 * @param scheduler the scheduler where the extracted U will be emitted from.
	 * @return the new observable.
	 */
	public static <T, U> Observable<U> manySelect(
			final Observable<? extends T> source,
			final Func1<? super Observable<T>, ? extends U> selector,
			final Scheduler scheduler) {
		return new Observable<U>() {
			@Override
			public Closeable register(final Observer<? super U> observer) {
				// counts the source registration (1) plus every scheduled, not-yet-run selector task
				final AtomicInteger wip = new AtomicInteger(1);
				Closeable c = source.register(new DefaultObserverEx<T>(true) {
					/** The skip position. */
					int counter;
					@Override
					protected void onNext(T value) {
						// view of the source starting at the current element
						final Observable<T> ot = skip(source, counter);
						wip.incrementAndGet();
						// the pre-increment counter doubles as the registration token
						add(counter, scheduler.schedule(new Runnable() {
							@Override
							public void run() {
								observer.next(selector.invoke(ot));
								// the last task completing after onFinish() closes the output
								if (wip.decrementAndGet() == 0) {
									observer.finish();
								}
							}
						}));
						counter++;
					}
					@Override
					protected void onError(Throwable ex) {
						observer.error(ex);
						close();
					}
					@Override
					protected void onFinish() {
						// release the source's own wip slot
						if (wip.decrementAndGet() == 0) {
							observer.finish();
						}
					}
				});
				return c;
			}
		};
	}
/**
* For each of the source elements, creates a view of the source starting with the given
* element and calls the selector function. The function's return observable is then merged
* into a single observable sequence.<p>
* For example, a source sequence of (1, 2, 3) will create three function calls with (1, 2, 3), (2, 3) and (3) as a content.
* @param <T> the source element type
* @param <U> the result element type
* @param source the source of Ts
* @param selector the selector function
* @return the new observable
*/
public static <T, U> Observable<U> manySelect(
final Observable<? extends T> source,
final Func1<? super Observable<T>, ? extends Observable<U>> selector
) {
return merge(select(source, new Func1<T, Observable<U>>() {
/** The skip position. */
int counter;
@Override
public Observable<U> invoke(T param1) {
int i = counter++;
return selector.invoke(skip(source, i));
}
}));
}
/**
* Returns an observable which converts all messages to an <code>Option</code> value.
* The returned observable does not itself signal error or finish.
* Its dual is the <code>dematerialize</code> method.
* @param <T> the source element type
* @param source the source of Ts
* @return the new observable
* @see #dematerialize(Observable)
*/
@Nonnull
public static <T> Observable<Option<T>> materialize(
@Nonnull final Observable<? extends T> source) {
return new Observable<Option<T>>() {
@Override
public Closeable register(final Observer<? super Option<T>> observer) {
return source.register(new Observer<T>() {
@Override
public void error(Throwable ex) {
observer.next(Option.<T>error(ex));
}
@Override
public void finish() {
observer.next(Option.<T>none());
}
@Override
public void next(T value) {
observer.next(Option.some(value));
}
});
}
};
};
/**
* Returns the maximum value encountered in the source observable once it sends finish().
* @param <T> the element type which must be comparable to itself
* @param source the source of integers
* @return the the maximum value
*/
@Nonnull
public static <T extends Comparable<? super T>> Observable<T> max(
@Nonnull final Observable<? extends T> source) {
return aggregate(source, Functions.<T>max(), Functions.<T, Integer>identityFirst());
}
/**
* Returns the maximum value encountered in the source observable once it sends finish().
* @param <T> the element type
* @param source the source of integers
* @param comparator the comparator to decide the relation of values
* @return the the maximum value
* @see Functions#asComparator(Func2)
*/
@Nonnull
public static <T> Observable<T> max(
@Nonnull final Observable<T> source,
@Nonnull final Comparator<T> comparator) {
return aggregate(source, Functions.<T>max(comparator), Functions.<T, Integer>identityFirst());
}
/**
* Returns an observable which provides with the list of <code>T</code>s which had their keys as maximums.
* The returned observer may finish() if the source sends finish() without any next().
* The generated list is modifiable.
* @param <T> the type of elements
* @param <Key> the key type, which must be comparable to itself
* @param source the source of <code>T</code>s
* @param keyExtractor the key extractor to produce <code>Key</code>s from <code>T</code>s.
* @return the observable for the maximum keyed Ts
*/
@Nonnull
public static <T, Key extends Comparable<? super Key>> Observable<List<T>> maxBy(
@Nonnull final Observable<? extends T> source,
@Nonnull final Func1<? super T, ? extends Key> keyExtractor) {
return minMax(source, keyExtractor, Functions.<Key>comparator(), true);
}
/**
* Returns an observable which provides with the list of <code>T</code>s which had their keys as maximums.
* The returned observer may finish() if the source sends finish() without any next().
* The generated list is modifiable.
* @param <T> the type of elements
* @param <Key> the key type
* @param source the source of <code>T</code>s
* @param keyExtractor the key extractor to produce <code>Key</code>s from <code>T</code>s.
* @param keyComparator the comparator for the keys
* @return the observable for the maximum keyed Ts
*/
@Nonnull
public static <T, Key> Observable<List<T>> maxBy(
@Nonnull final Observable<? extends T> source,
@Nonnull final Func1<? super T, ? extends Key> keyExtractor,
@Nonnull final Comparator<? super Key> keyComparator) {
return minMax(source, keyExtractor, keyComparator, true);
}
	/**
	 * Combines the notifications of all sources. The resulting stream of Ts might come from any of the sources.
	 * @param <T> the type of the values
	 * @param sources the list of sources
	 * @return the observable
	 */
	@Nonnull
	public static <T> Observable<T> merge(
			@Nonnull final Iterable<? extends Observable<? extends T>> sources) {
		return new Observable<T>() {
			@Override
			public Closeable register(final Observer<? super T> observer) {
				final List<Closeable> disposables = new ArrayList<Closeable>();
				// snapshot the sources so the total count is known up front
				List<Observable<? extends T>> sourcesList = new ArrayList<Observable<? extends T>>();
				for (Observable<? extends T> os : sources) {
					sourcesList.add(os);
				}
				// the extra +1 prevents a premature finish while registrations are still in progress
				final AtomicInteger wip = new AtomicInteger(sourcesList.size() + 1);
				final List<DefaultObserver<T>> observers = new ArrayList<DefaultObserver<T>>();
				// one shared lock serializes the notifications forwarded to the outer observer
				final Lock lock = new ReentrantLock();
				for (int i = 0; i < sourcesList.size(); i++) {
					final int j = i;
					DefaultObserver<T> obs = new DefaultObserver<T>(lock, true) {
						@Override
						public void onError(Throwable ex) {
							// first error wins: relay it and close every other inner observer
							observer.error(ex);
							for (int k = 0; k < observers.size(); k++) {
								if (k != j) {
									observers.get(k).close();
								}
							}
						}
						@Override
						public void onFinish() {
							// output finishes only when all sources (and the guard) are done
							if (wip.decrementAndGet() == 0) {
								observer.finish();
							}
						}
						@Override
						public void onNext(T value) {
							observer.next(value);
						}
					};
					observers.add(obs);
					disposables.add(obs);
				}
				// register only after all inner observers exist, so onError can close siblings
				for (int i = 0; i < observers.size(); i++) {
					disposables.add(sourcesList.get(i).register(observers.get(i)));
				}
				// release the registration guard; finishes immediately for an empty source list
				if (wip.decrementAndGet() == 0) {
					observer.finish();
				}
				return Closeables.closeAll(disposables);
			}
		};
	};
	/**
	 * Merge the dynamic sequence of observables of T.
	 * <p><b>Exception semantics:</b> if the sources or any inner observer signals an
	 * error, the outer observable will signal that error and all active source observers are terminated.</p>
	 * @param <T> the element type
	 * @param sources the observable sequence of observable sequence of Ts
	 * @return the new observable
	 */
	public static <T> Observable<T> merge(
			final Observable<? extends Observable<T>> sources) {
		return new Observable<T>() {
			@Override
			public Closeable register(final Observer<? super T> observer) {
				// counts the outer source (1) plus every currently active inner observable
				final AtomicInteger wip = new AtomicInteger(1);
				DefaultObserverEx<Observable<T>> obs = new DefaultObserverEx<Observable<T>>(false) {
					/**
					 * The inner exception to forward.
					 * @param ex the exception
					 */
					void innerError(Throwable ex) {
						error(ex);
					}
					/** Signal finish if the sources and inner observables have all finished. */
					void ifDoneFinish() {
						if (wip.decrementAndGet() == 0) {
							observer.finish();
							close();
						}
					}
					@Override
					protected void onError(Throwable ex) {
						observer.error(ex);
					}
					@Override
					protected void onFinish() {
						// the outer source finished; inner streams may still be running
						ifDoneFinish();
					}
					@Override
					protected void onNext(Observable<T> value) {
						// unique token so this inner registration can be removed individually
						final Object token = new Object();
						wip.incrementAndGet();
						add(token, value.register(new DefaultObserver<T>(lock, true) {
							@Override
							public void onError(Throwable ex) {
								innerError(ex);
							}
							@Override
							public void onFinish() {
								remove(token);
								ifDoneFinish();
							}
							@Override
							public void onNext(T value) {
								observer.next(value);
							}
						}));
					}
				};
				obs.add("sources", sources);
				return obs;
			}
		};
	}
/**
* Merge the events of two observable sequences.
* @param <T> the type of the elements
* @param first the first observable
* @param second the second observable
* @return the merged observable
*/
@Nonnull
public static <T> Observable<T> merge(
@Nonnull Observable<? extends T> first,
@Nonnull Observable<? extends T> second) {
List<Observable<? extends T>> list = new ArrayList<Observable<? extends T>>();
list.add(first);
list.add(second);
return merge(list);
}
/**
* Returns the minimum value encountered in the source observable once it sends finish().
* @param <T> the element type which must be comparable to itself
* @param source the source of integers
* @return the the minimum value
*/
@Nonnull
public static <T extends Comparable<? super T>> Observable<T> min(
@Nonnull final Observable<? extends T> source) {
return aggregate(source, Functions.<T>min(), Functions.<T, Integer>identityFirst());
}
/**
* Returns the minimum value encountered in the source observable once it sends finish().
* @param <T> the element type
* @param source the source of integers
* @param comparator the comparator to decide the relation of values
* @return the the minimum value
* @see Functions#asComparator(Func2)
*/
@Nonnull
public static <T> Observable<T> min(
@Nonnull final Observable<? extends T> source,
@Nonnull final Comparator<? super T> comparator) {
return aggregate(source, Functions.<T>min(comparator), Functions.<T, Integer>identityFirst());
}
/**
* Returns an observable which provides with the list of <code>T</code>s which had their keys as minimums.
* The returned observer may finish() if the source sends finish() without any next().
* The generated list is modifiable.
* @param <T> the type of elements
* @param <Key> the key type, which must be comparable to itself
* @param source the source of <code>T</code>s
* @param keyExtractor the key extractor to produce <code>Key</code>s from <code>T</code>s.
* @return the observable for the minimum keyed Ts
*/
@Nonnull
public static <T, Key extends Comparable<? super Key>> Observable<List<T>> minBy(
@Nonnull final Observable<? extends T> source,
@Nonnull final Func1<? super T, ? extends Key> keyExtractor) {
return minMax(source, keyExtractor, Functions.<Key>comparator(), false);
}
/**
* Returns an observable which provides with the list of <code>T</code>s which had their keys as minimums.
* The returned observer may finish() if the source sends finish() without any next().
* The generated list is modifiable.
* @param <T> the type of elements
* @param <Key> the key type
* @param source the source of <code>T</code>s
* @param keyExtractor the key extractor to produce <code>Key</code>s from <code>T</code>s.
* @param keyComparator the comparator for the keys
* @return the observable for the minimum keyed Ts
* @see Functions#asComparator(Func2)
*/
@Nonnull
public static <T, Key> Observable<List<T>> minBy(
@Nonnull final Observable<? extends T> source,
@Nonnull final Func1<? super T, ? extends Key> keyExtractor,
@Nonnull final Comparator<? super Key> keyComparator) {
return minMax(source, keyExtractor, keyComparator, false);
}
/**
* Returns an observable which provides with the list of <code>T</code>s which had their keys as maximums.
* The returned observer may finish() if the source sends finish() without any next().
* The generated list is modifiable.
* @param <T> the type of elements
* @param <Key> the key type
* @param source the source of <code>T</code>s
* @param keyExtractor the key extractor to produce <code>Key</code>s from <code>T</code>s.
* @param keyComparator the comparator for the keys
* @param max compute the maximums?
* @return the observable for the maximum keyed Ts
* @see Functions#asComparator(Func2)
*/
@Nonnull
public static <T, Key> Observable<List<T>> minMax(
@Nonnull final Observable<? extends T> source,
@Nonnull final Func1<? super T, ? extends Key> keyExtractor,
@Nonnull final Comparator<? super Key> keyComparator,
@Nonnull final boolean max
) {
return new Observable<List<T>>() {
@Override
public Closeable register(final Observer<? super List<T>> observer) {
return source.register(new Observer<T>() {
/** The current collection for the minimum of Ts. */
List<T> collect;
/** The current minimum value. */
Key maxKey;
@Override
public void error(Throwable ex) {
observer.error(ex);
}
@Override
public void finish() {
if (collect != null) {
observer.next(collect);
}
observer.finish();
}
@Override
public void next(T value) {
Key key = keyExtractor.invoke(value);
if (collect == null) {
maxKey = key;
collect = new ArrayList<T>();
collect.add(value);
} else {
int order = keyComparator.compare(maxKey, key);
if (order == 0) {
collect.add(value);
} else
if (max ^ (order > 0)) {
maxKey = key;
collect = new ArrayList<T>();
collect.add(value);
}
}
}
});
}
};
}
	/**
	 * Samples the latest T value coming from the source observable or the initial
	 * value when no messages arrived so far. If the producer and consumer run
	 * on different speeds, the consumer might receive the same value multiple times.
	 * The iterable sequence terminates if the source finishes or returns an error.
	 * <p>The returned iterator throws <code>UnsupportedOperationException</code> for its <code>remove()</code> method.</p>
	 * @param <T> the source element type
	 * @param source the source of Ts
	 * @param initialValue the initial value to return until the source actually produces something.
	 * @return the iterable
	 */
	public static <T> Iterable<T> mostRecent(final Observable<? extends T> source, final T initialValue) {
		return new Iterable<T>() {
			@Override
			public Iterator<T> iterator() {
				// the latest notification: some(value), none() after finish, error(ex) after failure
				final AtomicReference<Option<T>> latest = new AtomicReference<Option<T>>(Option.some(initialValue));
				final Closeable c = source.register(new Observer<T>() {
					@Override
					public void error(Throwable ex) {
						latest.set(Option.<T>error(ex));
					}
					@Override
					public void finish() {
						latest.set(Option.<T>none());
					}
					@Override
					public void next(T value) {
						latest.set(Option.some(value));
					}
				});
				return new Iterator<T>() {
					@Override
					protected void finalize() throws Throwable {
						// NOTE(review): unregistration happens only via GC finalization
						Closeables.close0(c);
						super.finalize();
					}
					@Override
					public boolean hasNext() {
						// the iterator ends once the source terminated (none marker)
						return !Option.isNone(latest.get());
					}
					@Override
					public T next() {
						if (hasNext()) {
							Option<T> o = latest.get();
							// if the latest value is error, emit it only once, then
							// do if the source simply terminated
							if (Option.isError(o)) {
								latest.set(Option.<T>none());
								return o.value();
							}
							return o.value();
						}
						throw new NoSuchElementException();
					}
					@Override
					public void remove() {
						throw new UnsupportedOperationException();
					}
				};
			}
		};
	}
/**
* Returns an observable which remains connected to the <code>source</code>
* observable as long as there is at least one registration to this output observable.
* <p>The <code>observer</code> and <code>observable</code> parameters should denote
* the same object which implements both Observable and Observer interfaces.</p>
* @param <T> the source element type
* @param <U> the result element type
* @param source the source elements
* @param observer the observer that listens for Ts. Should be the same object as observable.
* @param observable the observable that will produce Us. Should be the same object as observable.
* @return the new observable
*/
public static <T, U> Observable<U> multicast(final Observable<? extends T> source,
final Observer<? super T> observer, final Observable<? extends U> observable) {
final Closeable outer = source.register(observer);
final AtomicInteger wip = new AtomicInteger();
return new Observable<U>() {
@Override
public Closeable register(Observer<? super U> o) {
wip.incrementAndGet();
final Closeable inner = observable.register(o);
return new Closeable() {
@Override
public void close() throws IOException {
inner.close();
if (wip.decrementAndGet() == 0) {
Closeables.close0(outer);
}
}
};
}
};
}
/**
* Returns an observable which never fires.
* @param <T> the type of the observable, irrelevant
* @return the observable
*/
@Nonnull
public static <T> Observable<T> never() {
return new Observable<T>() {
@Override
public Closeable register(Observer<? super T> observer) {
return Functions.EMPTY_CLOSEABLE;
}
};
}
/**
* Returns an iterable which returns a single element from the
* given source then terminates. It blocks the current thread.
* <p>For hot observables, this
* will be the first element they produce, for cold observables,
* this will be the next value (e.g., the next mouse move event).</p>
* <p><b>Exception semantics:</b> The <code>Iterator.next()</code> will rethrow the exception.</p>
* <p><b>Completion semantics:</b> If the source completes instantly, the iterator completes as empty.</p>
* <p>The returned iterator will throw an <code>UnsupportedOperationException</code> for its
* <code>remove()</code> method.
* @param <T> the element type
* @param source the source of elements
* @return the iterable
*/
public static <T> Iterable<T> next(final Observable<? extends T> source) {
return new Iterable<T>() {
@Override
public Iterator<T> iterator() {
final BlockingQueue<Option<T>> element = new LinkedBlockingQueue<Option<T>>();
final Closeable c = source.register(new DefaultObserver<T>(true) {
@Override
protected void onError(Throwable ex) {
element.add(Option.<T>error(ex));
}
@Override
protected void onFinish() {
element.add(Option.<T>none());
}
@Override
protected void onNext(T value) {
element.add(Option.some(value));
close();
}
});
return new Iterator<T>() {
/** The completion marker. */
boolean done;
/** The single element look-ahead. */
final SingleContainer<Option<T>> peek = new SingleContainer<Option<T>>();
@Override
public boolean hasNext() {
if (!done) {
if (peek.isEmpty()) {
try {
Option<T> e = element.take();
if (!Option.isNone(e)) {
peek.add(e);
}
} catch (InterruptedException ex) {
peek.add(Option.<T>error(ex));
}
done = true;
Closeables.close0(c);
}
}
return !peek.isEmpty() && !done;
}
@Override
public T next() {
if (hasNext()) {
return element.peek().value();
}
throw new NoSuchElementException();
}
@Override
public void remove() {
throw new UnsupportedOperationException();
};
};
}
};
}
	/**
	 * Wrap the given observable object in a way that any of its observers receive callbacks on
	 * the given thread pool.
	 * @param <T> the type of the objects to observe
	 * @param source the original observable
	 * @param pool the target scheduler to deliver the notifications on
	 * @return the new observable
	 */
	@Nonnull
	public static <T> Observable<T> observeOn(
			@Nonnull final Observable<? extends T> source,
			@Nonnull final Scheduler pool) {
		return new Observable<T>() {
			@Override
			public Closeable register(final Observer<? super T> observer) {
				DefaultObserverEx<T> obs = new DefaultObserverEx<T>(true) {
					/** The single lane executor: preserves the order of the queued notifications. */
					final SingleLaneExecutor<Runnable> run = new SingleLaneExecutor<Runnable>(pool,
							new Action1<Runnable>() {
								@Override
								public void invoke(Runnable value) {
									value.run();
								}
							}
					);
					// instance initializer: register with the source under a named token
					{
						add("source", source);
					}
					@Override
					public void onError(final Throwable ex) {
						run.add(new Runnable() {
							@Override
							public void run() {
								observer.error(ex);
							}
						});
					}
					@Override
					public void onFinish() {
						run.add(new Runnable() {
							@Override
							public void run() {
								observer.finish();
							}
						});
					}
					@Override
					public void onNext(final T value) {
						run.add(new Runnable() {
							@Override
							public void run() {
								observer.next(value);
							}
						});
					}
				};
				// the observer itself acts as the unregistration handle
				return obs;
			}
		};
	}
/**
* Returns an Observable which traverses the entire
* source Observable and creates an ordered list
* of elements. Once the source Observable completes,
* the elements are streamed to the output.
* @param <T> the source element type, must be self comparable
* @param source the source of Ts
* @return the new iterable
*/
@Nonnull
public static <T extends Comparable<? super T>> Observable<T> orderBy(
@Nonnull final Observable<? extends T> source
) {
return orderBy(source, Functions.<T>identity(), Functions.<T>comparator());
}
/**
* Returns an Observable which traverses the entire
* source Observable and creates an ordered list
* of elements. Once the source Observable completes,
* the elements are streamed to the output.
* @param <T> the source element type, must be self comparable
* @param source the source of Ts
* @param comparator the value comparator
* @return the new iterable
*/
@Nonnull
public static <T> Observable<T> orderBy(
@Nonnull final Observable<? extends T> source,
@Nonnull final Comparator<? super T> comparator
) {
return orderBy(source, Functions.<T>identity(), comparator);
}
/**
* Returns an Observable which traverses the entire
* source Observable and creates an ordered list
* of elements. Once the source Observable completes,
* the elements are streamed to the output.
* @param <T> the source element type
* @param <U> the key type for the ordering, must be self comparable
* @param source the source of Ts
* @param keySelector the key selector for comparison
* @return the new iterable
*/
@Nonnull
public static <T, U extends Comparable<? super U>> Observable<T> orderBy(
@Nonnull final Observable<? extends T> source,
@Nonnull final Func1<? super T, ? extends U> keySelector
) {
return orderBy(source, keySelector, Functions.<U>comparator());
}
/**
* Returns an Observable which traverses the entire
* source Observable and creates an ordered list
* of elements. Once the source iterator completes,
* the elements are streamed to the output.
* <p>Note that it buffers the elements of <code>source</code> until it
* signals finish.</p>
* <p><b>Exception semantics:</b> the exception is relayed and no ordering is performed.</p>
* <p><b>Completion semantics:</b> the output terminates when the source terminates and the sorted values are all submitted.</p>
* @param <T> the source element type
* @param <U> the key type for the ordering
* @param source the source of Ts
* @param keySelector the key selector for comparison
* @param keyComparator the key comparator function
* @return the new iterable
*/
@Nonnull
public static <T, U> Observable<T> orderBy(
@Nonnull final Observable<? extends T> source,
@Nonnull final Func1<? super T, ? extends U> keySelector,
@Nonnull final Comparator<? super U> keyComparator
) {
return new Observable<T>() {
@Override
public Closeable register(final Observer<? super T> observer) {
return source.register(new Observer<T>() {
/** The buffer. */
final List<T> buffer = new ArrayList<T>();
@Override
public void error(Throwable ex) {
observer.error(ex);
}
@Override
public void finish() {
Collections.sort(buffer, new Comparator<T>() {
@Override
public int compare(T o1, T o2) {
return keyComparator.compare(keySelector.invoke(o1), keySelector.invoke(o2));
};
});
for (T t : buffer) {
observer.next(t);
}
observer.finish();
}
@Override
public void next(T value) {
buffer.add(value);
}
});
}
};
}
/**
* Creates an observer with debugging purposes.
* It prints the submitted values to STDOUT separated by commas and line-broken by 80 characters, the exceptions to STDERR
* and prints an empty newline when it receives a finish().
* @param <T> the value type
* @return the observer
*/
@Nonnull
public static <T> Observer<T> print() {
return print(", ", 80);
}
/**
* Creates an observer with debugging purposes.
* It prints the submitted values to STDOUT, the exceptions to STDERR
* and prints an empty newline when it receives a finish().
* @param <T> the value type
* @param separator the separator to use between subsequent values
* @param maxLineLength how many characters to print into each line
* @return the observer
*/
@Nonnull
public static <T> Observer<T> print(
final String separator,
final int maxLineLength) {
return new Observer<T>() {
/** Indicator for the first element. */
boolean first = true;
/** The current line length. */
int len;
@Override
public void error(Throwable ex) {
ex.printStackTrace();
}
@Override
public void finish() {
System.out.println();
}
@Override
public void next(T value) {
String s = String.valueOf(value);
if (first) {
first = false;
System.out.print(s);
len = s.length();
} else {
if (len + separator.length() + s.length() > maxLineLength) {
if (len == 0) {
System.out.print(separator);
System.out.print(s);
len = s.length() + separator.length();
} else {
System.out.println(separator);
System.out.print(s);
len = s.length();
}
} else {
System.out.print(separator);
System.out.print(s);
len += s.length() + separator.length();
}
}
};
};
}
/**
* Creates an observer with debugging purposes.
* It prints the submitted values to STDOUT with a line break, the exceptions to STDERR
* and prints an empty newline when it receives a finish().
* @param <T> the value type
* @return the observer
*/
@Nonnull
public static <T> Observer<T> println() {
return new Observer<T>() {
@Override
public void error(Throwable ex) {
ex.printStackTrace();
}
@Override
public void finish() {
System.out.println();
}
@Override
public void next(T value) {
System.out.println(value);
};
};
}
/**
* Creates an observer with debugging purposes.
* It prints the submitted values to STDOUT with a line break, the exceptions to STDERR
* and prints an empty newline when it receives a finish().
* @param <T> the value type
* @param prefix the prefix to use when printing
* @return the observer
*/
@Nonnull
public static <T> Observer<T> println(final String prefix) {
return new Observer<T>() {
@Override
public void error(Throwable ex) {
System.err.print(prefix);
ex.printStackTrace();
}
@Override
public void finish() {
System.out.print(prefix);
System.out.println();
}
@Override
public void next(T value) {
System.out.print(prefix);
System.out.println(value);
};
};
}
/**
* Returns an observable which shares all registration to the source observable and
* each observer will only see the last notification.
* <p>Basically a replay with buffer size 1.</p>
* @param <T> the source element type
* @param source the source of Ts
* @return the observable
*/
public static <T> Observable<T> prune(
final Observable<? extends T> source
) {
return replay(source, 1);
}
/**
* Returns an observable which shares all registration to the source observable and
* each observer will only see the last notification.
* <p>Basically a replay with buffer size 1.</p>
* @param <T> the source element type
* @param <U> the return element type
* @param source the source of Ts
* @param selector the output stream selector
* @return the observable
*/
public static <T, U> Observable<U> prune(
final Observable<? extends T> source,
final Func1<? super Observable<? extends T>, ? extends Observable<U>> selector
) {
return replay(source, selector, 1);
}
/**
 * Shares every registration to the given source through the selector's
 * output stream and lets each observer see only the most recent notification,
 * replayed on the supplied scheduler.
 * <p>Equivalent to a replay with a buffer of size 1.</p>
 * @param <T> the source element type
 * @param <U> the return element type
 * @param source the source of Ts
 * @param selector the output stream selector
 * @param scheduler the scheduler for replaying the single value
 * @return the observable
 */
public static <T, U> Observable<U> prune(
		final Observable<? extends T> source,
		final Func1<? super Observable<? extends T>, ? extends Observable<U>> selector,
		final Scheduler scheduler
) {
	return replay(source, selector, 1, scheduler);
}
/**
 * Shares every registration to the given source and lets each observer
 * see only the most recent notification, replayed on the supplied scheduler.
 * <p>Equivalent to a replay with a buffer of size 1.</p>
 * @param <T> the source element type
 * @param source the source of Ts
 * @param scheduler the scheduler for replaying the single value
 * @return the observable
 */
public static <T> Observable<T> prune(
		final Observable<? extends T> source,
		final Scheduler scheduler
) {
	return replay(source, 1, scheduler);
}
/**
 * Multicasts the source: all observers share one single underlying
 * registration, established on the default scheduler.
 * @param <T> the element type
 * @param source the source of Ts
 * @return the new observable
 */
public static <T> Observable<T> publish(final Observable<? extends T> source) {
	return publish(source, DEFAULT_SCHEDULER.get());
}
/**
 * Multicasts the selector's output stream: all observers share one single
 * underlying registration.
 * <p>NOTE(review): this publishes the <em>result</em> of
 * {@code selector.invoke(source)} rather than applying the selector to an
 * already-published source — confirm that ordering is intended (all sibling
 * overloads in this file do the same).</p>
 * @param <T> the element type
 * @param <U> the result type
 * @param source the source of Ts
 * @param selector the selector function for the return stream
 * @return the new observable
 */
public static <T, U> Observable<U> publish(
		final Observable<? extends T> source,
		final Func1<? super Observable<? extends T>, ? extends Observable<U>> selector
) {
	return publish(selector.invoke(source));
}
/**
 * Multicasts the selector's output stream on the given scheduler: all
 * observers share one single underlying registration.
 * @param <T> the element type
 * @param <U> the result type
 * @param source the source of Ts
 * @param selector the selector function for the return stream
 * @param scheduler the scheduler where the values will be replayed
 * @return the new observable
 */
public static <T, U> Observable<U> publish(
		final Observable<? extends T> source,
		final Func1<? super Observable<? extends T>, ? extends Observable<U>> selector,
		final Scheduler scheduler
) {
	return publish(selector.invoke(source), scheduler);
}
/**
 * Multicasts the selector's output stream, prepending the given initial
 * value: all observers share one single underlying registration.
 * @param <T> the element type
 * @param <U> the result type
 * @param source the source of Ts
 * @param selector the selector function for the return stream
 * @param initialValue the initial stream value
 * @return the new observable
 */
public static <T, U> Observable<U> publish(
		final Observable<? extends T> source,
		final Func1<? super Observable<? extends T>, ? extends Observable<U>> selector,
		final U initialValue
) {
	return publish(selector.invoke(source), initialValue);
}
/**
 * Multicasts the selector's output stream on the given scheduler,
 * prepending the given initial value: all observers share one single
 * underlying registration.
 * @param <T> the element type
 * @param <U> the result type
 * @param source the source of Ts
 * @param selector the selector function for the return stream
 * @param initialValue the initial stream value
 * @param scheduler the scheduler where the values will be replayed
 * @return the new observable
 */
public static <T, U> Observable<U> publish(
		final Observable<? extends T> source,
		final Func1<? super Observable<? extends T>, ? extends Observable<U>> selector,
		final U initialValue,
		final Scheduler scheduler
) {
	return publish(selector.invoke(source), initialValue, scheduler);
}
/**
 * Returns an observable which shares a single subscription to the underlying source.
 * <p>The first {@code register()} call lazily creates a shared
 * {@code DefaultObservable} and registers it (via {@code observeOn}) with the
 * source; later registrations only attach to that shared hub.</p>
 * <p>NOTE(review): the registration to the underlying source is never closed
 * and {@code obs} is never reset, so the source connection lives for the
 * lifetime of this observable — confirm this is intended.</p>
 * @param <T> the element type
 * @param source the source of Ts
 * @param scheduler the scheduler where the values will be replayed
 * @return the new observable
 */
public static <T> Observable<T> publish(
	final Observable<? extends T> source,
	final Scheduler scheduler
) {
	return new Observable<T>() {
		/** The first registree to initialize the common observer. */
		final Lock lock = new ReentrantLock();
		@GuardedBy("lock")
		DefaultObservable<T> obs;
		@Override
		public Closeable register(Observer<? super T> observer) {
			lock.lock();
			try {
				// lazy, one-time connection of the shared hub to the source
				if (obs == null) {
					obs = new DefaultObservable<T>();
					observeOn(source, scheduler).register(obs);
				}
			} finally {
				lock.unlock();
			}
			return obs.register(observer);
		}
	};
}
/**
 * Multicasts the source prefixed with an initial value: all observers
 * share one single underlying registration, established on the default
 * scheduler.
 * @param <T> the element type
 * @param source the source of Ts
 * @param initialValue the initial stream value
 * @return the new observable
 */
public static <T> Observable<T> publish(
		final Observable<? extends T> source,
		final T initialValue
) {
	return publish(source, initialValue, DEFAULT_SCHEDULER.get());
}
/**
 * Multicasts the source prefixed with an initial value on the given
 * scheduler: all observers share one single underlying registration.
 * @param <T> the element type
 * @param source the source of Ts
 * @param initialValue the initial stream value
 * @param scheduler the scheduler where the values will be replayed
 * @return the new observable
 */
public static <T> Observable<T> publish(
		final Observable<? extends T> source,
		final T initialValue,
		final Scheduler scheduler
) {
	return publish(startWith(source, initialValue, scheduler), scheduler);
}
/**
 * Produces {@code count} BigDecimal values starting at {@code start},
 * each subsequent value increased by {@code step}, on the default scheduler.
 * @param start the start value.
 * @param count the count
 * @param step the stepping
 * @return the observable
 */
@Nonnull
public static Observable<BigDecimal> range(
		@Nonnull final BigDecimal start,
		final int count,
		@Nonnull final BigDecimal step) {
	return range(start, count, step, DEFAULT_SCHEDULER.get());
}
/**
 * Produces {@code count} BigDecimal values starting at {@code start},
 * each subsequent value increased by {@code step}, emitted on the given pool.
 * Emission stops early when the registration is cancelled; {@code finish()}
 * is only signalled if not cancelled.
 * @param start the start value.
 * @param count the count
 * @param step the stepping
 * @param pool the execution thread pool.
 * @return the observable
 */
@Nonnull
public static Observable<BigDecimal> range(
		@Nonnull final BigDecimal start,
		final int count,
		@Nonnull final BigDecimal step,
		@Nonnull final Scheduler pool) {
	return new Observable<BigDecimal>() {
		@Override
		public Closeable register(final Observer<? super BigDecimal> observer) {
			DefaultRunnable task = new DefaultRunnable() {
				@Override
				public void onRun() {
					BigDecimal current = start;
					int emitted = 0;
					while (emitted < count && !cancelled()) {
						observer.next(current);
						current = current.add(step);
						emitted++;
					}
					if (!cancelled()) {
						observer.finish();
					}
				}
			};
			return pool.schedule(task);
		}
	};
}
/**
 * Produces {@code count} consecutive BigInteger values starting at
 * {@code start}, on the default scheduler.
 * @param start the start value.
 * @param count the count
 * @return the observable
 */
@Nonnull
public static Observable<BigInteger> range(
		@Nonnull final BigInteger start,
		@Nonnull final BigInteger count) {
	return range(start, count, DEFAULT_SCHEDULER.get());
}
/**
 * Creates an observable which generates BigInteger numbers from start,
 * emitting {@code count} consecutive values on the given pool.
 * Emission stops early when the registration is cancelled; {@code finish()}
 * is only signalled if not cancelled.
 * @param start the start value.
 * @param count the count
 * @param pool the execution thread pool.
 * @return the observable
 */
@Nonnull
public static Observable<BigInteger> range(
		@Nonnull final BigInteger start,
		@Nonnull final BigInteger count,
		@Nonnull final Scheduler pool) { // FIX: @Nonnull added for consistency with the other range() overloads
	return new Observable<BigInteger>() {
		@Override
		public Closeable register(final Observer<? super BigInteger> observer) {
			DefaultRunnable s = new DefaultRunnable() {
				@Override
				public void onRun() {
					// exclusive end of the range, computed once
					BigInteger end = start.add(count);
					for (BigInteger i = start; i.compareTo(end) < 0
							&& !cancelled(); i = i.add(BigInteger.ONE)) {
						observer.next(i);
					}
					if (!cancelled()) {
						observer.finish();
					}
				}
			};
			return pool.schedule(s);
		}
	};
}
/**
 * Produces {@code count} Double values starting at {@code start}, each
 * subsequent value increased by {@code step}, on the default scheduler.
 * @param start the start value.
 * @param count the count
 * @param step the stepping
 * @return the observable
 */
@Nonnull
public static Observable<Double> range(
		final double start,
		final int count,
		final double step) {
	return range(start, count, step, DEFAULT_SCHEDULER.get());
}
/**
 * Creates an observable which produces Double values from <code>start</code> in <code>count</code>
 * amount and each subsequent element has a difference of <code>step</code>.
 * Each value is computed as {@code start + i * step} to avoid accumulating
 * floating-point error. Emission stops early when cancelled; {@code finish()}
 * is only signalled if not cancelled.
 * @param start the starting value
 * @param count how many values to produce
 * @param step the incrementation amount
 * @param pool the pool where to emit the values
 * @return the observable of double
 */
@Nonnull
public static Observable<Double> range( // FIX: @Nonnull added for consistency with the float overload; javadoc said "observable of float"
		final double start,
		final int count,
		final double step,
		@Nonnull final Scheduler pool) {
	return new Observable<Double>() {
		@Override
		public Closeable register(final Observer<? super Double> observer) {
			DefaultRunnable s = new DefaultRunnable() {
				@Override
				public void onRun() {
					for (int i = 0; i < count && !cancelled(); i++) {
						observer.next(start + i * step);
					}
					if (!cancelled()) {
						observer.finish();
					}
				}
			};
			return pool.schedule(s);
		}
	};
}
/**
 * Produces {@code count} Float values starting at {@code start}, each
 * subsequent value increased by {@code step}, on the default scheduler.
 * @param start the start value.
 * @param count the count
 * @param step the stepping
 * @return the observable
 */
@Nonnull
public static Observable<Float> range(
		final float start,
		final int count,
		final float step) {
	return range(start, count, step, DEFAULT_SCHEDULER.get());
}
/**
 * Produces {@code count} Float values from {@code start}, each subsequent
 * element differing by {@code step} (computed as {@code start + i * step}),
 * emitted on the given pool. Emission stops early when cancelled;
 * {@code finish()} is only signalled if not cancelled.
 * @param start the starting value
 * @param count how many values to produce
 * @param step the incrementation amount
 * @param pool the pool where to emit the values
 * @return the observable of float
 */
@Nonnull
public static Observable<Float> range(
		final float start,
		final int count,
		final float step,
		@Nonnull final Scheduler pool) {
	return new Observable<Float>() {
		@Override
		public Closeable register(final Observer<? super Float> observer) {
			DefaultRunnable task = new DefaultRunnable() {
				@Override
				public void onRun() {
					int emitted = 0;
					while (emitted < count && !cancelled()) {
						observer.next(start + emitted * step);
						emitted++;
					}
					if (!cancelled()) {
						observer.finish();
					}
				}
			};
			return pool.schedule(task);
		}
	};
}
/**
 * Creates an observable which generates {@code count} consecutive
 * Integer values starting at {@code start}, on the default scheduler.
 * @param start the start value.
 * @param count the count
 * @return the observable
 */
@Nonnull
public static Observable<Integer> range(
		final int start,
		final int count) { // FIX: removed @Nonnull from the primitive int parameter (meaningless on primitives)
	return range(start, count, DEFAULT_SCHEDULER.get());
}
/**
 * Generates {@code count} consecutive Integer values starting at
 * {@code start}, emitted on the given pool. Emission stops early when the
 * registration is cancelled; {@code finish()} is only signalled if not
 * cancelled.
 * @param start the start value.
 * @param count the count
 * @param pool the execution thread pool.
 * @return the observable
 */
public static Observable<Integer> range(
		final int start,
		final int count,
		@Nonnull final Scheduler pool) {
	return new Observable<Integer>() {
		@Override
		public Closeable register(final Observer<? super Integer> observer) {
			DefaultRunnable task = new DefaultRunnable() {
				@Override
				public void onRun() {
					final int end = start + count;
					for (int value = start; value < end && !cancelled(); value++) {
						observer.next(value);
					}
					if (!cancelled()) {
						observer.finish();
					}
				}
			};
			return pool.schedule(task);
		}
	};
}
/**
 * Relays values of T only while the given condition evaluates to false;
 * once it turns true, relaying stops.
 * <p>Implemented as {@code relayWhile} over the negated condition.</p>
 * @param <T> the element type
 * @param source the source of elements
 * @param condition the condition that must be false to relay Ts
 * @return the new observable
 */
@Nonnull
public static <T> Observable<T> relayUntil(
		@Nonnull final Observable<? extends T> source,
		@Nonnull final Func0<Boolean> condition) {
	return relayWhile(source, Functions.not(condition));
}
/**
 * Relays the stream of Ts for as long as the condition evaluates to true;
 * the first value observed while the condition is false terminates the
 * stream with {@code finish()}. The condition is re-evaluated per value.
 * @param <T> the type of the values
 * @param source the source of Ts
 * @param condition the condition that must hold to relay Ts
 * @return the new observable
 */
@Nonnull
public static <T> Observable<T> relayWhile(
		@Nonnull final Observable<? extends T> source,
		@Nonnull final Func0<Boolean> condition) {
	return new Observable<T>() {
		@Override
		public Closeable register(final Observer<? super T> observer) {
			DefaultObserver<T> forwarder = new DefaultObserver<T>(true) {
				@Override
				public void onError(Throwable ex) {
					observer.error(ex);
				}
				@Override
				public void onFinish() {
					observer.finish();
				}
				@Override
				public void onNext(T value) {
					if (!condition.invoke()) {
						// condition no longer holds: complete instead of relaying
						finish();
					} else {
						observer.next(value);
					}
				}
			};
			return Closeables.close(forwarder, source.register(forwarder));
		}
	};
}
/**
 * Strips the time-interval wrapper from each element, yielding the raw
 * values.
 * @param <T> the element type
 * @param source the source which has its elements in a timeinterval way.
 * @return the raw observables of Ts
 */
@Nonnull
public static <T> Observable<T> removeTimeInterval(
		@Nonnull Observable<TimeInterval<T>> source) {
	Func1<TimeInterval<T>, T> unwrap = Reactive.unwrapTimeInterval();
	return select(source, unwrap);
}
/**
 * Strips the timestamp wrapper from each element, yielding the raw values.
 * @param <T> the element type
 * @param source the source which has its elements in a timestamped way.
 * @return the raw observables of Ts
 */
@Nonnull
public static <T> Observable<T> removeTimestamped(
		@Nonnull Observable<Timestamped<T>> source) {
	return select(source, Reactive.<T>unwrapTimestamped());
}
/**
 * Calls the given generator function over and over, indefinitely, on the
 * default pool. Observers must unregister to stop the infinite loop.
 * @param <T> the type of elements to produce
 * @param func the function which generates elements
 * @return the observable
 */
@Nonnull
public static <T> Observable<T> repeat(
		@Nonnull final Func0<? extends T> func) {
	return repeat(func, DEFAULT_SCHEDULER.get());
}
/**
 * Calls the given generator function <code>count</code> times to produce
 * values, running on the default pool.
 * @param <T> the element type
 * @param func the function to call to generate values
 * @param count the number of times to repeat the value
 * @return the observable
 */
@Nonnull
public static <T> Observable<T> repeat(
		@Nonnull final Func0<? extends T> func,
		final int count) {
	return repeat(func, count, DEFAULT_SCHEDULER.get());
}
/**
 * Calls the given generator function <code>count</code> times to produce
 * values, running on the given pool. Stops early when the registration is
 * cancelled; {@code finish()} is only signalled if not cancelled.
 * @param <T> the element type
 * @param func the function to call to generate values
 * @param count the number of times to repeat the value
 * @param pool the pool where the loop should be executed
 * @return the observable
 */
@Nonnull
public static <T> Observable<T> repeat(
		@Nonnull final Func0<? extends T> func,
		final int count,
		@Nonnull final Scheduler pool) {
	return new Observable<T>() {
		@Override
		public Closeable register(final Observer<? super T> observer) {
			DefaultRunnable loop = new DefaultRunnable() {
				@Override
				public void onRun() {
					for (int remaining = count; !cancelled() && remaining > 0; remaining--) {
						observer.next(func.invoke());
					}
					if (!cancelled()) {
						observer.finish();
					}
				}
			};
			return pool.schedule(loop);
		}
	};
}
/**
 * Calls the given generator function over and over, indefinitely, on the
 * given pool. The stream never finishes; observers must unregister to stop
 * the infinite loop.
 * @param <T> the type of elements to produce
 * @param func the function which generates elements
 * @param pool the pool where the generator loop runs
 * @return the observable
 */
public static <T> Observable<T> repeat(
		@Nonnull final Func0<? extends T> func,
		@Nonnull final Scheduler pool) {
	return new Observable<T>() {
		@Override
		public Closeable register(final Observer<? super T> observer) {
			DefaultRunnable loop = new DefaultRunnable() {
				@Override
				public void onRun() {
					// runs until the registration is cancelled; never calls finish()
					while (!cancelled()) {
						observer.next(func.invoke());
					}
				}
			};
			return pool.schedule(loop);
		}
	};
}
/**
 * Repeats the source observable <code>count</code> times by concatenating
 * <code>count</code> references to it; a non-positive count yields an empty
 * observable.
 * @param <T> the element type
 * @param source the source observable
 * @param count the number of times to repeat
 * @return the new observable
 */
@Nonnull
public static <T> Observable<T> repeat(
		@Nonnull Observable<? extends T> source,
		int count) {
	if (count <= 0) {
		return empty();
	}
	List<Observable<? extends T>> copies = new ArrayList<Observable<? extends T>>(count);
	for (int i = 0; i < count; i++) {
		copies.add(source);
	}
	return concat(copies);
}
/**
 * Emits the given value over and over, indefinitely, on the default pool.
 * Observers must deregister to stop the infinite background loop.
 * @param <T> the element type
 * @param value the value to repeat
 * @return the observable
 */
@Nonnull
public static <T> Observable<T> repeat(final T value) {
	return repeat(value, DEFAULT_SCHEDULER.get());
}
/**
 * Emits the given value <code>count</code> times on the default pool.
 * @param <T> the element type
 * @param value the value to repeat
 * @param count the number of times to repeat the value
 * @return the observable
 */
@Nonnull
public static <T> Observable<T> repeat(
		final T value,
		final int count) {
	return repeat(value, count, DEFAULT_SCHEDULER.get());
}
/**
 * Emits the given value <code>count</code> times on the given pool,
 * by wrapping the value into a constant generator function.
 * @param <T> the element type
 * @param value the value to repeat
 * @param count the number of times to repeat the value
 * @param pool the pool where the loop should be executed
 * @return the observable
 */
@Nonnull
public static <T> Observable<T> repeat(
		final T value,
		final int count,
		@Nonnull final Scheduler pool) {
	return repeat(Functions.constant0(value), count, pool);
}
/**
 * Emits the given value over and over, indefinitely, on the given pool,
 * by wrapping the value into a constant generator function. Observers must
 * deregister to stop the infinite background loop.
 * @param <T> the element type
 * @param value the value to repeat
 * @param pool the pool where the loop should be executed
 * @return the observable
 */
public static <T> Observable<T> repeat(
		final T value,
		@Nonnull final Scheduler pool) {
	return repeat(Functions.constant0(value), pool);
}
/**
 * Atomically swaps in the specified scheduler as the new default and
 * returns the one it replaced. This method is threadsafe.
 * @param newScheduler the new scheduler
 * @return the current scheduler
 */
@Nonnull
public static Scheduler replaceDefaultScheduler(
		@Nonnull Scheduler newScheduler) {
	if (newScheduler == null) {
		throw new IllegalArgumentException("newScheduler is null");
	}
	final Scheduler previous = DEFAULT_SCHEDULER.getAndSet(newScheduler);
	return previous;
}
/**
 * Shares the source observable and replays all buffered source Ts to any
 * registering observer, emitting on the default scheduler.
 * @param <T> the element type
 * @param source the source of Ts
 * @return the new observable
 */
public static <T> Observable<T> replay(final Observable<? extends T> source) {
	return replay(source, DEFAULT_SCHEDULER.get());
}
/**
 * Shares the selector's output stream and replays up to
 * <code>bufferSize</code> buffered values to any registering observer.
 * @param <T> the element type
 * @param <U> the return element type
 * @param source the source of Ts
 * @param selector the output stream selector
 * @param bufferSize the target buffer size
 * @return the new observable
 */
public static <T, U> Observable<U> replay(
		final Observable<? extends T> source,
		final Func1<? super Observable<? extends T>, ? extends Observable<U>> selector,
		final int bufferSize
) {
	return replay(selector.invoke(source), bufferSize);
}
/**
 * Shares the selector's output stream and replays up to
 * <code>bufferSize</code> buffered values to any registering observer;
 * after each periodic timespan the buffer is reset.
 * @param <T> the source element type
 * @param <U> the return element type
 * @param source the source of Ts
 * @param selector the output stream selector
 * @param bufferSize the buffer size
 * @param timeSpan the window length
 * @param unit the time unit
 * @return the new observer
 */
public static <T, U> Observable<U> replay(
		final Observable<? extends T> source,
		final Func1<? super Observable<? extends T>, ? extends Observable<U>> selector,
		final int bufferSize,
		final long timeSpan,
		final TimeUnit unit
) {
	return replay(selector.invoke(source), bufferSize, timeSpan, unit);
}
/**
 * Shares the selector's output stream and replays up to
 * <code>bufferSize</code> buffered values to any registering observer on
 * the given scheduler; after each periodic timespan the buffer is reset.
 * @param <T> the source element type
 * @param <U> the return element type
 * @param source the source of Ts
 * @param selector the output stream selector
 * @param bufferSize the buffer size
 * @param timeSpan the window length
 * @param unit the time unit
 * @param scheduler the target scheduler
 * @return the new observer
 */
public static <T, U> Observable<U> replay(
		final Observable<? extends T> source,
		final Func1<? super Observable<? extends T>, ? extends Observable<U>> selector,
		final int bufferSize,
		final long timeSpan,
		final TimeUnit unit,
		final Scheduler scheduler
) {
	return replay(selector.invoke(source), bufferSize, timeSpan, unit, scheduler);
}
/**
 * Shares the selector's output stream and replays all buffered values to
 * any registering observer; after each periodic timespan the buffer is
 * reset.
 * @param <T> the source element type
 * @param <U> the return element type
 * @param source the source of Ts
 * @param selector the output stream selector
 * @param timeSpan the window length
 * @param unit the time unit
 * @return the new observer
 */
public static <T, U> Observable<U> replay(
		final Observable<? extends T> source,
		final Func1<? super Observable<? extends T>, ? extends Observable<U>> selector,
		final long timeSpan,
		final TimeUnit unit
) {
	return replay(selector.invoke(source), timeSpan, unit);
}
/**
 * Shares the selector's output stream and replays all buffered values to
 * any registering observer on the given scheduler; after each periodic
 * timespan the buffer is reset.
 * @param <T> the source element type
 * @param <U> the return element type
 * @param source the source of Ts
 * @param selector the output stream selector
 * @param timeSpan the window length
 * @param unit the time unit
 * @param scheduler the target scheduler
 * @return the new observer
 */
public static <T, U> Observable<U> replay(
		final Observable<? extends T> source,
		final Func1<? super Observable<? extends T>, ? extends Observable<U>> selector,
		final long timeSpan,
		final TimeUnit unit,
		final Scheduler scheduler
) {
	return replay(selector.invoke(source), timeSpan, unit, scheduler);
}
/**
 * Shares the source observable and replays up to <code>bufferSize</code>
 * buffered source Ts to any registering observer, on the default scheduler.
 * @param <T> the element type
 * @param source the source of Ts
 * @param bufferSize the target buffer size
 * @return the new observable
 */
public static <T> Observable<T> replay(
		final Observable<? extends T> source,
		final int bufferSize
) {
	return replay(source, bufferSize, DEFAULT_SCHEDULER.get());
}
/**
 * Shares the source observable and replays up to <code>bufferSize</code>
 * buffered source Ts to any registering observer, on the default scheduler;
 * after each periodic timespan the buffer is reset.
 * @param <T> the source element type
 * @param source the source of Ts
 * @param bufferSize the buffer size
 * @param timeSpan the window length
 * @param unit the time unit
 * @return the new observer
 */
public static <T> Observable<T> replay(
		final Observable<? extends T> source,
		final int bufferSize,
		final long timeSpan,
		final TimeUnit unit
) {
	return replay(source, bufferSize, timeSpan, unit, DEFAULT_SCHEDULER.get());
}
/**
 * Creates an observable which shares a single registration to the source
 * observable and replays up to the last <code>bufferSize</code> source
 * notifications to any registering observer. After each periodic timespan,
 * the buffer is reset.
 * @param <T> the source element type
 * @param source the source of Ts
 * @param bufferSize the buffer size
 * @param timeSpan the window length
 * @param unit the time unit
 * @param scheduler the target scheduler
 * @return the new observer
 */
public static <T> Observable<T> replay(
	final Observable<? extends T> source,
	final int bufferSize,
	final long timeSpan,
	final TimeUnit unit,
	final Scheduler scheduler
) {
	return new Observable<T>() {
		/** The read-write lock. */
		final ReadWriteLock rwLock = new ReentrantReadWriteLock(true);
		/** The read lock for reading elements of the buffer. */
		final Lock readLock = rwLock.readLock();
		/** The write lock to write elements of the buffer and add new listeners. */
		final Lock writeLock = rwLock.writeLock();
		/** The buffer that holds the observed values so far. */
		@GuardedBy("rwLock")
		CircularBuffer<Option<T>> buffer = new CircularBuffer<Option<T>>(bufferSize);
		/** The single registration handler. */
		@GuardedBy("writeLock")
		Closeable sourceClose;
		/** The single registration handler. */
		@GuardedBy("writeLock")
		Closeable timerClose;
		/** The set of listeners active. */
		@GuardedBy("writeLock")
		Set<SingleLaneExecutor<Pair<Integer, CircularBuffer<Option<T>>>>> listeners = new HashSet<SingleLaneExecutor<Pair<Integer, CircularBuffer<Option<T>>>>>();
		@Override
		protected void finalize() throws Throwable {
			Closeables.close0(timerClose);
			Closeables.close0(sourceClose);
			super.finalize();
		}
		@Override
		public Closeable register(final Observer<? super T> observer) {
			writeLock.lock();
			try {
				// FIX: was "sourceClose != null", which can never hold on the first
				// registration, so the source was never registered and the buffer
				// never received a value. The sibling replay() overloads perform
				// this lazy one-time registration with "== null".
				if (sourceClose == null) {
					sourceClose = source.register(new Observer<T>() {
						/**
						 * Buffer and submit the option to all registered listeners.
						 * @param opt the option to submit
						 */
						void doOption(Option<T> opt) {
							writeLock.lock();
							try {
								buffer.add(opt);
								Pair<Integer, CircularBuffer<Option<T>>> of = Pair.of(buffer.tail(), buffer);
								for (SingleLaneExecutor<Pair<Integer, CircularBuffer<Option<T>>>> l : listeners) {
									l.add(of);
								}
							} finally {
								writeLock.unlock();
							}
						}
						@Override
						public void error(Throwable ex) {
							doOption(Option.<T>error(ex));
						}
						@Override
						public void finish() {
							doOption(Option.<T>none());
						}
						@Override
						public void next(T value) {
							doOption(Option.some(value));
						}
					});
					// periodic reset of the replay buffer
					timerClose = scheduler.schedule(new Runnable() {
						@Override
						public void run() {
							writeLock.lock();
							try {
								buffer = new CircularBuffer<Option<T>>(bufferSize);
							} finally {
								writeLock.unlock();
							}
						}
					}, timeSpan, timeSpan, unit);
				}
			} finally {
				writeLock.unlock();
			}
			final AtomicBoolean cancel = new AtomicBoolean();
			final SingleLaneExecutor<Pair<Integer, CircularBuffer<Option<T>>>> playback = SingleLaneExecutor.create(scheduler, new Action1<Pair<Integer, CircularBuffer<Option<T>>>>() {
				/** The local buffer reader index. */
				@GuardedBy("readLock")
				int index = 0;
				/** The last buffer. */
				@GuardedBy("readLock")
				CircularBuffer<Option<T>> last;
				@Override
				public void invoke(Pair<Integer, CircularBuffer<Option<T>>> value) {
					readLock.lock();
					try {
						if (last != value.second) {
							index = 0;
							last = value.second;
						}
						// NOTE(review): this consults the *outer* (possibly already reset)
						// buffer's head while reading from "last" — presumably it should
						// be last.head(); confirm before changing.
						index = Math.max(index, buffer.head());
						while (index < value.first && !cancel.get()) {
							dispatch(observer, last.get(index++));
						}
					} finally {
						readLock.unlock();
					}
				}
			});
			writeLock.lock();
			try {
				playback.add(Pair.of(buffer.tail(), buffer));
				listeners.add(playback);
			} finally {
				writeLock.unlock();
			}
			final Closeable c = new Closeable() {
				@Override
				public void close() throws IOException {
					cancel.set(true);
					writeLock.lock();
					try {
						listeners.remove(playback);
					} finally {
						writeLock.unlock();
					}
					Closeables.close0(playback);
				}
			};
			return c;
		}
	};
}
/**
 * Creates an observable which shares the source observable and replays
 * up to the last <code>bufferSize</code> source Ts
 * to any of the registering observers.
 * <p>The source is registered lazily on the first {@code register()} call;
 * each observer gets its own single-lane playback executor that drains the
 * shared circular buffer on the given scheduler.</p>
 * @param <T> the element type
 * @param source the source of Ts
 * @param bufferSize the target buffer size
 * @param scheduler the scheduler from where the historical elements are emitted
 * @return the new observable
 */
public static <T> Observable<T> replay(
	final Observable<? extends T> source,
	final int bufferSize,
	final Scheduler scheduler
) {
	return new Observable<T>() {
		/** The read-write lock. */
		final ReadWriteLock rwLock = new ReentrantReadWriteLock(true);
		/** The read lock for reading elements of the buffer. */
		final Lock readLock = rwLock.readLock();
		/** The write lock to write elements of the buffer and add new listeners. */
		final Lock writeLock = rwLock.writeLock();
		/** The buffer that holds the observed values so far. */
		@GuardedBy("rwLock")
		final CircularBuffer<Option<T>> buffer = new CircularBuffer<Option<T>>(bufferSize);
		/** The single registration handler. */
		@GuardedBy("writeLock")
		Closeable sourceClose;
		/** The set of listeners active. */
		@GuardedBy("writeLock")
		Set<SingleLaneExecutor<Integer>> listeners = new HashSet<SingleLaneExecutor<Integer>>();
		@Override
		protected void finalize() throws Throwable {
			Closeables.close0(sourceClose);
			super.finalize();
		}
		@Override
		public Closeable register(final Observer<? super T> observer) {
			writeLock.lock();
			try {
				// lazy, one-time registration to the source
				if (sourceClose == null) {
					sourceClose = source.register(new Observer<T>() {
						/**
						 * Buffer and submit the option to all registered listeners.
						 * @param opt the option to submit
						 */
						void doOption(Option<T> opt) {
							writeLock.lock();
							try {
								buffer.add(opt);
								// notify every playback lane of the new tail position
								for (SingleLaneExecutor<Integer> l : listeners) {
									l.add(buffer.tail());
								}
							} finally {
								writeLock.unlock();
							}
						}
						@Override
						public void error(Throwable ex) {
							doOption(Option.<T>error(ex));
						}
						@Override
						public void finish() {
							doOption(Option.<T>none());
						}
						@Override
						public void next(T value) {
							doOption(Option.some(value));
						}
					});
				}
			} finally {
				writeLock.unlock();
			}
			final AtomicBoolean cancel = new AtomicBoolean();
			final SingleLaneExecutor<Integer> playback = SingleLaneExecutor.create(scheduler, new Action1<Integer>() {
				/** The local buffer reader index. */
				@GuardedBy("readLock")
				int index = 0;
				@Override
				public void invoke(Integer value) {
					readLock.lock();
					try {
						// skip over entries already evicted from the circular buffer
						index = Math.max(index, buffer.head());
						while (index < value && !cancel.get()) {
							dispatch(observer, buffer.get(index++));
						}
					} finally {
						readLock.unlock();
					}
				}
			});
			writeLock.lock();
			try {
				// seed the new observer with everything buffered so far
				playback.add(buffer.size());
				listeners.add(playback);
			} finally {
				writeLock.unlock();
			}
			final Closeable c = new Closeable() {
				@Override
				public void close() throws IOException {
					cancel.set(true);
					writeLock.lock();
					try {
						listeners.remove(playback);
					} finally {
						writeLock.unlock();
					}
					Closeables.close0(playback);
				}
			};
			return c;
		}
	};
}
/**
 * Shares the source observable and replays all buffered source Ts to any
 * registering observer on the default scheduler; after each periodic
 * timespan the buffer is reset.
 * @param <T> the source element type
 * @param source the source of Ts
 * @param timeSpan the window length
 * @param unit the time unit
 * @return the new observer
 */
public static <T> Observable<T> replay(
		final Observable<? extends T> source,
		final long timeSpan,
		final TimeUnit unit
) {
	return replay(source, timeSpan, unit, DEFAULT_SCHEDULER.get());
}
/**
 * Creates an observable which shares the source observable and replays all source Ts
 * to any of the registering observers. After the periodic timespan, the buffer is reset.
 * <p>The source is registered lazily on the first {@code register()} call and a
 * periodic timer swaps in a fresh (empty) buffer every {@code timeSpan}; each
 * observer drains the current buffer via its own single-lane executor.</p>
 * <p>NOTE(review): between resets the list buffer grows without bound —
 * presumably acceptable for short windows; confirm for long-running sources.</p>
 * @param <T> the source element type
 * @param source the source of Ts
 * @param timeSpan the window length
 * @param unit the time unit
 * @param scheduler the target scheduler
 * @return the new observer
 */
public static <T> Observable<T> replay(
	final Observable<? extends T> source,
	final long timeSpan,
	final TimeUnit unit,
	final Scheduler scheduler
) {
	return new Observable<T>() {
		/** The read-write lock. */
		final ReadWriteLock rwLock = new ReentrantReadWriteLock(true);
		/** The read lock for reading elements of the buffer. */
		final Lock readLock = rwLock.readLock();
		/** The write lock to write elements of the buffer and add new listeners. */
		final Lock writeLock = rwLock.writeLock();
		/** The buffer that holds the observed values so far. */
		@GuardedBy("rwLock")
		List<Option<T>> buffer = new ArrayList<Option<T>>();
		/** The single registration handler. */
		@GuardedBy("writeLock")
		Closeable sourceClose;
		/** The single registration handler. */
		@GuardedBy("writeLock")
		Closeable timerClose;
		/** The set of listeners active. */
		@GuardedBy("writeLock")
		Set<SingleLaneExecutor<Pair<Integer, List<Option<T>>>>> listeners = new HashSet<SingleLaneExecutor<Pair<Integer, List<Option<T>>>>>();
		@Override
		protected void finalize() throws Throwable {
			Closeables.close0(timerClose);
			Closeables.close0(sourceClose);
			super.finalize();
		}
		@Override
		public Closeable register(final Observer<? super T> observer) {
			writeLock.lock();
			try {
				// lazy, one-time registration to the source plus the reset timer
				if (sourceClose == null) {
					sourceClose = source.register(new Observer<T>() {
						/**
						 * Buffer and submit the option to all registered listeners.
						 * @param opt the option to submit
						 */
						void doOption(Option<T> opt) {
							writeLock.lock();
							try {
								buffer.add(opt);
								// pair of (current length, buffer identity) so readers can
								// detect when the buffer has been swapped by the timer
								Pair<Integer, List<Option<T>>> of = Pair.of(buffer.size(), buffer);
								for (SingleLaneExecutor<Pair<Integer, List<Option<T>>>> l : listeners) {
									l.add(of);
								}
							} finally {
								writeLock.unlock();
							}
						}
						@Override
						public void error(Throwable ex) {
							doOption(Option.<T>error(ex));
						}
						@Override
						public void finish() {
							doOption(Option.<T>none());
						}
						@Override
						public void next(T value) {
							doOption(Option.some(value));
						}
					});
					// periodic reset: replace the buffer with a fresh empty list
					timerClose = scheduler.schedule(new Runnable() {
						@Override
						public void run() {
							writeLock.lock();
							try {
								buffer = new ArrayList<Option<T>>();
							} finally {
								writeLock.unlock();
							}
						}
					}, timeSpan, timeSpan, unit);
				}
			} finally {
				writeLock.unlock();
			}
			final AtomicBoolean cancel = new AtomicBoolean();
			final SingleLaneExecutor<Pair<Integer, List<Option<T>>>> playback = SingleLaneExecutor.create(scheduler, new Action1<Pair<Integer, List<Option<T>>>>() {
				/** The local buffer reader index. */
				@GuardedBy("readLock")
				int index = 0;
				/** The last buffer. */
				@GuardedBy("readLock")
				List<Option<T>> last;
				@Override
				public void invoke(Pair<Integer, List<Option<T>>> value) {
					readLock.lock();
					try {
						// restart from the beginning whenever the buffer was swapped
						if (last != value.second) {
							index = 0;
							last = value.second;
						}
						while (index < value.first && !cancel.get()) {
							dispatch(observer, last.get(index++));
						}
					} finally {
						readLock.unlock();
					}
				}
			});
			writeLock.lock();
			try {
				// seed the new observer with everything buffered so far
				playback.add(Pair.of(buffer.size(), buffer));
				listeners.add(playback);
			} finally {
				writeLock.unlock();
			}
			final Closeable c = new Closeable() {
				@Override
				public void close() throws IOException {
					cancel.set(true);
					writeLock.lock();
					try {
						listeners.remove(playback);
					} finally {
						writeLock.unlock();
					}
					Closeables.close0(playback);
				}
			};
			return c;
		}
	};
}
	/**
	 * Creates an observable which shares the source observable and replays all source Ts
	 * to any of the registering observers.
	 * <p>The upstream registration happens once, on the first observer; every later
	 * observer first receives the full history, then the live values.</p>
	 * @param <T> the element type
	 * @param source the source of Ts
	 * @param scheduler the scheduler from where the historical elements are emitted
	 * @return the new observable
	 */
	public static <T> Observable<T> replay(
			final Observable<? extends T> source,
			final Scheduler scheduler
	) {
		return new Observable<T>() {
			/** The read-write lock. */
			final ReadWriteLock rwLock = new ReentrantReadWriteLock(true);
			/** The read lock for reading elements of the buffer. */
			final Lock readLock = rwLock.readLock();
			/** The write lock to write elements of the buffer and add new listeners. */
			final Lock writeLock = rwLock.writeLock();
			/** The buffer that holds the observed values so far; grows without bound. */
			@GuardedBy("rwLock")
			final List<Option<T>> buffer = new ArrayList<Option<T>>();
			/** The single registration handler for the shared upstream. */
			@GuardedBy("writeLock")
			Closeable sourceClose;
			/** The set of listeners active. */
			@GuardedBy("writeLock")
			Set<SingleLaneExecutor<Integer>> listeners = new HashSet<SingleLaneExecutor<Integer>>();
			@Override
			protected void finalize() throws Throwable {
				// best-effort cleanup of the shared upstream registration
				Closeables.close0(sourceClose);
				super.finalize();
			}
			@Override
			public Closeable register(final Observer<? super T> observer) {
				writeLock.lock();
				try {
					if (sourceClose == null) {
						// first registrant: subscribe to the shared upstream once
						sourceClose = source.register(new Observer<T>() {
							/**
							 * Buffer and submit the option to all registered listeners.
							 * @param opt the option to submit
							 */
							void doOption(Option<T> opt) {
								writeLock.lock();
								try {
									buffer.add(opt);
									// notify each listener about the new buffer size to replay up to
									for (SingleLaneExecutor<Integer> l : listeners) {
										l.add(buffer.size());
									}
								} finally {
									writeLock.unlock();
								}
							}
							@Override
							public void error(Throwable ex) {
								doOption(Option.<T>error(ex));
							}
							@Override
							public void finish() {
								doOption(Option.<T>none());
							}
							@Override
							public void next(T value) {
								doOption(Option.some(value));
							}
						});
					}
				} finally {
					writeLock.unlock();
				}
				final AtomicBoolean cancel = new AtomicBoolean();
				// serializes the replay of buffered values to this one observer on the scheduler
				final SingleLaneExecutor<Integer> playback = SingleLaneExecutor.create(scheduler, new Action1<Integer>() {
					/** The local buffer reader index. */
					@GuardedBy("readLock")
					int index = 0;
					@Override
					public void invoke(Integer value) {
						readLock.lock();
						try {
							while (index < value && !cancel.get()) {
								dispatch(observer, buffer.get(index++));
							}
						} finally {
							readLock.unlock();
						}
					}
				});
				writeLock.lock();
				try {
					// replay the existing history first, then follow the live notifications
					playback.add(buffer.size());
					listeners.add(playback);
				} finally {
					writeLock.unlock();
				}
				final Closeable c = new Closeable() {
					@Override
					public void close() throws IOException {
						cancel.set(true);
						writeLock.lock();
						try {
							listeners.remove(playback);
						} finally {
							writeLock.unlock();
						}
						Closeables.close0(playback);
					}
				};
				return c;
			}
		};
	}
/**
* Creates an observable which shares the source observable returned by the selector and replays all source Ts
* to any of the registering observers.
* @param <T> the element type
* @param <U> the return element type
* @param source the source of Ts
* @param selector the output stream selector
* @param bufferSize the target buffer size
* @param scheduler the scheduler from where the historical elements are emitted
* @return the new observable
*/
public static <T, U> Observable<U> replay(
final Observable<T> source,
final Func1<? super Observable<? extends T>, ? extends Observable<U>> selector,
final int bufferSize,
final Scheduler scheduler
) {
return replay(selector.invoke(source), bufferSize, scheduler);
}
/**
* Returns the observable sequence for the supplied source observable by
* invoking the selector function with it.
* @param <T> the source element type
* @param <U> the output element type
* @param source the source of Ts
* @param selector the selector which returns an observable of Us for the given <code>source</code>
* @return the new observable
*/
public static <T, U> Observable<U> replay(
final Observable<T> source,
final Func1<? super Observable<T>, ? extends Observable<U>> selector
) {
return selector.invoke(source);
}
/**
* Restore the default scheduler back to the <code>DefaultScheduler</code>
* used when this class was initialized.
*/
public static void restoreDefaultScheduler() {
DEFAULT_SCHEDULER.set(new DefaultScheduler());
}
	/**
	 * Returns an observable which listens to elements from a source until it signals an error()
	 * or finish() and continues with the next observable. The registration happens only when the
	 * previous observables finished in any way.
	 * <p>Note: both error() and finish() of the current source advance to the next
	 * observable; the terminal event itself is not relayed.</p>
	 * FIXME not sure how to close previous registrations
	 * @param <T> the type of the elements
	 * @param sources the list of observables
	 * @return the observable
	 */
	@Nonnull
	public static <T> Observable<T> resumeAlways(
			@Nonnull final Iterable<? extends Observable<? extends T>> sources) {
		return new Observable<T>() {
			@Override
			public Closeable register(final Observer<? super T> observer) {
				final Iterator<? extends Observable<? extends T>> it = sources.iterator();
				if (it.hasNext()) {
					DefaultObserver<T> obs = new DefaultObserver<T>(false) {
						/** The registration on the current source. */
						Closeable c;
						{
							// register on the first source under the observer's lock
							lock.lock();
							try {
								c = it.next().register(this);
							} finally {
								lock.unlock();
							}
						}
						@Override
						protected void onClose() {
							Closeables.close0(c);
						}
						@Override
						public void onError(Throwable ex) {
							// current source failed: move on to the next one, if any
							Closeables.close0(c);
							if (it.hasNext()) {
								c = it.next().register(this);
							} else {
								observer.finish();
								close();
							}
						}
						@Override
						public void onFinish() {
							// current source completed: move on to the next one, if any
							Closeables.close0(c);
							if (it.hasNext()) {
								c = it.next().register(this);
							} else {
								observer.finish();
								close();
							}
						}
						@Override
						public void onNext(T value) {
							observer.next(value);
						}
					};
					return obs;
				}
				// no sources at all: behave like an empty observable
				return Reactive.<T>empty().register(observer);
			}
		};
	}
/**
* It tries to submit the values of first observable, but when it throws an exeption,
* the next observable within source is used further on. Basically a failover between the Observables.
* If the current source finish() then the result observable calls finish().
* If the last of the sources calls error() the result observable calls error()
* FIXME not sure how to close previous registrations
* @param <T> the type of the values
* @param sources the available source observables.
* @return the failover observable
*/
@Nonnull
public static <T> Observable<T> resumeOnError(
@Nonnull final Iterable<? extends Observable<? extends T>> sources) {
return new Observable<T>() {
@Override
public Closeable register(final Observer<? super T> observer) {
final Iterator<? extends Observable<? extends T>> it = sources.iterator();
if (it.hasNext()) {
DefaultObserver<T> obs = new DefaultObserver<T>(false) {
Closeable c;
{
lock.lock();
try {
c = it.next().register(this);
} finally {
lock.unlock();
}
}
@Override
protected void onClose() {
Closeables.close0(c);
}
@Override
public void onError(Throwable ex) {
Closeables.close0(c);
if (it.hasNext()) {
c = it.next().register(this);
} else {
observer.finish();
close();
}
}
@Override
public void onFinish() {
Closeables.close0(c);
observer.finish();
close();
}
@Override
public void onNext(T value) {
observer.next(value);
}
};
return obs;
}
return Reactive.<T>empty().register(observer);
}
};
}
	/**
	 * Restarts the observation until the source observable terminates normally.
	 * <p>On error() the upstream registration is closed and a new registration
	 * is made immediately; only finish() is relayed to the observer.</p>
	 * @param <T> the type of elements
	 * @param source the source observable
	 * @return the repeating observable
	 */
	@Nonnull
	public static <T> Observable<T> retry(
			@Nonnull final Observable<? extends T> source) {
		return new Observable<T>() {
			@Override
			public Closeable register(final Observer<? super T> observer) {
				DefaultObserver<T> obs = new DefaultObserver<T>(false) {
					/** The registration. */
					Closeable c;
					{
						// initial registration under the observer's lock
						lock.lock();
						try {
							c = source.register(this);
						} finally {
							lock.unlock();
						}
					}
					@Override
					protected void onClose() {
						Closeables.close0(c);
					}
					@Override
					public void onError(Throwable ex) {
						// swallow the error and resubscribe to the source
						Closeables.close0(c);
						c = source.register(this);
					}
					@Override
					public void onFinish() {
						observer.finish();
						close();
					}
					@Override
					public void onNext(T value) {
						observer.next(value);
					}
				};
				return obs;
			}
		};
	}
	/**
	 * Restarts the observation until the source observable terminates normally or the <code>count</code> retry count was used up.
	 * <p>The source is registered once initially and re-registered up to <code>count</code>
	 * more times on error; the error that exhausts the retries is relayed to the observer.</p>
	 * FIXME if the retry count is zero and yet another error comes, what should happen? finish or this time submit the error?
	 * @param <T> the type of elements
	 * @param source the source observable
	 * @param count the retry count
	 * @return the repeating observable
	 */
	@Nonnull
	public static <T> Observable<T> retry(
			@Nonnull final Observable<? extends T> source,
			final int count) {
		return new Observable<T>() {
			@Override
			public Closeable register(final Observer<? super T> observer) {
				DefaultObserver<T> obs = new DefaultObserver<T>(false) {
					/** The remaining retry count. */
					int remainingCount = count;
					/** The registration. */
					Closeable c;
					{
						// initial registration under the observer's lock
						lock.lock();
						try {
							c = source.register(this);
						} finally {
							lock.unlock();
						}
					}
					@Override
					public void onError(Throwable ex) {
						Closeables.close0(c);
						if (remainingCount-- > 0) {
							// retries left: resubscribe and swallow this error
							c = source.register(this);
						} else {
							// retries exhausted: relay the error
							observer.error(ex);
							close();
						}
					}
					@Override
					public void onFinish() {
						observer.finish();
						close();
					}
					@Override
					public void onNext(T value) {
						observer.next(value);
					}
				};
				return obs;
			}
		};
	}
/**
* Blocks until the observable calls finish() or error(). Values are submitted to the given action.
* @param <T> the type of the elements
* @param source the source observable
* @param action the action to invoke for each value
* @throws InterruptedException if the current thread is interrupted while waiting on
* the observable.
*/
public static <T> void run(
@Nonnull final Observable<? extends T> source,
@Nonnull final Action1<? super T> action) throws InterruptedException {
final CountDownLatch latch = new CountDownLatch(1);
Closeable c = source.register(new DefaultObserver<T>(true) {
@Override
public void onError(Throwable ex) {
latch.countDown();
}
@Override
public void onFinish() {
latch.countDown();
}
@Override
public void onNext(T value) {
action.invoke(value);
}
});
try {
latch.await();
} finally {
Closeables.close0(c);
}
}
/**
* Blocks until the observable calls finish() or error(). Events are submitted to the given observer.
* @param <T> the type of the elements
* @param source the source observable
* @param observer the observer to invoke for each event
* @throws InterruptedException if the current thread is interrupted while waiting on
* the observable.
*/
public static <T> void run(
@Nonnull final Observable<? extends T> source,
@Nonnull final Observer<? super T> observer) throws InterruptedException {
final CountDownLatch latch = new CountDownLatch(1);
Closeable c = source.register(new DefaultObserver<T>(true) {
@Override
public void onError(Throwable ex) {
try {
observer.error(ex);
} finally {
latch.countDown();
}
}
@Override
public void onFinish() {
try {
observer.finish();
} finally {
latch.countDown();
}
}
@Override
public void onNext(T value) {
observer.next(value);
}
});
try {
latch.await();
} finally {
Closeables.close0(c);
}
}
/**
* Blocks until the observable calls finish() or error(). Values are ignored.
* @param source the source observable
* @throws InterruptedException if the current thread is interrupted while waiting on
* the observable.
*/
public static void run(
@Nonnull final Observable<?> source) throws InterruptedException {
final CountDownLatch latch = new CountDownLatch(1);
Closeable c = source.register(new DefaultObserver<Object>(true) {
@Override
public void onError(Throwable ex) {
latch.countDown();
}
@Override
public void onFinish() {
latch.countDown();
}
@Override
public void onNext(Object value) {
}
});
try {
latch.await();
} finally {
Closeables.close0(c);
}
}
/**
* Blocks until the observable calls finish() or error() or the specified amount of time ellapses. Values are ignored.
* FIXME might be infeasible due the potential side effects along the event stream
* @param source the source observable
* @param time the time value
* @param unit the time unit
* @return false if the waiting time ellapsed before the run completed
* @throws InterruptedException if the current thread is interrupted while waiting on
* the observable.
*/
static boolean run(
@Nonnull final Observable<?> source,
long time,
@Nonnull TimeUnit unit) throws InterruptedException {
final CountDownLatch latch = new CountDownLatch(1);
Closeable c = source.register(new DefaultObserver<Object>(true) {
@Override
public void onError(Throwable ex) {
latch.countDown();
}
@Override
public void onFinish() {
latch.countDown();
}
@Override
public void onNext(Object value) {
}
});
try {
return latch.await(time, unit);
} finally {
Closeables.close0(c);
}
}
/**
* Periodically sample the given source observable, which means tracking the last value of
* the observable and periodically submitting it to the output observable.
* FIXME the error() and finish() are instantly propagated
* @param <T> the type of elements to watch
* @param source the source of elements
* @param time the time value to wait
* @param unit the time unit
* @return the sampled observable
*/
@Nonnull
public static <T> Observable<T> sample(
@Nonnull final Observable<? extends T> source,
final long time,
@Nonnull final TimeUnit unit) {
return sample(source, time, unit, DEFAULT_SCHEDULER.get());
}
	/**
	 * Periodically sample the given source observable, which means tracking the last value of
	 * the observable and periodically submitting it to the output observable.
	 * <p>No value is emitted until the source has produced its first element.</p>
	 * FIXME the error() and finish() are instantly propagated
	 * @param <T> the type of elements to watch
	 * @param source the source of elements
	 * @param time the time value to wait
	 * @param unit the time unit
	 * @param pool the scheduler pool where the periodic submission should happen.
	 * @return the sampled observable
	 */
	@Nonnull
	public static <T> Observable<T> sample(
			@Nonnull final Observable<? extends T> source,
			final long time,
			@Nonnull final TimeUnit unit,
			@Nonnull final Scheduler pool) {
		return new Observable<T>() {
			@Override
			public Closeable register(final Observer<? super T> observer) {
				final DefaultObserver<T> obs = new DefaultObserver<T>(true) {
					/** Are we waiting for the first event? */
					@GuardedBy("lock")
					boolean first = true;
					/** The current value. */
					@GuardedBy("lock")
					T current;
					// periodic task: emits the latest observed value, if any arrived yet
					final Closeable c = pool.schedule(new DefaultRunnable(lock) {
						@Override
						protected void onRun() {
							if (!first) {
								observer.next(current);
							}
						}
					}, time, time, unit);
					@Override
					protected void onClose() {
						Closeables.close0(c);
					}
					@Override
					public void onError(Throwable ex) {
						observer.error(ex);
					}
					@Override
					public void onFinish() {
						observer.finish();
					}
					@Override
					public void onNext(T value) {
						// just record the latest value; the periodic task emits it
						first = false;
						current = value;
					}
				};
				return Closeables.close(obs, source.register(obs));
			}
		};
	}
/**
* Creates an observable which accumultates the given source and submits each intermediate results to its subscribers.
* Example:<br>
* <code>range(0, 5).accumulate((x, y) => x + y)</code> produces a sequence of [0, 1, 3, 6, 10];<br>
* basically the first event (0) is just relayed and then every pair of values are simply added together and relayed
* @param <T> the element type to accumulate
* @param source the source of the accumulation
* @param accumulator the accumulator which takest the current accumulation value and the current observed value
* and returns a new accumulated value
* @return the observable
*/
@Nonnull
public static <T> Observable<T> scan(
@Nonnull final Observable<? extends T> source,
@Nonnull final Func2<? super T, ? super T, ? extends T> accumulator) {
return new Observable<T>() {
@Override
public Closeable register(final Observer<? super T> observer) {
return source.register(new Observer<T>() {
/** The current accumulated value. */
T current;
/** Are we waiting for the first value? */
boolean first = true;
@Override
public void error(Throwable ex) {
observer.error(ex);
}
@Override
public void finish() {
observer.finish();
}
@Override
public void next(T value) {
if (first) {
first = false;
current = value;
} else {
current = accumulator.invoke(current, value);
}
observer.next(current);
}
});
}
};
}
/**
* Creates an observable which accumultates the given source and submits each intermediate results to its subscribers.
* Example:<br>
* <code>range(0, 5).accumulate(1, (x, y) => x + y)</code> produces a sequence of [1, 2, 4, 7, 11];<br>
* basically the accumulation starts from zero and the first value (0) that comes in is simply added
* @param <T> the element type to accumulate
* @param source the source of the accumulation
* @param seed the initial value of the accumulation
* @param accumulator the accumulator which takest the current accumulation value and the current observed value
* and returns a new accumulated value
* @return the observable
*/
@Nonnull
public static <T> Observable<T> scan(
@Nonnull final Observable<? extends T> source,
final T seed,
@Nonnull final Func2<? super T, ? super T, ? extends T> accumulator) {
return new Observable<T>() {
@Override
public Closeable register(final Observer<? super T> observer) {
return source.register(new Observer<T>() {
/** The current accumulated value. */
T current = seed;
@Override
public void error(Throwable ex) {
observer.error(ex);
}
@Override
public void finish() {
observer.finish();
}
@Override
public void next(T value) {
current = accumulator.invoke(current, value);
observer.next(current);
}
});
}
};
}
/**
* Creates an observable which accumultates the given source and submits each intermediate results to its subscribers.
* Example:<br>
* <code>range(1, 5).accumulate0(1, (x, y) => x + y)</code> produces a sequence of [1, 2, 4, 7, 11, 16];<br>
* basically, it submits the seed value (1) and computes the current aggregate with the current value(1).
* @param <T> the element type to accumulate
* @param source the source of the accumulation
* @param seed the initial value of the accumulation
* @param accumulator the accumulator which takest the current accumulation value and the current observed value
* and returns a new accumulated value
* @return the observable
*/
@Nonnull
public static <T> Observable<T> scan0(
@Nonnull final Observable<? extends T> source,
final T seed,
@Nonnull final Func2<? super T, ? super T, ? extends T> accumulator) {
return new Observable<T>() {
@Override
public Closeable register(final Observer<? super T> observer) {
return source.register(new Observer<T>() {
/** The current accumulated value. */
T current;
/** Are we waiting for the first value? */
boolean first = true;
@Override
public void error(Throwable ex) {
observer.error(ex);
}
@Override
public void finish() {
observer.finish();
}
@Override
public void next(T value) {
if (first) {
first = false;
observer.next(seed);
current = accumulator.invoke(seed, value);
} else {
current = accumulator.invoke(current, value);
}
observer.next(current);
}
});
}
};
}
/**
* Use the mapper to transform the T source into an U source.
* @param <T> the type of the original observable
* @param <U> the type of the new observable
* @param source the source of Ts
* @param mapper the mapper from Ts to Us
* @return the observable on Us
*/
@Nonnull
public static <T, U> Observable<U> select(
@Nonnull final Observable<? extends T> source,
@Nonnull final Func1<? super T, ? extends U> mapper) {
return new Observable<U>() {
@Override
public Closeable register(final Observer<? super U> observer) {
return source.register(new Observer<T>() {
@Override
public void error(Throwable ex) {
observer.error(ex);
}
@Override
public void finish() {
observer.finish();
}
@Override
public void next(T value) {
observer.next(mapper.invoke(value));
}
});
}
};
}
/**
* Transforms the elements of the source observable into Us by using a selector which receives an index indicating
* how many elements have been transformed this far.
* @param <T> the source element type
* @param <U> the output element type
* @param source the source observable
* @param selector the selector taking an index and the current T
* @return the transformed observable
*/
public static <T, U> Observable<U> select(
@Nonnull final Observable<? extends T> source,
@Nonnull final Func2<? super Integer, ? super T, ? extends U> selector) {
return new Observable<U>() {
@Override
public Closeable register(final Observer<? super U> observer) {
return source.register(new Observer<T>() {
/** The running index. */
int index;
@Override
public void error(Throwable ex) {
observer.error(ex);
}
@Override
public void finish() {
observer.finish();
}
@Override
public void next(T value) {
observer.next(selector.invoke(index++, value));
}
});
}
};
}
/**
* Transform the given source of Ts into Us in a way that the
* selector might return an observable ofUs for a single T.
* The observable is fully channelled to the output observable.
* FIXME not sure how to do it
* @param <T> the input element type
* @param <U> the output element type
* @param source the source of Ts
* @param selector the selector to return an Iterable of Us
* @return the
*/
@Nonnull
public static <T, U> Observable<U> selectMany(
@Nonnull final Observable<? extends T> source,
@Nonnull final Func1<? super T, ? extends Observable<? extends U>> selector) {
return selectMany(source, selector, new Func2<T, U, U>() {
@Override
public U invoke(T param1, U param2) {
return param2;
};
});
}
	/**
	 * Creates an observable in which for each of Ts an observable of Vs are
	 * requested which in turn will be transformed by the resultSelector for each
	 * pair of T and V giving an U.
	 * <p>Inner observables run concurrently; the output finishes only after the
	 * source and every inner observable finished.</p>
	 * FIXME concurrency related questions
	 * @param <T> the source element type
	 * @param <U> the intermediate element type
	 * @param <V> the output element type
	 * @param source the source of Ts
	 * @param collectionSelector the selector which returns an observable of intermediate Vs
	 * @param resultSelector the selector which gives an U for a T and V
	 * @return the observable of Us
	 */
	@Nonnull
	public static <T, U, V> Observable<V> selectMany(
			@Nonnull final Observable<? extends T> source,
			@Nonnull final Func1<? super T, ? extends Observable<? extends U>> collectionSelector,
			@Nonnull final Func2<? super T, ? super U, ? extends V> resultSelector) {
		return new Observable<V>() {
			@Override
			public Closeable register(final Observer<? super V> observer) {
				DefaultObserver<T> obs = new DefaultObserver<T>(false) {
					/** The work in progress counter; starts at 1 counting the outer source itself. */
					final AtomicInteger wip = new AtomicInteger(1);
					/** The active observers. */
					final Map<DefaultObserver<? extends U>, Closeable> active = new HashMap<DefaultObserver<? extends U>, Closeable>();
					@Override
					protected void onClose() {
						// release every inner registration still running
						for (Closeable c : active.values()) {
							Closeables.close0(c);
						}
					}
					@Override
					public void onError(Throwable ex) {
						observer.error(ex);
						close();
					}
					@Override
					public void onFinish() {
						// the outer source counts as one unit of work; release it
						onLast();
					}
					/**
					 * The error signal from the inner.
					 * @param ex the exception
					 */
					void onInnerError(Throwable ex) {
						onError(ex);
					}
					/** The last one will signal a finish. */
					public void onLast() {
						if (wip.decrementAndGet() == 0) {
							observer.finish();
							close();
						}
					}
					@Override
					public void onNext(final T t) {
						Observable<? extends U> sub = collectionSelector.invoke(t);
						DefaultObserver<U> o = new DefaultObserver<U>(lock, true) {
							@Override
							protected void onClose() {
								active.remove(this);
							}
							@Override
							protected void onError(Throwable ex) {
								onInnerError(ex);
								close();
							}
							@Override
							protected void onFinish() {
								onLast();
								close();
							}
							@Override
							protected void onNext(U u) {
								observer.next(resultSelector.invoke(t, u));
							}
						};
						// account for the new inner observable before registering it
						wip.incrementAndGet();
						active.put(o, sub.register(o));
					}
				};
				return Closeables.close(obs, source.register(obs));
			}
		};
	}
/**
* Creates an observable of Us in a way when a source T arrives, the observable of
* Us is completely drained into the output. This is done again and again for
* each arriving Ts.
* @param <T> the type of the source, irrelevant
* @param <U> the output type
* @param source the source of Ts
* @param provider the source of Us
* @return the observable for Us
*/
@Nonnull
public static <T, U> Observable<U> selectMany(
@Nonnull Observable<? extends T> source,
@Nonnull Observable<? extends U> provider) {
return selectMany(source,
Functions.<T, Observable<? extends U>>constant(provider));
}
/**
* Transform the given source of Ts into Us in a way that the selector might return zero to multiple elements of Us for a single T.
* The iterable is flattened and submitted to the output
* @param <T> the input element type
* @param <U> the output element type
* @param source the source of Ts
* @param selector the selector to return an Iterable of Us
* @return the
*/
@Nonnull
public static <T, U> Observable<U> selectManyIterable(
@Nonnull final Observable<? extends T> source,
@Nonnull final Func1<? super T, ? extends Iterable<? extends U>> selector) {
return new Observable<U>() {
@Override
public Closeable register(final Observer<? super U> observer) {
return source.register(new Observer<T>() {
@Override
public void error(Throwable ex) {
observer.error(ex);
}
@Override
public void finish() {
observer.finish();
}
@Override
public void next(T value) {
for (U u : selector.invoke(value)) {
observer.next(u);
}
}
});
}
};
}
/**
* Compares two sequences and returns whether they are produce the same
* elements in terms of the null-safe object equality.
* <p>The equality only stands if the two sequence produces the same
* amount of values and those values are pairwise equal. If one of the sequences
* terminates before the other, the equality test will return false.</p>
* @param <T> the common element type
* @param first the first source of Ts
* @param second the second source of Ts
* @return the new observable
*/
public static <T> Observable<Boolean> sequenceEqual(
final Observable<? extends T> first,
final Observable<? extends T> second) {
return sequenceEqual(first, second, new Func2<T, T, Boolean>() {
@Override
public Boolean invoke(T param1, T param2) {
return param1 == param2 || (param1 != null && param1.equals(param2));
}
});
}
/**
* Compares two sequences and returns whether they are produce the same
* elements in terms of the comparer function.
* <p>The equality only stands if the two sequence produces the same
* amount of values and those values are pairwise equal. If one of the sequences
* terminates before the other, the equality test will return false.</p>
* @param <T> the common element type
* @param first the first source of Ts
* @param second the second source of Ts
* @param comparer the equality comparison function
* @return the new observable
*/
public static <T> Observable<Boolean> sequenceEqual(
final Observable<? extends T> first,
final Observable<? extends T> second,
final Func2<? super T, ? super T, Boolean> comparer) {
Observable<Boolean> pairwiseEqual = all(zip(first, second, comparer), Functions.<Boolean>identity());
Observable<Boolean> sameSize = combineLatestSent(count(first), count(second), Functions.equals());
return combineLatestSent(pairwiseEqual, sameSize, and());
}
/**
* Returns the single element of the given observable source.
* If the source is empty, a NoSuchElementException is thrown.
* If the source has more than one element, a TooManyElementsException is thrown.
* @param <T> the type of the element
* @param source the source of Ts
* @return the single element
*/
@Nonnull
public static <T> T single(
@Nonnull Observable<? extends T> source) {
CloseableIterator<T> it = toIterable(source).iterator();
try {
if (it.hasNext()) {
T one = it.next();
if (!it.hasNext()) {
return one;
}
throw new TooManyElementsException();
}
throw new NoSuchElementException();
} finally {
Closeables.close0(it);
}
}
/**
* Returns the single value in the observables.
* @param <T> the value type
* @param value the value
* @return the observable
*/
@Nonnull
public static <T> Observable<T> singleton(
final T value) {
return singleton(value, DEFAULT_SCHEDULER.get());
}
/**
* Returns the single value in the observables.
* @param <T> the value type
* @param value the value
* @param pool the pool where to submit the value to the observers
* @return the observable
*/
@Nonnull
public static <T> Observable<T> singleton(
final T value,
@Nonnull final Scheduler pool) {
return new Observable<T>() {
@Override
public Closeable register(final Observer<? super T> observer) {
return pool.schedule(new Runnable() {
@Override
public void run() {
observer.next(value);
observer.finish();
}
});
}
};
}
/**
* Skips the given amount of next() messages from source and relays
* the rest.
* @param <T> the element type
* @param source the source of Ts
* @param count the number of messages to skip
* @return the new observable
*/
@Nonnull
public static <T> Observable<T> skip(
@Nonnull final Observable<? extends T> source,
final int count) {
return new Observable<T>() {
@Override
public Closeable register(final Observer<? super T> observer) {
return source.register(new Observer<T>() {
/** The remaining count. */
int remaining = count;
@Override
public void error(Throwable ex) {
observer.error(ex);
}
@Override
public void finish() {
observer.finish();
}
@Override
public void next(T value) {
if (remaining <= 0) {
observer.next(value);
} else {
remaining--;
}
}
});
}
};
}
/**
* Skips the last <code>count</code> elements from the source observable.
* @param <T> the element type
* @param source the source of Ts
* @param count the number of elements to skip at the end
* @return the new observable
*/
@Nonnull
public static <T> Observable<T> skipLast(final Observable<? extends T> source, final int count) {
return new Observable<T>() {
@Override
public Closeable register(final Observer<? super T> observer) {
return source.register(new Observer<T>() {
final Queue<T> buffer = new ConcurrentLinkedQueue<T>();
@Override
public void error(Throwable ex) {
observer.error(ex);
}
@Override
public void finish() {
while (buffer.size() > count) {
observer.next(buffer.poll());
}
observer.finish();
}
@Override
public void next(T value) {
buffer.add(value);
}
});
}
};
}
	/**
	 * Skip the source elements until the signaller sends its first element.
	 * FIXME: If the signaller sends an error or only finish(), the relaying is never enabled?
	 * FIXME: once the singaller fires, it gets deregistered
	 * @param <T> the element type of the source
	 * @param <U> the element type of the signaller, irrelevant
	 * @param source the source of Ts
	 * @param signaller the source of Us
	 * @return the new observable
	 */
	@Nonnull
	public static <T, U> Observable<T> skipUntil(
			@Nonnull final Observable<? extends T> source,
			@Nonnull final Observable<? extends U> signaller) {
		return new Observable<T>() {
			@Override
			public Closeable register(final Observer<? super T> observer) {
				DefaultObserver<T> obs = new DefaultObserver<T>(true) {
					/** The signaller observer. */
					final DefaultObserver<U> signal;
					/** The signal closeable. */
					final Closeable c;
					/** The skip gate; guarded by this observer's shared lock. */
					boolean gate;
					{
						// the signal observer shares this observer's lock, so gate needs no extra synchronization
						signal = new DefaultObserver<U>(lock, true) {
							@Override
							public void onError(Throwable ex) {
								innerError(ex);
							}
							@Override
							public void onFinish() {
								if (!gate) {
									innerFinish(); // signaller will never turn the gate on
								}
							}
							@Override
							public void onNext(U value) {
								// the first signalled element opens the gate permanently
								gate = true;
							}
						};
						c = signaller.register(signal);
					}
					/**
					 * The callback for the inner error.
					 * @param ex the inner exception
					 */
					void innerError(Throwable ex) {
						error(ex);
					}
					/** The callback for an inner finish. */
					void innerFinish() {
						finish();
					}
					@Override
					protected void onClose() {
						Closeables.close0(c);
					}
					@Override
					public void onError(Throwable ex) {
						observer.error(ex);
					}
					@Override
					public void onFinish() {
						observer.finish();
					}
					@Override
					public void onNext(T value) {
						// relay only after the gate was opened by the signaller
						if (gate) {
							observer.next(value);
						}
					}
				};
				return Closeables.close(obs, source.register(obs));
			}
		};
	}
/**
* Skips the Ts from source while the specified condition returns true.
* If the condition returns false, all subsequent Ts are relayed,
* ignoring the condition further on. Errors and completion
* is relayed regardless of the condition.
* @param <T> the element types
* @param source the source of Ts
* @param condition the condition that must turn false in order to start relaying
* @return the new observable
*/
@Nonnull
public static <T> Observable<T> skipWhile(
@Nonnull final Observable<? extends T> source,
@Nonnull final Func1<? super T, Boolean> condition) {
return new Observable<T>() {
@Override
public Closeable register(final Observer<? super T> observer) {
return source.register(new Observer<T>() {
/** Can we relay stuff? */
boolean mayRelay;
@Override
public void error(Throwable ex) {
observer.error(ex);
}
@Override
public void finish() {
observer.finish();
}
@Override
public void next(T value) {
if (!mayRelay) {
mayRelay = !condition.invoke(value);
if (mayRelay) {
observer.next(value);
}
} else {
observer.next(value);
}
}
});
}
};
}
	/**
	 * Invokes the action asynchronously on the default scheduler pool and
	 * relays its finish() or error() messages.
	 * @param action the action to invoke
	 * @return the observable
	 */
	@Nonnull
	public static Observable<Void> start(
			@Nonnull final Action0 action) {
		return start(action, DEFAULT_SCHEDULER.get());
	}
/**
* Invokes the action asynchronously on the given pool and
* relays its finish() or error() messages.
* @param action the action to invoke
* @param pool the pool where the action should run
* @return the observable
*/
@Nonnull
public static Observable<Void> start(
@Nonnull final Action0 action,
@Nonnull final Scheduler pool) {
return new Observable<Void>() {
@Override
public Closeable register(final Observer<? super Void> observer) {
return pool.schedule(new Runnable() {
@Override
public void run() {
try {
action.invoke();
observer.finish();
} catch (Throwable ex) {
observer.error(ex);
}
}
});
}
};
}
	/**
	 * Invokes the function asynchronously on the default pool and
	 * relays its result followed by a finish. Exceptions are
	 * relayed as well.
	 * @param <T> the function return type
	 * @param func the function
	 * @return the observable
	 */
	@Nonnull
	public static <T> Observable<T> start(
			@Nonnull final Func0<? extends T> func) {
		return start(func, DEFAULT_SCHEDULER.get());
	}
/**
* Invokes the function asynchronously on the given pool and
* relays its result followed by a finish. Exceptions are
* relayed as well.
* @param <T> the function return type
* @param func the function
* @param pool the pool where the action should run
* @return the observable
*/
@Nonnull
public static <T> Observable<T> start(
@Nonnull final Func0<? extends T> func,
@Nonnull final Scheduler pool) {
return new Observable<T>() {
@Override
public Closeable register(final Observer<? super T> observer) {
return pool.schedule(new Runnable() {
@Override
public void run() {
try {
T value = func.invoke();
observer.next(value);
observer.finish();
} catch (Throwable ex) {
observer.error(ex);
}
}
});
}
};
}
	/**
	 * Start with the given iterable of values before relaying the Ts from the
	 * source. The iterable values are emitted on the default pool.
	 * @param <T> the element type
	 * @param source the source
	 * @param values the values to start with
	 * @return the new observable
	 */
	@Nonnull
	public static <T> Observable<T> startWith(
			@Nonnull Observable<? extends T> source,
			@Nonnull Iterable<? extends T> values) {
		return startWith(source, values, DEFAULT_SCHEDULER.get());
	}
	/**
	 * Start with the given iterable of values before relaying the Ts from the
	 * source. The iterable values are emitted on the given pool.
	 * <p>Implemented as the concatenation of the iterable and the source.</p>
	 * @param <T> the element type
	 * @param source the source
	 * @param values the values to start with
	 * @param pool the pool where the iterable values should be emitted
	 * @return the new observable
	 */
	@Nonnull
	public static <T> Observable<T> startWith(
			@Nonnull Observable<? extends T> source,
			@Nonnull Iterable<? extends T> values,
			@Nonnull Scheduler pool) {
		return concat(toObservable(values, pool), source);
	}
	/**
	 * Start with the given single value before relaying the Ts from the
	 * source. The value is emitted on the default pool.
	 * @param <T> the element type
	 * @param source the source
	 * @param value the single value to start with
	 * @return the new observable
	 */
	@Nonnull
	public static <T> Observable<T> startWith(
			@Nonnull Observable<? extends T> source,
			T value) {
		return startWith(source, Collections.singleton(value), DEFAULT_SCHEDULER.get());
	}
	/**
	 * Start with the given single value before relaying the Ts from the
	 * source. The value is emitted on the given pool.
	 * @param <T> the element type
	 * @param source the source
	 * @param value the value to start with
	 * @param pool the pool where the iterable values should be emitted
	 * @return the new observable
	 */
	@Nonnull
	public static <T> Observable<T> startWith(
			@Nonnull Observable<? extends T> source,
			T value,
			@Nonnull Scheduler pool) {
		return startWith(source, Collections.singleton(value), pool);
	}
/**
* Wrap the given observable into an new Observable instance, which calls the original register() method
* on the supplied pool.
* @param <T> the type of the objects to observe
* @param observable the original observable
* @param pool the pool to perform the original subscribe() call
* @return the new observable
*/
@Nonnull
public static <T> Observable<T> registerOn(
@Nonnull final Observable<T> observable,
@Nonnull final Scheduler pool) {
return new Observable<T>() {
@Override
public Closeable register(final Observer<? super T> observer) {
// start the registration asynchronously
final BlockingQueue<Closeable> cq = new LinkedBlockingQueue<Closeable>();
pool.schedule(new Runnable() {
@Override
public void run() {
cq.add(observable.register(observer));
}
});
// use the disposable future when the deregistration is required
return new Closeable() {
@Override
public void close() {
pool.schedule(new Runnable() {
@Override
public void run() {
try {
cq.take().close(); // wait until the dispose becomes available then call it
} catch (InterruptedException e) {
throw new RuntimeException();
} catch (IOException e) {
throw new RuntimeException();
}
}
});
}
};
}
};
}
	/**
	 * Computes and signals the sum of the values of the BigDecimal source.
	 * The source may not send nulls.
	 * <p>The divider function is identityFirst, i.e., the accumulated sum is
	 * emitted unchanged when the source finishes.</p>
	 * @param source the source of BigDecimals to aggregate.
	 * @return the observable for the sum value
	 */
	@Nonnull
	public static Observable<BigDecimal> sumBigDecimal(
			@Nonnull final Observable<BigDecimal> source) {
		return aggregate(source, Functions.sumBigDecimal(), Functions.<BigDecimal, Integer>identityFirst());
	}
	/**
	 * Computes and signals the sum of the values of the BigInteger source.
	 * The source may not send nulls.
	 * <p>The divider function is identityFirst, i.e., the accumulated sum is
	 * emitted unchanged when the source finishes.</p>
	 * @param source the source of BigIntegers to aggregate.
	 * @return the observable for the sum value
	 */
	@Nonnull
	public static Observable<BigInteger> sumBigInteger(
			@Nonnull final Observable<BigInteger> source) {
		return aggregate(source, Functions.sumBigInteger(), Functions.<BigInteger, Integer>identityFirst());
	}
	/**
	 * Computes and signals the sum of the values of the Double source.
	 * The source may not send nulls.
	 * <p>The divider function is identityFirst, i.e., the accumulated sum is
	 * emitted unchanged when the source finishes.</p>
	 * @param source the source of Doubles to aggregate.
	 * @return the observable for the sum value
	 */
	@Nonnull
	public static Observable<Double> sumDouble(
			@Nonnull final Observable<Double> source) {
		return aggregate(source, Functions.sumDouble(), Functions.<Double, Integer>identityFirst());
	}
	/**
	 * Computes and signals the sum of the values of the Float source.
	 * The source may not send nulls.
	 * <p>The divider function is identityFirst, i.e., the accumulated sum is
	 * emitted unchanged when the source finishes.</p>
	 * @param source the source of Floats to aggregate.
	 * @return the observable for the sum value
	 */
	@Nonnull
	public static Observable<Float> sumFloat(
			@Nonnull final Observable<Float> source) {
		return aggregate(source, Functions.sumFloat(), Functions.<Float, Integer>identityFirst());
	}
	/**
	 * Computes and signals the sum of the values of the Integer source.
	 * The source may not send nulls. An empty source produces an empty sum
	 * (i.e., no value is emitted, only finish()).
	 * @param source the source of integers to aggregate.
	 * @return the observable for the sum value
	 */
	@Nonnull
	public static Observable<Integer> sumInt(
			@Nonnull final Observable<Integer> source) {
		return aggregate(source, Functions.sumInteger(), Functions.<Integer, Integer>identityFirst());
	}
/**
* Computes and signals the sum of the values of the Integer source by using
* a double intermediate representation.
* The source may not send nulls. An empty source produces an empty sum
* @param source the source of integers to aggregate.
* @return the observable for the sum value
*/
@Nonnull
public static Observable<Double> sumIntAsDouble(
@Nonnull final Observable<Integer> source) {
return aggregate(source,
new Func2<Double, Integer, Double>() {
@Override
public Double invoke(Double param1, Integer param2) {
return param1 + param2;
}
},
Functions.<Double, Integer>identityFirst()
);
}
	/**
	 * Computes and signals the sum of the values of the Long source.
	 * The source may not send nulls.
	 * <p>The divider function is identityFirst, i.e., the accumulated sum is
	 * emitted unchanged when the source finishes.</p>
	 * @param source the source of longs to aggregate.
	 * @return the observable for the sum value
	 */
	@Nonnull
	public static Observable<Long> sumLong(
			@Nonnull final Observable<Long> source) {
		return aggregate(source, Functions.sumLong(), Functions.<Long, Integer>identityFirst());
	}
/**
* Computes and signals the sum of the values of the Long sourceby using
* a double intermediate representation.
* The source may not send nulls.
* @param source the source of longs to aggregate.
* @return the observable for the sum value
*/
@Nonnull
public static Observable<Double> sumLongAsDouble(
@Nonnull final Observable<Long> source) {
return aggregate(source,
new Func2<Double, Long, Double>() {
@Override
public Double invoke(Double param1, Long param2) {
return param1 + param2;
}
},
Functions.<Double, Integer>identityFirst()
);
}
	/**
	 * Returns an observer which relays Ts from the source observables in a way, when
	 * a new inner observable comes in, the previous one is deregistered and the new one is
	 * continued with. Basically, it is an unbounded ys.takeUntil(xs).takeUntil(zs)...
	 * <p>Note: the outer sequence finishing does not finish the output; only an
	 * inner finish() or any error() does.</p>
	 * @param <T> the element type
	 * @param sources the source of multiple observables of Ts.
	 * @return the new observable
	 */
	public static <T> Observable<T> switchToNext(final Observable<? extends Observable<? extends T>> sources) {
		return new Observable<T>() {
			@Override
			public Closeable register(final Observer<? super T> observer) {
				DefaultObserver<Observable<? extends T>> outer
				= new DefaultObserver<Observable<? extends T>>(false) {
					/** The inner observer registration; replaced whenever a new inner observable arrives. */
					@GuardedBy("lock")
					Closeable inner;
					// shares the outer lock so inner and outer callbacks are serialized
					DefaultObserver<T> innerObserver = new DefaultObserver<T>(lock, true) {
						@Override
						protected void onError(Throwable ex) {
							innerError(ex);
						}
						@Override
						protected void onFinish() {
							innerFinish();
						}
						@Override
						protected void onNext(T value) {
							observer.next(value);
						}
					};
					/** Called from the inner observer when an error condition occurs. */
					void innerError(Throwable ex) {
						error(ex);
					}
					/** Called from the inner observer when it finished. */
					void innerFinish() {
						observer.finish();
						close();
					}
					@Override
					protected void onClose() {
						Closeables.close0(inner);
					}
					@Override
					protected void onError(Throwable ex) {
						observer.error(ex);
					}
					@Override
					protected void onFinish() {
						// nothing to do: the current inner observable keeps running
					}
					@Override
					protected void onNext(Observable<? extends T> value) {
						// drop the previous inner registration, switch to the new observable
						Closeables.close0(inner);
						inner = value.register(innerObserver);
					}
				};
				return sources.register(outer);
			}
		};
	}
/**
* Creates an observable which takes the specified number of
* Ts from the source, unregisters and completes.
* @param <T> the element type
* @param source the source of Ts
* @param count the number of elements to relay
* @return the new observable
*/
@Nonnull
public static <T> Observable<T> take(
@Nonnull final Observable<? extends T> source,
final int count) {
return new Observable<T>() {
@Override
public Closeable register(final Observer<? super T> observer) {
DefaultObserverEx<T> o = new DefaultObserverEx<T>(true) {
/** The countdown. */
protected int i = count;
@Override
protected void onNext(T value) {
observer.next(value);
if (--i == 0) {
finish();
}
}
@Override
protected void onError(Throwable ex) {
observer.error(ex);
}
@Override
protected void onFinish() {
observer.finish();
}
};
return o.registerWith(source);
}
};
}
/**
* Returns an observable which returns the last <code>count</code>
* elements from the source observable.
* @param <T> the element type
* @param source the source of the elements
* @param count the number elements to return
* @return the new observable
*/
public static <T> Observable<T> takeLast(final Observable<? extends T> source, final int count) {
return new Observable<T>() {
@Override
public Closeable register(final Observer<? super T> observer) {
return source.register(new Observer<T>() {
final CircularBuffer<T> buffer = new CircularBuffer<T>(count);
@Override
public void error(Throwable ex) {
observer.error(ex);
}
@Override
public void finish() {
while (!buffer.isEmpty()) {
observer.next(buffer.take());
}
observer.finish();
}
@Override
public void next(T value) {
buffer.add(value);
}
});
}
};
}
	/**
	 * Creates an observable which takes values from the source until
	 * the signaller produces a value. If the signaller never signals,
	 * all source elements are relayed.
	 * <p>Note: the signaller finishing (without a value) or erroring also
	 * terminates the output.</p>
	 * @param <T> the element type
	 * @param <U> the signaller element type, irrelevant
	 * @param source the source of Ts
	 * @param signaller the source of Us
	 * @return the new observable
	 */
	@Nonnull
	public static <T, U> Observable<T> takeUntil(
			@Nonnull final Observable<? extends T> source,
			@Nonnull final Observable<U> signaller) {
		return new Observable<T>() {
			@Override
			public Closeable register(final Observer<? super T> observer) {
				// fair lock shared between the source and signaller observers
				final Lock lock0 = new ReentrantLock(true);
				DefaultObserverEx<T> o = new DefaultObserverEx<T>(lock0, true) {
					@Override
					protected void onRegister() {
						add("signaller", signaller.register(new Observer<U>() {
							@Override
							public void next(U value) {
								// first signal value stops the relay
								innerFinish();
							}
							@Override
							public void error(Throwable ex) {
								innerError(ex);
							}
							@Override
							public void finish() {
								innerFinish();
							}
						}));
					}
					/** Error call from the inner. */
					protected void innerError(Throwable t) {
						error(t);
					}
					/** Finish call from the inner. */
					protected void innerFinish() {
						finish();
					}
					@Override
					protected void onNext(T value) {
						observer.next(value);
					}
					@Override
					protected void onError(Throwable ex) {
						observer.error(ex);
					}
					@Override
					protected void onFinish() {
						observer.finish();
					}
				};
				return o.registerWith(source);
			}
		};
	}
/**
* Creates an observable which takes values from source until
* the predicate returns false for the current element, then skips the remaining values.
* @param <T> the element type
* @param source the source of Ts
* @param predicate the predicate
* @return the new observable
*/
@Nonnull
public static <T> Observable<T> takeWhile(
@Nonnull final Observable<? extends T> source,
@Nonnull final Func1<? super T, Boolean> predicate) {
return new Observable<T>() {
@Override
public Closeable register(final Observer<? super T> observer) {
DefaultObserverEx<T> obs = new DefaultObserverEx<T>(true) {
@Override
public void onError(Throwable ex) {
observer.error(ex);
}
@Override
public void onFinish() {
observer.finish();
}
@Override
public void onNext(T value) {
if (predicate.invoke(value)) {
observer.next(value);
} else {
observer.finish();
close();
}
}
};
return obs.registerWith(source);
}
};
}
	/**
	 * Creates an observable which fires the last value
	 * from source when the given timespan elapsed without a new
	 * value occurring from the source. It is basically how Content Assistant
	 * popup works after the user pauses in its typing. Uses the default scheduler.
	 * @param <T> the value type
	 * @param source the source of Ts
	 * @param delay how much time should elapse since the last event to actually forward that event
	 * @param unit the delay time unit
	 * @return the observable
	 */
	@Nonnull
	public static <T> Observable<T> throttle(
			@Nonnull final Observable<? extends T> source,
			final long delay,
			@Nonnull final TimeUnit unit) {
		return throttle(source, delay, unit, DEFAULT_SCHEDULER.get());
	}
	/**
	 * Creates an observable which fires the last value
	 * from source when the given timespan elapsed without a new
	 * value occurring from the source. It is basically how Content Assistant
	 * popup works after the user pauses in its typing.
	 * @param <T> the value type
	 * @param source the source of Ts
	 * @param delay how much time should elapse since the last event to actually forward that event
	 * @param unit the delay time unit
	 * @param pool the pool where the delay-watcher should operate
	 * @return the observable
	 */
	@Nonnull
	public static <T> Observable<T> throttle(
			@Nonnull final Observable<? extends T> source,
			final long delay,
			@Nonnull final TimeUnit unit,
			@Nonnull final Scheduler pool) {
		return new Observable<T>() {
			@Override
			public Closeable register(final Observer<? super T> observer) {
				final DefaultObserver<T> obs = new DefaultObserver<T>(true) {
					/** The last seen value. */
					T last;
					/** The closeable of the pending timer, if any. */
					Closeable c;
					/** The timeout action: fires when the quiet period elapses without a newer value. */
					final DefaultRunnable r = new DefaultRunnable(lock) {
						@Override
						public void onRun() {
							if (!cancelled()) {
								observer.next(last);
							}
						}
					};
					@Override
					protected void onClose() {
						Closeables.close0(c);
					}
					@Override
					public void onError(Throwable ex) {
						observer.error(ex);
					}
					@Override
					public void onFinish() {
						observer.finish();
					}
					@Override
					public void onNext(T value) {
						// remember the newest value and restart the quiet-period timer
						last = value;
						Closeables.close0(c);
						c = pool.schedule(r, delay, unit);
					}
				};
				return Closeables.close(obs, source.register(obs));
			}
		};
	}
	/**
	 * Creates an observable which instantly sends the exception to
	 * its subscribers while running on the default pool.
	 * @param <T> the element type, irrelevant
	 * @param ex the exception to throw
	 * @return the new observable
	 */
	@Nonnull
	public static <T> Observable<T> throwException(
			@Nonnull final Throwable ex) {
		return throwException(ex, DEFAULT_SCHEDULER.get());
	}
/**
* Creates an observable which instantly sends the exception to
* its subscribers while running on the given pool.
* @param <T> the element type, irrelevant
* @param ex the exception to throw
* @param pool the pool from where to send the values
* @return the new observable
*/
@Nonnull
public static <T> Observable<T> throwException(
@Nonnull final Throwable ex,
@Nonnull final Scheduler pool) {
return new Observable<T>() {
@Override
public Closeable register(final Observer<? super T> observer) {
return pool.schedule(new Runnable() {
@Override
public void run() {
observer.error(ex);
}
});
}
};
}
	/**
	 * Returns an observable which produces an ordered sequence of numbers with the specified delay.
	 * It uses the default scheduler pool.
	 * @param start the starting value of the tick inclusive
	 * @param end the finishing value of the tick exclusive
	 * @param delay the delay value
	 * @param unit the time unit of the delay
	 * @return the observer
	 */
	@Nonnull
	public static Observable<Long> tick(
			final long start,
			final long end,
			final long delay,
			@Nonnull final TimeUnit unit) {
		return tick(start, end, delay, unit, DEFAULT_SCHEDULER.get());
	}
	/**
	 * Returns an observable which produces an ordered sequence of numbers with the specified delay.
	 * @param start the starting value of the tick inclusive
	 * @param end the finishing value of the tick exclusive
	 * @param delay the delay value
	 * @param unit the time unit of the delay
	 * @param pool the scheduler pool for the wait
	 * @return the observer
	 * @throws IllegalArgumentException if start &gt; end
	 */
	@Nonnull
	public static Observable<Long> tick(
			final long start,
			final long end,
			final long delay,
			@Nonnull final TimeUnit unit,
			@Nonnull final Scheduler pool) {
		if (start > end) {
			throw new IllegalArgumentException("ensure start <= end");
		}
		return new Observable<Long>() {
			@Override
			public Closeable register(final Observer<? super Long> observer) {
				// scheduled with (delay, delay, unit): presumably initial delay + repeat period -- see Scheduler.schedule
				return pool.schedule(new DefaultRunnable() {
					/** The current value. */
					long current = start;
					@Override
					protected void onRun() {
						if (current < end && !cancelled()) {
							observer.next(current++);
						} else {
							// range exhausted (or cancelled): complete once and stop the repetition
							if (!cancelled()) {
								observer.finish();
							}
							cancel();
						}
					}
				}, delay, delay, unit);
			}
		};
	}
/**
* Returns an observable which produces an ordered sequence of numbers with the specified delay.
* It uses the default scheduler pool.
* @param delay the delay value
* @param unit the time unit of the delay
* @return the observer
*/
@Nonnull
public static Observable<Long> tick(
@Nonnull final long delay,
@Nonnull final TimeUnit unit) {
return tick(0, Long.MAX_VALUE, delay, unit, DEFAULT_SCHEDULER.get());
}
	/**
	 * Creates an observable which relays events if they arrive
	 * from the source observable within the specified amount of time
	 * or it signals a java.util.concurrent.TimeoutException.
	 * @param <T> the element type to observe
	 * @param source the source observable
	 * @param time the maximum allowed timespan between events
	 * @param unit the time unit
	 * @return the observer.
	 */
	@Nonnull
	public static <T> Observable<T> timeout(
			@Nonnull final Observable<? extends T> source,
			final long time,
			@Nonnull final TimeUnit unit) {
		return timeout(source, time, unit, DEFAULT_SCHEDULER.get());
	}
	/**
	 * Creates an observable which relays events if they arrive
	 * from the source observable within the specified amount of time
	 * or it switches to the <code>other</code> observable.
	 * FIXME not sure if the timeout should happen only when
	 * distance between elements get to large or just the first element
	 * does not arrive within the specified timespan.
	 * @param <T> the element type to observe
	 * @param source the source observable
	 * @param time the maximum allowed timespan between events
	 * @param unit the time unit
	 * @param other the other observable to continue with in case a timeout occurs
	 * @return the observer.
	 */
	@Nonnull
	public static <T> Observable<T> timeout(
			@Nonnull final Observable<? extends T> source,
			final long time,
			@Nonnull final TimeUnit unit,
			@Nonnull final Observable<? extends T> other) {
		return timeout(source, time, unit, other, DEFAULT_SCHEDULER.get());
	}
	/**
	 * Creates an observable which relays events if they arrive
	 * from the source observable within the specified amount of time
	 * or it switches to the <code>other</code> observable.
	 * <p>The timeout is armed on registration and re-armed after every
	 * received value, so it applies to the gap between consecutive events
	 * as well as to the first event.</p>
	 * @param <T> the element type to observe
	 * @param source the source observable
	 * @param time the maximum allowed timespan between events
	 * @param unit the time unit
	 * @param other the other observable to continue with in case a timeout occurs
	 * @param pool the scheduler pool for the timeout evaluation
	 * @return the observer.
	 */
	@Nonnull
	public static <T> Observable<T> timeout(
			@Nonnull final Observable<? extends T> source,
			final long time,
			@Nonnull final TimeUnit unit,
			@Nonnull final Observable<? extends T> other,
			@Nonnull final Scheduler pool) {
		return new Observable<T>() {
			@Override
			public Closeable register(final Observer<? super T> observer) {
				DefaultObserver<T> obs = new DefaultObserver<T>(true) {
					/** The current source. */
					@GuardedBy("lock")
					Closeable src;
					/** The current timer.*/
					@GuardedBy("lock")
					Closeable timer;
					{
						// register with the source and arm the first timeout under the lock
						lock.lock();
						try {
							src = source.register(this);
							registerTimer();
						} finally {
							lock.unlock();
						}
					}
					@Override
					protected void onClose() {
						Closeables.close0(timer);
						Closeables.close0(src);
					}
					@Override
					protected void onError(Throwable ex) {
						observer.error(ex);
					}
					@Override
					protected void onFinish() {
						observer.finish();
					}
					@Override
					protected void onNext(T value) {
						// cancel the pending timeout, relay, then re-arm for the next gap
						if (timer != null) {
							Closeables.close0(timer);
							timer = null;
						}
						observer.next(value);
						registerTimer();
					}
					/**
					 * Register the timer that when fired, switches to the second
					 * observable sequence
					 */
					private void registerTimer() {
						timer = pool.schedule(new DefaultRunnable(lock) {
							@Override
							public void onRun() {
								if (!cancelled()) {
									// timeout: detach from the source and hand the observer to the fallback
									Closeables.close0(src);
									timer = null;
									src = other.register(observer);
								}
							}
						}, time, unit);
					}
				};
				return obs;
			}
		};
	}
	/**
	 * Creates an observable which relays events if they arrive
	 * from the source observable within the specified amount of time
	 * or it signals a java.util.concurrent.TimeoutException.
	 * <p>Delegates to the fallback-observable overload with a
	 * throwException(TimeoutException) fallback, so the timeout applies
	 * between consecutive events as well as to the first event.</p>
	 * @param <T> the element type to observe
	 * @param source the source observable
	 * @param time the maximum allowed timespan between events
	 * @param unit the time unit
	 * @param pool the scheduler pool for the timeout evaluation
	 * @return the observer.
	 */
	@Nonnull
	public static <T> Observable<T> timeout(
			@Nonnull final Observable<? extends T> source,
			final long time,
			@Nonnull final TimeUnit unit,
			@Nonnull final Scheduler pool) {
		Observable<T> other = Reactive.throwException(new TimeoutException());
		return timeout(source, time, unit, other, pool);
	}
/**
* Creates an array from the observable sequence elements by using the given
* array for the template to create a dynamicly typed array of Ts.
* <p><b>Exception semantics:</b> if the source throws an exception, that exception
* is forwarded (e.g., no partial array is created).</p>
* @param <T> the source element type
* @param source the source of Ts
* @param a the template array, noes not change its value
* @return the observable
*/
public static <T> Observable<T[]> toArray(
@Nonnull final Observable<? extends T> source,
@Nonnull final T[] a) {
final Class<?> ct = a.getClass().getComponentType();
return new Observable<T[]>() {
@Override
public Closeable register(final Observer<? super T[]> observer) {
return source.register(new Observer<T>() {
/** The buffer for the Ts. */
final List<T> list = new LinkedList<T>();
@Override
public void error(Throwable ex) {
observer.error(ex);
}
@Override
public void finish() {
@SuppressWarnings("unchecked") T[] arr = (T[])Array.newInstance(ct, list.size());
observer.next(list.toArray(arr));
observer.finish();
}
@Override
public void next(T value) {
list.add(value);
}
});
}
};
}
	/**
	 * Creates an Object array from the observable sequence elements.
	 * <p><b>Exception semantics:</b> if the source throws an exception, that exception
	 * is forwarded (e.g., no partial array is created).</p>
	 * @param source the source of anything
	 * @return the object array
	 */
	@Nonnull
	public static Observable<Object[]> toArray(@Nonnull final Observable<?> source) {
		return toArray(source, new Object[0]);
	}
	/**
	 * Convert the given observable instance into a classical iterable instance.
	 * <p>The resulting iterable does not support the {@code remove()} method;
	 * {@code hasNext()}/{@code next()} block until the observable produces a
	 * value or terminates.</p>
	 * @param <T> the element type to iterate
	 * @param observable the original observable
	 * @return the iterable
	 */
	@Nonnull
	public static <T> CloseableIterable<T> toIterable(
			@Nonnull final Observable<? extends T> observable) {
		return new CloseableIterable<T>() {
			@Override
			public CloseableIterator<T> iterator() {
				// the observer feeds Option-wrapped signals into this queue; the iterator drains it
				final LinkedBlockingQueue<Option<T>> queue = new LinkedBlockingQueue<Option<T>>();
				final Closeable c = observable.register(new Observer<T>() {
					@Override
					public void error(Throwable ex) {
						queue.add(Option.<T>error(ex));
					}
					@Override
					public void finish() {
						queue.add(Option.<T>none());
					}
					@Override
					public void next(T value) {
						queue.add(Option.some(value));
					}
				});
				return new CloseableIterator<T>() {
					/** Close the association if there is no more elements. */
					Closeable close = c;
					/** The peek value due hasNext. */
					// NOTE(review): compared against Option.none() by identity below --
					// assumes Option.none() returns a singleton; confirm in Option
					Option<T> peek;
					/** Indicator if there was a hasNext() call before the next() call. */
					boolean peekBeforeNext;
					/** Close the helper observer. */
					@Override
					public void close() throws IOException {
						Closeable cl = close;
						close = null;
						if (cl != null) {
							cl.close();
						}
					}
					@Override
					protected void finalize() throws Throwable {
						// safety net: deregister from the observable if the caller forgot to close()
						close();
					}
					@Override
					public boolean hasNext() {
						if (peek != Option.none()) {
							if (!peekBeforeNext) {
								try {
									peek = queue.take();
								} catch (InterruptedException e) {
									throw new RuntimeException(e);
								}
							}
							peekBeforeNext = true;
						}
						boolean result = peek != Option.none();
						return result;
					}
					@Override
					public T next() {
						if (peekBeforeNext) {
							// consume the value already fetched by hasNext()
							peekBeforeNext = false;
							if (peek != Option.none()) {
								return peek.value();
							}
							throw new NoSuchElementException();
						}
						peekBeforeNext = false;
						if (peek != Option.none()) {
							try {
								peek = queue.take();
							} catch (InterruptedException e) {
								throw new RuntimeException(e);
							}
							if (peek != Option.none()) {
								return peek.value();
							}
						}
						throw new NoSuchElementException();
					}
					@Override
					public void remove() {
						throw new UnsupportedOperationException();
					}
				};
			}
		};
	}
/**
* Collect the elements of the source observable into a single list.
* @param <T> the source element type
* @param source the source observable
* @return the new observable
*/
public static <T> Observable<List<T>> toList(
final Observable<? extends T> source
) {
return new Observable<List<T>>() {
@Override
public Closeable register(final Observer<? super List<T>> observer) {
return source.register(new Observer<T>() {
/** The list for aggregation. */
final List<T> list = new LinkedList<T>();
@Override
public void error(Throwable ex) {
observer.error(ex);
}
@Override
public void finish() {
observer.next(new ArrayList<T>(list));
observer.finish();
}
@Override
public void next(T value) {
list.add(value);
}
});
}
};
}
/**
* Maps the given source of Ts by using the key extractor and
* returns a single Map of them. The keys are compared against each other
* by the <code>Object.equals()</code> semantics.
* <p><b>Exception semantics:</b> if the source throws an exception, that exception
* is forwarded (e.g., no partial map is created).</p>
* @param <T> the element type
* @param <K> the key type
* @param <V> the value type
* @param source the source of Ts
* @param keySelector the key selector
* @param valueSelector the value selector
* @return the new observable
*/
public static <T, K, V> Observable<Map<K, V>> toMap(
final Observable<? extends T> source,
final Func1<? super T, ? extends K> keySelector,
final Func1<? super T, ? extends V> valueSelector
) {
return new Observable<Map<K, V>>() {
@Override
public Closeable register(final Observer<? super Map<K, V>> observer) {
return source.register(new Observer<T>() {
/** The map. */
final Map<K, V> map = new HashMap<K, V>();
@Override
public void error(Throwable ex) {
observer.error(ex);
}
@Override
public void finish() {
observer.next(map);
observer.finish();
}
@Override
public void next(T value) {
map.put(keySelector.invoke(value), valueSelector.invoke(value));
}
});
}
};
}
	/**
	 * Maps the given source of Ts by using the key and value extractor and
	 * returns a single Map of them.
	 * <p><b>Exception semantics:</b> if the source throws an exception, that exception
	 * is forwarded (e.g., no partial map is created).</p>
	 * <p>NOTE(review): the Key wrapper uses the custom comparer for equals() but
	 * the key's own hashCode(); a comparer that equates keys with different
	 * hash codes will not work correctly with the backing HashMap -- confirm
	 * the supplied comparers are hash-consistent.</p>
	 * @param <T> the element type
	 * @param <K> the key type
	 * @param <V> the value type
	 * @param source the source of Ts
	 * @param keySelector the key selector
	 * @param valueSelector the value selector
	 * @param keyComparer the comparison function for keys
	 * @return the new observable
	 */
	public static <T, K, V> Observable<Map<K, V>> toMap(
		final Observable<? extends T> source,
		final Func1<? super T, ? extends K> keySelector,
		final Func1<? super T, ? extends V> valueSelector,
		final Func2<? super K, ? super K, Boolean> keyComparer
	) {
		return new Observable<Map<K, V>>() {
			@Override
			public Closeable register(final Observer<? super Map<K, V>> observer) {
				return source.register(new Observer<T>() {
					/** The key class with custom equality comparer. */
					class Key {
						/** The key value. */
						final K key;
						/**
						 * Constructor.
						 * @param key the key
						 */
						Key(K key) {
							this.key = key;
						}
						@Override
						public boolean equals(Object obj) {
							if (obj instanceof Key) {
								return keyComparer.invoke(key, ((Key)obj).key);
							}
							return false;
						}
						@Override
						public int hashCode() {
							return key != null ? key.hashCode() : 0;
						}
					}
					/** The map. */
					final Map<Key, V> map = new HashMap<Key, V>();
					@Override
					public void error(Throwable ex) {
						observer.error(ex);
					}
					@Override
					public void finish() {
						// unwrap the Key objects before handing the map to the observer
						Map<K, V> result = new HashMap<K, V>();
						for (Map.Entry<Key, V> e : map.entrySet()) {
							result.put(e.getKey().key, e.getValue());
						}
						observer.next(result);
						observer.finish();
					}
					@Override
					public void next(T value) {
						Key k = new Key(keySelector.invoke(value));
						V v = valueSelector.invoke(value);
						map.put(k, v);
					}
				});
			}
		};
	}
/**
* Maps the given source of Ts by using the key extractor and
* returns a single Map of them.
* <p><b>Exception semantics:</b> if the source throws an exception, that exception
* is forwarded (e.g., no partial map is created).</p>
* @param <T> the element type
* @param <K> the key type
* @param source the source of Ts
* @param keySelector the key selector
* @param keyComparer the key comparer function
* @return the new observable
*/
public static <K, T> Observable<Map<K, T>> toMap(
final Observable<? extends T> source,
final Func1<? super T, ? extends K> keySelector,
final Func2<? super K, ? super K, Boolean> keyComparer
) {
return toMap(source, keySelector, Functions.<T>identity(), keyComparer);
}
/**
* Maps the given source of Ts by using the key extractor and
* returns a single Map of them. The keys are compared against each other
* by the <code>Object.equals()</code> semantics.
* <p><b>Exception semantics:</b> if the source throws an exception, that exception
* is forwarded (e.g., no partial map is created).</p>
* @param <T> the element type
* @param <K> the key type
* @param source the source of Ts
* @param keySelector the key selector
* @return the new observable
*/
public static <K, T> Observable<Map<K, T>> toMap(
final Observable<T> source,
final Func1<? super T, ? extends K> keySelector
) {
return toMap(source, keySelector, Functions.<T>identity());
}
/**
* Maps the given source of Ts by using the key extractor and
* returns a single multi-map of them. The keys are compared against each other
* by the <code>Object.equals()</code> semantics.
* <p><b>Exception semantics:</b> if the source throws an exception, that exception
* is forwarded (e.g., no partial map is created).</p>
* @param <T> the element type
* @param <K> the key type
* @param source the source of Ts
* @param keySelector the key selector
* @param collectionSupplier the function which retuns a collection to hold the Vs.
* @return the new observable
*/
public static <T, K> Observable<Map<K, Collection<T>>> toMultiMap(
final Observable<? extends T> source,
final Func1<? super T, ? extends K> keySelector,
final Func0<? extends Collection<T>> collectionSupplier
) {
return toMultiMap(
source,
keySelector,
collectionSupplier,
Functions.<T>identity());
}
/**
* Maps the given source of Ts by using the key extractor and
* returns a single multi-map of them.
* <p><b>Exception semantics:</b> if the source throws an exception, that exception
* is forwarded (e.g., no partial map is created).</p>
* @param <T> the element type
* @param <K> the key type
* @param source the source of Ts
* @param keySelector the key selector
* @param collectionSupplier the function which retuns a collection to hold the Vs.
* @param keyComparer the comparison function for keys
* @return the new observable
*/
public static <T, K> Observable<Map<K, Collection<T>>> toMultiMap(
final Observable<? extends T> source,
final Func1<? super T, ? extends K> keySelector,
final Func0<? extends Collection<T>> collectionSupplier,
final Func2<? super K, ? super K, Boolean> keyComparer
) {
return toMultiMap(
source,
keySelector,
collectionSupplier,
Functions.<T>identity(),
keyComparer);
}
/**
* Maps the given source of Ts by using the key and value extractor and
* returns a single multi-map of them. The keys are compared against each other
* by the <code>Object.equals()</code> semantics.
* <p><b>Exception semantics:</b> if the source throws an exception, that exception
* is forwarded (e.g., no partial map is created).</p>
* @param <T> the element type
* @param <K> the key type
* @param <V> the value type
* @param source the source of Ts
* @param keySelector the key selector
* @param collectionSupplier the function which retuns a collection to hold the Vs.
* @param valueSelector the value selector
* @return the new observable
* @see Functions#listSupplier()
* @see Functions#setSupplier()
*/
public static <T, K, V> Observable<Map<K, Collection<V>>> toMultiMap(
final Observable<? extends T> source,
final Func1<? super T, ? extends K> keySelector,
final Func0<? extends Collection<V>> collectionSupplier,
final Func1<? super T, ? extends V> valueSelector
) {
return new Observable<Map<K, Collection<V>>>() {
@Override
public Closeable register(final Observer<? super Map<K, Collection<V>>> observer) {
return source.register(new Observer<T>() {
/** The map. */
final Map<K, Collection<V>> map = new HashMap<K, Collection<V>>();
@Override
public void error(Throwable ex) {
observer.error(ex);
}
@Override
public void finish() {
observer.next(map);
observer.finish();
}
@Override
public void next(T value) {
K k = keySelector.invoke(value);
Collection<V> coll = map.get(k);
if (coll == null) {
coll = collectionSupplier.invoke();
map.put(k, coll);
}
V v = valueSelector.invoke(value);
coll.add(v);
}
});
}
};
}
	/**
	 * Maps the given source of Ts by using the key and value extractor and
	 * returns a single multi-map of them.
	 * <p>Key equality is decided by the supplied {@code keyComparer}; the hash
	 * code is still taken from the raw key object, so the comparer should be
	 * consistent with {@code hashCode()} — NOTE(review): confirm this contract,
	 * otherwise equal keys may land in different hash buckets.</p>
	 * <p><b>Exception semantics:</b> if the source throws an exception, that exception
	 * is forwarded (e.g., no partial map is created).</p>
	 * @param <T> the element type
	 * @param <K> the key type
	 * @param <V> the value type
	 * @param source the source of Ts
	 * @param keySelector the key selector
	 * @param collectionSupplier the function which returns a collection to hold the Vs.
	 * @param valueSelector the value selector
	 * @param keyComparer the comparison function for keys
	 * @return the new observable
	 */
	public static <T, K, V> Observable<Map<K, Collection<V>>> toMultiMap(
			final Observable<? extends T> source,
			final Func1<? super T, ? extends K> keySelector,
			final Func0<? extends Collection<V>> collectionSupplier,
			final Func1<? super T, ? extends V> valueSelector,
			final Func2<? super K, ? super K, Boolean> keyComparer
	) {
		return new Observable<Map<K, Collection<V>>>() {
			@Override
			public Closeable register(final Observer<? super Map<K, Collection<V>>> observer) {
				return source.register(new Observer<T>() {
					/** The key class with custom equality comparer. */
					class Key {
						/** The key value. */
						final K key;
						/**
						 * Constructor.
						 * @param key the key
						 */
						Key(K key) {
							this.key = key;
						}
						@Override
						public boolean equals(Object obj) {
							// delegate equality to the user-supplied comparer
							if (obj instanceof Key) {
								return keyComparer.invoke(key, ((Key)obj).key);
							}
							return false;
						}
						@Override
						public int hashCode() {
							// uses the raw key's hashCode; must be consistent with keyComparer
							return key != null ? key.hashCode() : 0;
						}
					}
					/** The map. */
					final Map<Key, Collection<V>> map = new HashMap<Key, Collection<V>>();
					@Override
					public void error(Throwable ex) {
						observer.error(ex);
					}
					@Override
					public void finish() {
						// unwrap the Key wrappers before handing the map to the observer
						Map<K, Collection<V>> result = new HashMap<K, Collection<V>>();
						for (Map.Entry<Key, Collection<V>> e : map.entrySet()) {
							result.put(e.getKey().key, e.getValue());
						}
						observer.next(result);
						observer.finish();
					}
					@Override
					public void next(T value) {
						Key k = new Key(keySelector.invoke(value));
						Collection<V> coll = map.get(k);
						if (coll == null) {
							// first element under this key: create its collection lazily
							coll = collectionSupplier.invoke();
							map.put(k, coll);
						}
						V v = valueSelector.invoke(value);
						coll.add(v);
					}
				});
			}
		};
	}
/**
* Wrap the iterable object into an observable and use the
* default pool when generating the iterator sequence.
* @param <T> the type of the values
* @param iterable the iterable instance
* @return the observable
*/
@Nonnull
public static <T> Observable<T> toObservable(
@Nonnull final Iterable<? extends T> iterable) {
return toObservable(iterable, DEFAULT_SCHEDULER.get());
}
/**
* Wrap the iterable object into an observable and use the
* given pool when generating the iterator sequence.
* @param <T> the type of the values
* @param iterable the iterable instance
* @param pool the thread pool where to generate the events from the iterable
* @return the observable
*/
@Nonnull
public static <T> Observable<T> toObservable(
@Nonnull final Iterable<? extends T> iterable,
@Nonnull final Scheduler pool) {
return new Observable<T>() {
@Override
public Closeable register(final Observer<? super T> observer) {
DefaultRunnable s = new DefaultRunnable() {
@Override
public void onRun() {
for (T t : iterable) {
if (cancelled()) {
break;
}
observer.next(t);
}
if (!cancelled()) {
observer.finish();
}
}
};
return pool.schedule(s);
}
};
}
/**
* Wraps the given action as an observable which reacts only to <code>next()</code> events.
* @param <T> the type of the values
* @param action the action to wrap
* @return the observer wrapping the action
*/
@Nonnull
public static <T> Observer<T> toObserver(
@Nonnull final Action1<? super T> action) {
return new Observer<T>() {
@Override
public void error(Throwable ex) {
// ignored
}
@Override
public void finish() {
// ignored
}
@Override
public void next(T value) {
action.invoke(value);
};
};
}
/**
* Creates an observer which calls the given functions on its similarly named methods.
* @param <T> the value type to receive
* @param next the action to invoke on next()
* @param error the action to invoke on error()
* @param finish the action to invoke on finish()
* @return the observer
*/
@Nonnull
public static <T> Observer<T> toObserver(
@Nonnull final Action1<? super T> next,
@Nonnull final Action1<? super Throwable> error,
@Nonnull final Action0 finish) {
return new Observer<T>() {
@Override
public void error(Throwable ex) {
error.invoke(ex);
}
@Override
public void finish() {
finish.invoke();
}
@Override
public void next(T value) {
next.invoke(value);
}
};
}
/**
* Filters objects from source which are assignment compatible with T.
* Note that due java erasure complex generic types can't be filtered this way in runtime (e.g., List<String>.class is just List.class).
* FIXME is this what cast stands for?
* @param <T> the type of the expected values
* @param source the source of unknown elements
* @param token the token to test agains the elements
* @return the observable containing Ts
*/
@Nonnull
public static <T> Observable<T> typedAs(
@Nonnull final Observable<?> source,
@Nonnull final Class<T> token) {
return new Observable<T>() {
@Override
public Closeable register(final Observer<? super T> observer) {
return source.register(new Observer<Object>() {
@Override
public void error(Throwable ex) {
observer.error(ex);
}
@Override
public void finish() {
observer.finish();
}
@Override
public void next(Object value) {
if (token.isInstance(value)) {
observer.next(token.cast(value));
}
}
});
}
};
}
/**
* A convenience function which unwraps the T from a TimeInterval of T.
* @param <T> the value type
* @return the unwrapper function
*/
@Nonnull
public static <T> Func1<TimeInterval<T>, T> unwrapTimeInterval() {
return new Func1<TimeInterval<T>, T>() {
@Override
public T invoke(TimeInterval<T> param1) {
return param1.value();
}
};
}
/**
* A convenience function which unwraps the T from a Timestamped of T.
* @param <T> the value type
* @return the unwrapper function
*/
@Nonnull
public static <T> Func1<Timestamped<T>, T> unwrapTimestamped() {
return new Func1<Timestamped<T>, T>() {
@Override
public T invoke(Timestamped<T> param1) {
return param1.value();
}
};
}
	/**
	 * Receives a resource from the resource selector and
	 * uses the resource until it terminates, then closes the resource.
	 * FIXME not sure how this method should work
	 * <p>NOTE(review): the resource is closed only on the error/finish paths
	 * below; closing the returned registration does not close the resource —
	 * confirm whether that is intended.</p>
	 * @param <T> the output resource type.
	 * @param <U> the closeable resource to work with
	 * @param resourceSelector the function that gives a resource
	 * @param resourceUsage a function that returns an observable of T for the given resource.
	 * @return the observable of Ts which terminates once the usage terminates
	 */
	@Nonnull
	public static <T, U extends Closeable> Observable<T> using(
			@Nonnull final Func0<? extends U> resourceSelector,
			@Nonnull final Func1<? super U, ? extends Observable<? extends T>> resourceUsage) {
		return new Observable<T>() {
			@Override
			public Closeable register(final Observer<? super T> observer) {
				// a fresh resource is acquired for each registration
				final U resource = resourceSelector.invoke();
				return resourceUsage.invoke(resource).register(new Observer<T>() {
					@Override
					public void error(Throwable ex) {
						try {
							observer.error(ex);
						} finally {
							// release the resource even if observer.error throws
							Closeables.close0(resource);
						}
					}
					@Override
					public void finish() {
						try {
							observer.finish();
						} finally {
							// release the resource even if observer.finish throws
							Closeables.close0(resource);
						}
					}
					@Override
					public void next(T value) {
						observer.next(value);
					}
				});
			}
		};
	}
/**
* Creates a filtered observable where only Ts are relayed which satisfy the clause.
* The clause receives the index and the current element to test.
* The clauseFactory is used for each individual registering observer.
* This can be used to create memorizing filter functions such as distinct.
* @param <T> the element type
* @param source the source of Ts
* @param clauseFactory the filter clause, the first parameter receives the current index, the second receives the current element
* @return the new observable
*/
@Nonnull
public static <T> Observable<T> where(
@Nonnull final Observable<? extends T> source,
@Nonnull final Func0<Func2<Integer, ? super T, Boolean>> clauseFactory) {
return new Observable<T>() {
@Override
public Closeable register(final Observer<? super T> observer) {
return source.register(new Observer<T>() {
/** The current element index. */
int index;
/** The clause factory to use. */
final Func2<Integer, ? super T, Boolean> clause = clauseFactory.invoke();
@Override
public void error(Throwable ex) {
observer.error(ex);
}
@Override
public void finish() {
observer.finish();
}
@Override
public void next(T value) {
if (clause.invoke(index, value)) {
observer.next(value);
}
index++;
}
});
}
};
}
/**
* Creates a filtered observable where only Ts are relayed which satisfy the clause.
* @param <T> the element type
* @param source the source of Ts
* @param clause the filter clause
* @return the new observable
*/
@Nonnull
public static <T> Observable<T> where(
@Nonnull final Observable<? extends T> source,
@Nonnull final Func1<? super T, Boolean> clause) {
return new Observable<T>() {
@Override
public Closeable register(final Observer<? super T> observer) {
return source.register(new Observer<T>() {
@Override
public void error(Throwable ex) {
observer.error(ex);
}
@Override
public void finish() {
observer.finish();
}
@Override
public void next(T value) {
if (clause.invoke(value)) {
observer.next(value);
}
}
});
}
};
}
/**
* Creates a filtered observable where only Ts are relayed which satisfy the clause.
* The clause receives the index and the current element to test.
* @param <T> the element type
* @param source the source of Ts
* @param clause the filter clause, the first parameter receives the current index, the second receives the current element
* @return the new observable
*/
@Nonnull
public static <T> Observable<T> where(
@Nonnull final Observable<? extends T> source,
@Nonnull final Func2<Integer, ? super T, Boolean> clause) {
return new Observable<T>() {
@Override
public Closeable register(final Observer<? super T> observer) {
return source.register(new Observer<T>() {
/** The current element index. */
int index;
@Override
public void error(Throwable ex) {
observer.error(ex);
}
@Override
public void finish() {
observer.finish();
}
@Override
public void next(T value) {
if (clause.invoke(index, value)) {
observer.next(value);
}
index++;
}
});
}
};
}
/**
* Splits the source stream into separate observables once
* the windowClosing fires an event.
* @param <T> the element type to observe
* @param <U> the closing event type, irrelevant
* @param source the source of Ts
* @param windowClosing the source of the window splitting events
* @return the observable on sequences of observables of Ts
*/
@Nonnull
public static <T, U> Observable<Observable<T>> window(
@Nonnull final Observable<? extends T> source,
@Nonnull final Func0<? extends Observable<U>> windowClosing) {
return window(source, windowClosing, DEFAULT_SCHEDULER.get());
}
	/**
	 * Splits the source stream into separate observables on
	 * each windowClosing event.
	 * FIXME not sure how to implement
	 * @param <T> the element type to observe
	 * @param <U> the closing event type, irrelevant
	 * @param source the source of Ts
	 * @param windowClosing the source of the window splitting events
	 * @param pool the pool where the first group is signalled from directly after
	 * the registration
	 * @return the observable on sequences of observables of Ts
	 */
	@Nonnull
	public static <T, U> Observable<Observable<T>> window(
			@Nonnull final Observable<? extends T> source,
			@Nonnull final Func0<? extends Observable<U>> windowClosing,
			@Nonnull final Scheduler pool) {
		return new Observable<Observable<T>>() {
			@Override
			public Closeable register(final Observer<? super Observable<T>> observer) {
				// The current observable
				DefaultObserver<T> obs = new DefaultObserver<T>(true) {
					/** The current observable window. */
					@GuardedBy("lock")
					DefaultObservable<T> current;
					/** The window watcher. */
					final DefaultObserver<U> wo = new DefaultObserver<U>(lock, true) {
						@Override
						public void onError(Throwable ex) {
							innerError(ex);
						}
						@Override
						public void onFinish() {
							innerFinish();
						}
						@Override
						public void onNext(U value) {
							// a closing event rotates the window: finish the old one,
							// then open and emit a fresh window
							DefaultObservable<T> o = new DefaultObservable<T>();
							Observer<T> os = current;
							current = o;
							if (os != null) {
								os.finish();
							}
							observer.next(o);
						}
					};
					/** The close handler for the inner observer of closing events. */
					Closeable woc = windowClosing.invoke().register(wo);
					/**
					 * The scheduled action which will open the first window as soon as possible.
					 */
					Closeable openWindow = pool.schedule(new DefaultRunnable(lock) {
						@Override
						protected void onRun() {
							// open the first window only if no element or closing
							// event created one already
							if (current == null) {
								DefaultObservable<T> o = new DefaultObservable<T>();
								current = o;
								observer.next(o);
							}
						}
					});
					/**
					 * The inner exception callback.
					 * @param ex the exception
					 */
					void innerError(Throwable ex) {
						error(ex);
					}
					/** The inner finish callback. */
					void innerFinish() {
						finish();
					}
					@Override
					public void onClose() {
						// tear down both the closing-event subscription and the opener task
						Closeables.close0(woc);
						Closeables.close0(openWindow);
					}
					@Override
					public void onError(Throwable ex) {
						observer.error(ex);
					}
					@Override
					public void onFinish() {
						observer.finish();
					}
					@Override
					public void onNext(T value) {
						// lazily open a window for the very first element
						if (current == null) {
							DefaultObservable<T> o = new DefaultObservable<T>();
							current = o;
							observer.next(o);
						}
						current.next(value);
					}
				};
				return Closeables.close(obs, source.register(obs));
			}
		};
	}
/**
* Project the source elements into observable windows of size <code>count</code>
* and skip some initial values.
* @param <T> the element type
* @param source the source of Ts
* @param count the count of elements
* @return the new observable
*/
public static <T> Observable<Observable<T>> window(
final Observable<? extends T> source,
int count
) {
return window(source, count, 0, DEFAULT_SCHEDULER.get());
}
/**
* Project the source elements into observable windows of size <code>count</code>
* and skip some initial values.
* @param <T> the element type
* @param source the source of Ts
* @param count the count of elements
* @param skip the elements to skip
* @return the new observable
*/
public static <T> Observable<Observable<T>> window(
final Observable<? extends T> source,
int count,
int skip
) {
return window(source, count, skip, DEFAULT_SCHEDULER.get());
}
/**
* Project the source elements into observable windows of size <code>count</code>
* and skip some initial values.
* FIXME implement
* @param <T> the element type
* @param source the source of Ts
* @param count the count of elements
* @param skip the elements to skip
* @param scheduler the scheduler
* @return the new observable
*/
public static <T> Observable<Observable<T>> window(
final Observable<? extends T> source,
final int count,
final int skip,
final Scheduler scheduler
) {
return new Observable<Observable<T>>() {
@Override
public Closeable register(final Observer<? super Observable<T>> observer) {
final AtomicReference<DefaultObservable<T>> current = new AtomicReference<DefaultObservable<T>>();
final AtomicInteger counter = new AtomicInteger(0);
DefaultObserverEx<T> o = new DefaultObserverEx<T>(true) {
/** The elements to skip at first. */
int skipCount = skip;
{
registerTimer();
add("source", source);
}
/**
* Create a new observable and reset the limit counter as well.
*/
void createNewObservable() {
counter.set(count);
DefaultObservable<T> d = current.get();
if (d != null) {
d.finish();
}
d = new DefaultObservable<T>();
current.set(d);
observer.next(d);
}
@Override
protected void onError(Throwable ex) {
remove("timer");
DefaultObservable<T> d = current.get();
d.error(ex);
observer.error(ex);
}
@Override
protected void onFinish() {
remove("timer");
DefaultObservable<T> d = current.get();
d.finish();
observer.finish();
}
@Override
protected void onNext(T value) {
if (skipCount > 0) {
skipCount--;
return;
}
if (counter.get() == 0 || current.get() == null) {
createNewObservable();
}
counter.decrementAndGet();
DefaultObservable<T> d = current.get();
d.next(value);
}
void registerTimer() {
replace("timer", "timer", scheduler.schedule(
new DefaultRunnable(lock) {
@Override
protected void onRun() {
// first only
if (current.get() == null) {
createNewObservable();
}
}
}, 0, TimeUnit.MILLISECONDS
));
}
};
return o;
}
};
}
/**
* Projects each value of T into an observable which are closed by
* either the <code>count</code> limit or the ellapsed timespan.
* @param <T> the element type
* @param source the source of Ts
* @param count the maximum count of the elements in each window
* @param timeSpan the maximum time for each window
* @param unit the time unit
* @return the new observable
*/
public static <T> Observable<Observable<T>> window(
final Observable<? extends T> source,
final int count,
final long timeSpan,
final TimeUnit unit
) {
return window(source, count, timeSpan, unit, DEFAULT_SCHEDULER.get());
}
/**
* Projects each value of T into an observable which are closed by
* either the <code>count</code> limit or the ellapsed timespan.
* @param <T> the element type
* @param source the source of Ts
* @param count the maximum count of the elements in each window
* @param timeSpan the maximum time for each window
* @param unit the time unit
* @param scheduler the scheduler
* @return the new observable
*/
public static <T> Observable<Observable<T>> window(
final Observable<? extends T> source,
final int count,
final long timeSpan,
final TimeUnit unit,
final Scheduler scheduler
) {
return new Observable<Observable<T>>() {
@Override
public Closeable register(final Observer<? super Observable<T>> observer) {
final AtomicReference<DefaultObservable<T>> current = new AtomicReference<DefaultObservable<T>>();
final AtomicInteger counter = new AtomicInteger(0);
DefaultObserverEx<T> o = new DefaultObserverEx<T>(true) {
{
registerTimer();
add("source", source);
}
/**
* Create a new observable and reset the limit counter as well.
*/
void createNewObservable() {
counter.set(count);
DefaultObservable<T> d = current.get();
if (d != null) {
d.finish();
}
d = new DefaultObservable<T>();
current.set(d);
observer.next(d);
}
@Override
protected void onError(Throwable ex) {
remove("timer");
DefaultObservable<T> d = current.get();
d.error(ex);
observer.error(ex);
}
@Override
protected void onFinish() {
remove("timer");
DefaultObservable<T> d = current.get();
d.finish();
observer.finish();
}
@Override
protected void onNext(T value) {
if (counter.get() == 0 || current.get() == null) {
createNewObservable();
}
counter.decrementAndGet();
DefaultObservable<T> d = current.get();
d.next(value);
}
void registerTimer() {
replace("timer", "timer", scheduler.schedule(
new DefaultRunnable(lock) {
/** First run. */
boolean first;
@Override
protected void onRun() {
if (!first) {
first = true;
if (current.get() == null) {
createNewObservable();
}
} else {
createNewObservable();
}
}
}, timeSpan, unit
));
}
};
return o;
}
};
}
/**
* Project the source elements into observable windows of size <code>count</code>
* and skip some initial values.
* @param <T> the element type
* @param source the source of Ts
* @param count the count of elements
* @param scheduler the scheduler
* @return the new observable
*/
public static <T> Observable<Observable<T>> window(
final Observable<? extends T> source,
int count,
Scheduler scheduler
) {
return window(source, count, 0, scheduler);
}
/**
* Project each of the source Ts into observable sequences separated by
* the timespan and initial timeskip values.
* @param <T> the element type
* @param source the source of Ts
* @param timeSpan the timespan between window openings
* @param timeSkip the initial delay to open the first window
* @param unit the time unit
* @return the observable
*/
public static <T> Observable<Observable<T>> window(
final Observable<? extends T> source,
final long timeSpan,
final long timeSkip,
final TimeUnit unit
) {
return window(source, timeSpan, timeSkip, unit, DEFAULT_SCHEDULER.get());
}
/**
* Project each of the source Ts into observable sequences separated by
* the timespan and initial timeskip values.
* FIXME implement
* @param <T> the element type
* @param source the source of Ts
* @param timeSpan the timespan between window openings
* @param timeSkip the initial delay to open the first window
* @param unit the time unit
* @param scheduler the scheduler
* @return the observable
*/
public static <T> Observable<Observable<T>> window(
final Observable<? extends T> source,
final long timeSpan,
final long timeSkip,
final TimeUnit unit,
final Scheduler scheduler
) {
return new Observable<Observable<T>>() {
@Override
public Closeable register(final Observer<? super Observable<T>> observer) {
final AtomicReference<DefaultObservable<T>> current = new AtomicReference<DefaultObservable<T>>();
DefaultObserverEx<T> o = new DefaultObserverEx<T>(true) {
{
registerTimer();
add("source", source);
}
/**
* Create a new observable and reset the limit counter as well.
*/
void createNewObservable() {
DefaultObservable<T> d = current.get();
if (d != null) {
d.finish();
}
d = new DefaultObservable<T>();
current.set(d);
observer.next(d);
}
@Override
protected void onError(Throwable ex) {
remove("timer");
DefaultObservable<T> d = current.get();
d.error(ex);
observer.error(ex);
}
@Override
protected void onFinish() {
remove("timer");
DefaultObservable<T> d = current.get();
d.finish();
observer.finish();
}
@Override
protected void onNext(T value) {
DefaultObservable<T> d = current.get();
if (d != null) {
d.next(value);
}
}
void registerTimer() {
replace("timer", "timer", scheduler.schedule(
new DefaultRunnable(lock) {
@Override
protected void onRun() {
createNewObservable();
}
}, timeSkip, timeSpan, unit
));
}
};
return o;
}
};
}
/**
* Project each of the source Ts into observable sequences separated by
* the timespan and initial timeskip values.
* @param <T> the element type
* @param source the source of Ts
* @param timeSpan the timespan between window openings
* @param unit the time unit
* @return the observable
*/
public static <T> Observable<Observable<T>> window(
final Observable<? extends T> source,
final long timeSpan,
final TimeUnit unit
) {
return window(source, timeSpan, 0L, unit, DEFAULT_SCHEDULER.get());
}
/**
* Project each of the source Ts into observable sequences separated by
* the timespan and initial timeskip values.
* @param <T> the element type
* @param source the source of Ts
* @param timeSpan the timespan between window openings
* @param unit the time unit
* @param scheduler the scheduler
* @return the observable
*/
public static <T> Observable<Observable<T>> window(
final Observable<? extends T> source,
final long timeSpan,
final TimeUnit unit,
final Scheduler scheduler
) {
return window(source, timeSpan, 0L, unit, scheduler);
}
	/**
	 * Splits the source stream into separate observables
	 * by starting at windowOpening events and closing at windowClosing events.
	 * FIXME not sure how to implement
	 * @param <T> the element type to observe
	 * @param <U> the opening event type, irrelevant
	 * @param <V> the closing event type, irrelevant
	 * @param source the source of Ts
	 * @param windowOpening the source of the window opening events
	 * @param windowClosing the source of the window splitting events
	 * @return the observable on sequences of observables of Ts
	 */
	@Nonnull
	public static <T, U, V> Observable<Observable<T>> window(
			@Nonnull final Observable<? extends T> source,
			@Nonnull final Observable<? extends U> windowOpening,
			@Nonnull final Func1<? super U, ? extends Observable<V>> windowClosing) {
		return new Observable<Observable<T>>() {
			@Override
			public Closeable register(final Observer<? super Observable<T>> observer) {
				// both inner observers share this lock so window bookkeeping is serialized
				final Lock lock = new ReentrantLock(true);
				// identity map: each opening event value identifies its own window
				final Map<U, DefaultObservable<T>> openWindows = new IdentityHashMap<U, DefaultObservable<T>>();
				final AtomicReference<Closeable> closeBoth = new AtomicReference<Closeable>();
				// relay Ts to open windows
				DefaultObserverEx<T> o1 = new DefaultObserverEx<T>(lock, true) {
					@Override
					protected void onClose() {
						super.onClose();
						// closing either source observer tears down the other as well
						Closeables.close0(closeBoth.get());
					}
					@Override
					protected void onError(Throwable ex) {
						for (DefaultObservable<T> ot : openWindows.values()) {
							ot.error(ex);
						}
						observer.error(ex);
					}
					@Override
					protected void onFinish() {
						for (DefaultObservable<T> ot : openWindows.values()) {
							ot.finish();
						}
						observer.finish();
					}
					@Override
					protected void onNext(T value) {
						// broadcast each element to every currently open window
						for (DefaultObservable<T> ot : openWindows.values()) {
							ot.next(value);
						}
					}
				};
				DefaultObserverEx<U> o2 = new DefaultObserverEx<U>(lock, true) {
					@Override
					protected void onClose() {
						super.onClose();
						Closeables.close0(closeBoth.get());
					}
					@Override
					protected void onError(Throwable ex) {
						observer.error(ex);
					}
					@Override
					protected void onFinish() {
						observer.finish();
					}
					@Override
					protected void onNext(final U value) {
						// an opening event creates a window and subscribes to its closer
						final DefaultObservable<T> newWindow = new DefaultObservable<T>();
						openWindows.put(value, newWindow);
						add(value, windowClosing.invoke(value).register(new Observer<V>() {
							@Override
							public void error(Throwable ex) {
								openWindows.remove(value);
								newWindow.error(ex);
							}
							@Override
							public void finish() {
								openWindows.remove(value);
								newWindow.finish();
							}
							@Override
							public void next(V value) {
								// No op?!
							}
						}));
						observer.next(newWindow);
					}
				};
				closeBoth.set(Closeables.close(o1, o2));
				o1.add(new Object(), source);
				o2.add(new Object(), windowOpening);
				return closeBoth.get();
			}
		};
	}
/**
* Wrap the given type into a timestamped container of T.
* @param <T> the type of the contained element
* @return the function performing the wrapping
*/
@Nonnull
public static <T> Func1<T, Timestamped<T>> wrapTimestamped() {
return new Func1<T, Timestamped<T>>() {
@Override
public Timestamped<T> invoke(T param1) {
return Timestamped.of(param1);
};
};
}
	/**
	 * Creates an observable which waits for events from left
	 * and combines it with the next available value from the right iterable,
	 * applies the selector function and emits the resulting T.
	 * The error() and finish() signals are relayed to the output.
	 * The result is finished if the right iterator runs out of
	 * values before the left iterator.
	 * @param <T> the resulting element type
	 * @param <U> the value type streamed on the left observable
	 * @param <V> the value type streamed on the right iterable
	 * @param left the left observables of Us
	 * @param right the right iterable of Vs
	 * @param selector the selector taking the left Us and right Vs.
	 * @return the resulting observable
	 */
	@Nonnull
	public static <T, U, V> Observable<V> zip(
			@Nonnull final Observable<? extends T> left,
			@Nonnull final Iterable<? extends U> right,
			@Nonnull final Func2<? super T, ? super U, ? extends V> selector) {
		return new Observable<V>() {
			@Override
			public Closeable register(final Observer<? super V> observer) {
				DefaultObserver<T> obs = new DefaultObserver<T>(true) {
					/** The second source. */
					final Iterator<? extends U> it = right.iterator();
					/** The registration handler. */
					final Closeable c;
					{
						// register under the lock so no event can race this
						// observer's own construction
						lock.lock();
						try {
							c = left.register(this);
						} finally {
							lock.unlock();
						}
					}
					@Override
					protected void onClose() {
						Closeables.close0(c);
					}
					@Override
					public void onError(Throwable ex) {
						observer.error(ex);
					}
					@Override
					public void onFinish() {
						observer.finish();
					}
					@Override
					public void onNext(T t) {
						if (it.hasNext()) {
							U u = it.next();
							observer.next(selector.invoke(t, u));
						} else {
							// right side exhausted first: complete the output early
							observer.finish();
							close();
						}
					}
				};
				return obs;
			}
		};
	}
/**
 * Creates an observable which waits for events from left
 * and combines it with the next available value from the right observable,
 * applies the selector function and emits the resulting T.
 * Basically it emmits a T when both an U and V is available.
 * The output stream throws error or terminates if any of the streams
 * throws or terminates.
 * FIXME not sure how to implement this, and how to close and signal
 * @param <T> the resulting element type
 * @param <U> the value type streamed on the left observable
 * @param <V> the value type streamed on the right observable
 * @param left the left observable of Us
 * @param right the right observable of Vs
 * @param selector the selector taking the left Us and right Vs.
 * @return the resulting observable
 */
@Nonnull
public static <T, U, V> Observable<T> zip(
		@Nonnull final Observable<? extends U> left,
		@Nonnull final Observable<? extends V> right,
		@Nonnull final Func2<U, V, T> selector) {
	return new Observable<T>() {
		@Override
		public Closeable register(final Observer<? super T> observer) {
			// values that arrived on one side but have no pair from the other side yet
			final LinkedBlockingQueue<U> queueU = new LinkedBlockingQueue<U>();
			final LinkedBlockingQueue<V> queueV = new LinkedBlockingQueue<V>();
			final AtomicReference<Closeable> closeBoth = new AtomicReference<Closeable>();
			// count of still-active sources; the output finishes when it reaches zero
			final AtomicInteger wip = new AtomicInteger(2);
			// shared fair lock: callbacks of the two inner observers never overlap
			final Lock lockBoth = new ReentrantLock(true);
			// held during construction so neither source can fire before closeBoth is set
			lockBoth.lock();
			try {
				final DefaultObserver<U> oU = new DefaultObserver<U>(lockBoth, false) {
					/** The source handler. */
					final Closeable c;
					{
						lock.lock();
						try {
							c = left.register(this);
						} finally {
							lock.unlock();
						}
					}
					@Override
					protected void onClose() {
						Closeables.close0(c);
					}
					@Override
					public void onError(Throwable ex) {
						observer.error(ex);
						Closeables.close0(closeBoth.get());
					}
					@Override
					public void onFinish() {
						if (wip.decrementAndGet() == 0) {
							observer.finish();
							Closeables.close0(closeBoth.get());
						}
					}
					@Override
					public void onNext(U u) {
						V v = queueV.poll();
						if (v != null) {
							observer.next(selector.invoke(u, v));
						} else {
							// no pair yet: queue only while the other side still runs,
							// otherwise no pair can ever form and we finish
							if (wip.get() == 2) {
								queueU.add(u);
							} else {
								this.finish();
							}
						}
					}
				};
				final DefaultObserver<V> oV = new DefaultObserver<V>(lockBoth, false) {
					/** The source handler. */
					final Closeable c;
					{
						lock.lock();
						try {
							c = right.register(this);
						} finally {
							lock.unlock();
						}
					}
					@Override
					protected void onClose() {
						Closeables.close0(c);
					}
					@Override
					public void onError(Throwable ex) {
						observer.error(ex);
						Closeables.close0(closeBoth.get());
					}
					@Override
					public void onFinish() {
						if (wip.decrementAndGet() == 0) {
							observer.finish();
							Closeables.close0(closeBoth.get());
						}
					}
					@Override
					public void onNext(V v) {
						U u = queueU.poll();
						if (u != null) {
							observer.next(selector.invoke(u, v));
						} else {
							// mirror image of oU.onNext: queue or finish
							if (wip.get() == 2) {
								queueV.add(v);
							} else {
								this.finish();
							}
						}
					}
				};
				Closeable c = Closeables.close(oU, oV);
				closeBoth.set(c);
			} finally {
				lockBoth.unlock();
			}
			return closeBoth.get();
		}
	};
}
/**
 * Combine the incoming Ts of the various observables into a single list of Ts like
 * using Reactive.zip() on more than two sources.
 * @param <T> the element type
 * @param srcs the iterable of observable sources.
 * @return the new observable
 */
public static <T> Observable<List<T>> combine(final List<? extends Observable<? extends T>> srcs) {
	if (srcs.isEmpty()) {
		return Reactive.never();
	}
	if (srcs.size() == 1) {
		// single source: wrap each value into a one-element list
		return Reactive.select(srcs.get(0), new Func1<T, List<T>>() {
			@Override
			public List<T> invoke(T value) {
				List<T> list = new ArrayList<T>(1);
				list.add(value);
				return list;
			}
		});
	}
	return new Observable<List<T>>() {
		@Override
		public Closeable register(Observer<? super List<T>> observer) {
			// zip the first two sources into a list, then fold the remaining sources in
			Observable<List<T>> combined = Reactive.zip(srcs.get(0), srcs.get(1), new Func2<T, T, List<T>>() {
				@Override
				public List<T> invoke(T first, T second) {
					List<T> list = new ArrayList<T>();
					list.add(first);
					list.add(second);
					return list;
				}
			});
			for (int idx = 2; idx < srcs.size(); idx++) {
				combined = Reactive.zip(combined, srcs.get(idx), new Func2<List<T>, T, List<T>>() {
					@Override
					public List<T> invoke(List<T> list, T value) {
						// extend the accumulated list in place, as the original did
						list.add(value);
						return list;
					}
				});
			}
			return combined.register(observer);
		}
	};
}
/**
 * Combine a stream of Ts with a constant T whenever the src fires.
 * The observed list contains the values of src as the first value, constant as the second.
 * @param <T> the element type
 * @param src the source of Ts
 * @param constant the constant T to combine with
 * @return the new observer
 */
public static <T> Observable<List<T>> combine(Observable<? extends T> src, final T constant) {
	return Reactive.select(src, new Func1<T, List<T>>() {
		@Override
		public List<T> invoke(T value) {
			// pair: [source value, constant]
			List<T> pair = new ArrayList<T>();
			pair.add(value);
			pair.add(constant);
			return pair;
		}
	});
}
/**
 * Combine a constant T with a stream of Ts whenever the src fires.
 * The observed sequence contains the constant as first, the src value as second.
 * @param <T> the element type
 * @param constant the constant T to combine with
 * @param src the source of Ts
 * @return the new observer
 */
public static <T> Observable<List<T>> combine(final T constant, Observable<? extends T> src) {
	return Reactive.select(src, new Func1<T, List<T>>() {
		@Override
		public List<T> invoke(T value) {
			// pair: [constant, source value]
			List<T> pair = new ArrayList<T>();
			pair.add(constant);
			pair.add(value);
			return pair;
		}
	});
}
/** Utility class; prevents instantiation. */
private Reactive() {
	// utility class
}
}
/*
* Copyright 2011-2012 David Karnok
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package hu.akarnokd.reactive4java.reactive;
import hu.akarnokd.reactive4java.base.Action0;
import hu.akarnokd.reactive4java.base.Action1;
import hu.akarnokd.reactive4java.base.Actions;
import hu.akarnokd.reactive4java.base.CircularBuffer;
import hu.akarnokd.reactive4java.base.CloseableIterable;
import hu.akarnokd.reactive4java.base.CloseableIterator;
import hu.akarnokd.reactive4java.base.Closeables;
import hu.akarnokd.reactive4java.base.Func0;
import hu.akarnokd.reactive4java.base.Func1;
import hu.akarnokd.reactive4java.base.Func2;
import hu.akarnokd.reactive4java.base.Functions;
import hu.akarnokd.reactive4java.base.Option;
import hu.akarnokd.reactive4java.base.Pair;
import hu.akarnokd.reactive4java.base.Scheduler;
import hu.akarnokd.reactive4java.base.TooManyElementsException;
import hu.akarnokd.reactive4java.interactive.SingleContainer;
import hu.akarnokd.reactive4java.util.DefaultScheduler;
import hu.akarnokd.reactive4java.util.SingleLaneExecutor;
import java.io.Closeable;
import java.io.IOException;
import java.lang.reflect.Array;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.math.RoundingMode;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.IdentityHashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Set;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import javax.annotation.Nonnull;
import javax.annotation.concurrent.GuardedBy;
/**
* Utility class with operators and helper methods for reactive programming with <code>Observable</code>s and <code>Observer</code>s.
* Guidances were taken from
* <ul>
* <li><a href='http://theburningmonk.com/tags/rx/'>http://theburningmonk.com/tags/rx/</a></li>
* <li><a href='http://blogs.bartdesmet.net/blogs/bart/archive/2010/01/01/the-essence-of-linq-minlinq.aspx'>http://blogs.bartdesmet.net/blogs/bart/archive/2010/01/01/the-essence-of-linq-minlinq.aspx</a></li>
* <li><a href='http://reactive4java.googlecode.com/svn/trunk/Reactive4Java/docs/javadoc/hu/akarnokd/reactive4java/reactive/Reactive.html'>http://reactive4java.googlecode.com/svn/trunk/Reactive4Java/docs/javadoc/hu/akarnokd/reactive4java/reactive/Reactive.html</a></li>
* <li><a href='http://rxwiki.wikidot.com/101samples#toc3'>http://rxwiki.wikidot.com/101samples#toc3</a></li>
* <li><a href='http://channel9.msdn.com/Tags/rx'>http://channel9.msdn.com/Tags/rx</a></li>
* </ul>
*
* @author akarnokd, 2011.01.26
* @see hu.akarnokd.reactive4java.interactive.Interactive
*/
public final class Reactive {
/**
 * A variant of the registering observable which stores a group key.
 * @author akarnokd, 2011.01.29.
 * @param <Key> the type of the key
 * @param <Value> the value type
 */
static class GroupedRegisteringObservable<Key, Value> extends DefaultObservable<Value> implements GroupedObservable<Key, Value> {
	/** The group key. */
	private final Key key;
	/**
	 * Constructor.
	 * @param key the group key
	 */
	public GroupedRegisteringObservable(Key key) {
		this.key = key;
	}
	/** @return the group key this observable was created for. */
	@Override
	public Key key() {
		return key;
	}
}
/** The diagnostic states of the current runnable: failed with an error, finished normally, or still running. */
public enum ObserverState { OBSERVER_ERROR, OBSERVER_FINISHED, OBSERVER_RUNNING }
/** The common observable pool where the Observer methods get invoked by default; atomically replaceable. */
static final AtomicReference<Scheduler> DEFAULT_SCHEDULER = new AtomicReference<Scheduler>(new DefaultScheduler());
/**
 * Returns an observable which provides a TimeInterval of Ts which
 * records the elapsed time between successive elements.
 * The time interval is evaluated using the System.nanoTime() differences
 * as nanoseconds.
 * The first element contains the time elapsed since the registration occurred.
 * @param <T> the time source
 * @param source the source of Ts
 * @return the new observable
 */
@Nonnull
public static <T> Observable<TimeInterval<T>> addTimeInterval(
		@Nonnull final Observable<? extends T> source) {
	return new Observable<TimeInterval<T>>() {
		@Override
		public Closeable register(final Observer<? super TimeInterval<T>> observer) {
			return source.register(new Observer<T>() {
				/** Nano timestamp of the previous element (initially: registration time). */
				long previous = System.nanoTime();
				@Override
				public void next(T value) {
					long now = System.nanoTime();
					observer.next(TimeInterval.of(value, now - previous));
					previous = now;
				}
				@Override
				public void error(Throwable ex) {
					observer.error(ex);
				}
				@Override
				public void finish() {
					observer.finish();
				}
			});
		}
	};
}
/**
 * Wrap the values within a observable to a timestamped value having always
 * the System.currentTimeMillis() value.
 * @param <T> the element type
 * @param source the source which has its elements in a timestamped way.
 * @return the raw observables of Ts
 */
@Nonnull
public static <T> Observable<Timestamped<T>> addTimestamped(
		@Nonnull Observable<? extends T> source) {
	// map each element through the timestamp-wrapping function
	Func1<T, Timestamped<T>> wrapper = Reactive.<T>wrapTimestamped();
	return select(source, wrapper);
}
/**
 * Apply an accumulator function over the observable source and submit the accumulated value to the returned observable.
 * If the source is empty, no value is emitted before finish().
 * @param <T> the element type
 * @param source the source observable
 * @param accumulator the accumulator function where the first parameter is the current accumulated value and the second is the now received value.
 * @return the observable for the result of the accumulation
 */
@Nonnull
public static <T> Observable<T> aggregate(
		@Nonnull final Observable<? extends T> source,
		@Nonnull final Func2<? super T, ? super T, ? extends T> accumulator) {
	return new Observable<T>() {
		@Override
		public Closeable register(final Observer<? super T> observer) {
			return source.register(new Observer<T>() {
				/** The current aggregation result. */
				T result;
				/** How many items did we get: 0 = none, 1 = one, 2 = two or more. */
				int phase;
				@Override
				public void error(Throwable ex) {
					observer.error(ex);
				};
				@Override
				public void finish() {
					// emit the aggregate only if at least one element arrived
					if (phase >= 1) { // FIXME not sure about this
						observer.next(result);
					}
					observer.finish();
				}
				@Override
				public void next(T value) {
					if (phase == 0) {
						// first element becomes the initial accumulator
						result = value;
						phase++;
					} else {
						result = accumulator.invoke(result, value);
						phase = 2;
					}
				}
			});
		}
	};
}
/**
 * Computes an aggregated value of the source Ts by applying a sum function and applying the divide function when the source
 * finishes, sending the result to the output.
 * @param <T> the type of the values
 * @param <U> the type of the intermediate sum value
 * @param <V> the type of the final average value
 * @param source the source of BigDecimals to aggregate.
 * @param sum the function which sums the input Ts. The first received T will be acompanied by a null U.
 * @param divide the function which perform the final division based on the number of elements
 * @return the observable for the average value
 */
@Nonnull
public static <T, U, V> Observable<V> aggregate(
		@Nonnull final Observable<? extends T> source,
		@Nonnull final Func2<? super U, ? super T, ? extends U> sum,
		@Nonnull final Func2<? super U, ? super Integer, ? extends V> divide) {
	return new Observable<V>() {
		@Override
		public Closeable register(final Observer<? super V> observer) {
			return source.register(new Observer<T>() {
				/** Number of elements seen so far. */
				int n;
				/** The running sum; null until the first element arrives. */
				U acc;
				@Override
				public void next(T value) {
					acc = sum.invoke(acc, value);
					n++;
				}
				@Override
				public void error(Throwable ex) {
					observer.error(ex);
				}
				@Override
				public void finish() {
					// emit the final aggregate only for a non-empty source
					if (n > 0) {
						observer.next(divide.invoke(acc, n));
					}
					observer.finish();
				}
			});
		}
	};
}
/**
 * Apply an accumulator function over the observable source and submit the accumulated value to the returned observable.
 * @param <T> the input element type
 * @param <U> the ouput element type
 * @param source the source observable
 * @param seed the initial value of the accumulator
 * @param accumulator the accumulator function where the first parameter is the current accumulated value and the second is the now received value.
 * @return the observable for the result of the accumulation
 */
@Nonnull
public static <T, U> Observable<U> aggregate(
		@Nonnull final Observable<? extends T> source,
		final U seed,
		@Nonnull final Func2<? super U, ? super T, ? extends U> accumulator) {
	return new Observable<U>() {
		@Override
		public Closeable register(final Observer<? super U> observer) {
			return source.register(new Observer<T>() {
				/** The running accumulator, starting from the seed. */
				U acc = seed;
				@Override
				public void next(T value) {
					acc = accumulator.invoke(acc, value);
				}
				@Override
				public void error(Throwable ex) {
					observer.error(ex);
				}
				@Override
				public void finish() {
					// emit the final value (the seed itself for an empty source)
					observer.next(acc);
					observer.finish();
				}
			});
		}
	};
}
/**
 * Signals a single true or false if all elements of the observable match the predicate.
 * It may return early with a result of false if the predicate simply does not match the current element.
 * For a true result, it waits for all elements of the source observable.
 * @param <T> the type of the source data
 * @param source the source observable
 * @param predicate the predicate to satisfy
 * @return the observable resulting in a single result
 */
@Nonnull
public static <T> Observable<Boolean> all(
		@Nonnull final Observable<? extends T> source,
		@Nonnull final Func1<? super T, Boolean> predicate) {
	return new Observable<Boolean>() {
		@Override
		public Closeable register(final Observer<? super Boolean> observer) {
			DefaultObserverEx<T> o = new DefaultObserverEx<T>(true) {
				{
					// register with the source under a named key managed by DefaultObserverEx
					add("source", source);
				}
				/** Indicate if we returned early. */
				boolean done;
				@Override
				public void onError(Throwable ex) {
					observer.error(ex);
				};
				@Override
				public void onFinish() {
					// source completed without a failing element: result is true
					if (!done) {
						done = true;
						observer.next(true);
						observer.finish();
					}
				}
				@Override
				public void onNext(T value) {
					// the first non-matching element decides the result early
					if (!predicate.invoke(value)) {
						done = true;
						observer.next(false);
						observer.finish();
					}
				}
			};
			return o;
		}
	};
}
/**
 * Channels the values of the first observable who fires first from the given set of observables.
 * E.g., <code>O3 = Amb(O1, O2)</code> if O1 starts to submit events first, O3 will relay these events and events of O2 will be completely ignored
 * @param <T> the type of the observed element
 * @param sources the iterable list of source observables.
 * @return the observable which reacted first
 */
@Nonnull
public static <T> Observable<T> amb(
		@Nonnull final Iterable<? extends Observable<? extends T>> sources) {
	return new Observable<T>() {
		@Override
		public Closeable register(final Observer<? super T> observer) {
			final List<DefaultObserver<T>> observers = new ArrayList<DefaultObserver<T>>();
			List<Observable<? extends T>> observables = new ArrayList<Observable<? extends T>>();
			// holds the first observer to signal; decides the race exactly once
			final AtomicReference<Object> first = new AtomicReference<Object>();
			int i = 0;
			for (final Observable<? extends T> os : sources) {
				observables.add(os);
				final int thisIndex = i;
				DefaultObserver<T> obs = new DefaultObserver<T>(true) {
					/** We won the race. */
					boolean weWon;
					/** Cancel everyone else. */
					void cancelRest() {
						for (int j = 0; j < observers.size(); j++) {
							if (j != thisIndex) {
								observers.get(j).close();
							}
						}
					}
					/** @return Check if we won the race. */
					boolean didWeWon() {
						if (!weWon) {
							if (first.compareAndSet(null, this)) {
								weWon = true;
								cancelRest();
							} else {
								close();
							}
						}
						return weWon;
					}
					@Override
					public void onError(Throwable ex) {
						if (didWeWon()) {
							observer.error(ex);
						}
					}
					@Override
					public void onFinish() {
						if (didWeWon()) {
							observer.finish();
						}
					}
					@Override
					public void onNext(T value) {
						if (didWeWon()) {
							observer.next(value);
						} else {
							close();
						}
					}
				};
				observers.add(obs);
				// FIX: advance the index so each observer knows its own position;
				// previously 'i' was never incremented here, making thisIndex always 0,
				// so a winner other than the first source would cancel itself and
				// leave source 0 registered.
				i++;
			}
			i = 0;
			List<Closeable> closers = new ArrayList<Closeable>(observables.size() * 2 + 1);
			for (final Observable<? extends T> os : observables) {
				DefaultObserver<T> dob = observers.get(i);
				closers.add(dob);
				closers.add(os.register(dob)); // FIXME deregister?!
				i++;
			}
			return Closeables.closeAll(closers);
		}
	};
}
/**
 * Signals a single true if the source observable contains any element.
 * It might return early for a non-empty source but waits for the entire observable to return false.
 * @param <T> the element type
 * @param source the source
 * @return the observable
 */
@Nonnull
public static <T> Observable<Boolean> any(
		@Nonnull final Observable<T> source) {
	// delegate to the predicate-based overload with an always-true predicate
	return any(source, Functions.alwaysTrue1());
}
/**
 * Signals a single TRUE if the source signals any next() and the value matches the predicate before it signals a finish().
 * It signals a false otherwise.
 * @param <T> the source element type.
 * @param source the source observable
 * @param predicate the predicate to test the values
 * @return the observable.
 */
@Nonnull
public static <T> Observable<Boolean> any(
		@Nonnull final Observable<T> source,
		@Nonnull final Func1<? super T, Boolean> predicate) {
	return new Observable<Boolean>() {
		@Override
		public Closeable register(final Observer<? super Boolean> observer) {
			DefaultObserver<T> obs = new DefaultObserver<T>(true) {
				@Override
				public void onError(Throwable ex) {
					observer.error(ex);
					close();
				}
				@Override
				public void onFinish() {
					// source completed without a match: result is false
					observer.next(false);
					observer.finish();
					close();
				}
				@Override
				public void onNext(T value) {
					// first matching element decides the result and stops listening
					if (predicate.invoke(value)) {
						observer.next(true);
						observer.finish();
						close();
					}
				}
			};
			return Closeables.close(obs, source.register(obs));
		}
	};
}
/**
 * Wraps the given observer into an action object which then dispatches
 * various incoming Option values to next(), finish() and error().
 * @param <T> the element type
 * @param observer the observer to wrap
 * @return the wrapper action
 */
@Nonnull
public static <T> Action1<Option<T>> asAction(
		@Nonnull final Observer<? super T> observer) {
	return new Action1<Option<T>>() {
		@Override
		public void invoke(Option<T> option) {
			// route Some/None/Error to next()/finish()/error()
			dispatch(observer, option);
		}
	};
}
/**
 * Convert the Observable instance into a functional-observable object.
 * @param <T> the type of the elements
 * @param source the source observable
 * @return the action to action to option of T
 */
@Nonnull
public static <T> Action1<Action1<Option<T>>> asFObservable(
		@Nonnull final Observable<? extends T> source) {
	return new Action1<Action1<Option<T>>>() {
		@Override
		public void invoke(final Action1<Option<T>> consumer) {
			// adapt the option-consumer into an Observer and register it
			source.register(asObserver(consumer));
		}
	};
}
/**
 * Convert the functional observable into a normal observable object.
 * @param <T> the type of the elements to observe.
 * @param source the source of the functional-observable elements
 * @return the observable object
 */
@Nonnull
public static <T> Observable<T> asObservable(
		@Nonnull final Action1<Action1<Option<T>>> source) {
	return Reactive.create(new Func1<Observer<? super T>, Action0>() {
		@Override
		public Action0 invoke(final Observer<? super T> obs) {
			// feed the functional source with an action view of the observer
			source.invoke(asAction(obs));
			// no special unregistration behavior
			return Actions.noAction0();
		}
	});
}
/**
 * Transform the given action to an observer.
 * The wrapper observer converts its next() messages to Option.some(),
 * the finish() to Option.none() and error() to Option.error().
 * @param <T> the element type to observe
 * @param action the action to wrap
 * @return the observer
 */
@Nonnull
public static <T> Observer<T> asObserver(
		@Nonnull final Action1<? super Option<T>> action) {
	return new Observer<T>() {
		@Override
		public void next(T value) {
			action.invoke(Option.some(value));
		}
		@Override
		public void finish() {
			action.invoke(Option.<T>none());
		}
		@Override
		public void error(Throwable ex) {
			action.invoke(Option.<T>error(ex));
		}
	};
}
/**
 * Computes and signals the average value of the BigDecimal source.
 * The source may not send nulls.
 * @param source the source of BigDecimals to aggregate.
 * @return the observable for the average value
 */
@Nonnull
public static Observable<BigDecimal> averageBigDecimal(
		@Nonnull final Observable<BigDecimal> source) {
	return aggregate(source,
		Functions.sumBigDecimal(),
		new Func2<BigDecimal, Integer, BigDecimal>() {
			@Override
			public BigDecimal invoke(BigDecimal sum, Integer count) {
				// final division, rounding half-up
				BigDecimal divisor = BigDecimal.valueOf(count.longValue());
				return sum.divide(divisor, RoundingMode.HALF_UP);
			}
		}
	);
}
/**
 * Computes and signals the average value of the BigInteger source.
 * The source may not send nulls.
 * @param source the source of BigIntegers to aggregate.
 * @return the observable for the average value
 */
@Nonnull
public static Observable<BigDecimal> averageBigInteger(
		@Nonnull final Observable<BigInteger> source) {
	return aggregate(source,
		Functions.sumBigInteger(),
		new Func2<BigInteger, Integer, BigDecimal>() {
			@Override
			public BigDecimal invoke(BigInteger sum, Integer count) {
				// convert to BigDecimal for the division, rounding half-up
				BigDecimal divisor = BigDecimal.valueOf(count.longValue());
				return new BigDecimal(sum).divide(divisor, RoundingMode.HALF_UP);
			}
		}
	);
}
/**
 * Computes and signals the average value of the Double source.
 * The source may not send nulls.
 * @param source the source of Doubles to aggregate.
 * @return the observable for the average value
 */
@Nonnull
public static Observable<Double> averageDouble(
		@Nonnull final Observable<Double> source) {
	return aggregate(source,
		Functions.sumDouble(),
		new Func2<Double, Integer, Double>() {
			@Override
			public Double invoke(Double sum, Integer count) {
				return sum / count;
			}
		}
	);
}
/**
 * Computes and signals the average value of the Float source.
 * The source may not send nulls.
 * @param source the source of Floats to aggregate.
 * @return the observable for the average value
 */
@Nonnull
public static Observable<Float> averageFloat(
		@Nonnull final Observable<Float> source) {
	return aggregate(source,
		Functions.sumFloat(),
		new Func2<Float, Integer, Float>() {
			@Override
			public Float invoke(Float sum, Integer count) {
				return sum / count;
			}
		}
	);
}
/**
 * Computes and signals the average value of the integer source.
 * The source may not send nulls.
 * The intermediate aggregation used double values.
 * @param source the source of integers to aggregate.
 * @return the observable for the average value
 */
@Nonnull
public static Observable<Double> averageInt(
		@Nonnull final Observable<Integer> source) {
	return aggregate(source,
		new Func2<Double, Integer, Double>() {
			@Override
			public Double invoke(Double sum, Integer value) {
				// the first element starts the sum from its own value
				return sum == null ? value.doubleValue() : sum + value;
			}
		},
		new Func2<Double, Integer, Double>() {
			@Override
			public Double invoke(Double sum, Integer count) {
				return sum / count;
			}
		}
	);
}
/**
 * Computes and signals the average value of the Long source.
 * The source may not send nulls.
 * The intermediate aggregation used double values.
 * @param source the source of longs to aggregate.
 * @return the observable for the average value
 */
@Nonnull
public static Observable<Double> averageLong(
		@Nonnull final Observable<Long> source) {
	return aggregate(source,
		new Func2<Double, Long, Double>() {
			@Override
			public Double invoke(Double sum, Long value) {
				// the first element starts the sum from its own value
				return sum == null ? value.doubleValue() : sum + value;
			}
		},
		new Func2<Double, Integer, Double>() {
			@Override
			public Double invoke(Double sum, Integer count) {
				return sum / count;
			}
		}
	);
}
/**
 * Buffer the nodes as they become available and send them out in bufferSize chunks.
 * The observers return a new and modifiable list of T on every next() call.
 * @param <T> the type of the elements
 * @param source the source observable
 * @param bufferSize the target buffer size
 * @return the observable of the list
 */
@Nonnull
public static <T> Observable<List<T>> buffer(
		@Nonnull final Observable<? extends T> source,
		final int bufferSize) {
	return new Observable<List<T>>() {
		@Override
		public Closeable register(final Observer<? super List<T>> observer) {
			return source.register(new Observer<T>() {
				/** The chunk being filled; created lazily on the first element. */
				List<T> chunk;
				@Override
				public void next(T value) {
					if (chunk == null) {
						chunk = new ArrayList<T>(bufferSize);
					}
					chunk.add(value);
					if (chunk.size() == bufferSize) {
						// full chunk: emit it and start a fresh one
						observer.next(chunk);
						chunk = new ArrayList<T>(bufferSize);
					}
				}
				@Override
				public void error(Throwable ex) {
					observer.error(ex);
				}
				@Override
				public void finish() {
					// flush a non-empty partial chunk before completing
					if (chunk != null && !chunk.isEmpty()) {
						observer.next(chunk);
					}
					observer.finish();
				}
			});
		}
	};
}
/**
 * Buffer the Ts of the source until the buffer reaches its capacity or the current time unit runs out.
 * Might result in empty list of Ts and might complete early when the source finishes before the time runs out.
 * It uses the default scheduler pool.
 * @param <T> the type of the values
 * @param source the source observable
 * @param bufferSize the allowed buffer size
 * @param time the time value to wait between buffer fills
 * @param unit the time unit
 * @return the observable of list of Ts
 */
@Nonnull
public static <T> Observable<List<T>> buffer(
		@Nonnull final Observable<? extends T> source,
		final int bufferSize,
		final long time,
		@Nonnull final TimeUnit unit) {
	// delegate to the scheduler-aware overload with the current default scheduler
	return buffer(source, bufferSize, time, unit, DEFAULT_SCHEDULER.get());
}
/**
 * Buffer the Ts of the source until the buffer reaches its capacity or the current time unit runs out.
 * Might result in empty list of Ts and might complete early when the source finishes before the time runs out.
 * @param <T> the type of the values
 * @param source the source observable
 * @param bufferSize the allowed buffer size
 * @param time the time value to wait between buffer fills
 * @param unit the time unit
 * @param pool the pool where to schedule the buffer splits
 * @return the observable of list of Ts
 */
@Nonnull
public static <T> Observable<List<T>> buffer(
		@Nonnull final Observable<? extends T> source,
		final int bufferSize,
		final long time,
		@Nonnull final TimeUnit unit,
		@Nonnull final Scheduler pool) {
	return new Observable<List<T>>() {
		@Override
		public Closeable register(final Observer<? super List<T>> observer) {
			final BlockingQueue<T> buffer = new LinkedBlockingQueue<T>();
			// tracks the element count separately so the size-based flush can be detected
			final AtomicInteger bufferLength = new AtomicInteger();
			// shared fair lock: the timer flush and the source callbacks never overlap
			final Lock lock = new ReentrantLock(true);
			final DefaultRunnable r = new DefaultRunnable(lock) {
				@Override
				public void onRun() {
					// periodic flush of whatever accumulated so far (possibly empty)
					List<T> curr = new ArrayList<T>();
					buffer.drainTo(curr);
					bufferLength.addAndGet(-curr.size());
					observer.next(curr);
				}
			};
			DefaultObserver<T> s = new DefaultObserver<T>(lock, true) {
				/** The timer companion. */
				Closeable timer = pool.schedule(r, time, time, unit);
				@Override
				protected void onClose() {
					Closeables.close0(timer);
				}
				@Override
				public void onError(Throwable ex) {
					observer.error(ex);
				}
				@Override
				public void onFinish() {
					// final flush of the remaining elements before completing
					List<T> curr = new ArrayList<T>();
					buffer.drainTo(curr);
					bufferLength.addAndGet(-curr.size());
					observer.next(curr);
					observer.finish();
				}
				/** The buffer to fill in. */
				@Override
				public void onNext(T value) {
					buffer.add(value);
					// size-based early flush when the buffer reaches capacity
					if (bufferLength.incrementAndGet() == bufferSize) {
						List<T> curr = new ArrayList<T>();
						buffer.drainTo(curr);
						bufferLength.addAndGet(-curr.size());
						observer.next(curr);
					}
				}
			};
			return Closeables.close(s, source.register(s));
		}
	};
}
/**
 * Buffers the source observable Ts into a list of Ts periodically and submits them to the returned observable.
 * Each next() invocation contains a new and modifiable list of Ts. The signaled List of Ts might be empty if
 * no Ts appeared from the original source within the current timespan.
 * The last T of the original source triggers an early submission to the output.
 * The scheduling is done on the default Scheduler.
 * @param <T> the type of elements to observe
 * @param source the source of Ts.
 * @param time the time value to split the buffer contents.
 * @param unit the time unit of the time
 * @return the observable of list of Ts
 */
@Nonnull
public static <T> Observable<List<T>> buffer(
		@Nonnull final Observable<? extends T> source,
		final long time,
		@Nonnull final TimeUnit unit) {
	// delegate to the scheduler-aware overload with the current default scheduler
	return buffer(source, time, unit, DEFAULT_SCHEDULER.get());
}
/**
 * Buffers the source observable Ts into a list of Ts periodically and submits them to the returned observable.
 * Each next() invocation contains a new and modifiable list of Ts. The signaled List of Ts might be empty if
 * no Ts appeared from the original source within the current timespan.
 * The last T of the original source triggers an early submission to the output.
 * @param <T> the type of elements to observe
 * @param source the source of Ts.
 * @param time the time value to split the buffer contents.
 * @param unit the time unit of the time
 * @param pool the scheduled execution pool to use
 * @return the observable of list of Ts
 */
@Nonnull
public static <T> Observable<List<T>> buffer(
		@Nonnull final Observable<? extends T> source,
		final long time,
		@Nonnull final TimeUnit unit,
		@Nonnull final Scheduler pool) {
	return new Observable<List<T>>() {
		@Override
		public Closeable register(final Observer<? super List<T>> observer) {
			final BlockingQueue<T> buffer = new LinkedBlockingQueue<T>();
			// shared fair lock: the timer flush and the source callbacks never overlap
			final Lock lock = new ReentrantLock(true);
			final DefaultRunnable r = new DefaultRunnable(lock) {
				@Override
				public void onRun() {
					// periodic flush of whatever accumulated so far (possibly empty)
					List<T> curr = new ArrayList<T>();
					buffer.drainTo(curr);
					observer.next(curr);
				}
			};
			DefaultObserver<T> o = new DefaultObserver<T>(lock, true) {
				Closeable timer = pool.schedule(r, time, time, unit);
				@Override
				protected void onClose() {
					Closeables.close0(timer);
				}
				@Override
				public void onError(Throwable ex) {
					observer.error(ex);
				}
				@Override
				public void onFinish() {
					// final flush of the remaining elements before completing
					List<T> curr = new ArrayList<T>();
					buffer.drainTo(curr);
					observer.next(curr);
					observer.finish();
				}
				/** The buffer to fill in. */
				@Override
				public void onNext(T value) {
					buffer.add(value);
				}
			};
			return Closeables.close(o, source.register(o));
		}
	};
}
/**
 * Returns an observable which combines the latest values of
 * both streams whenever one sends a new value.
 * <p><b>Exception semantics:</b> if any stream throws an exception, the output stream
 * throws an exception and all subscriptions are terminated.</p>
 * <p><b>Completion semantics:</b> The output stream terminates
 * after both streams terminate.</p>
 * <p>Note that at the beginning, when the left or right fires first, the selector function
 * will receive (value, null) or (null, value). If you want to react only in cases when both have sent
 * a value, use the {@link #combineLatestSent(Observable, Observable, Func2)} method.</p>
 * @param <T> the left element type
 * @param <U> the right element type
 * @param <V> the result element type
 * @param left the left stream
 * @param right the right stream
 * @param selector the function which combines values from both streams and returns a new value
 * @return the new observable.
 */
public static <T, U, V> Observable<V> combineLatest(
	final Observable<? extends T> left,
	final Observable<? extends U> right,
	final Func2<? super T, ? super U, ? extends V> selector
) {
	return new Observable<V>() {
		@Override
		public Closeable register(final Observer<? super V> observer) {
			// fair lock shared by both inner observers so emissions are serialized
			final Lock lock = new ReentrantLock(true);
			// holds the combined closeable so either side can cancel both registrations on error
			final AtomicReference<Closeable> closeBoth = new AtomicReference<Closeable>();
			// latest value seen on each side; null until that side fires for the first time
			final AtomicReference<T> leftRef = new AtomicReference<T>();
			final AtomicReference<U> rightRef = new AtomicReference<U>();
			// number of still-running sources; finish() is forwarded when it reaches zero
			final AtomicInteger wip = new AtomicInteger(2);
			DefaultObserverEx<T> obs1 = new DefaultObserverEx<T>(lock, false) {
				@Override
				protected void onError(Throwable ex) {
					// an error on either side terminates the output and both subscriptions
					observer.error(ex);
					Closeables.close0(closeBoth.get());
				}
				@Override
				protected void onFinish() {
					if (wip.decrementAndGet() == 0) {
						observer.finish();
					}
					close();
				}
				@Override
				protected void onNext(T value) {
					// record the new left value and combine it with the latest right value
					leftRef.set(value);
					observer.next(selector.invoke(value, rightRef.get()));
				}
			};
			DefaultObserverEx<U> obs2 = new DefaultObserverEx<U>(lock, false) {
				@Override
				protected void onError(Throwable ex) {
					observer.error(ex);
					Closeables.close0(closeBoth.get());
				}
				@Override
				protected void onFinish() {
					if (wip.decrementAndGet() == 0) {
						observer.finish();
					}
					close();
				}
				@Override
				protected void onNext(U value) {
					// record the new right value and combine it with the latest left value
					rightRef.set(value);
					observer.next(selector.invoke(leftRef.get(), value));
				}
			};
			closeBoth.set(Closeables.close(obs1, obs2));
			obs1.add(new Object(), left);
			obs2.add(new Object(), right);
			return closeBoth.get();
		}
	};
}
/**
 * Returns an observable which combines the latest values of
 * both streams whenever one sends a new value, but only after both sent a value.
 * <p><b>Exception semantics:</b> if any stream throws an exception, the output stream
 * throws an exception and all subscriptions are terminated.</p>
 * <p><b>Completion semantics:</b> The output stream terminates
 * after both streams terminate.</p>
 * <p>The function will start combining the values only when both sides have already sent
 * a value.</p>
 * @param <T> the left element type
 * @param <U> the right element type
 * @param <V> the result element type
 * @param left the left stream
 * @param right the right stream
 * @param selector the function which combines values from both streams and returns a new value
 * @return the new observable.
 */
public static <T, U, V> Observable<V> combineLatestSent(
	final Observable<? extends T> left,
	final Observable<? extends U> right,
	final Func2<? super T, ? super U, ? extends V> selector
) {
	return new Observable<V>() {
		@Override
		public Closeable register(final Observer<? super V> observer) {
			// fair lock shared by both inner observers so emissions are serialized
			final Lock lock = new ReentrantLock(true);
			// holds the combined closeable so either side can cancel both registrations on error
			final AtomicReference<Closeable> closeBoth = new AtomicReference<Closeable>();
			// latest left value and a flag telling whether the left side has fired at least once
			final AtomicReference<T> leftRef = new AtomicReference<T>();
			final AtomicBoolean leftFirst = new AtomicBoolean();
			// latest right value and a flag telling whether the right side has fired at least once
			final AtomicReference<U> rightRef = new AtomicReference<U>();
			final AtomicBoolean rightFirst = new AtomicBoolean();
			// number of still-running sources; finish() is forwarded when it reaches zero
			final AtomicInteger wip = new AtomicInteger(2);
			DefaultObserverEx<T> obs1 = new DefaultObserverEx<T>(lock, false) {
				@Override
				protected void onError(Throwable ex) {
					observer.error(ex);
					Closeables.close0(closeBoth.get());
				}
				@Override
				protected void onFinish() {
					if (wip.decrementAndGet() == 0) {
						observer.finish();
					}
					close();
				}
				@Override
				protected void onNext(T value) {
					leftRef.set(value);
					leftFirst.set(true);
					// combine only once the right side has produced at least one value
					if (rightFirst.get()) {
						observer.next(selector.invoke(value, rightRef.get()));
					}
				}
			};
			DefaultObserverEx<U> obs2 = new DefaultObserverEx<U>(lock, false) {
				@Override
				protected void onError(Throwable ex) {
					observer.error(ex);
					Closeables.close0(closeBoth.get());
				}
				@Override
				protected void onFinish() {
					if (wip.decrementAndGet() == 0) {
						observer.finish();
					}
					close();
				}
				@Override
				protected void onNext(U value) {
					rightRef.set(value);
					rightFirst.set(true);
					// combine only once the left side has produced at least one value
					if (leftFirst.get()) {
						observer.next(selector.invoke(leftRef.get(), value));
					}
				}
			};
			closeBoth.set(Closeables.close(obs1, obs2));
			obs1.add(new Object(), left);
			obs2.add(new Object(), right);
			return closeBoth.get();
		}
	};
}
/**
 * Concatenates the source observables in a way that when the first finish(), the
 * second gets registered and continued, and so on.
 * FIXME not sure how it should handle closability
 * @param <T> the type of the values to observe
 * @param sources the source list of subsequent observables
 * @return the concatenated observable
 */
@Nonnull
public static <T> Observable<T> concat(
		@Nonnull final Iterable<? extends Observable<? extends T>> sources) {
	return new Observable<T>() {
		@Override
		public Closeable register(final Observer<? super T> observer) {
			final Iterator<? extends Observable<? extends T>> it = sources.iterator();
			if (it.hasNext()) {
				DefaultObserver<T> obs = new DefaultObserver<T>(false) {
					/** The current registration. */
					@GuardedBy("lock")
					Closeable current;
					{
						// instance initializer: register on the first source under the lock
						// so onFinish/onError cannot race with the initial registration
						lock.lock();
						try {
							current = it.next().register(this);
						} finally {
							lock.unlock();
						}
					}
					@Override
					protected void onClose() {
						Closeables.close0(current);
					}
					@Override
					public void onError(Throwable ex) {
						observer.error(ex);
						close();
					}
					@Override
					public void onFinish() {
						// when one source completes, move on to the next one;
						// only the last source's finish() reaches the downstream observer
						if (it.hasNext()) {
							Closeables.close0(current);
							current = it.next().register(this);
						} else {
							observer.finish();
							close();
						}
					}
					@Override
					public void onNext(T value) {
						observer.next(value);
					}
				};
				return obs;
			}
			// no sources at all: behave like an empty observable
			return Reactive.<T>empty().register(observer);
		}
	};
}
/**
 * Concatenate the the multiple sources of T one after another.
 * <p><b>Exception semantics:</b> if the sources or any inner observer signals an
 * error, the outer observable will signal that error and the sequence is terminated.</p>
 * @param <T> the element type
 * @param sources the observable sequence of the observable sequence of Ts.
 * @return the new observable
 */
public static <T> Observable<T> concat(
		final Observable<? extends Observable<T>> sources
) {
	return new Observable<T>() {
		@Override
		public Closeable register(final Observer<? super T> observer) {
			// inner observables that arrived while a previous inner one is still running
			final LinkedBlockingQueue<Observable<T>> sourceQueue = new LinkedBlockingQueue<Observable<T>>();
			// counts the outer source plus any currently running inner source;
			// finish() is forwarded only when it drops to zero
			final AtomicInteger wip = new AtomicInteger(1);
			DefaultObserverEx<Observable<T>> o = new DefaultObserverEx<Observable<T>>(true) {
				/** The first value arrived? */
				@GuardedBy("lock")
				boolean first;
				{
					add("sources", sources);
				}
				/**
				 * The inner exception to forward.
				 * @param ex the exception
				 */
				void innerError(Throwable ex) {
					error(ex);
				}
				@Override
				protected void onError(Throwable ex) {
					observer.error(ex);
				}
				@Override
				protected void onFinish() {
					if (wip.decrementAndGet() == 0) {
						observer.finish();
					}
				}
				@Override
				protected void onNext(Observable<T> value) {
					// the first inner observable is registered immediately;
					// later ones are queued until the running one completes
					if (!first) {
						first = true;
						registerOn(value);
					} else {
						sourceQueue.add(value);
					}
				}
				void registerOn(Observable<T> value) {
					wip.incrementAndGet();
					// replacing under the key "source" closes the previous inner registration
					replace("source", "source", value.register(new DefaultObserver<T>(lock, true) {
						@Override
						public void onError(Throwable ex) {
							innerError(ex);
						}
						@Override
						public void onFinish() {
							// continue with the next queued inner observable, if any
							Observable<T> nextO = sourceQueue.poll();
							if (nextO != null) {
								registerOn(nextO);
							} else {
								if (wip.decrementAndGet() == 0) {
									observer.finish();
									remove("source");
								} else {
									// outer still running: allow the next arrival to register directly
									first = true;
								}
							}
						}
						@Override
						public void onNext(T value) {
							observer.next(value);
						}
					}));
				}
			};
			return o;
		}
	};
}
/**
 * Concatenates two observables: once the first signals finish(), the second
 * is registered and the sequence continues with its values.
 * @param <T> the type of the elements
 * @param first the observable to run first
 * @param second the observable to continue with
 * @return the concatenated observable
 */
@Nonnull
public static <T> Observable<T> concat(
		@Nonnull Observable<? extends T> first,
		@Nonnull Observable<? extends T> second) {
	// delegate to the iterable-based overload with a two-element sequence
	List<Observable<? extends T>> sequence = new ArrayList<Observable<? extends T>>(2);
	sequence.add(first);
	sequence.add(second);
	return concat(sequence);
}
/**
 * Signals a single TRUE if the source observable signals a value equals() with the given value.
 * Both the source elements and the test value may be null. The signal is emitted after the
 * first encounter of the given value.
 * @param <T> the type of the observed values
 * @param source the source observable
 * @param value the value to look for
 * @return the observer for contains
 */
@Nonnull
public static <T> Observable<Boolean> contains(
		@Nonnull final Observable<? extends T> source,
		final T value) {
	return any(source, new Func1<T, Boolean>() {
		@Override
		public Boolean invoke(T param1) {
			// reference equality also covers the case where both are null
			if (param1 == value) {
				return true;
			}
			return param1 != null && param1.equals(value);
		}
	});
}
/**
 * Counts the number of elements in the observable source and emits that single
 * count when the source completes.
 * FIXME sequence guarantees of the underlying source are assumed (no concurrent next() calls).
 * @param <T> the element type
 * @param source the source observable
 * @return the count signal
 */
@Nonnull
public static <T> Observable<Integer> count(
		@Nonnull final Observable<T> source) {
	return new Observable<Integer>() {
		@Override
		public Closeable register(final Observer<? super Integer> observer) {
			return source.register(new Observer<T>() {
				/** Number of next() calls observed so far. */
				int seen;
				@Override
				public void error(Throwable ex) {
					observer.error(ex);
				}
				@Override
				public void finish() {
					// emit the final tally, then complete
					observer.next(seen);
					observer.finish();
				}
				@Override
				public void next(T value) {
					seen++;
				}
			});
		}
	};
}
/**
 * Counts the number of elements in the observable source as a long and emits
 * that single count when the source completes.
 * FIXME sequence guarantees of the underlying source are assumed (no concurrent next() calls).
 * @param <T> the element type
 * @param source the source observable
 * @return the count signal
 */
@Nonnull
public static <T> Observable<Long> countLong(
		@Nonnull final Observable<T> source) {
	return new Observable<Long>() {
		@Override
		public Closeable register(final Observer<? super Long> observer) {
			return source.register(new Observer<T>() {
				/** Number of next() calls observed so far. */
				long seen;
				@Override
				public void error(Throwable ex) {
					observer.error(ex);
				}
				@Override
				public void finish() {
					// emit the final tally, then complete
					observer.next(seen);
					observer.finish();
				}
				@Override
				public void next(T value) {
					seen++;
				}
			});
		}
	};
}
/**
 * Create an observable instance from a function which takes responsibility
 * for registering observers and returns an Action0 used to undo the registration.
 * @param <T> the type of the value to observe
 * @param subscribe the function to manage new subscriptions
 * @return the observable instance
 */
@Nonnull
public static <T> Observable<T> create(
		@Nonnull final Func1<Observer<? super T>, ? extends Action0> subscribe) {
	return new Observable<T>() {
		@Override
		public Closeable register(Observer<? super T> observer) {
			// the action returned by the factory performs the actual deregistration
			final Action0 unsubscribe = subscribe.invoke(observer);
			return new Closeable() {
				@Override
				public void close() {
					unsubscribe.invoke();
				}
			};
		}
	};
}
/**
 * Create an observable instance from a function which takes responsibility
 * for registering observers and returns a custom Closeable that terminates
 * the registration.
 * @param <T> the type of the value to observe
 * @param subscribe the function to manage new subscriptions
 * @return the observable instance
 */
@Nonnull
public static <T> Observable<T> createWithCloseable(
		@Nonnull final Func1<Observer<? super T>, ? extends Closeable> subscribe) {
	return new Observable<T>() {
		@Override
		public Closeable register(Observer<? super T> observer) {
			// the factory's Closeable is handed straight back to the caller
			final Closeable handle = subscribe.invoke(observer);
			return handle;
		}
	};
}
/**
 * Wraps the source so that any next(), finish() or error() arriving after an
 * earlier finish() or error() call is reported by printing an IllegalStateException
 * stack trace (the signal is still forwarded).
 * @param <T> the element type.
 * @param source the source observable
 * @return the augmented observable
 */
@Nonnull
public static <T> Observable<T> debugState(
		@Nonnull final Observable<? extends T> source) {
	return new Observable<T>() {
		@Override
		public Closeable register(final Observer<? super T> observer) {
			return source.register(new Observer<T>() {
				/** Tracks whether a terminal event was already delivered. */
				ObserverState state = ObserverState.OBSERVER_RUNNING;
				/** Report (via stack trace) if the observer already terminated. */
				void checkRunning() {
					if (state != ObserverState.OBSERVER_RUNNING) {
						new IllegalStateException(state.toString()).printStackTrace();
					}
				}
				@Override
				public void error(Throwable ex) {
					checkRunning();
					state = ObserverState.OBSERVER_ERROR;
					observer.error(ex);
				}
				@Override
				public void finish() {
					checkRunning();
					state = ObserverState.OBSERVER_FINISHED;
					observer.finish();
				}
				@Override
				public void next(T value) {
					checkRunning();
					observer.next(value);
				}
			});
		}
	};
}
/**
 * The returned observable invokes the <code>observableFactory</code> whenever an observer
 * tries to subscribe to it, deferring creation of the underlying observable until then.
 * @param <T> the type of elements to observe
 * @param observableFactory the factory which is responsible for creating the source observable.
 * @return the result observable
 */
@Nonnull
public static <T> Observable<T> defer(
		@Nonnull final Func0<? extends Observable<? extends T>> observableFactory) {
	return new Observable<T>() {
		@Override
		public Closeable register(Observer<? super T> observer) {
			// a fresh underlying observable is produced for every registration
			final Observable<? extends T> actual = observableFactory.invoke();
			return actual.register(observer);
		}
	};
}
/**
 * Delays the propagation of events of the source by the given amount.
 * The delay preserves the relative time difference between subsequent notifications.
 * It uses the default scheduler pool when submitting the delayed values.
 * @param <T> the type of elements
 * @param source the source of Ts
 * @param time the time value
 * @param unit the time unit
 * @return the delayed observable of Ts
 */
@Nonnull
public static <T> Observable<T> delay(
		@Nonnull final Observable<? extends T> source,
		final long time,
		@Nonnull final TimeUnit unit) {
	// delegate to the full overload using the currently configured default scheduler
	final Scheduler scheduler = DEFAULT_SCHEDULER.get();
	return delay(source, time, unit, scheduler);
}
/**
 * Delays the propagation of events of the source by the given amount. It uses the pool for the scheduled waits.
 * The delay preserves the relative time difference between subsequent notifiactions
 * @param <T> the type of elements
 * @param source the source of Ts
 * @param time the time value
 * @param unit the time unit
 * @param pool the pool to use for scheduling
 * @return the delayed observable of Ts
 */
@Nonnull
public static <T> Observable<T> delay(
		@Nonnull final Observable<? extends T> source,
		final long time,
		@Nonnull final TimeUnit unit,
		@Nonnull final Scheduler pool) {
	return new Observable<T>() {
		@Override
		public Closeable register(final Observer<? super T> observer) {
			DefaultObserver<T> obs = new DefaultObserver<T>(true) {
				/** The outstanding requests. */
				final BlockingQueue<Closeable> outstanding = new LinkedBlockingQueue<Closeable>();
				@Override
				public void onClose() {
					// cancel every still-pending scheduled delivery
					List<Closeable> list = new LinkedList<Closeable>();
					outstanding.drainTo(list);
					for (Closeable c : list) {
						Closeables.close0(c);
					}
					// NOTE(review): calling super.close() from onClose() looks like it could
					// re-enter close(); confirm against the DefaultObserver contract.
					super.close();
				}
				@Override
				public void onError(final Throwable ex) {
					// the terminal error is itself delivered after the configured delay
					Runnable r = new Runnable() {
						@Override
						public void run() {
							try {
								observer.error(ex);
								close();
							} finally {
								// NOTE(review): poll() removes an arbitrary head entry, not
								// necessarily this task's own closeable — presumably acceptable
								// since completed tasks' closeables are no-ops; verify.
								outstanding.poll();
							}
						}
					};
					outstanding.add(pool.schedule(r, time, unit));
				}
				@Override
				public void onFinish() {
					// completion is also delivered after the configured delay
					Runnable r = new Runnable() {
						@Override
						public void run() {
							try {
								observer.finish();
								close();
							} finally {
								outstanding.poll();
							}
						}
					};
					outstanding.add(pool.schedule(r, time, unit));
				}
				@Override
				public void onNext(final T value) {
					// each value is re-emitted on the pool after the same fixed delay,
					// which preserves the relative spacing between notifications
					Runnable r = new Runnable() {
						@Override
						public void run() {
							try {
								observer.next(value);
							} finally {
								outstanding.poll();
							}
						}
					};
					outstanding.add(pool.schedule(r, time, unit));
				}
			};
			return obs;
		}
	};
}
/**
 * Returns an observable which converts all option messages
 * back to regular next(), error() and finish() messages.
 * The returned observable adheres to the <code>next* (error|finish)?</code> pattern,
 * which ensures that no further signals are relayed after an error or finish.
 * @param <T> the source element type
 * @param source the source of Ts
 * @return the new observable
 * @see #materialize(Observable)
 */
@Nonnull
public static <T> Observable<T> dematerialize(
		@Nonnull final Observable<? extends Option<T>> source) {
	return new Observable<T>() {
		@Override
		public Closeable register(final Observer<? super T> observer) {
			return source.register(new Observer<Option<T>>() {
				/**
				 * Set once a terminal event has been forwarded.
				 * Uses compareAndSet so that, even under a race, at most one
				 * terminal signal reaches the downstream observer.
				 * (The original get()-then-set(true) sequence was a
				 * check-then-act race.)
				 */
				final AtomicBoolean done = new AtomicBoolean();
				@Override
				public void error(Throwable ex) {
					if (done.compareAndSet(false, true)) {
						observer.error(ex);
					}
				}
				@Override
				public void finish() {
					if (done.compareAndSet(false, true)) {
						observer.finish();
					}
				}
				@Override
				public void next(Option<T> value) {
					if (!done.get()) {
						if (Option.isNone(value)) {
							// a None option encodes completion
							if (done.compareAndSet(false, true)) {
								observer.finish();
							}
						} else if (Option.isSome(value)) {
							// a Some option carries a regular value
							observer.next(value.value());
						} else {
							// the remaining case is an Error option
							if (done.compareAndSet(false, true)) {
								observer.error(Option.getError(value));
							}
						}
					}
				}
			});
		}
	};
}
/**
 * Dispatches the option to the corresponding Observer method:
 * None becomes finish(), Error becomes error(), otherwise next() with the value.
 * @param <T> the value type
 * @param observer the observer
 * @param value the value to dispatch
 */
@Nonnull
public static <T> void dispatch(
		@Nonnull Observer<? super T> observer,
		@Nonnull Option<T> value) {
	// guard-clause form: handle the two terminal cases first
	if (value == Option.none()) {
		observer.finish();
		return;
	}
	if (Option.isError(value)) {
		observer.error(((Option.Error<?>)value).error());
		return;
	}
	observer.next(value.value());
}
/**
 * Returns an observable which fires next() events only when the subsequent values differ
 * in terms of Object.equals().
 * @param <T> the type of the values
 * @param source the source observable
 * @return the observable
 */
@Nonnull
public static <T> Observable<T> distinct(
		@Nonnull final Observable<? extends T> source) {
	// the values themselves act as their own comparison keys
	final Func1<T, T> identity = Functions.<T>identity();
	return distinct(source, identity);
}
/**
 * Returns Ts from the source observable if the subsequent keys extracted by
 * <code>keyExtractor</code> are different.
 * @param <T> the type of the values to observe
 * @param <U> the key type checked for distinction
 * @param source the source of Ts
 * @param keyExtractor the extractor for the keys
 * @return the new filtered observable
 */
@Nonnull
public static <T, U> Observable<T> distinct(
		@Nonnull final Observable<? extends T> source,
		@Nonnull final Func1<T, U> keyExtractor) {
	return new Observable<T>() {
		@Override
		public Closeable register(final Observer<? super T> observer) {
			return source.register(new Observer<T>() {
				/** Indication as the first. */
				boolean first = true;
				/** The key of the previously emitted value (valid once first == false). */
				U lastKey;
				@Override
				public void error(Throwable ex) {
					observer.error(ex);
				}
				@Override
				public void finish() {
					observer.finish();
				}
				@Override
				public void next(T value) {
					U key = keyExtractor.invoke(value);
					if (first) {
						// the first element is always emitted
						first = false;
						observer.next(value);
					} else
					// BUG FIX: the original compared lastKey against the VALUE
					// (lastKey != value) instead of the extracted KEY, so the
					// reference short-circuit never applied to the key comparison.
					if (lastKey != key && (lastKey == null || !lastKey.equals(key))) {
						observer.next(value);
					}
					lastKey = key;
				}
			});
		}
	};
}
/**
 * Maintains a queue of Ts which is then drained by the pump, using the default pool.
 * FIXME not sure what this method should do and how.
 * @param <T> the type of the values
 * @param source the source of Ts
 * @param pump the pump that drains the queue
 * @return the new observable
 */
@Nonnull
public static <T> Observable<Void> drain(
		@Nonnull final Observable<? extends T> source,
		@Nonnull final Func1<? super T, ? extends Observable<Void>> pump) {
	// delegate to the full overload using the currently configured default scheduler
	final Scheduler scheduler = DEFAULT_SCHEDULER.get();
	return drain(source, pump, scheduler);
}
/**
 * Maintains a queue of Ts which is then drained by the pump.
 * FIXME not sure what this method should do and how.
 * @param <T> the type of the values
 * @param source the source of Ts
 * @param pump the pump that drains the queue
 * @param pool the pool for the drain
 * @return the new observable
 */
@Nonnull
public static <T> Observable<Void> drain(
		@Nonnull final Observable<? extends T> source,
		@Nonnull final Func1<? super T, ? extends Observable<Void>> pump,
		@Nonnull final Scheduler pool) {
	return new Observable<Void>() {
		@Override
		public Closeable register(final Observer<? super Void> observer) {
			// keep track of the forked observers so the last should invoke finish() on the observer
			DefaultObserver<T> obs = new DefaultObserver<T>(true) {
				/** The work in progress counter. */
				final AtomicInteger wip = new AtomicInteger(1);
				/** The executor which ensures the sequence. */
				final SingleLaneExecutor<T> exec = new SingleLaneExecutor<T>(pool, new Action1<T>() {
					@Override
					public void invoke(T value) {
						// each queued value is handed to the pump; the pump's
						// observable signals when that value has been drained
						pump.invoke(value).register(
							new Observer<Void>() {
								@Override
								public void error(Throwable ex) {
									// forward under the shared lock to keep signals serialized
									lock.lock();
									try {
										observer.error(ex);
										close();
									} finally {
										lock.unlock();
									}
								}
								@Override
								public void finish() {
									// the last outstanding pump completion finishes the output
									if (wip.decrementAndGet() == 0) {
										observer.finish();
										close();
									}
								}
								@Override
								public void next(Void value) {
									// the pump observable must not emit values
									throw new AssertionError();
								}
							}
						);
					};
				});
				@Override
				public void onClose() {
					// exec.close(); FIXME should not cancel the pool?!
				}
				@Override
				public void onError(Throwable ex) {
					observer.error(ex);
					close();
				}
				@Override
				public void onFinish() {
					// drop the registration's own wip token; pending pump runs may still finish later
					if (wip.decrementAndGet() == 0) {
						observer.finish();
					}
				}
				@Override
				public void onNext(T value) {
					// one wip token per queued value, released by the pump's finish()
					wip.incrementAndGet();
					exec.add(value);
				}
			};
			return Closeables.close(obs, source.register(obs));
		}
	};
}
/**
 * @param <T> the type of the values to observe (irrelevant)
 * @return an empty observable which signals only finish() on the default observer pool.
 */
@Nonnull
public static <T> Observable<T> empty() {
	// delegate to the pool-based overload using the default scheduler
	final Scheduler scheduler = DEFAULT_SCHEDULER.get();
	return empty(scheduler);
}
/**
 * Returns an empty observable which signals only finish() on the given pool.
 * @param <T> the expected type (irrelevant)
 * @param pool the pool to invoke the finish() on
 * @return the observable
 */
@Nonnull
public static <T> Observable<T> empty(
		@Nonnull final Scheduler pool) {
	return new Observable<T>() {
		@Override
		public Closeable register(final Observer<? super T> observer) {
			// the only work done per registration is a single scheduled finish()
			final Runnable signalFinish = new Runnable() {
				@Override
				public void run() {
					observer.finish();
				}
			};
			return pool.schedule(signalFinish);
		}
	};
}
/**
 * Invokes the given action when the source signals a finish() or error(),
 * before the terminal signal is forwarded downstream.
 * @param <T> the type of the observed values
 * @param source the source of Ts
 * @param action the action to invoke on finish() or error()
 * @return the new observable
 */
@Nonnull
public static <T> Observable<T> finish(
		@Nonnull final Observable<? extends T> source,
		@Nonnull final Action0 action) {
	return new Observable<T>() {
		@Override
		public Closeable register(final Observer<? super T> observer) {
			return source.register(new Observer<T>() {
				@Override
				public void next(T value) {
					// regular values pass through untouched
					observer.next(value);
				}
				@Override
				public void error(Throwable ex) {
					// run the action first, then forward the terminal signal
					action.invoke();
					observer.error(ex);
				}
				@Override
				public void finish() {
					action.invoke();
					observer.finish();
				}
			});
		}
	};
}
/**
 * Blocks until the first element of the observable becomes available and returns it.
 * Might block forever.
 * @param <T> the type of the elements
 * @param source the source of Ts
 * @return the first element
 * @throws NoSuchElementException if the observable completes without producing a value
 */
public static <T> T first(
		@Nonnull final Observable<? extends T> source) {
	CloseableIterator<T> it = toIterable(source).iterator();
	try {
		// guard clause: an exhausted source yields no first element
		if (!it.hasNext()) {
			throw new NoSuchElementException();
		}
		return it.next();
	} finally {
		// always release the underlying registration
		Closeables.close0(it);
	}
}
/**
 * Creates a concatenated sequence of Observables based on the decision function of
 * <code>selector</code>, keyed by the source iterable.
 * @param <T> the type of the source values
 * @param <U> the type of the observable elements.
 * @param source the source of keys
 * @param selector the selector of keys which returns a new observable
 * @return the concatenated observable.
 */
public static <T, U> Observable<U> forEach(
		@Nonnull final Iterable<? extends T> source,
		@Nonnull final Func1<? super T, ? extends Observable<? extends U>> selector) {
	// map each key to its observable eagerly, then concatenate them in order
	List<Observable<? extends U>> sequence = new ArrayList<Observable<? extends U>>();
	for (T key : source) {
		sequence.add(selector.invoke(key));
	}
	return concat(sequence);
}
/**
 * Runs the observables in parallel and joins their last values whenever one fires.
 * FIXME not sure what this method should do in case of error (inner error() is currently ignored).
 * @param <T> the type of the source values
 * @param sources the list of sources
 * @return the observable
 */
@Nonnull
public static <T> Observable<List<T>> forkJoin(
		@Nonnull final Iterable<? extends Observable<? extends T>> sources) {
	return new Observable<List<T>>() {
		@Override
		public Closeable register(final Observer<? super List<T>> observer) {
			// collect the sources first so the counters can be sized correctly
			final List<Observable<? extends T>> observableList = new ArrayList<Observable<? extends T>>();
			for (Observable<? extends T> o : sources) {
				observableList.add(o);
			}
			final List<AtomicReference<T>> lastValues = new ArrayList<AtomicReference<T>>(observableList.size());
			final List<Observer<T>> observers = new ArrayList<Observer<T>>(observableList.size());
			// BUG FIX: the original created this counter from observableList.size()
			// BEFORE any source was added, so it always started at 1 and
			// runIfComplete fired prematurely after the registration loop.
			final AtomicInteger wip = new AtomicInteger(observableList.size() + 1);
			for (int i = 0; i < observableList.size(); i++) {
				final int j = i;
				lastValues.add(new AtomicReference<T>());
				observers.add(new Observer<T>() {
					/** The last value. */
					T last;
					@Override
					public void error(Throwable ex) {
						// TODO errors are currently swallowed (preserved original behavior)
					}
					@Override
					public void finish() {
						// publish this source's final value and check for overall completion
						lastValues.get(j).set(last);
						runIfComplete(observer, lastValues, wip);
					}
					@Override
					public void next(T value) {
						last = value;
					}
				});
			}
			List<Closeable> closeables = new ArrayList<Closeable>();
			for (int i = 0; i < observableList.size(); i++) {
				closeables.add(observableList.get(i).register(observers.get(i)));
			}
			// release the registration's own token; completes immediately if there were no sources
			runIfComplete(observer, lastValues, wip);
			return Closeables.closeAll(closeables);
		}
		/**
		 * Runs the completion sequence once the WIP drops to zero.
		 * @param observer the observer who will receive the values
		 * @param lastValues the array of last values
		 * @param wip the work in progress counter
		 */
		public void runIfComplete(
				final Observer<? super List<T>> observer,
				final List<AtomicReference<T>> lastValues,
				final AtomicInteger wip) {
			if (wip.decrementAndGet() == 0) {
				List<T> values = new ArrayList<T>();
				for (AtomicReference<T> r : lastValues) {
					values.add(r.get());
				}
				observer.next(values);
				observer.finish();
			}
		}
	};
}
/**
 * Generates a stream of Us from a value T stream, running the generator loop
 * on the default pool.
 * If T = int and U is double, this would be seen as for (int i = 0; i &lt; 10; i++) { yield return i / 2.0; }
 * @param <T> the type of the generator values
 * @param <U> the type of the observed values
 * @param initial the initial generator value
 * @param condition the condition that must hold to continue generating Ts
 * @param next the function that computes the next value of T
 * @param selector the selector which turns Ts into Us.
 * @return the observable
 */
@Nonnull
public static <T, U> Observable<U> generate(
		final T initial,
		@Nonnull final Func1<? super T, Boolean> condition,
		@Nonnull final Func1<? super T, ? extends T> next,
		@Nonnull final Func1<? super T, ? extends U> selector) {
	// delegate to the pool-based overload with the default scheduler
	final Scheduler scheduler = DEFAULT_SCHEDULER.get();
	return generate(initial, condition, next, selector, scheduler);
}
/**
 * Generates a stream of Us from a value T stream on the given pool.
 * If T = int and U is double, this would be seen as for (int i = 0; i &lt; 10; i++) { yield return i / 2.0; }
 * @param <T> the type of the generator values
 * @param <U> the type of the observed values
 * @param initial the initial generator value
 * @param condition the condition that must hold to continue generating Ts
 * @param next the function that computes the next value of T
 * @param selector the selector which turns Ts into Us.
 * @param pool the thread pool where the generation loop should run.
 * @return the observable
 */
@Nonnull
public static <T, U> Observable<U> generate(
		final T initial,
		@Nonnull final Func1<? super T, Boolean> condition,
		@Nonnull final Func1<? super T, ? extends T> next,
		@Nonnull final Func1<? super T, ? extends U> selector,
		@Nonnull final Scheduler pool) {
	return new Observable<U>() {
		@Override
		public Closeable register(final Observer<? super U> observer) {
			DefaultRunnable s = new DefaultRunnable() {
				@Override
				public void onRun() {
					// classic generator loop; stops early if the task is cancelled
					for (T t = initial; condition.invoke(t) && !cancelled(); t = next.invoke(t)) {
						observer.next(selector.invoke(t));
					}
					// only a non-cancelled run is allowed to signal completion
					if (!cancelled()) {
						observer.finish();
					}
				}
			};
			return pool.schedule(s);
		}
	};
}
/**
 * Generates a timed stream of Us from a value T stream, running on the default pool.
 * If T = int and U is double, this would be seen as for (int i = 0; i &lt; 10; i++) { sleep(time); yield return i / 2.0; }
 * @param <T> the type of the generator values
 * @param <U> the type of the observed values
 * @param initial the initial generator value
 * @param condition the condition that must hold to continue generating Ts
 * @param next the function that computes the next value of T
 * @param selector the selector which turns Ts into Us.
 * @param delay the selector which tells how much to wait before releasing the next U
 * @return the observable
 */
@Nonnull
public static <T, U> Observable<Timestamped<U>> generateTimed(
		final T initial,
		@Nonnull final Func1<? super T, Boolean> condition,
		@Nonnull final Func1<? super T, ? extends T> next,
		@Nonnull final Func1<? super T, ? extends U> selector,
		@Nonnull final Func1<? super T, Long> delay) {
	// delegate to the pool-based overload with the default scheduler
	final Scheduler scheduler = DEFAULT_SCHEDULER.get();
	return generateTimed(initial, condition, next, selector, delay, scheduler);
}
/**
 * Generates a stream of Us by using a value T stream.
 * If T = int and U is double, this would be seen as for (int i = 0; i &lt; 10; i++) { sleep(time); yield return i / 2.0; }
 * FIXME timeunit for the delay function!
 * @param <T> the type of the generator values
 * @param <U> the type of the observed values
 * @param initial the initial generator value
 * @param condition the condition that must hold to continue generating Ts
 * @param next the function that computes the next value of T
 * @param selector the selector which turns Ts into Us.
 * @param delay the selector which tells how much to wait (in milliseconds) before releasing the next U
 * @param pool the scheduled pool where the generation loop should run.
 * @return the observable
 */
@Nonnull
public static <T, U> Observable<Timestamped<U>> generateTimed(
		final T initial,
		@Nonnull final Func1<? super T, Boolean> condition,
		@Nonnull final Func1<? super T, ? extends T> next,
		@Nonnull final Func1<? super T, ? extends U> selector,
		@Nonnull final Func1<? super T, Long> delay,
		@Nonnull final Scheduler pool) {
	return new Observable<Timestamped<U>>() {
		@Override
		public Closeable register(final Observer<? super Timestamped<U>> observer) {
			// the cancellation indicator
			DefaultRunnable s = new DefaultRunnable() {
				// the generator state carried across scheduled invocations
				T current = initial;
				@Override
				public void onRun() {
					// emit the current element stamped with the wall-clock time of emission
					U invoke = selector.invoke(current);
					Timestamped<U> of = Timestamped.of(invoke, System.currentTimeMillis());
					observer.next(of);
					// advance the generator, then reschedule this same runnable after the
					// element-specific delay, or complete if the condition no longer holds
					final T tn = next.invoke(current);
					current = tn;
					if (condition.invoke(tn) && !cancelled()) {
						// NOTE(review): the Closeable returned by this reschedule is dropped,
						// so only the first schedule (below) is cancellable via the returned
						// Closeable; later runs rely on the cancelled() flag — confirm.
						pool.schedule(this, delay.invoke(tn), TimeUnit.MILLISECONDS);
					} else {
						if (!cancelled()) {
							observer.finish();
						}
					}
				}
			};
			// schedule the first element only if the initial state satisfies the condition
			if (condition.invoke(initial)) {
				return pool.schedule(s, delay.invoke(initial), TimeUnit.MILLISECONDS);
			}
			return Functions.EMPTY_CLOSEABLE;
		}
	};
}
/**
 * @return the current default pool used by the Observables methods
 */
@Nonnull
public static Scheduler getDefaultScheduler() {
	// read the scheduler from the shared holder on every call
	final Scheduler current = DEFAULT_SCHEDULER.get();
	return current;
}
/**
 * Group the specified source according to the keys provided by the extractor function.
 * The resulting observable gets notified once a new group is encountered.
 * Each previously encountered group by itself receives updates along the way.
 * If the source finish(), all encountered group will finish().
 * FIXME not sure how this should work.
 * @param <T> the type of the source element
 * @param <Key> the key type of the group
 * @param source the source of Ts
 * @param keyExtractor the key extractor which creates Keys from Ts
 * @return the observable
 */
@Nonnull
public static <T, Key> Observable<GroupedObservable<Key, T>> groupBy(
        @Nonnull final Observable<? extends T> source,
        @Nonnull final Func1<? super T, ? extends Key> keyExtractor) {
    // delegate to the value-extracting overload using the identity mapping
    final Func1<? super T, ? extends T> identityFn = Functions.<T>identity();
    return groupBy(source, keyExtractor, identityFn);
}
/**
 * Group the specified source according to the keys provided by the extractor function.
 * The resulting observable gets notified once a new group is encountered.
 * Each previously encountered group by itself receives updates along the way.
 * If the source finish(), all encountered group will finish().
 * @param <T> the type of the source element
 * @param <U> the type of the output element
 * @param <Key> the key type of the group
 * @param source the source of Ts
 * @param keyExtractor the key extractor which creates Keys from Ts
 * @param valueExtractor the extractor which makes Us from Ts
 * @return the observable
 */
@Nonnull
public static <T, U, Key> Observable<GroupedObservable<Key, U>> groupBy(
        @Nonnull final Observable<? extends T> source,
        @Nonnull final Func1<? super T, ? extends Key> keyExtractor,
        @Nonnull final Func1<? super T, ? extends U> valueExtractor) {
    return new Observable<GroupedObservable<Key, U>>() {
        @Override
        public Closeable register(
                final Observer<? super GroupedObservable<Key, U>> observer) {
            // one entry per encountered key; concurrent because next() calls may race
            final ConcurrentMap<Key, GroupedRegisteringObservable<Key, U>> knownGroups = new ConcurrentHashMap<Key, GroupedRegisteringObservable<Key, U>>();
            return source.register(new Observer<T>() {
                @Override
                public void error(Throwable ex) {
                    observer.error(ex);
                }
                @Override
                public void finish() {
                    // complete every group before completing the outer observer
                    for (GroupedRegisteringObservable<Key, U> group : knownGroups.values()) {
                        group.finish();
                    }
                    observer.finish();
                }
                @Override
                public void next(T value) {
                    final Key key = keyExtractor.invoke(value);
                    GroupedRegisteringObservable<Key, U> group = knownGroups.get(key);
                    if (group == null) {
                        GroupedRegisteringObservable<Key, U> created = new GroupedRegisteringObservable<Key, U>(key);
                        group = knownGroups.putIfAbsent(key, created);
                        if (group == null) {
                            // FIX: announce the new group only from the thread that
                            // actually installed it. The original called observer.next()
                            // even when putIfAbsent lost the race, so the same group
                            // could be emitted twice to the outer observer.
                            group = created;
                            observer.next(group);
                        }
                    }
                    group.next(valueExtractor.invoke(value));
                }
            });
        }
    };
}
/**
 * Groups the source sequence of Ts until the specified duration for that group fires.
 * <p>The key comparison is done by the <code>Object.equals()</code> semantics of the <code>HashMap</code>.</p>
 * <p><b>Exception semantics:</b> if the source throws an exception, all active groups will receive
 * the exception followed by the outer observer of the groups.</p>
 * <p><b>Completion semantics:</b> if the source finishes, all active groups will receive a finish
 * signal followed by the outer observer.</p>
 * @param <T> the source element type
 * @param <K> the key type
 * @param <D> the duration element type, ignored
 * @param source the source of Ts
 * @param keySelector the key extractor
 * @param durationSelector the observable for a particular group termination
 * @return the new observable
 */
public static <T, K, D> Observable<GroupedObservable<K, T>> groupByUntil(
        final Observable<? extends T> source,
        final Func1<? super T, ? extends K> keySelector,
        final Func1<? super GroupedObservable<K, T>, ? extends Observable<D>> durationSelector
) {
    // values pass through unchanged: use the identity as the value selector
    final Func1<? super T, ? extends T> identityFn = Functions.<T>identity();
    return groupByUntil(source, keySelector, identityFn, durationSelector);
}
/**
 * Groups the source sequence of Ts until the specified duration for that group fires.
 * <p><b>Exception semantics:</b> if the source throws an exception, all active groups will receive
 * the exception followed by the outer observer of the groups.</p>
 * <p><b>Completion semantics:</b> if the source finishes, all active groups will receive a finish
 * signal followed by the outer observer.</p>
 * @param <T> the source element type
 * @param <K> the key type
 * @param <D> the duration element type, ignored
 * @param source the source of Ts
 * @param keySelector the key extractor
 * @param durationSelector the observable for a particular group termination
 * @param keyComparer the key comparer for the grouping
 * @return the new observable
 */
public static <T, K, D> Observable<GroupedObservable<K, T>> groupByUntil(
        final Observable<? extends T> source,
        final Func1<? super T, ? extends K> keySelector,
        final Func1<? super GroupedObservable<K, T>, ? extends Observable<D>> durationSelector,
        final Func2<? super K, ? super K, Boolean> keyComparer
) {
    // values pass through unchanged: use the identity as the value selector
    final Func1<? super T, ? extends T> identityFn = Functions.<T>identity();
    return groupByUntil(source, keySelector, identityFn, durationSelector, keyComparer);
}
/**
 * Groups the source sequence of Ts until the specified duration for that group fires.
 * <p>The key comparison is done by the <code>Object.equals()</code> semantics of the <code>HashMap</code>.</p>
 * <p><b>Exception semantics:</b> if the source throws an exception, all active groups will receive
 * the exception followed by the outer observer of the groups.</p>
 * <p><b>Completion semantics:</b> if the source finishes, all active groups will receive a finish
 * signal followed by the outer observer.</p>
 * @param <T> the source element type
 * @param <K> the key type
 * @param <V> the value type
 * @param <D> the duration element type, ignored
 * @param source the source of Ts
 * @param keySelector the key extractor
 * @param valueSelector the value extractor
 * @param durationSelector the observable for a particular group termination
 * @return the new observable
 */
public static <T, K, V, D> Observable<GroupedObservable<K, V>> groupByUntil(
        final Observable<? extends T> source,
        final Func1<? super T, ? extends K> keySelector,
        final Func1<? super T, ? extends V> valueSelector,
        final Func1<? super GroupedObservable<K, V>, ? extends Observable<D>> durationSelector
) {
    return new Observable<GroupedObservable<K, V>>() {
        @Override
        public Closeable register(
                final Observer<? super GroupedObservable<K, V>> observer) {
            DefaultObserverEx<T> o = new DefaultObserverEx<T>(true) {
                /** The active groups. */
                final Map<K, GroupedRegisteringObservable<K, V>> groups = new HashMap<K, GroupedRegisteringObservable<K, V>>();
                {
                    add("source", source);
                }
                @Override
                protected void onError(Throwable ex) {
                    // propagate the failure into every open group first
                    for (Observer<V> o : groups.values()) {
                        o.error(ex);
                    }
                    observer.error(ex);
                }
                @Override
                protected void onFinish() {
                    // close every open group before the outer observer
                    for (Observer<V> o : groups.values()) {
                        o.finish();
                    }
                    observer.finish();
                }
                @Override
                protected void onNext(T value) {
                    final K k = keySelector.invoke(value);
                    final V v = valueSelector.invoke(value);
                    GroupedRegisteringObservable<K, V> gr = groups.get(k);
                    // FIX: the original tested gr != null, which dereferenced a null
                    // group (NPE) for the first element of each key and re-created
                    // the group on every later element. A group must be created
                    // only when none is active for the key yet.
                    if (gr == null) {
                        gr = new GroupedRegisteringObservable<K, V>(k);
                        final GroupedRegisteringObservable<K, V> fgr = gr;
                        groups.put(k, gr);
                        // the duration observable decides when this group terminates
                        add(fgr, durationSelector.invoke(gr).register(new DefaultObserver<D>(lock, true) {
                            @Override
                            protected void onError(Throwable ex) {
                                fgr.error(ex); // FIXME error propagation
                                groups.remove(k);
                                remove(fgr);
                            }
                            @Override
                            protected void onFinish() {
                                fgr.finish();
                                groups.remove(k);
                                remove(fgr);
                            }
                            @Override
                            protected void onNext(D value) {
                                // first duration event terminates the group
                                fgr.finish();
                                groups.remove(k);
                                remove(fgr);
                            }
                        }));
                        observer.next(gr);
                    }
                    gr.next(v);
                }
            };
            return o;
        }
    };
}
/**
 * Groups the source sequence of Ts until the specified duration for that group fires.
 * <p><b>Exception semantics:</b> if the source throws an exception, all active groups will receive
 * the exception followed by the outer observer of the groups.</p>
 * <p><b>Completion semantics:</b> if the source finishes, all active groups will receive a finish
 * signal followed by the outer observer.</p>
 * @param <T> the source element type
 * @param <K> the key type
 * @param <V> the value type
 * @param <D> the duration element type, ignored
 * @param source the source of Ts
 * @param keySelector the key extractor
 * @param valueSelector the value extractor
 * @param durationSelector the observable for a particular group termination
 * @param keyComparer the key comparer for the grouping
 * @return the new observable
 */
public static <T, K, V, D> Observable<GroupedObservable<K, V>> groupByUntil(
        final Observable<? extends T> source,
        final Func1<? super T, ? extends K> keySelector,
        final Func1<? super T, ? extends V> valueSelector,
        final Func1<? super GroupedObservable<K, V>, ? extends Observable<D>> durationSelector,
        final Func2<? super K, ? super K, Boolean> keyComparer
) {
    return new Observable<GroupedObservable<K, V>>() {
        @Override
        public Closeable register(
                final Observer<? super GroupedObservable<K, V>> observer) {
            DefaultObserverEx<T> o = new DefaultObserverEx<T>(true) {
                /** Wraps a raw key so the HashMap uses the supplied comparer for equality. */
                class Key {
                    /** The key value. */
                    final K key;
                    /**
                     * Constructor.
                     * @param key the key
                     */
                    Key(K key) {
                        this.key = key;
                    }
                    @Override
                    public boolean equals(Object obj) {
                        if (obj instanceof Key) {
                            return keyComparer.invoke(key, ((Key)obj).key);
                        }
                        return false;
                    }
                    @Override
                    public int hashCode() {
                        // NOTE: for correctness the comparer must be consistent with this hash
                        return key != null ? key.hashCode() : 0;
                    }
                }
                /** The active groups. */
                final Map<Key, GroupedRegisteringObservable<K, V>> groups = new HashMap<Key, GroupedRegisteringObservable<K, V>>();
                {
                    add("source", source);
                }
                @Override
                protected void onError(Throwable ex) {
                    // propagate the failure into every open group first
                    for (Observer<V> o : groups.values()) {
                        o.error(ex);
                    }
                    observer.error(ex);
                }
                @Override
                protected void onFinish() {
                    // close every open group before the outer observer
                    for (Observer<V> o : groups.values()) {
                        o.finish();
                    }
                    observer.finish();
                }
                @Override
                protected void onNext(T value) {
                    final K kv = keySelector.invoke(value);
                    final Key k = new Key(kv);
                    final V v = valueSelector.invoke(value);
                    GroupedRegisteringObservable<K, V> gr = groups.get(k);
                    // FIX: the original tested gr != null, which dereferenced a null
                    // group (NPE) for the first element of each key and re-created
                    // the group on every later element. A group must be created
                    // only when none is active for the key yet.
                    if (gr == null) {
                        gr = new GroupedRegisteringObservable<K, V>(kv);
                        final GroupedRegisteringObservable<K, V> fgr = gr;
                        groups.put(k, gr);
                        // the duration observable decides when this group terminates
                        add(fgr, durationSelector.invoke(gr).register(new DefaultObserver<D>(lock, true) {
                            @Override
                            protected void onError(Throwable ex) {
                                fgr.error(ex); // FIXME error propagation
                                groups.remove(k);
                                remove(fgr);
                            }
                            @Override
                            protected void onFinish() {
                                fgr.finish();
                                groups.remove(k);
                                remove(fgr);
                            }
                            @Override
                            protected void onNext(D value) {
                                // first duration event terminates the group
                                fgr.finish();
                                groups.remove(k);
                                remove(fgr);
                            }
                        }));
                        observer.next(gr);
                    }
                    gr.next(v);
                }
            };
            return o;
        }
    };
}
/**
 * Returns an observable which correlates two streams of values based on
 * their time when they overlapped and groups the results.
 * FIXME not sure how to implement it
 * @param <Left> the element type of the left stream
 * @param <Right> the element type of the right stream
 * @param <LeftDuration> the overlapping duration indicator for the left stream (e.g., the event when it leaves)
 * @param <RightDuration> the overlapping duration indicator for the right stream (e.g., the event when it leaves)
 * @param <Result> the type of the grouping based on the coincidence.
 * @param left the left source of elements
 * @param right the right source of elements
 * @param leftDurationSelector the duration selector for a left element
 * @param rightDurationSelector the duration selector for a right element
 * @param resultSelector the selector which will produce the output value
 * @return the new observable
 * @see #join(Observable, Observable, Func1, Func1, Func2)
 */
public static <Left, Right, LeftDuration, RightDuration, Result> Observable<Result> groupJoin(
final Observable<? extends Left> left,
final Observable<? extends Right> right,
final Func1<? super Left, ? extends Observable<LeftDuration>> leftDurationSelector,
final Func1<? super Right, ? extends Observable<RightDuration>> rightDurationSelector,
final Func2<? super Left, ? super Observable<? extends Right>, ? extends Result> resultSelector
) {
return new Observable<Result>() {
@Override
public Closeable register(final Observer<? super Result> observer) {
// single fair lock shared by both sides so their callbacks never interleave
final Lock lock = new ReentrantLock(true);
// elements whose duration observable has not yet completed
final HashSet<Left> leftActive = new HashSet<Left>();
final HashSet<Right> rightActive = new HashSet<Right>();
// per-right-element observable handed to the result selector; identity-keyed
final Map<Right, DefaultObservable<Right>> rightGroups = new IdentityHashMap<Right, DefaultObservable<Right>>();
final AtomicReference<Closeable> closeBoth = new AtomicReference<Closeable>();
DefaultObserverEx<Left> o1 = new DefaultObserverEx<Left>(lock, true) {
/** Relay the inner error to the outer. */
void innerError(Throwable ex) {
error(ex);
}
@Override
protected void onClose() {
super.onClose();
// closing one side tears down both registrations
Closeables.close0(closeBoth.get());
}
@Override
protected void onError(Throwable ex) {
observer.error(ex);
}
@Override
protected void onFinish() {
observer.finish();
}
@Override
protected void onNext(final Left value) {
leftActive.add(value);
Observable<LeftDuration> completion = leftDurationSelector.invoke(value);
final Object token = new Object();
// the duration observable's finish marks the end of this left element's window
add(token, completion.register(new DefaultObserver<LeftDuration>(lock, true) {
@Override
protected void onClose() {
remove(token);
}
@Override
protected void onError(Throwable ex) {
innerError(ex);
}
@Override
protected void onFinish() {
leftActive.remove(value);
}
@Override
protected void onNext(LeftDuration value) {
// NO OP?
}
}));
// pair the new left element with the group of every currently active right
// NOTE(review): rightGroups.get(r) may be null until the right side
// created the group — confirm the selector tolerates that
for (Right r : rightActive) {
observer.next(resultSelector.invoke(value, rightGroups.get(r)));
}
}
};
DefaultObserverEx<Right> o2 = new DefaultObserverEx<Right>(lock, true) {
/** Relay the inner error to the outer. */
void innerError(Throwable ex) {
error(ex);
}
@Override
protected void onClose() {
super.onClose();
// closing one side tears down both registrations
Closeables.close0(closeBoth.get());
}
@Override
protected void onError(Throwable ex) {
observer.error(ex);
}
@Override
protected void onFinish() {
observer.finish();
}
@Override
protected void onNext(final Right value) {
rightActive.add(value);
Observable<RightDuration> completion = rightDurationSelector.invoke(value);
final Object token = new Object();
add(token, completion.register(new DefaultObserver<RightDuration>(lock, true) {
@Override
protected void onClose() {
remove(token);
// finishing the window also completes this right element's group
DefaultObservable<Right> rg = rightGroups.remove(value);
if (rg != null) {
rg.finish();
}
}
@Override
protected void onError(Throwable ex) {
innerError(ex);
}
@Override
protected void onFinish() {
rightActive.remove(value);
}
@Override
protected void onNext(RightDuration value) {
// NO OP?!
}
}));
// lazily create the per-element group, pair it with active lefts,
// then push the element itself into its group
DefaultObservable<Right> r = rightGroups.get(value);
if (r == null) {
r = new DefaultObservable<Right>();
rightGroups.put(value, r);
}
for (Left left : leftActive) {
observer.next(resultSelector.invoke(left, r));
}
r.next(value);
}
};
// register both sources; closing the combined closeable stops everything
Closeable c = Closeables.close(o1, o2);
closeBoth.set(c);
o1.add(new Object(), left);
o2.add(new Object(), right);
return c;
}
};
}
/**
 * Returns an observable where the submitted condition decides whether the <code>then</code> source is allowed to submit values.
 * @param <T> the type of the values to observe
 * @param condition the condition function
 * @param then the source to use when the condition is true
 * @return the observable
 */
@Nonnull
public static <T> Observable<T> ifThen(
        @Nonnull final Func0<Boolean> condition,
        @Nonnull final Observable<? extends T> then) {
    // when the condition is false, fall back to a source that never signals
    final Observable<T> silent = Reactive.<T>never();
    return ifThen(condition, then, silent);
}
/**
 * Returns an observable where the submitted condition decides whether the <code>then</code> or <code>orElse</code>
 * source is allowed to submit values.
 * FIXME not sure how it should work
 * @param <T> the type of the values to observe
 * @param condition the condition function
 * @param then the source to use when the condition is true
 * @param orElse the source to use when the condition is false
 * @return the observable
 */
@Nonnull
public static <T> Observable<T> ifThen(
        @Nonnull final Func0<Boolean> condition,
        @Nonnull final Observable<? extends T> then,
        @Nonnull final Observable<? extends T> orElse) {
    return new Observable<T>() {
        @Override
        public Closeable register(final Observer<? super T> observer) {
            /** Forwards events only while the condition evaluates to the expected value. */
            class GatedObserver implements Observer<T> {
                /** The condition value under which events are relayed. */
                private final boolean expected;
                GatedObserver(boolean expected) {
                    this.expected = expected;
                }
                @Override
                public void next(T value) {
                    if (condition.invoke() == expected) {
                        observer.next(value);
                    }
                }
                @Override
                public void finish() {
                    if (condition.invoke() == expected) {
                        observer.finish();
                    }
                }
                @Override
                public void error(Throwable ex) {
                    if (condition.invoke() == expected) {
                        observer.error(ex);
                    }
                }
            }
            // both sources stay registered; the gate decides which one is heard
            final Closeable thenClose = then.register(new GatedObserver(true));
            final Closeable elseClose = orElse.register(new GatedObserver(false));
            return Closeables.close(thenClose, elseClose);
        }
    };
}
/**
 * Ignores the next() messages of the source and forwards only the error() and
 * finish() messages.
 * @param <T> the source element type
 * @param source the source of Ts
 * @return the new observable
 */
public static <T> Observable<T> ignoreValues(final Observable<? extends T> source) {
    return new Observable<T>() {
        @Override
        public Closeable register(final Observer<? super T> observer) {
            Observer<T> terminalOnly = new Observer<T>() {
                @Override
                public void next(T value) {
                    // values are deliberately discarded
                }
                @Override
                public void finish() {
                    observer.finish();
                }
                @Override
                public void error(Throwable ex) {
                    observer.error(ex);
                }
            };
            return source.register(terminalOnly);
        }
    };
}
/**
 * Invoke a specific action before relaying the Ts to the observable. The <code>action</code> might
 * have some effect on each individual Ts passing through this filter.
 * @param <T> the type of the values observed
 * @param source the source of Ts
 * @param action the action to invoke on every T
 * @return the new observable
 */
@Nonnull
public static <T> Observable<T> invoke(
        @Nonnull final Observable<? extends T> source,
        @Nonnull final Action1<? super T> action) {
    return new Observable<T>() {
        @Override
        public Closeable register(final Observer<? super T> observer) {
            Observer<T> tap = new Observer<T>() {
                @Override
                public void next(T value) {
                    // side effect first, then relay the unchanged value
                    action.invoke(value);
                    observer.next(value);
                }
                @Override
                public void finish() {
                    observer.finish();
                }
                @Override
                public void error(Throwable ex) {
                    observer.error(ex);
                }
            };
            return source.register(tap);
        }
    };
}
/**
 * Invoke a specific observer before relaying the Ts, finish() and error() to the observable. The <code>action</code> might
 * have some effect on each individual Ts passing through this filter.
 * @param <T> the type of the values observed
 * @param source the source of Ts
 * @param observer the observer to invoke before any registered observers are called
 * @return the new observable
 */
@Nonnull
public static <T> Observable<T> invoke(
        @Nonnull final Observable<? extends T> source,
        @Nonnull final Observer<? super T> observer) {
    return new Observable<T>() {
        @Override
        public Closeable register(final Observer<? super T> o) {
            Observer<T> duplicator = new Observer<T>() {
                @Override
                public void next(T value) {
                    // the side-effect observer always sees the event first
                    observer.next(value);
                    o.next(value);
                }
                @Override
                public void finish() {
                    observer.finish();
                    o.finish();
                }
                @Override
                public void error(Throwable ex) {
                    observer.error(ex);
                    o.error(ex);
                }
            };
            return source.register(duplicator);
        }
    };
}
/**
 * Invoke the given callable on the default pool and observe its result via the returned observable.
 * Any exception thrown by the callable is relayed via the error() message.
 * @param <T> the return type
 * @param call the callable
 * @return the observable
 */
@Nonnull
public static <T> Observable<T> invokeAsync(
        @Nonnull final Callable<? extends T> call) {
    final Scheduler scheduler = DEFAULT_SCHEDULER.get();
    return invokeAsync(call, scheduler);
}
/**
 * Invoke the given callable on the given pool and observe its result via the returned observable.
 * Any exception thrown by the callable is relayed via the error() message.
 * @param <T> the return type
 * @param call the callable
 * @param pool the thread pool
 * @return the observable
 */
@Nonnull
public static <T> Observable<T> invokeAsync(
        @Nonnull final Callable<? extends T> call,
        @Nonnull final Scheduler pool) {
    return new Observable<T>() {
        @Override
        public Closeable register(final Observer<? super T> observer) {
            Runnable task = new Runnable() {
                @Override
                public void run() {
                    try {
                        // single value followed by completion
                        T result = call.call();
                        observer.next(result);
                        observer.finish();
                    } catch (Throwable ex) {
                        observer.error(ex);
                    }
                }
            };
            return pool.schedule(task);
        }
    };
}
/**
 * Invoke the given runnable on the default pool and signal finish() (or error())
 * via the returned observable; no value is ever emitted.
 * @param <T> the return type
 * @param run the runnable
 * @return the observable
 */
@Nonnull
public static <T> Observable<T> invokeAsync(
        @Nonnull final Runnable run) {
    final Scheduler scheduler = DEFAULT_SCHEDULER.get();
    return invokeAsync(run, scheduler);
}
/**
 * Invoke the given runnable on the given pool and signal finish() (or error())
 * via the returned observable; no value is ever emitted.
 * @param <T> the return type
 * @param run the runnable
 * @param pool the thread pool
 * @return the observable
 */
@Nonnull
public static <T> Observable<T> invokeAsync(
        @Nonnull final Runnable run,
        @Nonnull final Scheduler pool) {
    return new Observable<T>() {
        @Override
        public Closeable register(final Observer<? super T> observer) {
            Runnable task = new Runnable() {
                @Override
                public void run() {
                    try {
                        run.run();
                        observer.finish();
                    } catch (Throwable ex) {
                        observer.error(ex);
                    }
                }
            };
            return pool.schedule(task);
        }
    };
}
/**
 * Invoke the given runnable on the default pool and emit the supplied default
 * value followed by finish() once it completes; failures are relayed via error().
 * @param <T> the return type
 * @param run the runnable
 * @param defaultValue the value to return when the Runnable completes
 * @return the observable
 */
@Nonnull
public static <T> Observable<T> invokeAsync(
        @Nonnull final Runnable run,
        final T defaultValue) {
    final Scheduler scheduler = DEFAULT_SCHEDULER.get();
    return invokeAsync(run, defaultValue, scheduler);
}
/**
 * Invoke the given runnable on the given pool and emit the supplied default
 * value followed by finish() once it completes; failures are relayed via error().
 * @param <T> the return type
 * @param run the runnable
 * @param defaultValue the value to return by default
 * @param pool the thread pool
 * @return the observable
 */
@Nonnull
public static <T> Observable<T> invokeAsync(
        @Nonnull final Runnable run,
        final T defaultValue,
        @Nonnull final Scheduler pool) {
    return new Observable<T>() {
        @Override
        public Closeable register(final Observer<? super T> observer) {
            Runnable task = new Runnable() {
                @Override
                public void run() {
                    try {
                        run.run();
                        // successful completion yields exactly one value
                        observer.next(defaultValue);
                        observer.finish();
                    } catch (Throwable ex) {
                        observer.error(ex);
                    }
                }
            };
            return pool.schedule(task);
        }
    };
}
/**
 * Signals true if the source observable fires finish() without ever firing next().
 * This means once the next() is fired, the resulting observer will return early.
 * @param source the source observable of any type
 * @return the observer
 */
@Nonnull
public static Observable<Boolean> isEmpty(
        @Nonnull final Observable<?> source) {
    return new Observable<Boolean>() {
        @Override
        public Closeable register(final Observer<? super Boolean> observer) {
            return source.register(new Observer<Object>() {
                /** We already determined the answer? */
                boolean done;
                @Override
                public void error(Throwable ex) {
                    if (!done) {
                        observer.error(ex);
                    }
                }
                @Override
                public void finish() {
                    if (!done) {
                        done = true;
                        // FIX: the source completed without any next(), so it IS empty.
                        // The original emitted false here (and true on next()),
                        // inverting the documented isEmpty semantics.
                        observer.next(true);
                        observer.finish();
                    }
                }
                @Override
                public void next(Object value) {
                    if (!done) {
                        done = true;
                        // a value arrived: the source is not empty
                        observer.next(false);
                        observer.finish();
                    }
                }
            });
        }
    };
}
/**
 * Returns an observable which correlates two streams of values based on
 * their time when they overlapped.
 * <p>The difference between this operator and the groupJoin operator
 * is that in this case, the result selector takes the concrete left and
 * right elements, whereas the groupJoin associates an observable of rights
 * for each left.</p>
 * FIXME not sure how to implement it
 * @param <Left> the element type of the left stream
 * @param <Right> the element type of the right stream
 * @param <LeftDuration> the overlapping duration indicator for the left stream (e.g., the event when it leaves)
 * @param <RightDuration> the overlapping duration indicator for the right stream (e.g., the event when it leaves)
 * @param <Result> the type of the grouping based on the coincidence.
 * @param left the left source of elements
 * @param right the right source of elements
 * @param leftDurationSelector the duration selector for a left element
 * @param rightDurationSelector the duration selector for a right element
 * @param resultSelector the selector which will produce the output value
 * @return the new observable
 * @see #groupJoin(Observable, Observable, Func1, Func1, Func2)
 */
public static <Left, Right, LeftDuration, RightDuration, Result> Observable<Result> join(
final Observable<? extends Left> left,
final Observable<? extends Right> right,
final Func1<? super Left, ? extends Observable<LeftDuration>> leftDurationSelector,
final Func1<? super Right, ? extends Observable<RightDuration>> rightDurationSelector,
final Func2<? super Left, ? super Right, ? extends Result> resultSelector
) {
return new Observable<Result>() {
@Override
public Closeable register(final Observer<? super Result> observer) {
// single fair lock shared by both sides so their callbacks never interleave
final Lock lock = new ReentrantLock(true);
// elements whose duration observable has not yet completed
final HashSet<Left> leftActive = new HashSet<Left>();
final HashSet<Right> rightActive = new HashSet<Right>();
final AtomicReference<Closeable> closeBoth = new AtomicReference<Closeable>();
DefaultObserverEx<Left> o1 = new DefaultObserverEx<Left>(lock, true) {
/** Relay the inner error to the outer. */
void innerError(Throwable ex) {
error(ex);
}
@Override
protected void onClose() {
super.onClose();
// closing one side tears down both registrations
Closeables.close0(closeBoth.get());
}
@Override
protected void onError(Throwable ex) {
observer.error(ex);
}
@Override
protected void onFinish() {
observer.finish();
}
@Override
protected void onNext(final Left value) {
leftActive.add(value);
Observable<LeftDuration> completion = leftDurationSelector.invoke(value);
final Object token = new Object();
// the duration observable's finish ends this left element's window
add(token, completion.register(new DefaultObserver<LeftDuration>(lock, true) {
@Override
protected void onClose() {
remove(token);
}
@Override
protected void onError(Throwable ex) {
innerError(ex);
}
@Override
protected void onFinish() {
leftActive.remove(value);
}
@Override
protected void onNext(LeftDuration value) {
// NO OP?
}
}));
// pair the new left element with every currently active right
for (Right r : rightActive) {
observer.next(resultSelector.invoke(value, r));
}
}
};
DefaultObserverEx<Right> o2 = new DefaultObserverEx<Right>(lock, true) {
/** Relay the inner error to the outer. */
void innerError(Throwable ex) {
error(ex);
}
@Override
protected void onClose() {
super.onClose();
// closing one side tears down both registrations
Closeables.close0(closeBoth.get());
}
@Override
protected void onError(Throwable ex) {
observer.error(ex);
}
@Override
protected void onFinish() {
observer.finish();
}
@Override
protected void onNext(final Right value) {
rightActive.add(value);
Observable<RightDuration> completion = rightDurationSelector.invoke(value);
final Object token = new Object();
// the duration observable's finish ends this right element's window
add(token, completion.register(new DefaultObserver<RightDuration>(lock, true) {
@Override
protected void onClose() {
remove(token);
}
@Override
protected void onError(Throwable ex) {
innerError(ex);
}
@Override
protected void onFinish() {
rightActive.remove(value);
}
@Override
protected void onNext(RightDuration value) {
// NO OP?!
}
}));
// pair the new right element with every currently active left
for (Left left : leftActive) {
observer.next(resultSelector.invoke(left, value));
}
}
};
// register both sources; closing the combined closeable stops everything
Closeable c = Closeables.close(o1, o2);
closeBoth.set(c);
o1.add(new Object(), left);
o2.add(new Object(), right);
return c;
}
};
}
/**
 * Returns the last element of the source observable or throws
 * NoSuchElementException if the source is empty.
 * <p>Blocks the calling thread until the source terminates.</p>
 * <p>NOTE(review): an error() from the source is reported as if the
 * sequence were empty (NoSuchElementException) — the original exception
 * is discarded; confirm this is intended.</p>
 * @param <T> the type of the elements
 * @param source the source of Ts
 * @return the last element
 */
@Nonnull
public static <T> T last(
@Nonnull final Observable<? extends T> source) {
// exactly one Option is enqueued on termination:
// none() for empty-or-error, some(last seen value) otherwise
final LinkedBlockingQueue<Option<T>> queue = new LinkedBlockingQueue<Option<T>>();
Closeable c = source.register(new Observer<T>() {
/** The current value. */
T current;
/** Are we the first? */
boolean first = true;
@Override
public void error(Throwable ex) {
queue.add(Option.<T>none());
}
@Override
public void finish() {
if (first) {
queue.add(Option.<T>none());
} else {
queue.add(Option.some(current));
}
}
@Override
public void next(T value) {
first = false;
current = value;
}
});
try {
// block until the source terminates one way or another
Option<T> value = queue.take();
c.close();
// identity comparison: assumes Option.none() returns a singleton — TODO confirm
if (value == Option.none()) {
throw new NoSuchElementException();
}
return value.value();
} catch (InterruptedException e) {
throw new RuntimeException(e);
} catch (IOException ex) {
throw new RuntimeException(ex);
}
}
/**
 * Returns an iterable which returns values on a momentary basis from the
 * source. Useful when source produces values at different rate than the consumer takes it.
 * The iterable.next() call might block until the first value becomes available or something else happens in the observable
 * FIXME not sure where the observer should run
 * @param <T> the type of the values
 * @param source the source
 * @return the iterable
 */
@Nonnull
public static <T> Iterable<T> latest(
@Nonnull final Observable<? extends T> source) {
return new Iterable<T>() {
@Override
public Iterator<T> iterator() {
// set once the source terminated (finish or error)
final AtomicBoolean complete = new AtomicBoolean();
// released on the first event of any kind, so hasNext() can stop blocking
final CountDownLatch first = new CountDownLatch(1);
final AtomicBoolean hasValue = new AtomicBoolean();
// the most recently observed value; overwritten on every next()
final AtomicReference<T> current = new AtomicReference<T>();
final Closeable c = source.register(new Observer<T>() {
/** Set the has value once. */
boolean once = true;
@Override
public void error(Throwable ex) {
// NOTE(review): the exception itself is discarded — confirm intended
complete.set(true);
first.countDown();
}
@Override
public void finish() {
complete.set(true);
first.countDown();
}
@Override
public void next(T value) {
if (once) {
once = false;
hasValue.set(true);
}
current.set(value);
first.countDown();
}
});
return new Iterator<T>() {
@Override
protected void finalize() throws Throwable {
// NOTE(review): registration cleanup relies on GC finalization —
// there is no explicit close path for the iterator
c.close();
}
@Override
public boolean hasNext() {
try {
// blocks only until the very first event arrives
first.await();
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
return !complete.get() && hasValue.get();
}
@Override
public T next() {
if (hasValue.get()) {
return current.get();
}
throw new NoSuchElementException();
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
};
}
};
}
/**
 * Returns an observable which calls the given selector with the given value
 * when a client wants to register with it. The client then
 * gets registered with the observable returned by the function.
 * E.g., <code>return selector.invoke(value).register(observer)</code> in the outer register method.
 * @param <T> the selection key type
 * @param <U> the result type
 * @param value the value to pass to the selector function
 * @param selector the selector function
 * @return a new observable
 */
public static <T, U> Observable<U> let(
        final T value,
        final Func1<? super T, ? extends Observable<U>> selector) {
    return new Observable<U>() {
        @Override
        public Closeable register(Observer<? super U> observer) {
            // resolve the target observable lazily, per registration
            final Observable<U> target = selector.invoke(value);
            return target.register(observer);
        }
    };
}
/**
 * Uses the selector function on the given source observable to extract a single
 * value and send this value to the registered observer.
 * It is sometimes called the comonadic bind operator and compared to the ContinueWith
 * semantics.
 * The default scheduler is used to emit the output value
 * FIXME not sure what it should do
 * @param <T> the source element type
 * @param <U> the result element type
 * @param source the source of Ts
 * @param selector the selector that extracts an U from the series of Ts.
 * @return the new observable.
 */
public static <T, U> Observable<U> manySelect0(
        final Observable<? extends T> source,
        final Func1<? super Observable<T>, ? extends U> selector) {
    final Scheduler scheduler = DEFAULT_SCHEDULER.get();
    return manySelect(source, selector, scheduler);
}
	/**
	 * For each value of the source observable, it creates a view starting from that value into the source
	 * and calls the given selector function asynchronously on the given scheduler.
	 * The result of that computation is then transmitted to the observer.
	 * <p>It is sometimes called the comonadic bind operator and compared to the ContinueWith
	 * semantics.</p>
	 * @param <T> the source element type
	 * @param <U> the result element type
	 * @param source the source of Ts
	 * @param selector the selector that extracts an U from the series of Ts.
	 * @param scheduler the scheduler where the extracted U will be emitted from.
	 * @return the new observable.
	 */
	public static <T, U> Observable<U> manySelect(
			final Observable<? extends T> source,
			final Func1<? super Observable<T>, ? extends U> selector,
			final Scheduler scheduler) {
		return new Observable<U>() {
			@Override
			public Closeable register(final Observer<? super U> observer) {
				// counts the source registration itself (the initial 1) plus every
				// scheduled selector task still outstanding
				final AtomicInteger wip = new AtomicInteger(1);
				Closeable c = source.register(new DefaultObserverEx<T>(true) {
					/** The skip position. */
					int counter;
					@Override
					protected void onNext(T value) {
						// a view of the source starting at the current element
						final Observable<T> ot = skip(source, counter);
						wip.incrementAndGet();
						// evaluate the selector asynchronously; whichever task or
						// onFinish decrements wip to zero signals completion
						add(counter, scheduler.schedule(new Runnable() {
							@Override
							public void run() {
								observer.next(selector.invoke(ot));
								if (wip.decrementAndGet() == 0) {
									observer.finish();
								}
							}
						}));
						counter++;
					}
					@Override
					protected void onError(Throwable ex) {
						observer.error(ex);
						close();
					}
					@Override
					protected void onFinish() {
						// release the initial count; pending scheduled tasks may
						// still be the ones to emit finish()
						if (wip.decrementAndGet() == 0) {
							observer.finish();
						}
					}
				});
				return c;
			}
		};
	}
/**
* For each of the source elements, creates a view of the source starting with the given
* element and calls the selector function. The function's return observable is then merged
* into a single observable sequence.<p>
* For example, a source sequence of (1, 2, 3) will create three function calls with (1, 2, 3), (2, 3) and (3) as a content.
* @param <T> the source element type
* @param <U> the result element type
* @param source the source of Ts
* @param selector the selector function
* @return the new observable
*/
public static <T, U> Observable<U> manySelect(
final Observable<? extends T> source,
final Func1<? super Observable<T>, ? extends Observable<U>> selector
) {
return merge(select(source, new Func1<T, Observable<U>>() {
/** The skip position. */
int counter;
@Override
public Observable<U> invoke(T param1) {
int i = counter++;
return selector.invoke(skip(source, i));
}
}));
}
/**
* Returns an observable which converts all messages to an <code>Option</code> value.
* The returned observable does not itself signal error or finish.
* Its dual is the <code>dematerialize</code> method.
* @param <T> the source element type
* @param source the source of Ts
* @return the new observable
* @see #dematerialize(Observable)
*/
@Nonnull
public static <T> Observable<Option<T>> materialize(
@Nonnull final Observable<? extends T> source) {
return new Observable<Option<T>>() {
@Override
public Closeable register(final Observer<? super Option<T>> observer) {
return source.register(new Observer<T>() {
@Override
public void error(Throwable ex) {
observer.next(Option.<T>error(ex));
}
@Override
public void finish() {
observer.next(Option.<T>none());
}
@Override
public void next(T value) {
observer.next(Option.some(value));
}
});
}
};
};
/**
* Returns the maximum value encountered in the source observable once it sends finish().
* @param <T> the element type which must be comparable to itself
* @param source the source of integers
* @return the the maximum value
*/
@Nonnull
public static <T extends Comparable<? super T>> Observable<T> max(
@Nonnull final Observable<? extends T> source) {
return aggregate(source, Functions.<T>max(), Functions.<T, Integer>identityFirst());
}
/**
* Returns the maximum value encountered in the source observable once it sends finish().
* @param <T> the element type
* @param source the source of integers
* @param comparator the comparator to decide the relation of values
* @return the the maximum value
* @see Functions#asComparator(Func2)
*/
@Nonnull
public static <T> Observable<T> max(
@Nonnull final Observable<T> source,
@Nonnull final Comparator<T> comparator) {
return aggregate(source, Functions.<T>max(comparator), Functions.<T, Integer>identityFirst());
}
/**
* Returns an observable which provides with the list of <code>T</code>s which had their keys as maximums.
* The returned observer may finish() if the source sends finish() without any next().
* The generated list is modifiable.
* @param <T> the type of elements
* @param <Key> the key type, which must be comparable to itself
* @param source the source of <code>T</code>s
* @param keyExtractor the key extractor to produce <code>Key</code>s from <code>T</code>s.
* @return the observable for the maximum keyed Ts
*/
@Nonnull
public static <T, Key extends Comparable<? super Key>> Observable<List<T>> maxBy(
@Nonnull final Observable<? extends T> source,
@Nonnull final Func1<? super T, ? extends Key> keyExtractor) {
return minMax(source, keyExtractor, Functions.<Key>comparator(), true);
}
/**
* Returns an observable which provides with the list of <code>T</code>s which had their keys as maximums.
* The returned observer may finish() if the source sends finish() without any next().
* The generated list is modifiable.
* @param <T> the type of elements
* @param <Key> the key type
* @param source the source of <code>T</code>s
* @param keyExtractor the key extractor to produce <code>Key</code>s from <code>T</code>s.
* @param keyComparator the comparator for the keys
* @return the observable for the maximum keyed Ts
*/
@Nonnull
public static <T, Key> Observable<List<T>> maxBy(
@Nonnull final Observable<? extends T> source,
@Nonnull final Func1<? super T, ? extends Key> keyExtractor,
@Nonnull final Comparator<? super Key> keyComparator) {
return minMax(source, keyExtractor, keyComparator, true);
}
	/**
	 * Combines the notifications of all sources. The resulting stream of Ts might come from any of the sources.
	 * @param <T> the type of the values
	 * @param sources the list of sources
	 * @return the observable
	 */
	@Nonnull
	public static <T> Observable<T> merge(
			@Nonnull final Iterable<? extends Observable<? extends T>> sources) {
		return new Observable<T>() {
			@Override
			public Closeable register(final Observer<? super T> observer) {
				final List<Closeable> disposables = new ArrayList<Closeable>();
				// snapshot the sources so their count is fixed up front
				List<Observable<? extends T>> sourcesList = new ArrayList<Observable<? extends T>>();
				for (Observable<? extends T> os : sources) {
					sourcesList.add(os);
				}
				// the extra +1 guards against finishing while registrations are still in progress
				final AtomicInteger wip = new AtomicInteger(sourcesList.size() + 1);
				final List<DefaultObserver<T>> observers = new ArrayList<DefaultObserver<T>>();
				// a single shared lock serializes the notifications of all branches
				final Lock lock = new ReentrantLock();
				for (int i = 0; i < sourcesList.size(); i++) {
					final int j = i;
					DefaultObserver<T> obs = new DefaultObserver<T>(lock, true) {
						@Override
						public void onError(Throwable ex) {
							// first error wins: relay it and close every other branch
							observer.error(ex);
							for (int k = 0; k < observers.size(); k++) {
								if (k != j) {
									observers.get(k).close();
								}
							}
						}
						@Override
						public void onFinish() {
							if (wip.decrementAndGet() == 0) {
								observer.finish();
							}
						}
						@Override
						public void onNext(T value) {
							observer.next(value);
						}
					};
					observers.add(obs);
					disposables.add(obs);
				}
				// register only after all branch observers exist, so onError can see the full list
				for (int i = 0; i < observers.size(); i++) {
					disposables.add(sourcesList.get(i).register(observers.get(i)));
				}
				// release the registration guard; finishes here if every source already completed
				if (wip.decrementAndGet() == 0) {
					observer.finish();
				}
				return Closeables.closeAll(disposables);
			}
		};
	};
	/**
	 * Merge the dynamic sequence of observables of T.
	 * <p><b>Exception semantics:</b> if the sources or any inner observer signals an
	 * error, the outer observable will signal that error and all active source observers are terminated.</p>
	 * @param <T> the element type
	 * @param sources the observable sequence of observable sequence of Ts
	 * @return the new observable
	 */
	public static <T> Observable<T> merge(
			final Observable<? extends Observable<T>> sources) {
		return new Observable<T>() {
			@Override
			public Closeable register(final Observer<? super T> observer) {
				// counts the outer source (the initial 1) plus each active inner observable
				final AtomicInteger wip = new AtomicInteger(1);
				DefaultObserverEx<Observable<T>> obs = new DefaultObserverEx<Observable<T>>(false) {
					/**
					 * The inner exception to forward.
					 * @param ex the exception
					 */
					void innerError(Throwable ex) {
						error(ex);
					}
					/** Signal finish if the sources and inner observables have all finished. */
					void ifDoneFinish() {
						if (wip.decrementAndGet() == 0) {
							observer.finish();
							close();
						}
					}
					@Override
					protected void onError(Throwable ex) {
						observer.error(ex);
					}
					@Override
					protected void onFinish() {
						ifDoneFinish();
					}
					@Override
					protected void onNext(Observable<T> value) {
						// a unique token identifies this inner registration for later removal
						final Object token = new Object();
						wip.incrementAndGet();
						// the inner observer shares the outer lock so notifications stay serialized
						add(token, value.register(new DefaultObserver<T>(lock, true) {
							@Override
							public void onError(Throwable ex) {
								innerError(ex);
							}
							@Override
							public void onFinish() {
								remove(token);
								ifDoneFinish();
							}
							@Override
							public void onNext(T value) {
								observer.next(value);
							}
						}));
					}
				};
				// register with the outer source last, under the "sources" token
				obs.add("sources", sources);
				return obs;
			}
		};
	}
/**
* Merge the events of two observable sequences.
* @param <T> the type of the elements
* @param first the first observable
* @param second the second observable
* @return the merged observable
*/
@Nonnull
public static <T> Observable<T> merge(
@Nonnull Observable<? extends T> first,
@Nonnull Observable<? extends T> second) {
List<Observable<? extends T>> list = new ArrayList<Observable<? extends T>>();
list.add(first);
list.add(second);
return merge(list);
}
/**
* Returns the minimum value encountered in the source observable once it sends finish().
* @param <T> the element type which must be comparable to itself
* @param source the source of integers
* @return the the minimum value
*/
@Nonnull
public static <T extends Comparable<? super T>> Observable<T> min(
@Nonnull final Observable<? extends T> source) {
return aggregate(source, Functions.<T>min(), Functions.<T, Integer>identityFirst());
}
/**
* Returns the minimum value encountered in the source observable once it sends finish().
* @param <T> the element type
* @param source the source of integers
* @param comparator the comparator to decide the relation of values
* @return the the minimum value
* @see Functions#asComparator(Func2)
*/
@Nonnull
public static <T> Observable<T> min(
@Nonnull final Observable<? extends T> source,
@Nonnull final Comparator<? super T> comparator) {
return aggregate(source, Functions.<T>min(comparator), Functions.<T, Integer>identityFirst());
}
/**
* Returns an observable which provides with the list of <code>T</code>s which had their keys as minimums.
* The returned observer may finish() if the source sends finish() without any next().
* The generated list is modifiable.
* @param <T> the type of elements
* @param <Key> the key type, which must be comparable to itself
* @param source the source of <code>T</code>s
* @param keyExtractor the key extractor to produce <code>Key</code>s from <code>T</code>s.
* @return the observable for the minimum keyed Ts
*/
@Nonnull
public static <T, Key extends Comparable<? super Key>> Observable<List<T>> minBy(
@Nonnull final Observable<? extends T> source,
@Nonnull final Func1<? super T, ? extends Key> keyExtractor) {
return minMax(source, keyExtractor, Functions.<Key>comparator(), false);
}
/**
* Returns an observable which provides with the list of <code>T</code>s which had their keys as minimums.
* The returned observer may finish() if the source sends finish() without any next().
* The generated list is modifiable.
* @param <T> the type of elements
* @param <Key> the key type
* @param source the source of <code>T</code>s
* @param keyExtractor the key extractor to produce <code>Key</code>s from <code>T</code>s.
* @param keyComparator the comparator for the keys
* @return the observable for the minimum keyed Ts
* @see Functions#asComparator(Func2)
*/
@Nonnull
public static <T, Key> Observable<List<T>> minBy(
@Nonnull final Observable<? extends T> source,
@Nonnull final Func1<? super T, ? extends Key> keyExtractor,
@Nonnull final Comparator<? super Key> keyComparator) {
return minMax(source, keyExtractor, keyComparator, false);
}
/**
* Returns an observable which provides with the list of <code>T</code>s which had their keys as maximums.
* The returned observer may finish() if the source sends finish() without any next().
* The generated list is modifiable.
* @param <T> the type of elements
* @param <Key> the key type
* @param source the source of <code>T</code>s
* @param keyExtractor the key extractor to produce <code>Key</code>s from <code>T</code>s.
* @param keyComparator the comparator for the keys
* @param max compute the maximums?
* @return the observable for the maximum keyed Ts
* @see Functions#asComparator(Func2)
*/
@Nonnull
public static <T, Key> Observable<List<T>> minMax(
@Nonnull final Observable<? extends T> source,
@Nonnull final Func1<? super T, ? extends Key> keyExtractor,
@Nonnull final Comparator<? super Key> keyComparator,
@Nonnull final boolean max
) {
return new Observable<List<T>>() {
@Override
public Closeable register(final Observer<? super List<T>> observer) {
return source.register(new Observer<T>() {
/** The current collection for the minimum of Ts. */
List<T> collect;
/** The current minimum value. */
Key maxKey;
@Override
public void error(Throwable ex) {
observer.error(ex);
}
@Override
public void finish() {
if (collect != null) {
observer.next(collect);
}
observer.finish();
}
@Override
public void next(T value) {
Key key = keyExtractor.invoke(value);
if (collect == null) {
maxKey = key;
collect = new ArrayList<T>();
collect.add(value);
} else {
int order = keyComparator.compare(maxKey, key);
if (order == 0) {
collect.add(value);
} else
if (max ^ (order > 0)) {
maxKey = key;
collect = new ArrayList<T>();
collect.add(value);
}
}
}
});
}
};
}
	/**
	 * Samples the latest T value coming from the source observable or the initial
	 * value when no messages arrived so far. If the producer and consumer run
	 * on different speeds, the consumer might receive the same value multiple times.
	 * The iterable sequence terminates if the source finishes or returns an error.
	 * <p>The returned iterator throws <code>UnsupportedOperationException</code> for its <code>remove()</code> method.</p>
	 * @param <T> the source element type
	 * @param source the source of Ts
	 * @param initialValue the initial value to return until the source actually produces something.
	 * @return the iterable
	 */
	public static <T> Iterable<T> mostRecent(final Observable<? extends T> source, final T initialValue) {
		return new Iterable<T>() {
			@Override
			public Iterator<T> iterator() {
				// holds the latest notification: some(value), error(ex) or none() for completion
				final AtomicReference<Option<T>> latest = new AtomicReference<Option<T>>(Option.some(initialValue));
				final Closeable c = source.register(new Observer<T>() {
					@Override
					public void error(Throwable ex) {
						latest.set(Option.<T>error(ex));
					}
					@Override
					public void finish() {
						latest.set(Option.<T>none());
					}
					@Override
					public void next(T value) {
						latest.set(Option.some(value));
					}
				});
				return new Iterator<T>() {
					@Override
					protected void finalize() throws Throwable {
						// best-effort release of the source registration when the iterator is collected
						Closeables.close0(c);
						super.finalize();
					}
					@Override
					public boolean hasNext() {
						// none() marks termination (finish, or an already-delivered error)
						return !Option.isNone(latest.get());
					}
					@Override
					public T next() {
						if (hasNext()) {
							Option<T> o = latest.get();
							// if the latest value is error, emit it only once, then
							// do if the source simply terminated
							if (Option.isError(o)) {
								latest.set(Option.<T>none());
								return o.value();
								// NOTE(review): for an error option, value() presumably rethrows
								// the exception — both branches return o.value(), so the branches
								// differ only in resetting latest; confirm against Option's contract
							}
							return o.value();
						}
						throw new NoSuchElementException();
					}
					@Override
					public void remove() {
						throw new UnsupportedOperationException();
					}
				};
			}
		};
	}
	/**
	 * Returns an observable which remains connected to the <code>source</code>
	 * observable as long as there is at least one registration to this output observable.
	 * <p>The <code>observer</code> and <code>observable</code> parameters should denote
	 * the same object which implements both Observable and Observer interfaces.</p>
	 * <p>NOTE(review): the source registration happens eagerly when this method is
	 * called, and once the last client closes, the source connection is closed and
	 * never re-established — confirm this one-shot lifecycle is intended.</p>
	 * @param <T> the source element type
	 * @param <U> the result element type
	 * @param source the source elements
	 * @param observer the observer that listens for Ts. Should be the same object as observable.
	 * @param observable the observable that will produce Us. Should be the same object as observable.
	 * @return the new observable
	 */
	public static <T, U> Observable<U> multicast(final Observable<? extends T> source,
			final Observer<? super T> observer, final Observable<? extends U> observable) {
		// connect the subject to the source immediately
		final Closeable outer = source.register(observer);
		// number of currently open client registrations
		final AtomicInteger wip = new AtomicInteger();
		return new Observable<U>() {
			@Override
			public Closeable register(Observer<? super U> o) {
				wip.incrementAndGet();
				final Closeable inner = observable.register(o);
				return new Closeable() {
					@Override
					public void close() throws IOException {
						inner.close();
						// last client gone: tear down the source connection
						if (wip.decrementAndGet() == 0) {
							Closeables.close0(outer);
						}
					}
				};
			}
		};
	}
/**
* Returns an observable which never fires.
* @param <T> the type of the observable, irrelevant
* @return the observable
*/
@Nonnull
public static <T> Observable<T> never() {
return new Observable<T>() {
@Override
public Closeable register(Observer<? super T> observer) {
return Functions.EMPTY_CLOSEABLE;
}
};
}
/**
* Returns an iterable which returns a single element from the
* given source then terminates. It blocks the current thread.
* <p>For hot observables, this
* will be the first element they produce, for cold observables,
* this will be the next value (e.g., the next mouse move event).</p>
* <p><b>Exception semantics:</b> The <code>Iterator.next()</code> will rethrow the exception.</p>
* <p><b>Completion semantics:</b> If the source completes instantly, the iterator completes as empty.</p>
* <p>The returned iterator will throw an <code>UnsupportedOperationException</code> for its
* <code>remove()</code> method.
* @param <T> the element type
* @param source the source of elements
* @return the iterable
*/
public static <T> Iterable<T> next(final Observable<? extends T> source) {
return new Iterable<T>() {
@Override
public Iterator<T> iterator() {
final BlockingQueue<Option<T>> element = new LinkedBlockingQueue<Option<T>>();
final Closeable c = source.register(new DefaultObserver<T>(true) {
@Override
protected void onError(Throwable ex) {
element.add(Option.<T>error(ex));
}
@Override
protected void onFinish() {
element.add(Option.<T>none());
}
@Override
protected void onNext(T value) {
element.add(Option.some(value));
close();
}
});
return new Iterator<T>() {
/** The completion marker. */
boolean done;
/** The single element look-ahead. */
final SingleContainer<Option<T>> peek = new SingleContainer<Option<T>>();
@Override
public boolean hasNext() {
if (!done) {
if (peek.isEmpty()) {
try {
Option<T> e = element.take();
if (!Option.isNone(e)) {
peek.add(e);
}
} catch (InterruptedException ex) {
peek.add(Option.<T>error(ex));
}
done = true;
Closeables.close0(c);
}
}
return !peek.isEmpty() && !done;
}
@Override
public T next() {
if (hasNext()) {
return element.peek().value();
}
throw new NoSuchElementException();
}
@Override
public void remove() {
throw new UnsupportedOperationException();
};
};
}
};
}
	/**
	 * Wrap the given observable object in a way that any of its observers receive callbacks on
	 * the given thread pool.
	 * @param <T> the type of the objects to observe
	 * @param source the original observable
	 * @param pool the target observable
	 * @return the new observable
	 */
	@Nonnull
	public static <T> Observable<T> observeOn(
			@Nonnull final Observable<? extends T> source,
			@Nonnull final Scheduler pool) {
		return new Observable<T>() {
			@Override
			public Closeable register(final Observer<? super T> observer) {
				DefaultObserverEx<T> obs = new DefaultObserverEx<T>(true) {
					/** The single lane executor: delivers the queued runnables on the
					 * pool one at a time, preserving the notification order. */
					final SingleLaneExecutor<Runnable> run = new SingleLaneExecutor<Runnable>(pool,
							new Action1<Runnable>() {
								@Override
								public void invoke(Runnable value) {
									value.run();
								}
							}
					);
					{
						// instance initializer: register with the source under the "source" token
						add("source", source);
					}
					@Override
					public void onError(final Throwable ex) {
						// each notification is re-dispatched onto the pool via the single lane
						run.add(new Runnable() {
							@Override
							public void run() {
								observer.error(ex);
							}
						});
					}
					@Override
					public void onFinish() {
						run.add(new Runnable() {
							@Override
							public void run() {
								observer.finish();
							}
						});
					}
					@Override
					public void onNext(final T value) {
						run.add(new Runnable() {
							@Override
							public void run() {
								observer.next(value);
							}
						});
					}
				};
				return obs;
			}
		};
	}
/**
* Returns an Observable which traverses the entire
* source Observable and creates an ordered list
* of elements. Once the source Observable completes,
* the elements are streamed to the output.
* @param <T> the source element type, must be self comparable
* @param source the source of Ts
* @return the new iterable
*/
@Nonnull
public static <T extends Comparable<? super T>> Observable<T> orderBy(
@Nonnull final Observable<? extends T> source
) {
return orderBy(source, Functions.<T>identity(), Functions.<T>comparator());
}
/**
* Returns an Observable which traverses the entire
* source Observable and creates an ordered list
* of elements. Once the source Observable completes,
* the elements are streamed to the output.
* @param <T> the source element type, must be self comparable
* @param source the source of Ts
* @param comparator the value comparator
* @return the new iterable
*/
@Nonnull
public static <T> Observable<T> orderBy(
@Nonnull final Observable<? extends T> source,
@Nonnull final Comparator<? super T> comparator
) {
return orderBy(source, Functions.<T>identity(), comparator);
}
/**
* Returns an Observable which traverses the entire
* source Observable and creates an ordered list
* of elements. Once the source Observable completes,
* the elements are streamed to the output.
* @param <T> the source element type
* @param <U> the key type for the ordering, must be self comparable
* @param source the source of Ts
* @param keySelector the key selector for comparison
* @return the new iterable
*/
@Nonnull
public static <T, U extends Comparable<? super U>> Observable<T> orderBy(
@Nonnull final Observable<? extends T> source,
@Nonnull final Func1<? super T, ? extends U> keySelector
) {
return orderBy(source, keySelector, Functions.<U>comparator());
}
/**
* Returns an Observable which traverses the entire
* source Observable and creates an ordered list
* of elements. Once the source iterator completes,
* the elements are streamed to the output.
* <p>Note that it buffers the elements of <code>source</code> until it
* signals finish.</p>
* <p><b>Exception semantics:</b> the exception is relayed and no ordering is performed.</p>
* <p><b>Completion semantics:</b> the output terminates when the source terminates and the sorted values are all submitted.</p>
* @param <T> the source element type
* @param <U> the key type for the ordering
* @param source the source of Ts
* @param keySelector the key selector for comparison
* @param keyComparator the key comparator function
* @return the new iterable
*/
@Nonnull
public static <T, U> Observable<T> orderBy(
@Nonnull final Observable<? extends T> source,
@Nonnull final Func1<? super T, ? extends U> keySelector,
@Nonnull final Comparator<? super U> keyComparator
) {
return new Observable<T>() {
@Override
public Closeable register(final Observer<? super T> observer) {
return source.register(new Observer<T>() {
/** The buffer. */
final List<T> buffer = new ArrayList<T>();
@Override
public void error(Throwable ex) {
observer.error(ex);
}
@Override
public void finish() {
Collections.sort(buffer, new Comparator<T>() {
@Override
public int compare(T o1, T o2) {
return keyComparator.compare(keySelector.invoke(o1), keySelector.invoke(o2));
};
});
for (T t : buffer) {
observer.next(t);
}
observer.finish();
}
@Override
public void next(T value) {
buffer.add(value);
}
});
}
};
}
/**
* Creates an observer with debugging purposes.
* It prints the submitted values to STDOUT separated by commas and line-broken by 80 characters, the exceptions to STDERR
* and prints an empty newline when it receives a finish().
* @param <T> the value type
* @return the observer
*/
@Nonnull
public static <T> Observer<T> print() {
return print(", ", 80);
}
	/**
	 * Creates an observer with debugging purposes.
	 * It prints the submitted values to STDOUT, the exceptions to STDERR
	 * and prints an empty newline when it receives a finish().
	 * @param <T> the value type
	 * @param separator the separator to use between subsequent values
	 * @param maxLineLength how many characters to print into each line
	 * @return the observer
	 */
	@Nonnull
	public static <T> Observer<T> print(
			final String separator,
			final int maxLineLength) {
		return new Observer<T>() {
			/** Indicator for the first element. */
			boolean first = true;
			/** The current line length. */
			int len;
			@Override
			public void error(Throwable ex) {
				ex.printStackTrace();
			}
			@Override
			public void finish() {
				System.out.println();
			}
			@Override
			public void next(T value) {
				String s = String.valueOf(value);
				if (first) {
					// the very first value starts the line without a separator
					first = false;
					System.out.print(s);
					len = s.length();
				} else {
					if (len + separator.length() + s.length() > maxLineLength) {
						if (len == 0) {
							// NOTE(review): on an empty line the value is still printed
							// with a leading separator and no line break even though the
							// line limit was exceeded — looks intentional for oversized
							// single values, but confirm.
							System.out.print(separator);
							System.out.print(s);
							len = s.length() + separator.length();
						} else {
							// break the line: separator ends the old line, value starts the new one
							System.out.println(separator);
							System.out.print(s);
							len = s.length();
						}
					} else {
						// value still fits on the current line
						System.out.print(separator);
						System.out.print(s);
						len += s.length() + separator.length();
					}
				}
			};
		};
	}
/**
* Creates an observer with debugging purposes.
* It prints the submitted values to STDOUT with a line break, the exceptions to STDERR
* and prints an empty newline when it receives a finish().
* @param <T> the value type
* @return the observer
*/
@Nonnull
public static <T> Observer<T> println() {
return new Observer<T>() {
@Override
public void error(Throwable ex) {
ex.printStackTrace();
}
@Override
public void finish() {
System.out.println();
}
@Override
public void next(T value) {
System.out.println(value);
};
};
}
/**
* Creates an observer with debugging purposes.
* It prints the submitted values to STDOUT with a line break, the exceptions to STDERR
* and prints an empty newline when it receives a finish().
* @param <T> the value type
* @param prefix the prefix to use when printing
* @return the observer
*/
@Nonnull
public static <T> Observer<T> println(final String prefix) {
return new Observer<T>() {
@Override
public void error(Throwable ex) {
System.err.print(prefix);
ex.printStackTrace();
}
@Override
public void finish() {
System.out.print(prefix);
System.out.println();
}
@Override
public void next(T value) {
System.out.print(prefix);
System.out.println(value);
};
};
}
/**
* Returns an observable which shares all registration to the source observable and
* each observer will only see the last notification.
* <p>Basically a replay with buffer size 1.</p>
* @param <T> the source element type
* @param source the source of Ts
* @return the observable
*/
public static <T> Observable<T> prune(
final Observable<? extends T> source
) {
return replay(source, 1);
}
/**
* Returns an observable which shares all registration to the source observable and
* each observer will only see the last notification.
* <p>Basically a replay with buffer size 1.</p>
* @param <T> the source element type
* @param <U> the return element type
* @param source the source of Ts
* @param selector the output stream selector
* @return the observable
*/
public static <T, U> Observable<U> prune(
final Observable<? extends T> source,
final Func1<? super Observable<? extends T>, ? extends Observable<U>> selector
) {
return replay(source, selector, 1);
}
/**
* Returns an observable which shares all registration to the source observable and
* each observer will only see the last notification.
* <p>Basically a replay with buffer size 1.</p>
* @param <T> the source element type
* @param <U> the return element type
* @param source the source of Ts
* @param selector the output stream selector
* @param scheduler the scheduler for replaying the single value
* @return the observable
*/
public static <T, U> Observable<U> prune(
final Observable<? extends T> source,
final Func1<? super Observable<? extends T>, ? extends Observable<U>> selector,
final Scheduler scheduler
) {
return replay(source, selector, 1, scheduler);
}
/**
* Returns an observable which shares all registration to the source observable and
* each observer will only see the last notification.
* <p>Basically a replay with buffer size 1.</p>
* @param <T> the source element type
* @param source the source of Ts
* @param scheduler the scheduler for replaying the single value
* @return the observable
*/
public static <T> Observable<T> prune(
final Observable<? extends T> source,
final Scheduler scheduler
) {
return replay(source, 1, scheduler);
}
/**
* Returns an observable which shares a single subscription to the underlying source.
* @param <T> the element type
* @param source the source of Ts
* @return the new observable
*/
public static <T> Observable<T> publish(
final Observable<? extends T> source
) {
return publish(source, DEFAULT_SCHEDULER.get());
}
/**
* Returns an observable which shares a single subscription to the underlying source.
* @param <T> the element type
* @param <U> the result type
* @param source the source of Ts
* @param selector the selector function for the return stream
* @return the new observable
*/
public static <T, U> Observable<U> publish(
final Observable<? extends T> source,
final Func1<? super Observable<? extends T>, ? extends Observable<U>> selector
) {
return publish(selector.invoke(source));
}
/**
* Returns an observable which shares a single subscription to the underlying source.
* @param <T> the element type
* @param <U> the result type
* @param source the source of Ts
* @param selector the selector function for the return stream
* @param scheduler the scheduler where the values will be replayed
* @return the new observable
*/
public static <T, U> Observable<U> publish(
final Observable<? extends T> source,
final Func1<? super Observable<? extends T>, ? extends Observable<U>> selector,
final Scheduler scheduler
) {
return publish(selector.invoke(source), scheduler);
}
/**
* Returns an observable which shares a single subscription to the underlying source.
* @param <T> the element type
* @param <U> the result type
* @param source the source of Ts
* @param selector the selector function for the return stream
* @param initialValue the initial stream value
* @return the new observable
*/
public static <T, U> Observable<U> publish(
final Observable<? extends T> source,
final Func1<? super Observable<? extends T>, ? extends Observable<U>> selector,
final U initialValue
) {
return publish(selector.invoke(source), initialValue);
}
/**
* Returns an observable which shares a single subscription to the underlying source.
* @param <T> the element type
* @param <U> the result type
* @param source the source of Ts
* @param selector the selector function for the return stream
* @param initialValue the initial stream value
* @param scheduler the scheduler where the values will be replayed
* @return the new observable
*/
public static <T, U> Observable<U> publish(
final Observable<? extends T> source,
final Func1<? super Observable<? extends T>, ? extends Observable<U>> selector,
final U initialValue,
final Scheduler scheduler
) {
return publish(selector.invoke(source), initialValue, scheduler);
}
	/**
	 * Returns an observable which shares a single subscription to the underlying source.
	 * <p>NOTE(review): the shared subject is created lazily on the first registration
	 * and is never reset afterwards, so late registrants join the same ongoing stream —
	 * confirm this is the intended connect-once semantics.</p>
	 * @param <T> the element type
	 * @param source the source of Ts
	 * @param scheduler the scheduler where the values will be replayed
	 * @return the new observable
	 */
	public static <T> Observable<T> publish(
			final Observable<? extends T> source,
			final Scheduler scheduler
	) {
		return new Observable<T>() {
			/** The first registree to initialize the common observer. */
			final Lock lock = new ReentrantLock();
			@GuardedBy("lock")
			DefaultObservable<T> obs;
			@Override
			public Closeable register(Observer<? super T> observer) {
				// lazily create the shared subject; only the first registrant
				// connects it to the scheduler-shifted source
				lock.lock();
				try {
					if (obs == null) {
						obs = new DefaultObservable<T>();
						observeOn(source, scheduler).register(obs);
					}
				} finally {
					lock.unlock();
				}
				// every registrant shares the same subject
				return obs.register(observer);
			}
		};
	}
/**
 * Shares a single subscription to the underlying source among all registered
 * observers, starting the shared stream with the given initial value.
 * Uses the current default scheduler for replay.
 * @param <T> the element type
 * @param source the source of Ts
 * @param initialValue the value the shared stream starts with
 * @return the new observable
 */
public static <T> Observable<T> publish(
        final Observable<? extends T> source,
        final T initialValue
) {
    Scheduler scheduler = DEFAULT_SCHEDULER.get();
    return publish(source, initialValue, scheduler);
}
/**
 * Shares a single subscription to the underlying source among all registered
 * observers, prepending the given initial value and replaying on the
 * supplied scheduler.
 * @param <T> the element type
 * @param source the source of Ts
 * @param initialValue the value the shared stream starts with
 * @param scheduler the scheduler where the values will be replayed
 * @return the new observable
 */
public static <T> Observable<T> publish(
        final Observable<? extends T> source,
        final T initialValue,
        final Scheduler scheduler
) {
    // Prepend the initial value, then publish the combined stream.
    Observable<? extends T> prefixed = startWith(source, initialValue, scheduler);
    return publish(prefixed, scheduler);
}
/**
 * Creates an observable emitting {@code count} BigDecimal values, starting at
 * {@code start} and advancing by {@code step}, on the default scheduler.
 * @param start the start value.
 * @param count the count
 * @param step the stepping
 * @return the observable
 */
@Nonnull
public static Observable<BigDecimal> range(
        @Nonnull final BigDecimal start,
        final int count,
        @Nonnull final BigDecimal step) {
    Scheduler pool = DEFAULT_SCHEDULER.get();
    return range(start, count, step, pool);
}
/**
 * Creates an observable emitting {@code count} BigDecimal values, starting at
 * {@code start} and advancing by {@code step}, on the given scheduler.
 * Emission stops early if the observer cancels.
 * @param start the start value.
 * @param count the count
 * @param step the stepping
 * @param pool the execution thread pool.
 * @return the observable
 */
@Nonnull
public static Observable<BigDecimal> range(
        @Nonnull final BigDecimal start,
        final int count,
        @Nonnull final BigDecimal step,
        @Nonnull final Scheduler pool) {
    return new Observable<BigDecimal>() {
        @Override
        public Closeable register(final Observer<? super BigDecimal> observer) {
            DefaultRunnable task = new DefaultRunnable() {
                @Override
                public void onRun() {
                    BigDecimal current = start;
                    int emitted = 0;
                    // Emit until count values are produced or cancellation is observed.
                    while (emitted < count && !cancelled()) {
                        observer.next(current);
                        current = current.add(step);
                        emitted++;
                    }
                    if (!cancelled()) {
                        observer.finish();
                    }
                }
            };
            return pool.schedule(task);
        }
    };
}
/**
 * Creates an observable emitting {@code count} consecutive BigInteger values
 * starting at {@code start}, on the default scheduler.
 * @param start the start value.
 * @param count the count
 * @return the observable
 */
@Nonnull
public static Observable<BigInteger> range(
        @Nonnull final BigInteger start,
        @Nonnull final BigInteger count) {
    Scheduler pool = DEFAULT_SCHEDULER.get();
    return range(start, count, pool);
}
/**
 * Creates an observable which generates BigInteger numbers from start.
 * Emits {@code count} consecutive values beginning at {@code start};
 * stops early if the observer cancels.
 * @param start the start value.
 * @param count the count
 * @param pool the execution thread pool.
 * @return the observable
 */
@Nonnull
public static Observable<BigInteger> range(
        @Nonnull final BigInteger start,
        @Nonnull final BigInteger count,
        @Nonnull final Scheduler pool) { // @Nonnull added: every sibling range() overload annotates its pool parameter
    return new Observable<BigInteger>() {
        @Override
        public Closeable register(final Observer<? super BigInteger> observer) {
            DefaultRunnable s = new DefaultRunnable() {
                @Override
                public void onRun() {
                    // Half-open range [start, start + count).
                    BigInteger end = start.add(count);
                    for (BigInteger i = start; i.compareTo(end) < 0
                            && !cancelled(); i = i.add(BigInteger.ONE)) {
                        observer.next(i);
                    }
                    if (!cancelled()) {
                        observer.finish();
                    }
                }
            };
            return pool.schedule(s);
        }
    };
}
/**
 * Creates an observable emitting {@code count} double values, starting at
 * {@code start} with increments of {@code step}, on the default scheduler.
 * @param start the start value.
 * @param count the count
 * @param step the stepping
 * @return the observable
 */
@Nonnull
public static Observable<Double> range(
        final double start,
        final int count,
        final double step) {
    Scheduler pool = DEFAULT_SCHEDULER.get();
    return range(start, count, step, pool);
}
/**
 * Creates an observable which produces Double values from <code>start</code> in <code>count</code>
 * amount and each subsequent element has a difference of <code>step</code>.
 * Each value is computed as {@code start + i * step} to avoid cumulative
 * floating point drift; stops early if the observer cancels.
 * @param start the starting value
 * @param count how many values to produce
 * @param step the incrementation amount
 * @param pool the pool where to emit the values
 * @return the observable of double
 */
@Nonnull // added: the Float/BigDecimal pool overloads carry @Nonnull; this one was inconsistently missing it
public static Observable<Double> range(
        final double start,
        final int count,
        final double step,
        @Nonnull final Scheduler pool) {
    return new Observable<Double>() {
        @Override
        public Closeable register(final Observer<? super Double> observer) {
            DefaultRunnable s = new DefaultRunnable() {
                @Override
                public void onRun() {
                    for (int i = 0; i < count && !cancelled(); i++) {
                        observer.next(start + i * step);
                    }
                    if (!cancelled()) {
                        observer.finish();
                    }
                }
            };
            return pool.schedule(s);
        }
    };
}
/**
 * Creates an observable emitting {@code count} float values, starting at
 * {@code start} with increments of {@code step}, on the default scheduler.
 * @param start the start value.
 * @param count the count
 * @param step the stepping
 * @return the observable
 */
@Nonnull
public static Observable<Float> range(
        final float start,
        final int count,
        final float step) {
    Scheduler pool = DEFAULT_SCHEDULER.get();
    return range(start, count, step, pool);
}
/**
 * Creates an observable which produces {@code count} Float values beginning at
 * {@code start}, each subsequent value differing by {@code step}, emitted on
 * the given pool. Each value is computed as {@code start + i * step};
 * emission stops early on cancellation.
 * @param start the starting value
 * @param count how many values to produce
 * @param step the incrementation amount
 * @param pool the pool where to emit the values
 * @return the observable of float
 */
@Nonnull
public static Observable<Float> range(
        final float start,
        final int count,
        final float step,
        @Nonnull final Scheduler pool) {
    return new Observable<Float>() {
        @Override
        public Closeable register(final Observer<? super Float> observer) {
            DefaultRunnable task = new DefaultRunnable() {
                @Override
                public void onRun() {
                    int i = 0;
                    while (i < count && !cancelled()) {
                        observer.next(start + i * step);
                        i++;
                    }
                    if (!cancelled()) {
                        observer.finish();
                    }
                }
            };
            return pool.schedule(task);
        }
    };
}
/**
 * Creates an observable which generates numbers from start.
 * Emits {@code count} consecutive integers beginning at {@code start},
 * on the default scheduler.
 * @param start the start value.
 * @param count the count
 * @return the observable
 */
@Nonnull
public static Observable<Integer> range(
        final int start,
        final int count) { // removed invalid @Nonnull: the annotation is meaningless on a primitive int
    return range(start, count, DEFAULT_SCHEDULER.get());
}
/**
 * Creates an observable which generates numbers from start.
 * Emits the integers {@code start .. start + count - 1} on the given pool;
 * stops early if the observer cancels.
 * @param start the start value.
 * @param count the count
 * @param pool the execution thread pool.
 * @return the observable
 */
@Nonnull // added for consistency: sibling range() overloads annotate their return
public static Observable<Integer> range(
        final int start,
        final int count,
        @Nonnull final Scheduler pool) {
    return new Observable<Integer>() {
        @Override
        public Closeable register(final Observer<? super Integer> observer) {
            DefaultRunnable s = new DefaultRunnable() {
                @Override
                public void onRun() {
                    for (int i = start; i < start + count && !cancelled(); i++) {
                        observer.next(i);
                    }
                    if (!cancelled()) {
                        observer.finish();
                    }
                }
            };
            return pool.schedule(s);
        }
    };
}
/**
 * Relays values of T while the given condition does not hold.
 * Once the condition turns true, relaying stops.
 * @param <T> the element type
 * @param source the source of elements
 * @param condition the condition that must be false to relay Ts
 * @return the new observable
 */
@Nonnull
public static <T> Observable<T> relayUntil(
        @Nonnull final Observable<? extends T> source,
        @Nonnull final Func0<Boolean> condition) {
    // relayUntil is simply relayWhile with the condition negated.
    Func0<Boolean> negated = Functions.not(condition);
    return relayWhile(source, negated);
}
/**
 * Relay the stream of Ts until condition turns into false.
 * The condition is re-evaluated before each value; once it returns false,
 * the output finishes and the upstream registration is closed.
 * @param <T> the type of the values
 * @param source the source of Ts
 * @param condition the condition that must hold to relay Ts
 * @return the new observable
 */
@Nonnull
public static <T> Observable<T> relayWhile(
        @Nonnull final Observable<? extends T> source,
        @Nonnull final Func0<Boolean> condition) {
    return new Observable<T>() {
        @Override
        public Closeable register(final Observer<? super T> observer) {
            // DefaultObserver(true): closes itself when finish()/error() is invoked.
            DefaultObserver<T> obs = new DefaultObserver<T>(true) {
                @Override
                public void onError(Throwable ex) {
                    observer.error(ex);
                }
                @Override
                public void onFinish() {
                    observer.finish();
                }
                @Override
                public void onNext(T value) {
                    if (condition.invoke()) {
                        observer.next(value);
                    } else {
                        // Condition failed: complete the output and self-close.
                        finish();
                    }
                }
            };
            // Closing the returned handle closes both the observer and the registration.
            return Closeables.close(obs, source.register(obs));
        }
    };
}
/**
 * Unwraps the values within a time-interval observable back to plain values.
 * @param <T> the element type
 * @param source the source which has its elements in a timeinterval way.
 * @return the raw observables of Ts
 */
@Nonnull
public static <T> Observable<T> removeTimeInterval(
        @Nonnull Observable<TimeInterval<T>> source) {
    Func1<TimeInterval<T>, T> f = Reactive.<T>unwrapTimeInterval();
    return select(source, f);
}
/**
 * Unwraps the values within a timestamped observable back to plain values.
 * @param <T> the element type
 * @param source the source which has its elements in a timestamped way.
 * @return the raw observables of Ts
 */
@Nonnull
public static <T> Observable<T> removeTimestamped(
        @Nonnull Observable<Timestamped<T>> source) {
    return select(source, Reactive.<T>unwrapTimestamped());
}
/**
 * Creates an observable which repeatedly calls the given function to generate
 * Ts indefinitely on the default pool. Observers must unregister to stop the
 * infinite loop.
 * @param <T> the type of elements to produce
 * @param func the function which generates elements
 * @return the observable
 */
@Nonnull
public static <T> Observable<T> repeat(
        @Nonnull final Func0<? extends T> func) {
    Scheduler pool = DEFAULT_SCHEDULER.get();
    return repeat(func, pool);
}
/**
 * Creates an observable which calls the given function <code>count</code>
 * times to generate Ts, running on the default pool.
 * @param <T> the element type
 * @param func the function to call to generate values
 * @param count the number of times to repeat the value
 * @return the observable
 */
@Nonnull
public static <T> Observable<T> repeat(
        @Nonnull final Func0<? extends T> func,
        final int count) {
    Scheduler pool = DEFAULT_SCHEDULER.get();
    return repeat(func, count, pool);
}
/**
 * Creates an observable which calls the given function <code>count</code>
 * times to generate Ts, running on the given pool. Emission stops early
 * if the observer cancels; finish() is only signalled when not cancelled.
 * @param <T> the element type
 * @param func the function to call to generate values
 * @param count the number of times to repeat the value
 * @param pool the pool where the loop should be executed
 * @return the observable
 */
@Nonnull
public static <T> Observable<T> repeat(
        @Nonnull final Func0<? extends T> func,
        final int count,
        @Nonnull final Scheduler pool) {
    return new Observable<T>() {
        @Override
        public Closeable register(final Observer<? super T> observer) {
            DefaultRunnable task = new DefaultRunnable() {
                @Override
                public void onRun() {
                    for (int remaining = count; remaining > 0 && !cancelled(); remaining--) {
                        observer.next(func.invoke());
                    }
                    if (!cancelled()) {
                        observer.finish();
                    }
                }
            };
            return pool.schedule(task);
        }
    };
}
/**
 * Creates an observable which repeatedly calls the given function to generate
 * Ts indefinitely on the supplied pool. Observers must unregister to stop the
 * infinite loop; the stream never finishes on its own.
 * @param <T> the type of elements to produce
 * @param func the function which generates elements
 * @param pool the pool where the generator loop runs
 * @return the observable
 */
public static <T> Observable<T> repeat(
        @Nonnull final Func0<? extends T> func,
        @Nonnull final Scheduler pool) {
    return new Observable<T>() {
        @Override
        public Closeable register(final Observer<? super T> observer) {
            DefaultRunnable task = new DefaultRunnable() {
                @Override
                public void onRun() {
                    for (;;) {
                        if (cancelled()) {
                            break;
                        }
                        observer.next(func.invoke());
                    }
                }
            };
            return pool.schedule(task);
        }
    };
}
/**
 * Repeats the source observable count times by building a list containing the
 * source that many times and concatenating it. A non-positive count yields an
 * empty observable.
 * @param <T> the element type
 * @param source the source observable
 * @param count the number of times to repeat
 * @return the new observable
 */
@Nonnull
public static <T> Observable<T> repeat(
        @Nonnull Observable<? extends T> source,
        int count) {
    if (count <= 0) {
        return empty();
    }
    List<Observable<? extends T>> copies = new ArrayList<Observable<? extends T>>(count);
    for (int i = 0; i < count; i++) {
        copies.add(source);
    }
    return concat(copies);
}
/**
 * Creates an observable which repeats the given value indefinitely on the
 * default pool. Observers must deregister to stop the infinite background loop.
 * @param <T> the element type
 * @param value the value to repeat
 * @return the observable
 */
@Nonnull
public static <T> Observable<T> repeat(final T value) {
    Scheduler pool = DEFAULT_SCHEDULER.get();
    return repeat(value, pool);
}
/**
 * Creates an observable which repeats the given value <code>count</code> times
 * on the default pool.
 * @param <T> the element type
 * @param value the value to repeat
 * @param count the number of times to repeat the value
 * @return the observable
 */
@Nonnull
public static <T> Observable<T> repeat(
        final T value,
        final int count) {
    Scheduler pool = DEFAULT_SCHEDULER.get();
    return repeat(value, count, pool);
}
/**
 * Creates an observable which repeats the given value <code>count</code> times
 * on the given pool.
 * @param <T> the element type
 * @param value the value to repeat
 * @param count the number of times to repeat the value
 * @param pool the pool where the loop should be executed
 * @return the observable
 */
@Nonnull
public static <T> Observable<T> repeat(
        final T value,
        final int count,
        @Nonnull final Scheduler pool) {
    // A constant-returning generator turns this into the Func0 overload.
    Func0<T> generator = Functions.constant0(value);
    return repeat(generator, count, pool);
}
/**
 * Creates an observable which repeats the given value indefinitely on the
 * given pool. Observers must deregister to stop the infinite background loop.
 * @param <T> the element type
 * @param value the value to repeat
 * @param pool the pool where the loop should be executed
 * @return the observable
 */
public static <T> Observable<T> repeat(
        final T value,
        @Nonnull final Scheduler pool) {
    Func0<T> generator = Functions.constant0(value);
    return repeat(generator, pool);
}
/**
 * Replaces the current default scheduler with the specified new scheduler
 * atomically. This method is threadsafe.
 * @param newScheduler the new scheduler
 * @return the scheduler that was in effect before the replacement
 */
@Nonnull
public static Scheduler replaceDefaultScheduler(
        @Nonnull Scheduler newScheduler) {
    // Explicit runtime check in addition to the @Nonnull annotation.
    if (newScheduler == null) {
        throw new IllegalArgumentException("newScheduler is null");
    }
    Scheduler previous = DEFAULT_SCHEDULER.getAndSet(newScheduler);
    return previous;
}
/**
 * Creates an observable which shares the source observable and replays all
 * source Ts to any registering observer, using the default scheduler.
 * @param <T> the element type
 * @param source the source of Ts
 * @return the new observable
 */
public static <T> Observable<T> replay(
        final Observable<? extends T> source
) {
    Scheduler scheduler = DEFAULT_SCHEDULER.get();
    return replay(source, scheduler);
}
/**
 * Creates an observable which shares the selector-derived stream and replays
 * its most recent {@code bufferSize} values to any registering observer.
 * @param <T> the element type
 * @param <U> the return element type
 * @param source the source of Ts
 * @param selector the output stream selector
 * @param bufferSize the target buffer size
 * @return the new observable
 */
public static <T, U> Observable<U> replay(
        final Observable<? extends T> source,
        final Func1<? super Observable<? extends T>, ? extends Observable<U>> selector,
        final int bufferSize
) {
    Observable<U> selected = selector.invoke(source);
    return replay(selected, bufferSize);
}
/**
 * Creates an observable which shares the selector-derived stream and replays
 * up to {@code bufferSize} values to any registering observer. After each
 * periodic timespan the buffer is reset.
 * @param <T> the source element type
 * @param <U> the return element type
 * @param source the source of Ts
 * @param selector the output stream selector
 * @param bufferSize the buffer size
 * @param timeSpan the window length
 * @param unit the time unit
 * @return the new observer
 */
public static <T, U> Observable<U> replay(
        final Observable<? extends T> source,
        final Func1<? super Observable<? extends T>, ? extends Observable<U>> selector,
        final int bufferSize,
        final long timeSpan,
        final TimeUnit unit
) {
    Observable<U> selected = selector.invoke(source);
    return replay(selected, bufferSize, timeSpan, unit);
}
/**
 * Creates an observable which shares the selector-derived stream and replays
 * up to {@code bufferSize} values to any registering observer on the given
 * scheduler. After each periodic timespan the buffer is reset.
 * @param <T> the source element type
 * @param <U> the return element type
 * @param source the source of Ts
 * @param selector the output stream selector
 * @param bufferSize the buffer size
 * @param timeSpan the window length
 * @param unit the time unit
 * @param scheduler the target scheduler
 * @return the new observer
 */
public static <T, U> Observable<U> replay(
        final Observable<? extends T> source,
        final Func1<? super Observable<? extends T>, ? extends Observable<U>> selector,
        final int bufferSize,
        final long timeSpan,
        final TimeUnit unit,
        final Scheduler scheduler
) {
    Observable<U> selected = selector.invoke(source);
    return replay(selected, bufferSize, timeSpan, unit, scheduler);
}
/**
 * Creates an observable which shares the selector-derived stream and replays
 * all of its values to any registering observer. After each periodic timespan
 * the buffer is reset.
 * @param <T> the source element type
 * @param <U> the return element type
 * @param source the source of Ts
 * @param selector the output stream selector
 * @param timeSpan the window length
 * @param unit the time unit
 * @return the new observer
 */
public static <T, U> Observable<U> replay(
        final Observable<? extends T> source,
        final Func1<? super Observable<? extends T>, ? extends Observable<U>> selector,
        final long timeSpan,
        final TimeUnit unit
) {
    Observable<U> selected = selector.invoke(source);
    return replay(selected, timeSpan, unit);
}
/**
 * Creates an observable which shares the selector-derived stream and replays
 * all of its values to any registering observer on the given scheduler.
 * After each periodic timespan the buffer is reset.
 * @param <T> the source element type
 * @param <U> the return element type
 * @param source the source of Ts
 * @param selector the output stream selector
 * @param timeSpan the window length
 * @param unit the time unit
 * @param scheduler the target scheduler
 * @return the new observer
 */
public static <T, U> Observable<U> replay(
        final Observable<? extends T> source,
        final Func1<? super Observable<? extends T>, ? extends Observable<U>> selector,
        final long timeSpan,
        final TimeUnit unit,
        final Scheduler scheduler
) {
    Observable<U> selected = selector.invoke(source);
    return replay(selected, timeSpan, unit, scheduler);
}
/**
 * Creates an observable which shares the source observable and replays its
 * most recent {@code bufferSize} values to any registering observer, using
 * the default scheduler.
 * @param <T> the element type
 * @param source the source of Ts
 * @param bufferSize the target buffer size
 * @return the new observable
 */
public static <T> Observable<T> replay(
        final Observable<? extends T> source,
        final int bufferSize
) {
    Scheduler scheduler = DEFAULT_SCHEDULER.get();
    return replay(source, bufferSize, scheduler);
}
/**
 * Creates an observable which shares the source observable and replays up to
 * {@code bufferSize} values to any registering observer, using the default
 * scheduler. After each periodic timespan the buffer is reset.
 * @param <T> the source element type
 * @param source the source of Ts
 * @param bufferSize the buffer size
 * @param timeSpan the window length
 * @param unit the time unit
 * @return the new observer
 */
public static <T> Observable<T> replay(
        final Observable<? extends T> source,
        final int bufferSize,
        final long timeSpan,
        final TimeUnit unit
) {
    Scheduler scheduler = DEFAULT_SCHEDULER.get();
    return replay(source, bufferSize, timeSpan, unit, scheduler);
}
/**
 * Creates an observable which shares the source observable and replays the bufferSize source Ts
 * to any of the registering observers. After the periodic timespan, the buffer is reset.
 * <p>The source is registered lazily on the first {@code register()} call and the
 * periodic reset timer is started at the same time.</p>
 * @param <T> the source element type
 * @param source the source of Ts
 * @param bufferSize the buffer size
 * @param timeSpan the window length
 * @param unit the time unit
 * @param scheduler the target scheduler
 * @return the new observer
 */
public static <T> Observable<T> replay(
        final Observable<? extends T> source,
        final int bufferSize,
        final long timeSpan,
        final TimeUnit unit,
        final Scheduler scheduler
) {
    return new Observable<T>() {
        /** The read-write lock. */
        final ReadWriteLock rwLock = new ReentrantReadWriteLock(true);
        /** The read lock for reading elements of the buffer. */
        final Lock readLock = rwLock.readLock();
        /** The write lock to write elements of the buffer and add new listeners. */
        final Lock writeLock = rwLock.writeLock();
        /** The buffer that holds the observed values so far. */
        @GuardedBy("rwLock")
        CircularBuffer<Option<T>> buffer = new CircularBuffer<Option<T>>(bufferSize);
        /** The single registration handler. */
        @GuardedBy("writeLock")
        Closeable sourceClose;
        /** The single registration handler. */
        @GuardedBy("writeLock")
        Closeable timerClose;
        /** The set of listeners active. */
        @GuardedBy("writeLock")
        Set<SingleLaneExecutor<Pair<Integer, CircularBuffer<Option<T>>>>> listeners = new HashSet<SingleLaneExecutor<Pair<Integer, CircularBuffer<Option<T>>>>>();
        @Override
        protected void finalize() throws Throwable {
            Closeables.close0(timerClose);
            Closeables.close0(sourceClose);
            super.finalize();
        }
        @Override
        public Closeable register(final Observer<? super T> observer) {
            writeLock.lock();
            try {
                // FIX: was "sourceClose != null", which could never be true on the
                // first registration (the field starts as null), so the source was
                // never subscribed and no values were ever buffered or replayed.
                // The sibling replay() overloads all use "== null" here.
                if (sourceClose == null) {
                    sourceClose = source.register(new Observer<T>() {
                        /**
                         * Buffer and submit the option to all registered listeners.
                         * @param opt the option to submit
                         */
                        void doOption(Option<T> opt) {
                            writeLock.lock();
                            try {
                                buffer.add(opt);
                                Pair<Integer, CircularBuffer<Option<T>>> of = Pair.of(buffer.tail(), buffer);
                                for (SingleLaneExecutor<Pair<Integer, CircularBuffer<Option<T>>>> l : listeners) {
                                    l.add(of);
                                }
                            } finally {
                                writeLock.unlock();
                            }
                        }
                        @Override
                        public void error(Throwable ex) {
                            doOption(Option.<T>error(ex));
                        }
                        @Override
                        public void finish() {
                            doOption(Option.<T>none());
                        }
                        @Override
                        public void next(T value) {
                            doOption(Option.some(value));
                        }
                    });
                    // Periodically discard the history by swapping in a fresh buffer.
                    timerClose = scheduler.schedule(new Runnable() {
                        @Override
                        public void run() {
                            writeLock.lock();
                            try {
                                buffer = new CircularBuffer<Option<T>>(bufferSize);
                            } finally {
                                writeLock.unlock();
                            }
                        }
                    }, timeSpan, timeSpan, unit);
                }
            } finally {
                writeLock.unlock();
            }
            final AtomicBoolean cancel = new AtomicBoolean();
            final SingleLaneExecutor<Pair<Integer, CircularBuffer<Option<T>>>> playback = SingleLaneExecutor.create(scheduler, new Action1<Pair<Integer, CircularBuffer<Option<T>>>>() {
                /** The local buffer reader index. */
                @GuardedBy("readLock")
                int index = 0;
                /** The last buffer. */
                @GuardedBy("readLock")
                CircularBuffer<Option<T>> last;
                @Override
                public void invoke(Pair<Integer, CircularBuffer<Option<T>>> value) {
                    readLock.lock();
                    try {
                        if (last != value.second) {
                            // The timer replaced the buffer: restart from its beginning.
                            index = 0;
                            last = value.second;
                        }
                        // FIX: clamp against the buffer we are actually reading ("last"),
                        // not the possibly already-replaced current "buffer" field;
                        // otherwise index could point at overwritten slots of "last".
                        index = Math.max(index, last.head());
                        while (index < value.first && !cancel.get()) {
                            dispatch(observer, last.get(index++));
                        }
                    } finally {
                        readLock.unlock();
                    }
                }
            });
            writeLock.lock();
            try {
                playback.add(Pair.of(buffer.tail(), buffer));
                listeners.add(playback);
            } finally {
                writeLock.unlock();
            }
            final Closeable c = new Closeable() {
                @Override
                public void close() throws IOException {
                    cancel.set(true);
                    writeLock.lock();
                    try {
                        listeners.remove(playback);
                    } finally {
                        writeLock.unlock();
                    }
                    Closeables.close0(playback);
                }
            };
            return c;
        }
    };
}
/**
 * Creates an observable which shares the source observable and replays all source Ts
 * to any of the registering observers.
 * The source is registered lazily on the first {@code register()} call; each
 * observer drains the shared circular buffer through its own single-lane executor.
 * @param <T> the element type
 * @param source the source of Ts
 * @param bufferSize the target buffer size
 * @param scheduler the scheduler from where the historical elements are emitted
 * @return the new observable
 */
public static <T> Observable<T> replay(
        final Observable<? extends T> source,
        final int bufferSize,
        final Scheduler scheduler
) {
    return new Observable<T>() {
        /** The read-write lock. */
        final ReadWriteLock rwLock = new ReentrantReadWriteLock(true);
        /** The read lock for reading elements of the buffer. */
        final Lock readLock = rwLock.readLock();
        /** The write lock to write elements of the buffer and add new listeners. */
        final Lock writeLock = rwLock.writeLock();
        /** The buffer that holds the observed values so far. */
        @GuardedBy("rwLock")
        final CircularBuffer<Option<T>> buffer = new CircularBuffer<Option<T>>(bufferSize);
        /** The single registration handler. */
        @GuardedBy("writeLock")
        Closeable sourceClose;
        /** The set of listeners active. */
        @GuardedBy("writeLock")
        Set<SingleLaneExecutor<Integer>> listeners = new HashSet<SingleLaneExecutor<Integer>>();
        @Override
        protected void finalize() throws Throwable {
            // Best-effort cleanup of the upstream registration.
            Closeables.close0(sourceClose);
            super.finalize();
        }
        @Override
        public Closeable register(final Observer<? super T> observer) {
            writeLock.lock();
            try {
                if (sourceClose == null) {
                    // First observer: subscribe to the source exactly once and
                    // record each event (value/error/finish) as an Option in the buffer.
                    sourceClose = source.register(new Observer<T>() {
                        /**
                         * Buffer and submit the option to all registered listeners.
                         * @param opt the option to submit
                         */
                        void doOption(Option<T> opt) {
                            writeLock.lock();
                            try {
                                buffer.add(opt);
                                // Notify every listener of the new high-water mark.
                                for (SingleLaneExecutor<Integer> l : listeners) {
                                    l.add(buffer.tail());
                                }
                            } finally {
                                writeLock.unlock();
                            }
                        }
                        @Override
                        public void error(Throwable ex) {
                            doOption(Option.<T>error(ex));
                        }
                        @Override
                        public void finish() {
                            doOption(Option.<T>none());
                        }
                        @Override
                        public void next(T value) {
                            doOption(Option.some(value));
                        }
                    });
                }
            } finally {
                writeLock.unlock();
            }
            final AtomicBoolean cancel = new AtomicBoolean();
            // Per-observer replay pump: drains buffered options up to the given index.
            final SingleLaneExecutor<Integer> playback = SingleLaneExecutor.create(scheduler, new Action1<Integer>() {
                /** The local buffer reader index. */
                @GuardedBy("readLock")
                int index = 0;
                @Override
                public void invoke(Integer value) {
                    readLock.lock();
                    try {
                        // Skip entries already overwritten in the circular buffer.
                        index = Math.max(index, buffer.head());
                        while (index < value && !cancel.get()) {
                            dispatch(observer, buffer.get(index++));
                        }
                    } finally {
                        readLock.unlock();
                    }
                }
            });
            writeLock.lock();
            try {
                // Seed the pump with the current history, then subscribe for live events.
                playback.add(buffer.size());
                listeners.add(playback);
            } finally {
                writeLock.unlock();
            }
            final Closeable c = new Closeable() {
                @Override
                public void close() throws IOException {
                    cancel.set(true);
                    writeLock.lock();
                    try {
                        listeners.remove(playback);
                    } finally {
                        writeLock.unlock();
                    }
                    Closeables.close0(playback);
                }
            };
            return c;
        }
    };
}
/**
 * Creates an observable which shares the source observable and replays all
 * source Ts to any registering observer, using the default scheduler.
 * After each periodic timespan the buffer is reset.
 * @param <T> the source element type
 * @param source the source of Ts
 * @param timeSpan the window length
 * @param unit the time unit
 * @return the new observer
 */
public static <T> Observable<T> replay(
        final Observable<? extends T> source,
        final long timeSpan,
        final TimeUnit unit
) {
    Scheduler scheduler = DEFAULT_SCHEDULER.get();
    return replay(source, timeSpan, unit, scheduler);
}
/**
 * Creates an observable which shares the source observable and replays all source Ts
 * to any of the registering observers. After the periodic timespan, the buffer is reset.
 * The source and the reset timer are started lazily on the first register() call.
 * @param <T> the source element type
 * @param source the source of Ts
 * @param timeSpan the window length
 * @param unit the time unit
 * @param scheduler the target scheduler
 * @return the new observer
 */
public static <T> Observable<T> replay(
        final Observable<? extends T> source,
        final long timeSpan,
        final TimeUnit unit,
        final Scheduler scheduler
) {
    return new Observable<T>() {
        /** The read-write lock. */
        final ReadWriteLock rwLock = new ReentrantReadWriteLock(true);
        /** The read lock for reading elements of the buffer. */
        final Lock readLock = rwLock.readLock();
        /** The write lock to write elements of the buffer and add new listeners. */
        final Lock writeLock = rwLock.writeLock();
        /** The buffer that holds the observed values so far. */
        @GuardedBy("rwLock")
        List<Option<T>> buffer = new ArrayList<Option<T>>();
        /** The single registration handler. */
        @GuardedBy("writeLock")
        Closeable sourceClose;
        /** The single registration handler. */
        @GuardedBy("writeLock")
        Closeable timerClose;
        /** The set of listeners active. */
        @GuardedBy("writeLock")
        Set<SingleLaneExecutor<Pair<Integer, List<Option<T>>>>> listeners = new HashSet<SingleLaneExecutor<Pair<Integer, List<Option<T>>>>>();
        @Override
        protected void finalize() throws Throwable {
            // Best-effort cleanup of the timer and the upstream registration.
            Closeables.close0(timerClose);
            Closeables.close0(sourceClose);
            super.finalize();
        }
        @Override
        public Closeable register(final Observer<? super T> observer) {
            writeLock.lock();
            try {
                if (sourceClose == null) {
                    // First observer: subscribe to the source exactly once and
                    // record each event (value/error/finish) as an Option.
                    sourceClose = source.register(new Observer<T>() {
                        /**
                         * Buffer and submit the option to all registered listeners.
                         * @param opt the option to submit
                         */
                        void doOption(Option<T> opt) {
                            writeLock.lock();
                            try {
                                buffer.add(opt);
                                // Pair the current size with the buffer instance so
                                // listeners can detect when the timer swapped buffers.
                                Pair<Integer, List<Option<T>>> of = Pair.of(buffer.size(), buffer);
                                for (SingleLaneExecutor<Pair<Integer, List<Option<T>>>> l : listeners) {
                                    l.add(of);
                                }
                            } finally {
                                writeLock.unlock();
                            }
                        }
                        @Override
                        public void error(Throwable ex) {
                            doOption(Option.<T>error(ex));
                        }
                        @Override
                        public void finish() {
                            doOption(Option.<T>none());
                        }
                        @Override
                        public void next(T value) {
                            doOption(Option.some(value));
                        }
                    });
                    // Periodically discard the history by swapping in a fresh list.
                    timerClose = scheduler.schedule(new Runnable() {
                        @Override
                        public void run() {
                            writeLock.lock();
                            try {
                                buffer = new ArrayList<Option<T>>();
                            } finally {
                                writeLock.unlock();
                            }
                        }
                    }, timeSpan, timeSpan, unit);
                }
            } finally {
                writeLock.unlock();
            }
            final AtomicBoolean cancel = new AtomicBoolean();
            // Per-observer replay pump: drains the (possibly swapped) buffer.
            final SingleLaneExecutor<Pair<Integer, List<Option<T>>>> playback = SingleLaneExecutor.create(scheduler, new Action1<Pair<Integer, List<Option<T>>>>() {
                /** The local buffer reader index. */
                @GuardedBy("readLock")
                int index = 0;
                /** The last buffer. */
                @GuardedBy("readLock")
                List<Option<T>> last;
                @Override
                public void invoke(Pair<Integer, List<Option<T>>> value) {
                    readLock.lock();
                    try {
                        if (last != value.second) {
                            // The timer replaced the buffer: restart from its beginning.
                            index = 0;
                            last = value.second;
                        }
                        while (index < value.first && !cancel.get()) {
                            dispatch(observer, last.get(index++));
                        }
                    } finally {
                        readLock.unlock();
                    }
                }
            });
            writeLock.lock();
            try {
                // Seed the pump with the current history, then subscribe for live events.
                playback.add(Pair.of(buffer.size(), buffer));
                listeners.add(playback);
            } finally {
                writeLock.unlock();
            }
            final Closeable c = new Closeable() {
                @Override
                public void close() throws IOException {
                    cancel.set(true);
                    writeLock.lock();
                    try {
                        listeners.remove(playback);
                    } finally {
                        writeLock.unlock();
                    }
                    Closeables.close0(playback);
                }
            };
            return c;
        }
    };
}
/**
 * Creates an observable which shares the source observable and replays all source Ts
 * to any of the registering observers.
 * The source is registered lazily on the first register() call; the unbounded
 * history list grows for the lifetime of this observable.
 * @param <T> the element type
 * @param source the source of Ts
 * @param scheduler the scheduler from where the historical elements are emitted
 * @return the new observable
 */
public static <T> Observable<T> replay(
        final Observable<? extends T> source,
        final Scheduler scheduler
) {
    return new Observable<T>() {
        /** The read-write lock. */
        final ReadWriteLock rwLock = new ReentrantReadWriteLock(true);
        /** The read lock for reading elements of the buffer. */
        final Lock readLock = rwLock.readLock();
        /** The write lock to write elements of the buffer and add new listeners. */
        final Lock writeLock = rwLock.writeLock();
        /** The buffer that holds the observed values so far. */
        @GuardedBy("rwLock")
        final List<Option<T>> buffer = new ArrayList<Option<T>>();
        /** The single registration handler. */
        @GuardedBy("writeLock")
        Closeable sourceClose;
        /** The set of listeners active. */
        @GuardedBy("writeLock")
        Set<SingleLaneExecutor<Integer>> listeners = new HashSet<SingleLaneExecutor<Integer>>();
        @Override
        protected void finalize() throws Throwable {
            // Best-effort cleanup of the upstream registration.
            Closeables.close0(sourceClose);
            super.finalize();
        }
        @Override
        public Closeable register(final Observer<? super T> observer) {
            writeLock.lock();
            try {
                if (sourceClose == null) {
                    // First observer: subscribe to the source exactly once and
                    // record each event (value/error/finish) as an Option.
                    sourceClose = source.register(new Observer<T>() {
                        /**
                         * Buffer and submit the option to all registered listeners.
                         * @param opt the option to submit
                         */
                        void doOption(Option<T> opt) {
                            writeLock.lock();
                            try {
                                buffer.add(opt);
                                // Notify every listener of the new history length.
                                for (SingleLaneExecutor<Integer> l : listeners) {
                                    l.add(buffer.size());
                                }
                            } finally {
                                writeLock.unlock();
                            }
                        }
                        @Override
                        public void error(Throwable ex) {
                            doOption(Option.<T>error(ex));
                        }
                        @Override
                        public void finish() {
                            doOption(Option.<T>none());
                        }
                        @Override
                        public void next(T value) {
                            doOption(Option.some(value));
                        }
                    });
                }
            } finally {
                writeLock.unlock();
            }
            final AtomicBoolean cancel = new AtomicBoolean();
            // Per-observer replay pump: drains buffered options up to the given length.
            final SingleLaneExecutor<Integer> playback = SingleLaneExecutor.create(scheduler, new Action1<Integer>() {
                /** The local buffer reader index. */
                @GuardedBy("readLock")
                int index = 0;
                @Override
                public void invoke(Integer value) {
                    readLock.lock();
                    try {
                        while (index < value && !cancel.get()) {
                            dispatch(observer, buffer.get(index++));
                        }
                    } finally {
                        readLock.unlock();
                    }
                }
            });
            writeLock.lock();
            try {
                // Seed the pump with the current history, then subscribe for live events.
                playback.add(buffer.size());
                listeners.add(playback);
            } finally {
                writeLock.unlock();
            }
            final Closeable c = new Closeable() {
                @Override
                public void close() throws IOException {
                    cancel.set(true);
                    writeLock.lock();
                    try {
                        listeners.remove(playback);
                    } finally {
                        writeLock.unlock();
                    }
                    Closeables.close0(playback);
                }
            };
            return c;
        }
    };
}
/**
 * Creates an observable which shares the source observable returned by the selector and replays all source Ts
 * to any of the registering observers.
 * @param <T> the element type
 * @param <U> the return element type
 * @param source the source of Ts
 * @param selector the output stream selector
 * @param bufferSize the target buffer size
 * @param scheduler the scheduler from where the historical elements are emitted
 * @return the new observable
 */
public static <T, U> Observable<U> replay(
        final Observable<T> source,
        final Func1<? super Observable<? extends T>, ? extends Observable<U>> selector,
        final int bufferSize,
        final Scheduler scheduler
) {
    // route the source through the selector, then replay the routed stream
    final Observable<U> routed = selector.invoke(source);
    return replay(routed, bufferSize, scheduler);
}
/**
 * Returns the observable sequence for the supplied source observable by
 * invoking the selector function with it.
 * @param <T> the source element type
 * @param <U> the output element type
 * @param source the source of Ts
 * @param selector the selector which returns an observable of Us for the given <code>source</code>
 * @return the new observable
 */
public static <T, U> Observable<U> replay(
        final Observable<T> source,
        final Func1<? super Observable<T>, ? extends Observable<U>> selector
) {
    // no buffering requested: simply hand the source to the selector
    final Observable<U> routed = selector.invoke(source);
    return routed;
}
/**
 * Restore the default scheduler back to the <code>DefaultScheduler</code>
 * used when this class was initialized.
 * <p>Undoes any earlier replacement of the globally shared default scheduler.</p>
 */
public static void restoreDefaultScheduler() {
    DEFAULT_SCHEDULER.set(new DefaultScheduler());
}
/**
 * Returns an observable which listens to elements from a source until it signals an error()
 * or finish() and continues with the next observable. The registration happens only when the
 * previous observables finished in any way.
 * FIXME not sure how to close previous registrations
 * @param <T> the type of the elements
 * @param sources the list of observables
 * @return the observable
 */
@Nonnull
public static <T> Observable<T> resumeAlways(
        @Nonnull final Iterable<? extends Observable<? extends T>> sources) {
    return new Observable<T>() {
        @Override
        public Closeable register(final Observer<? super T> observer) {
            final Iterator<? extends Observable<? extends T>> it = sources.iterator();
            if (it.hasNext()) {
                DefaultObserver<T> obs = new DefaultObserver<T>(false) {
                    /** The registration of the currently active source. */
                    Closeable c;
                    {
                        // subscribe to the first source under the observer's lock
                        // so no event can race the field assignment
                        lock.lock();
                        try {
                            c = it.next().register(this);
                        } finally {
                            lock.unlock();
                        }
                    }
                    @Override
                    protected void onClose() {
                        Closeables.close0(c);
                    }
                    @Override
                    public void onError(Throwable ex) {
                        // an error ends the current source: advance to the next, if any;
                        // the error itself is swallowed by design ("always" resume)
                        Closeables.close0(c);
                        if (it.hasNext()) {
                            c = it.next().register(this);
                        } else {
                            observer.finish();
                            close();
                        }
                    }
                    @Override
                    public void onFinish() {
                        // normal completion also advances to the next source
                        Closeables.close0(c);
                        if (it.hasNext()) {
                            c = it.next().register(this);
                        } else {
                            observer.finish();
                            close();
                        }
                    }
                    @Override
                    public void onNext(T value) {
                        observer.next(value);
                    }
                };
                return obs;
            }
            // no sources at all: behave like an empty observable
            return Reactive.<T>empty().register(observer);
        }
    };
}
/**
 * It tries to submit the values of first observable, but when it throws an exception,
 * the next observable within source is used further on. Basically a failover between the Observables.
 * If the current source finish() then the result observable calls finish().
 * If the last of the sources calls error() the result observable calls error().
 * FIXME not sure how to close previous registrations
 * @param <T> the type of the values
 * @param sources the available source observables.
 * @return the failover observable
 */
@Nonnull
public static <T> Observable<T> resumeOnError(
        @Nonnull final Iterable<? extends Observable<? extends T>> sources) {
    return new Observable<T>() {
        @Override
        public Closeable register(final Observer<? super T> observer) {
            final Iterator<? extends Observable<? extends T>> it = sources.iterator();
            if (it.hasNext()) {
                DefaultObserver<T> obs = new DefaultObserver<T>(false) {
                    /** The registration of the currently active source. */
                    Closeable c;
                    {
                        lock.lock();
                        try {
                            c = it.next().register(this);
                        } finally {
                            lock.unlock();
                        }
                    }
                    @Override
                    protected void onClose() {
                        Closeables.close0(c);
                    }
                    @Override
                    public void onError(Throwable ex) {
                        Closeables.close0(c);
                        if (it.hasNext()) {
                            // fail over to the next available source
                            c = it.next().register(this);
                        } else {
                            // last source failed: propagate the error as the
                            // class contract documents (was observer.finish())
                            observer.error(ex);
                            close();
                        }
                    }
                    @Override
                    public void onFinish() {
                        // a normal completion of the current source ends the sequence
                        Closeables.close0(c);
                        observer.finish();
                        close();
                    }
                    @Override
                    public void onNext(T value) {
                        observer.next(value);
                    }
                };
                return obs;
            }
            // no sources at all: behave like an empty observable
            return Reactive.<T>empty().register(observer);
        }
    };
}
/**
 * Restarts the observation until the source observable terminates normally.
 * <p>Any error() from the source is swallowed and the source is re-registered.</p>
 * @param <T> the type of elements
 * @param source the source observable
 * @return the repeating observable
 */
@Nonnull
public static <T> Observable<T> retry(
        @Nonnull final Observable<? extends T> source) {
    return new Observable<T>() {
        @Override
        public Closeable register(final Observer<? super T> observer) {
            DefaultObserver<T> obs = new DefaultObserver<T>(false) {
                /** The registration. */
                Closeable c;
                {
                    // register under the observer's lock so events cannot race the setup
                    lock.lock();
                    try {
                        c = source.register(this);
                    } finally {
                        lock.unlock();
                    }
                }
                @Override
                protected void onClose() {
                    Closeables.close0(c);
                }
                @Override
                public void onError(Throwable ex) {
                    // swallow the error and resubscribe to the same source
                    Closeables.close0(c);
                    c = source.register(this);
                }
                @Override
                public void onFinish() {
                    observer.finish();
                    close();
                }
                @Override
                public void onNext(T value) {
                    observer.next(value);
                }
            };
            return obs;
        }
    };
}
/**
 * Restarts the observation until the source observable terminates normally or the <code>count</code> retry count was used up.
 * FIXME if the retry count is zero and yet another error comes, what should happen? finish or this time submit the error?
 * @param <T> the type of elements
 * @param source the source observable
 * @param count the retry count
 * @return the repeating observable
 */
@Nonnull
public static <T> Observable<T> retry(
        @Nonnull final Observable<? extends T> source,
        final int count) {
    return new Observable<T>() {
        @Override
        public Closeable register(final Observer<? super T> observer) {
            DefaultObserver<T> obs = new DefaultObserver<T>(false) {
                /** The remaining retry count. */
                int remainingCount = count;
                /** The registration. */
                Closeable c;
                {
                    // register under the observer's lock so events cannot race the setup
                    lock.lock();
                    try {
                        c = source.register(this);
                    } finally {
                        lock.unlock();
                    }
                }
                @Override
                protected void onClose() {
                    // release the active registration when the observer is closed;
                    // added for parity with the unbounded retry() overload which
                    // already did this (otherwise cancellation leaked the registration)
                    Closeables.close0(c);
                }
                @Override
                public void onError(Throwable ex) {
                    Closeables.close0(c);
                    if (remainingCount-- > 0) {
                        c = source.register(this);
                    } else {
                        // retries exhausted: propagate the last error
                        observer.error(ex);
                        close();
                    }
                }
                @Override
                public void onFinish() {
                    observer.finish();
                    close();
                }
                @Override
                public void onNext(T value) {
                    observer.next(value);
                }
            };
            return obs;
        }
    };
}
/**
 * Blocks until the observable calls finish() or error(). Values are submitted to the given action.
 * @param <T> the type of the elements
 * @param source the source observable
 * @param action the action to invoke for each value
 * @throws InterruptedException if the current thread is interrupted while waiting on
 * the observable.
 */
public static <T> void run(
        @Nonnull final Observable<? extends T> source,
        @Nonnull final Action1<? super T> action) throws InterruptedException {
    // latch released on either terminal event
    final CountDownLatch done = new CountDownLatch(1);
    Closeable reg = source.register(new DefaultObserver<T>(true) {
        @Override
        public void onError(Throwable ex) {
            done.countDown();
        }
        @Override
        public void onFinish() {
            done.countDown();
        }
        @Override
        public void onNext(T value) {
            action.invoke(value);
        }
    });
    try {
        done.await();
    } finally {
        Closeables.close0(reg);
    }
}
/**
 * Blocks until the observable calls finish() or error(). Events are submitted to the given observer.
 * @param <T> the type of the elements
 * @param source the source observable
 * @param observer the observer to invoke for each event
 * @throws InterruptedException if the current thread is interrupted while waiting on
 * the observable.
 */
public static <T> void run(
        @Nonnull final Observable<? extends T> source,
        @Nonnull final Observer<? super T> observer) throws InterruptedException {
    // latch released once the terminal event was relayed
    final CountDownLatch done = new CountDownLatch(1);
    Closeable reg = source.register(new DefaultObserver<T>(true) {
        @Override
        public void onError(Throwable ex) {
            try {
                observer.error(ex);
            } finally {
                done.countDown();
            }
        }
        @Override
        public void onFinish() {
            try {
                observer.finish();
            } finally {
                done.countDown();
            }
        }
        @Override
        public void onNext(T value) {
            observer.next(value);
        }
    });
    try {
        done.await();
    } finally {
        Closeables.close0(reg);
    }
}
/**
 * Blocks until the observable calls finish() or error(). Values are ignored.
 * @param source the source observable
 * @throws InterruptedException if the current thread is interrupted while waiting on
 * the observable.
 */
public static void run(
        @Nonnull final Observable<?> source) throws InterruptedException {
    // latch released on either terminal event; next() values are dropped
    final CountDownLatch done = new CountDownLatch(1);
    Closeable reg = source.register(new DefaultObserver<Object>(true) {
        @Override
        public void onError(Throwable ex) {
            done.countDown();
        }
        @Override
        public void onFinish() {
            done.countDown();
        }
        @Override
        public void onNext(Object value) {
            // values are intentionally ignored
        }
    });
    try {
        done.await();
    } finally {
        Closeables.close0(reg);
    }
}
/**
 * Blocks until the observable calls finish() or error() or the specified amount of time ellapses. Values are ignored.
 * FIXME might be infeasible due the potential side effects along the event stream
 * @param source the source observable
 * @param time the time value
 * @param unit the time unit
 * @return false if the waiting time ellapsed before the run completed
 * @throws InterruptedException if the current thread is interrupted while waiting on
 * the observable.
 */
static boolean run(
        @Nonnull final Observable<?> source,
        long time,
        @Nonnull TimeUnit unit) throws InterruptedException {
    // latch released on either terminal event; next() values are dropped
    final CountDownLatch done = new CountDownLatch(1);
    Closeable reg = source.register(new DefaultObserver<Object>(true) {
        @Override
        public void onError(Throwable ex) {
            done.countDown();
        }
        @Override
        public void onFinish() {
            done.countDown();
        }
        @Override
        public void onNext(Object value) {
            // values are intentionally ignored
        }
    });
    try {
        // bounded wait: false signals a timeout
        return done.await(time, unit);
    } finally {
        Closeables.close0(reg);
    }
}
/**
 * Periodically sample the given source observable, which means tracking the last value of
 * the observable and periodically submitting it to the output observable.
 * FIXME the error() and finish() are instantly propagated
 * @param <T> the type of elements to watch
 * @param source the source of elements
 * @param time the time value to wait
 * @param unit the time unit
 * @return the sampled observable
 */
@Nonnull
public static <T> Observable<T> sample(
        @Nonnull final Observable<? extends T> source,
        final long time,
        @Nonnull final TimeUnit unit) {
    // convenience overload: sample on the shared default scheduler
    final Scheduler pool = DEFAULT_SCHEDULER.get();
    return sample(source, time, unit, pool);
}
/**
 * Periodically sample the given source observable, which means tracking the last value of
 * the observable and periodically submitting it to the output observable.
 * FIXME the error() and finish() are instantly propagated
 * @param <T> the type of elements to watch
 * @param source the source of elements
 * @param time the time value to wait
 * @param unit the time unit
 * @param pool the scheduler pool where the periodic submission should happen.
 * @return the sampled observable
 */
@Nonnull
public static <T> Observable<T> sample(
        @Nonnull final Observable<? extends T> source,
        final long time,
        @Nonnull final TimeUnit unit,
        @Nonnull final Scheduler pool) {
    return new Observable<T>() {
        @Override
        public Closeable register(final Observer<? super T> observer) {
            final DefaultObserver<T> obs = new DefaultObserver<T>(true) {
                /** Are we waiting for the first event? */
                @GuardedBy("lock")
                boolean first = true;
                /** The current value. */
                @GuardedBy("lock")
                T current;
                /** Periodic emitter of the latest value; shares the observer's lock. */
                final Closeable c = pool.schedule(new DefaultRunnable(lock) {
                    @Override
                    protected void onRun() {
                        // emit nothing until the first source value arrived
                        if (!first) {
                            observer.next(current);
                        }
                    }
                }, time, time, unit);
                @Override
                protected void onClose() {
                    // stop the periodic task together with the observer
                    Closeables.close0(c);
                }
                @Override
                public void onError(Throwable ex) {
                    observer.error(ex);
                }
                @Override
                public void onFinish() {
                    observer.finish();
                }
                @Override
                public void onNext(T value) {
                    // only remember the value; the periodic task does the emitting
                    first = false;
                    current = value;
                }
            };
            return Closeables.close(obs, source.register(obs));
        }
    };
}
/**
 * Creates an observable which accumultates the given source and submits each intermediate results to its subscribers.
 * Example:<br>
 * <code>range(0, 5).accumulate((x, y) => x + y)</code> produces a sequence of [0, 1, 3, 6, 10];<br>
 * basically the first event (0) is just relayed and then every pair of values are simply added together and relayed
 * @param <T> the element type to accumulate
 * @param source the source of the accumulation
 * @param accumulator the accumulator which takest the current accumulation value and the current observed value
 * and returns a new accumulated value
 * @return the observable
 */
@Nonnull
public static <T> Observable<T> scan(
        @Nonnull final Observable<? extends T> source,
        @Nonnull final Func2<? super T, ? super T, ? extends T> accumulator) {
    return new Observable<T>() {
        @Override
        public Closeable register(final Observer<? super T> observer) {
            return source.register(new Observer<T>() {
                /** Set once the first value has arrived. */
                boolean hasValue;
                /** The running aggregate. */
                T acc;
                @Override
                public void error(Throwable ex) {
                    observer.error(ex);
                }
                @Override
                public void finish() {
                    observer.finish();
                }
                @Override
                public void next(T value) {
                    // the first value seeds the aggregate; later ones are folded in
                    acc = hasValue ? accumulator.invoke(acc, value) : value;
                    hasValue = true;
                    observer.next(acc);
                }
            });
        }
    };
}
/**
 * Creates an observable which accumultates the given source and submits each intermediate results to its subscribers.
 * Example:<br>
 * <code>range(0, 5).accumulate(1, (x, y) => x + y)</code> produces a sequence of [1, 2, 4, 7, 11];<br>
 * basically the accumulation starts from zero and the first value (0) that comes in is simply added
 * @param <T> the element type to accumulate
 * @param source the source of the accumulation
 * @param seed the initial value of the accumulation
 * @param accumulator the accumulator which takest the current accumulation value and the current observed value
 * and returns a new accumulated value
 * @return the observable
 */
@Nonnull
public static <T> Observable<T> scan(
        @Nonnull final Observable<? extends T> source,
        final T seed,
        @Nonnull final Func2<? super T, ? super T, ? extends T> accumulator) {
    return new Observable<T>() {
        @Override
        public Closeable register(final Observer<? super T> observer) {
            return source.register(new Observer<T>() {
                /** The running aggregate, seeded up front. */
                T acc = seed;
                @Override
                public void error(Throwable ex) {
                    observer.error(ex);
                }
                @Override
                public void finish() {
                    observer.finish();
                }
                @Override
                public void next(T value) {
                    // fold the incoming value into the aggregate and relay it
                    acc = accumulator.invoke(acc, value);
                    observer.next(acc);
                }
            });
        }
    };
}
/**
 * Creates an observable which accumultates the given source and submits each intermediate results to its subscribers.
 * Example:<br>
 * <code>range(1, 5).accumulate0(1, (x, y) => x + y)</code> produces a sequence of [1, 2, 4, 7, 11, 16];<br>
 * basically, it submits the seed value (1) and computes the current aggregate with the current value(1).
 * @param <T> the element type to accumulate
 * @param source the source of the accumulation
 * @param seed the initial value of the accumulation
 * @param accumulator the accumulator which takest the current accumulation value and the current observed value
 * and returns a new accumulated value
 * @return the observable
 */
@Nonnull
public static <T> Observable<T> scan0(
        @Nonnull final Observable<? extends T> source,
        final T seed,
        @Nonnull final Func2<? super T, ? super T, ? extends T> accumulator) {
    return new Observable<T>() {
        @Override
        public Closeable register(final Observer<? super T> observer) {
            return source.register(new Observer<T>() {
                /** The running aggregate. */
                T acc;
                /** True once the seed has been emitted. */
                boolean seedSent;
                @Override
                public void error(Throwable ex) {
                    observer.error(ex);
                }
                @Override
                public void finish() {
                    observer.finish();
                }
                @Override
                public void next(T value) {
                    if (seedSent) {
                        acc = accumulator.invoke(acc, value);
                    } else {
                        // emit the seed itself first, then fold the first value into it
                        seedSent = true;
                        observer.next(seed);
                        acc = accumulator.invoke(seed, value);
                    }
                    observer.next(acc);
                }
            });
        }
    };
}
/**
 * Use the mapper to transform the T source into an U source.
 * @param <T> the type of the original observable
 * @param <U> the type of the new observable
 * @param source the source of Ts
 * @param mapper the mapper from Ts to Us
 * @return the observable on Us
 */
@Nonnull
public static <T, U> Observable<U> select(
        @Nonnull final Observable<? extends T> source,
        @Nonnull final Func1<? super T, ? extends U> mapper) {
    return new Observable<U>() {
        @Override
        public Closeable register(final Observer<? super U> observer) {
            return source.register(new Observer<T>() {
                @Override
                public void error(Throwable ex) {
                    observer.error(ex);
                }
                @Override
                public void finish() {
                    observer.finish();
                }
                @Override
                public void next(T value) {
                    // one output value per input value
                    final U mapped = mapper.invoke(value);
                    observer.next(mapped);
                }
            });
        }
    };
}
/**
 * Transforms the elements of the source observable into Us by using a selector which receives an index indicating
 * how many elements have been transformed this far.
 * @param <T> the source element type
 * @param <U> the output element type
 * @param source the source observable
 * @param selector the selector taking an index and the current T
 * @return the transformed observable
 */
public static <T, U> Observable<U> select(
        @Nonnull final Observable<? extends T> source,
        @Nonnull final Func2<? super Integer, ? super T, ? extends U> selector) {
    return new Observable<U>() {
        @Override
        public Closeable register(final Observer<? super U> observer) {
            return source.register(new Observer<T>() {
                /** Number of values already transformed. */
                int seen;
                @Override
                public void error(Throwable ex) {
                    observer.error(ex);
                }
                @Override
                public void finish() {
                    observer.finish();
                }
                @Override
                public void next(T value) {
                    // the selector receives the zero-based position of the value
                    observer.next(selector.invoke(seen++, value));
                }
            });
        }
    };
}
/**
 * Transform the given source of Ts into Us in a way that the
 * selector might return an observable ofUs for a single T.
 * The observable is fully channelled to the output observable.
 * FIXME not sure how to do it
 * @param <T> the input element type
 * @param <U> the output element type
 * @param source the source of Ts
 * @param selector the selector to return an Iterable of Us
 * @return the
 */
@Nonnull
public static <T, U> Observable<U> selectMany(
        @Nonnull final Observable<? extends T> source,
        @Nonnull final Func1<? super T, ? extends Observable<? extends U>> selector) {
    // delegate to the three-argument overload with a result selector
    // that keeps only the inner value and ignores the outer one
    return selectMany(source, selector, new Func2<T, U, U>() {
        @Override
        public U invoke(T outer, U inner) {
            return inner;
        }
    });
}
/**
 * Creates an observable in which for each of Ts an observable of Vs are
 * requested which in turn will be transformed by the resultSelector for each
 * pair of T and V giving an U.
 * FIXME concurrency related questions
 * @param <T> the source element type
 * @param <U> the intermediate element type
 * @param <V> the output element type
 * @param source the source of Ts
 * @param collectionSelector the selector which returns an observable of intermediate Vs
 * @param resultSelector the selector which gives an U for a T and V
 * @return the observable of Us
 */
@Nonnull
public static <T, U, V> Observable<V> selectMany(
        @Nonnull final Observable<? extends T> source,
        @Nonnull final Func1<? super T, ? extends Observable<? extends U>> collectionSelector,
        @Nonnull final Func2<? super T, ? super U, ? extends V> resultSelector) {
    return new Observable<V>() {
        @Override
        public Closeable register(final Observer<? super V> observer) {
            DefaultObserver<T> obs = new DefaultObserver<T>(false) {
                /** The work in progress counter; starts at 1 counting the outer source itself. */
                final AtomicInteger wip = new AtomicInteger(1);
                /** The active observers. */
                final Map<DefaultObserver<? extends U>, Closeable> active = new HashMap<DefaultObserver<? extends U>, Closeable>();
                @Override
                protected void onClose() {
                    // terminate every still-running inner subscription
                    for (Closeable c : active.values()) {
                        Closeables.close0(c);
                    }
                }
                @Override
                public void onError(Throwable ex) {
                    observer.error(ex);
                    close();
                }
                @Override
                public void onFinish() {
                    // the outer source counts as one completed lane
                    onLast();
                }
                /**
                 * The error signal from the inner.
                 * @param ex the exception
                 */
                void onInnerError(Throwable ex) {
                    onError(ex);
                }
                /** The last one will signal a finish. */
                public void onLast() {
                    if (wip.decrementAndGet() == 0) {
                        observer.finish();
                        close();
                    }
                }
                @Override
                public void onNext(final T t) {
                    Observable<? extends U> sub = collectionSelector.invoke(t);
                    // the inner observer shares the outer lock, so callbacks are serialized
                    DefaultObserver<U> o = new DefaultObserver<U>(lock, true) {
                        @Override
                        protected void onClose() {
                            active.remove(this);
                        }
                        @Override
                        protected void onError(Throwable ex) {
                            onInnerError(ex);
                            close();
                        }
                        @Override
                        protected void onFinish() {
                            onLast();
                            close();
                        }
                        @Override
                        protected void onNext(U u) {
                            // combine the originating outer value with the inner value
                            observer.next(resultSelector.invoke(t, u));
                        }
                    };
                    wip.incrementAndGet();
                    active.put(o, sub.register(o));
                }
            };
            return Closeables.close(obs, source.register(obs));
        }
    };
}
/**
 * Creates an observable of Us in a way when a source T arrives, the observable of
 * Us is completely drained into the output. This is done again and again for
 * each arriving Ts.
 * @param <T> the type of the source, irrelevant
 * @param <U> the output type
 * @param source the source of Ts
 * @param provider the source of Us
 * @return the observable for Us
 */
@Nonnull
public static <T, U> Observable<U> selectMany(
        @Nonnull Observable<? extends T> source,
        @Nonnull Observable<? extends U> provider) {
    // every source element maps to the very same provider observable
    final Func1<T, Observable<? extends U>> always =
            Functions.<T, Observable<? extends U>>constant(provider);
    return selectMany(source, always);
}
/**
 * Transform the given source of Ts into Us in a way that the selector might return zero to multiple elements of Us for a single T.
 * The iterable is flattened and submitted to the output
 * @param <T> the input element type
 * @param <U> the output element type
 * @param source the source of Ts
 * @param selector the selector to return an Iterable of Us
 * @return the
 */
@Nonnull
public static <T, U> Observable<U> selectManyIterable(
        @Nonnull final Observable<? extends T> source,
        @Nonnull final Func1<? super T, ? extends Iterable<? extends U>> selector) {
    return new Observable<U>() {
        @Override
        public Closeable register(final Observer<? super U> observer) {
            return source.register(new Observer<T>() {
                @Override
                public void error(Throwable ex) {
                    observer.error(ex);
                }
                @Override
                public void finish() {
                    observer.finish();
                }
                @Override
                public void next(T value) {
                    // flatten the iterable inline, on the caller's thread
                    for (U item : selector.invoke(value)) {
                        observer.next(item);
                    }
                }
            });
        }
    };
}
/**
 * Compares two sequences and returns whether they are produce the same
 * elements in terms of the null-safe object equality.
 * <p>The equality only stands if the two sequence produces the same
 * amount of values and those values are pairwise equal. If one of the sequences
 * terminates before the other, the equality test will return false.</p>
 * @param <T> the common element type
 * @param first the first source of Ts
 * @param second the second source of Ts
 * @return the new observable
 */
public static <T> Observable<Boolean> sequenceEqual(
        final Observable<? extends T> first,
        final Observable<? extends T> second) {
    // delegate with a null-safe equality comparer
    return sequenceEqual(first, second, new Func2<T, T, Boolean>() {
        @Override
        public Boolean invoke(T a, T b) {
            if (a == b) {
                return true;
            }
            return a != null && a.equals(b);
        }
    });
}
/**
 * Compares two sequences and returns whether they produce the same
 * elements in terms of the comparer function.
 * <p>The equality only stands if the two sequence produces the same
 * amount of values and those values are pairwise equal. If one of the sequences
 * terminates before the other, the equality test will return false.</p>
 * <p>NOTE(review): the internal queues reject <code>null</code> elements —
 * sources emitting nulls are presumably unsupported here; verify against callers.</p>
 * @param <T> the common element type
 * @param first the first source of Ts
 * @param second the second source of Ts
 * @param comparer the equality comparison function
 * @return the new observable
 */
public static <T> Observable<Boolean> sequenceEqual(
        final Observable<? extends T> first,
        final Observable<? extends T> second,
        final Func2<? super T, ? super T, Boolean> comparer) {
    return new Observable<Boolean>() {
        @Override
        public Closeable register(final Observer<? super Boolean> observer) {
            final LinkedBlockingQueue<T> queueU = new LinkedBlockingQueue<T>();
            final LinkedBlockingQueue<T> queueV = new LinkedBlockingQueue<T>();
            final AtomicReference<Closeable> closeBoth = new AtomicReference<Closeable>();
            /** The number of still-running sources. */
            final AtomicInteger wip = new AtomicInteger(2);
            final Lock lockBoth = new ReentrantLock(true);
            final AtomicBoolean result = new AtomicBoolean(true);
            lockBoth.lock();
            try {
                final DefaultObserverEx<T> oU = new DefaultObserverEx<T>(lockBoth, false) {
                    {
                        add("first", first);
                    }
                    @Override
                    public void onError(Throwable ex) {
                        observer.error(ex);
                        Closeables.close0(closeBoth.get());
                    }
                    @Override
                    public void onFinish() {
                        if (wip.decrementAndGet() == 0) {
                            observer.next(result.get());
                            observer.finish();
                            Closeables.close0(closeBoth.get());
                        }
                    }
                    @Override
                    public void onNext(T u) {
                        T v = queueV.poll();
                        if (v != null) {
                            if (!comparer.invoke(u, v)) {
                                result.set(false);
                                this.finish();
                            }
                        } else {
                            if (wip.get() == 2) {
                                queueU.add(u);
                            } else {
                                // the second sequence already terminated but the first
                                // produced a further element: the sequences differ.
                                // (Fix: this branch previously finished without
                                // setting the result to false, unlike its mirror below.)
                                result.set(false);
                                this.finish();
                            }
                        }
                    }
                };
                final DefaultObserverEx<T> oV = new DefaultObserverEx<T>(lockBoth, false) {
                    {
                        add("second", second);
                    }
                    @Override
                    public void onError(Throwable ex) {
                        observer.error(ex);
                        Closeables.close0(closeBoth.get());
                    }
                    @Override
                    public void onFinish() {
                        if (wip.decrementAndGet() == 0) {
                            observer.next(result.get());
                            observer.finish();
                            Closeables.close0(closeBoth.get());
                        }
                    }
                    @Override
                    public void onNext(T v) {
                        T u = queueU.poll();
                        if (u != null) {
                            if (!comparer.invoke(u, v)) {
                                result.set(false);
                                this.finish();
                            }
                        } else {
                            if (wip.get() == 2) {
                                queueV.add(v);
                            } else {
                                // the first sequence already terminated: not equal
                                result.set(false);
                                this.finish();
                            }
                        }
                    }
                };
                Closeable c = Closeables.close(oU, oV);
                closeBoth.set(c);
            } finally {
                lockBoth.unlock();
            }
            return closeBoth.get();
        }
    };
}
/**
 * Returns the single element of the given observable source.
 * If the source is empty, a NoSuchElementException is thrown.
 * If the source has more than one element, a TooManyElementsException is thrown.
 * @param <T> the type of the element
 * @param source the source of Ts
 * @return the single element
 */
@Nonnull
public static <T> T single(
        @Nonnull Observable<? extends T> source) {
    CloseableIterator<T> it = toIterable(source).iterator();
    try {
        // guard clauses: exactly one element must be present
        if (!it.hasNext()) {
            throw new NoSuchElementException();
        }
        T only = it.next();
        if (it.hasNext()) {
            throw new TooManyElementsException();
        }
        return only;
    } finally {
        Closeables.close0(it);
    }
}
/**
 * Returns the single value in the observables.
 * @param <T> the value type
 * @param value the value
 * @return the observable
 */
@Nonnull
public static <T> Observable<T> singleton(
        final T value) {
    // convenience overload: emit on the shared default scheduler
    final Scheduler pool = DEFAULT_SCHEDULER.get();
    return singleton(value, pool);
}
/**
 * Returns the single value in the observables.
 * @param <T> the value type
 * @param value the value
 * @param pool the pool where to submit the value to the observers
 * @return the observable
 */
@Nonnull
public static <T> Observable<T> singleton(
        final T value,
        @Nonnull final Scheduler pool) {
    return new Observable<T>() {
        @Override
        public Closeable register(final Observer<? super T> observer) {
            // emit the one value and complete, all on the scheduler
            Runnable emit = new Runnable() {
                @Override
                public void run() {
                    observer.next(value);
                    observer.finish();
                }
            };
            return pool.schedule(emit);
        }
    };
}
/**
 * Skips the given amount of next() messages from source and relays
 * the rest.
 * @param <T> the element type
 * @param source the source of Ts
 * @param count the number of messages to skip
 * @return the new observable
 */
@Nonnull
public static <T> Observable<T> skip(
        @Nonnull final Observable<? extends T> source,
        final int count) {
    return new Observable<T>() {
        @Override
        public Closeable register(final Observer<? super T> observer) {
            return source.register(new Observer<T>() {
                /** How many leading values still have to be dropped. */
                int toSkip = count;
                @Override
                public void error(Throwable ex) {
                    observer.error(ex);
                }
                @Override
                public void finish() {
                    observer.finish();
                }
                @Override
                public void next(T value) {
                    // drop while the skip budget lasts, relay afterwards
                    if (toSkip > 0) {
                        toSkip--;
                    } else {
                        observer.next(value);
                    }
                }
            });
        }
    };
}
/**
 * Skips the last <code>count</code> elements from the source observable.
 * @param <T> the element type
 * @param source the source of Ts
 * @param count the number of elements to skip at the end
 * @return the new observable
 */
@Nonnull
public static <T> Observable<T> skipLast(
        @Nonnull final Observable<? extends T> source,
        final int count) {
    return new Observable<T>() {
        @Override
        public Closeable register(final Observer<? super T> observer) {
            return source.register(new Observer<T>() {
                /** Holds values back until they are known not to be among the last <code>count</code>. */
                final CircularBuffer<T> delayed = new CircularBuffer<T>(count);
                /** The number of values currently held back. */
                int held;
                @Override
                public void error(Throwable ex) {
                    observer.error(ex);
                }
                @Override
                public void finish() {
                    // whatever is still held back is exactly the skipped tail
                    observer.finish();
                }
                @Override
                public void next(T value) {
                    delayed.add(value);
                    if (++held > count) {
                        observer.next(delayed.take());
                        held--;
                    }
                }
            });
        }
    };
}
/**
 * Skip the source elements until the signaller sends its first element.
 * FIXME: If the signaller sends an error or only finish(), the relaying is never enabled?
 * FIXME: once the singaller fires, it gets deregistered
 * @param <T> the element type of the source
 * @param <U> the element type of the signaller, irrelevant
 * @param source the source of Ts
 * @param signaller the source of Us
 * @return the new observable
 */
@Nonnull
public static <T, U> Observable<T> skipUntil(
        @Nonnull final Observable<? extends T> source,
        @Nonnull final Observable<? extends U> signaller) {
    return new Observable<T>() {
        @Override
        public Closeable register(final Observer<? super T> observer) {
            DefaultObserver<T> obs = new DefaultObserver<T>(true) {
                /** The signaller observer. */
                final DefaultObserver<U> signal;
                /** The signal closeable. */
                final Closeable c;
                /** The skip gate: false while values must be dropped. */
                boolean gate;
                {
                    // the signal observer shares this observer's lock, so gate
                    // updates and value relaying are serialized
                    signal = new DefaultObserver<U>(lock, true) {
                        @Override
                        public void onError(Throwable ex) {
                            innerError(ex);
                        }
                        @Override
                        public void onFinish() {
                            if (!gate) {
                                innerFinish(); // signaller will never turn the gate on
                            }
                        }
                        @Override
                        public void onNext(U value) {
                            // the first signal opens the gate permanently
                            gate = true;
                        }
                    };
                    c = signaller.register(signal);
                }
                /**
                 * The callback for the inner error.
                 * @param ex the inner exception
                 */
                void innerError(Throwable ex) {
                    error(ex);
                }
                /** The callback for an inner finish. */
                void innerFinish() {
                    finish();
                }
                @Override
                protected void onClose() {
                    Closeables.close0(c);
                }
                @Override
                public void onError(Throwable ex) {
                    observer.error(ex);
                }
                @Override
                public void onFinish() {
                    observer.finish();
                }
                @Override
                public void onNext(T value) {
                    // relay only once the gate was opened by the signaller
                    if (gate) {
                        observer.next(value);
                    }
                }
            };
            return Closeables.close(obs, source.register(obs));
        }
    };
}
/**
 * Skips the Ts from source while the specified condition returns true.
 * If the condition returns false, all subsequent Ts are relayed,
 * ignoring the condition further on. Errors and completion
 * is relayed regardless of the condition.
 * @param <T> the element types
 * @param source the source of Ts
 * @param condition the condition that must turn false in order to start relaying
 * @return the new observable
 */
@Nonnull
public static <T> Observable<T> skipWhile(
        @Nonnull final Observable<? extends T> source,
        @Nonnull final Func1<? super T, Boolean> condition) {
    return new Observable<T>() {
        @Override
        public Closeable register(final Observer<? super T> observer) {
            return source.register(new Observer<T>() {
                /** True once relaying has started; the condition is no longer consulted. */
                boolean open;
                @Override
                public void error(Throwable ex) {
                    observer.error(ex);
                }
                @Override
                public void finish() {
                    observer.finish();
                }
                @Override
                public void next(T value) {
                    if (open) {
                        observer.next(value);
                        return;
                    }
                    // the first value failing the condition opens the relay
                    if (!condition.invoke(value)) {
                        open = true;
                        observer.next(value);
                    }
                }
            });
        }
    };
}
/**
 * Invokes the action asynchronously on the given pool and
 * relays its finish() or error() messages.
 * @param action the action to invoke
 * @return the observable
 */
@Nonnull
public static Observable<Void> start(
        @Nonnull final Action0 action) {
    // convenience overload: run on the shared default scheduler
    final Scheduler pool = DEFAULT_SCHEDULER.get();
    return start(action, pool);
}
/**
 * Invokes the action asynchronously on the given pool and
 * relays its finish() or error() messages.
 * @param action the action to invoke
 * @param pool the pool where the action should run
 * @return the observable signalling finish() or error()
 */
@Nonnull
public static Observable<Void> start(
		@Nonnull final Action0 action,
		@Nonnull final Scheduler pool) {
	return new Observable<Void>() {
		@Override
		public Closeable register(final Observer<? super Void> observer) {
			// run the action once per registration; its outcome is
			// translated into a finish() or error() signal
			Runnable task = new Runnable() {
				@Override
				public void run() {
					try {
						action.invoke();
						observer.finish();
					} catch (Throwable t) {
						observer.error(t);
					}
				}
			};
			return pool.schedule(task);
		}
	};
}
/**
 * Invokes the function asynchronously on the default pool and
 * relays its result followed by a finish. Exceptions are
 * relayed as well.
 * <p>Convenience overload delegating to {@code start(func, pool)}
 * with the default scheduler.</p>
 * @param <T> the function return type
 * @param func the function
 * @return the observable
 */
@Nonnull
public static <T> Observable<T> start(
		@Nonnull final Func0<? extends T> func) {
	return start(func, DEFAULT_SCHEDULER.get());
}
/**
 * Invokes the function asynchronously on the given pool and
 * relays its result followed by a finish. Exceptions are
 * relayed as well.
 * @param <T> the function return type
 * @param func the function
 * @param pool the pool where the action should run
 * @return the observable
 */
@Nonnull
public static <T> Observable<T> start(
		@Nonnull final Func0<? extends T> func,
		@Nonnull final Scheduler pool) {
	return new Observable<T>() {
		@Override
		public Closeable register(final Observer<? super T> observer) {
			// evaluate the function once per registration and emit
			// its single result, or the exception it raised
			Runnable task = new Runnable() {
				@Override
				public void run() {
					try {
						T result = func.invoke();
						observer.next(result);
						observer.finish();
					} catch (Throwable t) {
						observer.error(t);
					}
				}
			};
			return pool.schedule(task);
		}
	};
}
/**
 * Start with the given iterable of values before relaying the Ts from the
 * source. The iterable values are emitted on the default pool.
 * @param <T> the element type
 * @param source the source
 * @param values the values to start with
 * @return the new observable
 */
@Nonnull
public static <T> Observable<T> startWith(
		@Nonnull Observable<? extends T> source,
		@Nonnull Iterable<? extends T> values) {
	return startWith(source, values, DEFAULT_SCHEDULER.get());
}
/**
 * Start with the given iterable of values before relaying the Ts from the
 * source. The iterable values are emitted on the given pool.
 * @param <T> the element type
 * @param source the source
 * @param values the values to start with
 * @param pool the pool where the iterable values should be emitted
 * @return the new observable
 */
@Nonnull
public static <T> Observable<T> startWith(
		@Nonnull Observable<? extends T> source,
		@Nonnull Iterable<? extends T> values,
		@Nonnull Scheduler pool) {
	// prefix the iterable (wrapped as an observable on the pool) to the source
	return concat(toObservable(values, pool), source);
}
/**
 * Start with the given single value before relaying the Ts from the
 * source. The value is emitted on the default pool.
 * @param <T> the element type
 * @param source the source
 * @param value the single value to start with
 * @return the new observable
 */
@Nonnull
public static <T> Observable<T> startWith(
		@Nonnull Observable<? extends T> source,
		T value) {
	// a singleton set carries the single prefix value
	return startWith(source, Collections.singleton(value), DEFAULT_SCHEDULER.get());
}
/**
 * Start with the given single value before relaying the Ts from the
 * source. The value is emitted on the given pool.
 * @param <T> the element type
 * @param source the source
 * @param value the value to start with
 * @param pool the pool where the value should be emitted
 * @return the new observable
 */
@Nonnull
public static <T> Observable<T> startWith(
		@Nonnull Observable<? extends T> source,
		T value,
		@Nonnull Scheduler pool) {
	// a singleton set carries the single prefix value
	return startWith(source, Collections.singleton(value), pool);
}
/**
 * Wrap the given observable into a new Observable instance, which calls the original register() method
 * on the supplied pool.
 * @param <T> the type of the objects to observe
 * @param observable the original observable
 * @param pool the pool to perform the original subscribe() call
 * @return the new observable
 */
@Nonnull
public static <T> Observable<T> registerOn(
		@Nonnull final Observable<T> observable,
		@Nonnull final Scheduler pool) {
	return new Observable<T>() {
		@Override
		public Closeable register(final Observer<? super T> observer) {
			// start the registration asynchronously; the queue hands the
			// resulting Closeable over to the deregistration task below
			final BlockingQueue<Closeable> cq = new LinkedBlockingQueue<Closeable>();
			pool.schedule(new Runnable() {
				@Override
				public void run() {
					cq.add(observable.register(observer));
				}
			});
			// the returned closeable waits (on the pool) until the
			// registration has completed, then closes it
			return new Closeable() {
				@Override
				public void close() {
					pool.schedule(new Runnable() {
						@Override
						public void run() {
							try {
								cq.take().close(); // wait until the dispose becomes available then call it
							} catch (InterruptedException e) {
								// restore the interrupt status and keep the cause
								Thread.currentThread().interrupt();
								throw new RuntimeException(e);
							} catch (IOException e) {
								// propagate with the original exception as cause
								throw new RuntimeException(e);
							}
						}
					});
				}
			};
		}
	};
}
/**
 * Computes and signals the sum of the values of the BigDecimal source.
 * The source may not send nulls.
 * <p>The sum is produced as a single value when the source finishes.</p>
 * @param source the source of BigDecimals to aggregate.
 * @return the observable for the sum value
 */
@Nonnull
public static Observable<BigDecimal> sumBigDecimal(
		@Nonnull final Observable<BigDecimal> source) {
	return aggregate(source, Functions.sumBigDecimal(), Functions.<BigDecimal, Integer>identityFirst());
}
/**
 * Computes and signals the sum of the values of the BigInteger source.
 * The source may not send nulls.
 * <p>The sum is produced as a single value when the source finishes.</p>
 * @param source the source of BigIntegers to aggregate.
 * @return the observable for the sum value
 */
@Nonnull
public static Observable<BigInteger> sumBigInteger(
		@Nonnull final Observable<BigInteger> source) {
	return aggregate(source, Functions.sumBigInteger(), Functions.<BigInteger, Integer>identityFirst());
}
/**
 * Computes and signals the sum of the values of the Double source.
 * The source may not send nulls.
 * <p>The sum is produced as a single value when the source finishes.</p>
 * @param source the source of Doubles to aggregate.
 * @return the observable for the sum value
 */
@Nonnull
public static Observable<Double> sumDouble(
		@Nonnull final Observable<Double> source) {
	return aggregate(source, Functions.sumDouble(), Functions.<Double, Integer>identityFirst());
}
/**
 * Computes and signals the sum of the values of the Float source.
 * The source may not send nulls.
 * <p>The sum is produced as a single value when the source finishes.</p>
 * @param source the source of Floats to aggregate.
 * @return the observable for the sum value
 */
@Nonnull
public static Observable<Float> sumFloat(
		@Nonnull final Observable<Float> source) {
	return aggregate(source, Functions.sumFloat(), Functions.<Float, Integer>identityFirst());
}
/**
 * Computes and signals the sum of the values of the Integer source.
 * The source may not send nulls. An empty source produces an empty sum.
 * @param source the source of integers to aggregate.
 * @return the observable for the sum value
 */
@Nonnull
public static Observable<Integer> sumInt(
		@Nonnull final Observable<Integer> source) {
	return aggregate(source, Functions.sumInteger(), Functions.<Integer, Integer>identityFirst());
}
/**
 * Computes and signals the sum of the values of the Integer source by using
 * a double intermediate representation.
 * The source may not send nulls. An empty source produces an empty sum.
 * @param source the source of integers to aggregate.
 * @return the observable for the sum value
 */
@Nonnull
public static Observable<Double> sumIntAsDouble(
		@Nonnull final Observable<Integer> source) {
	// accumulate each integer into a double running total; the final
	// "divide" step of aggregate is the identity, yielding the raw sum
	final Func2<Double, Integer, Double> accumulate = new Func2<Double, Integer, Double>() {
		@Override
		public Double invoke(Double sum, Integer item) {
			return sum + item;
		}
	};
	return aggregate(source, accumulate, Functions.<Double, Integer>identityFirst());
}
/**
 * Computes and signals the sum of the values of the Long source.
 * The source may not send nulls.
 * <p>The sum is produced as a single value when the source finishes.</p>
 * @param source the source of longs to aggregate.
 * @return the observable for the sum value
 */
@Nonnull
public static Observable<Long> sumLong(
		@Nonnull final Observable<Long> source) {
	return aggregate(source, Functions.sumLong(), Functions.<Long, Integer>identityFirst());
}
/**
 * Computes and signals the sum of the values of the Long source by using
 * a double intermediate representation.
 * The source may not send nulls.
 * @param source the source of longs to aggregate.
 * @return the observable for the sum value
 */
@Nonnull
public static Observable<Double> sumLongAsDouble(
		@Nonnull final Observable<Long> source) {
	// accumulate each long into a double running total; the final
	// "divide" step of aggregate is the identity, yielding the raw sum
	final Func2<Double, Long, Double> accumulate = new Func2<Double, Long, Double>() {
		@Override
		public Double invoke(Double sum, Long item) {
			return sum + item;
		}
	};
	return aggregate(source, accumulate, Functions.<Double, Integer>identityFirst());
}
/**
 * Returns an observer which relays Ts from the source observables in a way, when
 * a new inner observable comes in, the previous one is deregistered and the new one is
 * continued with. Basically, it is an unbounded ys.takeUntil(xs).takeUntil(zs)...
 * @param <T> the element type
 * @param sources the source of multiple observables of Ts.
 * @return the new observable
 */
public static <T> Observable<T> switchToNext(final Observable<? extends Observable<? extends T>> sources) {
	return new Observable<T>() {
		@Override
		public Closeable register(final Observer<? super T> observer) {
			// the outer observer receives inner observables; only the most
			// recently received inner sequence is relayed at any one time
			DefaultObserver<Observable<? extends T>> outer
			= new DefaultObserver<Observable<? extends T>>(false) {
				/** The registration of the currently active inner sequence. */
				@GuardedBy("lock")
				Closeable inner;
				// shares the outer's lock so inner and outer events are serialized
				DefaultObserver<T> innerObserver = new DefaultObserver<T>(lock, true) {
					@Override
					protected void onError(Throwable ex) {
						innerError(ex);
					}
					@Override
					protected void onFinish() {
						innerFinish();
					}
					@Override
					protected void onNext(T value) {
						observer.next(value);
					}
				};
				/** Called from the inner observer when an error condition occurs. */
				void innerError(Throwable ex) {
					error(ex);
				}
				/** Called from the inner observer when it finished. */
				void innerFinish() {
					observer.finish();
					close();
				}
				@Override
				protected void onClose() {
					Closeables.close0(inner);
				}
				@Override
				protected void onError(Throwable ex) {
					observer.error(ex);
					close();
				}
				@Override
				protected void onFinish() {
					// intentionally empty: completion of the outer sequence
					// alone does not complete the output; the currently
					// active inner sequence decides that
				}
				@Override
				protected void onNext(Observable<? extends T> value) {
					// a new inner observable arrived: drop the previous
					// registration and continue with the new sequence
					Closeables.close0(inner);
					inner = value.register(innerObserver);
				}
			};
			return sources.register(outer);
		}
	};
}
/**
 * Creates an observable which takes the specified number of
 * Ts from the source, unregisters and completes.
 * <p>If {@code count} is zero or negative, the returned observable
 * completes on the first source event without relaying any value
 * (previously it incorrectly relayed the entire source).</p>
 * @param <T> the element type
 * @param source the source of Ts
 * @param count the number of elements to relay
 * @return the new observable
 */
@Nonnull
public static <T> Observable<T> take(
		@Nonnull final Observable<? extends T> source,
		final int count) {
	return new Observable<T>() {
		@Override
		public Closeable register(final Observer<? super T> observer) {
			DefaultObserverEx<T> o = new DefaultObserverEx<T>(true) {
				/** The number of elements still to relay. */
				protected int i = count;
				@Override
				protected void onNext(T value) {
					// guard: for count <= 0 the countdown below would never
					// reach zero, so complete immediately instead
					if (i <= 0) {
						finish();
						return;
					}
					observer.next(value);
					if (--i == 0) {
						finish();
					}
				}
				@Override
				protected void onError(Throwable ex) {
					observer.error(ex);
				}
				@Override
				protected void onFinish() {
					observer.finish();
				}
			};
			return o.registerWith(source);
		}
	};
}
/**
 * Returns an observable which returns the last <code>count</code>
 * elements from the source observable, emitted only once the
 * source has finished.
 * @param <T> the element type
 * @param source the source of the elements
 * @param count the number elements to return
 * @return the new observable
 */
public static <T> Observable<T> takeLast(final Observable<? extends T> source, final int count) {
	return new Observable<T>() {
		@Override
		public Closeable register(final Observer<? super T> observer) {
			return source.register(new Observer<T>() {
				/** Ring buffer retaining at most the last {@code count} items. */
				final CircularBuffer<T> tail = new CircularBuffer<T>(count);
				@Override
				public void next(T value) {
					// older items are overwritten once capacity is reached
					tail.add(value);
				}
				@Override
				public void finish() {
					// drain the retained tail in arrival order, then complete
					while (!tail.isEmpty()) {
						observer.next(tail.take());
					}
					observer.finish();
				}
				@Override
				public void error(Throwable ex) {
					observer.error(ex);
				}
			});
		}
	};
}
/**
 * Creates an observable which takes values from the source until
 * the signaller produces a value. If the signaller never signals,
 * all source elements are relayed.
 * @param <T> the element type
 * @param <U> the signaller element type, irrelevant
 * @param source the source of Ts
 * @param signaller the source of Us
 * @return the new observable
 */
@Nonnull
public static <T, U> Observable<T> takeUntil(
		@Nonnull final Observable<? extends T> source,
		@Nonnull final Observable<U> signaller) {
	return new Observable<T>() {
		@Override
		public Closeable register(final Observer<? super T> observer) {
			// fair lock shared between the source and signaller observers
			// so their events are serialized against each other
			final Lock lock0 = new ReentrantLock(true);
			DefaultObserverEx<T> o = new DefaultObserverEx<T>(lock0, true) {
				@Override
				protected void onRegister() {
					// any event from the signaller — a value or its
					// completion — terminates the output sequence
					add("signaller", signaller.register(new Observer<U>() {
						@Override
						public void next(U value) {
							innerFinish();
						}
						@Override
						public void error(Throwable ex) {
							innerError(ex);
						}
						@Override
						public void finish() {
							innerFinish();
						}
					}));
				}
				/** Error call from the inner. */
				protected void innerError(Throwable t) {
					error(t);
				}
				/** Finish call from the inner. */
				protected void innerFinish() {
					finish();
				}
				@Override
				protected void onNext(T value) {
					observer.next(value);
				}
				@Override
				protected void onError(Throwable ex) {
					observer.error(ex);
				}
				@Override
				protected void onFinish() {
					observer.finish();
				}
			};
			return o.registerWith(source);
		}
	};
}
/**
 * Creates an observable which takes values from the source while
 * the predicate returns true for them; at the first failing element the
 * output completes and the source registration is closed.
 * @param <T> the element type
 * @param source the source of Ts
 * @param predicate the predicate
 * @return the new observable
 */
@Nonnull
public static <T> Observable<T> takeWhile(
		@Nonnull final Observable<? extends T> source,
		@Nonnull final Func1<? super T, Boolean> predicate) {
	return new Observable<T>() {
		@Override
		public Closeable register(final Observer<? super T> observer) {
			DefaultObserverEx<T> o = new DefaultObserverEx<T>(true) {
				@Override
				public void onNext(T value) {
					if (!predicate.invoke(value)) {
						// condition failed: complete downstream and
						// detach from the source (remaining values skipped)
						observer.finish();
						close();
						return;
					}
					observer.next(value);
				}
				@Override
				public void onError(Throwable ex) {
					observer.error(ex);
				}
				@Override
				public void onFinish() {
					observer.finish();
				}
			};
			return o.registerWith(source);
		}
	};
}
/**
 * Creates an observable which fires the last value
 * from source when the given timespan elapsed without a new
 * value occurring from the source. It is basically how Content Assistant
 * popup works after the user pauses in its typing. Uses the default scheduler.
 * @param <T> the value type
 * @param source the source of Ts
 * @param delay how much time should elapse since the last event to actually forward that event
 * @param unit the delay time unit
 * @return the observable
 */
@Nonnull
public static <T> Observable<T> throttle(
		@Nonnull final Observable<? extends T> source,
		final long delay,
		@Nonnull final TimeUnit unit) {
	return throttle(source, delay, unit, DEFAULT_SCHEDULER.get());
}
/**
 * Creates an observable which fires the last value
 * from source when the given timespan elapsed without a new
 * value occurring from the source. It is basically how Content Assistant
 * popup works after the user pauses in its typing.
 * @param <T> the value type
 * @param source the source of Ts
 * @param delay how much time should elapse since the last event to actually forward that event
 * @param unit the delay time unit
 * @param pool the pool where the delay-watcher should operate
 * @return the observable
 */
@Nonnull
public static <T> Observable<T> throttle(
		@Nonnull final Observable<? extends T> source,
		final long delay,
		@Nonnull final TimeUnit unit,
		@Nonnull final Scheduler pool) {
	return new Observable<T>() {
		@Override
		public Closeable register(final Observer<? super T> observer) {
			final DefaultObserver<T> obs = new DefaultObserver<T>(true) {
				/** The last seen value. */
				T last;
				/** The closeable of the currently pending timer, if any. */
				Closeable c;
				/** The timeout action: emits the last value unless cancelled. */
				final DefaultRunnable r = new DefaultRunnable(lock) {
					@Override
					public void onRun() {
						if (!cancelled()) {
							observer.next(last);
						}
					}
				};
				@Override
				protected void onClose() {
					// cancel any pending emission on deregistration
					Closeables.close0(c);
				}
				@Override
				public void onError(Throwable ex) {
					observer.error(ex);
				}
				@Override
				public void onFinish() {
					observer.finish();
				}
				@Override
				public void onNext(T value) {
					// remember the value and restart the quiet-period timer;
					// only a value surviving the full delay gets emitted
					last = value;
					Closeables.close0(c);
					c = pool.schedule(r, delay, unit);
				}
			};
			return Closeables.close(obs, source.register(obs));
		}
	};
}
/**
 * Creates an observable which instantly signals the exception to
 * its subscribers while running on the default pool.
 * @param <T> the element type, irrelevant
 * @param ex the exception to throw
 * @return the new observable
 */
@Nonnull
public static <T> Observable<T> throwException(
		@Nonnull final Throwable ex) {
	return throwException(ex, DEFAULT_SCHEDULER.get());
}
/**
 * Creates an observable which instantly signals the exception to
 * its subscribers while running on the given pool.
 * @param <T> the element type, irrelevant
 * @param ex the exception to throw
 * @param pool the pool from where to send the values
 * @return the new observable
 */
@Nonnull
public static <T> Observable<T> throwException(
		@Nonnull final Throwable ex,
		@Nonnull final Scheduler pool) {
	return new Observable<T>() {
		@Override
		public Closeable register(final Observer<? super T> observer) {
			// each registration schedules a single error() delivery
			Runnable task = new Runnable() {
				@Override
				public void run() {
					observer.error(ex);
				}
			};
			return pool.schedule(task);
		}
	};
}
/**
 * Returns an observable which produces an ordered sequence of numbers with the specified delay.
 * It uses the default scheduler pool.
 * @param start the starting value of the tick, inclusive
 * @param end the finishing value of the tick, exclusive
 * @param delay the delay value
 * @param unit the time unit of the delay
 * @return the observer
 */
@Nonnull
public static Observable<Long> tick(
		final long start,
		final long end,
		final long delay,
		@Nonnull final TimeUnit unit) {
	return tick(start, end, delay, unit, DEFAULT_SCHEDULER.get());
}
/**
 * Returns an observable which produces an ordered sequence of numbers with the specified delay.
 * @param start the starting value of the tick, inclusive
 * @param end the finishing value of the tick, exclusive
 * @param delay the delay value
 * @param unit the time unit of the delay
 * @param pool the scheduler pool for the wait
 * @return the observer
 * @throws IllegalArgumentException if start &gt; end
 */
@Nonnull
public static Observable<Long> tick(
		final long start,
		final long end,
		final long delay,
		@Nonnull final TimeUnit unit,
		@Nonnull final Scheduler pool) {
	if (start > end) {
		throw new IllegalArgumentException("ensure start <= end");
	}
	return new Observable<Long>() {
		@Override
		public Closeable register(final Observer<? super Long> observer) {
			// scheduled periodically; each run emits one value
			return pool.schedule(new DefaultRunnable() {
				/** The current value. */
				long current = start;
				@Override
				protected void onRun() {
					if (current < end && !cancelled()) {
						observer.next(current++);
					} else {
						// range exhausted (or observer deregistered):
						// complete once and stop the periodic schedule
						if (!cancelled()) {
							observer.finish();
						}
						cancel();
					}
				}
			}, delay, delay, unit);
		}
	};
}
/**
 * Returns an observable which produces an ordered sequence of numbers
 * (0, 1, 2, ... up to Long.MAX_VALUE exclusive) with the specified delay.
 * It uses the default scheduler pool.
 * @param delay the delay value
 * @param unit the time unit of the delay
 * @return the observer
 */
@Nonnull
public static Observable<Long> tick(
		final long delay,
		@Nonnull final TimeUnit unit) {
	// note: the former @Nonnull on the primitive long parameter was
	// meaningless (nullability applies to reference types) and removed
	return tick(0, Long.MAX_VALUE, delay, unit, DEFAULT_SCHEDULER.get());
}
/**
 * Creates an observable which relays events if they arrive
 * from the source observable within the specified amount of time
 * or it signals a java.util.concurrent.TimeoutException.
 * @param <T> the element type to observe
 * @param source the source observable
 * @param time the maximum allowed timespan between events
 * @param unit the time unit
 * @return the observer.
 */
@Nonnull
public static <T> Observable<T> timeout(
		@Nonnull final Observable<? extends T> source,
		final long time,
		@Nonnull final TimeUnit unit) {
	return timeout(source, time, unit, DEFAULT_SCHEDULER.get());
}
/**
 * Creates an observable which relays events if they arrive
 * from the source observable within the specified amount of time
 * or it switches to the <code>other</code> observable.
 * FIXME not sure if the timeout should happen only when
 * distance between elements get to large or just the first element
 * does not arrive within the specified timespan.
 * @param <T> the element type to observe
 * @param source the source observable
 * @param time the maximum allowed timespan between events
 * @param unit the time unit
 * @param other the other observable to continue with in case a timeout occurs
 * @return the observer.
 */
@Nonnull
public static <T> Observable<T> timeout(
		@Nonnull final Observable<? extends T> source,
		final long time,
		@Nonnull final TimeUnit unit,
		@Nonnull final Observable<? extends T> other) {
	return timeout(source, time, unit, other, DEFAULT_SCHEDULER.get());
}
/**
 * Creates an observable which relays events if they arrive
 * from the source observable within the specified amount of time
 * or it switches to the <code>other</code> observable.
 * FIXME not sure if the timeout should happen only when
 * distance between elements get to large or just the first element
 * does not arrive within the specified timespan.
 * @param <T> the element type to observe
 * @param source the source observable
 * @param time the maximum allowed timespan between events
 * @param unit the time unit
 * @param other the other observable to continue with in case a timeout occurs
 * @param pool the scheduler pool for the timeout evaluation
 * @return the observer.
 */
@Nonnull
public static <T> Observable<T> timeout(
		@Nonnull final Observable<? extends T> source,
		final long time,
		@Nonnull final TimeUnit unit,
		@Nonnull final Observable<? extends T> other,
		@Nonnull final Scheduler pool) {
	return new Observable<T>() {
		@Override
		public Closeable register(final Observer<? super T> observer) {
			DefaultObserver<T> obs = new DefaultObserver<T>(true) {
				/** The current source. */
				@GuardedBy("lock")
				Closeable src;
				/** The current timer.*/
				@GuardedBy("lock")
				Closeable timer;
				// instance initializer: register with the source and arm the
				// first timeout while holding the lock, so no source or
				// timer event can race the setup
				{
					lock.lock();
					try {
						src = source.register(this);
						registerTimer();
					} finally {
						lock.unlock();
					}
				}
				@Override
				protected void onClose() {
					Closeables.close0(timer);
					Closeables.close0(src);
				}
				@Override
				protected void onError(Throwable ex) {
					observer.error(ex);
				}
				@Override
				protected void onFinish() {
					observer.finish();
				}
				@Override
				protected void onNext(T value) {
					// a value arrived in time: cancel the pending timer,
					// forward the value, then re-arm the timeout
					if (timer != null) {
						Closeables.close0(timer);
						timer = null;
					}
					observer.next(value);
					registerTimer();
				}
				/**
				 * Register the timer that when fired, switches to the second
				 * observable sequence
				 */
				private void registerTimer() {
					timer = pool.schedule(new DefaultRunnable(lock) {
						@Override
						public void onRun() {
							if (!cancelled()) {
								// timeout fired: drop the original source and
								// hand the observer over to the fallback
								Closeables.close0(src);
								timer = null;
								src = other.register(observer);
							}
						}
					}, time, unit);
				}
			};
			return obs;
		}
	};
}
/**
 * Creates an observable which relays events if they arrive
 * from the source observable within the specified amount of time
 * or it signals a java.util.concurrent.TimeoutException.
 * FIXME not sure if the timeout should happen only when
 * distance between elements get to large or just the first element
 * does not arrive within the specified timespan.
 * @param <T> the element type to observe
 * @param source the source observable
 * @param time the maximum allowed timespan between events
 * @param unit the time unit
 * @param pool the scheduler pool for the timeout evaluation
 * @return the observer.
 */
@Nonnull
public static <T> Observable<T> timeout(
		@Nonnull final Observable<? extends T> source,
		final long time,
		@Nonnull final TimeUnit unit,
		@Nonnull final Scheduler pool) {
	// on timeout, switch to a sequence that immediately signals TimeoutException
	return timeout(source, time, unit,
			Reactive.<T>throwException(new TimeoutException()), pool);
}
/**
 * Creates an array from the observable sequence elements by using the given
 * array as the template to create a dynamically typed array of Ts.
 * <p><b>Exception semantics:</b> if the source throws an exception, that exception
 * is forwarded (e.g., no partial array is created).</p>
 * @param <T> the source element type
 * @param source the source of Ts
 * @param a the template array, does not change its value
 * @return the observable
 */
public static <T> Observable<T[]> toArray(
		@Nonnull final Observable<? extends T> source,
		@Nonnull final T[] a) {
	final Class<?> componentType = a.getClass().getComponentType();
	return new Observable<T[]>() {
		@Override
		public Closeable register(final Observer<? super T[]> observer) {
			return source.register(new Observer<T>() {
				/** Accumulates the source elements until completion. */
				final List<T> items = new ArrayList<T>();
				@Override
				public void next(T value) {
					items.add(value);
				}
				@Override
				public void finish() {
					// build a correctly typed array via reflection and emit it
					@SuppressWarnings("unchecked") T[] arr = (T[])Array.newInstance(componentType, items.size());
					observer.next(items.toArray(arr));
					observer.finish();
				}
				@Override
				public void error(Throwable ex) {
					observer.error(ex);
				}
			});
		}
	};
}
/**
 * Creates an Object array from the observable sequence elements.
 * <p><b>Exception semantics:</b> if the source throws an exception, that exception
 * is forwarded (e.g., no partial array is created).</p>
 * @param source the source of anything
 * @return the object array
 */
@Nonnull
public static Observable<Object[]> toArray(@Nonnull final Observable<?> source) {
	// an empty Object[] serves as the type template for the generic overload
	return toArray(source, new Object[0]);
}
/**
 * Convert the given observable instance into a classical iterable instance.
 * <p>The resulting iterable does not support the {@code remove()} method.</p>
 * @param <T> the element type to iterate
 * @param observable the original observable
 * @return the iterable
 */
@Nonnull
public static <T> CloseableIterable<T> toIterable(
		@Nonnull final Observable<? extends T> observable) {
	return new CloseableIterable<T>() {
		@Override
		public CloseableIterator<T> iterator() {
			// the observer pushes every event into this queue as an Option:
			// some(value), none() for completion, error(ex) for failures
			final LinkedBlockingQueue<Option<T>> queue = new LinkedBlockingQueue<Option<T>>();
			final Closeable c = observable.register(new Observer<T>() {
				@Override
				public void error(Throwable ex) {
					queue.add(Option.<T>error(ex));
				}
				@Override
				public void finish() {
					queue.add(Option.<T>none());
				}
				@Override
				public void next(T value) {
					queue.add(Option.some(value));
				}
			});
			return new CloseableIterator<T>() {
				/** Close the association if there is no more elements. */
				Closeable close = c;
				/** The peek value due hasNext. */
				Option<T> peek;
				/** Indicator if there was a hasNext() call before the next() call. */
				boolean peekBeforeNext;
				/** Close the helper observer. */
				@Override
				public void close() throws IOException {
					Closeable cl = close;
					close = null;
					if (cl != null) {
						cl.close();
					}
				}
				@Override
				protected void finalize() throws Throwable {
					close();
				}
				@Override
				public boolean hasNext() {
					// note: peek starts as null, which differs from the
					// none() singleton, so the first call always blocks
					// on the queue for an event
					if (peek != Option.none()) {
						if (!peekBeforeNext) {
							try {
								peek = queue.take();
							} catch (InterruptedException e) {
								throw new RuntimeException(e);
							}
						}
						peekBeforeNext = true;
					}
					// none() marks the end of the sequence; error options
					// surface as exceptions from peek.value() in next()
					boolean result = peek != Option.none();
					return result;
				}
				@Override
				public T next() {
					if (peekBeforeNext) {
						// consume the element fetched by the preceding hasNext()
						peekBeforeNext = false;
						if (peek != Option.none()) {
							return peek.value();
						}
						throw new NoSuchElementException();
					}
					peekBeforeNext = false;
					if (peek != Option.none()) {
						// no preceding hasNext(): block for the next event here
						try {
							peek = queue.take();
						} catch (InterruptedException e) {
							throw new RuntimeException(e);
						}
						if (peek != Option.none()) {
							return peek.value();
						}
					}
					throw new NoSuchElementException();
				}
				@Override
				public void remove() {
					throw new UnsupportedOperationException();
				}
			};
		}
	};
}
/**
 * Collects the elements of the source observable into a single list,
 * emitted when the source finishes.
 * <p><b>Exception semantics:</b> a source error is forwarded without
 * emitting any partial list.</p>
 * @param <T> the source element type
 * @param source the source observable
 * @return the new observable
 */
public static <T> Observable<List<T>> toList(
	final Observable<? extends T> source
) {
	return new Observable<List<T>>() {
		@Override
		public Closeable register(final Observer<? super List<T>> observer) {
			return source.register(new Observer<T>() {
				/** Accumulated elements, emitted as a copy on finish. */
				final List<T> collected = new LinkedList<T>();
				@Override
				public void next(T value) {
					collected.add(value);
				}
				@Override
				public void finish() {
					// emit a defensive copy so the downstream owns the list
					observer.next(new ArrayList<T>(collected));
					observer.finish();
				}
				@Override
				public void error(Throwable ex) {
					observer.error(ex);
				}
			});
		}
	};
}
/**
 * Maps the given source of Ts by using the key extractor and
 * returns a single Map of them. The keys are compared against each other
 * by the <code>Object.equals()</code> semantics.
 * <p><b>Exception semantics:</b> if the source throws an exception, that exception
 * is forwarded (e.g., no partial map is created).</p>
 * @param <T> the element type
 * @param <K> the key type
 * @param <V> the value type
 * @param source the source of Ts
 * @param keySelector the key selector
 * @param valueSelector the value selector
 * @return the new observable
 */
public static <T, K, V> Observable<Map<K, V>> toMap(
	final Observable<? extends T> source,
	final Func1<? super T, ? extends K> keySelector,
	final Func1<? super T, ? extends V> valueSelector
) {
	return new Observable<Map<K, V>>() {
		@Override
		public Closeable register(final Observer<? super Map<K, V>> observer) {
			return source.register(new Observer<T>() {
				/** Accumulates pairs; a later duplicate key overwrites the earlier value. */
				final Map<K, V> accumulator = new HashMap<K, V>();
				@Override
				public void next(T value) {
					K k = keySelector.invoke(value);
					V v = valueSelector.invoke(value);
					accumulator.put(k, v);
				}
				@Override
				public void finish() {
					observer.next(accumulator);
					observer.finish();
				}
				@Override
				public void error(Throwable ex) {
					observer.error(ex);
				}
			});
		}
	};
}
/**
 * Maps the given source of Ts by using the key and value extractor and
 * returns a single Map of them, comparing keys with the supplied function.
 * <p><b>Exception semantics:</b> if the source throws an exception, that exception
 * is forwarded (e.g., no partial map is created).</p>
 * @param <T> the element type
 * @param <K> the key type
 * @param <V> the value type
 * @param source the source of Ts
 * @param keySelector the key selector
 * @param valueSelector the value selector
 * @param keyComparer the comparison function for keys
 * @return the new observable
 */
public static <T, K, V> Observable<Map<K, V>> toMap(
	final Observable<? extends T> source,
	final Func1<? super T, ? extends K> keySelector,
	final Func1<? super T, ? extends V> valueSelector,
	final Func2<? super K, ? super K, Boolean> keyComparer
) {
	return new Observable<Map<K, V>>() {
		@Override
		public Closeable register(final Observer<? super Map<K, V>> observer) {
			return source.register(new Observer<T>() {
				/**
				 * The key class with custom equality comparer.
				 * Wrapping lets a plain HashMap honor {@code keyComparer};
				 * note hashCode() still uses the key's own hash, so the
				 * comparer is only consulted within a hash bucket.
				 */
				class Key {
					/** The key value. */
					final K key;
					/**
					 * Constructor.
					 * @param key the key
					 */
					Key(K key) {
						this.key = key;
					}
					@Override
					public boolean equals(Object obj) {
						if (obj instanceof Key) {
							return keyComparer.invoke(key, ((Key)obj).key);
						}
						return false;
					}
					@Override
					public int hashCode() {
						return key != null ? key.hashCode() : 0;
					}
				}
				/** The map accumulating wrapped keys to values. */
				final Map<Key, V> map = new HashMap<Key, V>();
				@Override
				public void error(Throwable ex) {
					observer.error(ex);
				}
				@Override
				public void finish() {
					// unwrap the keys before handing the map downstream
					Map<K, V> result = new HashMap<K, V>();
					for (Map.Entry<Key, V> e : map.entrySet()) {
						result.put(e.getKey().key, e.getValue());
					}
					observer.next(result);
					observer.finish();
				}
				@Override
				public void next(T value) {
					Key k = new Key(keySelector.invoke(value));
					V v = valueSelector.invoke(value);
					map.put(k, v);
				}
			});
		}
	};
}
/**
 * Maps the given source of Ts by using the key extractor and
 * returns a single Map of them, with the elements themselves as values.
 * <p><b>Exception semantics:</b> if the source throws an exception, that exception
 * is forwarded (e.g., no partial map is created).</p>
 * @param <T> the element type
 * @param <K> the key type
 * @param source the source of Ts
 * @param keySelector the key selector
 * @param keyComparer the key comparer function
 * @return the new observable
 */
public static <K, T> Observable<Map<K, T>> toMap(
	final Observable<? extends T> source,
	final Func1<? super T, ? extends K> keySelector,
	final Func2<? super K, ? super K, Boolean> keyComparer
) {
	// identity value selector: the element itself becomes the map value
	return toMap(source, keySelector, Functions.<T>identity(), keyComparer);
}
/**
 * Maps the given source of Ts by using the key extractor and
 * returns a single Map of them, with the elements themselves as values.
 * The keys are compared against each other
 * by the <code>Object.equals()</code> semantics.
 * <p><b>Exception semantics:</b> if the source throws an exception, that exception
 * is forwarded (e.g., no partial map is created).</p>
 * @param <T> the element type
 * @param <K> the key type
 * @param source the source of Ts
 * @param keySelector the key selector
 * @return the new observable
 */
public static <K, T> Observable<Map<K, T>> toMap(
	final Observable<T> source,
	final Func1<? super T, ? extends K> keySelector
) {
	// identity value selector: the element itself becomes the map value
	return toMap(source, keySelector, Functions.<T>identity());
}
	/**
	 * Maps the given source of Ts by using the key extractor and
	 * returns a single multi-map of them. The keys are compared against each other
	 * by the <code>Object.equals()</code> semantics. The values of the
	 * multi-map are the source elements themselves.
	 * <p><b>Exception semantics:</b> if the source throws an exception, that exception
	 * is forwarded (e.g., no partial map is created).</p>
	 * @param <T> the element type
	 * @param <K> the key type
	 * @param source the source of Ts
	 * @param keySelector the key selector
	 * @param collectionSupplier the function which returns a collection to hold the Vs.
	 * @return the new observable
	 */
	public static <T, K> Observable<Map<K, Collection<T>>> toMultiMap(
			final Observable<? extends T> source,
			final Func1<? super T, ? extends K> keySelector,
			final Func0<? extends Collection<T>> collectionSupplier
	) {
		// delegate with the identity value selector
		return toMultiMap(
				source,
				keySelector,
				collectionSupplier,
				Functions.<T>identity());
	}
	/**
	 * Maps the given source of Ts by using the key extractor and
	 * returns a single multi-map of them. The values of the
	 * multi-map are the source elements themselves.
	 * <p><b>Exception semantics:</b> if the source throws an exception, that exception
	 * is forwarded (e.g., no partial map is created).</p>
	 * @param <T> the element type
	 * @param <K> the key type
	 * @param source the source of Ts
	 * @param keySelector the key selector
	 * @param collectionSupplier the function which returns a collection to hold the Vs.
	 * @param keyComparer the comparison function for keys
	 * @return the new observable
	 */
	public static <T, K> Observable<Map<K, Collection<T>>> toMultiMap(
			final Observable<? extends T> source,
			final Func1<? super T, ? extends K> keySelector,
			final Func0<? extends Collection<T>> collectionSupplier,
			final Func2<? super K, ? super K, Boolean> keyComparer
	) {
		// delegate with the identity value selector
		return toMultiMap(
				source,
				keySelector,
				collectionSupplier,
				Functions.<T>identity(),
				keyComparer);
	}
/**
* Maps the given source of Ts by using the key and value extractor and
* returns a single multi-map of them. The keys are compared against each other
* by the <code>Object.equals()</code> semantics.
* <p><b>Exception semantics:</b> if the source throws an exception, that exception
* is forwarded (e.g., no partial map is created).</p>
* @param <T> the element type
* @param <K> the key type
* @param <V> the value type
* @param source the source of Ts
* @param keySelector the key selector
* @param collectionSupplier the function which retuns a collection to hold the Vs.
* @param valueSelector the value selector
* @return the new observable
* @see Functions#listSupplier()
* @see Functions#setSupplier()
*/
public static <T, K, V> Observable<Map<K, Collection<V>>> toMultiMap(
final Observable<? extends T> source,
final Func1<? super T, ? extends K> keySelector,
final Func0<? extends Collection<V>> collectionSupplier,
final Func1<? super T, ? extends V> valueSelector
) {
return new Observable<Map<K, Collection<V>>>() {
@Override
public Closeable register(final Observer<? super Map<K, Collection<V>>> observer) {
return source.register(new Observer<T>() {
/** The map. */
final Map<K, Collection<V>> map = new HashMap<K, Collection<V>>();
@Override
public void error(Throwable ex) {
observer.error(ex);
}
@Override
public void finish() {
observer.next(map);
observer.finish();
}
@Override
public void next(T value) {
K k = keySelector.invoke(value);
Collection<V> coll = map.get(k);
if (coll == null) {
coll = collectionSupplier.invoke();
map.put(k, coll);
}
V v = valueSelector.invoke(value);
coll.add(v);
}
});
}
};
}
	/**
	 * Maps the given source of Ts by using the key and value extractor and
	 * returns a single multi-map of them.
	 * <p><b>Exception semantics:</b> if the source throws an exception, that exception
	 * is forwarded (e.g., no partial map is created).</p>
	 * @param <T> the element type
	 * @param <K> the key type
	 * @param <V> the value type
	 * @param source the source of Ts
	 * @param keySelector the key selector
	 * @param collectionSupplier the function which returns a collection to hold the Vs.
	 * @param valueSelector the value selector
	 * @param keyComparer the comparison function for keys
	 * @return the new observable
	 */
	public static <T, K, V> Observable<Map<K, Collection<V>>> toMultiMap(
			final Observable<? extends T> source,
			final Func1<? super T, ? extends K> keySelector,
			final Func0<? extends Collection<V>> collectionSupplier,
			final Func1<? super T, ? extends V> valueSelector,
			final Func2<? super K, ? super K, Boolean> keyComparer
	) {
		return new Observable<Map<K, Collection<V>>>() {
			@Override
			public Closeable register(final Observer<? super Map<K, Collection<V>>> observer) {
				return source.register(new Observer<T>() {
					/** The key class with custom equality comparer. */
					class Key {
						/** The key value. */
						final K key;
						/**
						 * Constructor.
						 * @param key the key
						 */
						Key(K key) {
							this.key = key;
						}
						@Override
						public boolean equals(Object obj) {
							if (obj instanceof Key) {
								return keyComparer.invoke(key, ((Key)obj).key);
							}
							return false;
						}
						// NOTE(review): hashCode() is based on key.hashCode() while
						// equals() uses keyComparer; the comparer must be consistent
						// with hashCode() for correct HashMap behavior - confirm.
						@Override
						public int hashCode() {
							return key != null ? key.hashCode() : 0;
						}
					}
					/** The map accumulating the results, keyed by the wrapper Key. */
					final Map<Key, Collection<V>> map = new HashMap<Key, Collection<V>>();
					@Override
					public void error(Throwable ex) {
						// no partial result: just forward the failure
						observer.error(ex);
					}
					@Override
					public void finish() {
						// unwrap the Key objects before emitting the final map
						Map<K, Collection<V>> result = new HashMap<K, Collection<V>>();
						for (Map.Entry<Key, Collection<V>> e : map.entrySet()) {
							result.put(e.getKey().key, e.getValue());
						}
						observer.next(result);
						observer.finish();
					}
					@Override
					public void next(T value) {
						Key k = new Key(keySelector.invoke(value));
						Collection<V> coll = map.get(k);
						if (coll == null) {
							// first occurrence of this key: ask for a fresh collection
							coll = collectionSupplier.invoke();
							map.put(k, coll);
						}
						V v = valueSelector.invoke(value);
						coll.add(v);
					}
				});
			}
		};
	}
/**
* Wrap the iterable object into an observable and use the
* default pool when generating the iterator sequence.
* @param <T> the type of the values
* @param iterable the iterable instance
* @return the observable
*/
@Nonnull
public static <T> Observable<T> toObservable(
@Nonnull final Iterable<? extends T> iterable) {
return toObservable(iterable, DEFAULT_SCHEDULER.get());
}
/**
* Wrap the iterable object into an observable and use the
* given pool when generating the iterator sequence.
* @param <T> the type of the values
* @param iterable the iterable instance
* @param pool the thread pool where to generate the events from the iterable
* @return the observable
*/
@Nonnull
public static <T> Observable<T> toObservable(
@Nonnull final Iterable<? extends T> iterable,
@Nonnull final Scheduler pool) {
return new Observable<T>() {
@Override
public Closeable register(final Observer<? super T> observer) {
DefaultRunnable s = new DefaultRunnable() {
@Override
public void onRun() {
for (T t : iterable) {
if (cancelled()) {
break;
}
observer.next(t);
}
if (!cancelled()) {
observer.finish();
}
}
};
return pool.schedule(s);
}
};
}
/**
* Wraps the given action as an observable which reacts only to <code>next()</code> events.
* @param <T> the type of the values
* @param action the action to wrap
* @return the observer wrapping the action
*/
@Nonnull
public static <T> Observer<T> toObserver(
@Nonnull final Action1<? super T> action) {
return new Observer<T>() {
@Override
public void error(Throwable ex) {
// ignored
}
@Override
public void finish() {
// ignored
}
@Override
public void next(T value) {
action.invoke(value);
};
};
}
/**
* Creates an observer which calls the given functions on its similarly named methods.
* @param <T> the value type to receive
* @param next the action to invoke on next()
* @param error the action to invoke on error()
* @param finish the action to invoke on finish()
* @return the observer
*/
@Nonnull
public static <T> Observer<T> toObserver(
@Nonnull final Action1<? super T> next,
@Nonnull final Action1<? super Throwable> error,
@Nonnull final Action0 finish) {
return new Observer<T>() {
@Override
public void error(Throwable ex) {
error.invoke(ex);
}
@Override
public void finish() {
finish.invoke();
}
@Override
public void next(T value) {
next.invoke(value);
}
};
}
/**
* Filters objects from source which are assignment compatible with T.
* Note that due java erasure complex generic types can't be filtered this way in runtime (e.g., List<String>.class is just List.class).
* FIXME is this what cast stands for?
* @param <T> the type of the expected values
* @param source the source of unknown elements
* @param token the token to test agains the elements
* @return the observable containing Ts
*/
@Nonnull
public static <T> Observable<T> typedAs(
@Nonnull final Observable<?> source,
@Nonnull final Class<T> token) {
return new Observable<T>() {
@Override
public Closeable register(final Observer<? super T> observer) {
return source.register(new Observer<Object>() {
@Override
public void error(Throwable ex) {
observer.error(ex);
}
@Override
public void finish() {
observer.finish();
}
@Override
public void next(Object value) {
if (token.isInstance(value)) {
observer.next(token.cast(value));
}
}
});
}
};
}
/**
* A convenience function which unwraps the T from a TimeInterval of T.
* @param <T> the value type
* @return the unwrapper function
*/
@Nonnull
public static <T> Func1<TimeInterval<T>, T> unwrapTimeInterval() {
return new Func1<TimeInterval<T>, T>() {
@Override
public T invoke(TimeInterval<T> param1) {
return param1.value();
}
};
}
/**
* A convenience function which unwraps the T from a Timestamped of T.
* @param <T> the value type
* @return the unwrapper function
*/
@Nonnull
public static <T> Func1<Timestamped<T>, T> unwrapTimestamped() {
return new Func1<Timestamped<T>, T>() {
@Override
public T invoke(Timestamped<T> param1) {
return param1.value();
}
};
}
	/**
	 * Receives a resource from the resource selector and
	 * uses the resource until the wrapped observable terminates,
	 * then closes the resource.
	 * <p>NOTE(review): the resource is closed only on error() or finish();
	 * if the caller closes the returned registration before the sequence
	 * terminates, the resource is not closed here - confirm whether that
	 * is intended.</p>
	 * @param <T> the output resource type.
	 * @param <U> the closeable resource to work with
	 * @param resourceSelector the function that gives a resource
	 * @param resourceUsage a function that returns an observable of T for the given resource.
	 * @return the observable of Ts which terminates once the usage terminates
	 */
	@Nonnull
	public static <T, U extends Closeable> Observable<T> using(
			@Nonnull final Func0<? extends U> resourceSelector,
			@Nonnull final Func1<? super U, ? extends Observable<? extends T>> resourceUsage) {
		return new Observable<T>() {
			@Override
			public Closeable register(final Observer<? super T> observer) {
				// a fresh resource is created for every registration
				final U resource = resourceSelector.invoke();
				return resourceUsage.invoke(resource).register(new Observer<T>() {
					@Override
					public void error(Throwable ex) {
						try {
							observer.error(ex);
						} finally {
							// close the resource even if observer.error() throws
							Closeables.close0(resource);
						}
					}
					@Override
					public void finish() {
						try {
							observer.finish();
						} finally {
							// close the resource even if observer.finish() throws
							Closeables.close0(resource);
						}
					}
					@Override
					public void next(T value) {
						observer.next(value);
					}
				});
			}
		};
	}
/**
* Creates a filtered observable where only Ts are relayed which satisfy the clause.
* The clause receives the index and the current element to test.
* The clauseFactory is used for each individual registering observer.
* This can be used to create memorizing filter functions such as distinct.
* @param <T> the element type
* @param source the source of Ts
* @param clauseFactory the filter clause, the first parameter receives the current index, the second receives the current element
* @return the new observable
*/
@Nonnull
public static <T> Observable<T> where(
@Nonnull final Observable<? extends T> source,
@Nonnull final Func0<Func2<Integer, ? super T, Boolean>> clauseFactory) {
return new Observable<T>() {
@Override
public Closeable register(final Observer<? super T> observer) {
return source.register(new Observer<T>() {
/** The current element index. */
int index;
/** The clause factory to use. */
final Func2<Integer, ? super T, Boolean> clause = clauseFactory.invoke();
@Override
public void error(Throwable ex) {
observer.error(ex);
}
@Override
public void finish() {
observer.finish();
}
@Override
public void next(T value) {
if (clause.invoke(index, value)) {
observer.next(value);
}
index++;
}
});
}
};
}
/**
* Creates a filtered observable where only Ts are relayed which satisfy the clause.
* @param <T> the element type
* @param source the source of Ts
* @param clause the filter clause
* @return the new observable
*/
@Nonnull
public static <T> Observable<T> where(
@Nonnull final Observable<? extends T> source,
@Nonnull final Func1<? super T, Boolean> clause) {
return new Observable<T>() {
@Override
public Closeable register(final Observer<? super T> observer) {
return source.register(new Observer<T>() {
@Override
public void error(Throwable ex) {
observer.error(ex);
}
@Override
public void finish() {
observer.finish();
}
@Override
public void next(T value) {
if (clause.invoke(value)) {
observer.next(value);
}
}
});
}
};
}
/**
* Creates a filtered observable where only Ts are relayed which satisfy the clause.
* The clause receives the index and the current element to test.
* @param <T> the element type
* @param source the source of Ts
* @param clause the filter clause, the first parameter receives the current index, the second receives the current element
* @return the new observable
*/
@Nonnull
public static <T> Observable<T> where(
@Nonnull final Observable<? extends T> source,
@Nonnull final Func2<Integer, ? super T, Boolean> clause) {
return new Observable<T>() {
@Override
public Closeable register(final Observer<? super T> observer) {
return source.register(new Observer<T>() {
/** The current element index. */
int index;
@Override
public void error(Throwable ex) {
observer.error(ex);
}
@Override
public void finish() {
observer.finish();
}
@Override
public void next(T value) {
if (clause.invoke(index, value)) {
observer.next(value);
}
index++;
}
});
}
};
}
/**
* Splits the source stream into separate observables once
* the windowClosing fires an event.
* @param <T> the element type to observe
* @param <U> the closing event type, irrelevant
* @param source the source of Ts
* @param windowClosing the source of the window splitting events
* @return the observable on sequences of observables of Ts
*/
@Nonnull
public static <T, U> Observable<Observable<T>> window(
@Nonnull final Observable<? extends T> source,
@Nonnull final Func0<? extends Observable<U>> windowClosing) {
return window(source, windowClosing, DEFAULT_SCHEDULER.get());
}
	/**
	 * Splits the source stream into separate observables on
	 * each windowClosing event.
	 * <p>A scheduled task opens the first window right after registration;
	 * each closing event finishes the current window and opens a new one.
	 * All state changes run under the shared observer lock.</p>
	 * @param <T> the element type to observe
	 * @param <U> the closing event type, irrelevant
	 * @param source the source of Ts
	 * @param windowClosing the source of the window splitting events
	 * @param pool the pool where the first group is signalled from directly after
	 * the registration
	 * @return the observable on sequences of observables of Ts
	 */
	@Nonnull
	public static <T, U> Observable<Observable<T>> window(
			@Nonnull final Observable<? extends T> source,
			@Nonnull final Func0<? extends Observable<U>> windowClosing,
			@Nonnull final Scheduler pool) {
		return new Observable<Observable<T>>() {
			@Override
			public Closeable register(final Observer<? super Observable<T>> observer) {
				// The current observable
				DefaultObserver<T> obs = new DefaultObserver<T>(true) {
					/** The current observable window. */
					@GuardedBy("lock")
					DefaultObservable<T> current;
					/** The window watcher; shares the outer observer's lock. */
					final DefaultObserver<U> wo = new DefaultObserver<U>(lock, true) {
						@Override
						public void onError(Throwable ex) {
							innerError(ex);
						}
						@Override
						public void onFinish() {
							innerFinish();
						}
						@Override
						public void onNext(U value) {
							// each closing event finishes the current window and opens the next
							DefaultObservable<T> o = new DefaultObservable<T>();
							Observer<T> os = current;
							current = o;
							if (os != null) {
								os.finish();
							}
							observer.next(o);
						}
					};
					/** The close handler for the inner observer of closing events. */
					Closeable woc = windowClosing.invoke().register(wo);
					/**
					 * The scheduled action which will open the first window as soon as possible.
					 */
					Closeable openWindow = pool.schedule(new DefaultRunnable(lock) {
						@Override
						protected void onRun() {
							// only if no window was opened by onNext() or a closing event yet
							if (current == null) {
								DefaultObservable<T> o = new DefaultObservable<T>();
								current = o;
								observer.next(o);
							}
						}
					});
					/**
					 * The inner exception callback.
					 * @param ex the exception
					 */
					void innerError(Throwable ex) {
						error(ex);
					}
					/** The inner finish callback. */
					void innerFinish() {
						finish();
					}
					@Override
					public void onClose() {
						// release the closing-event registration and the pending open task
						Closeables.close0(woc);
						Closeables.close0(openWindow);
					}
					@Override
					public void onError(Throwable ex) {
						observer.error(ex);
					}
					@Override
					public void onFinish() {
						observer.finish();
					}
					@Override
					public void onNext(T value) {
						// lazily open a window if the scheduled task has not run yet
						if (current == null) {
							DefaultObservable<T> o = new DefaultObservable<T>();
							current = o;
							observer.next(o);
						}
						current.next(value);
					}
				};
				return Closeables.close(obs, source.register(obs));
			}
		};
	}
/**
* Project the source elements into observable windows of size <code>count</code>
* and skip some initial values.
* @param <T> the element type
* @param source the source of Ts
* @param count the count of elements
* @return the new observable
*/
public static <T> Observable<Observable<T>> window(
final Observable<? extends T> source,
int count
) {
return window(source, count, 0, DEFAULT_SCHEDULER.get());
}
/**
* Project the source elements into observable windows of size <code>count</code>
* and skip some initial values.
* @param <T> the element type
* @param source the source of Ts
* @param count the count of elements
* @param skip the elements to skip
* @return the new observable
*/
public static <T> Observable<Observable<T>> window(
final Observable<? extends T> source,
int count,
int skip
) {
return window(source, count, skip, DEFAULT_SCHEDULER.get());
}
/**
* Project the source elements into observable windows of size <code>count</code>
* and skip some initial values.
* FIXME implement
* @param <T> the element type
* @param source the source of Ts
* @param count the count of elements
* @param skip the elements to skip
* @param scheduler the scheduler
* @return the new observable
*/
public static <T> Observable<Observable<T>> window(
final Observable<? extends T> source,
final int count,
final int skip,
final Scheduler scheduler
) {
return new Observable<Observable<T>>() {
@Override
public Closeable register(final Observer<? super Observable<T>> observer) {
final AtomicReference<DefaultObservable<T>> current = new AtomicReference<DefaultObservable<T>>();
final AtomicInteger counter = new AtomicInteger(0);
DefaultObserverEx<T> o = new DefaultObserverEx<T>(true) {
/** The elements to skip at first. */
int skipCount = skip;
{
registerTimer();
add("source", source);
}
/**
* Create a new observable and reset the limit counter as well.
*/
void createNewObservable() {
counter.set(count);
DefaultObservable<T> d = current.get();
if (d != null) {
d.finish();
}
d = new DefaultObservable<T>();
current.set(d);
observer.next(d);
}
@Override
protected void onError(Throwable ex) {
remove("timer");
DefaultObservable<T> d = current.get();
d.error(ex);
observer.error(ex);
}
@Override
protected void onFinish() {
remove("timer");
DefaultObservable<T> d = current.get();
d.finish();
observer.finish();
}
@Override
protected void onNext(T value) {
if (skipCount > 0) {
skipCount--;
return;
}
if (counter.get() == 0 || current.get() == null) {
createNewObservable();
}
counter.decrementAndGet();
DefaultObservable<T> d = current.get();
d.next(value);
}
void registerTimer() {
replace("timer", "timer", scheduler.schedule(
new DefaultRunnable(lock) {
@Override
protected void onRun() {
// first only
if (current.get() == null) {
createNewObservable();
}
}
}, 0, TimeUnit.MILLISECONDS
));
}
};
return o;
}
};
}
/**
* Projects each value of T into an observable which are closed by
* either the <code>count</code> limit or the ellapsed timespan.
* @param <T> the element type
* @param source the source of Ts
* @param count the maximum count of the elements in each window
* @param timeSpan the maximum time for each window
* @param unit the time unit
* @return the new observable
*/
public static <T> Observable<Observable<T>> window(
final Observable<? extends T> source,
final int count,
final long timeSpan,
final TimeUnit unit
) {
return window(source, count, timeSpan, unit, DEFAULT_SCHEDULER.get());
}
/**
* Projects each value of T into an observable which are closed by
* either the <code>count</code> limit or the ellapsed timespan.
* @param <T> the element type
* @param source the source of Ts
* @param count the maximum count of the elements in each window
* @param timeSpan the maximum time for each window
* @param unit the time unit
* @param scheduler the scheduler
* @return the new observable
*/
public static <T> Observable<Observable<T>> window(
final Observable<? extends T> source,
final int count,
final long timeSpan,
final TimeUnit unit,
final Scheduler scheduler
) {
return new Observable<Observable<T>>() {
@Override
public Closeable register(final Observer<? super Observable<T>> observer) {
final AtomicReference<DefaultObservable<T>> current = new AtomicReference<DefaultObservable<T>>();
final AtomicInteger counter = new AtomicInteger(0);
DefaultObserverEx<T> o = new DefaultObserverEx<T>(true) {
{
registerTimer();
add("source", source);
}
/**
* Create a new observable and reset the limit counter as well.
*/
void createNewObservable() {
counter.set(count);
DefaultObservable<T> d = current.get();
if (d != null) {
d.finish();
}
d = new DefaultObservable<T>();
current.set(d);
observer.next(d);
}
@Override
protected void onError(Throwable ex) {
remove("timer");
DefaultObservable<T> d = current.get();
d.error(ex);
observer.error(ex);
}
@Override
protected void onFinish() {
remove("timer");
DefaultObservable<T> d = current.get();
d.finish();
observer.finish();
}
@Override
protected void onNext(T value) {
if (counter.get() == 0 || current.get() == null) {
createNewObservable();
}
counter.decrementAndGet();
DefaultObservable<T> d = current.get();
d.next(value);
}
void registerTimer() {
replace("timer", "timer", scheduler.schedule(
new DefaultRunnable(lock) {
/** First run. */
boolean first;
@Override
protected void onRun() {
if (!first) {
first = true;
if (current.get() == null) {
createNewObservable();
}
} else {
createNewObservable();
}
}
}, timeSpan, unit
));
}
};
return o;
}
};
}
/**
* Project the source elements into observable windows of size <code>count</code>
* and skip some initial values.
* @param <T> the element type
* @param source the source of Ts
* @param count the count of elements
* @param scheduler the scheduler
* @return the new observable
*/
public static <T> Observable<Observable<T>> window(
final Observable<? extends T> source,
int count,
Scheduler scheduler
) {
return window(source, count, 0, scheduler);
}
/**
* Project each of the source Ts into observable sequences separated by
* the timespan and initial timeskip values.
* @param <T> the element type
* @param source the source of Ts
* @param timeSpan the timespan between window openings
* @param timeSkip the initial delay to open the first window
* @param unit the time unit
* @return the observable
*/
public static <T> Observable<Observable<T>> window(
final Observable<? extends T> source,
final long timeSpan,
final long timeSkip,
final TimeUnit unit
) {
return window(source, timeSpan, timeSkip, unit, DEFAULT_SCHEDULER.get());
}
/**
* Project each of the source Ts into observable sequences separated by
* the timespan and initial timeskip values.
* FIXME implement
* @param <T> the element type
* @param source the source of Ts
* @param timeSpan the timespan between window openings
* @param timeSkip the initial delay to open the first window
* @param unit the time unit
* @param scheduler the scheduler
* @return the observable
*/
public static <T> Observable<Observable<T>> window(
final Observable<? extends T> source,
final long timeSpan,
final long timeSkip,
final TimeUnit unit,
final Scheduler scheduler
) {
return new Observable<Observable<T>>() {
@Override
public Closeable register(final Observer<? super Observable<T>> observer) {
final AtomicReference<DefaultObservable<T>> current = new AtomicReference<DefaultObservable<T>>();
DefaultObserverEx<T> o = new DefaultObserverEx<T>(true) {
{
registerTimer();
add("source", source);
}
/**
* Create a new observable and reset the limit counter as well.
*/
void createNewObservable() {
DefaultObservable<T> d = current.get();
if (d != null) {
d.finish();
}
d = new DefaultObservable<T>();
current.set(d);
observer.next(d);
}
@Override
protected void onError(Throwable ex) {
remove("timer");
DefaultObservable<T> d = current.get();
d.error(ex);
observer.error(ex);
}
@Override
protected void onFinish() {
remove("timer");
DefaultObservable<T> d = current.get();
d.finish();
observer.finish();
}
@Override
protected void onNext(T value) {
DefaultObservable<T> d = current.get();
if (d != null) {
d.next(value);
}
}
void registerTimer() {
replace("timer", "timer", scheduler.schedule(
new DefaultRunnable(lock) {
@Override
protected void onRun() {
createNewObservable();
}
}, timeSkip, timeSpan, unit
));
}
};
return o;
}
};
}
/**
* Project each of the source Ts into observable sequences separated by
* the timespan and initial timeskip values.
* @param <T> the element type
* @param source the source of Ts
* @param timeSpan the timespan between window openings
* @param unit the time unit
* @return the observable
*/
public static <T> Observable<Observable<T>> window(
final Observable<? extends T> source,
final long timeSpan,
final TimeUnit unit
) {
return window(source, timeSpan, 0L, unit, DEFAULT_SCHEDULER.get());
}
/**
* Project each of the source Ts into observable sequences separated by
* the timespan and initial timeskip values.
* @param <T> the element type
* @param source the source of Ts
* @param timeSpan the timespan between window openings
* @param unit the time unit
* @param scheduler the scheduler
* @return the observable
*/
public static <T> Observable<Observable<T>> window(
final Observable<? extends T> source,
final long timeSpan,
final TimeUnit unit,
final Scheduler scheduler
) {
return window(source, timeSpan, 0L, unit, scheduler);
}
	/**
	 * Splits the source stream into separate observables
	 * by starting at windowOpening events and closing at windowClosing events.
	 * <p>Multiple windows may be open simultaneously; every source value
	 * is relayed to all currently open windows. Both inner observers share
	 * a single lock.</p>
	 * @param <T> the element type to observe
	 * @param <U> the opening event type, irrelevant
	 * @param <V> the closing event type, irrelevant
	 * @param source the source of Ts
	 * @param windowOpening the source of the window opening events
	 * @param windowClosing the source of the window splitting events
	 * @return the observable on sequences of observables of Ts
	 */
	@Nonnull
	public static <T, U, V> Observable<Observable<T>> window(
			@Nonnull final Observable<? extends T> source,
			@Nonnull final Observable<? extends U> windowOpening,
			@Nonnull final Func1<? super U, ? extends Observable<V>> windowClosing) {
		return new Observable<Observable<T>>() {
			@Override
			public Closeable register(final Observer<? super Observable<T>> observer) {
				// both observers below synchronize on this shared lock
				final Lock lock = new ReentrantLock(true);
				// identity map: each opening event owns one window
				final Map<U, DefaultObservable<T>> openWindows = new IdentityHashMap<U, DefaultObservable<T>>();
				final AtomicReference<Closeable> closeBoth = new AtomicReference<Closeable>();
				// relay Ts to open windows
				DefaultObserverEx<T> o1 = new DefaultObserverEx<T>(lock, true) {
					@Override
					protected void onClose() {
						super.onClose();
						// closing either observer tears down both registrations
						Closeables.close0(closeBoth.get());
					}
					@Override
					protected void onError(Throwable ex) {
						for (DefaultObservable<T> ot : openWindows.values()) {
							ot.error(ex);
						}
						observer.error(ex);
					}
					@Override
					protected void onFinish() {
						for (DefaultObservable<T> ot : openWindows.values()) {
							ot.finish();
						}
						observer.finish();
					}
					@Override
					protected void onNext(T value) {
						// broadcast every source value into all open windows
						for (DefaultObservable<T> ot : openWindows.values()) {
							ot.next(value);
						}
					}
				};
				// watches the opening events and manages per-window closing observers
				DefaultObserverEx<U> o2 = new DefaultObserverEx<U>(lock, true) {
					@Override
					protected void onClose() {
						super.onClose();
						Closeables.close0(closeBoth.get());
					}
					@Override
					protected void onError(Throwable ex) {
						observer.error(ex);
					}
					@Override
					protected void onFinish() {
						observer.finish();
					}
					@Override
					protected void onNext(final U value) {
						final DefaultObservable<T> newWindow = new DefaultObservable<T>();
						openWindows.put(value, newWindow);
						// the per-window closing observable terminates this window
						add(value, windowClosing.invoke(value).register(new Observer<V>() {
							@Override
							public void error(Throwable ex) {
								openWindows.remove(value);
								newWindow.error(ex);
							}
							@Override
							public void finish() {
								openWindows.remove(value);
								newWindow.finish();
							}
							@Override
							public void next(V value) {
								// closing values themselves carry no information
							}
						}));
						observer.next(newWindow);
					}
				};
				closeBoth.set(Closeables.close(o1, o2));
				o1.add(new Object(), source);
				o2.add(new Object(), windowOpening);
				return closeBoth.get();
			}
		};
	}
/**
* Wrap the given type into a timestamped container of T.
* @param <T> the type of the contained element
* @return the function performing the wrapping
*/
@Nonnull
public static <T> Func1<T, Timestamped<T>> wrapTimestamped() {
return new Func1<T, Timestamped<T>>() {
@Override
public Timestamped<T> invoke(T param1) {
return Timestamped.of(param1);
};
};
}
	/**
	 * Creates an observable which waits for events from left
	 * and combines it with the next available value from the right iterable,
	 * applies the selector function and emits the resulting V.
	 * The error() and finish() signals are relayed to the output.
	 * The result is finished if the right iterator runs out of
	 * values before the left iterator.
	 * @param <T> the value type streamed on the left observable
	 * @param <U> the value type of the right iterable
	 * @param <V> the resulting element type
	 * @param left the left observable of Ts
	 * @param right the right iterable of Us
	 * @param selector the selector taking the left Ts and right Us.
	 * @return the resulting observable
	 */
	@Nonnull
	public static <T, U, V> Observable<V> zip(
			@Nonnull final Observable<? extends T> left,
			@Nonnull final Iterable<? extends U> right,
			@Nonnull final Func2<? super T, ? super U, ? extends V> selector) {
		return new Observable<V>() {
			@Override
			public Closeable register(final Observer<? super V> observer) {
				DefaultObserver<T> obs = new DefaultObserver<T>(true) {
					/** The second source. */
					final Iterator<? extends U> it = right.iterator();
					/** The registration handler. */
					final Closeable c;
					{
						// register under the lock so no event is processed
						// before the registration handle is assigned
						lock.lock();
						try {
							c = left.register(this);
						} finally {
							lock.unlock();
						}
					}
					@Override
					protected void onClose() {
						Closeables.close0(c);
					}
					@Override
					public void onError(Throwable ex) {
						observer.error(ex);
					}
					@Override
					public void onFinish() {
						observer.finish();
					}
					@Override
					public void onNext(T t) {
						if (it.hasNext()) {
							U u = it.next();
							observer.next(selector.invoke(t, u));
						} else {
							// the iterable is exhausted: complete and detach early
							observer.finish();
							close();
						}
					}
				};
				return obs;
			}
		};
	}
	/**
	 * Creates an observable which waits for events from left
	 * and combines it with the next available value from the right observable,
	 * applies the selector function and emits the resulting T.
	 * Basically it emits a T when both an U and V is available.
	 * The output stream throws error or terminates if any of the streams
	 * throws or terminates.
	 * <p>NOTE(review): the pairing queues are unbounded; if one side runs
	 * far ahead of the other, its values accumulate in memory - confirm
	 * this is acceptable for the intended workloads.</p>
	 * @param <T> the resulting element type
	 * @param <U> the value type streamed on the left observable
	 * @param <V> the value type streamed on the right iterable
	 * @param left the left observables of Us
	 * @param right the right iterable of Vs
	 * @param selector the selector taking the left Us and right Vs.
	 * @return the resulting observable
	 */
	@Nonnull
	public static <T, U, V> Observable<T> zip(
			@Nonnull final Observable<? extends U> left,
			@Nonnull final Observable<? extends V> right,
			@Nonnull final Func2<U, V, T> selector) {
		return new Observable<T>() {
			@Override
			public Closeable register(final Observer<? super T> observer) {
				// values of the faster side wait here for their counterpart
				final LinkedBlockingQueue<U> queueU = new LinkedBlockingQueue<U>();
				final LinkedBlockingQueue<V> queueV = new LinkedBlockingQueue<V>();
				final AtomicReference<Closeable> closeBoth = new AtomicReference<Closeable>();
				// counts the still-active sources; both must finish for the output to finish
				final AtomicInteger wip = new AtomicInteger(2);
				// both inner observers synchronize on this shared lock
				final Lock lockBoth = new ReentrantLock(true);
				lockBoth.lock();
				try {
					final DefaultObserver<U> oU = new DefaultObserver<U>(lockBoth, false) {
						/** The source handler. */
						final Closeable c;
						{
							lock.lock();
							try {
								c = left.register(this);
							} finally {
								lock.unlock();
							}
						}
						@Override
						protected void onClose() {
							Closeables.close0(c);
						}
						@Override
						public void onError(Throwable ex) {
							// any failure terminates the whole zip
							observer.error(ex);
							Closeables.close0(closeBoth.get());
						}
						@Override
						public void onFinish() {
							if (wip.decrementAndGet() == 0) {
								observer.finish();
								Closeables.close0(closeBoth.get());
							}
						}
						@Override
						public void onNext(U u) {
							V v = queueV.poll();
							if (v != null) {
								// counterpart already waiting: emit the pair
								observer.next(selector.invoke(u, v));
							} else {
								if (wip.get() == 2) {
									// both sides still live: queue for later pairing
									queueU.add(u);
								} else {
									// the other side finished: no pair can form anymore
									this.finish();
								}
							}
						}
					};
					final DefaultObserver<V> oV = new DefaultObserver<V>(lockBoth, false) {
						/** The source handler. */
						final Closeable c;
						{
							lock.lock();
							try {
								c = right.register(this);
							} finally {
								lock.unlock();
							}
						}
						@Override
						protected void onClose() {
							Closeables.close0(c);
						}
						@Override
						public void onError(Throwable ex) {
							observer.error(ex);
							Closeables.close0(closeBoth.get());
						}
						@Override
						public void onFinish() {
							if (wip.decrementAndGet() == 0) {
								observer.finish();
								Closeables.close0(closeBoth.get());
							}
						}
						@Override
						public void onNext(V v) {
							U u = queueU.poll();
							if (u != null) {
								observer.next(selector.invoke(u, v));
							} else {
								if (wip.get() == 2) {
									queueV.add(v);
								} else {
									this.finish();
								}
							}
						}
					};
					Closeable c = Closeables.close(oU, oV);
					closeBoth.set(c);
				} finally {
					lockBoth.unlock();
				}
				return closeBoth.get();
			}
		};
	}
/**
* Combine the incoming Ts of the various observables into a single list of Ts like
* using Reactive.zip() on more than two sources.
* @param <T> the element type
* @param srcs the iterable of observable sources.
* @return the new observable
*/
public static <T> Observable<List<T>> combine(final List<? extends Observable<? extends T>> srcs) {
if (srcs.size() < 1) {
return Reactive.never();
} else
if (srcs.size() == 1) {
return Reactive.select(srcs.get(0), new Func1<T, List<T>>() {
@Override
public List<T> invoke(T param1) {
List<T> result = new ArrayList<T>(1);
result.add(param1);
return result;
};
});
}
return new Observable<List<T>>() {
@Override
public Closeable register(Observer<? super List<T>> observer) {
Observable<List<T>> res0 = Reactive.zip(srcs.get(0), srcs.get(1), new Func2<T, T, List<T>>() {
@Override
public java.util.List<T> invoke(T param1, T param2) {
List<T> result = new ArrayList<T>();
result.add(param1);
result.add(param2);
return result;
};
});
for (int i = 2; i < srcs.size(); i++) {
res0 = Reactive.zip(res0, srcs.get(i), new Func2<List<T>, T, List<T>>() {
@Override
public java.util.List<T> invoke(java.util.List<T> param1, T param2) {
param1.add(param2);
return param1;
};
});
}
return res0.register(observer);
}
};
}
/**
* Combine a stream of Ts with a constant T whenever the src fires.
* The observed list contains the values of src as the first value, constant as the second.
* @param <T> the element type
* @param src the source of Ts
* @param constant the constant T to combine with
* @return the new observer
*/
public static <T> Observable<List<T>> combine(Observable<? extends T> src, final T constant) {
return Reactive.select(src, new Func1<T, List<T>>() {
@Override
public List<T> invoke(T param1) {
List<T> result = new ArrayList<T>();
result.add(param1);
result.add(constant);
return result;
};
});
}
/**
* Combine a constant T with a stream of Ts whenever the src fires.
* The observed sequence contains the constant as first, the src value as second.
* @param <T> the element type
* @param constant the constant T to combine with
* @param src the source of Ts
* @return the new observer
*/
public static <T> Observable<List<T>> combine(final T constant, Observable<? extends T> src) {
return Reactive.select(src, new Func1<T, List<T>>() {
@Override
public List<T> invoke(T param1) {
List<T> result = new ArrayList<T>();
result.add(constant);
result.add(param1);
return result;
};
});
}
/** Utility class. */
private Reactive() {
// utility class
}
}
| Fixed sequenceEquals() and skipLast() | Reactive4Java/src/hu/akarnokd/reactive4java/reactive/Reactive.java | Fixed sequenceEquals() and skipLast() | <ide><path>eactive4Java/src/hu/akarnokd/reactive4java/reactive/Reactive.java
<ide>
<ide> package hu.akarnokd.reactive4java.reactive;
<ide>
<add>import static hu.akarnokd.reactive4java.base.Functions.and;
<ide> import hu.akarnokd.reactive4java.base.Action0;
<ide> import hu.akarnokd.reactive4java.base.Action1;
<ide> import hu.akarnokd.reactive4java.base.Actions;
<ide> import java.util.List;
<ide> import java.util.Map;
<ide> import java.util.NoSuchElementException;
<add>import java.util.Queue;
<ide> import java.util.Set;
<ide> import java.util.concurrent.BlockingQueue;
<ide> import java.util.concurrent.Callable;
<ide> import java.util.concurrent.ConcurrentHashMap;
<add>import java.util.concurrent.ConcurrentLinkedQueue;
<ide> import java.util.concurrent.ConcurrentMap;
<ide> import java.util.concurrent.CountDownLatch;
<ide> import java.util.concurrent.LinkedBlockingQueue;
<ide> };
<ide> }
<ide> /**
<del> * Signals a single true or false if all elements of the observable matches the predicate.
<add> * Signals a single true or false if all elements of the observable match the predicate.
<ide> * It may return early with a result of false if the predicate simply does not match the current element.
<ide> * For a true result, it waits for all elements of the source observable.
<ide> * @param <T> the type of the source data
<ide> return any(source, Functions.alwaysTrue1());
<ide> }
<ide> /**
<del> * Signals a single TRUE if the source signals any next() and the value matches the predicate before it signals a finish().
<add> * Signals a single TRUE if the source ever signals next() and any of the values matches the predicate before it signals a finish().
<ide> * It signals a false otherwise.
<ide> * @param <T> the source element type.
<ide> * @param source the source observable
<ide> final Observable<? extends T> first,
<ide> final Observable<? extends T> second,
<ide> final Func2<? super T, ? super T, Boolean> comparer) {
<del> return new Observable<Boolean>() {
<del> @Override
<del> public Closeable register(final Observer<? super Boolean> observer) {
<del> final LinkedBlockingQueue<T> queueU = new LinkedBlockingQueue<T>();
<del> final LinkedBlockingQueue<T> queueV = new LinkedBlockingQueue<T>();
<del> final AtomicReference<Closeable> closeBoth = new AtomicReference<Closeable>();
<del> final AtomicInteger wip = new AtomicInteger(2);
<del> final Lock lockBoth = new ReentrantLock(true);
<del> final AtomicBoolean result = new AtomicBoolean(true);
<del>
<del> lockBoth.lock();
<del> try {
<del> final DefaultObserverEx<T> oU = new DefaultObserverEx<T>(lockBoth, false) {
<del> {
<del> add("first", first);
<del> }
<del>
<del> @Override
<del> public void onError(Throwable ex) {
<del> observer.error(ex);
<del> Closeables.close0(closeBoth.get());
<del> }
<del>
<del> @Override
<del> public void onFinish() {
<del> if (wip.decrementAndGet() == 0) {
<del> observer.next(result.get());
<del> observer.finish();
<del> Closeables.close0(closeBoth.get());
<del> }
<del> }
<del> @Override
<del> public void onNext(T u) {
<del> T v = queueV.poll();
<del> if (v != null) {
<del> if (!comparer.invoke(u, v)) {
<del> result.set(false);
<del> this.finish();
<del> }
<del> } else {
<del> if (wip.get() == 2) {
<del> queueU.add(u);
<del> } else {
<del> this.finish();
<del> }
<del> }
<del> }
<del> };
<del> final DefaultObserverEx<T> oV = new DefaultObserverEx<T>(lockBoth, false) {
<del> {
<del> add("second", second);
<del> }
<del>
<del> @Override
<del> public void onError(Throwable ex) {
<del> observer.error(ex);
<del> Closeables.close0(closeBoth.get());
<del> }
<del>
<del> @Override
<del> public void onFinish() {
<del> if (wip.decrementAndGet() == 0) {
<del> observer.next(result.get());
<del> observer.finish();
<del> Closeables.close0(closeBoth.get());
<del> }
<del> }
<del> @Override
<del> public void onNext(T v) {
<del> T u = queueU.poll();
<del> if (u != null) {
<del> if (!comparer.invoke(u, v)) {
<del> result.set(false);
<del> this.finish();
<del> }
<del> } else {
<del> if (wip.get() == 2) {
<del> queueV.add(v);
<del> } else {
<del> result.set(false);
<del> this.finish();
<del> }
<del> }
<del> }
<del> };
<del> Closeable c = Closeables.close(oU, oV);
<del> closeBoth.set(c);
<del> } finally {
<del> lockBoth.unlock();
<del> }
<del> return closeBoth.get();
<del> }
<del> };
<add> Observable<Boolean> pairwiseEqual = all(zip(first, second, comparer), Functions.<Boolean>identity());
<add> Observable<Boolean> sameSize = combineLatestSent(count(first), count(second), Functions.equals());
<add> return combineLatestSent(pairwiseEqual, sameSize, and());
<ide> }
<ide> /**
<ide> * Returns the single element of the given observable source.
<ide> * @return the new observable
<ide> */
<ide> @Nonnull
<del> public static <T> Observable<T> skipLast(
<del> @Nonnull final Observable<? extends T> source,
<del> final int count) {
<add> public static <T> Observable<T> skipLast(final Observable<? extends T> source, final int count) {
<ide> return new Observable<T>() {
<ide> @Override
<ide> public Closeable register(final Observer<? super T> observer) {
<ide> return source.register(new Observer<T>() {
<del> /** The temporar buffer to delay the values. */
<del> final CircularBuffer<T> buffer = new CircularBuffer<T>(count);
<del> /** The current size of the buffer. */
<del> int size;
<add> final Queue<T> buffer = new ConcurrentLinkedQueue<T>();
<add>
<ide> @Override
<ide> public void error(Throwable ex) {
<ide> observer.error(ex);
<ide>
<ide> @Override
<ide> public void finish() {
<add> while (buffer.size() > count) {
<add> observer.next(buffer.poll());
<add> }
<ide> observer.finish();
<ide> }
<ide>
<ide> @Override
<ide> public void next(T value) {
<ide> buffer.add(value);
<del> size++;
<del> if (size > count) {
<del> observer.next(buffer.take());
<del> size--;
<del> }
<del> }
<del>
<add> }
<ide> });
<ide> }
<ide> }; |
|
JavaScript | mit | b87edd4de9253a548a5bf44b57dca8db12e9c02d | 0 | svivian/Pokemon-Showdown,sirDonovan/Pokemon-Showdown,panpawn/Gold-Server,xfix/Pokemon-Showdown,Enigami/Pokemon-Showdown,urkerab/Pokemon-Showdown,svivian/Pokemon-Showdown,QuiteQuiet/Pokemon-Showdown,sirDonovan/Pokemon-Showdown,Enigami/Pokemon-Showdown,Enigami/Pokemon-Showdown,panpawn/Pokemon-Showdown,xfix/Pokemon-Showdown,Zarel/Pokemon-Showdown,panpawn/Gold-Server,svivian/Pokemon-Showdown,QuiteQuiet/Pokemon-Showdown,AustinXII/Pokemon-Showdown,urkerab/Pokemon-Showdown,jumbowhales/Pokemon-Showdown,panpawn/Gold-Server,AustinXII/Pokemon-Showdown,panpawn/Pokemon-Showdown,Zarel/Pokemon-Showdown,xfix/Pokemon-Showdown,AustinXII/Pokemon-Showdown,xfix/Pokemon-Showdown,svivian/Pokemon-Showdown,sirDonovan/Pokemon-Showdown,Enigami/Pokemon-Showdown,svivian/Pokemon-Showdown,Zarel/Pokemon-Showdown,QuiteQuiet/Pokemon-Showdown,jumbowhales/Pokemon-Showdown,urkerab/Pokemon-Showdown,xfix/Pokemon-Showdown,jumbowhales/Pokemon-Showdown | /**
* Matchmaker
* Pokemon Showdown - http://pokemonshowdown.com/
*
* This keeps track of challenges to battle made between users, setting up
* matches between users looking for a battle, and starting new battles.
*
* @License MIT License
*/
'use strict';
/** @type {typeof LadderStoreT} */
const LadderStore = require(typeof Config === 'object' && Config.remoteladder ? './ladders-remote' : './ladders-local');
/** @type {number} */
const PERIODIC_MATCH_INTERVAL = 60 * 1000;
/**
* This represents a user's search for a battle under a format.
*/
class BattleReady {
/**
* @param {string} userid
* @param {string} formatid
* @param {string} team
* @param {number} [rating = 1000]
*/
constructor(userid, formatid, team, rating = 0) {
/** @type {string} */
this.userid = userid;
/** @type {string} */
this.formatid = formatid;
/** @type {string} */
this.team = team;
/** @type {number} */
this.rating = rating;
/** @type {number} */
this.time = Date.now();
}
}
/**
* formatid:userid:BattleReady
* @type {Map<string, Map<string, BattleReady>>}
*/
const searches = new Map();
class Challenge {
/**
* @param {BattleReady} ready
* @param {string} to
*/
constructor(ready, to) {
this.from = ready.userid;
this.to = to;
this.formatid = ready.formatid;
this.ready = ready;
}
}
/**
* formatid:userid:BattleReady
* @type {Map<string, Challenge[]>}
*/
const challenges = new Map();
/**
* This keeps track of searches for battles, creating a new battle for a newly
* added search if a valid match can be made, otherwise periodically
* attempting to make a match with looser restrictions until one can be made.
*/
class Ladder extends LadderStore {
/**
* @param {string} formatid
*/
constructor(formatid) {
super(formatid);
}
/**
* @param {Connection} connection
* @param {string?} team
* @return {Promise<BattleReady?>}
*/
async prepBattle(connection, team = null, isRated = false) {
// all validation for a battle goes through here
const user = connection.user || connection;
const userid = user.userid;
if (team === null) team = user.team;
if (Rooms.global.lockdown && Rooms.global.lockdown !== 'pre') {
let message = `The server is restarting. Battles will be available again in a few minutes.`;
if (Rooms.global.lockdown === 'ddos') {
message = `The server is under attack. Battles cannot be started at this time.`;
}
connection.popup(message);
return null;
}
let gameCount = user.games.size;
if (Monitor.countConcurrentBattle(gameCount, connection)) {
return null;
}
if (Monitor.countPrepBattle(connection.ip || connection.latestIp, connection)) {
return null;
}
try {
// @ts-ignore TypeScript bug: self-reference
this.formatid = Dex.validateFormat(this.formatid);
} catch (e) {
connection.popup(`Your selected format is invalid:\n\n- ${e.message}`);
return null;
}
let rating = 0, valResult;
if (isRated) {
try {
[valResult, rating] = await Promise.all([
TeamValidatorAsync(this.formatid).validateTeam(team, user.locked || user.namelocked),
this.getRating(user.userid),
]);
} catch (e) {
// Rejects iff ladders are disabled, or if we
// retrieved the rating but the user had changed their name.
if (Ladders.disabled) {
connection.popup(`The ladder is currently disabled due to high server load.`);
}
// User feedback for renames handled elsewhere.
return null;
}
if (!rating) rating = 1;
} else {
valResult = await TeamValidatorAsync(this.formatid).validateTeam(team, user.locked || user.namelocked);
}
if (valResult.charAt(0) !== '1') {
connection.popup(
`Your team was rejected for the following reasons:\n\n` +
`- ` + valResult.slice(1).replace(/\n/g, `\n- `)
);
return null;
}
return new BattleReady(userid, this.formatid, valResult.slice(1), rating);
}
/**
* @param {User} user
*/
static cancelChallenging(user) {
const chall = Ladder.getChallenging(user.userid);
if (chall) {
Ladder.removeChallenge(chall);
return true;
}
return false;
}
/**
* @param {User} user
* @param {User} targetUsername
*/
static rejectChallenge(user, targetUsername) {
const targetUserid = toId(targetUsername);
const chall = Ladder.getChallenging(targetUserid);
if (chall && chall.to === user.userid) {
Ladder.removeChallenge(chall);
return true;
}
return false;
}
/**
* @param {string} username
*/
static clearChallenges(username) {
const userid = toId(username);
const userChalls = Ladders.challenges.get(userid);
if (userChalls) {
for (const chall of userChalls.slice()) {
let otherUserid;
if (chall.from === userid) {
otherUserid = chall.to;
} else {
otherUserid = chall.from;
}
Ladder.removeChallenge(chall, true);
const otherUser = Users(otherUserid);
if (otherUser) Ladder.updateChallenges(otherUser);
}
const user = Users(userid);
if (user) Ladder.updateChallenges(user);
return true;
}
return false;
}
/**
* @param {Connection} connection
* @param {User} targetUser
*/
async makeChallenge(connection, targetUser) {
const user = connection.user;
if (targetUser === user) {
connection.popup(`You can't battle yourself. The best you can do is open PS in Private Browsing (or another browser) and log into a different username, and battle that username.`);
return false;
}
if (Ladder.getChallenging(connection.user.userid)) {
connection.popup(`You are already challenging someone. Cancel that challenge before challenging someone else.`);
return false;
}
if (targetUser.blockChallenges && !user.can('bypassblocks', targetUser)) {
connection.popup(`The user '${targetUser.name}' is not accepting challenges right now.`);
return false;
}
if (Date.now() < user.lastChallenge + 10000) {
// 10 seconds ago, probable misclick
connection.popup(`You challenged less than 10 seconds after your last challenge! It's cancelled in case it's a misclick.`);
return false;
}
const ready = await this.prepBattle(connection);
if (!ready) return false;
Ladder.addChallenge(new Challenge(ready, targetUser.userid));
user.lastChallenge = Date.now();
return true;
}
/**
* @param {Connection} connection
* @param {User} targetUser
*/
static async acceptChallenge(connection, targetUser) {
const chall = Ladder.getChallenging(targetUser.userid);
if (!chall || chall.to !== connection.user.userid) {
connection.popup(`${targetUser.userid} is not challenging you. Maybe they cancelled before you accepted?`);
return false;
}
const ladder = Ladders(chall.formatid);
const ready = await ladder.prepBattle(connection);
if (!ready) return false;
if (Ladder.removeChallenge(chall)) {
Ladders.match(chall.ready, ready);
}
return true;
}
/**
* @param {string} userid
*/
static getChallenging(userid) {
const userChalls = Ladders.challenges.get(userid);
if (userChalls) {
for (const chall of userChalls) {
if (chall.from === userid) return chall;
}
}
return null;
}
/**
* @param {Challenge} challenge
*/
static addChallenge(challenge, skipUpdate = false) {
let challs1 = Ladders.challenges.get(challenge.from);
if (!challs1) Ladders.challenges.set(challenge.from, challs1 = []);
let challs2 = Ladders.challenges.get(challenge.to);
if (!challs2) Ladders.challenges.set(challenge.to, challs2 = []);
challs1.push(challenge);
challs2.push(challenge);
if (!skipUpdate) {
const fromUser = Users(challenge.from);
if (fromUser) Ladder.updateChallenges(fromUser);
const toUser = Users(challenge.to);
if (toUser) Ladder.updateChallenges(toUser);
}
}
/**
* @param {Challenge} challenge
*/
static removeChallenge(challenge, skipUpdate = false) {
const fromChalls = /** @type {Challenge[]} */ (Ladders.challenges.get(challenge.from));
// the challenge may have been cancelled
if (!fromChalls) return false;
const fromIndex = fromChalls.indexOf(challenge);
if (fromIndex < 0) return false;
fromChalls.splice(fromIndex, 1);
if (!fromChalls.length) Ladders.challenges.delete(challenge.from);
const toChalls = /** @type {Challenge[]} */ (Ladders.challenges.get(challenge.to));
toChalls.splice(toChalls.indexOf(challenge), 1);
if (!toChalls.length) Ladders.challenges.delete(challenge.to);
if (!skipUpdate) {
const fromUser = Users(challenge.from);
if (fromUser) Ladder.updateChallenges(fromUser);
const toUser = Users(challenge.to);
if (toUser) Ladder.updateChallenges(toUser);
}
return true;
}
/**
* @param {User} user
* @param {Connection?} connection
*/
static updateChallenges(user, connection = null) {
if (!user.connected) return;
let challengeTo = null;
let challengesFrom = {};
const userChalls = Ladders.challenges.get(user.userid);
if (userChalls) {
for (const chall of userChalls) {
if (chall.from === user.userid) {
challengeTo = {
to: chall.to,
format: chall.formatid,
};
} else {
challengesFrom[chall.from] = chall.formatid;
}
}
}
(connection || user).send(`|updatechallenges|` + JSON.stringify({
challengesFrom: challengesFrom,
challengeTo: challengeTo,
}));
}
/**
* @param {User} user
* @return {boolean}
*/
cancelSearch(user) {
const formatid = toId(this.formatid);
const formatTable = Ladders.searches.get(formatid);
if (!formatTable) return false;
if (!formatTable.has(user.userid)) return false;
formatTable.delete(user.userid);
Ladder.updateSearch(user);
return true;
}
/**
* @param {User} user
* @return {number} cancel count
*/
static cancelSearches(user) {
let cancelCount = 0;
for (let formatTable of Ladders.searches.values()) {
const search = formatTable.get(user.userid);
if (!search) continue;
formatTable.delete(user.userid);
cancelCount++;
}
Ladder.updateSearch(user);
return cancelCount;
}
/**
* @param {BattleReady} search
*/
getSearcher(search) {
const formatid = toId(this.formatid);
const user = Users.get(search.userid);
if (!user || !user.connected || user.userid !== search.userid) {
const formatTable = Ladders.searches.get(formatid);
if (formatTable) formatTable.delete(search.userid);
if (user && user.connected) {
user.popup(`You changed your name and are no longer looking for a battle in ${formatid}`);
Ladder.updateSearch(user);
}
return;
}
return user;
}
/**
* @param {User} user
*/
static getSearches(user) {
let userSearches = [];
for (const [formatid, formatTable] of Ladders.searches) {
if (formatTable.has(user.userid)) userSearches.push(formatid);
}
return userSearches;
}
/**
* @param {User} user
* @param {Connection?} connection
*/
static updateSearch(user, connection = null) {
let games = /** @type {any} */ ({});
let atLeastOne = false;
for (const roomid of user.games) {
const room = Rooms(roomid);
if (!room) {
Monitor.warn(`while searching, room ${roomid} expired for user ${user.userid} in rooms ${[...user.inRooms]} and games ${[...user.games]}`);
user.games.delete(roomid);
return;
}
const game = room.game;
if (!game) {
Monitor.warn(`while searching, room ${roomid} has no game for user ${user.userid} in rooms ${[...user.inRooms]} and games ${[...user.games]}`);
user.games.delete(roomid);
return;
}
games[roomid] = game.title + (game.allowRenames ? '' : '*');
atLeastOne = true;
}
if (!atLeastOne) games = null;
let searching = Ladders.getSearches(user);
(connection || user).send(`|updatesearch|` + JSON.stringify({
searching: searching,
games: games,
}));
}
/**
* @param {User} user
*/
hasSearch(user) {
const formatid = toId(this.formatid);
const formatTable = Ladders.searches.get(formatid);
if (!formatTable) return false;
return formatTable.has(user.userid);
}
/**
* Validates a user's team and fetches their rating for a given format
* before creating a search for a battle.
* @param {User} user
* @param {Connection} connection
* @return {Promise<void>}
*/
async searchBattle(user, connection) {
if (!user.connected) return;
const format = Dex.getFormat(this.formatid);
if (!format.searchShow) {
connection.popup(`Error: Your format ${format.id} is not ladderable.`);
}
let oldUserid = user.userid;
const search = await this.prepBattle(connection, null, format.rated !== false);
if (oldUserid !== user.userid) return;
if (!search) return;
this.addSearch(search, user);
}
/**
* Verifies whether or not a match made between two users is valid. Returns
* @param {BattleReady} search1
* @param {BattleReady} search2
* @param {User=} user1
* @param {User=} user2
* @return {boolean}
*/
matchmakingOK(search1, search2, user1, user2) {
const formatid = toId(this.formatid);
if (!user1 || !user2) {
// This should never happen.
require('./crashlogger')(new Error(`Matched user ${user1 ? search2.userid : search1.userid} not found`), "The main process");
return false;
}
// users must be different
if (user1 === user2) return false;
// users must have different IPs
if (user1.latestIp === user2.latestIp) return false;
// users must not have been matched immediately previously
if (user1.lastMatch === user2.userid || user2.lastMatch === user1.userid) return false;
// search must be within range
let searchRange = 100;
let elapsed = Date.now() - Math.min(search1.time, search2.time);
if (formatid === 'gen7ou' || formatid === 'gen7oucurrent' ||
formatid === 'gen7oususpecttest' || formatid === 'gen7randombattle') {
searchRange = 50;
}
searchRange += elapsed / 300; // +1 every .3 seconds
if (searchRange > 300) searchRange = 300 + (searchRange - 300) / 10; // +1 every 3 sec after 300
if (searchRange > 600) searchRange = 600;
if (Math.abs(search1.rating - search2.rating) > searchRange) return false;
user1.lastMatch = user2.userid;
user2.lastMatch = user1.userid;
return true;
}
/**
* Starts a search for a battle for a user under the given format.
* @param {BattleReady} newSearch
* @param {User} user
*/
addSearch(newSearch, user) {
const formatid = newSearch.formatid;
let formatTable = Ladders.searches.get(formatid);
if (!formatTable) {
formatTable = new Map();
Ladders.searches.set(formatid, formatTable);
}
if (formatTable.has(user.userid)) {
user.popup(`Couldn't search: You are already searching for a ${formatid} battle.`);
return;
}
// In order from longest waiting to shortest waiting
for (let search of formatTable.values()) {
const searcher = this.getSearcher(search);
if (!searcher) continue;
const matched = this.matchmakingOK(search, newSearch, searcher, user);
if (matched) {
formatTable.delete(search.userid);
Ladder.match(search, newSearch);
return;
}
}
formatTable.set(newSearch.userid, newSearch);
Ladder.updateSearch(user);
}
/**
* Creates a match for a new battle for each format in this.searches if a
* valid match can be made. This is run periodically depending on
* PERIODIC_MATCH_INTERVAL.
*/
static periodicMatch() {
// In order from longest waiting to shortest waiting
for (const [formatid, formatTable] of Ladders.searches) {
const matchmaker = Ladders(formatid);
let longestSearch, longestSearcher;
for (let search of formatTable.values()) {
if (!longestSearch) {
longestSearcher = matchmaker.getSearcher(search);
if (!longestSearcher) continue;
longestSearch = search;
continue;
}
let searcher = matchmaker.getSearcher(search);
if (!searcher) continue;
let matched = matchmaker.matchmakingOK(search, longestSearch, searcher, longestSearcher);
if (matched) {
formatTable.delete(search.userid);
formatTable.delete(longestSearch.userid);
Ladder.match(longestSearch, search);
return;
}
}
}
}
/**
* @param {BattleReady} ready1
* @param {BattleReady} ready2
*/
static match(ready1, ready2) {
if (ready1.formatid !== ready2.formatid) throw new Error(`Format IDs don't match`);
const user1 = Users(ready1.userid);
const user2 = Users(ready2.userid);
if (!user1 && !user2) return false;
if (!user1) {
user2.popup(`Sorry, your opponent ${ready1.userid} went offline before your battle could start.`);
return false;
}
if (!user2) {
user1.popup(`Sorry, your opponent ${ready2.userid} went offline before your battle could start.`);
return false;
}
Rooms.createBattle(ready1.formatid, {
p1: user1,
p1team: ready1.team,
p2: user2,
p2team: ready2.team,
rated: Math.min(ready1.rating, ready2.rating),
});
}
}
/**
* @param {string} formatid
*/
function getLadder(formatid) {
return new Ladder(formatid);
}
/** @type {?NodeJS.Timer} */
let periodicMatchInterval = setInterval(
() => Ladder.periodicMatch(),
PERIODIC_MATCH_INTERVAL
);
const Ladders = Object.assign(getLadder, {
BattleReady,
LadderStore,
Ladder,
cancelSearches: Ladder.cancelSearches,
updateSearch: Ladder.updateSearch,
rejectChallenge: Ladder.rejectChallenge,
acceptChallenge: Ladder.acceptChallenge,
cancelChallenging: Ladder.cancelChallenging,
clearChallenges: Ladder.clearChallenges,
updateChallenges: Ladder.updateChallenges,
visualizeAll: Ladder.visualizeAll,
getSearches: Ladder.getSearches,
match: Ladder.match,
searches,
challenges,
periodicMatchInterval,
// tells the client to ask the server for format information
formatsListPrefix: LadderStore.formatsListPrefix,
/** @type {true | false | 'db'} */
disabled: false,
});
module.exports = Ladders;
| ladders.js | /**
* Matchmaker
* Pokemon Showdown - http://pokemonshowdown.com/
*
* This keeps track of challenges to battle made between users, setting up
* matches between users looking for a battle, and starting new battles.
*
* @License MIT License
*/
'use strict';
/** @type {typeof LadderStoreT} */
const LadderStore = require(typeof Config === 'object' && Config.remoteladder ? './ladders-remote' : './ladders-local');
/** @type {number} */
const PERIODIC_MATCH_INTERVAL = 60 * 1000;
/**
* This represents a user's search for a battle under a format.
*/
class BattleReady {
/**
* @param {string} userid
* @param {string} formatid
* @param {string} team
* @param {number} [rating = 1000]
*/
constructor(userid, formatid, team, rating = 0) {
/** @type {string} */
this.userid = userid;
/** @type {string} */
this.formatid = formatid;
/** @type {string} */
this.team = team;
/** @type {number} */
this.rating = rating;
/** @type {number} */
this.time = Date.now();
}
}
/**
* formatid:userid:BattleReady
* @type {Map<string, Map<string, BattleReady>>}
*/
const searches = new Map();
class Challenge {
/**
* @param {BattleReady} ready
* @param {string} to
*/
constructor(ready, to) {
this.from = ready.userid;
this.to = to;
this.formatid = ready.formatid;
this.ready = ready;
}
}
/**
* formatid:userid:BattleReady
* @type {Map<string, Challenge[]>}
*/
const challenges = new Map();
/**
* This keeps track of searches for battles, creating a new battle for a newly
* added search if a valid match can be made, otherwise periodically
* attempting to make a match with looser restrictions until one can be made.
*/
class Ladder extends LadderStore {
/**
* @param {string} formatid
*/
constructor(formatid) {
super(formatid);
}
/**
* @param {Connection} connection
* @param {string?} team
* @return {Promise<BattleReady?>}
*/
async prepBattle(connection, team = null, isRated = false) {
// all validation for a battle goes through here
const user = connection.user || connection;
const userid = user.userid;
if (team === null) team = user.team;
if (Rooms.global.lockdown && Rooms.global.lockdown !== 'pre') {
let message = `The server is restarting. Battles will be available again in a few minutes.`;
if (Rooms.global.lockdown === 'ddos') {
message = `The server is under attack. Battles cannot be started at this time.`;
}
connection.popup(message);
return null;
}
let gameCount = user.games.size;
if (Monitor.countConcurrentBattle(gameCount, connection)) {
return null;
}
if (Monitor.countPrepBattle(connection.ip || connection.latestIp, connection)) {
return null;
}
try {
// @ts-ignore TypeScript bug: self-reference
this.formatid = Dex.validateFormat(this.formatid);
} catch (e) {
connection.popup(`Your selected format is invalid:\n\n- ${e.message}`);
return null;
}
let rating = 0, valResult;
if (isRated) {
try {
[valResult, rating] = await Promise.all([
TeamValidatorAsync(this.formatid).validateTeam(team, user.locked || user.namelocked),
this.getRating(user.userid),
]);
} catch (e) {
// Rejects iff ladders are disabled, or if we
// retrieved the rating but the user had changed their name.
if (Ladders.disabled) {
connection.popup(`The ladder is currently disabled due to high server load.`);
}
// User feedback for renames handled elsewhere.
return null;
}
if (!rating) rating = 1;
} else {
valResult = await TeamValidatorAsync(this.formatid).validateTeam(team, user.locked || user.namelocked);
}
if (valResult.charAt(0) !== '1') {
connection.popup(
`Your team was rejected for the following reasons:\n\n` +
`- ` + valResult.slice(1).replace(/\n/g, `\n- `)
);
return null;
}
return new BattleReady(userid, this.formatid, valResult.slice(1), rating);
}
/**
* @param {User} user
*/
static cancelChallenging(user) {
const chall = Ladder.getChallenging(user.userid);
if (chall) {
Ladder.removeChallenge(chall);
return true;
}
return false;
}
/**
* @param {User} user
* @param {User} targetUsername
*/
static rejectChallenge(user, targetUsername) {
const targetUserid = toId(targetUsername);
const chall = Ladder.getChallenging(targetUserid);
if (chall && chall.to === user.userid) {
Ladder.removeChallenge(chall);
return true;
}
return false;
}
/**
* @param {string} username
*/
static clearChallenges(username) {
const userid = toId(username);
const userChalls = challenges.get(userid);
if (userChalls) {
for (const chall of userChalls.slice()) {
let otherUserid;
if (chall.from === userid) {
otherUserid = chall.to;
} else {
otherUserid = chall.from;
}
Ladder.removeChallenge(chall, true);
const otherUser = Users(otherUserid);
if (otherUser) Ladder.updateChallenges(otherUser);
}
const user = Users(userid);
if (user) Ladder.updateChallenges(user);
return true;
}
return false;
}
	/**
	 * Issues a challenge from connection.user to targetUser after the usual
	 * pre-battle checks (self-challenge, one-outgoing-challenge limit,
	 * target's challenge block, misclick throttle, team validation).
	 * @param {Connection} connection
	 * @param {User} targetUser
	 * @return {Promise<boolean>} whether the challenge was issued
	 */
	async makeChallenge(connection, targetUser) {
		const user = connection.user;
		if (targetUser === user) {
			connection.popup(`You can't battle yourself. The best you can do is open PS in Private Browsing (or another browser) and log into a different username, and battle that username.`);
			return false;
		}
		// only one outgoing challenge is allowed at a time
		if (Ladder.getChallenging(connection.user.userid)) {
			connection.popup(`You are already challenging someone. Cancel that challenge before challenging someone else.`);
			return false;
		}
		if (targetUser.blockChallenges && !user.can('bypassblocks', targetUser)) {
			connection.popup(`The user '${targetUser.name}' is not accepting challenges right now.`);
			return false;
		}
		if (Date.now() < user.lastChallenge + 10000) {
			// 10 seconds ago, probable misclick
			connection.popup(`You challenged less than 10 seconds after your last challenge! It's cancelled in case it's a misclick.`);
			return false;
		}
		// validates the team (and fetches rating when applicable)
		const ready = await this.prepBattle(connection);
		if (!ready) return false;
		Ladder.addChallenge(new Challenge(ready, targetUser.userid));
		user.lastChallenge = Date.now();
		return true;
	}
/**
* @param {Connection} connection
* @param {User} targetUser
*/
static async acceptChallenge(connection, targetUser) {
const chall = Ladder.getChallenging(targetUser.userid);
if (!chall || chall.to !== connection.user.userid) {
connection.popup(`${targetUser.userid} is not challenging you. Maybe they cancelled before you accepted?`);
return false;
}
const ladder = Ladders(chall.formatid);
const ready = await ladder.prepBattle(connection);
if (!ready) return false;
if (Ladder.removeChallenge(chall)) {
Ladders.match(chall.ready, ready);
}
return true;
}
/**
* @param {string} userid
*/
static getChallenging(userid) {
const userChalls = challenges.get(userid);
if (userChalls) {
for (const chall of userChalls) {
if (chall.from === userid) return chall;
}
}
return null;
}
/**
* @param {Challenge} challenge
*/
static addChallenge(challenge, skipUpdate = false) {
let challs1 = challenges.get(challenge.from);
if (!challs1) challenges.set(challenge.from, challs1 = []);
let challs2 = challenges.get(challenge.to);
if (!challs2) challenges.set(challenge.to, challs2 = []);
challs1.push(challenge);
challs2.push(challenge);
if (!skipUpdate) {
const fromUser = Users(challenge.from);
if (fromUser) Ladder.updateChallenges(fromUser);
const toUser = Users(challenge.to);
if (toUser) Ladder.updateChallenges(toUser);
}
}
	/**
	 * Removes a challenge from both participants' lists and (unless
	 * skipUpdate is set) refreshes both users' challenge displays.
	 * @param {Challenge} challenge
	 * @return {boolean} whether the challenge was found and removed
	 */
	static removeChallenge(challenge, skipUpdate = false) {
		const fromChalls = /** @type {Challenge[]} */ (challenges.get(challenge.from));
		// the challenge may have been cancelled
		if (!fromChalls) return false;
		const fromIndex = fromChalls.indexOf(challenge);
		if (fromIndex < 0) return false;
		fromChalls.splice(fromIndex, 1);
		// drop empty lists so the map doesn't accumulate stale keys
		if (!fromChalls.length) challenges.delete(challenge.from);
		// NOTE(review): assumes the challenge is always mirrored in the `to`
		// user's list when present in the `from` list -- `toChalls` is not
		// null-checked before use; confirm addChallenge guarantees this.
		const toChalls = /** @type {Challenge[]} */ (challenges.get(challenge.to));
		toChalls.splice(toChalls.indexOf(challenge), 1);
		if (!toChalls.length) challenges.delete(challenge.to);
		if (!skipUpdate) {
			const fromUser = Users(challenge.from);
			if (fromUser) Ladder.updateChallenges(fromUser);
			const toUser = Users(challenge.to);
			if (toUser) Ladder.updateChallenges(toUser);
		}
		return true;
	}
/**
* @param {User} user
* @param {Connection?} connection
*/
static updateChallenges(user, connection = null) {
if (!user.connected) return;
let challengeTo = null;
let challengesFrom = {};
const userChalls = challenges.get(user.userid);
if (userChalls) {
for (const chall of userChalls) {
if (chall.from === user.userid) {
challengeTo = {
to: chall.to,
format: chall.formatid,
};
} else {
challengesFrom[chall.from] = chall.formatid;
}
}
}
(connection || user).send(`|updatechallenges|` + JSON.stringify({
challengesFrom: challengesFrom,
challengeTo: challengeTo,
}));
}
/**
* @param {User} user
* @return {boolean}
*/
cancelSearch(user) {
const formatid = toId(this.formatid);
const formatTable = searches.get(formatid);
if (!formatTable) return false;
if (!formatTable.has(user.userid)) return false;
formatTable.delete(user.userid);
Ladder.updateSearch(user);
return true;
}
/**
* @param {User} user
* @return {number} cancel count
*/
static cancelSearches(user) {
let cancelCount = 0;
for (let formatTable of searches.values()) {
const search = formatTable.get(user.userid);
if (!search) continue;
formatTable.delete(user.userid);
cancelCount++;
}
Ladder.updateSearch(user);
return cancelCount;
}
	/**
	 * Resolves a pending search to its online user. If the user is offline
	 * or has renamed (userid no longer matches), the stale search is
	 * dropped from this format's table and undefined is returned; a user
	 * who is still connected under a new name is told their search ended.
	 * @param {BattleReady} search
	 * @return {User=} the searching user, or undefined if the search is stale
	 */
	getSearcher(search) {
		const formatid = toId(this.formatid);
		const user = Users.get(search.userid);
		if (!user || !user.connected || user.userid !== search.userid) {
			const formatTable = searches.get(formatid);
			if (formatTable) formatTable.delete(search.userid);
			if (user && user.connected) {
				user.popup(`You changed your name and are no longer looking for a battle in ${formatid}`);
				Ladder.updateSearch(user);
			}
			return;
		}
		return user;
	}
/**
* @param {User} user
*/
static getSearches(user) {
let userSearches = [];
for (const [formatid, formatTable] of searches) {
if (formatTable.has(user.userid)) userSearches.push(formatid);
}
return userSearches;
}
	/**
	 * Sends the |updatesearch| protocol message listing the user's ongoing
	 * games and active ladder searches.
	 * @param {User} user
	 * @param {Connection?} connection - if given, send only to this connection
	 */
	static updateSearch(user, connection = null) {
		let games = /** @type {any} */ ({});
		let atLeastOne = false;
		for (const roomid of user.games) {
			const room = Rooms(roomid);
			if (!room) {
				Monitor.warn(`while searching, room ${roomid} expired for user ${user.userid} in rooms ${[...user.inRooms]} and games ${[...user.games]}`);
				user.games.delete(roomid);
				// NOTE(review): this is `return`, not `continue` -- a single
				// stale room aborts the whole update and no |updatesearch|
				// message is sent this time around. Confirm intended.
				return;
			}
			const game = room.game;
			if (!game) {
				Monitor.warn(`while searching, room ${roomid} has no game for user ${user.userid} in rooms ${[...user.inRooms]} and games ${[...user.games]}`);
				user.games.delete(roomid);
				// same early abort as above
				return;
			}
			// '*' marks games in which renaming is not allowed
			games[roomid] = game.title + (game.allowRenames ? '' : '*');
			atLeastOne = true;
		}
		// the client expects null rather than an empty object when idle
		if (!atLeastOne) games = null;
		let searching = Ladders.getSearches(user);
		(connection || user).send(`|updatesearch|` + JSON.stringify({
			searching: searching,
			games: games,
		}));
	}
/**
* @param {User} user
*/
hasSearch(user) {
const formatid = toId(this.formatid);
const formatTable = searches.get(formatid);
if (!formatTable) return false;
return formatTable.has(user.userid);
}
/**
* Validates a user's team and fetches their rating for a given format
* before creating a search for a battle.
* @param {User} user
* @param {Connection} connection
* @return {Promise<void>}
*/
async searchBattle(user, connection) {
if (!user.connected) return;
const format = Dex.getFormat(this.formatid);
if (!format.searchShow) {
connection.popup(`Error: Your format ${format.id} is not ladderable.`);
}
let oldUserid = user.userid;
const search = await this.prepBattle(connection, null, format.rated !== false);
if (oldUserid !== user.userid) return;
if (!search) return;
this.addSearch(search, user);
}
	/**
	 * Verifies whether or not a match made between two users is valid.
	 * On success, also records each user as the other's last opponent
	 * (side effect) so they won't be re-paired immediately.
	 * @param {BattleReady} search1
	 * @param {BattleReady} search2
	 * @param {User=} user1
	 * @param {User=} user2
	 * @return {boolean}
	 */
	matchmakingOK(search1, search2, user1, user2) {
		const formatid = toId(this.formatid);
		if (!user1 || !user2) {
			// This should never happen.
			require('./crashlogger')(new Error(`Matched user ${user1 ? search2.userid : search1.userid} not found`), "The main process");
			return false;
		}

		// users must be different
		if (user1 === user2) return false;

		// users must have different IPs
		if (user1.latestIp === user2.latestIp) return false;

		// users must not have been matched immediately previously
		if (user1.lastMatch === user2.userid || user2.lastMatch === user1.userid) return false;

		// search must be within range; the allowed rating gap starts at 100
		// (50 for the popular formats below) and widens with waiting time
		let searchRange = 100;
		let elapsed = Date.now() - Math.min(search1.time, search2.time);
		if (formatid === 'gen7ou' || formatid === 'gen7oucurrent' ||
			formatid === 'gen7oususpecttest' || formatid === 'gen7randombattle') {
			searchRange = 50;
		}

		searchRange += elapsed / 300; // +1 every .3 seconds
		if (searchRange > 300) searchRange = 300 + (searchRange - 300) / 10; // +1 every 3 sec after 300
		if (searchRange > 600) searchRange = 600;
		if (Math.abs(search1.rating - search2.rating) > searchRange) return false;

		user1.lastMatch = user2.userid;
		user2.lastMatch = user1.userid;
		return true;
	}
/**
* Starts a search for a battle for a user under the given format.
* @param {BattleReady} newSearch
* @param {User} user
*/
addSearch(newSearch, user) {
const formatid = newSearch.formatid;
let formatTable = searches.get(formatid);
if (!formatTable) {
formatTable = new Map();
searches.set(formatid, formatTable);
}
if (formatTable.has(user.userid)) {
user.popup(`Couldn't search: You are already searching for a ${formatid} battle.`);
return;
}
// In order from longest waiting to shortest waiting
for (let search of formatTable.values()) {
const searcher = this.getSearcher(search);
if (!searcher) continue;
const matched = this.matchmakingOK(search, newSearch, searcher, user);
if (matched) {
formatTable.delete(search.userid);
Ladder.match(search, newSearch);
return;
}
}
formatTable.set(newSearch.userid, newSearch);
Ladder.updateSearch(user);
}
	/**
	 * Creates a match for a new battle for each format in this.searches if a
	 * valid match can be made. This is run periodically depending on
	 * PERIODIC_MATCH_INTERVAL.
	 */
	static periodicMatch() {
		// In order from longest waiting to shortest waiting
		for (const [formatid, formatTable] of searches) {
			const matchmaker = Ladders(formatid);
			// the first still-valid search becomes the candidate that every
			// later search is tested against
			let longestSearch, longestSearcher;
			for (let search of formatTable.values()) {
				if (!longestSearch) {
					longestSearcher = matchmaker.getSearcher(search);
					if (!longestSearcher) continue;
					longestSearch = search;
					continue;
				}
				let searcher = matchmaker.getSearcher(search);
				if (!searcher) continue;
				let matched = matchmaker.matchmakingOK(search, longestSearch, searcher, longestSearcher);
				if (matched) {
					formatTable.delete(search.userid);
					formatTable.delete(longestSearch.userid);
					Ladder.match(longestSearch, search);
					// NOTE(review): returns after the first successful pairing,
					// so at most one match is made per tick across all formats
					// -- confirm this throttling is intended.
					return;
				}
			}
		}
	}
/**
* @param {BattleReady} ready1
* @param {BattleReady} ready2
*/
static match(ready1, ready2) {
if (ready1.formatid !== ready2.formatid) throw new Error(`Format IDs don't match`);
const user1 = Users(ready1.userid);
const user2 = Users(ready2.userid);
if (!user1 && !user2) return false;
if (!user1) {
user2.popup(`Sorry, your opponent ${ready1.userid} went offline before your battle could start.`);
return false;
}
if (!user2) {
user1.popup(`Sorry, your opponent ${ready2.userid} went offline before your battle could start.`);
return false;
}
Rooms.createBattle(ready1.formatid, {
p1: user1,
p1team: ready1.team,
p2: user2,
p2team: ready2.team,
rated: Math.min(ready1.rating, ready2.rating),
});
}
}
/**
 * Factory behind the module export: Ladders(formatid) resolves here.
 * @param {string} formatid
 * @return {Ladder} a ladder handle for the given format
 */
function getLadder(formatid) {
	return new Ladder(formatid);
}
/** @type {?NodeJS.Timer} */
// periodically pairs long-waiting searches (see Ladder.periodicMatch)
let periodicMatchInterval = setInterval(
	() => Ladder.periodicMatch(),
	PERIODIC_MATCH_INTERVAL
);

// The export is callable -- Ladders(formatid) returns a Ladder for that
// format -- with the commonly used static helpers attached as properties.
const Ladders = Object.assign(getLadder, {
	BattleReady,
	LadderStore,
	Ladder,

	cancelSearches: Ladder.cancelSearches,
	updateSearch: Ladder.updateSearch,
	rejectChallenge: Ladder.rejectChallenge,
	acceptChallenge: Ladder.acceptChallenge,
	cancelChallenging: Ladder.cancelChallenging,
	clearChallenges: Ladder.clearChallenges,
	updateChallenges: Ladder.updateChallenges,
	visualizeAll: Ladder.visualizeAll,
	getSearches: Ladder.getSearches,
	match: Ladder.match,

	searches,
	periodicMatchInterval,

	// tells the client to ask the server for format information
	formatsListPrefix: LadderStore.formatsListPrefix,
	/** @type {true | false | 'db'} */
	disabled: false,
});

module.exports = Ladders;
| Access searches/challenges through Ladders
In general I like to make non-local variable accesses clearer, hence
this.
...should maybe mention this in CONTRIBUTING.md
| ladders.js | Access searches/challenges through Ladders | <ide><path>adders.js
<ide> */
<ide> static clearChallenges(username) {
<ide> const userid = toId(username);
<del> const userChalls = challenges.get(userid);
<add> const userChalls = Ladders.challenges.get(userid);
<ide> if (userChalls) {
<ide> for (const chall of userChalls.slice()) {
<ide> let otherUserid;
<ide> * @param {string} userid
<ide> */
<ide> static getChallenging(userid) {
<del> const userChalls = challenges.get(userid);
<add> const userChalls = Ladders.challenges.get(userid);
<ide> if (userChalls) {
<ide> for (const chall of userChalls) {
<ide> if (chall.from === userid) return chall;
<ide> * @param {Challenge} challenge
<ide> */
<ide> static addChallenge(challenge, skipUpdate = false) {
<del> let challs1 = challenges.get(challenge.from);
<del> if (!challs1) challenges.set(challenge.from, challs1 = []);
<del> let challs2 = challenges.get(challenge.to);
<del> if (!challs2) challenges.set(challenge.to, challs2 = []);
<add> let challs1 = Ladders.challenges.get(challenge.from);
<add> if (!challs1) Ladders.challenges.set(challenge.from, challs1 = []);
<add> let challs2 = Ladders.challenges.get(challenge.to);
<add> if (!challs2) Ladders.challenges.set(challenge.to, challs2 = []);
<ide> challs1.push(challenge);
<ide> challs2.push(challenge);
<ide> if (!skipUpdate) {
<ide> * @param {Challenge} challenge
<ide> */
<ide> static removeChallenge(challenge, skipUpdate = false) {
<del> const fromChalls = /** @type {Challenge[]} */ (challenges.get(challenge.from));
<add> const fromChalls = /** @type {Challenge[]} */ (Ladders.challenges.get(challenge.from));
<ide> // the challenge may have been cancelled
<ide> if (!fromChalls) return false;
<ide> const fromIndex = fromChalls.indexOf(challenge);
<ide> if (fromIndex < 0) return false;
<ide> fromChalls.splice(fromIndex, 1);
<del> if (!fromChalls.length) challenges.delete(challenge.from);
<del> const toChalls = /** @type {Challenge[]} */ (challenges.get(challenge.to));
<add> if (!fromChalls.length) Ladders.challenges.delete(challenge.from);
<add> const toChalls = /** @type {Challenge[]} */ (Ladders.challenges.get(challenge.to));
<ide> toChalls.splice(toChalls.indexOf(challenge), 1);
<del> if (!toChalls.length) challenges.delete(challenge.to);
<add> if (!toChalls.length) Ladders.challenges.delete(challenge.to);
<ide> if (!skipUpdate) {
<ide> const fromUser = Users(challenge.from);
<ide> if (fromUser) Ladder.updateChallenges(fromUser);
<ide> if (!user.connected) return;
<ide> let challengeTo = null;
<ide> let challengesFrom = {};
<del> const userChalls = challenges.get(user.userid);
<add> const userChalls = Ladders.challenges.get(user.userid);
<ide> if (userChalls) {
<ide> for (const chall of userChalls) {
<ide> if (chall.from === user.userid) {
<ide> cancelSearch(user) {
<ide> const formatid = toId(this.formatid);
<ide>
<del> const formatTable = searches.get(formatid);
<add> const formatTable = Ladders.searches.get(formatid);
<ide> if (!formatTable) return false;
<ide> if (!formatTable.has(user.userid)) return false;
<ide> formatTable.delete(user.userid);
<ide> static cancelSearches(user) {
<ide> let cancelCount = 0;
<ide>
<del> for (let formatTable of searches.values()) {
<add> for (let formatTable of Ladders.searches.values()) {
<ide> const search = formatTable.get(user.userid);
<ide> if (!search) continue;
<ide> formatTable.delete(user.userid);
<ide> const formatid = toId(this.formatid);
<ide> const user = Users.get(search.userid);
<ide> if (!user || !user.connected || user.userid !== search.userid) {
<del> const formatTable = searches.get(formatid);
<add> const formatTable = Ladders.searches.get(formatid);
<ide> if (formatTable) formatTable.delete(search.userid);
<ide> if (user && user.connected) {
<ide> user.popup(`You changed your name and are no longer looking for a battle in ${formatid}`);
<ide> */
<ide> static getSearches(user) {
<ide> let userSearches = [];
<del> for (const [formatid, formatTable] of searches) {
<add> for (const [formatid, formatTable] of Ladders.searches) {
<ide> if (formatTable.has(user.userid)) userSearches.push(formatid);
<ide> }
<ide> return userSearches;
<ide> */
<ide> hasSearch(user) {
<ide> const formatid = toId(this.formatid);
<del> const formatTable = searches.get(formatid);
<add> const formatTable = Ladders.searches.get(formatid);
<ide> if (!formatTable) return false;
<ide> return formatTable.has(user.userid);
<ide> }
<ide> */
<ide> addSearch(newSearch, user) {
<ide> const formatid = newSearch.formatid;
<del> let formatTable = searches.get(formatid);
<add> let formatTable = Ladders.searches.get(formatid);
<ide> if (!formatTable) {
<ide> formatTable = new Map();
<del> searches.set(formatid, formatTable);
<add> Ladders.searches.set(formatid, formatTable);
<ide> }
<ide> if (formatTable.has(user.userid)) {
<ide> user.popup(`Couldn't search: You are already searching for a ${formatid} battle.`);
<ide> */
<ide> static periodicMatch() {
<ide> // In order from longest waiting to shortest waiting
<del> for (const [formatid, formatTable] of searches) {
<add> for (const [formatid, formatTable] of Ladders.searches) {
<ide> const matchmaker = Ladders(formatid);
<ide> let longestSearch, longestSearcher;
<ide> for (let search of formatTable.values()) {
<ide> match: Ladder.match,
<ide>
<ide> searches,
<add> challenges,
<ide> periodicMatchInterval,
<ide>
<ide> // tells the client to ask the server for format information |
|
Java | apache-2.0 | da4bae1cd87f2c2de5d24b88edf43877f3ce4802 | 0 | creswick/StreamingQR,creswick/StreamingQR | /**
* Copyright 2014 Galois, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.galois.qrstream.lib;
import android.content.Context;
import android.content.Intent;
import android.util.Log;
import com.galois.qrstream.qrpipe.IProgress;
import com.galois.qrstream.qrpipe.Receive;
import com.galois.qrstream.qrpipe.ReceiveException;
import org.jetbrains.annotations.NotNull;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.URI;
/**
 * Worker thread that receives a payload streamed over QR codes from the
 * camera and then dispatches the result to another application via an
 * ACTION_SEND chooser.
 *
 * Created by donp on 2/28/14.
 */
public class DecodeThread extends Thread {
    // decodes the incoming QR frame stream into a Job
    private final Receive receiver;
    private final CameraManager cameraManager;
    private final Context context;

    /**
     * @param ctx context used to start the chooser and locate the cache dir
     * @param progress receives decoding progress callbacks
     * @param cameraManager frame source; also supplies the display size
     */
    public DecodeThread(Context ctx, IProgress progress, CameraManager cameraManager) {
        this.context = ctx;
        this.cameraManager = cameraManager;
        // the receiver is sized to the camera display dimensions
        this.receiver = new Receive(
                cameraManager.getDisplayHeight(),
                cameraManager.getDisplayWidth(),
                progress);
    }

    /**
     * Blocks until a full Job has been decoded from the camera stream, then
     * starts an "Open with" chooser for the received content. Failures are
     * logged and swallowed; the thread simply terminates.
     */
    @Override
    public void run(){
        Job message;
        try {
            message = (Job)receiver.decodeQRSerializable(cameraManager);
            Log.w(Constants.APP_TAG, "DecodeThread received " + message.getData().length + " bytes " +
                    message.getMimeType());
            Log.w(Constants.APP_TAG, "DecodeThread heard " + new String(message.getData()));

            Intent i = buildIntent(message);

            // TODO integrate with ZXing
            context.startActivity(Intent.createChooser(i, "Open with"));
        } catch(ReceiveException e) {
            Log.e(Constants.APP_TAG, "DecodeThread failed to read message. " + e.getMessage());
        } catch (IOException e) {
            Log.e(Constants.APP_TAG, "Could not store data to temp file." + e.getMessage());
        }
    }

    /**
     * Builds an ACTION_SEND intent for the received job. Plain text is
     * attached inline via EXTRA_TEXT; any other MIME type is written to a
     * temp file and attached via EXTRA_STREAM.
     * @param message the fully received job
     * @return a populated ACTION_SEND intent
     * @throws IOException if the payload cannot be written to the cache dir
     */
    private Intent buildIntent(Job message) throws IOException {
        Intent i = new Intent();
        i.setAction(Intent.ACTION_SEND);
        i.addCategory(Intent.CATEGORY_DEFAULT);
        String mimeType = message.getMimeType();
        i.setType(mimeType);

        if(mimeType.equals("text/plain")) {
            String msg = new String(message.getData());
            i.putExtra(Intent.EXTRA_TEXT, msg);
        } else {
            // this should conditionally use a URI if the payload is too large.
            URI dataLoc = storeData(message);
            // NOTE(review): EXTRA_STREAM conventionally carries an
            // android.net.Uri; this passes a java.net.URI (Serializable) --
            // confirm receiving apps can actually consume it.
            i.putExtra(Intent.EXTRA_STREAM, dataLoc);
        }
        return i;
    }

    /**
     * Writes the job payload to a world-readable temp file in the app cache
     * directory and returns its URI. The file is marked delete-on-exit.
     * @throws IOException on any failure creating or writing the file
     */
    private @NotNull URI storeData(Job message) throws IOException {
        File cacheDir = context.getCacheDir();
        File tmpFile = File.createTempFile("qrstream","", cacheDir);
        // make tmpFile world-readable:
        tmpFile.setReadable(true, false);
        tmpFile.deleteOnExit();
        BufferedOutputStream bos = null;
        try {
            bos = new BufferedOutputStream(new FileOutputStream(tmpFile));
            bos.write(message.getData());
            bos.flush();
        } finally {
            if ( null != bos) {
                bos.close();
            }
        }
        return tmpFile.toURI();
    }
}
| development/android/qrstreamlib/src/main/java/com/galois/qrstream/lib/DecodeThread.java | /**
* Copyright 2014 Galois, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.galois.qrstream.lib;
import android.content.Context;
import android.content.Intent;
import android.util.Log;
import com.galois.qrstream.qrpipe.IProgress;
import com.galois.qrstream.qrpipe.Receive;
import com.galois.qrstream.qrpipe.ReceiveException;
import org.jetbrains.annotations.NotNull;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.URI;
/**
 * Worker thread that receives a payload streamed over QR codes from the
 * camera and dispatches it to another application. This variant always
 * writes the payload to a temp file and attaches it via EXTRA_STREAM.
 *
 * Created by donp on 2/28/14.
 */
public class DecodeThread extends Thread {
    // decodes the incoming QR frame stream into a Job
    private final Receive receiver;
    private final CameraManager cameraManager;
    private final Context context;

    /**
     * @param ctx context used to start the chooser and locate the cache dir
     * @param progress receives decoding progress callbacks
     * @param cameraManager frame source; also supplies the display size
     */
    public DecodeThread(Context ctx, IProgress progress, CameraManager cameraManager) {
        this.context = ctx;
        this.cameraManager = cameraManager;
        // the receiver is sized to the camera display dimensions
        this.receiver = new Receive(
                cameraManager.getDisplayHeight(),
                cameraManager.getDisplayWidth(),
                progress);
    }

    /**
     * Blocks until a full Job has been decoded, stores it to a temp file,
     * and starts an "Open with" chooser for it. Failures are logged and
     * swallowed; the thread simply terminates.
     */
    @Override
    public void run(){
        Job message;
        try {
            message = (Job)receiver.decodeQRSerializable(cameraManager);
            Log.w(Constants.APP_TAG, "DecodeThread read message of length: " + message.getData().length);
            Log.w(Constants.APP_TAG, "DecodeThread heard " + message.toString());

            Intent i = new Intent();
            i.setAction(Intent.ACTION_SEND);
            i.addCategory(Intent.CATEGORY_DEFAULT);
            i.setType(message.getMimeType());

            // this should conditionally use a URI if the payload is too large.
            URI dataLoc = storeData(message);
            // NOTE(review): EXTRA_STREAM conventionally carries an
            // android.net.Uri; this passes a java.net.URI -- confirm
            // receiving apps can actually consume it.
            i.putExtra(Intent.EXTRA_STREAM, dataLoc);

            // TODO integrate with ZXing
            context.startActivity(Intent.createChooser(i, "Open with"));
        } catch(ReceiveException e) {
            Log.e(Constants.APP_TAG, "DecodeThread failed to read message. " + e.getMessage());
        } catch (IOException e) {
            Log.e(Constants.APP_TAG, "Could not store data to temp file." + e.getMessage());
        }
    }

    /**
     * Writes the job payload to a world-readable temp file in the app cache
     * directory and returns its URI. The file is marked delete-on-exit.
     * @throws IOException on any failure creating or writing the file
     */
    private @NotNull URI storeData(Job message) throws IOException {
        File cacheDir = context.getCacheDir();
        File tmpFile = File.createTempFile("qrstream","", cacheDir);
        // make tmpFile world-readable:
        tmpFile.setReadable(true, false);
        tmpFile.deleteOnExit();
        BufferedOutputStream bos = null;
        try {
            bos = new BufferedOutputStream(new FileOutputStream(tmpFile));
            bos.write(message.getData());
            bos.flush();
        } finally {
            if ( null != bos) {
                bos.close();
            }
        }
        return tmpFile.toURI();
    }
}
| use EXTRA_TEXT when receiving text
| development/android/qrstreamlib/src/main/java/com/galois/qrstream/lib/DecodeThread.java | use EXTRA_TEXT when receiving text | <ide><path>evelopment/android/qrstreamlib/src/main/java/com/galois/qrstream/lib/DecodeThread.java
<ide> Job message;
<ide> try {
<ide> message = (Job)receiver.decodeQRSerializable(cameraManager);
<del> Log.w(Constants.APP_TAG, "DecodeThread read message of length: " + message.getData().length);
<del> Log.w(Constants.APP_TAG, "DecodeThread heard " + message.toString());
<add> Log.w(Constants.APP_TAG, "DecodeThread received " + message.getData().length + " bytes " +
<add> message.getMimeType());
<add> Log.w(Constants.APP_TAG, "DecodeThread heard " + new String(message.getData()));
<ide>
<ide>
<del> Intent i = new Intent();
<del> i.setAction(Intent.ACTION_SEND);
<del> i.addCategory(Intent.CATEGORY_DEFAULT);
<del> i.setType(message.getMimeType());
<del>
<del> // this should conditionally use a URI if the payload is too large.
<del> URI dataLoc = storeData(message);
<del> i.putExtra(Intent.EXTRA_STREAM, dataLoc);
<add> Intent i = buildIntent(message);
<ide>
<ide> // TODO integrate with ZXing
<ide>
<ide> } catch (IOException e) {
<ide> Log.e(Constants.APP_TAG, "Could not store data to temp file." + e.getMessage());
<ide> }
<add> }
<add>
<add> private Intent buildIntent(Job message) throws IOException {
<add> Intent i = new Intent();
<add> i.setAction(Intent.ACTION_SEND);
<add> i.addCategory(Intent.CATEGORY_DEFAULT);
<add> String mimeType = message.getMimeType();
<add> i.setType(mimeType);
<add>
<add> if(mimeType.equals("text/plain")) {
<add> String msg = new String(message.getData());
<add> i.putExtra(Intent.EXTRA_TEXT, msg);
<add> } else {
<add> // this should conditionally use a URI if the payload is too large.
<add> URI dataLoc = storeData(message);
<add> i.putExtra(Intent.EXTRA_STREAM, dataLoc);
<add> }
<add> return i;
<ide> }
<ide>
<ide> private @NotNull URI storeData(Job message) throws IOException { |
|
Java | bsd-3-clause | 378de3b541879f8fec149bb91953f172e5995569 | 0 | Clunker5/tregmine-2.0,Clunker5/tregmine-2.0,EmilHernvall/tregmine,EmilHernvall/tregmine,EmilHernvall/tregmine | package info.tregmine.listeners;
import java.util.*;
import java.util.Map.Entry;
import org.bukkit.*;
import org.bukkit.block.*;
import org.bukkit.entity.*;
import org.bukkit.event.*;
import org.bukkit.event.block.Action;
import org.bukkit.event.inventory.InventoryCloseEvent;
import org.bukkit.event.player.*;
import org.bukkit.event.player.PlayerLoginEvent.Result;
import org.bukkit.inventory.ItemStack;
import org.bukkit.inventory.meta.ItemMeta;
import org.bukkit.scoreboard.*;
import org.kitteh.tag.PlayerReceiveNameTagEvent;
import info.tregmine.Tregmine;
import info.tregmine.api.*;
import info.tregmine.api.lore.Created;
import info.tregmine.api.util.ScoreboardClearTask;
import info.tregmine.commands.MentorCommand;
import info.tregmine.database.*;
import info.tregmine.events.PlayerMoveBlockEvent;
import info.tregmine.quadtree.Point;
import info.tregmine.zones.*;
public class TregminePlayerListener implements Listener
{
private static class RankComparator implements Comparator<TregminePlayer>
{
private int order;
public RankComparator()
{
this.order = 1;
}
public RankComparator(boolean reverseOrder)
{
this.order = reverseOrder ? -1 : 1;
}
@Override
public int compare(TregminePlayer a, TregminePlayer b)
{
return order * (a.getGuardianRank() - b.getGuardianRank());
}
}
private Tregmine plugin;
private Map<Item, TregminePlayer> droppedItems;
public TregminePlayerListener(Tregmine instance)
{
this.plugin = instance;
droppedItems = new HashMap<Item, TregminePlayer>();
}
@EventHandler
public void onPlayerClick(PlayerInteractEvent event)
{
if (event.getAction() != Action.RIGHT_CLICK_BLOCK) {
return;
}
Player player = event.getPlayer();
BlockState block = event.getClickedBlock().getState();
if (block instanceof Skull) {
Skull skull = (Skull) block;
if (!skull.getSkullType().equals(SkullType.PLAYER)) {
return;
}
String owner = skull.getOwner();
TregminePlayer skullowner = plugin.getPlayerOffline(owner);
if (skullowner != null){
ChatColor C = skullowner.getNameColor();
player.sendMessage(ChatColor.AQUA + "This is " + C + owner + "'s " + ChatColor.AQUA + "head!");
}else{
player.sendMessage(ChatColor.AQUA + "This is " + ChatColor.WHITE + owner + ChatColor.AQUA + "'s head!");
}
}
}
@EventHandler
public void onPlayerItemHeld(InventoryCloseEvent event)
{
Player player = (Player) event.getPlayer();
if (player.getGameMode() == GameMode.CREATIVE) {
for (ItemStack item : player.getInventory().getContents()) {
if (item != null) {
ItemMeta meta = item.getItemMeta();
List<String> lore = new ArrayList<String>();
lore.add(Created.CREATIVE.toColorString());
TregminePlayer p = this.plugin.getPlayer(player);
lore.add(ChatColor.WHITE + "by: " + p.getChatName());
lore.add(ChatColor.WHITE + "Value: " + ChatColor.MAGIC
+ "0000" + ChatColor.RESET + ChatColor.WHITE
+ " Treg");
meta.setLore(lore);
item.setItemMeta(meta);
}
}
}
TregminePlayer p = plugin.getPlayer(player);
if (p.getCurrentInventory() != null) {
p.saveInventory(p.getCurrentInventory());
}
}
@EventHandler
public void onPlayerRespawnSave(PlayerRespawnEvent event)
{
TregminePlayer p = plugin.getPlayer(event.getPlayer());
p.saveInventory(p.getCurrentInventory());
}
@EventHandler
public void onPlayerInteract(PlayerInteractEvent event)
{
if (event.getAction() == Action.RIGHT_CLICK_BLOCK) {
Player player = event.getPlayer();
Block block = event.getClickedBlock();
Location loc = block.getLocation();
if (player.getItemInHand().getType() == Material.BOOK) {
player.sendMessage(ChatColor.DARK_AQUA + "Type: "
+ ChatColor.AQUA + block.getType().toString() + " ("
+ ChatColor.BLUE + block.getType().getId()
+ ChatColor.DARK_AQUA + ")");
player.sendMessage(ChatColor.DARK_AQUA + "Data: "
+ ChatColor.AQUA + (int) block.getData());
player.sendMessage(ChatColor.RED + "X" + ChatColor.WHITE + ", "
+ ChatColor.GREEN + "Y" + ChatColor.WHITE + ", "
+ ChatColor.BLUE + "Z" + ChatColor.WHITE + ": "
+ ChatColor.RED + loc.getBlockX() + ChatColor.WHITE
+ ", " + ChatColor.GREEN + loc.getBlockY()
+ ChatColor.WHITE + ", " + ChatColor.BLUE
+ loc.getBlockZ());
try {
player.sendMessage(ChatColor.DARK_AQUA + "Biome: "
+ ChatColor.AQUA + block.getBiome().toString());
} catch (Exception e) {
player.sendMessage(ChatColor.DARK_AQUA + "Biome: "
+ ChatColor.AQUA + "NULL");
}
Tregmine.LOGGER.info("POS: " + loc.getBlockX() + ", "
+ loc.getBlockY() + ", " + loc.getBlockZ());
}
}
}
@EventHandler
public void onPreCommand(PlayerCommandPreprocessEvent event)
{
// Tregmine.LOGGER.info("COMMAND: " + event.getPlayer().getName() + "::"
// + event.getMessage());
}
@EventHandler
public void onPlayerLogin(PlayerLoginEvent event)
{
TregminePlayer player;
try {
player = plugin.addPlayer(event.getPlayer(), event.getAddress());
if (player == null) {
event.disallow(Result.KICK_OTHER, "Something went wrong");
return;
}
}
catch (PlayerBannedException e) {
event.disallow(Result.KICK_BANNED, e.getMessage());
return;
}
if (player.getRank() == Rank.UNVERIFIED) {
player.setChatState(TregminePlayer.ChatState.SETUP);
}
if (player.getLocation().getWorld().getName().matches("world_the_end")) {
player.teleport(this.plugin.getServer().getWorld("world")
.getSpawnLocation());
}
if (player.getKeyword() != null) {
String keyword =
player.getKeyword()
+ ".mc.tregmine.info:25565".toLowerCase();
Tregmine.LOGGER.warning("host: " + event.getHostname());
Tregmine.LOGGER.warning("keyword:" + keyword);
if (keyword.equals(event.getHostname().toLowerCase())
|| keyword.matches("mc.tregmine.info")) {
Tregmine.LOGGER.warning(player.getName()
+ " keyword :: success");
}
else {
Tregmine.LOGGER.warning(player.getName() + " keyword :: faild");
event.disallow(Result.KICK_BANNED, "Wrong keyword!");
}
}
else {
Tregmine.LOGGER.warning(player.getName() + " keyword :: notset");
}
if (player.getRank() == Rank.GUARDIAN) {
player.setGuardianState(TregminePlayer.GuardianState.QUEUED);
}
}
    // Runs after a successful login: applies invisibility rules, loads the
    // survival inventory, sets game mode by rank, handles flight
    // permissions, promotes settlers with enough play time, shows the MOTD
    // and balance scoreboard, recalculates guardian duty, and wires up
    // mentoring. Order matters: the UNVERIFIED early-return below skips
    // everything from flight checks onward for brand-new accounts.
    @EventHandler
    public void onPlayerJoin(PlayerJoinEvent event)
    {
        event.setJoinMessage(null);
        TregminePlayer player = plugin.getPlayer(event.getPlayer());
        if (player == null) {
            event.getPlayer().kickPlayer("error loading profile!");
            return;
        }
        Rank rank = player.getRank();
        // Handle invisibility, if set
        List<TregminePlayer> players = plugin.getOnlinePlayers();
        if (player.hasFlag(TregminePlayer.Flags.INVISIBLE)) {
            player.sendMessage(ChatColor.YELLOW + "You are now invisible!");
            // Hide the new player from all existing players
            for (TregminePlayer current : players) {
                if (!current.getRank().canVanish()) {
                    current.hidePlayer(player);
                } else {
                    current.showPlayer(player);
                }
            }
        }
        else {
            for (TregminePlayer current : players) {
                current.showPlayer(player);
            }
        }
        player.loadInventory("survival", false);
        // Hide currently invisible players from the player that just signed on
        for (TregminePlayer current : players) {
            if (current.hasFlag(TregminePlayer.Flags.INVISIBLE)) {
                player.hidePlayer(current);
            } else {
                player.showPlayer(current);
            }
            // Ranks that can vanish see everyone regardless.
            if (player.getRank().canVanish()) {
                player.showPlayer(current);
            }
        }
        // Set applicable game mode
        if (rank == Rank.BUILDER) {
            player.setGameMode(GameMode.CREATIVE);
        }
        else if (!rank.canUseCreative()) {
            player.setGameMode(GameMode.SURVIVAL);
        }
        // Try to find a mentor for new players
        if (rank == Rank.UNVERIFIED) {
            return;
        }
        // Check if the player is allowed to fly
        if (player.hasFlag(TregminePlayer.Flags.HARDWARNED) ||
            player.hasFlag(TregminePlayer.Flags.SOFTWARNED)) {
            player.sendMessage("You are warned and are not allowed to fly.");
            player.setAllowFlight(false);
        } else if (rank.canFly()) {
            if (player.hasFlag(TregminePlayer.Flags.FLY_ENABLED)) {
                player.sendMessage("Flying: Allowed and Enabled! Toggle flying with /fly");
                player.setAllowFlight(true);
            } else {
                player.sendMessage("Flying: Allowed but Disabled! Toggle flying with /fly");
                player.setAllowFlight(false);
            }
        } else {
            player.sendMessage("no-z-cheat");
            player.sendMessage("You are NOT allowed to fly");
            player.setAllowFlight(false);
        }
        try (IContext ctx = plugin.createContext()) {
            // Promote settlers after 10h of play time (10 * 3600 —
            // presumably play time is tracked in seconds; TODO confirm).
            if (player.getPlayTime() > 10 * 3600 && rank == Rank.SETTLER) {
                player.setRank(Rank.RESIDENT);
                rank = Rank.RESIDENT;
                IPlayerDAO playerDAO = ctx.getPlayerDAO();
                playerDAO.updatePlayer(player);
                playerDAO.updatePlayerInfo(player);
                player.sendMessage(ChatColor.DARK_GREEN + "Congratulations! " +
                    "You are now a resident on Tregmine!");
            }
            // Load motd
            IMotdDAO motdDAO = ctx.getMotdDAO();
            String message = motdDAO.getMotd();
            if (message != null) {
                String[] lines = message.split("\n");
                for (String line : lines) {
                    player.sendMessage(ChatColor.GOLD + "" + ChatColor.BOLD + line);
                }
            }
        } catch (DAOException e) {
            throw new RuntimeException(e);
        }
        // Show a score board
        if (player.isOnline()) {
            ScoreboardManager manager = Bukkit.getScoreboardManager();
            Scoreboard board = manager.getNewScoreboard();
            Objective objective = board.registerNewObjective("1", "2");
            objective.setDisplaySlot(DisplaySlot.SIDEBAR);
            objective.setDisplayName("" + ChatColor.DARK_RED + ""
                + ChatColor.BOLD + "Welcome to Tregmine!");
            try (IContext ctx = plugin.createContext()) {
                IWalletDAO walletDAO = ctx.getWalletDAO();
                // Get a fake offline player
                // NOTE(review): using an OfflinePlayer whose "name" is the
                // label text is a scoreboard-row trick; confirm it still
                // works on the Bukkit version in use.
                String desc = ChatColor.BLACK + "Your Balance:";
                Score score = objective.getScore(Bukkit.getOfflinePlayer(desc));
                score.setScore((int)walletDAO.balance(player));
            } catch (DAOException e) {
                throw new RuntimeException(e);
            }
            try {
                player.setScoreboard(board);
                ScoreboardClearTask.start(plugin, player);
            } catch (IllegalStateException e) {
                // ignore
            }
        }
        // Recalculate guardians
        activateGuardians();
        if (rank == Rank.TOURIST) {
            // Try to find a mentor for tourists that rejoin
            MentorCommand.findMentor(plugin, player);
        }
        else if (player.canMentor()) {
            Queue<TregminePlayer> students = plugin.getStudentQueue();
            if (students.size() > 0) {
                player.sendMessage(ChatColor.YELLOW + "Mentors are needed! " +
                    "Type /mentor to offer your services!");
            }
        }
        if (player.getKeyword() == null && player.getRank().mustUseKeyword()) {
            player.sendMessage(ChatColor.RED + "You have not set a keyword! DO SO NOW.");
        }
        if (rank == Rank.DONATOR &&
            !player.hasBadge(Badge.PHILANTROPIST)) {
            player.awardBadgeLevel(Badge.PHILANTROPIST,
                "For being a Tregmine donator!");
        }
    }
    // Cleans up when a player disconnects: persists the inventory,
    // broadcasts a custom quit message (suppressing the default one),
    // cancels any in-progress mentoring in both directions, unloads the
    // profile, and recalculates guardian duty.
    @EventHandler
    public void onPlayerQuit(PlayerQuitEvent event)
    {
        TregminePlayer player = plugin.getPlayer(event.getPlayer());
        if (player == null) {
            Tregmine.LOGGER.info(event.getPlayer().getName() + " was not found " +
                "in players map when quitting.");
            return;
        }
        player.saveInventory(player.getCurrentInventory());
        event.setQuitMessage(null);
        if (!player.isOp()) {
            String message = null;
            if (player.getQuitMessage() != null) {
                message = player.getChatName() + " quit: " + ChatColor.YELLOW + player.getQuitMessage();
            } else {
                // Pick a random canned quit message; a fresh Random per
                // quit is cheap enough at this event rate.
                Random rand = new Random();
                int msgIndex = rand.nextInt(plugin.getQuitMessages().size());
                message = String.format(plugin.getQuitMessages().get(msgIndex), player.getChatName());
            }
            plugin.getServer().broadcastMessage(message);
        }
        // Look if there are any students being mentored by the exiting player
        if (player.getStudent() != null) {
            TregminePlayer student = player.getStudent();
            try (IContext ctx = plugin.createContext()) {
                IMentorLogDAO mentorLogDAO = ctx.getMentorLogDAO();
                int mentorLogId = mentorLogDAO.getMentorLogId(student, player);
                mentorLogDAO.updateMentorLogEvent(mentorLogId,
                    IMentorLogDAO.MentoringEvent.CANCELLED);
            } catch (DAOException e) {
                throw new RuntimeException(e);
            }
            student.setMentor(null);
            player.setStudent(null);
            student.sendMessage(ChatColor.RED + "Your mentor left. We'll try " +
                "to find a new one for you as quickly as possible.");
            MentorCommand.findMentor(plugin, student);
        }
        // ... or whether the exiting player was himself being mentored.
        else if (player.getMentor() != null) {
            TregminePlayer mentor = player.getMentor();
            try (IContext ctx = plugin.createContext()) {
                IMentorLogDAO mentorLogDAO = ctx.getMentorLogDAO();
                int mentorLogId = mentorLogDAO.getMentorLogId(player, mentor);
                mentorLogDAO.updateMentorLogEvent(mentorLogId,
                    IMentorLogDAO.MentoringEvent.CANCELLED);
            } catch (DAOException e) {
                throw new RuntimeException(e);
            }
            mentor.setStudent(null);
            player.setMentor(null);
            mentor.sendMessage(ChatColor.RED + "Your student left. :(");
        }
        plugin.removePlayer(player);
        Tregmine.LOGGER.info("Unloaded settings for " + player.getName() + ".");
        activateGuardians();
    }
@EventHandler
public void onPlayerMove(PlayerMoveEvent event)
{
TregminePlayer player = this.plugin.getPlayer(event.getPlayer());
if (player == null) {
event.getPlayer().kickPlayer("error loading profile!");
}
}
@EventHandler
public void onPlayerBlockMove(PlayerMoveBlockEvent event)
{
TregminePlayer player = event.getPlayer();
// To add player.hasBadge for a flight badge when made
if (player.getRank().canFly() && player.isFlying() && player.isSprinting()) {
player.setFlySpeed(0.7f); // To be balanced
} else {
player.setFlySpeed(0.1f); // 0.1 is default
}
}
/*@EventHandler
public void onDeath(PlayerDeathEvent event)
{
TregminePlayer player = plugin.getPlayer(event.getEntity());
try(IContext ctx = plugin.createContext()){
IWalletDAO dao = ctx.getWalletDAO();
dao.take(player, MathUtil.percentOf(dao.balance(player), 5));
} catch (DAOException e) {
e.printStackTrace();
}
}*/
    // Vets every flight toggle: zone admins bypass all checks; everyone
    // else must have a fly-capable rank, not be warned, and stand in a lot
    // flagged FLIGHT_ALLOWED. In the ruleless world flight is forcibly
    // disabled for non-bypass, non-creative players.
    @EventHandler
    public void onPlayerFlight(PlayerToggleFlightEvent event)
    {
        TregminePlayer player = plugin.getPlayer(event.getPlayer());
        if (player.getRank().canModifyZones()) {
            return;
        }
        if (!player.getRank().canFly()) {
            event.setCancelled(true);
        }
        if (player.hasFlag(TregminePlayer.Flags.HARDWARNED) ||
            player.hasFlag(TregminePlayer.Flags.SOFTWARNED)) {
            event.setCancelled(true);
        }
        Location loc = player.getLocation();
        ZoneWorld world = plugin.getWorld(loc.getWorld());
        Lot lot = world.findLot(new Point(loc.getBlockX(), loc.getBlockZ()));
        // Outside any lot: the rank/warn cancellations above still apply.
        if (lot == null) {
            return;
        }
        if (!lot.hasFlag(Lot.Flags.FLIGHT_ALLOWED)) {
            event.setCancelled(true);
        }
        if (loc.getWorld().getName().equalsIgnoreCase(plugin.getRulelessWorld().getName()) &&
            (!player.getRank().canBypassWorld() && player.getGameMode() != GameMode.CREATIVE)) {
            player.setAllowFlight(false);
            player.setFlying(false);
        }
    }
    // Handles item pickups: creative players and ranks without pickup
    // rights are blocked; for logged ranks, a pickup of an item dropped by
    // a DIFFERENT player is recorded in the give log and both parties are
    // notified. The droppedItems entry is cleared either way.
    @EventHandler
    public void onPlayerPickupItem(PlayerPickupItemEvent event)
    {
        TregminePlayer player = this.plugin.getPlayer(event.getPlayer());
        if (player.getGameMode() == GameMode.CREATIVE) {
            event.setCancelled(true);
            return;
        }
        // Ranks whose pickups are not logged skip the give-log machinery.
        if (!player.getRank().arePickupsLogged()) {
            return;
        }
        if (!player.getRank().canPickup()) {
            event.setCancelled(true);
            return;
        }
        try (IContext ctx = plugin.createContext()) {
            Item item = event.getItem();
            TregminePlayer droppedBy = droppedItems.get(item);
            // Only cross-player transfers are logged; picking up your own
            // drop is not a "give".
            if (droppedBy != null && droppedBy.getId() != player.getId()) {
                ItemStack stack = item.getItemStack();
                ILogDAO logDAO = ctx.getLogDAO();
                logDAO.insertGiveLog(droppedBy, player, stack);
                player.sendMessage(ChatColor.YELLOW + "You got " +
                    stack.getAmount() + " " + stack.getType() + " from " +
                    droppedBy.getName() + ".");
                if (droppedBy.isOnline()) {
                    droppedBy.sendMessage(ChatColor.YELLOW + "You gave " +
                        stack.getAmount() + " " + stack.getType() + " to " +
                        player.getName() + ".");
                }
            }
            droppedItems.remove(item);
        } catch (DAOException e) {
            throw new RuntimeException(e);
        }
    }
@EventHandler
public void onPlayerDropItem(PlayerDropItemEvent event)
{
TregminePlayer player = this.plugin.getPlayer(event.getPlayer());
if (player.getGameMode() == GameMode.CREATIVE) {
event.setCancelled(true);
return;
}
if (!player.getRank().arePickupsLogged()) {
return;
}
if (!player.getRank().canPickup()) {
event.setCancelled(true);
return;
}
Item item = event.getItemDrop();
droppedItems.put(item, player);
}
    // Suppress the default leave broadcast for kicks; departure messages
    // are handled by onPlayerQuit instead.
    @EventHandler
    public void onPlayerKick(PlayerKickEvent event)
    {
        event.setLeaveMessage(null);
    }
@EventHandler
public void onNameTag(PlayerReceiveNameTagEvent event)
{
TregminePlayer player = plugin.getPlayer(event.getPlayer());
if (player == null) {
return;
}
event.setTag(player.getChatName());
}
    // Recomputes which online guardians are "on active duty". The ideal
    // head-count scales as ceil(sqrt(online players) / 2). Preference
    // order when filling the roster: currently active, then queued, then
    // (only if still short) self-declared inactive guardians, lowest
    // guardian rank first. The roster is broadcast only when it actually
    // changed, and every affected guardian is told about their new state.
    private void activateGuardians()
    {
        // Identify all guardians and categorize them based on their current
        // state
        Player[] players = plugin.getServer().getOnlinePlayers();
        // NOTE(review): `guardians` is populated but never read below.
        Set<TregminePlayer> guardians = new HashSet<TregminePlayer>();
        List<TregminePlayer> activeGuardians = new ArrayList<TregminePlayer>();
        List<TregminePlayer> inactiveGuardians =
            new ArrayList<TregminePlayer>();
        List<TregminePlayer> queuedGuardians = new ArrayList<TregminePlayer>();
        for (Player srvPlayer : players) {
            TregminePlayer guardian = plugin.getPlayer(srvPlayer.getName());
            if (guardian == null || guardian.getRank() != Rank.GUARDIAN) {
                continue;
            }
            TregminePlayer.GuardianState state = guardian.getGuardianState();
            if (state == null) {
                state = TregminePlayer.GuardianState.QUEUED;
            }
            switch (state) {
            case ACTIVE:
                activeGuardians.add(guardian);
                break;
            case INACTIVE:
                inactiveGuardians.add(guardian);
                break;
            case QUEUED:
                queuedGuardians.add(guardian);
                break;
            }
            // Reset everyone to QUEUED; the chosen set is re-promoted to
            // ACTIVE further down.
            guardian.setGuardianState(TregminePlayer.GuardianState.QUEUED);
            guardians.add(guardian);
        }
        // Inactive guardians are sorted in reverse so that subList(0, diff)
        // below picks the lowest-ranked ones first.
        Collections.sort(activeGuardians, new RankComparator());
        Collections.sort(inactiveGuardians, new RankComparator(true));
        Collections.sort(queuedGuardians, new RankComparator());
        int idealCount = (int) Math.ceil(Math.sqrt(players.length) / 2);
        // There are not enough guardians active, we need to activate a few more
        if (activeGuardians.size() <= idealCount) {
            // Make a pool of every "willing" guardian currently online
            List<TregminePlayer> activationList =
                new ArrayList<TregminePlayer>();
            activationList.addAll(activeGuardians);
            activationList.addAll(queuedGuardians);
            // If the pool isn't large enough to satisfy demand, we add the
            // guardians
            // that have made themselves inactive as well.
            if (activationList.size() < idealCount) {
                int diff = idealCount - activationList.size();
                // If there aren't enough of these to satisfy demand, we add all
                // of them
                if (diff >= inactiveGuardians.size()) {
                    activationList.addAll(inactiveGuardians);
                }
                // Otherwise we just add the lowest ranked of the inactive
                else {
                    activationList.addAll(inactiveGuardians.subList(0, diff));
                }
            }
            // If there are more than necessarry guardians online, only activate
            // the most highly ranked.
            Set<TregminePlayer> activationSet;
            if (activationList.size() > idealCount) {
                Collections.sort(activationList, new RankComparator());
                activationSet =
                    new HashSet<TregminePlayer>(activationList.subList(0,
                        idealCount));
            }
            else {
                activationSet = new HashSet<TregminePlayer>(activationList);
            }
            // Perform activation
            StringBuffer globalMessage = new StringBuffer();
            String delim = "";
            for (TregminePlayer guardian : activationSet) {
                guardian.setGuardianState(TregminePlayer.GuardianState.ACTIVE);
                globalMessage.append(delim);
                globalMessage.append(guardian.getName());
                delim = ", ";
            }
            // Broadcast and notify only if the active set actually changed.
            Set<TregminePlayer> oldActiveGuardians =
                new HashSet<TregminePlayer>(activeGuardians);
            if (!activationSet.containsAll(oldActiveGuardians)
                || activationSet.size() != oldActiveGuardians.size()) {
                plugin.getServer()
                    .broadcastMessage(
                        ChatColor.BLUE
                            + "Active guardians are: "
                            + globalMessage
                            + ". Please contact any of them if you need help.");
                // Notify previously active guardian of their state change
                for (TregminePlayer guardian : activeGuardians) {
                    if (!activationSet.contains(guardian)) {
                        guardian.sendMessage(ChatColor.BLUE
                            + "You are no longer on active duty, and should not respond to help requests, unless asked by an admin or active guardian.");
                    }
                }
                // Notify previously inactive guardians of their state change
                for (TregminePlayer guardian : inactiveGuardians) {
                    if (activationSet.contains(guardian)) {
                        guardian.sendMessage(ChatColor.BLUE
                            + "You have been restored to active duty and should respond to help requests.");
                    }
                }
                // Notify previously queued guardians of their state change
                for (TregminePlayer guardian : queuedGuardians) {
                    if (activationSet.contains(guardian)) {
                        guardian.sendMessage(ChatColor.BLUE
                            + "You are now on active duty and should respond to help requests.");
                    }
                }
            }
        }
    }
}
| src/info/tregmine/listeners/TregminePlayerListener.java | package info.tregmine.listeners;
import java.util.*;
import java.util.Map.Entry;
import org.bukkit.*;
import org.bukkit.block.*;
import org.bukkit.entity.*;
import org.bukkit.event.*;
import org.bukkit.event.block.Action;
import org.bukkit.event.inventory.InventoryCloseEvent;
import org.bukkit.event.player.*;
import org.bukkit.event.player.PlayerLoginEvent.Result;
import org.bukkit.inventory.ItemStack;
import org.bukkit.inventory.meta.ItemMeta;
import org.bukkit.scoreboard.*;
import org.kitteh.tag.PlayerReceiveNameTagEvent;
import info.tregmine.Tregmine;
import info.tregmine.api.*;
import info.tregmine.api.lore.Created;
import info.tregmine.api.util.ScoreboardClearTask;
import info.tregmine.commands.MentorCommand;
import info.tregmine.database.*;
import info.tregmine.events.PlayerMoveBlockEvent;
import info.tregmine.quadtree.Point;
import info.tregmine.zones.*;
public class TregminePlayerListener implements Listener
{
private static class RankComparator implements Comparator<TregminePlayer>
{
private int order;
public RankComparator()
{
this.order = 1;
}
public RankComparator(boolean reverseOrder)
{
this.order = reverseOrder ? -1 : 1;
}
@Override
public int compare(TregminePlayer a, TregminePlayer b)
{
return order * (a.getGuardianRank() - b.getGuardianRank());
}
}
    private Tregmine plugin;
    // Items recently dropped by players, keyed by item entity, so that a
    // pickup by a different player can be logged as a "give".
    private Map<Item, TregminePlayer> droppedItems;

    // Wires the listener to the owning plugin instance.
    public TregminePlayerListener(Tregmine instance)
    {
        this.plugin = instance;
        droppedItems = new HashMap<Item, TregminePlayer>();
    }
@EventHandler
public void onPlayerClick(PlayerInteractEvent event)
{
if (event.getAction() != Action.RIGHT_CLICK_BLOCK) {
return;
}
Player player = event.getPlayer();
BlockState block = event.getClickedBlock().getState();
if (block instanceof Skull) {
Skull skull = (Skull) block;
if (!skull.getSkullType().equals(SkullType.PLAYER)) {
return;
}
String owner = skull.getOwner();
TregminePlayer skullowner = plugin.getPlayerOffline(owner);
if (skullowner != null){
ChatColor C = skullowner.getNameColor();
player.sendMessage(ChatColor.AQUA + "This is " + C + owner + "'s " + ChatColor.AQUA + "head!");
}else{
player.sendMessage(ChatColor.AQUA + "This is " + ChatColor.WHITE + owner + ChatColor.AQUA + "'s head!");
}
}
}
    // On inventory close: for creative players, stamp every item with lore
    // marking it as creative-made (creator name plus an obfuscated zero
    // value) so creative items are distinguishable from survival loot;
    // then persist the player's current inventory if one is loaded.
    @EventHandler
    public void onPlayerItemHeld(InventoryCloseEvent event)
    {
        // InventoryCloseEvent exposes a HumanEntity; in practice this
        // handler assumes it is a Player — TODO confirm no non-player
        // entities reach this listener.
        Player player = (Player) event.getPlayer();
        if (player.getGameMode() == GameMode.CREATIVE) {
            for (ItemStack item : player.getInventory().getContents()) {
                if (item != null) {
                    ItemMeta meta = item.getItemMeta();
                    List<String> lore = new ArrayList<String>();
                    lore.add(Created.CREATIVE.toColorString());
                    TregminePlayer p = this.plugin.getPlayer(player);
                    lore.add(ChatColor.WHITE + "by: " + p.getChatName());
                    // ChatColor.MAGIC renders "0000" obfuscated.
                    lore.add(ChatColor.WHITE + "Value: " + ChatColor.MAGIC
                        + "0000" + ChatColor.RESET + ChatColor.WHITE
                        + " Treg");
                    meta.setLore(lore);
                    item.setItemMeta(meta);
                }
            }
        }
        TregminePlayer p = plugin.getPlayer(player);
        if (p.getCurrentInventory() != null) {
            p.saveInventory(p.getCurrentInventory());
        }
    }
@EventHandler
public void onPlayerRespawnSave(PlayerRespawnEvent event)
{
TregminePlayer p = plugin.getPlayer(event.getPlayer());
p.saveInventory(p.getCurrentInventory());
}
    // Debug tool: right-clicking a block while holding a BOOK prints the
    // block's type, type id, data value, coordinates and biome to the
    // player, and logs the position server-side.
    @EventHandler
    public void onPlayerInteract(PlayerInteractEvent event)
    {
        if (event.getAction() == Action.RIGHT_CLICK_BLOCK) {
            Player player = event.getPlayer();
            Block block = event.getClickedBlock();
            Location loc = block.getLocation();
            if (player.getItemInHand().getType() == Material.BOOK) {
                player.sendMessage(ChatColor.DARK_AQUA + "Type: "
                    + ChatColor.AQUA + block.getType().toString() + " ("
                    + ChatColor.BLUE + block.getType().getId()
                    + ChatColor.DARK_AQUA + ")");
                player.sendMessage(ChatColor.DARK_AQUA + "Data: "
                    + ChatColor.AQUA + (int) block.getData());
                player.sendMessage(ChatColor.RED + "X" + ChatColor.WHITE + ", "
                    + ChatColor.GREEN + "Y" + ChatColor.WHITE + ", "
                    + ChatColor.BLUE + "Z" + ChatColor.WHITE + ": "
                    + ChatColor.RED + loc.getBlockX() + ChatColor.WHITE
                    + ", " + ChatColor.GREEN + loc.getBlockY()
                    + ChatColor.WHITE + ", " + ChatColor.BLUE
                    + loc.getBlockZ());
                // getBiome() is wrapped defensively; on failure the biome
                // is reported as NULL instead of aborting the readout.
                try {
                    player.sendMessage(ChatColor.DARK_AQUA + "Biome: "
                        + ChatColor.AQUA + block.getBiome().toString());
                } catch (Exception e) {
                    player.sendMessage(ChatColor.DARK_AQUA + "Biome: "
                        + ChatColor.AQUA + "NULL");
                }
                Tregmine.LOGGER.info("POS: " + loc.getBlockX() + ", "
                    + loc.getBlockY() + ", " + loc.getBlockZ());
            }
        }
    }
@EventHandler
public void onPreCommand(PlayerCommandPreprocessEvent event)
{
// Tregmine.LOGGER.info("COMMAND: " + event.getPlayer().getName() + "::"
// + event.getMessage());
}
@EventHandler
public void onPlayerLogin(PlayerLoginEvent event)
{
TregminePlayer player;
try {
player = plugin.addPlayer(event.getPlayer(), event.getAddress());
if (player == null) {
event.disallow(Result.KICK_OTHER, "Something went wrong");
return;
}
}
catch (PlayerBannedException e) {
event.disallow(Result.KICK_BANNED, e.getMessage());
return;
}
if (player.getRank() == Rank.UNVERIFIED) {
player.setChatState(TregminePlayer.ChatState.SETUP);
}
if (player.getLocation().getWorld().getName().matches("world_the_end")) {
player.teleport(this.plugin.getServer().getWorld("world")
.getSpawnLocation());
}
if (player.getKeyword() != null) {
String keyword =
player.getKeyword()
+ ".mc.tregmine.info:25565".toLowerCase();
Tregmine.LOGGER.warning("host: " + event.getHostname());
Tregmine.LOGGER.warning("keyword:" + keyword);
if (keyword.equals(event.getHostname().toLowerCase())
|| keyword.matches("mc.tregmine.info")) {
Tregmine.LOGGER.warning(player.getName()
+ " keyword :: success");
}
else {
Tregmine.LOGGER.warning(player.getName() + " keyword :: faild");
event.disallow(Result.KICK_BANNED, "Wrong keyword!");
}
}
else {
Tregmine.LOGGER.warning(player.getName() + " keyword :: notset");
}
if (player.getRank() == Rank.GUARDIAN) {
player.setGuardianState(TregminePlayer.GuardianState.QUEUED);
}
}
@EventHandler
public void onPlayerJoin(PlayerJoinEvent event)
{
event.setJoinMessage(null);
TregminePlayer player = plugin.getPlayer(event.getPlayer());
if (player == null) {
event.getPlayer().kickPlayer("error loading profile!");
return;
}
Rank rank = player.getRank();
// Handle invisibility, if set
List<TregminePlayer> players = plugin.getOnlinePlayers();
if (player.hasFlag(TregminePlayer.Flags.INVISIBLE)) {
player.sendMessage(ChatColor.YELLOW + "You are now invisible!");
// Hide the new player from all existing players
for (TregminePlayer current : players) {
if (!current.getRank().canVanish()) {
current.hidePlayer(player);
} else {
current.showPlayer(player);
}
}
}
else {
for (TregminePlayer current : players) {
current.showPlayer(player);
}
}
player.loadInventory("survival", false);
// Hide currently invisible players from the player that just signed on
for (TregminePlayer current : players) {
if (current.hasFlag(TregminePlayer.Flags.INVISIBLE)) {
player.hidePlayer(current);
} else {
player.showPlayer(current);
}
if (player.getRank().canVanish()) {
player.showPlayer(current);
}
}
// Set applicable game mode
if (rank == Rank.BUILDER) {
player.setGameMode(GameMode.CREATIVE);
}
else if (!rank.canUseCreative()) {
player.setGameMode(GameMode.SURVIVAL);
}
// Try to find a mentor for new players
if (rank == Rank.UNVERIFIED) {
return;
}
// Check if the player is allowed to fly
if (player.hasFlag(TregminePlayer.Flags.HARDWARNED) ||
player.hasFlag(TregminePlayer.Flags.SOFTWARNED)) {
player.sendMessage("You are warned and are not allowed to fly.");
player.setAllowFlight(false);
} else if (rank.canFly()) {
if (player.hasFlag(TregminePlayer.Flags.FLY_ENABLED)) {
player.sendMessage("Flying: Allowed and Enabled! Toggle flying with /fly");
player.setAllowFlight(true);
} else {
player.sendMessage("Flying: Allowed but Disabled! Toggle flying with /fly");
player.setAllowFlight(false);
}
} else {
player.sendMessage("no-z-cheat");
player.sendMessage("You are NOT allowed to fly");
player.setAllowFlight(false);
}
try (IContext ctx = plugin.createContext()) {
if (player.getPlayTime() > 10 * 3600 && rank == Rank.SETTLER) {
player.setRank(Rank.RESIDENT);
rank = Rank.RESIDENT;
IPlayerDAO playerDAO = ctx.getPlayerDAO();
playerDAO.updatePlayer(player);
playerDAO.updatePlayerInfo(player);
player.sendMessage(ChatColor.DARK_GREEN + "Congratulations! " +
"You are now a resident on Tregmine!");
}
// Load motd
IMotdDAO motdDAO = ctx.getMotdDAO();
String message = motdDAO.getMotd();
if (message != null) {
String[] lines = message.split("\n");
for (String line : lines) {
player.sendMessage(ChatColor.GOLD + "" + ChatColor.BOLD + line);
}
}
} catch (DAOException e) {
throw new RuntimeException(e);
}
// Show a score board
if (player.isOnline()) {
ScoreboardManager manager = Bukkit.getScoreboardManager();
Scoreboard board = manager.getNewScoreboard();
Objective objective = board.registerNewObjective("1", "2");
objective.setDisplaySlot(DisplaySlot.SIDEBAR);
objective.setDisplayName("" + ChatColor.DARK_RED + ""
+ ChatColor.BOLD + "Welcome to Tregmine!");
try (IContext ctx = plugin.createContext()) {
IWalletDAO walletDAO = ctx.getWalletDAO();
// Get a fake offline player
String desc = ChatColor.BLACK + "Your Balance:";
Score score = objective.getScore(Bukkit.getOfflinePlayer(desc));
score.setScore((int)walletDAO.balance(player));
} catch (DAOException e) {
throw new RuntimeException(e);
}
try {
player.setScoreboard(board);
ScoreboardClearTask.start(plugin, player);
} catch (IllegalStateException e) {
// ignore
}
}
// Recalculate guardians
activateGuardians();
if (rank == Rank.TOURIST) {
// Try to find a mentor for tourists that rejoin
MentorCommand.findMentor(plugin, player);
}
else if (player.canMentor()) {
Queue<TregminePlayer> students = plugin.getStudentQueue();
if (students.size() > 0) {
player.sendMessage(ChatColor.YELLOW + "Mentors are needed! " +
"Type /mentor to offer your services!");
}
}
if (player.getKeyword() == null && player.getRank().mustUseKeyword()) {
player.sendMessage(ChatColor.RED + "You have not set a keyword! DO SO NOW.");
}
if (rank == Rank.DONATOR &&
!player.hasBadge(Badge.PHILANTROPIST)) {
player.awardBadgeLevel(Badge.PHILANTROPIST,
"For being a Tregmine donator!");
}
}
@EventHandler
public void onPlayerQuit(PlayerQuitEvent event)
{
TregminePlayer player = plugin.getPlayer(event.getPlayer());
if (player == null) {
Tregmine.LOGGER.info(event.getPlayer().getName() + " was not found " +
"in players map when quitting.");
return;
}
player.saveInventory(player.getCurrentInventory());
event.setQuitMessage(null);
if (!player.isOp()) {
String message = null;
if (player.getQuitMessage() != null) {
message = player.getChatName() + " quit: " + ChatColor.YELLOW + player.getQuitMessage();
} else {
Random rand = new Random();
int msgIndex = rand.nextInt(plugin.getQuitMessages().size());
message = String.format(plugin.getQuitMessages().get(msgIndex), player.getChatName());
}
plugin.getServer().broadcastMessage(message);
}
// Look if there are any students being mentored by the exiting player
if (player.getStudent() != null) {
TregminePlayer student = player.getStudent();
try (IContext ctx = plugin.createContext()) {
IMentorLogDAO mentorLogDAO = ctx.getMentorLogDAO();
int mentorLogId = mentorLogDAO.getMentorLogId(student, player);
mentorLogDAO.updateMentorLogEvent(mentorLogId,
IMentorLogDAO.MentoringEvent.CANCELLED);
} catch (DAOException e) {
throw new RuntimeException(e);
}
student.setMentor(null);
player.setStudent(null);
student.sendMessage(ChatColor.RED + "Your mentor left. We'll try " +
"to find a new one for you as quickly as possible.");
MentorCommand.findMentor(plugin, student);
}
else if (player.getMentor() != null) {
TregminePlayer mentor = player.getMentor();
try (IContext ctx = plugin.createContext()) {
IMentorLogDAO mentorLogDAO = ctx.getMentorLogDAO();
int mentorLogId = mentorLogDAO.getMentorLogId(player, mentor);
mentorLogDAO.updateMentorLogEvent(mentorLogId,
IMentorLogDAO.MentoringEvent.CANCELLED);
} catch (DAOException e) {
throw new RuntimeException(e);
}
mentor.setStudent(null);
player.setMentor(null);
mentor.sendMessage(ChatColor.RED + "Your student left. :(");
}
plugin.removePlayer(player);
Tregmine.LOGGER.info("Unloaded settings for " + player.getName() + ".");
activateGuardians();
}
@EventHandler
public void onPlayerMove(PlayerMoveEvent event)
{
TregminePlayer player = this.plugin.getPlayer(event.getPlayer());
if (player == null) {
event.getPlayer().kickPlayer("error loading profile!");
}
}
@EventHandler
public void onPlayerBlockMove(PlayerMoveBlockEvent event)
{
TregminePlayer player = event.getPlayer();
// To add player.hasBadge for a flight badge when made
if (player.getRank().canFly() && player.isFlying() && player.isSprinting()) {
player.setFlySpeed(0.7f); // To be balanced
} else {
player.setFlySpeed(0.1f); // 0.1 is default
}
if (player.getGameMode() == GameMode.CREATIVE) {
return;
}
double pickupDistance = player.getRank().getPickupDistance();
List<Entity> entities = player.getNearbyEntities(pickupDistance, pickupDistance, pickupDistance);
for (Entity entity : entities) {
if (entity instanceof Item) {
Item item = (Item) entity;
if (item.getTicksLived() < item.getPickupDelay()) {
return;
}
HashMap<Integer, ItemStack> remaining = player.getInventory().addItem(item.getItemStack());
if (remaining.size() > 0) {
for (Entry<Integer, ItemStack> entry : remaining.entrySet()) {
item.setItemStack(entry.getValue());
}
} else {
item.remove();
}
}
}
}
/*@EventHandler
public void onDeath(PlayerDeathEvent event)
{
TregminePlayer player = plugin.getPlayer(event.getEntity());
try(IContext ctx = plugin.createContext()){
IWalletDAO dao = ctx.getWalletDAO();
dao.take(player, MathUtil.percentOf(dao.balance(player), 5));
} catch (DAOException e) {
e.printStackTrace();
}
}*/
@EventHandler
public void onPlayerFlight(PlayerToggleFlightEvent event)
{
TregminePlayer player = plugin.getPlayer(event.getPlayer());
if (player.getRank().canModifyZones()) {
return;
}
if (!player.getRank().canFly()) {
event.setCancelled(true);
}
if (player.hasFlag(TregminePlayer.Flags.HARDWARNED) ||
player.hasFlag(TregminePlayer.Flags.SOFTWARNED)) {
event.setCancelled(true);
}
Location loc = player.getLocation();
ZoneWorld world = plugin.getWorld(loc.getWorld());
Lot lot = world.findLot(new Point(loc.getBlockX(), loc.getBlockZ()));
if (lot == null) {
return;
}
if (!lot.hasFlag(Lot.Flags.FLIGHT_ALLOWED)) {
event.setCancelled(true);
}
if (loc.getWorld().getName().equalsIgnoreCase(plugin.getRulelessWorld().getName()) &&
(!player.getRank().canBypassWorld() && player.getGameMode() != GameMode.CREATIVE)) {
player.setAllowFlight(false);
player.setFlying(false);
}
}
@EventHandler
public void onPlayerPickupItem(PlayerPickupItemEvent event)
{
TregminePlayer player = this.plugin.getPlayer(event.getPlayer());
if (player.getGameMode() == GameMode.CREATIVE) {
event.setCancelled(true);
return;
}
if (!player.getRank().arePickupsLogged()) {
return;
}
if (!player.getRank().canPickup()) {
event.setCancelled(true);
return;
}
try (IContext ctx = plugin.createContext()) {
Item item = event.getItem();
TregminePlayer droppedBy = droppedItems.get(item);
if (droppedBy != null && droppedBy.getId() != player.getId()) {
ItemStack stack = item.getItemStack();
ILogDAO logDAO = ctx.getLogDAO();
logDAO.insertGiveLog(droppedBy, player, stack);
player.sendMessage(ChatColor.YELLOW + "You got " +
stack.getAmount() + " " + stack.getType() + " from " +
droppedBy.getName() + ".");
if (droppedBy.isOnline()) {
droppedBy.sendMessage(ChatColor.YELLOW + "You gave " +
stack.getAmount() + " " + stack.getType() + " to " +
player.getName() + ".");
}
}
droppedItems.remove(item);
} catch (DAOException e) {
throw new RuntimeException(e);
}
}
@EventHandler
public void onPlayerDropItem(PlayerDropItemEvent event)
{
TregminePlayer player = this.plugin.getPlayer(event.getPlayer());
if (player.getGameMode() == GameMode.CREATIVE) {
event.setCancelled(true);
return;
}
if (!player.getRank().arePickupsLogged()) {
return;
}
if (!player.getRank().canPickup()) {
event.setCancelled(true);
return;
}
Item item = event.getItemDrop();
droppedItems.put(item, player);
}
@EventHandler
public void onPlayerKick(PlayerKickEvent event)
{
event.setLeaveMessage(null);
}
@EventHandler
public void onNameTag(PlayerReceiveNameTagEvent event)
{
TregminePlayer player = plugin.getPlayer(event.getPlayer());
if (player == null) {
return;
}
event.setTag(player.getChatName());
}
private void activateGuardians()
{
// Identify all guardians and categorize them based on their current
// state
Player[] players = plugin.getServer().getOnlinePlayers();
Set<TregminePlayer> guardians = new HashSet<TregminePlayer>();
List<TregminePlayer> activeGuardians = new ArrayList<TregminePlayer>();
List<TregminePlayer> inactiveGuardians =
new ArrayList<TregminePlayer>();
List<TregminePlayer> queuedGuardians = new ArrayList<TregminePlayer>();
for (Player srvPlayer : players) {
TregminePlayer guardian = plugin.getPlayer(srvPlayer.getName());
if (guardian == null || guardian.getRank() != Rank.GUARDIAN) {
continue;
}
TregminePlayer.GuardianState state = guardian.getGuardianState();
if (state == null) {
state = TregminePlayer.GuardianState.QUEUED;
}
switch (state) {
case ACTIVE:
activeGuardians.add(guardian);
break;
case INACTIVE:
inactiveGuardians.add(guardian);
break;
case QUEUED:
queuedGuardians.add(guardian);
break;
}
guardian.setGuardianState(TregminePlayer.GuardianState.QUEUED);
guardians.add(guardian);
}
Collections.sort(activeGuardians, new RankComparator());
Collections.sort(inactiveGuardians, new RankComparator(true));
Collections.sort(queuedGuardians, new RankComparator());
int idealCount = (int) Math.ceil(Math.sqrt(players.length) / 2);
// There are not enough guardians active, we need to activate a few more
if (activeGuardians.size() <= idealCount) {
// Make a pool of every "willing" guardian currently online
List<TregminePlayer> activationList =
new ArrayList<TregminePlayer>();
activationList.addAll(activeGuardians);
activationList.addAll(queuedGuardians);
// If the pool isn't large enough to satisfy demand, we add the
// guardians
// that have made themselves inactive as well.
if (activationList.size() < idealCount) {
int diff = idealCount - activationList.size();
// If there aren't enough of these to satisfy demand, we add all
// of them
if (diff >= inactiveGuardians.size()) {
activationList.addAll(inactiveGuardians);
}
// Otherwise we just add the lowest ranked of the inactive
else {
activationList.addAll(inactiveGuardians.subList(0, diff));
}
}
// If there are more than necessarry guardians online, only activate
// the most highly ranked.
Set<TregminePlayer> activationSet;
if (activationList.size() > idealCount) {
Collections.sort(activationList, new RankComparator());
activationSet =
new HashSet<TregminePlayer>(activationList.subList(0,
idealCount));
}
else {
activationSet = new HashSet<TregminePlayer>(activationList);
}
// Perform activation
StringBuffer globalMessage = new StringBuffer();
String delim = "";
for (TregminePlayer guardian : activationSet) {
guardian.setGuardianState(TregminePlayer.GuardianState.ACTIVE);
globalMessage.append(delim);
globalMessage.append(guardian.getName());
delim = ", ";
}
Set<TregminePlayer> oldActiveGuardians =
new HashSet<TregminePlayer>(activeGuardians);
if (!activationSet.containsAll(oldActiveGuardians)
|| activationSet.size() != oldActiveGuardians.size()) {
plugin.getServer()
.broadcastMessage(
ChatColor.BLUE
+ "Active guardians are: "
+ globalMessage
+ ". Please contact any of them if you need help.");
// Notify previously active guardian of their state change
for (TregminePlayer guardian : activeGuardians) {
if (!activationSet.contains(guardian)) {
guardian.sendMessage(ChatColor.BLUE
+ "You are no longer on active duty, and should not respond to help requests, unless asked by an admin or active guardian.");
}
}
// Notify previously inactive guardians of their state change
for (TregminePlayer guardian : inactiveGuardians) {
if (activationSet.contains(guardian)) {
guardian.sendMessage(ChatColor.BLUE
+ "You have been restored to active duty and should respond to help requests.");
}
}
// Notify previously queued guardians of their state change
for (TregminePlayer guardian : queuedGuardians) {
if (activationSet.contains(guardian)) {
guardian.sendMessage(ChatColor.BLUE
+ "You are now on active duty and should respond to help requests.");
}
}
}
}
}
}
| Fixes dupe | src/info/tregmine/listeners/TregminePlayerListener.java | Fixes dupe | <ide><path>rc/info/tregmine/listeners/TregminePlayerListener.java
<ide> } else {
<ide> player.setFlySpeed(0.1f); // 0.1 is default
<ide> }
<del>
<del> if (player.getGameMode() == GameMode.CREATIVE) {
<del> return;
<del> }
<del>
<del> double pickupDistance = player.getRank().getPickupDistance();
<del> List<Entity> entities = player.getNearbyEntities(pickupDistance, pickupDistance, pickupDistance);
<del>
<del> for (Entity entity : entities) {
<del> if (entity instanceof Item) {
<del> Item item = (Item) entity;
<del>
<del> if (item.getTicksLived() < item.getPickupDelay()) {
<del> return;
<del> }
<del>
<del> HashMap<Integer, ItemStack> remaining = player.getInventory().addItem(item.getItemStack());
<del>
<del> if (remaining.size() > 0) {
<del> for (Entry<Integer, ItemStack> entry : remaining.entrySet()) {
<del> item.setItemStack(entry.getValue());
<del> }
<del> } else {
<del> item.remove();
<del> }
<del> }
<del> }
<ide> }
<ide>
<ide> /*@EventHandler |
|
Java | apache-2.0 | error: pathspec 'pop3/src/main/java/org/apache/james/protocols/POP3Session.java' did not match any file(s) known to git
| ec4a53eeaa4ad077adfbdb5c853f2309cfa83bab | 1 | aduprat/james-protocols,linagora/james-protocols,mbaechler/james-protocols,tools4origins/james-protocols | /****************************************************************
* Licensed to the Apache Software Foundation (ASF) under one *
* or more contributor license agreements. See the NOTICE file *
* distributed with this work for additional information *
* regarding copyright ownership. The ASF licenses this file *
* to you under the Apache License, Version 2.0 (the *
* "License"); you may not use this file except in compliance *
* with the License. You may obtain a copy of the License at *
* *
* http://www.apache.org/licenses/LICENSE-2.0 *
* *
* Unless required by applicable law or agreed to in writing, *
* software distributed under the License is distributed on an *
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY *
* KIND, either express or implied. See the License for the *
* specific language governing permissions and limitations *
* under the License. *
****************************************************************/
package org.apache.james.pop3server;
import java.util.List;
import org.apache.james.protocols.api.TLSSupportedSession;
import org.apache.james.services.MailRepository;
import org.apache.mailet.Mail;
/**
* All the handlers access this interface to communicate with
* POP3Handler object
*/
public interface POP3Session extends TLSSupportedSession {
/**
* A placeholder for emails deleted during the course of the POP3 transaction.
* This Mail instance is used to enable fast checks as to whether an email has been
* deleted from the inbox.
*/
public final static String DELETED ="DELETED_MAIL";
// Authentication states for the POP3 interaction
/** Waiting for user id */
public final static int AUTHENTICATION_READY = 0;
/** User id provided, waiting for password */
public final static int AUTHENTICATION_USERSET = 1;
/**
* A valid user id/password combination has been provided.
* In this state the client can access the mailbox
* of the specified user.
*/
public final static int TRANSACTION = 2;
/**
* Returns POP3Handler service wide configuration
*
* @return POP3HandlerConfigurationData
*/
POP3HandlerConfigurationData getConfigurationData();
/**
* Returns the current handler state
*
* @return handler state
*/
int getHandlerState();
/**
* Sets the new handler state
*
* @param handlerState state
*/
void setHandlerState(int handlerState);
/**
* Returns the current user inbox
*
* @return MailRepository
*/
MailRepository getUserInbox();
/**
* Sets the user's mail repository
*
* @param userInbox userInbox
*/
void setUserInbox(MailRepository userInbox);
/**
* Returns the mail list contained in the mailbox
*
* @return mailbox content
*/
List<Mail> getUserMailbox();
/**
* Sets a new mailbox content
*
* @param userMailbox mailbox
*/
void setUserMailbox(List<Mail> userMailbox);
/**
* Returns the backup mailbox
*
* @return list backup
*/
List<Mail> getBackupUserMailbox();
/**
* Sets a new backup mailbox content
*
* @param backupUserMailbox the mailbox backup
*/
void setBackupUserMailbox(List<Mail> backupUserMailbox);
}
| pop3/src/main/java/org/apache/james/protocols/POP3Session.java | copy pop3 stuff
git-svn-id: 72b794ca7b43188aed56de932e8081c5d617dce0@905778 13f79535-47bb-0310-9956-ffa450edef68
| pop3/src/main/java/org/apache/james/protocols/POP3Session.java | copy pop3 stuff | <ide><path>op3/src/main/java/org/apache/james/protocols/POP3Session.java
<add>/****************************************************************
<add> * Licensed to the Apache Software Foundation (ASF) under one *
<add> * or more contributor license agreements. See the NOTICE file *
<add> * distributed with this work for additional information *
<add> * regarding copyright ownership. The ASF licenses this file *
<add> * to you under the Apache License, Version 2.0 (the *
<add> * "License"); you may not use this file except in compliance *
<add> * with the License. You may obtain a copy of the License at *
<add> * *
<add> * http://www.apache.org/licenses/LICENSE-2.0 *
<add> * *
<add> * Unless required by applicable law or agreed to in writing, *
<add> * software distributed under the License is distributed on an *
<add> * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY *
<add> * KIND, either express or implied. See the License for the *
<add> * specific language governing permissions and limitations *
<add> * under the License. *
<add> ****************************************************************/
<add>
<add>
<add>
<add>package org.apache.james.pop3server;
<add>
<add>
<add>import java.util.List;
<add>
<add>import org.apache.james.protocols.api.TLSSupportedSession;
<add>import org.apache.james.services.MailRepository;
<add>import org.apache.mailet.Mail;
<add>
<add>/**
<add> * All the handlers access this interface to communicate with
<add> * POP3Handler object
<add> */
<add>
<add>public interface POP3Session extends TLSSupportedSession {
<add>
<add> /**
<add> * A placeholder for emails deleted during the course of the POP3 transaction.
<add> * This Mail instance is used to enable fast checks as to whether an email has been
<add> * deleted from the inbox.
<add> */
<add> public final static String DELETED ="DELETED_MAIL";
<add>
<add> // Authentication states for the POP3 interaction
<add> /** Waiting for user id */
<add> public final static int AUTHENTICATION_READY = 0;
<add> /** User id provided, waiting for password */
<add> public final static int AUTHENTICATION_USERSET = 1;
<add> /**
<add> * A valid user id/password combination has been provided.
<add> * In this state the client can access the mailbox
<add> * of the specified user.
<add> */
<add> public final static int TRANSACTION = 2;
<add>
<add> /**
<add> * Returns POP3Handler service wide configuration
<add> *
<add> * @return POP3HandlerConfigurationData
<add> */
<add> POP3HandlerConfigurationData getConfigurationData();
<add>
<add> /**
<add> * Returns the current handler state
<add> *
<add> * @return handler state
<add> */
<add> int getHandlerState();
<add>
<add> /**
<add> * Sets the new handler state
<add> *
<add> * @param handlerState state
<add> */
<add> void setHandlerState(int handlerState);
<add>
<add> /**
<add> * Returns the current user inbox
<add> *
<add> * @return MailRepository
<add> */
<add> MailRepository getUserInbox();
<add>
<add> /**
<add> * Sets the user's mail repository
<add> *
<add> * @param userInbox userInbox
<add> */
<add> void setUserInbox(MailRepository userInbox);
<add>
<add> /**
<add> * Returns the mail list contained in the mailbox
<add> *
<add> * @return mailbox content
<add> */
<add> List<Mail> getUserMailbox();
<add>
<add> /**
<add> * Sets a new mailbox content
<add> *
<add> * @param userMailbox mailbox
<add> */
<add> void setUserMailbox(List<Mail> userMailbox);
<add>
<add> /**
<add> * Returns the backup mailbox
<add> *
<add> * @return list backup
<add> */
<add> List<Mail> getBackupUserMailbox();
<add>
<add>
<add> /**
<add> * Sets a new backup mailbox content
<add> *
<add> * @param backupUserMailbox the mailbox backup
<add> */
<add> void setBackupUserMailbox(List<Mail> backupUserMailbox);
<add>}
<add> |
|
Java | mit | 35c721a742320165a6369a0bd7254d4608a1c8bb | 0 | seqcode/seqcode-core,seqcode/seqcode-core,seqcode/seqcode-core,seqcode/seqcode-core | package edu.psu.compbio.seqcode.projects.akshay.bayesments.bayesnet;
import java.io.File;
import java.util.Arrays;
import java.util.Random;
import edu.psu.compbio.seqcode.gse.utils.probability.NormalDistribution;
import edu.psu.compbio.seqcode.projects.akshay.bayesments.experiments.ExperimentManager;
import edu.psu.compbio.seqcode.projects.akshay.bayesments.features.GenomicLocations;
import edu.psu.compbio.seqcode.projects.akshay.bayesments.features.Sequences;
import edu.psu.compbio.seqcode.projects.akshay.bayesments.framework.Config;
import edu.psu.compbio.seqcode.projects.akshay.bayesments.utils.BayesmentsSandbox;
import edu.psu.compbio.seqcode.projects.akshay.bayesments.utils.Cubic;
/**
 * EMtrain initializes and trains the parameters of the Bayesian network
 * (chromatin states, factor-binding states and, optionally, sequence/motif
 * features) using the EM framework.
 * @author akshaykakumanu
 *
 */

public class EMtrain {

	// Framework configuration: state counts, iteration counts, flags
	protected Config config;
	// Training examples: genomic locations with chromatin/factor signal
	protected GenomicLocations trainingData;
	
	//The following 4 fields are needed only when the model is in seq state; they are initialized via the seq-mode setter (setSeqMode)
	protected Sequences seqs;
	// Motif scores: rows are locations, columns are motifs (N x M)
	protected double[][] Xs;
	// Gaussian means of motif scores: rows are chromatin states, columns are motifs
	protected double[][] MUs;
	// Gaussian std-devs of motif scores: rows are chromatin states, columns are motifs
	protected double[][] SIGMAs;
	
	//boolean variable, turns on when the model is in seqState. Once the model is in seqState it can't go back to non-seq mode.
	// Initially false: EM always starts in the non-seq state.
	protected boolean seqState;
	// True if only chromatin features are used, i.e. sequence features are never used
	protected boolean onlyChrom;
	
	// 2-d array of chromatin counts, with rows as locations and columns as conditions
	protected float[][] Xc;
	//2-d array of factor counts, with rows as locations and columns as conditions
	protected float[][] Xf;
	//2-d array of means, with rows as chromatin states and columns as conditions
	protected double[][] MUc;
	//2-d array of means, with rows as factor states and columns as conditions
	protected double[][] MUf;
	//2-d array of std-devs, with rows as chromatin states and columns as conditions
	protected double[][] SIGMAc;
	//2-d array of std-devs, with rows as factor states and columns as conditions
	protected double[][] SIGMAf;
	//2-d array of transition probabilities, with rows as chromatin states and columns as factor states
	protected double[][] Bjk;
	//1-d array of prior probabilities for the chromatin states
	protected double[] PIj;
	//Expectation of the unobserved (state) variables, filled in the E step of EM
	protected double[][][] Qijk;
	//no of chromatin states
	protected int numChromStates;
	//number of factor states
	protected int numFacBindingStates;
	
	protected int N; // number of training examples
	protected int C; // number of chromatin conditions
	protected int F; // number of factor conditions (almost always 1)
	protected int M; // number of motifs included as features (set when the model enters seq mode via setSeqMode)
	
	// The 3-d arrays below record parameter values over all EM iterations
	// (used for plotting). The seq-feature ones are global (unlike local
	// copies for other parameters) and are allocated when the model enters
	// seq mode (setSeqMode).
	protected double[][][] trainMUs;
	protected double[][][] trainSIGMAs;
	protected double[][][] trainMUc;
	protected double[][][] trainMUf;
	protected double[][][] trainSIGMAc;
	protected double[][][] trainSIGMAf;
	protected double[][] trainPIj;
	protected double[][][] trainBjk;
	
	// Upper caps applied to the corresponding sigma estimates (per condition / per motif)
	protected double[] capSIGMAc;
	protected double[] capSIGMAf;
	protected double[] capSIGMAs;
	
	// Regularization weights for the chromatin and sequence features
	protected double[] WCnorm;
	protected double[] WSnorm;
	// Regularization strength (read from config when regularization is on)
	protected double lambda;
	protected boolean regularize;
	
	//Current round of EM. Remembers whether the model was run previously in non-seq mode, hence kept as a field.
	protected int itr_no=0;
	//Total number of EM iterations, across both seq and non-seq mode (set at construction time)
	protected int total_itrs=0;
/**
* When initializing this class object, it is always in seq off mode
* @param config
* @param trainingData
* @param manager
*/
public EMtrain(Config config, GenomicLocations trainingData, ExperimentManager manager) {
this.config = config;
this.trainingData = trainingData;
this.regularize = config.doRegularization();
if(regularize){
this.lambda = config.getLambda();
}
//Initializing the model
initializeEM(manager);
this.total_itrs = config.getNumItrs();
this.onlyChrom = config.runOnlyChrom();
this.seqState = false;
}
	/**
	 * Initializes all the parameters of the Bayesian network: the dimensions
	 * (N, C, F, state counts), the observed data matrices, the Gaussian means
	 * (perturbed empirical means) and std-devs (percentile spread), the
	 * transition probabilities Bjk (random rows) and the state priors PIj
	 * (uniform). Also allocates the Q tensor used by the E step.
	 * @param manager experiment manager, used to label the debug printouts
	 */
	private void initializeEM(ExperimentManager manager){
		// getting N, M, C, P and F
		N=this.trainingData.getNumTrainingExamples();
		C=this.trainingData.getNumChromatinCons();
		F=this.trainingData.getNumFacCons();
		numChromStates = config.getNumChrmStates();
		numFacBindingStates = config.getNumFacStates();
		
		//Initializing and loading X's
		this.Xc = new float[N][C];
		this.Xf = new float[N][F];
		this.Xc = this.trainingData.getChromatinCounts();
		this.Xf = this.trainingData.getFactorCounts();
		
		//Initializing mu's
		MUc = new double[numChromStates][C];
		MUf = new double[numFacBindingStates][F];
		
		if(regularize){
			WCnorm = new double[C];
			// Initial regularization weights all start at 0
			// (note: an earlier comment here claimed 1, but the code sets 0.0)
			for(int c=0; c<C; c++){
				WCnorm[c] = 0.0;
			}
		}
		
		this.capSIGMAc = new double[C];
		this.capSIGMAf = new double[F];
		
		//Initialization from empirical means (values scattered around mean +/- 0.2*std)
		for(int c=0; c< C; c++){
			float[] observedValues = new float[N];
			for(int i=0; i<N; i++){
				observedValues[i] = Xc[i][c];
			}
			double[] means = this.getEmpMeanValues(numChromStates, observedValues);
			for(int j=0; j<numChromStates; j++){
				MUc[j][c] = means[j];
			}
		}
		for(int f=0; f< F; f++){
			float[] observedValues = new float[N];
			for(int i=0; i<N; i++){
				observedValues[i] = Xf[i][f];
			}
			double[] means = this.getEmpMeanValues(numFacBindingStates, observedValues);
			for(int k=0; k<numFacBindingStates; k++){
				MUf[k][f] = means[k];
			}
		}
		
		//Printing the initial Mu's
		BayesmentsSandbox.printArray(MUc, "MUc", "MUc", manager);
		BayesmentsSandbox.printArray(MUf, "MUf", "MUf", manager);
		
		//Initializing sigma's
		SIGMAc = new double[numChromStates][C];
		SIGMAf = new double[numFacBindingStates][F];
		
		// Initializing from empirical data: (85th - 15th percentile) spread
		// divided by the number of states; the same value is recorded as the
		// per-condition sigma cap.
		// NOTE: getPercentileIndex sorts observedValues in place; the
		// indexing below relies on that side effect.
		for(int c=0; c< C; c++){
			double[] observedValues = new double[N];
			for(int i=0; i<N; i++){
				observedValues[i] = Xc[i][c];
			}
			double min = observedValues[this.getPercentileIndex(15.0, observedValues)];
			double max = observedValues[this.getPercentileIndex(85, observedValues)];
			for(int j=0; j<numChromStates; j++){
				SIGMAc[j][c] = (max-min)/config.getNumChrmStates();
				this.capSIGMAc[c] = (max-min)/config.getNumChrmStates();
			}
		}
		for(int f=0; f< F; f++){
			double[] observedValues = new double[N];
			for(int i=0; i<N; i++){
				observedValues[i] = Xf[i][f];
			}
			double min = observedValues[this.getPercentileIndex(15.0, observedValues)];
			double max = observedValues[this.getPercentileIndex(85, observedValues)];
			for(int k=0; k<numFacBindingStates; k++){
				SIGMAf[k][f] = (max-min)/config.getNumFacStates();
				this.capSIGMAf[f] = (max-min)/config.getNumFacStates();
			}
		}
		
		//Printing the initial SIGMA's
		BayesmentsSandbox.printArray(SIGMAc, "SIGMAc", "SIGMAc", manager);
		BayesmentsSandbox.printArray(SIGMAf, "SIGMAf", "SIGMAf", manager);
		
		// Initializing Bjk .. Using random initialization; each row is a
		// probability distribution over factor states
		Bjk = new double[numChromStates][numFacBindingStates];
		for(int i=0; i<numChromStates; i++){
			Bjk[i] = this.getRandomList(numFacBindingStates, true);
		}
		
		//printing the initial Bjk's
		BayesmentsSandbox.printArray(Bjk, "chrom_state", "factor_State", manager);
		
		// Initializing PIj ... Using Uniform initialization
		PIj = new double[numChromStates];
		PIj = this.getUniformList(numChromStates);
		
		//Printing the initial PIj's
		BayesmentsSandbox.printArray(PIj, "chrom_state");
		
		//Initializing the dimensions of Qijk's
		Qijk = new double[N][numChromStates][numFacBindingStates];
	}
/**
* This is internally called by seqSeqMode setter when the class enters seq mode
* The method initializes the parameters for the seq features of the bayesian network
*/
private void initializeSeqParams(ExperimentManager manager){
this.MUs = new double[numChromStates][M];
this.SIGMAs = new double[numChromStates][M];
for(int m=0; m< M; m++){
float[] observedScores = new float[N];
for(int i=0; i<N; i++){
observedScores[i] = (float) Xs[i][m];
}
double[] means = this.getEmpMeanValues(numChromStates, observedScores);
for(int j=0; j<numChromStates; j++){
MUs[j][m] = means[j];
}
}
//printing params
BayesmentsSandbox.printArray(MUs, "MUs", "MUs", manager);
for(int m=0; m< M; m++){
double[] observedValues = new double[N];
for(int i=0; i<N; i++){
observedValues[i] = Xs[i][m];
}
double min = observedValues[this.getPercentileIndex(15.0, observedValues)];
double max = observedValues[this.getPercentileIndex(85.0, observedValues)];
for(int j=0; j<numChromStates; j++){
SIGMAs[j][m] = (max-min)/config.getNumChrmStates();
this.capSIGMAs[m] = (max-min)/config.getNumChrmStates();
}
}
BayesmentsSandbox.printArray(SIGMAs, "SIGMAs", "SIGMAs", manager);
}
/**
* Generates n random numbers that sum up to 1 if the boolean prob is TRUE
* @param n
* @param prob
* @return
*/
private double[] getRandomList(int n, boolean prob){
double[] ret = new double[n];
double sum=0.0;
Random ran = new Random();
for(int i=0; i<n; i++){
ret[i] = ran.nextDouble()+(double) ran.nextInt(90);
sum = sum + ret[i];
}
if(prob){
for(int i=0; i<n; i++){
ret[i] = ret[i]/sum;
}
return ret;
}else{
return ret;
}
}
/**
* Generates n values that fall in mu+-0.2*sigma of the the observedValues
* @param n
* @param observedValues
* @return
*/
private double[] getEmpMeanValues(int n, float[] observedValues){
double[] ret = new double[n];
double mean = 0.0;
double std=0.0;
for(int i=0; i< observedValues.length; i++){
mean = mean + observedValues[i];
}
mean = mean/(double) observedValues.length;
for(int i=0; i<observedValues.length; i++){
std = std + Math.pow(observedValues[i]-mean, 2.0);
}
std = std/(double) observedValues.length;
std = Math.sqrt(std);
Random rn =new Random();
double range = 0.4*std;
for(int i=0; i<n ; i++){
double random = rn.nextDouble()*range+mean-0.2*std;
ret[i] = random;
}
return ret;
}
/**
* Returns n equal numbers that add upto one
* @param n
* @return
*/
private double[] getUniformList(int n){
double[] ret = new double[n];
for(int i=0; i<n; i++){
ret[i] = 1/(double)n;
}
return ret;
}
/**
* Returns the index of the minimum number in the given list of doubles
* @param list
* @return
*/
private int getMinindex(double[] list){
double val = 100000.0;
int ret=0;
for(int i=0; i< list.length; i++){
if(val > list[i]){
ret = i;
val = list[i];
}
}
return ret;
}
/**
* returns the index of the maximum number in the given list of doubles
* @param list
* @return
*/
private int getMaxindex(double[] list){
double val = -100000.0;
int ret=0;
for(int i=0; i< list.length; i++){
if(list[i] > val){
ret =i;
val = list[i];
}
}
return ret;
}
public int getPercentileIndex(double percentile, double[] list){
Arrays.sort(list);
int index = (int) percentile*list.length/100;
return index;
}
	/**
	 * Runs EM for the given number of iterations: alternates the E step
	 * (computing the expected state assignments Qijk) with the M step
	 * (re-estimating PIj, MUc/MUf(/MUs), SIGMAc/SIGMAf(/SIGMAs) and Bjk),
	 * recording every parameter value per iteration in the train* arrays.
	 * When plot is true, the recorded trajectories are rendered with
	 * EMIterPlotter after training. May be called more than once (e.g. first
	 * in non-seq mode, then again after entering seq mode); itr_no carries
	 * over between calls so the train* arrays keep filling up.
	 * @param itrs number of EM iterations to run in this call
	 * @param plot whether to plot parameter trajectories after training
	 */
	public void runEM(int itrs, boolean plot){
		
		// Initializing the arrays to store the parameters for all training rounds
		//double[][][] trainMUc = new double[this.total_itrs+1][numChromStates][C]; //initial random params plus itrs
		//double[][][] trainMUf = new double[this.total_itrs+1][numFacBindingStates][F];
		//double[][][] trainSIGMAc = new double[this.total_itrs+1][numChromStates][C];
		//double[][][] trainSIGMAf = new double[this.total_itrs+1][numFacBindingStates][F];
		//double[][] trainPIj = new double[this.total_itrs+1][numChromStates];
		//double[][][] trainBjk = new double[this.total_itrs+1][numChromStates][numFacBindingStates];
		
		for(int t=0; t<itrs; t++){ // training for the given number of iterations
			
			// On the very first EM round, allocate the history arrays and
			// store the initial (random/empirical) parameters at index 0.
			if(itr_no==0){ //Copy the initial set of random parameters. True on the first round of EM 
				trainMUc = new double[this.total_itrs+1][numChromStates][C];
				trainMUf = new double[this.total_itrs+1][numFacBindingStates][F];
				trainSIGMAc = new double[this.total_itrs+1][numChromStates][C];
				trainSIGMAf = new double[this.total_itrs+1][numFacBindingStates][F];
				trainPIj = new double[this.total_itrs+1][numChromStates];
				trainBjk = new double[this.total_itrs+1][numChromStates][numFacBindingStates];
				trainMUc[0] = MUc;
				trainMUf[0] = MUf;
				trainSIGMAc[0] = SIGMAc;
				trainSIGMAf[0] = SIGMAf;
				trainPIj[0] = PIj;
				trainBjk[0] = Bjk;
				itr_no++;
			}
			
			if(this.seqState){ //When the model is in seq state
				// Backfill the seq-parameter history for the rounds that ran
				// before sequence mode was switched on.
				if(t == 0){ // First EM iteration after the model entered seq state
					for(int p=0; p< this.itr_no-1 ; p++){ // indexes from 0 to itr_no-2 are all zeros
						for(int j=0; j<this.numChromStates; j++){
							for(int m=0; m<M; m++){
								trainMUs[p][j][m] =0.0;
							}
						}
					}
					// NOTE(review): this loop assigns whole array references
					// (trainMUs[itr_no-1] = MUs) repeatedly inside the j/m
					// loops; one assignment outside the loops would suffice --
					// confirm no per-element copy was intended here.
					for(int j=0; j<this.numChromStates; j++){ // index no itr_no-1 is initial random seq parameters
						for(int m=0; m<M; m++){
							trainMUs[itr_no-1] = MUs;
							trainSIGMAs[itr_no-1] = SIGMAs;
						}
					}
				}
			}
			
			executeEStep(); //E-Step
			executeMStep(); //M-Step
			
			// Copy the updated parameters (element-wise, so the history is a
			// snapshot rather than a reference to the live arrays)
			for(int j=0; j<numChromStates; j++){
				for(int c=0; c<C; c++){
					trainMUc[itr_no][j][c]= MUc[j][c];
				}
			}
			for(int k=0; k<numFacBindingStates; k++){
				for(int f=0; f<F; f++){
					trainMUf[itr_no][k][f]= MUf[k][f];
				}
			}
			for(int j=0; j<numChromStates; j++){
				for(int c=0; c<C; c++){
					trainSIGMAc[itr_no][j][c]= SIGMAc[j][c];
				}
			}
			for(int k=0; k<numFacBindingStates; k++){
				for(int f=0; f<F; f++){
					trainSIGMAf[itr_no][k][f]= SIGMAf[k][f];
				}
			}
			for(int j=0; j<numChromStates; j++){
				trainPIj[itr_no][j] = PIj[j];
			}
			for(int j=0; j< numChromStates; j++){
				for(int k=0; k<numFacBindingStates; k++){
					trainBjk[itr_no][j][k] = Bjk[j][k];
				}
			}
			if(this.seqState){
				for(int j=0; j<numChromStates; j++){
					for(int m=0; m<M; m++){
						trainMUs[itr_no][j][m]= MUs[j][m];
						//debug
						//System.out.println(Integer.toString(j)+"\t"+Double.toString(MUs[j][m]));
					}
				}
				for(int j=0; j<numChromStates; j++){
					for(int m=0; m<M; m++){
						trainSIGMAs[itr_no][j][m]= SIGMAs[j][m];
					}
				}
			}
			this.itr_no++; // increment the EM iteration number
		}
		
		// Plot if asked for
		if(plot){
			if(this.onlyChrom || this.seqState){
				//Plotting Pi values (one series per chromatin state)
				double[][] Xaxes = new double[numChromStates][this.total_itrs+1]; // plus 1 for initial random parameters
				double[][] Yaxes = new double[numChromStates][this.total_itrs+1];
				for(int j=0; j<numChromStates; j++){
					for(int itr =0; itr<this.total_itrs+1; itr++){
						Xaxes[j][itr] = itr;
						Yaxes[j][itr] = trainPIj[itr][j];
					}
				}
				EMIterPlotter piplotter = new EMIterPlotter(this.config, Xaxes, Yaxes, "PI-C");
				piplotter.plot();
				
				//Plotting Mu-c (one plot per chromatin condition)
				Xaxes = new double[numChromStates][this.total_itrs+1];
				Yaxes = new double[numChromStates][this.total_itrs+1];
				for(int c=0; c<C; c++){
					for(int j=0; j<numChromStates; j++){
						for(int itr=0; itr<this.total_itrs+1; itr++){
							Xaxes[j][itr] = itr;
							Yaxes[j][itr] = trainMUc[itr][j][c];
						}
					}
					EMIterPlotter MUcPlotter = new EMIterPlotter(this.config, Xaxes, Yaxes, "MU-C_"+Integer.toString(c));
					MUcPlotter.plot();
				}
				
				//Plotting SIGMAc (one plot per chromatin condition)
				Xaxes = new double[numChromStates][this.total_itrs+1];
				Yaxes = new double[numChromStates][this.total_itrs+1];
				for(int c=0; c<C; c++){
					for(int j=0; j<numChromStates; j++){
						for(int itr=0; itr<this.total_itrs+1; itr++){
							Xaxes[j][itr] = itr;
							Yaxes[j][itr] = trainSIGMAc[itr][j][c];
						}
					}
					EMIterPlotter SIGMACPlotter = new EMIterPlotter(this.config, Xaxes, Yaxes, "SIGMA-C_"+Integer.toString(c));
					SIGMACPlotter.plot();
				}
				
				//Plotting Muf (one plot per factor condition)
				Xaxes = new double[numFacBindingStates][this.total_itrs+1];
				Yaxes = new double[numFacBindingStates][this.total_itrs+1];
				for(int f=0; f<F; f++){
					for(int k=0; k<numFacBindingStates; k++){
						for(int itr=0; itr<this.total_itrs+1; itr++){
							Xaxes[k][itr] = itr;
							Yaxes[k][itr] = trainMUf[itr][k][f];
						}
					}
					EMIterPlotter MUfPlotter = new EMIterPlotter(this.config, Xaxes, Yaxes, "MF-F_"+Integer.toString(f));
					MUfPlotter.plot();
				}
				
				//Plotting SIGMAf (one plot per factor condition)
				Xaxes = new double[numFacBindingStates][this.total_itrs+1];
				Yaxes = new double[numFacBindingStates][this.total_itrs+1];
				for(int f=0; f<F; f++){
					for(int k=0; k<numFacBindingStates; k++){
						for(int itr=0; itr<this.total_itrs+1; itr++){
							Xaxes[k][itr] = itr;
							Yaxes[k][itr] = trainSIGMAf[itr][k][f];
						}
					}
					EMIterPlotter SIGMAfPlotter = new EMIterPlotter(this.config, Xaxes, Yaxes, "SIGMA-f_"+Integer.toString(f));
					SIGMAfPlotter.plot();
				}
				
				//Plotting Bjk (one series per (chromatin state, factor state) pair)
				Xaxes = new double[numFacBindingStates*numChromStates][this.total_itrs+1];
				Yaxes = new double[numFacBindingStates*numChromStates][this.total_itrs+1];
				int count=0;
				for(int j=0; j<numChromStates; j++){
					for(int k=0; k<numFacBindingStates; k++){
						for(int itr=0; itr<this.total_itrs+1; itr++){
							Xaxes[count][itr] = itr;
							Yaxes[count][itr] = trainBjk[itr][j][k];
						}
						count++;
					}
				}
				EMIterPlotter BjkPlotter = new EMIterPlotter(this.config, Xaxes, Yaxes, "Bjk");
				BjkPlotter.plot();
			}
			
			if(this.seqState){
				double[][] Xaxes = new double[numChromStates][this.total_itrs+1]; // plus 1 for initial random parameters
				double[][] Yaxes = new double[numChromStates][this.total_itrs+1];
				//Plotting Mus
				// NOTE(review): the axes here are indexed by motif m although
				// the arrays are sized by numChromStates, and the inner j loop
				// overwrites Yaxes[m][itr] for every chromatin state; this is
				// inconsistent with the SIGMAs block below (indexed by j) --
				// confirm the intended plotting behavior.
				for(int m=0; m<M; m++){
					for(int j=0; j<numChromStates; j++){
						for(int itr=0; itr<this.total_itrs+1; itr++){
							Xaxes[m][itr] = itr;
							Yaxes[m][itr] = trainMUs[itr][j][m];
						}
					}
					EMIterPlotter MUsPlotter = new EMIterPlotter(this.config, Xaxes, Yaxes, "MU-s_"+Integer.toString(m));
					MUsPlotter.plot();
				}
				
				//Plotting SIIGMAs (one plot per motif)
				Xaxes = new double[numChromStates][this.total_itrs+1];
				Yaxes = new double[numChromStates][this.total_itrs+1];
				for(int m=0; m<M; m++){
					for(int j=0; j<numChromStates; j++){
						for(int itr=0; itr<this.total_itrs+1; itr++){
							Xaxes[j][itr] = itr;
							Yaxes[j][itr] = trainSIGMAs[itr][j][m];
						}
					}
					EMIterPlotter SIGMAsPlotter = new EMIterPlotter(this.config, Xaxes, Yaxes, "SIGMA-s_"+Integer.toString(m));
					SIGMAsPlotter.plot();
				}
			}
		}
	}
	/**
	 * E step of EM: for every training example i, chromatin state j and
	 * factor-binding state k, computes the posterior responsibility
	 * Qijk proportional to PIj * P(Xc[i]|j) * Bjk * P(Xf[i]|k)
	 * (times the sequence-feature likelihood P(Xs[i]|j) when in seq mode),
	 * then normalizes over (j,k) per example. Likelihoods are products of
	 * per-condition Gaussians; when regularization is on, each chromatin/seq
	 * Gaussian is raised to 1/(1+W) with the corresponding feature weight.
	 * NaN products (e.g. from degenerate sigmas) are clamped to 0, and an
	 * all-zero example normalizes to all-zero responsibilities.
	 */
	private void executeEStep(){
		// Per-example normalization constants (sum over all j,k)
		double den[]= new double[N];
		//Calculate the numerator and the denominator for all the Qijk's
		for(int i=0; i<N; i++){ // over the training examples
			for(int j=0; j<numChromStates; j++){ // over the chromatin states
				for(int k=0; k< numFacBindingStates; k++){ //over factor binding states
					double chromGausssianProd=0.0;
					double facGaussianProd = 0.0;
					double seqGaussianProd = 0.0;
					// Chromatin likelihood: product over conditions
					for(int c=0; c<C; c++){
						NormalDistribution gaussian = new NormalDistribution(MUc[j][c],Math.pow(SIGMAc[j][c], 2.0));
						double que_pusher = (this.regularize) ?Math.pow(gaussian.calcProbability((double) Xc[i][c]), 1/(1+WCnorm[c])): gaussian.calcProbability((double) Xc[i][c]) ;
						chromGausssianProd = c==0 ? que_pusher: chromGausssianProd* que_pusher;
					}
					// Factor likelihood: product over factor conditions
					for(int f=0; f< F; f++){
						NormalDistribution gaussian = new NormalDistribution(MUf[k][f],Math.pow(SIGMAf[k][f], 2.0));
						facGaussianProd = (f == 0 ? gaussian.calcProbability((double) Xf[i][f]): facGaussianProd* gaussian.calcProbability((double) Xf[i][f]));
					}
					// Set the gaussian products to 0 in case they are NaN
					chromGausssianProd = ( Double.isNaN(chromGausssianProd)) ? 0.0 : chromGausssianProd;
					facGaussianProd = (Double.isNaN(facGaussianProd)) ? 0.0: facGaussianProd;
					// Sequence likelihood: product over motifs (seq mode only)
					if(this.seqState){
						for(int m=0; m<M; m++){
							NormalDistribution gaussian = new NormalDistribution(MUs[j][m],Math.pow(SIGMAs[j][m], 2.0));
							double que_pusher = this.regularize ? Math.pow(gaussian.calcProbability((double) Xs[i][m]), 1/(1+WSnorm[m])) : gaussian.calcProbability((double) Xs[i][m]);
							seqGaussianProd = m==0 ? que_pusher: seqGaussianProd* que_pusher;
						}
						seqGaussianProd = (Double.isNaN(seqGaussianProd)) ? 0.0: seqGaussianProd;
					}
					// NOTE(review): the exponents below are -1 * weight; for a
					// positive configured weight this raises a probability
					// product to a NEGATIVE power (inflating small
					// likelihoods) -- confirm the sign convention of
					// getChromWeight()/getSeqWeight().
					if(this.seqState){
						Qijk[i][j][k] = PIj[j]*Math.pow(chromGausssianProd, -1*config.getChromWeight())*Bjk[j][k]*facGaussianProd*Math.pow(seqGaussianProd, -1*config.getSeqWeight());
					}else{Qijk[i][j][k] = PIj[j]*chromGausssianProd*Bjk[j][k]*facGaussianProd;}
					den[i] = den[i]+Qijk[i][j][k];
				}
			}
		}
		
		//Normalize the numerator by dividing the Qijk's with the denominators
		for(int i=0; i<N; i++){
			for(int j=0; j<numChromStates; j++){
				for(int k=0; k<numFacBindingStates; k++){
					Qijk[i][j][k] = Qijk[i][j][k]/den[i];
					// Guard against 0/0 when den[i] is zero
					Qijk[i][j][k] = ( Double.isNaN(Qijk[i][j][k])) ? 0.0 : Qijk[i][j][k];
				}
			}
		}
	}
/**
 * Executes the M step. Updates all the parameters of the model from the
 * current responsibilities Qijk computed in the E step: mixture weights PIj,
 * emission means and standard deviations (MUc/SIGMAc, MUf/SIGMAf and, in seq
 * mode, MUs/SIGMAs), the chromatin-to-factor table Bjk and, when
 * regularization is enabled, the per-feature weights WCnorm/WSnorm.
 * Note: the "cond ? x : acc+x" ternaries below overwrite the stale value
 * from the previous EM round on the first accumulated term instead of
 * zeroing the parameter arrays up front.
 */
private void executeMStep(){
	//-------------------------PIj update-----------------------------
	//Compute
	double denPIj = 0.0;
	for(int j=0; j<numChromStates; j++){
		for(int i=0; i<N; i++){
			for(int k=0; k<numFacBindingStates; k++){
				PIj[j] = k==0 && i==0 ? Qijk[i][j][k] : PIj[j]+Qijk[i][j][k];
				denPIj = denPIj + Qijk[i][j][k];
			}
		}
	}
	//Normalize
	for(int j=0; j<numChromStates; j++){
		PIj[j] = PIj[j]/denPIj;
	}
	//Making sure PI-j for any state does not go to zero
	//for(int j=0; j<numChromStates; j++){
	//	if(PIj[j] < 0.01){
	//		PIj[j] = 0.01;
	//	}
	//}
	//Re-normalize
	//denPIj = 0.0;
	//for(int j=0; j< numChromStates; j++){
	//	denPIj = denPIj + PIj[j];
	//}
	//for(int j=0; j<numChromStates; j++){
	//	PIj[j] = PIj[j]/denPIj;
	//}
	//-----------------------MUc update------------------------------------
	//Compute: responsibility-weighted mean of the chromatin counts per (state, condition)
	double[][] denMUc=new double[numChromStates][C];
	for(int j=0; j<numChromStates; j++){
		for(int c=0; c<C; c++){
			for(int i=0; i<N; i++){
				for(int k=0; k<numFacBindingStates; k++){
					MUc[j][c] = k==0 && i==0 ? Qijk[i][j][k]*Xc[i][c] : MUc[j][c]+ Qijk[i][j][k]*Xc[i][c];
					denMUc[j][c] = denMUc[j][c]+Qijk[i][j][k];
				}
			}
		}
	}
	//Normalize
	for(int j=0; j<numChromStates; j++){
		for(int c=0; c<C; c++){
			MUc[j][c] = MUc[j][c]/denMUc[j][c];
		}
	}
	//-----------------------MUf update --------------------------------------
	//Compute: responsibility-weighted mean of the factor counts per (binding state, condition)
	double[][] denMUf = new double[numFacBindingStates][F];
	for(int k=0; k<numFacBindingStates; k++){
		for(int f=0; f<F; f++){
			for(int i=0; i<N; i++){
				for(int j=0; j<numChromStates; j++){
					MUf[k][f] = j==0 && i==0? Qijk[i][j][k]*Xf[i][f]: MUf[k][f]+Qijk[i][j][k]*Xf[i][f];
					denMUf[k][f] = denMUf[k][f]+Qijk[i][j][k];
				}
			}
		}
	}
	//Normalize
	for(int k=0; k<numFacBindingStates; k++){
		for(int f=0; f<F; f++){
			MUf[k][f] = MUf[k][f]/denMUf[k][f];
		}
	}
	// -----------------------MUs update, if in seqState----------------------------
	//Compute: responsibility-weighted mean of the motif scores per (chromatin state, motif)
	if(this.seqState){
		double[][] denMUs = new double[numChromStates][M];
		for(int j=0; j<numChromStates; j++){
			for(int m=0; m<M; m++){
				for(int i=0; i<N; i++){
					for(int k=0; k<numFacBindingStates; k++){
						MUs[j][m] = k==0 && i==0? Qijk[i][j][k]*Xs[i][m]: MUs[j][m]+Qijk[i][j][k]*Xs[i][m];
						denMUs[j][m] = denMUs[j][m]+Qijk[i][j][k];
					}
				}
			}
		}
		//Normalize
		for(int j=0; j<numChromStates; j++){
			for(int m=0; m<M; m++){
				MUs[j][m] = MUs[j][m]/denMUs[j][m];
			}
		}
	}
	//--------------------SIGMAc update --------------------------------------
	//Compute: responsibility-weighted squared deviation from the (already updated) MUc
	double[][] denSIGMAc = new double[numChromStates][C];
	for(int j=0; j<numChromStates; j++){
		for(int c=0; c<C; c++){
			for(int i=0; i<N; i++){
				for(int k=0; k<numFacBindingStates; k++){
					SIGMAc[j][c] = k==0 && i==0? Qijk[i][j][k]*Math.pow(((double)Xc[i][c] - MUc[j][c]) , 2.0): SIGMAc[j][c]+Qijk[i][j][k]*Math.pow(((double)Xc[i][c] - MUc[j][c]) , 2.0);
					denSIGMAc[j][c] = denSIGMAc[j][c]+Qijk[i][j][k];
				}
			}
		}
	}
	//Normalize and taking the square root (SIGMAc therefore stores a standard deviation)
	for(int j=0; j<numChromStates; j++){
		for(int c=0; c<C; c++){
			SIGMAc[j][c] = Math.sqrt(SIGMAc[j][c]/denSIGMAc[j][c]);
		}
	}
	if(config.capSigma()){
		// Clamp each std dev to the data-derived cap computed at initialization
		for(int j=0; j<numChromStates; j++){
			for(int c=0; c<C; c++){
				SIGMAc[j][c] = (SIGMAc[j][c] > this.capSIGMAc[c]) ? this.capSIGMAc[c] : SIGMAc[j][c];
			}
		}
	}
	//------------------SIGMAf update------------------------------------------
	//Compute
	double[][] denSIGMAf = new double[numFacBindingStates][F];
	for(int k=0; k<numFacBindingStates; k++){
		for(int f=0; f<F; f++){
			for(int i=0; i<N; i++){
				for(int j=0; j< numChromStates; j++){
					SIGMAf[k][f] = j==0 && i==0? Qijk[i][j][k]*Math.pow(((double)Xf[i][f] - MUf[k][f]) , 2.0) : SIGMAf[k][f]+Qijk[i][j][k]*Math.pow(((double)Xf[i][f] - MUf[k][f]) , 2.0);
					denSIGMAf[k][f] = denSIGMAf[k][f]+Qijk[i][j][k];
				}
			}
		}
	}
	//Normalize and taking the square root
	for(int k=0; k<numFacBindingStates; k++){
		for(int f=0; f<F; f++){
			SIGMAf[k][f] = Math.sqrt(SIGMAf[k][f]/denSIGMAf[k][f]);
		}
	}
	if(config.capSigma()){
		for(int k=0; k<numFacBindingStates; k++){
			for(int f=0; f<F; f++){
				SIGMAf[k][f] = (SIGMAf[k][f] > this.capSIGMAf[f]) ? this.capSIGMAf[f] : SIGMAf[k][f];
			}
		}
	}
	//--------------------SIGMAs update, if in seqState ------------------------
	//Compute
	if(this.seqState){
		double[][] denSIGMAs = new double[numChromStates][M];
		for(int j=0; j<numChromStates; j++){
			for(int m=0; m<M; m++){
				for(int i=0; i<N; i++){
					for(int k=0; k<numFacBindingStates; k++){
						SIGMAs[j][m] = k==0 && i==0? Qijk[i][j][k]*Math.pow(((double)Xs[i][m] - MUs[j][m]) , 2.0): SIGMAs[j][m]+Qijk[i][j][k]*Math.pow(((double)Xs[i][m] - MUs[j][m]) , 2.0);
						denSIGMAs[j][m] = denSIGMAs[j][m]+Qijk[i][j][k];
					}
				}
			}
		}
		//Normalize and taking the square root
		for(int j=0; j<numChromStates; j++){
			for(int m=0; m<M; m++){
				SIGMAs[j][m] = Math.sqrt(SIGMAs[j][m]/denSIGMAs[j][m]);
			}
		}
		if(config.capSigma()){
			for(int j=0; j<numChromStates; j++){
				for(int m=0; m<M; m++){
					SIGMAs[j][m] = (SIGMAs[j][m] > this.capSIGMAs[m]) ? this.capSIGMAs[m] : SIGMAs[j][m];
				}
			}
		}
	}
	//---------------------Bjk update -------------------------------------------
	//Compute
	double[] denBjk = new double[numFacBindingStates];
	for(int k=0; k<numFacBindingStates; k++){
		for(int j=0; j< numChromStates; j++){
			for(int i=0; i<N; i++){
				Bjk[j][k] = i==0 ? Qijk[i][j][k] : Bjk[j][k]+Qijk[i][j][k];
				denBjk[k] = denBjk[k]+Qijk[i][j][k];
			}
		}
	}
	//Normalize (per factor state k, across chromatin states j)
	for(int k=0; k<numFacBindingStates; k++){
		for(int j=0; j< numChromStates; j++){
			Bjk[j][k] = Bjk[j][k]/denBjk[k];
		}
	}
	// ------------------------ regularization chromatin weights update (if regularization is true) ------------------------------------
	if(this.regularize){
		for(int c=0; c<C; c++){
			double Z=0.0;
			Cubic cube_root_solver = new Cubic();
			for(int i=0; i<N; i++){
				for(int j=0; j<numChromStates; j++){
					for(int k=0; k<numFacBindingStates; k++){
						NormalDistribution gaussian = new NormalDistribution(MUc[j][c],Math.pow(SIGMAc[j][c], 2.0));
						// Guard: skip terms whose density is NaN, otherwise Math.log would poison Z
						if(!Double.isNaN(gaussian.calcProbability((double)Xc[i][c]))){
							Z = (i==0 && j==0 && k==0) ? Qijk[i][j][k]*Math.log(gaussian.calcProbability((double)Xc[i][c])) : Z+Qijk[i][j][k]*Math.log(gaussian.calcProbability((double)Xc[i][c]));
						}else{
							Z = (i==0 && j==0 && k==0) ? 0.0 : Z+0.0;
						}
					}
				}
			}
			// Solve the cubic x^3 + 2x^2 + x + Z/lambda = 0; x1 is the chosen root
			cube_root_solver.solve(1, 2, 1, Z/this.lambda);
			this.WCnorm[c] = cube_root_solver.x1;
		}
		if(this.seqState){
			for(int m=0; m<M; m++){
				double Z=0.0;
				Cubic cube_root_solver = new Cubic();
				for(int i=0; i<N; i++){
					for(int j=0; j<numChromStates; j++){
						for(int k=0; k< this.numFacBindingStates; k++){
							NormalDistribution gaussian = new NormalDistribution(MUs[j][m],Math.pow(SIGMAs[j][m], 2.0));
							if(!Double.isNaN(gaussian.calcProbability((double)Xs[i][m]))){
								Z = (i==0 && j==0 && k==0) ? Qijk[i][j][k]*Math.log(gaussian.calcProbability((double)Xs[i][m])) : Z+Qijk[i][j][k]*Math.log(gaussian.calcProbability((double)Xs[i][m]));
							}else{
								Z = (i==0 && j==0 && k==0) ? 0.0 : Z+0.0;
							}
						}
					}
				}
				cube_root_solver.solve(1, 2, 1, Z/this.lambda);
				this.WSnorm[m] = cube_root_solver.x1;
			}
		}
	}
}
//setters
/**
 * Switches the model into sequence mode: subsequent E/M steps also use motif
 * score features. This is a one-way switch (seqState is never reset).
 * Ordering matters: M and capSIGMAs must be set before setInitialSeqParams,
 * which sizes MUs/SIGMAs by M and fills capSIGMAs.
 * @param seqs sequence data for the training regions
 * @param Xs motif scores; rows are locations, columns are motifs
 * @param manager used for labelling the initial-parameter printouts
 */
public void setSeqMode(Sequences seqs, double[][] Xs, ExperimentManager manager){
	this.seqState = true;
	this.setXs(Xs);
	this.M = Xs[0].length;	// number of motif features, taken from the score matrix width
	if(this.regularize){
		WSnorm = new double[M];
	}
	this.capSIGMAs = new double[M];
	this.setSequences(seqs);
	this.setInitialSeqParams(manager);
	// Trajectory storage for the seq parameters across all EM iterations (index 0 = initial values)
	this.trainMUs = new double[this.total_itrs+1][this.numChromStates][this.M];
	this.trainSIGMAs = new double[this.total_itrs+1][this.numChromStates][this.M];
}
// Simple private setters used by setSeqMode
private void setSequences(Sequences seqs){this.seqs = seqs;}
private void setXs(double[][] Xs){this.Xs = Xs;}
private void setInitialSeqParams(ExperimentManager manager){this.initializeSeqParams(manager);}
//Accessors. Note: these return live references to the internal arrays, not copies.
public double[] getPIj(){return this.PIj;}
public double[][] getMUc(){return this.MUc;}
public double[][] getMUf(){return this.MUf;}
public double[][] getSIGMAc(){return this.SIGMAc;}
public double[][] getSIGMAf(){return this.SIGMAf;}
public double[][] getBjk(){return this.Bjk;}
public double[][] getMUs(){return this.MUs;}
public double[][] getSIGMAs(){return this.SIGMAs;}
public GenomicLocations getChromData(){return this.trainingData;}
public Sequences getSeqData(){return this.seqs;}
public boolean getSeqStateStatus(){return this.seqState;}
public double[] getChromWeights(){return this.WCnorm;}
public double[] getSeqWeights(){return this.WSnorm;}
// main method is only for testing purposes; intentionally left empty
public static void main(String[] args){
	//double[] test = getRandomList(3,true);
	//System.out.println(test[0]);
	//System.out.println(test[1]);
	//System.out.println(test[2]);
}
}
| src/edu/psu/compbio/seqcode/projects/akshay/bayesments/bayesnet/EMtrain.java | package edu.psu.compbio.seqcode.projects.akshay.bayesments.bayesnet;
import java.io.File;
import java.util.Arrays;
import java.util.Random;
import edu.psu.compbio.seqcode.gse.utils.probability.NormalDistribution;
import edu.psu.compbio.seqcode.projects.akshay.bayesments.experiments.ExperimentManager;
import edu.psu.compbio.seqcode.projects.akshay.bayesments.features.GenomicLocations;
import edu.psu.compbio.seqcode.projects.akshay.bayesments.features.Sequences;
import edu.psu.compbio.seqcode.projects.akshay.bayesments.framework.Config;
import edu.psu.compbio.seqcode.projects.akshay.bayesments.utils.BayesmentsSandbox;
import edu.psu.compbio.seqcode.projects.akshay.bayesments.utils.Cubic;
/**
 * EMtrain class that initializes and trains the parameters of the Bayesian network using the EM framework
 * @author akshaykakumanu
 *
 */
public class EMtrain {
protected Config config;	// framework configuration (state counts, iteration count, flags, lambda)
protected GenomicLocations trainingData;	// chromatin/factor counts for the training regions
//The following 4 lines are variables needed when the model is in seq state. They should be initiated using a setter method (setSeqMode)
protected Sequences seqs;
protected double[][] Xs;	// motif scores: rows are locations, columns are motifs
protected double[][] MUs;	// seq emission means: rows are chromatin states, columns are motifs
protected double[][] SIGMAs;	// seq emission spreads: rows are chromatin states, columns are motifs
//boolean variable, turns on when the model is in seqState. Once the model is in seqState it can't go back to non-seq state mode
// Initially it is false. That is, the EM always starts in non-seq state
protected boolean seqState;
// True if only chromatin features are used, i.e. sequence features are never used
protected boolean onlyChrom;
// 2-d array of chromatin counts, with rows as locations and columns as conditions
protected float[][] Xc;
//2-d array of factor counts, with rows as locations and columns as conditions
protected float[][] Xf;
//2-d array of means, with rows as chromatin states and columns as conditions
protected double[][] MUc;
//2-d array of means, with rows as factor states and columns as conditions
protected double[][] MUf;
//2-d array of spreads, with rows as chromatin states and columns as conditions
//(stored as a standard deviation: the M step takes a square root; the E step squares it back into a variance)
protected double[][] SIGMAc;
//2-d array of spreads, with rows as factor states and columns as conditions
protected double[][] SIGMAf;
//2-d array of transition probabilities, with rows as chromatin states and columns as factor states
protected double[][] Bjk;
//1-d array of probabilities for different chromatin states
protected double[] PIj;
//Expectation of unobserved variables (responsibilities) for the E step in EM algorithm
protected double[][][] Qijk;
//no of chromatin states
protected int numChromStates;
//number of factor states
protected int numFacBindingStates;
protected int N; // number of training examples
protected int C; // number of chromatin conditions
protected int F; // number of factor conditions (almost always 1)
protected int M; // number of motifs to be included as features (this number is initialized when the model enters the seq state mode (setSeqMode))
// flag to turn on plotting of the parameters as the function of iterations step in EM
//The 3-d arrays store the values of seq parameters over all iterations of EM. They are globally defined unlike other non-seq parameters
//because of some technical details
//These are initialized when the model enters the seq mode (setSeqMode)
protected double[][][] trainMUs;
protected double[][][] trainSIGMAs;
// Parameter trajectories across EM iterations (index 0 holds the initial values)
protected double[][][] trainMUc;
protected double[][][] trainMUf;
protected double[][][] trainSIGMAc;
protected double[][][] trainSIGMAf;
protected double[][] trainPIj;
protected double[][][] trainBjk;
protected double[] capSIGMAc;	// per-condition upper bounds for SIGMAc (applied when config.capSigma())
protected double[] capSIGMAf;	// per-condition upper bounds for SIGMAf
protected double[] capSIGMAs;	// per-motif upper bounds for SIGMAs
protected double[] WCnorm;	// regularization weights for the chromatin features
protected double[] WSnorm;	// regularization weights for the sequence features
protected double lambda;	// regularization strength (set only when regularize is true)
protected boolean regularize;	// whether feature-weight regularization is enabled
//Current round of EM. This remembers if the model was run previously in non-seq mode and hence globally defined
protected int itr_no=0;
//Total number of EM iters, both in seq and non-seq mode (Should be given when the class object is initialized)
protected int total_itrs=0;
/**
 * Constructs the trainer and initializes all non-sequence model parameters.
 * When initializing this class object, it is always in seq off mode.
 * @param config framework configuration (state counts, iteration count, regularization)
 * @param trainingData chromatin/factor counts for the training regions
 * @param manager experiment manager, used for labelling the initial-parameter printouts
 */
public EMtrain(Config config, GenomicLocations trainingData, ExperimentManager manager) {
	this.config = config;
	this.trainingData = trainingData;
	this.regularize = config.doRegularization();
	if(regularize){
		this.lambda = config.getLambda();
	}
	//Initializing the model. Note: initializeEM reads the regularize flag, so it must run after the flag is set.
	initializeEM(manager);
	this.total_itrs = config.getNumItrs();
	this.onlyChrom = config.runOnlyChrom();
	this.seqState = false;
}
/**
 * Method that initializes all the non-sequence parameters for the Bayesian
 * network: loads the count matrices, seeds the emission means near the data
 * centre, derives std-dev seeds (and caps) from a 15th-85th percentile
 * spread, randomizes Bjk, sets PIj uniform and sizes Qijk.
 * @param manager used for labelling the initial-parameter printouts
 */
private void initializeEM(ExperimentManager manager){
	// getting N, C, F and the chromatin/factor state counts
	N=this.trainingData.getNumTrainingExamples();
	C=this.trainingData.getNumChromatinCons();
	F=this.trainingData.getNumFacCons();
	numChromStates = config.getNumChrmStates();
	numFacBindingStates = config.getNumFacStates();
	//Initializing and loading X's (the new arrays are immediately replaced by the data references)
	this.Xc = new float[N][C];
	this.Xf = new float[N][F];
	this.Xc = this.trainingData.getChromatinCounts();
	this.Xf = this.trainingData.getFactorCounts();
	//Initializing mu's
	MUc = new double[numChromStates][C];
	MUf = new double[numFacBindingStates][F];
	if(regularize){
		WCnorm = new double[C];
		// Initial regularization weights start at 0 (no down-weighting of any condition)
		for(int c=0; c<C; c++){
			WCnorm[c] = 0.0;
		}
	}
	this.capSIGMAc = new double[C];
	this.capSIGMAf = new double[F];
	//Initialization from empirical means
	for(int c=0; c< C; c++){
		float[] observedValues = new float[N];
		for(int i=0; i<N; i++){
			observedValues[i] = Xc[i][c];
		}
		double[] means = this.getEmpMeanValues(numChromStates, observedValues);
		for(int j=0; j<numChromStates; j++){
			MUc[j][c] = means[j];
		}
	}
	for(int f=0; f< F; f++){
		float[] observedValues = new float[N];
		for(int i=0; i<N; i++){
			observedValues[i] = Xf[i][f];
		}
		double[] means = this.getEmpMeanValues(numFacBindingStates, observedValues);
		for(int k=0; k<numFacBindingStates; k++){
			MUf[k][f] = means[k];
		}
	}
	//Printing the initial Mu's
	BayesmentsSandbox.printArray(MUc, "MUc", "MUc", manager);
	BayesmentsSandbox.printArray(MUf, "MUf", "MUf", manager);
	//Initializing sigma's
	SIGMAc = new double[numChromStates][C];
	SIGMAf = new double[numFacBindingStates][F];
	// Initializing from empirical data: spread between the 15th and 85th percentile,
	// divided by the number of states (getPercentileIndex sorts observedValues in place)
	for(int c=0; c< C; c++){
		double[] observedValues = new double[N];
		for(int i=0; i<N; i++){
			observedValues[i] = Xc[i][c];
		}
		double min = observedValues[this.getPercentileIndex(15.0, observedValues)];
		double max = observedValues[this.getPercentileIndex(85, observedValues)];
		for(int j=0; j<numChromStates; j++){
			SIGMAc[j][c] = (max-min)/config.getNumChrmStates();
			this.capSIGMAc[c] = (max-min)/config.getNumChrmStates();
		}
	}
	for(int f=0; f< F; f++){
		double[] observedValues = new double[N];
		for(int i=0; i<N; i++){
			observedValues[i] = Xf[i][f];
		}
		double min = observedValues[this.getPercentileIndex(15.0, observedValues)];
		double max = observedValues[this.getPercentileIndex(85, observedValues)];
		for(int k=0; k<numFacBindingStates; k++){
			SIGMAf[k][f] = (max-min)/config.getNumFacStates();
			this.capSIGMAf[f] = (max-min)/config.getNumFacStates();
		}
	}
	//Printing the initial SIGMA's
	BayesmentsSandbox.printArray(SIGMAc, "SIGMAc", "SIGMAc", manager);
	BayesmentsSandbox.printArray(SIGMAf, "SIGMAf", "SIGMAf", manager);
	// Initializing Bjk .. Using random initialization (each row is a random distribution summing to 1)
	Bjk = new double[numChromStates][numFacBindingStates];
	for(int i=0; i<numChromStates; i++){
		Bjk[i] = this.getRandomList(numFacBindingStates, true);
	}
	//printing the initial Bjk's
	BayesmentsSandbox.printArray(Bjk, "chrom_state", "factor_State", manager);
	// Initializing PIj ... Using Uniform initialization
	PIj = new double[numChromStates];
	PIj = this.getUniformList(numChromStates);
	//Printing the initial PIj's
	BayesmentsSandbox.printArray(PIj, "chrom_state");
	//Initializing the dimensions of Qijk's
	Qijk = new double[N][numChromStates][numFacBindingStates];
}
/**
 * This is internally called by the setSeqMode setter when the class enters seq mode.
 * The method initializes the parameters for the seq features of the Bayesian
 * network: MUs is seeded near the empirical mean of each motif's scores and
 * SIGMAs (with its cap) from the 15th-85th percentile spread.
 * Precondition: M, Xs and capSIGMAs must already be set by setSeqMode.
 * @param manager used for labelling the initial-parameter printouts
 */
private void initializeSeqParams(ExperimentManager manager){
	this.MUs = new double[numChromStates][M];
	this.SIGMAs = new double[numChromStates][M];
	for(int m=0; m< M; m++){
		float[] observedScores = new float[N];
		for(int i=0; i<N; i++){
			observedScores[i] = (float) Xs[i][m];
		}
		double[] means = this.getEmpMeanValues(numChromStates, observedScores);
		for(int j=0; j<numChromStates; j++){
			MUs[j][m] = means[j];
		}
	}
	//printing params
	BayesmentsSandbox.printArray(MUs, "MUs", "MUs", manager);
	for(int m=0; m< M; m++){
		double[] observedValues = new double[N];
		for(int i=0; i<N; i++){
			observedValues[i] = Xs[i][m];
		}
		// getPercentileIndex sorts observedValues in place; min/max come from the sorted array
		double min = observedValues[this.getPercentileIndex(15.0, observedValues)];
		double max = observedValues[this.getPercentileIndex(85.0, observedValues)];
		for(int j=0; j<numChromStates; j++){
			SIGMAs[j][m] = (max-min)/config.getNumChrmStates();
			this.capSIGMAs[m] = (max-min)/config.getNumChrmStates();
		}
	}
	BayesmentsSandbox.printArray(SIGMAs, "SIGMAs", "SIGMAs", manager);
}
/**
 * Draws n positive random values; when prob is true they are normalized so
 * they sum to 1 (a random categorical distribution), otherwise the raw draws
 * are returned.
 * @param n number of values to generate
 * @param prob whether to normalize the values into a probability vector
 * @return array of n random values
 */
private double[] getRandomList(int n, boolean prob){
	Random generator = new Random();
	double[] values = new double[n];
	double total = 0.0;
	for(int idx = 0; idx < n; idx++){
		// Same draw as before: a fractional part plus a random integer in [0, 90)
		values[idx] = generator.nextDouble() + (double) generator.nextInt(90);
		total += values[idx];
	}
	if(!prob){
		return values;
	}
	for(int idx = 0; idx < n; idx++){
		values[idx] /= total;
	}
	return values;
}
/**
 * Samples n values uniformly from the window [mean - 0.2*std, mean + 0.2*std]
 * of the observed data — used to seed per-state emission means close to the
 * empirical centre of the data.
 * @param n number of seed values to generate
 * @param observedValues the observed data points
 * @return n random values near the empirical mean
 */
private double[] getEmpMeanValues(int n, float[] observedValues){
	// Empirical mean
	double mean = 0.0;
	for(float value : observedValues){
		mean += value;
	}
	mean /= (double) observedValues.length;
	// Empirical (population) standard deviation
	double sumSq = 0.0;
	for(float value : observedValues){
		sumSq += Math.pow(value - mean, 2.0);
	}
	double std = Math.sqrt(sumSq / (double) observedValues.length);
	// Draw n points inside mean +/- 0.2*std
	Random generator = new Random();
	double window = 0.4 * std;
	double[] seeds = new double[n];
	for(int idx = 0; idx < n; idx++){
		seeds[idx] = generator.nextDouble() * window + mean - 0.2 * std;
	}
	return seeds;
}
/**
 * Builds a uniform probability vector: n entries, each equal to 1/n, so the
 * entries sum to 1.
 * @param n number of entries
 * @return uniform distribution over n outcomes
 */
private double[] getUniformList(int n){
	double share = 1.0 / (double) n;
	double[] uniform = new double[n];
	Arrays.fill(uniform, share);
	return uniform;
}
/**
 * Returns the index of the minimum number in the given list of doubles
 * (first occurrence on ties). Returns 0 for an empty list.
 * Fix: the previous sentinel of 100000.0 returned the wrong index whenever
 * every value in the list exceeded 100000; positive infinity is always a
 * safe upper bound.
 * @param list values to scan
 * @return index of the smallest element
 */
private int getMinindex(double[] list){
	double val = Double.POSITIVE_INFINITY; // any finite value beats the sentinel
	int ret = 0;
	for(int i=0; i< list.length; i++){
		if(val > list[i]){
			ret = i;
			val = list[i];
		}
	}
	return ret;
}
/**
 * Returns the index of the maximum number in the given list of doubles
 * (first occurrence on ties). Returns 0 for an empty list.
 * Fix: the previous sentinel of -100000.0 returned the wrong index whenever
 * every value in the list was below -100000; negative infinity is always a
 * safe lower bound.
 * @param list values to scan
 * @return index of the largest element
 */
private int getMaxindex(double[] list){
	double val = Double.NEGATIVE_INFINITY; // any finite value beats the sentinel
	int ret = 0;
	for(int i=0; i< list.length; i++){
		if(list[i] > val){
			ret = i;
			val = list[i];
		}
	}
	return ret;
}
/**
 * Sorts the given list in place (callers rely on this side effect — they
 * index the same array afterwards) and returns the index into the sorted
 * array corresponding to the given percentile.
 * Fixes: the old expression `(int) percentile*list.length/100` bound the
 * cast to `percentile` alone, truncating fractional percentiles before the
 * multiply; and percentile == 100 produced an index one past the end.
 * @param percentile percentile in [0, 100]
 * @param list values; sorted ascending in place as a side effect
 * @return index into the (now sorted) array, clamped to a valid position
 */
public int getPercentileIndex(double percentile, double[] list){
	Arrays.sort(list);
	int index = (int) (percentile * list.length / 100.0);
	// Clamp so percentile == 100 (or rounding) cannot run off the end
	if (index >= list.length) {
		index = (list.length == 0) ? 0 : list.length - 1;
	}
	return index;
}
/**
 * Runs the EM algorithm for a given number of iterations and also plots the
 * parameters over the learning rounds if the plot parameter is true.
 * The model may be trained over several calls (e.g. chromatin-only first,
 * then again after setSeqMode); itr_no tracks the global iteration index
 * across calls, and slot 0 of each train* array holds the initial values.
 * @param itrs number of EM iterations to run in this call
 * @param plot when true, plots every recorded parameter trajectory
 */
public void runEM(int itrs, boolean plot){
	// Initializing the arrays to store the parameters for all training rounds
	//double[][][] trainMUc = new double[this.total_itrs+1][numChromStates][C]; //initial random params plus itrs
	//double[][][] trainMUf = new double[this.total_itrs+1][numFacBindingStates][F];
	//double[][][] trainSIGMAc = new double[this.total_itrs+1][numChromStates][C];
	//double[][][] trainSIGMAf = new double[this.total_itrs+1][numFacBindingStates][F];
	//double[][] trainPIj = new double[this.total_itrs+1][numChromStates];
	//double[][][] trainBjk = new double[this.total_itrs+1][numChromStates][numFacBindingStates];
	for(int t=0; t<itrs; t++){ // training for the given number of iterations
		if(itr_no==0){ //Copy the initial set of random parameters. True on the first round of EM
			trainMUc = new double[this.total_itrs+1][numChromStates][C];
			trainMUf = new double[this.total_itrs+1][numFacBindingStates][F];
			trainSIGMAc = new double[this.total_itrs+1][numChromStates][C];
			trainSIGMAf = new double[this.total_itrs+1][numFacBindingStates][F];
			trainPIj = new double[this.total_itrs+1][numChromStates];
			trainBjk = new double[this.total_itrs+1][numChromStates][numFacBindingStates];
			trainMUc[0] = MUc;
			trainMUf[0] = MUf;
			trainSIGMAc[0] = SIGMAc;
			trainSIGMAf[0] = SIGMAf;
			trainPIj[0] = PIj;
			trainBjk[0] = Bjk;
			itr_no++;
		}
		if(this.seqState){ //When the model is in seq state
			if(t == 0){ // First EM iteration after the model entered seq state
				for(int p=0; p< this.itr_no-1 ; p++){ // indexes from 0 to itr_no-2 are all zeros (rounds run before seq mode)
					for(int j=0; j<this.numChromStates; j++){
						for(int m=0; m<M; m++){
							trainMUs[p][j][m] =0.0;
						}
					}
				}
				for(int j=0; j<this.numChromStates; j++){ // index no itr_no-1 is initial random seq parameters
					for(int m=0; m<M; m++){
						// NOTE(review): whole-slice assignments repeated inside the j/m loops;
						// a single assignment outside the loops would have the same effect
						trainMUs[itr_no-1] = MUs;
						trainSIGMAs[itr_no-1] = SIGMAs;
					}
				}
			}
		}
		executeEStep(); //E-Step
		executeMStep(); //M-Step
		// Copy the updated parameters (element-wise, so later rounds cannot alias these snapshots)
		for(int j=0; j<numChromStates; j++){
			for(int c=0; c<C; c++){
				trainMUc[itr_no][j][c]= MUc[j][c];
			}
		}
		for(int k=0; k<numFacBindingStates; k++){
			for(int f=0; f<F; f++){
				trainMUf[itr_no][k][f]= MUf[k][f];
			}
		}
		for(int j=0; j<numChromStates; j++){
			for(int c=0; c<C; c++){
				trainSIGMAc[itr_no][j][c]= SIGMAc[j][c];
			}
		}
		for(int k=0; k<numFacBindingStates; k++){
			for(int f=0; f<F; f++){
				trainSIGMAf[itr_no][k][f]= SIGMAf[k][f];
			}
		}
		for(int j=0; j<numChromStates; j++){
			trainPIj[itr_no][j] = PIj[j];
		}
		for(int j=0; j< numChromStates; j++){
			for(int k=0; k<numFacBindingStates; k++){
				trainBjk[itr_no][j][k] = Bjk[j][k];
			}
		}
		if(this.seqState){
			for(int j=0; j<numChromStates; j++){
				for(int m=0; m<M; m++){
					trainMUs[itr_no][j][m]= MUs[j][m];
					//debug
					//System.out.println(Integer.toString(j)+"\t"+Double.toString(MUs[j][m]));
				}
			}
			for(int j=0; j<numChromStates; j++){
				for(int m=0; m<M; m++){
					trainSIGMAs[itr_no][j][m]= SIGMAs[j][m];
				}
			}
		}
		this.itr_no++; // increment the EM iteration number
	}
	// Plot if asked for
	if(plot){
		if(this.onlyChrom || this.seqState){
			//Plotting Pi values
			double[][] Xaxes = new double[numChromStates][this.total_itrs+1]; // plus 1 for initial random parameters
			double[][] Yaxes = new double[numChromStates][this.total_itrs+1];
			for(int j=0; j<numChromStates; j++){
				for(int itr =0; itr<this.total_itrs+1; itr++){
					Xaxes[j][itr] = itr;
					Yaxes[j][itr] = trainPIj[itr][j];
				}
			}
			EMIterPlotter piplotter = new EMIterPlotter(this.config, Xaxes, Yaxes, "PI-C");
			piplotter.plot();
			//Plotting Mu-c (one plot per chromatin condition)
			Xaxes = new double[numChromStates][this.total_itrs+1];
			Yaxes = new double[numChromStates][this.total_itrs+1];
			for(int c=0; c<C; c++){
				for(int j=0; j<numChromStates; j++){
					for(int itr=0; itr<this.total_itrs+1; itr++){
						Xaxes[j][itr] = itr;
						Yaxes[j][itr] = trainMUc[itr][j][c];
					}
				}
				EMIterPlotter MUcPlotter = new EMIterPlotter(this.config, Xaxes, Yaxes, "MU-C_"+Integer.toString(c));
				MUcPlotter.plot();
			}
			//Plotting SIGMAc
			Xaxes = new double[numChromStates][this.total_itrs+1];
			Yaxes = new double[numChromStates][this.total_itrs+1];
			for(int c=0; c<C; c++){
				for(int j=0; j<numChromStates; j++){
					for(int itr=0; itr<this.total_itrs+1; itr++){
						Xaxes[j][itr] = itr;
						Yaxes[j][itr] = trainSIGMAc[itr][j][c];
					}
				}
				EMIterPlotter SIGMACPlotter = new EMIterPlotter(this.config, Xaxes, Yaxes, "SIGMA-C_"+Integer.toString(c));
				SIGMACPlotter.plot();
			}
			//Plotting Muf (one plot per factor condition)
			Xaxes = new double[numFacBindingStates][this.total_itrs+1];
			Yaxes = new double[numFacBindingStates][this.total_itrs+1];
			for(int f=0; f<F; f++){
				for(int k=0; k<numFacBindingStates; k++){
					for(int itr=0; itr<this.total_itrs+1; itr++){
						Xaxes[k][itr] = itr;
						Yaxes[k][itr] = trainMUf[itr][k][f];
					}
				}
				EMIterPlotter MUfPlotter = new EMIterPlotter(this.config, Xaxes, Yaxes, "MF-F_"+Integer.toString(f));
				MUfPlotter.plot();
			}
			//Plotting SIGMAf
			Xaxes = new double[numFacBindingStates][this.total_itrs+1];
			Yaxes = new double[numFacBindingStates][this.total_itrs+1];
			for(int f=0; f<F; f++){
				for(int k=0; k<numFacBindingStates; k++){
					for(int itr=0; itr<this.total_itrs+1; itr++){
						Xaxes[k][itr] = itr;
						Yaxes[k][itr] = trainSIGMAf[itr][k][f];
					}
				}
				EMIterPlotter SIGMAfPlotter = new EMIterPlotter(this.config, Xaxes, Yaxes, "SIGMA-f_"+Integer.toString(f));
				SIGMAfPlotter.plot();
			}
			//Plotting Bjk (one curve per (j,k) pair, all on a single plot)
			Xaxes = new double[numFacBindingStates*numChromStates][this.total_itrs+1];
			Yaxes = new double[numFacBindingStates*numChromStates][this.total_itrs+1];
			int count=0;
			for(int j=0; j<numChromStates; j++){
				for(int k=0; k<numFacBindingStates; k++){
					for(int itr=0; itr<this.total_itrs+1; itr++){
						Xaxes[count][itr] = itr;
						Yaxes[count][itr] = trainBjk[itr][j][k];
					}
					count++;
				}
			}
			EMIterPlotter BjkPlotter = new EMIterPlotter(this.config, Xaxes, Yaxes, "Bjk");
			BjkPlotter.plot();
		}
		if(this.seqState){
			double[][] Xaxes = new double[numChromStates][this.total_itrs+1]; // plus 1 for initial random parameters
			double[][] Yaxes = new double[numChromStates][this.total_itrs+1];
			//Plotting Mus (one plot per motif)
			for(int m=0; m<M; m++){
				for(int j=0; j<numChromStates; j++){
					for(int itr=0; itr<this.total_itrs+1; itr++){
						// NOTE(review): row index is m here but j in the other loops — verify intended
						Xaxes[m][itr] = itr;
						Yaxes[m][itr] = trainMUs[itr][j][m];
					}
				}
				EMIterPlotter MUsPlotter = new EMIterPlotter(this.config, Xaxes, Yaxes, "MU-s_"+Integer.toString(m));
				MUsPlotter.plot();
			}
			//Plotting SIGMAs
			Xaxes = new double[numChromStates][this.total_itrs+1];
			Yaxes = new double[numChromStates][this.total_itrs+1];
			for(int m=0; m<M; m++){
				for(int j=0; j<numChromStates; j++){
					for(int itr=0; itr<this.total_itrs+1; itr++){
						Xaxes[j][itr] = itr;
						Yaxes[j][itr] = trainSIGMAs[itr][j][m];
					}
				}
				EMIterPlotter SIGMAsPlotter = new EMIterPlotter(this.config, Xaxes, Yaxes, "SIGMA-s_"+Integer.toString(m));
				SIGMAsPlotter.plot();
			}
		}
	}
}
/**
 * Executes the E step. That is, it calculates the responsibilities Qijk for
 * every (example i, chromatin state j, factor state k) using the current set
 * of parameters, then normalizes them per example.
 * NOTE(review): the second NormalDistribution argument is passed as SIGMA^2,
 * which suggests the constructor takes a variance rather than a std dev —
 * confirm against the NormalDistribution API.
 */
private void executeEStep(){
	double den[]= new double[N];
	//Calculate the numerator and the denominator for all the Qijk's
	for(int i=0; i<N; i++){ // over the training examples
		for(int j=0; j<numChromStates; j++){ // over the chromatin states
			for(int k=0; k< numFacBindingStates; k++){ //over factor binding states
				double chromGausssianProd=0.0;
				double facGaussianProd = 0.0;
				double seqGaussianProd = 0.0;
				// Product of per-condition chromatin densities; when regularizing,
				// each density is flattened by the exponent 1/(1+WCnorm[c])
				for(int c=0; c<C; c++){
					NormalDistribution gaussian = new NormalDistribution(MUc[j][c],Math.pow(SIGMAc[j][c], 2.0));
					double que_pusher = (this.regularize) ?Math.pow(gaussian.calcProbability((double) Xc[i][c]), 1/(1+WCnorm[c])): gaussian.calcProbability((double) Xc[i][c]) ;
					chromGausssianProd = c==0 ? que_pusher: chromGausssianProd* que_pusher;
				}
				for(int f=0; f< F; f++){
					NormalDistribution gaussian = new NormalDistribution(MUf[k][f],Math.pow(SIGMAf[k][f], 2.0));
					facGaussianProd = (f == 0 ? gaussian.calcProbability((double) Xf[i][f]): facGaussianProd* gaussian.calcProbability((double) Xf[i][f]));
				}
				// Set the gaussian products to 0 in case they are NaN
				chromGausssianProd = ( Double.isNaN(chromGausssianProd)) ? 0.0 : chromGausssianProd;
				facGaussianProd = (Double.isNaN(facGaussianProd)) ? 0.0: facGaussianProd;
				if(this.seqState){
					for(int m=0; m<M; m++){
						NormalDistribution gaussian = new NormalDistribution(MUs[j][m],Math.pow(SIGMAs[j][m], 2.0));
						double que_pusher = this.regularize ? Math.pow(gaussian.calcProbability((double) Xs[i][m]), 1/(1+WSnorm[m])) : gaussian.calcProbability((double) Xs[i][m]);
						seqGaussianProd = m==0 ? que_pusher: seqGaussianProd* que_pusher;
					}
					seqGaussianProd = (Double.isNaN(seqGaussianProd)) ? 0.0: seqGaussianProd;
				}
				if(this.seqState){
					// NOTE(review): exponents are -1*weight — presumably the configured
					// weights are negative (or this deliberately inverts them); confirm
					Qijk[i][j][k] = PIj[j]*Math.pow(chromGausssianProd, -1*config.getChromWeight())*Bjk[j][k]*facGaussianProd*Math.pow(seqGaussianProd, -1*config.getSeqWeight());
				}else{Qijk[i][j][k] = PIj[j]*chromGausssianProd*Bjk[j][k]*facGaussianProd;}
				den[i] = den[i]+Qijk[i][j][k];
			}
		}
	}
	//Normalize the numerator by dividing the Qijk's with the denominators
	for(int i=0; i<N; i++){
		for(int j=0; j<numChromStates; j++){
			for(int k=0; k<numFacBindingStates; k++){
				Qijk[i][j][k] = Qijk[i][j][k]/den[i];
				Qijk[i][j][k] = ( Double.isNaN(Qijk[i][j][k])) ? 0.0 : Qijk[i][j][k];
			}
		}
	}
}
/**
 * Executes the M step. Updates all the parameters of the model from the
 * current responsibilities Qijk: mixture weights PIj, emission means and
 * standard deviations (MUc/SIGMAc, MUf/SIGMAf and, in seq mode, MUs/SIGMAs),
 * the chromatin-to-factor table Bjk and, when regularization is enabled, the
 * per-feature weights WCnorm/WSnorm.
 * Fix: the regularization Z accumulations now skip data points whose
 * Gaussian density is NaN — previously a single NaN density fed into
 * Math.log(...) and poisoned Z, hence WCnorm/WSnorm and the next E step.
 */
private void executeMStep(){
	//-------------------------PIj update-----------------------------
	//Compute. The "cond ? x : acc+x" ternaries overwrite the stale value from
	//the previous EM round on the first accumulated term.
	double denPIj = 0.0;
	for(int j=0; j<numChromStates; j++){
		for(int i=0; i<N; i++){
			for(int k=0; k<numFacBindingStates; k++){
				PIj[j] = k==0 && i==0 ? Qijk[i][j][k] : PIj[j]+Qijk[i][j][k];
				denPIj = denPIj + Qijk[i][j][k];
			}
		}
	}
	//Normalize
	for(int j=0; j<numChromStates; j++){
		PIj[j] = PIj[j]/denPIj;
	}
	//Making sure PI-j for any state does not go to zero
	//for(int j=0; j<numChromStates; j++){
	//	if(PIj[j] < 0.01){
	//		PIj[j] = 0.01;
	//	}
	//}
	//Re-normalize
	//denPIj = 0.0;
	//for(int j=0; j< numChromStates; j++){
	//	denPIj = denPIj + PIj[j];
	//}
	//for(int j=0; j<numChromStates; j++){
	//	PIj[j] = PIj[j]/denPIj;
	//}
	//-----------------------MUc update------------------------------------
	//Compute: responsibility-weighted mean of the chromatin counts per (state, condition)
	double[][] denMUc=new double[numChromStates][C];
	for(int j=0; j<numChromStates; j++){
		for(int c=0; c<C; c++){
			for(int i=0; i<N; i++){
				for(int k=0; k<numFacBindingStates; k++){
					MUc[j][c] = k==0 && i==0 ? Qijk[i][j][k]*Xc[i][c] : MUc[j][c]+ Qijk[i][j][k]*Xc[i][c];
					denMUc[j][c] = denMUc[j][c]+Qijk[i][j][k];
				}
			}
		}
	}
	//Normalize
	for(int j=0; j<numChromStates; j++){
		for(int c=0; c<C; c++){
			MUc[j][c] = MUc[j][c]/denMUc[j][c];
		}
	}
	//-----------------------MUf update --------------------------------------
	//Compute: responsibility-weighted mean of the factor counts per (binding state, condition)
	double[][] denMUf = new double[numFacBindingStates][F];
	for(int k=0; k<numFacBindingStates; k++){
		for(int f=0; f<F; f++){
			for(int i=0; i<N; i++){
				for(int j=0; j<numChromStates; j++){
					MUf[k][f] = j==0 && i==0? Qijk[i][j][k]*Xf[i][f]: MUf[k][f]+Qijk[i][j][k]*Xf[i][f];
					denMUf[k][f] = denMUf[k][f]+Qijk[i][j][k];
				}
			}
		}
	}
	//Normalize
	for(int k=0; k<numFacBindingStates; k++){
		for(int f=0; f<F; f++){
			MUf[k][f] = MUf[k][f]/denMUf[k][f];
		}
	}
	// -----------------------MUs update, if in seqState----------------------------
	//Compute: responsibility-weighted mean of the motif scores per (chromatin state, motif)
	if(this.seqState){
		double[][] denMUs = new double[numChromStates][M];
		for(int j=0; j<numChromStates; j++){
			for(int m=0; m<M; m++){
				for(int i=0; i<N; i++){
					for(int k=0; k<numFacBindingStates; k++){
						MUs[j][m] = k==0 && i==0? Qijk[i][j][k]*Xs[i][m]: MUs[j][m]+Qijk[i][j][k]*Xs[i][m];
						denMUs[j][m] = denMUs[j][m]+Qijk[i][j][k];
					}
				}
			}
		}
		//Normalize
		for(int j=0; j<numChromStates; j++){
			for(int m=0; m<M; m++){
				MUs[j][m] = MUs[j][m]/denMUs[j][m];
			}
		}
	}
	//--------------------SIGMAc update --------------------------------------
	//Compute: responsibility-weighted squared deviation from the (already updated) MUc
	double[][] denSIGMAc = new double[numChromStates][C];
	for(int j=0; j<numChromStates; j++){
		for(int c=0; c<C; c++){
			for(int i=0; i<N; i++){
				for(int k=0; k<numFacBindingStates; k++){
					SIGMAc[j][c] = k==0 && i==0? Qijk[i][j][k]*Math.pow(((double)Xc[i][c] - MUc[j][c]) , 2.0): SIGMAc[j][c]+Qijk[i][j][k]*Math.pow(((double)Xc[i][c] - MUc[j][c]) , 2.0);
					denSIGMAc[j][c] = denSIGMAc[j][c]+Qijk[i][j][k];
				}
			}
		}
	}
	//Normalize and taking the square root (SIGMAc therefore stores a standard deviation)
	for(int j=0; j<numChromStates; j++){
		for(int c=0; c<C; c++){
			SIGMAc[j][c] = Math.sqrt(SIGMAc[j][c]/denSIGMAc[j][c]);
		}
	}
	if(config.capSigma()){
		// Clamp each std dev to the data-derived cap computed at initialization
		for(int j=0; j<numChromStates; j++){
			for(int c=0; c<C; c++){
				SIGMAc[j][c] = (SIGMAc[j][c] > this.capSIGMAc[c]) ? this.capSIGMAc[c] : SIGMAc[j][c];
			}
		}
	}
	//------------------SIGMAf update------------------------------------------
	//Compute
	double[][] denSIGMAf = new double[numFacBindingStates][F];
	for(int k=0; k<numFacBindingStates; k++){
		for(int f=0; f<F; f++){
			for(int i=0; i<N; i++){
				for(int j=0; j< numChromStates; j++){
					SIGMAf[k][f] = j==0 && i==0? Qijk[i][j][k]*Math.pow(((double)Xf[i][f] - MUf[k][f]) , 2.0) : SIGMAf[k][f]+Qijk[i][j][k]*Math.pow(((double)Xf[i][f] - MUf[k][f]) , 2.0);
					denSIGMAf[k][f] = denSIGMAf[k][f]+Qijk[i][j][k];
				}
			}
		}
	}
	//Normalize and taking the square root
	for(int k=0; k<numFacBindingStates; k++){
		for(int f=0; f<F; f++){
			SIGMAf[k][f] = Math.sqrt(SIGMAf[k][f]/denSIGMAf[k][f]);
		}
	}
	if(config.capSigma()){
		for(int k=0; k<numFacBindingStates; k++){
			for(int f=0; f<F; f++){
				SIGMAf[k][f] = (SIGMAf[k][f] > this.capSIGMAf[f]) ? this.capSIGMAf[f] : SIGMAf[k][f];
			}
		}
	}
	//--------------------SIGMAs update, if in seqState ------------------------
	//Compute
	if(this.seqState){
		double[][] denSIGMAs = new double[numChromStates][M];
		for(int j=0; j<numChromStates; j++){
			for(int m=0; m<M; m++){
				for(int i=0; i<N; i++){
					for(int k=0; k<numFacBindingStates; k++){
						SIGMAs[j][m] = k==0 && i==0? Qijk[i][j][k]*Math.pow(((double)Xs[i][m] - MUs[j][m]) , 2.0): SIGMAs[j][m]+Qijk[i][j][k]*Math.pow(((double)Xs[i][m] - MUs[j][m]) , 2.0);
						denSIGMAs[j][m] = denSIGMAs[j][m]+Qijk[i][j][k];
					}
				}
			}
		}
		//Normalize and taking the square root
		for(int j=0; j<numChromStates; j++){
			for(int m=0; m<M; m++){
				SIGMAs[j][m] = Math.sqrt(SIGMAs[j][m]/denSIGMAs[j][m]);
			}
		}
		if(config.capSigma()){
			for(int j=0; j<numChromStates; j++){
				for(int m=0; m<M; m++){
					SIGMAs[j][m] = (SIGMAs[j][m] > this.capSIGMAs[m]) ? this.capSIGMAs[m] : SIGMAs[j][m];
				}
			}
		}
	}
	//---------------------Bjk update -------------------------------------------
	//Compute
	double[] denBjk = new double[numFacBindingStates];
	for(int k=0; k<numFacBindingStates; k++){
		for(int j=0; j< numChromStates; j++){
			for(int i=0; i<N; i++){
				Bjk[j][k] = i==0 ? Qijk[i][j][k] : Bjk[j][k]+Qijk[i][j][k];
				denBjk[k] = denBjk[k]+Qijk[i][j][k];
			}
		}
	}
	//Normalize (per factor state k, across chromatin states j)
	for(int k=0; k<numFacBindingStates; k++){
		for(int j=0; j< numChromStates; j++){
			Bjk[j][k] = Bjk[j][k]/denBjk[k];
		}
	}
	// ------------------------ regularization chromatin weights update (if regularization is true) ------------------------------------
	if(this.regularize){
		for(int c=0; c<C; c++){
			double Z=0.0;
			Cubic cube_root_solver = new Cubic();
			for(int i=0; i<N; i++){
				for(int j=0; j<numChromStates; j++){
					for(int k=0; k<numFacBindingStates; k++){
						NormalDistribution gaussian = new NormalDistribution(MUc[j][c],Math.pow(SIGMAc[j][c], 2.0));
						// Guard: skip terms whose density is NaN, otherwise Math.log would poison Z
						if(!Double.isNaN(gaussian.calcProbability((double)Xc[i][c]))){
							Z = (i==0 && j==0 && k==0) ? Qijk[i][j][k]*Math.log(gaussian.calcProbability((double)Xc[i][c])) : Z+Qijk[i][j][k]*Math.log(gaussian.calcProbability((double)Xc[i][c]));
						}else{
							Z = (i==0 && j==0 && k==0) ? 0.0 : Z+0.0;
						}
					}
				}
			}
			// Solve the cubic x^3 + 2x^2 + x + Z/lambda = 0; x1 is the chosen root
			cube_root_solver.solve(1, 2, 1, Z/this.lambda);
			this.WCnorm[c] = cube_root_solver.x1;
		}
		if(this.seqState){
			for(int m=0; m<M; m++){
				double Z=0.0;
				Cubic cube_root_solver = new Cubic();
				for(int i=0; i<N; i++){
					for(int j=0; j<numChromStates; j++){
						for(int k=0; k< this.numFacBindingStates; k++){
							NormalDistribution gaussian = new NormalDistribution(MUs[j][m],Math.pow(SIGMAs[j][m], 2.0));
							// Same NaN guard for the sequence-feature weights
							if(!Double.isNaN(gaussian.calcProbability((double)Xs[i][m]))){
								Z = (i==0 && j==0 && k==0) ? Qijk[i][j][k]*Math.log(gaussian.calcProbability((double)Xs[i][m])) : Z+Qijk[i][j][k]*Math.log(gaussian.calcProbability((double)Xs[i][m]));
							}else{
								Z = (i==0 && j==0 && k==0) ? 0.0 : Z+0.0;
							}
						}
					}
				}
				cube_root_solver.solve(1, 2, 1, Z/this.lambda);
				this.WSnorm[m] = cube_root_solver.x1;
			}
		}
	}
}
//setters
public void setSeqMode(Sequences seqs, double[][] Xs, ExperimentManager manager){
this.seqState = true;
this.setXs(Xs);
this.M = Xs[0].length;
if(this.regularize){
WSnorm = new double[M];
}
this.capSIGMAs = new double[M];
this.setSequences(seqs);
this.setInitialSeqParams(manager);
this.trainMUs = new double[this.total_itrs+1][this.numChromStates][this.M];
this.trainSIGMAs = new double[this.total_itrs+1][this.numChromStates][this.M];
}
	// Store the sequence dataset backing sequence-mode training.
	private void setSequences(Sequences seqs){this.seqs = seqs;}
	// Store the sequence feature matrix (rows: events, columns: features).
	private void setXs(double[][] Xs){this.Xs = Xs;}
	// Delegate to initializeSeqParams to set up the sequence-mode model parameters.
	private void setInitialSeqParams(ExperimentManager manager){this.initializeSeqParams(manager);}
	//Accessors
	// NOTE: all getters return the live internal arrays (no defensive copies).
	public double[] getPIj(){return this.PIj;}
	// Gaussian means for chromatin (MUc) and factor (MUf) features.
	public double[][] getMUc(){return this.MUc;}
	public double[][] getMUf(){return this.MUf;}
	// Gaussian standard deviations for chromatin (SIGMAc) and factor (SIGMAf) features.
	public double[][] getSIGMAc(){return this.SIGMAc;}
	public double[][] getSIGMAf(){return this.SIGMAf;}
	// Bjk: per-binding-state distribution over chromatin states
	// (each column k sums to 1 after the M-step normalization above).
	public double[][] getBjk(){return this.Bjk;}
	// Sequence-feature Gaussian parameters; meaningful only when seqState is true.
	public double[][] getMUs(){return this.MUs;}
	public double[][] getSIGMAs(){return this.SIGMAs;}
	public GenomicLocations getChromData(){return this.trainingData;}
	public Sequences getSeqData(){return this.seqs;}
	// True once setSeqMode has been called.
	public boolean getSeqStateStatus(){return this.seqState;}
	// Regularized feature weights (chromatin / sequence), updated by the
	// cubic-root solver in the regularization step above.
	public double[] getChromWeights(){return this.WCnorm;}
	public double[] getSeqWeights(){return this.WSnorm;}
	// main method is only for testing purposes; the sanity checks below are
	// intentionally left commented out.
	public static void main(String[] args){
		//double[] test = getRandomList(3,true);
		//System.out.println(test[0]);
		//System.out.println(test[1]);
		//System.out.println(test[2]);
	}
}
| dealing with NaN doubles | src/edu/psu/compbio/seqcode/projects/akshay/bayesments/bayesnet/EMtrain.java | dealing with NaN doubles | <ide><path>rc/edu/psu/compbio/seqcode/projects/akshay/bayesments/bayesnet/EMtrain.java
<ide> for(int j=0; j<numChromStates; j++){
<ide> for(int k=0; k<numFacBindingStates; k++){
<ide> NormalDistribution gaussian = new NormalDistribution(MUc[j][c],Math.pow(SIGMAc[j][c], 2.0));
<del> Z = (i==0 && j==0 && k==0) ? Qijk[i][j][k]*Math.log(gaussian.calcProbability((double)Xc[i][c])) : Z+Qijk[i][j][k]*Math.log(gaussian.calcProbability((double)Xc[i][c]));
<add> if(!Double.isNaN(gaussian.calcProbability((double)Xc[i][c]))){
<add> Z = (i==0 && j==0 && k==0) ? Qijk[i][j][k]*Math.log(gaussian.calcProbability((double)Xc[i][c])) : Z+Qijk[i][j][k]*Math.log(gaussian.calcProbability((double)Xc[i][c]));
<add> }else{
<add> Z = (i==0 && j==0 && k==0) ? 0.0 : Z+0.0;
<add> }
<ide> }
<ide> }
<ide> }
<ide> for(int j=0; j<numChromStates; j++){
<ide> for(int k=0; k< this.numFacBindingStates; k++){
<ide> NormalDistribution gaussian = new NormalDistribution(MUs[j][m],Math.pow(SIGMAs[j][m], 2.0));
<del> Z = (i==0 && j==0 && k==0) ? Qijk[i][j][k]*Math.log(gaussian.calcProbability((double)Xs[i][m])) : Z+Qijk[i][j][k]*Math.log(gaussian.calcProbability((double)Xs[i][m]));
<add> if(!Double.isNaN(gaussian.calcProbability((double)Xs[i][m]))){
<add> Z = (i==0 && j==0 && k==0) ? Qijk[i][j][k]*Math.log(gaussian.calcProbability((double)Xs[i][m])) : Z+Qijk[i][j][k]*Math.log(gaussian.calcProbability((double)Xs[i][m]));
<add> }else{
<add> Z = (i==0 && j==0 && k==0) ? 0.0 : Z+0.0;
<add> }
<ide> }
<ide> }
<ide> } |
|
JavaScript | apache-2.0 | bb21718c6d96747e9c5b75bb04f31ae8b9b67c06 | 0 | openradiation/openradiation-mobile,openradiation/openradiation-mobile,openradiation/openradiation-mobile,openradiation/openradiation-mobile | //
//
// Root application module: declares every Angular module this app depends on
// (the Cordova bootstrap helper, ngRoute, mobile-angular-ui widgets and
// gestures, and the app's own services).
//
var app = angular.module('MobileAngularUiExamples', [
  'Cordova',
  'ngRoute',
  'mobile-angular-ui',
  // touch/drag feature: this is from 'mobile-angular-ui.gestures.js'
  // it is at a very early stage, so please be careful if you'd like to use it
  // in production. It is intended to provide a flexible, integrated and
  // easy to use alternative to other 3rd party libs like hammer.js, with the
  // final purpose to integrate gestures into default ui interactions like
  // opening sidebars, turning switches on/off ..
  'mobile-angular-ui.gestures',
  'starter.services'
]);
//
// Route table. `reloadOnSearch` is disabled on every route so that
// SharedState-driven $location.search() changes (e.g. closing a sidebar with
// the back button) do not retrigger routing.
//
app.config(function($routeProvider) {
  var routes = [
    ['/',            'or-home'],
    ['/scroll',      'scroll'],
    ['/toggle',      'toggle'],
    ['/tabs',        'tabs'],
    ['/accordion',   'accordion'],
    ['/overlay',     'overlay'],
    ['/forms',       'forms'],
    ['/dropdown',    'dropdown'],
    ['/drag',        'drag'],
    ['/carousel',    'carousel'],
    ['/useok',       'useok'],
    ['/tab-charts',  'tab-charts'],
    ['/mesurePrise', 'or-mesure-prise'],
    ['/mesureRecap', 'or-mesure-recap'],
    ['/histo',       'or-histo'],
    ['/more',        'or-more'],
    ['/param',       'or-param'],
    ['/param2',      'or-param2'],
    ['/test',        'or-test']
  ];
  routes.forEach(function(route) {
    $routeProvider.when(route[0], {
      templateUrl: 'templates/' + route[1] + '.html',
      reloadOnSearch: false
    });
  });
});
//
// `$drag` example: drag to dismiss.
// Attribute directive: the element can be dragged to the right; once it has
// been dragged past a quarter of its width and released, the expression bound
// to `drag-to-dismiss` is evaluated against the scope (e.g. to remove a list
// item).
//
app.directive('dragToDismiss', function($drag, $parse, $timeout){
  return {
    restrict: 'A',
    compile: function(elem, attrs) {
      // Parse the attribute expression once, at compile time.
      var dismissFn = $parse(attrs.dragToDismiss);
      return function(scope, elem, attrs){
        // True while the element is dragged far enough to be dismissed on release.
        var dismiss = false;
        $drag.bind(elem, {
          // Allow rightward horizontal movement only.
          constraint: {
            minX: 0,
            minY: 0,
            maxY: 0
          },
          move: function(c) {
            // Past a quarter of the element's width: arm the dismissal and
            // toggle the visual 'dismiss' feedback class accordingly.
            if( c.left >= c.width / 4) {
              dismiss = true;
              elem.addClass('dismiss');
            } else {
              dismiss = false;
              elem.removeClass('dismiss');
            }
          },
          cancel: function(){
            elem.removeClass('dismiss');
          },
          end: function(c, undo, reset) {
            if (dismiss) {
              // Give the CSS transition 400ms to play before evaluating the
              // user-supplied dismiss expression inside a digest.
              elem.addClass('dismitted');
              $timeout(function() {
                scope.$apply(function() {
                  dismissFn(scope);
                });
              }, 400);
            } else {
              // Not dragged far enough: snap back to the original position.
              reset();
            }
          }
        });
      };
    }
  };
});
//
// Touch-enabled "deck of cards" carousel (see `carousel.html` for markup).
// The controller tracks how many cards were registered and which one is on
// top; `next`/`prev` cycle through them with wrap-around.
//
app.directive('carousel', function(){
  return {
    restrict: 'C',
    scope: {},
    controller: function($scope) {
      this.itemCount = 0;
      this.activeItem = null;
      // Register a new card and return its id; the very first card
      // registered becomes the active one.
      this.addItem = function(){
        var newId = this.itemCount++;
        if (this.itemCount == 1) {
          this.activeItem = newId;
        }
        return newId;
      };
      // Advance to the following card, wrapping to the first after the last.
      this.next = function(){
        var current = this.activeItem || 0;
        this.activeItem = (current == this.itemCount - 1) ? 0 : current + 1;
      };
      // Step back to the previous card, wrapping to the last from the first.
      this.prev = function(){
        var current = this.activeItem || 0;
        this.activeItem = (current === 0) ? this.itemCount - 1 : current - 1;
      };
    }
  };
});
// One card inside a `carousel`. Registers itself with the parent carousel
// controller, keeps its z-index in sync with the active card, and binds a
// horizontal drag gesture: dragging past a quarter of the width advances the
// deck (note: both directions call carousel.next()).
app.directive('carouselItem', function($drag) {
  return {
    restrict: 'C',
    require: '^carousel',
    scope: {},
    transclude: true,
    template: '<div class="item"><div ng-transclude></div></div>',
    link: function(scope, elem, attrs, carousel) {
      scope.carousel = carousel;
      // Register with the parent and remember this card's position in the deck.
      var id = carousel.addItem();
      // Stacking order: active card on top (2000); the others fan out below it
      // in deck order relative to the active card.
      var zIndex = function(){
        var res = 0;
        if (id == carousel.activeItem){
          res = 2000;
        } else if (carousel.activeItem < id) {
          res = 2000 - (id - carousel.activeItem);
        } else {
          res = 2000 - (carousel.itemCount - 1 - carousel.activeItem + id);
        }
        return res;
      };
      // Re-stack whenever the active card changes.
      scope.$watch(function(){
        return carousel.activeItem;
      }, function(n, o){
        elem[0].style['z-index']=zIndex();
      });
      $drag.bind(elem, {
        // Horizontal movement only.
        constraint: { minY: 0, maxY: 0 },
        // Tilt the card as it is dragged: rotation follows the horizontal
        // displacement, clamped to +/-15 degrees.
        adaptTransform: function(t, dx, dy, x, y, x0, y0) {
          var maxAngle = 15;
          var velocity = 0.02;
          var r = t.getRotation();
          var newRot = r + Math.round(dx * velocity);
          newRot = Math.min(newRot, maxAngle);
          newRot = Math.max(newRot, -maxAngle);
          t.rotate(-r);
          t.rotate(newRot);
        },
        move: function(c){
          // Visual feedback once the drag passes a quarter width either way.
          if(c.left >= c.width / 4 || c.left <= -(c.width / 4)) {
            elem.addClass('dismiss');
          } else {
            elem.removeClass('dismiss');
          }
        },
        cancel: function(){
          elem.removeClass('dismiss');
        },
        end: function(c, undo, reset) {
          elem.removeClass('dismiss');
          // Either direction advances the deck (both branches call next()).
          if(c.left >= c.width / 4) {
            scope.$apply(function() {
              carousel.next();
            });
          } else if (c.left <= -(c.width / 4)) {
            scope.$apply(function() {
              carousel.next();
            });
          }
          reset();
        }
      });
    }
  };
});
// Callback invoked when a notification alert is dismissed; intentionally a
// no-op for now.
function alertDismissed() {
}
// Main (and only) controller of the app: navigation state, Cordova bootstrap,
// BLE/rfduino device handling, and the demo-screen fixtures.
app.controller('MainController', function(cordovaReady,$rootScope, $scope,$location,$route){
  // Global UI state. `top` ("0"/"1") shows the back bar, `menu` ("0"/"1") the
  // bottom menu, `state` the home-screen step; templates compare them as strings.
  $scope.appName = "Openradiation";
  $scope.buttonHome = "off";
  $scope.state = "1";
  $scope.top="0";
  $scope.menu="1";
  // `isMobile` is a global defined elsewhere — presumably false in a desktop
  // browser, where there is no `deviceready` event, so the nav state is
  // derived from the current route immediately instead of in the async
  // bootstrap callback below. TODO confirm where isMobile is set.
  if (!isMobile)
  {
    var locationPath = $location.path();
    if (locationPath != "/")
    {
      $scope.top="1";
    }
    if (locationPath == "/mesurePrise" || locationPath == "/mesureRecap")
    {
      $scope.menu="0";
    }
  }
  // Bootstrap sequence: wait for Cordova readiness (the commented-out DB
  // setup steps are kept for reference), then initialize the nav state.
  async.series([
    function(callback){ cordovaReady(callback);},
    // function(callback){init_DB(callback);},
    //creta table
    /* function(callback){createTableQuestionnaires(callback);},
    function(callback){createTableHoraires(callback);},
    function(callback){createTableReponses(callback);},*/
    //create db content
    //function(callback){createQuestionnairesSuccess(callback);},
    //test useOk
    // function(callback){do_MC_UseOk(callback,$location,$route);},
  ],
  function(err, results ){
    console.log(results);
    //refreshDevices();
    //init state
    var locationPath = $location.path();
    if (locationPath != "/")
    {
      $scope.top="1";
    }
    if (locationPath == "/mesurePrise" || locationPath == "/mesureRecap")
    {
      $scope.menu="0";
    }
  }
  );//fin async.series*/
  // Scan for BLE sensors; falls back to fakeSearch() when the `ble` plugin is
  // absent (Chrome emulation).
  $scope.buttonSearchCapteur = function(clickEvent){
    if (typeof ble == 'undefined')
    //cas emulation chrome
    {
      fakeSearch($scope);
    }
    else
    {
      ble.isEnabled(
        function() {
          console.log("Bluetooth is enabled");
          alert("Bluetooth is enabled");
          //TODO : do scan until find a known one
          ble.scan([], 25, function(device) {
            //console.log(JSON.stringify(device));
            alert(JSON.stringify(device));
          }, function(){alert('pb');} );
        },
        function() {
          alertNotif("Bluetooth is *not* enabled","Attention","Ok")
        }
      );
    }
  }
  // Navigation handlers: each changes the route and toggles top/menu.
  $scope.goHome = function(clickEvent){
    console.log('goHome');
    $location.path('/');
    $scope.top = "0";
    $scope.menu="1";
  }
  $scope.doMesure = function(clickEvent){
    console.log('doMesure');
    $location.path('/mesurePrise');
    $scope.top = "1";
    $scope.menu="0";
    fakeMesure($scope);
  }
  $scope.endMesure = function(clickEvent){
    console.log('endMesure');
    $location.path('/mesureRecap');
    $scope.top = "1";
    $scope.menu="0";
    // fakeMesure($scope);
  }
  $scope.validMesure = function(clickEvent){
    console.log('validMesure');
    $scope.top = "0";
    $scope.menu="1";
    //$scope.state="1";
    $location.path('/');
    //$location.path('/mesureRecap');
    //$scope.top = "1";
    // fakeMesure($scope);
  }
  $scope.doHisto = function(clickEvent){
    console.log('doHisto');
    $location.path('/histo');
    $scope.top = "1";
    //$scope.menu="0";
    //fakeMesure($scope);
  }
  $scope.doParam = function(clickEvent){
    console.log('doParam');
    $location.path('/param');
    $scope.top = "1";
    //$scope.menu="0";
    //fakeMesure($scope);
  }
  // Second parameters screen: starts a BLE device search (real or faked).
  $scope.doParam2 = function(clickEvent){
    console.log('doParam2');
    $location.path('/param2');
    $scope.top = "1";
    if (typeof ble == 'undefined')
    //cas emulation chrome
    {
      fakeBluetoothDeviceSearch($scope);
    }
    else
    {
      ble.isEnabled(
        function() {doBluetoothDeviceSearch($scope);},
        function() {alertNotif("Bluetooth is *not* enabled","Attention","Ok")}
      );
    }
    //$scope.menu="0";
    //fakeMesure($scope);
  }
  $scope.doMore= function(clickEvent){
    console.log('doMore');
    $location.path('/more');
    $scope.top = "1";
    //$scope.menu="0";
    //fakeMesure($scope);
  }
  // Discovered devices (keyed by id) and the currently connected id (0 = none).
  $scope.devices = {};
  $scope.connectedDeviceId = 0;
  // Test screen: like doParam2 but drives the rfduino plugin instead of ble.
  $scope.doTest = function(clickEvent){
    console.log('doTest');
    $location.path('/test');
    $scope.top = "1";
    if (typeof rfduino == 'undefined')
    //cas emulation chrome
    {
      fakeBluetoothDeviceSearch($scope);
    }
    else
    {
      //ble.isEnabled(
      rfduino.isEnabled(
        function() {doBluetoothDeviceSearch($scope);},
        function() {alertNotif("Bluetooth is *not* enabled","Attention","Ok")}
      );
    }
    //$scope.menu="0";
    //fakeMesure($scope);
  }
  // Connect to an rfduino device; on every incoming ArrayBuffer, publish
  // several decodings on $scope (hex string, Int8/Int16 JSON, char strings,
  // DataView) for inspection on the test screen.
  $scope.doConnect = function(deviceId){
    if (typeof rfduino == 'undefined')
    //cas emulation chrome
    {
      //fakeBluetoothDeviceSearch($scope);
      alertNotif(deviceId+" connecté","Success","Ok")
    }
    else
    {
      rfduino.connect(deviceId,
        function() {
          //success
          alertNotif(deviceId+" connecté","Success","Ok");
          $scope.connectedDeviceId = deviceId;
          $scope.$apply();
          rfduino.onData(function(data){
            //alert(JSON.stringify(data));
            //$scope.data = JSON.stringify(data);
            $scope.length = data.byteLength;
            var buffer1 = new Int8Array(data);
            // Space-separated hex dump of the raw bytes.
            var str = "";
            for (var i=0 ; i<buffer1.length ; i++) {
              str += buffer1[i].toString(16)+" ";
            }
            alert(str);
            $scope.data8 = JSON.stringify(buffer1);
            var buffer = new Int16Array(data);
            $scope.data16 = JSON.stringify(buffer);
            //var buffer3 = new Uint16Array(data);
            // $scope.datau16 = JSON.stringify(buffer3);
            $scope.datau16 = String.fromCharCode.apply(null, new Int16Array(data))
            $scope.datau8 = String.fromCharCode.apply(null, new Int8Array(data))
            /*var buffer2 = new Int32Array(data);
            $scope.data32 = JSON.stringify(buffer2);*/
            $scope.dataview = new DataView(data);
            $scope.$apply();
          },
          function(error){alertNotif(deviceId+" onData error : "+error,"Failure","Ok")});
        },
        function() {alertNotif(deviceId+" non connecté","Failure","Ok")}
      );
    }
  }
  $scope.doDisconnect= function(deviceId){
    if (typeof rfduino == 'undefined')
    //cas emulation chrome
    {
      //fakeBluetoothDeviceSearch($scope);
      alertNotif(deviceId+" connecté","Success","Ok")
    }
    else
    {
      rfduino.disconnect(deviceId,function() {
        //success
        alertNotif(deviceId+" déconnecté","Success","Ok");
        $scope.connectedDeviceId = 0;
        $scope.$apply();
      },
      function() {alertNotif(deviceId+" non déconnecté","Failure","Ok")}
      );
    }
  }
  // Raw BLE characteristic helpers (read / start notifications / stop
  // notifications); outcomes are only reported through alertNotif.
  $scope.doRead = function(deviceId,charId,serviceId){
    ble.read(deviceId,serviceId,charId,
      function(success){
        //alert(JSON.stringify(success));
        alertNotif(JSON.stringify(success),"Success read "+charId+" "+serviceId,"Ok");
      },
      function(failure){
        //alert(JSON.stringify(failure));
        alertNotif(JSON.stringify(failure),"Failure read "+charId+" "+serviceId,"Ok");
      });
  }
  $scope.doNotif = function(deviceId,charId,serviceId){
    ble.startNotification(deviceId,serviceId,charId,
      function(success){
        //alert(JSON.stringify(success));
        alertNotif(JSON.stringify(success),"Success startNotif "+charId+" "+serviceId,"Ok");
      },
      function(failure){
        //alert(JSON.stringify(failure));
        alertNotif(JSON.stringify(failure),"Failure startNotif "+charId+" "+serviceId,"Ok");
      });
  }
  $scope.stopNotif = function(deviceId,charId,serviceId){
    ble.stopNotification(deviceId,serviceId,charId,
      function(success){
        //alert(JSON.stringify(success));
        alertNotif(JSON.stringify(success),"Success stopNotif "+charId+" "+serviceId,"Ok");
      },
      function(failure){
        //alert(JSON.stringify(failure));
        alertNotif(JSON.stringify(failure),"Failure stopNotif "+charId+" "+serviceId,"Ok");
      });
  }
  // User agent displayed in home page
  $scope.userAgent = navigator.userAgent;
  // Needed for the loading screen
  $rootScope.$on('$routeChangeStart', function(){
    $rootScope.loading = true;
  });
  $rootScope.$on('$routeChangeSuccess', function(){
    $rootScope.loading = false;
  });
  //Change path
  //$location.path('/scroll');
  // console.log($rootScope);
  // Fake text i used here and there.
  $scope.lorem = 'Lorem ipsum dolor sit amet, consectetur adipisicing elit. Vel explicabo, aliquid eaque soluta nihil eligendi adipisci error, illum corrupti nam fuga omnis quod quaerat mollitia expedita impedit dolores ipsam. Obcaecati.';
  //
  // 'Scroll' screen
  //
  var scrollItems = [];
  for (var i=1; i<=100; i++) {
    scrollItems.push('Item ' + i);
  }
  $scope.scrollItems = scrollItems;
  $scope.bottomReached = function() {
    alert('Congrats you scrolled to the end of the list!');
  }
  //
  // Right Sidebar
  //
  $scope.chatUsers = [
    { name: 'Carlos Flowers', online: true },
    { name: 'Byron Taylor', online: true },
    { name: 'Jana Terry', online: true },
    { name: 'Darryl Stone', online: true },
    { name: 'Fannie Carlson', online: true },
    { name: 'Holly Nguyen', online: true },
    { name: 'Bill Chavez', online: true },
    { name: 'Veronica Maxwell', online: true },
    { name: 'Jessica Webster', online: true },
    { name: 'Jackie Barton', online: true },
    { name: 'Crystal Drake', online: false },
    { name: 'Milton Dean', online: false },
    { name: 'Joann Johnston', online: false },
    { name: 'Cora Vaughn', online: false },
    { name: 'Nina Briggs', online: false },
    { name: 'Casey Turner', online: false },
    { name: 'Jimmie Wilson', online: false },
    { name: 'Nathaniel Steele', online: false },
    { name: 'Aubrey Cole', online: false },
    { name: 'Donnie Summers', online: false },
    { name: 'Kate Myers', online: false },
    { name: 'Priscilla Hawkins', online: false },
    { name: 'Joe Barker', online: false },
    { name: 'Lee Norman', online: false },
    { name: 'Ebony Rice', online: false }
  ];
  //
  // 'Forms' screen
  //
  $scope.rememberMe = true;
  $scope.email = '[email protected]';
  $scope.login = function() {
    alert('You submitted the login form');
  };
  //
  // 'Drag' screen
  //
  $scope.notices = [];
  for (var j = 0; j < 10; j++) {
    $scope.notices.push({icon: 'envelope', message: 'Notice ' + (j + 1) });
  }
  $scope.deleteNotice = function(notice) {
    var index = $scope.notices.indexOf(notice);
    if (index > -1) {
      $scope.notices.splice(index, 1);
    }
  };
  /// openradiation
  // $("#deviceList").touchend(connect);
  // Start/stop streaming data from the selected device list entry (jQuery
  // driven; `connect` is presumably defined elsewhere — verify).
  $scope.getData = function(clickEvent){
    console.log("getData");
    alert('getData');
    $('#stateData span').html('En cours');
    //rfduino.onData(onData, onRfError);
    $("#deviceList").on('touchend',connect);
    //$("#deviceList").on('click',connect);
    //$("#deviceList").touchend(connect);
  }
  $scope.stopData = function(clickEvent){
    console.log("stopData");
    $('#stateData span').html('Stop');}
});
/*app.controller('FormCtrl', function($rootScope, $scope,$location){
console.log('form');
console.log(testglobal);
$location.path('/toggle');
});*/
/* $rootScope.$apply(function() {
$location.path('/scroll');
//console.log($location.path());
});*/
//CORDOVA
// Minimal Cordova bootstrap helper. `cordovaReady(done)` invokes `done(err, result)`
// exactly once, following the node-style convention expected by the
// async.series call in MainController:
//  - inside a Cordova webview it waits for `deviceready`, then calls
//    done(null,'cordoveaok');
//  - in a plain browser (window.cordova absent) it calls
//    done(null,'cordoveako') immediately.
angular.module('Cordova', [])
.factory('cordovaReady', function(){
  return function(done) {
    if (typeof window.cordova === 'object') {
      document.addEventListener('deviceready', function () {
        console.log('cordovaready');
        done(null,'cordoveaok');
      }, false);
    } else {
      // BUG FIX: this branch previously invoked the callback twice (a bare
      // done() followed by done(null,'cordoveako')), which makes async.series
      // throw "Callback was already called". Call it exactly once.
      done(null,'cordoveako');
    }
  };
});
//
// Root application module: declares every Angular module this app depends on
// (the Cordova bootstrap helper, ngRoute, mobile-angular-ui widgets and
// gestures, and the app's own services).
//
var app = angular.module('MobileAngularUiExamples', [
  'Cordova',
  'ngRoute',
  'mobile-angular-ui',
  // touch/drag feature: this is from 'mobile-angular-ui.gestures.js'
  // it is at a very early stage, so please be careful if you'd like to use it
  // in production. It is intended to provide a flexible, integrated and
  // easy to use alternative to other 3rd party libs like hammer.js, with the
  // final purpose to integrate gestures into default ui interactions like
  // opening sidebars, turning switches on/off ..
  'mobile-angular-ui.gestures',
  'starter.services'
]);
//
// Route table. `reloadOnSearch` is disabled on every route so that
// SharedState-driven $location.search() changes (e.g. closing a sidebar with
// the back button) do not retrigger routing.
//
app.config(function($routeProvider) {
  var routes = [
    ['/',            'or-home'],
    ['/scroll',      'scroll'],
    ['/toggle',      'toggle'],
    ['/tabs',        'tabs'],
    ['/accordion',   'accordion'],
    ['/overlay',     'overlay'],
    ['/forms',       'forms'],
    ['/dropdown',    'dropdown'],
    ['/drag',        'drag'],
    ['/carousel',    'carousel'],
    ['/useok',       'useok'],
    ['/tab-charts',  'tab-charts'],
    ['/mesurePrise', 'or-mesure-prise'],
    ['/mesureRecap', 'or-mesure-recap'],
    ['/histo',       'or-histo'],
    ['/more',        'or-more'],
    ['/param',       'or-param'],
    ['/param2',      'or-param2'],
    ['/test',        'or-test']
  ];
  routes.forEach(function(route) {
    $routeProvider.when(route[0], {
      templateUrl: 'templates/' + route[1] + '.html',
      reloadOnSearch: false
    });
  });
});
//
// `$drag` example: drag to dismiss.
// Attribute directive: the element can be dragged to the right; once it has
// been dragged past a quarter of its width and released, the expression bound
// to `drag-to-dismiss` is evaluated against the scope (e.g. to remove a list
// item).
//
app.directive('dragToDismiss', function($drag, $parse, $timeout){
  return {
    restrict: 'A',
    compile: function(elem, attrs) {
      // Parse the attribute expression once, at compile time.
      var dismissFn = $parse(attrs.dragToDismiss);
      return function(scope, elem, attrs){
        // True while the element is dragged far enough to be dismissed on release.
        var dismiss = false;
        $drag.bind(elem, {
          // Allow rightward horizontal movement only.
          constraint: {
            minX: 0,
            minY: 0,
            maxY: 0
          },
          move: function(c) {
            // Past a quarter of the element's width: arm the dismissal and
            // toggle the visual 'dismiss' feedback class accordingly.
            if( c.left >= c.width / 4) {
              dismiss = true;
              elem.addClass('dismiss');
            } else {
              dismiss = false;
              elem.removeClass('dismiss');
            }
          },
          cancel: function(){
            elem.removeClass('dismiss');
          },
          end: function(c, undo, reset) {
            if (dismiss) {
              // Give the CSS transition 400ms to play before evaluating the
              // user-supplied dismiss expression inside a digest.
              elem.addClass('dismitted');
              $timeout(function() {
                scope.$apply(function() {
                  dismissFn(scope);
                });
              }, 400);
            } else {
              // Not dragged far enough: snap back to the original position.
              reset();
            }
          }
        });
      };
    }
  };
});
//
// Touch-enabled "deck of cards" carousel (see `carousel.html` for markup).
// The controller tracks how many cards were registered and which one is on
// top; `next`/`prev` cycle through them with wrap-around.
//
app.directive('carousel', function(){
  return {
    restrict: 'C',
    scope: {},
    controller: function($scope) {
      this.itemCount = 0;
      this.activeItem = null;
      // Register a new card and return its id; the very first card
      // registered becomes the active one.
      this.addItem = function(){
        var newId = this.itemCount++;
        if (this.itemCount == 1) {
          this.activeItem = newId;
        }
        return newId;
      };
      // Advance to the following card, wrapping to the first after the last.
      this.next = function(){
        var current = this.activeItem || 0;
        this.activeItem = (current == this.itemCount - 1) ? 0 : current + 1;
      };
      // Step back to the previous card, wrapping to the last from the first.
      this.prev = function(){
        var current = this.activeItem || 0;
        this.activeItem = (current === 0) ? this.itemCount - 1 : current - 1;
      };
    }
  };
});
// One card inside a `carousel`. Registers itself with the parent carousel
// controller, keeps its z-index in sync with the active card, and binds a
// horizontal drag gesture: dragging past a quarter of the width advances the
// deck (note: both directions call carousel.next()).
app.directive('carouselItem', function($drag) {
  return {
    restrict: 'C',
    require: '^carousel',
    scope: {},
    transclude: true,
    template: '<div class="item"><div ng-transclude></div></div>',
    link: function(scope, elem, attrs, carousel) {
      scope.carousel = carousel;
      // Register with the parent and remember this card's position in the deck.
      var id = carousel.addItem();
      // Stacking order: active card on top (2000); the others fan out below it
      // in deck order relative to the active card.
      var zIndex = function(){
        var res = 0;
        if (id == carousel.activeItem){
          res = 2000;
        } else if (carousel.activeItem < id) {
          res = 2000 - (id - carousel.activeItem);
        } else {
          res = 2000 - (carousel.itemCount - 1 - carousel.activeItem + id);
        }
        return res;
      };
      // Re-stack whenever the active card changes.
      scope.$watch(function(){
        return carousel.activeItem;
      }, function(n, o){
        elem[0].style['z-index']=zIndex();
      });
      $drag.bind(elem, {
        // Horizontal movement only.
        constraint: { minY: 0, maxY: 0 },
        // Tilt the card as it is dragged: rotation follows the horizontal
        // displacement, clamped to +/-15 degrees.
        adaptTransform: function(t, dx, dy, x, y, x0, y0) {
          var maxAngle = 15;
          var velocity = 0.02;
          var r = t.getRotation();
          var newRot = r + Math.round(dx * velocity);
          newRot = Math.min(newRot, maxAngle);
          newRot = Math.max(newRot, -maxAngle);
          t.rotate(-r);
          t.rotate(newRot);
        },
        move: function(c){
          // Visual feedback once the drag passes a quarter width either way.
          if(c.left >= c.width / 4 || c.left <= -(c.width / 4)) {
            elem.addClass('dismiss');
          } else {
            elem.removeClass('dismiss');
          }
        },
        cancel: function(){
          elem.removeClass('dismiss');
        },
        end: function(c, undo, reset) {
          elem.removeClass('dismiss');
          // Either direction advances the deck (both branches call next()).
          if(c.left >= c.width / 4) {
            scope.$apply(function() {
              carousel.next();
            });
          } else if (c.left <= -(c.width / 4)) {
            scope.$apply(function() {
              carousel.next();
            });
          }
          reset();
        }
      });
    }
  };
});
// Callback invoked when a notification alert is dismissed; intentionally a
// no-op for now.
function alertDismissed() {
}
app.controller('MainController', function(cordovaReady,$rootScope, $scope,$location,$route){
$scope.appName = "Openradiation";
$scope.buttonHome = "off";
$scope.state = "1";
$scope.top="0";
$scope.menu="1";
if (!isMobile)
{
var locationPath = $location.path();
if (locationPath != "/")
{
$scope.top="1";
}
if (locationPath == "/mesurePrise" || locationPath == "/mesureRecap")
{
$scope.menu="0";
}
}
async.series([
function(callback){ cordovaReady(callback);},
// function(callback){init_DB(callback);},
//creta table
/* function(callback){createTableQuestionnaires(callback);},
function(callback){createTableHoraires(callback);},
function(callback){createTableReponses(callback);},*/
//create db content
//function(callback){createQuestionnairesSuccess(callback);},
//test useOk
// function(callback){do_MC_UseOk(callback,$location,$route);},
],
function(err, results ){
console.log(results);
//refreshDevices();
//init state
var locationPath = $location.path();
if (locationPath != "/")
{
$scope.top="1";
}
if (locationPath == "/mesurePrise" || locationPath == "/mesureRecap")
{
$scope.menu="0";
}
}
);//fin async.series*/
$scope.buttonSearchCapteur = function(clickEvent){
if (typeof ble == 'undefined')
//cas emulation chrome
{
fakeSearch($scope);
}
else
{
ble.isEnabled(
function() {
console.log("Bluetooth is enabled");
alert("Bluetooth is enabled");
//TODO : do scan until find a known one
ble.scan([], 25, function(device) {
//console.log(JSON.stringify(device));
alert(JSON.stringify(device));
}, function(){alert('pb');} );
},
function() {
alertNotif("Bluetooth is *not* enabled","Attention","Ok")
}
);
}
}
$scope.goHome = function(clickEvent){
console.log('goHome');
$location.path('/');
$scope.top = "0";
$scope.menu="1";
}
$scope.doMesure = function(clickEvent){
console.log('doMesure');
$location.path('/mesurePrise');
$scope.top = "1";
$scope.menu="0";
fakeMesure($scope);
}
$scope.endMesure = function(clickEvent){
console.log('endMesure');
$location.path('/mesureRecap');
$scope.top = "1";
$scope.menu="0";
// fakeMesure($scope);
}
$scope.validMesure = function(clickEvent){
console.log('validMesure');
$scope.top = "0";
$scope.menu="1";
//$scope.state="1";
$location.path('/');
//$location.path('/mesureRecap');
//$scope.top = "1";
// fakeMesure($scope);
}
$scope.doHisto = function(clickEvent){
console.log('doHisto');
$location.path('/histo');
$scope.top = "1";
//$scope.menu="0";
//fakeMesure($scope);
}
$scope.doParam = function(clickEvent){
console.log('doParam');
$location.path('/param');
$scope.top = "1";
//$scope.menu="0";
//fakeMesure($scope);
}
$scope.doParam2 = function(clickEvent){
console.log('doParam2');
$location.path('/param2');
$scope.top = "1";
if (typeof ble == 'undefined')
//cas emulation chrome
{
fakeBluetoothDeviceSearch($scope);
}
else
{
ble.isEnabled(
function() {doBluetoothDeviceSearch($scope);},
function() {alertNotif("Bluetooth is *not* enabled","Attention","Ok")}
);
}
//$scope.menu="0";
//fakeMesure($scope);
}
$scope.doMore= function(clickEvent){
console.log('doMore');
$location.path('/more');
$scope.top = "1";
//$scope.menu="0";
//fakeMesure($scope);
}
$scope.devices = {};
$scope.connectedDeviceId = 0;
$scope.doTest = function(clickEvent){
console.log('doTest');
$location.path('/test');
$scope.top = "1";
if (typeof rfduino == 'undefined')
//cas emulation chrome
{
fakeBluetoothDeviceSearch($scope);
}
else
{
//ble.isEnabled(
rfduino.isEnabled(
function() {doBluetoothDeviceSearch($scope);},
function() {alertNotif("Bluetooth is *not* enabled","Attention","Ok")}
);
}
//$scope.menu="0";
//fakeMesure($scope);
}
$scope.doConnect = function(deviceId){
if (typeof rfduino == 'undefined')
//cas emulation chrome
{
//fakeBluetoothDeviceSearch($scope);
alertNotif(deviceId+" connecté","Success","Ok")
}
else
{
rfduino.connect(deviceId,
function() {
//success
alertNotif(deviceId+" connecté","Success","Ok");
$scope.connectedDeviceId = deviceId;
$scope.$apply();
rfduino.onData(function(data){
//alert(JSON.stringify(data));
//$scope.data = JSON.stringify(data);
$scope.length = data.byteLength;
var buffer1 = new Int8Array(data);
alert(buffer1.toString());
$scope.data8 = JSON.stringify(buffer1);
var buffer = new Int16Array(data);
$scope.data16 = JSON.stringify(buffer);
//var buffer3 = new Uint16Array(data);
// $scope.datau16 = JSON.stringify(buffer3);
$scope.datau16 = String.fromCharCode.apply(null, new Int16Array(data))
$scope.datau8 = String.fromCharCode.apply(null, new Int8Array(data))
/*var buffer2 = new Int32Array(data);
$scope.data32 = JSON.stringify(buffer2);*/
$scope.dataview = new DataView(data);
$scope.$apply();
},
function(error){alertNotif(deviceId+" onData error : "+error,"Failure","Ok")});
},
function() {alertNotif(deviceId+" non connecté","Failure","Ok")}
);
}
}
$scope.doDisconnect= function(deviceId){
if (typeof rfduino == 'undefined')
//cas emulation chrome
{
//fakeBluetoothDeviceSearch($scope);
alertNotif(deviceId+" connecté","Success","Ok")
}
else
{
rfduino.disconnect(deviceId,function() {
//success
alertNotif(deviceId+" déconnecté","Success","Ok");
$scope.connectedDeviceId = 0;
$scope.$apply();
},
function() {alertNotif(deviceId+" non déconnecté","Failure","Ok")}
);
}
}
$scope.doRead = function(deviceId,charId,serviceId){
ble.read(deviceId,serviceId,charId,
function(success){
//alert(JSON.stringify(success));
alertNotif(JSON.stringify(success),"Success read "+charId+" "+serviceId,"Ok");
},
function(failure){
//alert(JSON.stringify(failure));
alertNotif(JSON.stringify(failure),"Failure read "+charId+" "+serviceId,"Ok");
});
}
$scope.doNotif = function(deviceId,charId,serviceId){
ble.startNotification(deviceId,serviceId,charId,
function(success){
//alert(JSON.stringify(success));
alertNotif(JSON.stringify(success),"Success startNotif "+charId+" "+serviceId,"Ok");
},
function(failure){
//alert(JSON.stringify(failure));
alertNotif(JSON.stringify(failure),"Failure startNotif "+charId+" "+serviceId,"Ok");
});
}
$scope.stopNotif = function(deviceId,charId,serviceId){
ble.stopNotification(deviceId,serviceId,charId,
function(success){
//alert(JSON.stringify(success));
alertNotif(JSON.stringify(success),"Success stopNotif "+charId+" "+serviceId,"Ok");
},
function(failure){
//alert(JSON.stringify(failure));
alertNotif(JSON.stringify(failure),"Failure stopNotif "+charId+" "+serviceId,"Ok");
});
}
// User agent displayed in home page
$scope.userAgent = navigator.userAgent;
// Needed for the loading screen
$rootScope.$on('$routeChangeStart', function(){
$rootScope.loading = true;
});
$rootScope.$on('$routeChangeSuccess', function(){
$rootScope.loading = false;
});
//Change path
//$location.path('/scroll');
// console.log($rootScope);
// Fake text i used here and there.
$scope.lorem = 'Lorem ipsum dolor sit amet, consectetur adipisicing elit. Vel explicabo, aliquid eaque soluta nihil eligendi adipisci error, illum corrupti nam fuga omnis quod quaerat mollitia expedita impedit dolores ipsam. Obcaecati.';
//
// 'Scroll' screen
//
var scrollItems = [];
for (var i=1; i<=100; i++) {
scrollItems.push('Item ' + i);
}
$scope.scrollItems = scrollItems;
$scope.bottomReached = function() {
alert('Congrats you scrolled to the end of the list!');
}
//
// Right Sidebar
//
$scope.chatUsers = [
{ name: 'Carlos Flowers', online: true },
{ name: 'Byron Taylor', online: true },
{ name: 'Jana Terry', online: true },
{ name: 'Darryl Stone', online: true },
{ name: 'Fannie Carlson', online: true },
{ name: 'Holly Nguyen', online: true },
{ name: 'Bill Chavez', online: true },
{ name: 'Veronica Maxwell', online: true },
{ name: 'Jessica Webster', online: true },
{ name: 'Jackie Barton', online: true },
{ name: 'Crystal Drake', online: false },
{ name: 'Milton Dean', online: false },
{ name: 'Joann Johnston', online: false },
{ name: 'Cora Vaughn', online: false },
{ name: 'Nina Briggs', online: false },
{ name: 'Casey Turner', online: false },
{ name: 'Jimmie Wilson', online: false },
{ name: 'Nathaniel Steele', online: false },
{ name: 'Aubrey Cole', online: false },
{ name: 'Donnie Summers', online: false },
{ name: 'Kate Myers', online: false },
{ name: 'Priscilla Hawkins', online: false },
{ name: 'Joe Barker', online: false },
{ name: 'Lee Norman', online: false },
{ name: 'Ebony Rice', online: false }
];
//
// 'Forms' screen
//
$scope.rememberMe = true;
$scope.email = '[email protected]';
$scope.login = function() {
alert('You submitted the login form');
};
//
// 'Drag' screen
//
$scope.notices = [];
for (var j = 0; j < 10; j++) {
$scope.notices.push({icon: 'envelope', message: 'Notice ' + (j + 1) });
}
$scope.deleteNotice = function(notice) {
var index = $scope.notices.indexOf(notice);
if (index > -1) {
$scope.notices.splice(index, 1);
}
};
/// openradiation
// $("#deviceList").touchend(connect);
$scope.getData = function(clickEvent){
console.log("getData");
alert('getData');
$('#stateData span').html('En cours');
//rfduino.onData(onData, onRfError);
$("#deviceList").on('touchend',connect);
//$("#deviceList").on('click',connect);
//$("#deviceList").touchend(connect);
}
$scope.stopData = function(clickEvent){
console.log("stopData");
$('#stateData span').html('Stop');}
});
/*app.controller('FormCtrl', function($rootScope, $scope,$location){
console.log('form');
console.log(testglobal);
$location.path('/toggle');
});*/
/* $rootScope.$apply(function() {
$location.path('/scroll');
//console.log($location.path());
});*/
//CORDOVA
// Factory returning a wrapper that defers `done` until Cordova's
// 'deviceready' event has fired. When not running inside Cordova
// (plain browser), `done` is invoked immediately. The callback is
// node-style: done(err, result).
angular.module('Cordova', [])
  .factory('cordovaReady', function(){
    return function(done) {
      if (typeof window.cordova === 'object') {
        document.addEventListener('deviceready', function () {
          console.log('cordovaready');
          done(null,'cordoveaok');
        }, false);
      } else {
        // Bug fix: `done` was previously called twice on this path
        // (a bare done() followed by done(null, ...)); invoke it once,
        // matching the Cordova branch above.
        done(null,'cordoveako');
      }
    };
  });
| www/js/demo.js | migration test vers rfduino | <ide><path>ww/js/demo.js
<ide> //$scope.data = JSON.stringify(data);
<ide> $scope.length = data.byteLength;
<ide> var buffer1 = new Int8Array(data);
<del> alert(buffer1.toString());
<add> var str = "";
<add> for (var i=0 ; i<buffer1.length ; i++) {
<add> str += buffer1[i].toString(16)+" ";
<add> }
<add> alert(str);
<ide> $scope.data8 = JSON.stringify(buffer1);
<ide> var buffer = new Int16Array(data);
<ide> $scope.data16 = JSON.stringify(buffer); |
|
Java | apache-2.0 | 3cd5ab760884a254dc82071a6d92166ddd35e14c | 0 | hvivani/bigdata,hvivani/bigdata,hvivani/bigdata | package com.amazonaws.vivanih.hadoop.cascading;
import java.util.Properties;
import java.util.Calendar;
import java.text.SimpleDateFormat;
import cascading.flow.Flow;
import cascading.flow.FlowConnector;
import cascading.flow.FlowDef;
import cascading.flow.hadoop.HadoopFlowConnector;
import cascading.operation.aggregator.Count;
import cascading.operation.aggregator.Average;
import cascading.operation.regex.RegexSplitGenerator;
import cascading.operation.regex.RegexGenerator;
import cascading.operation.regex.RegexFilter;
import cascading.operation.text.DateParser;
import cascading.operation.text.DateFormatter;
import cascading.tap.SinkMode;
import cascading.pipe.Each;
import cascading.pipe.Every;
import cascading.pipe.GroupBy;
import cascading.pipe.Pipe;
import cascading.pipe.assembly.Unique;
import cascading.property.AppProps;
import cascading.scheme.Scheme;
import cascading.scheme.hadoop.TextDelimited;
import cascading.tap.Tap;
import cascading.tap.hadoop.Hfs;
import cascading.tuple.Fields;
public class
Main
{
public static void
main( String[] args )
{
String inPath = args[ 0 ];
String outPath = args[ 1 ];
Properties properties = new Properties();
AppProps.setApplicationJarClass( properties, Main.class );
FlowConnector flowConnector = new HadoopFlowConnector( properties );
//scheme definition:
Scheme inScheme = new TextDelimited( new Fields("LoadId", "MRP", "ServicerName", "CIR", "UPB", "LoanAge", "RMLM" , "ARMM", "MadurityDate", "MSA", "CLDS", "ModificationFlag", "ZBC", "ZBED", "RepurchaseIndicator"), "|");
Scheme outScheme = new TextDelimited( new Fields( /*"year",*/ "month", "UPBaverage"),"\t" ) ;
// create source and sink taps
Tap inTap = new Hfs( inScheme, inPath );
Tap outTap = new Hfs( outScheme, outPath );
//Tap outTap = new Hfs( new TextDelimited(true, "\t"), outPath );
//convert date to "ts" from MRP field
DateParser dateParser = new DateParser( new Fields( "ts" ), "MM/dd/yyyy" );
//Parsing the date. (0 is January)
//DateParser dateParser= new DateParser(new Fields("month", "day", "year"), new int[] { Calendar.MONTH, Calendar.DAY_OF_MONTH, Calendar.YEAR }, "MM/dd/yyyy");
//parse pipe will parse the MRP field into month, day, year. /with Fields.ALL I get original fields + new fields. With Fields.RESULTS, I get only the date parsed.
Pipe parsePipe = new Each("parsePipe", new Fields("MRP"), dateParser , Fields.ALL);
//change the format from "ts" to date required
DateFormatter formatter = new DateFormatter( new Fields( "date" ), "dd/MMMM/yyyy" );
parsePipe = new Each( parsePipe, new Fields( "ts" ), formatter, Fields.ALL );
//regex to extract the month in MMMM format:
RegexGenerator splitter=new RegexGenerator(new Fields("month"),"(?<!\\pL)(?=\\pL)[^ ]*(?<=\\pL)(?!\\pL])");
parsePipe = new Each( parsePipe, new Fields( "date" ), splitter, Fields.ALL );
//filterning tuples with null at UPB field
RegexFilter nullfilter = new RegexFilter( "^$" ,true);
parsePipe = new Each( parsePipe, new Fields( "UPB" ), nullfilter );
//Pipe to filter duplicates. It is like select distinct. It is like using combiners.
//http://docs.cascading.org/cascading/2.2/javadoc/cascading/pipe/assembly/Unique.html
parsePipe = new Unique(parsePipe, new Fields("LoadId", "MRP", "ServicerName", "CIR", "UPB", "LoanAge", "RMLM" , "ARMM", "MadurityDate", "MSA", "CLDS", "ModificationFlag", "ZBC", "ZBED", "RepurchaseIndicator"));
// aggregate by year, month. input is previous parsePipe
Pipe averagePipe = new GroupBy( "averagePipe", uniquePipe, new Fields(/*"year",*/ "month" ));
// average each aggregation
averagePipe = new Every(averagePipe, new Fields("UPB"), new Average(new Fields("UPBaverage")), Fields.ALL );
// connect the taps, pipes, etc., into a flow
FlowDef flowDef = FlowDef.flowDef()
.addSource( averagePipe, inTap )
//.addSource( parsePipe, inTap )
.addTailSink(averagePipe, outTap );
//.addTailSink(parsePipe, outTap );
// run the flow
flowConnector.connect( flowDef ).complete();
}
}
| cascading/AveragePerMonth/Main.java | package com.amazonaws.vivanih.hadoop.cascading;
import java.util.Properties;
import java.util.Calendar;
import java.text.SimpleDateFormat;
import cascading.flow.Flow;
import cascading.flow.FlowConnector;
import cascading.flow.FlowDef;
import cascading.flow.hadoop.HadoopFlowConnector;
import cascading.operation.aggregator.Count;
import cascading.operation.aggregator.Average;
import cascading.operation.regex.RegexSplitGenerator;
import cascading.operation.regex.RegexGenerator;
import cascading.operation.text.DateParser;
import cascading.operation.text.DateFormatter;
import cascading.tap.SinkMode;
import cascading.pipe.Each;
import cascading.pipe.Every;
import cascading.pipe.GroupBy;
import cascading.pipe.Pipe;
import cascading.pipe.assembly.Unique;
import cascading.property.AppProps;
import cascading.scheme.Scheme;
import cascading.scheme.hadoop.TextDelimited;
import cascading.tap.Tap;
import cascading.tap.hadoop.Hfs;
import cascading.tuple.Fields;
public class
Main
{
public static void
main( String[] args )
{
String inPath = args[ 0 ];
String outPath = args[ 1 ];
Properties properties = new Properties();
AppProps.setApplicationJarClass( properties, Main.class );
FlowConnector flowConnector = new HadoopFlowConnector( properties );
//scheme definition:
Scheme inScheme = new TextDelimited( new Fields("LoadId", "MRP", "ServicerName", "CIR", "UPB", "LoanAge", "RMLM" , "ARMM", "MadurityDate", "MSA", "CLDS", "ModificationFlag", "ZBC", "ZBED", "RepurchaseIndicator"), "|");
Scheme outScheme = new TextDelimited( new Fields( /*"year",*/ "month", "UPBaverage"),"\t" ) ;
// create source and sink taps
Tap inTap = new Hfs( inScheme, inPath );
Tap outTap = new Hfs( outScheme, outPath );
//Tap outTap = new Hfs( new TextDelimited(true, "\t"), outPath );
//convert date to "ts" from MRP field
DateParser dateParser = new DateParser( new Fields( "ts" ), "MM/dd/yyyy" );
//Parsing the date. (0 is January)
//DateParser dateParser= new DateParser(new Fields("month", "day", "year"), new int[] { Calendar.MONTH, Calendar.DAY_OF_MONTH, Calendar.YEAR }, "MM/dd/yyyy");
//parse pipe will parse the MRP field into month, day, year. /with Fields.ALL I get original fields + new fields. With Fields.RESULTS, I get only the date parsed.
Pipe parsePipe = new Each("parsePipe", new Fields("MRP"), dateParser , Fields.ALL);
//change the format from "ts" to date required
DateFormatter formatter = new DateFormatter( new Fields( "date" ), "dd/MMMM/yyyy" );
parsePipe = new Each( parsePipe, new Fields( "ts" ), formatter, Fields.ALL );
//regex to extract the month in MMMM format:
RegexGenerator splitter=new RegexGenerator(new Fields("month"),"(?<!\\pL)(?=\\pL)[^ ]*(?<=\\pL)(?!\\pL])");
parsePipe = new Each( parsePipe, new Fields( "date" ), splitter, Fields.ALL );
//Pipe to filter duplicates. It is like select distinct. It is like using combiners.
//http://docs.cascading.org/cascading/2.2/javadoc/cascading/pipe/assembly/Unique.html
Pipe uniquePipe = new Unique("unique", parsePipe, new Fields("LoadId","MRP"));
// aggregate by year, month. input is previous parsePipe
Pipe averagePipe = new GroupBy( "averagePipe", uniquePipe, new Fields(/*"year",*/ "month" ));
// average each aggregation
averagePipe = new Every(averagePipe, new Fields("UPB"), new Average(new Fields("UPBaverage")), Fields.ALL );
// connect the taps, pipes, etc., into a flow
FlowDef flowDef = FlowDef.flowDef()
.addSource( averagePipe, inTap )
//.addSource( parsePipe, inTap )
.addTailSink(averagePipe, outTap );
//.addTailSink(parsePipe, outTap );
// run the flow
flowConnector.connect( flowDef ).complete();
}
}
| Update Main.java | cascading/AveragePerMonth/Main.java | Update Main.java | <ide><path>ascading/AveragePerMonth/Main.java
<ide> import cascading.operation.aggregator.Average;
<ide> import cascading.operation.regex.RegexSplitGenerator;
<ide> import cascading.operation.regex.RegexGenerator;
<add>import cascading.operation.regex.RegexFilter;
<ide> import cascading.operation.text.DateParser;
<ide> import cascading.operation.text.DateFormatter;
<ide> import cascading.tap.SinkMode;
<ide> import cascading.tap.Tap;
<ide> import cascading.tap.hadoop.Hfs;
<ide> import cascading.tuple.Fields;
<add>
<ide>
<ide>
<ide> public class
<ide> RegexGenerator splitter=new RegexGenerator(new Fields("month"),"(?<!\\pL)(?=\\pL)[^ ]*(?<=\\pL)(?!\\pL])");
<ide> parsePipe = new Each( parsePipe, new Fields( "date" ), splitter, Fields.ALL );
<ide>
<add> //filterning tuples with null at UPB field
<add> RegexFilter nullfilter = new RegexFilter( "^$" ,true);
<add> parsePipe = new Each( parsePipe, new Fields( "UPB" ), nullfilter );
<add>
<ide> //Pipe to filter duplicates. It is like select distinct. It is like using combiners.
<ide> //http://docs.cascading.org/cascading/2.2/javadoc/cascading/pipe/assembly/Unique.html
<del> Pipe uniquePipe = new Unique("unique", parsePipe, new Fields("LoadId","MRP"));
<add> parsePipe = new Unique(parsePipe, new Fields("LoadId", "MRP", "ServicerName", "CIR", "UPB", "LoanAge", "RMLM" , "ARMM", "MadurityDate", "MSA", "CLDS", "ModificationFlag", "ZBC", "ZBED", "RepurchaseIndicator"));
<ide>
<ide> // aggregate by year, month. input is previous parsePipe
<ide> Pipe averagePipe = new GroupBy( "averagePipe", uniquePipe, new Fields(/*"year",*/ "month" )); |
|
Java | apache-2.0 | 9954d3965557083c224ae3e04bf06260b0a709d1 | 0 | fhg-fokus-nubomedia/nubomedia-paas,fhg-fokus-nubomedia/nubomedia-paas,nubomedia/nubomedia-paas,nubomedia/nubomedia-paas,nubomedia/nubomedia-paas,fhg-fokus-nubomedia/nubomedia-paas,nubomedia/nubomedia-paas,fhg-fokus-nubomedia/nubomedia-paas | /*
* Copyright (c) 2015-2016 Fraunhofer FOKUS
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.project.openbaton.nubomedia.paas.main;
import org.project.openbaton.nubomedia.paas.core.openshift.OpenshiftConfiguration;
import org.project.openbaton.nubomedia.paas.events.ConfigurationBeans;
import org.project.openbaton.nubomedia.paas.main.utils.BeanSchedulerConfiguration;
import org.project.openbaton.nubomedia.paas.main.utils.OpenbatonConfiguration;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.orm.jpa.EntityScan;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.data.jpa.repository.config.EnableJpaRepositories;
import org.springframework.test.context.ContextConfiguration;
/**
* Created by lto on 24/09/15.
*/
@SpringBootApplication
@ContextConfiguration(classes = {OpenshiftConfiguration.class, OpenbatonConfiguration.class, BeanSchedulerConfiguration.class, ConfigurationBeans.class, })
@EnableJpaRepositories("org.project.openbaton.nubomedia.paas")
@EntityScan(basePackages = "org.project.openbaton")
@ComponentScan(basePackages = "org.project.openbaton")
public class Main {
public static void main(String[] args) {
ApplicationContext context = SpringApplication.run(Main.class, args);
}
}
| src/main/java/org/project/openbaton/nubomedia/paas/main/Main.java | /*
* Copyright (c) 2015-2016 Fraunhofer FOKUS
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.project.openbaton.nubomedia.paas.main;
import org.project.openbaton.nubomedia.paas.core.openshift.OpenshiftConfiguration;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.orm.jpa.EntityScan;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.data.jpa.repository.config.EnableJpaRepositories;
import org.springframework.scheduling.annotation.EnableScheduling;
import org.springframework.test.context.ContextConfiguration;
/**
* Created by lto on 24/09/15.
*/
@SpringBootApplication
@ContextConfiguration(classes = OpenshiftConfiguration.class)
@EnableJpaRepositories ("org.project.openbaton.nubomedia.paas")
@EntityScan (basePackages = "org.project.openbaton")
@EnableScheduling
@ComponentScan(basePackages = "org.project.openbaton")
public class Main {
public static void main(String[] args) {
ApplicationContext context = SpringApplication.run(Main.class, args);
}
}
| Added Bean Configuration classes to ContextConfiguration
| src/main/java/org/project/openbaton/nubomedia/paas/main/Main.java | Added Bean Configuration classes to ContextConfiguration | <ide><path>rc/main/java/org/project/openbaton/nubomedia/paas/main/Main.java
<ide> package org.project.openbaton.nubomedia.paas.main;
<ide>
<ide> import org.project.openbaton.nubomedia.paas.core.openshift.OpenshiftConfiguration;
<add>import org.project.openbaton.nubomedia.paas.events.ConfigurationBeans;
<add>import org.project.openbaton.nubomedia.paas.main.utils.BeanSchedulerConfiguration;
<add>import org.project.openbaton.nubomedia.paas.main.utils.OpenbatonConfiguration;
<ide> import org.springframework.boot.SpringApplication;
<ide> import org.springframework.boot.autoconfigure.SpringBootApplication;
<ide> import org.springframework.boot.orm.jpa.EntityScan;
<ide> import org.springframework.context.ApplicationContext;
<ide> import org.springframework.context.annotation.ComponentScan;
<ide> import org.springframework.data.jpa.repository.config.EnableJpaRepositories;
<del>import org.springframework.scheduling.annotation.EnableScheduling;
<ide> import org.springframework.test.context.ContextConfiguration;
<ide>
<ide> /**
<ide> * Created by lto on 24/09/15.
<ide> */
<ide> @SpringBootApplication
<del>@ContextConfiguration(classes = OpenshiftConfiguration.class)
<del>@EnableJpaRepositories ("org.project.openbaton.nubomedia.paas")
<del>@EntityScan (basePackages = "org.project.openbaton")
<del>@EnableScheduling
<add>@ContextConfiguration(classes = {OpenshiftConfiguration.class, OpenbatonConfiguration.class, BeanSchedulerConfiguration.class, ConfigurationBeans.class, })
<add>@EnableJpaRepositories("org.project.openbaton.nubomedia.paas")
<add>@EntityScan(basePackages = "org.project.openbaton")
<ide> @ComponentScan(basePackages = "org.project.openbaton")
<ide> public class Main {
<ide> public static void main(String[] args) { |
|
Java | apache-2.0 | bbd755006aaff1e14c950d46a883425081593ee9 | 0 | FHannes/intellij-community,clumsy/intellij-community,suncycheng/intellij-community,vladmm/intellij-community,vvv1559/intellij-community,signed/intellij-community,izonder/intellij-community,michaelgallacher/intellij-community,adedayo/intellij-community,fnouama/intellij-community,blademainer/intellij-community,kool79/intellij-community,da1z/intellij-community,mglukhikh/intellij-community,petteyg/intellij-community,idea4bsd/idea4bsd,apixandru/intellij-community,FHannes/intellij-community,MichaelNedzelsky/intellij-community,orekyuu/intellij-community,xfournet/intellij-community,kdwink/intellij-community,retomerz/intellij-community,SerCeMan/intellij-community,lucafavatella/intellij-community,Lekanich/intellij-community,caot/intellij-community,ol-loginov/intellij-community,muntasirsyed/intellij-community,kdwink/intellij-community,caot/intellij-community,retomerz/intellij-community,fnouama/intellij-community,alphafoobar/intellij-community,akosyakov/intellij-community,slisson/intellij-community,caot/intellij-community,ernestp/consulo,amith01994/intellij-community,dslomov/intellij-community,clumsy/intellij-community,ftomassetti/intellij-community,diorcety/intellij-community,samthor/intellij-community,ftomassetti/intellij-community,slisson/intellij-community,dslomov/intellij-community,signed/intellij-community,lucafavatella/intellij-community,clumsy/intellij-community,hurricup/intellij-community,samthor/intellij-community,youdonghai/intellij-community,ivan-fedorov/intellij-community,signed/intellij-community,ibinti/intellij-community,salguarnieri/intellij-community,MichaelNedzelsky/intellij-community,kdwink/intellij-community,asedunov/intellij-community,diorcety/intellij-community,xfournet/intellij-community,youdonghai/intellij-community,nicolargo/intellij-community,idea4bsd/idea4bsd,kool79/intellij-community,wreckJ/intellij-community,alphafoobar/intellij-community,ivan-fedorov/intellij-community,vladmm/intell
ij-community,hurricup/intellij-community,retomerz/intellij-community,consulo/consulo,idea4bsd/idea4bsd,fengbaicanhe/intellij-community,asedunov/intellij-community,ernestp/consulo,gnuhub/intellij-community,adedayo/intellij-community,diorcety/intellij-community,semonte/intellij-community,kdwink/intellij-community,SerCeMan/intellij-community,semonte/intellij-community,ThiagoGarciaAlves/intellij-community,izonder/intellij-community,ftomassetti/intellij-community,ryano144/intellij-community,fitermay/intellij-community,nicolargo/intellij-community,muntasirsyed/intellij-community,tmpgit/intellij-community,ahb0327/intellij-community,samthor/intellij-community,salguarnieri/intellij-community,diorcety/intellij-community,youdonghai/intellij-community,vladmm/intellij-community,Distrotech/intellij-community,izonder/intellij-community,holmes/intellij-community,izonder/intellij-community,SerCeMan/intellij-community,blademainer/intellij-community,vvv1559/intellij-community,nicolargo/intellij-community,youdonghai/intellij-community,michaelgallacher/intellij-community,orekyuu/intellij-community,ibinti/intellij-community,apixandru/intellij-community,izonder/intellij-community,vvv1559/intellij-community,supersven/intellij-community,jagguli/intellij-community,FHannes/intellij-community,caot/intellij-community,samthor/intellij-community,adedayo/intellij-community,ryano144/intellij-community,vvv1559/intellij-community,diorcety/intellij-community,consulo/consulo,slisson/intellij-community,fitermay/intellij-community,akosyakov/intellij-community,robovm/robovm-studio,diorcety/intellij-community,samthor/intellij-community,fengbaicanhe/intellij-community,SerCeMan/intellij-community,asedunov/intellij-community,slisson/intellij-community,suncycheng/intellij-community,ivan-fedorov/intellij-community,blademainer/intellij-community,lucafavatella/intellij-community,ibinti/intellij-community,robovm/robovm-studio,izonder/intellij-community,kool79/intellij-community,salguarnieri/intellij-community,Thia
goGarciaAlves/intellij-community,caot/intellij-community,apixandru/intellij-community,clumsy/intellij-community,retomerz/intellij-community,robovm/robovm-studio,adedayo/intellij-community,fengbaicanhe/intellij-community,ThiagoGarciaAlves/intellij-community,ahb0327/intellij-community,ahb0327/intellij-community,clumsy/intellij-community,adedayo/intellij-community,SerCeMan/intellij-community,amith01994/intellij-community,clumsy/intellij-community,TangHao1987/intellij-community,mglukhikh/intellij-community,muntasirsyed/intellij-community,ol-loginov/intellij-community,wreckJ/intellij-community,semonte/intellij-community,da1z/intellij-community,amith01994/intellij-community,kdwink/intellij-community,holmes/intellij-community,asedunov/intellij-community,xfournet/intellij-community,supersven/intellij-community,robovm/robovm-studio,vladmm/intellij-community,izonder/intellij-community,kool79/intellij-community,retomerz/intellij-community,allotria/intellij-community,pwoodworth/intellij-community,Lekanich/intellij-community,orekyuu/intellij-community,FHannes/intellij-community,hurricup/intellij-community,MichaelNedzelsky/intellij-community,semonte/intellij-community,suncycheng/intellij-community,fengbaicanhe/intellij-community,ftomassetti/intellij-community,amith01994/intellij-community,akosyakov/intellij-community,semonte/intellij-community,nicolargo/intellij-community,petteyg/intellij-community,Lekanich/intellij-community,youdonghai/intellij-community,holmes/intellij-community,FHannes/intellij-community,retomerz/intellij-community,ivan-fedorov/intellij-community,vvv1559/intellij-community,vladmm/intellij-community,gnuhub/intellij-community,hurricup/intellij-community,mglukhikh/intellij-community,dslomov/intellij-community,Lekanich/intellij-community,TangHao1987/intellij-community,wreckJ/intellij-community,allotria/intellij-community,izonder/intellij-community,allotria/intellij-community,mglukhikh/intellij-community,Distrotech/intellij-community,signed/intellij-community,kool7
9/intellij-community,kdwink/intellij-community,ibinti/intellij-community,slisson/intellij-community,akosyakov/intellij-community,petteyg/intellij-community,ftomassetti/intellij-community,xfournet/intellij-community,FHannes/intellij-community,fitermay/intellij-community,ryano144/intellij-community,da1z/intellij-community,fitermay/intellij-community,alphafoobar/intellij-community,MER-GROUP/intellij-community,holmes/intellij-community,fitermay/intellij-community,Distrotech/intellij-community,gnuhub/intellij-community,diorcety/intellij-community,suncycheng/intellij-community,pwoodworth/intellij-community,fengbaicanhe/intellij-community,izonder/intellij-community,vvv1559/intellij-community,orekyuu/intellij-community,samthor/intellij-community,mglukhikh/intellij-community,tmpgit/intellij-community,apixandru/intellij-community,akosyakov/intellij-community,Distrotech/intellij-community,MER-GROUP/intellij-community,supersven/intellij-community,idea4bsd/idea4bsd,kool79/intellij-community,slisson/intellij-community,salguarnieri/intellij-community,gnuhub/intellij-community,suncycheng/intellij-community,pwoodworth/intellij-community,tmpgit/intellij-community,vladmm/intellij-community,fnouama/intellij-community,michaelgallacher/intellij-community,slisson/intellij-community,hurricup/intellij-community,supersven/intellij-community,fitermay/intellij-community,michaelgallacher/intellij-community,ibinti/intellij-community,lucafavatella/intellij-community,adedayo/intellij-community,vladmm/intellij-community,pwoodworth/intellij-community,samthor/intellij-community,semonte/intellij-community,lucafavatella/intellij-community,apixandru/intellij-community,wreckJ/intellij-community,kdwink/intellij-community,idea4bsd/idea4bsd,kool79/intellij-community,ivan-fedorov/intellij-community,apixandru/intellij-community,youdonghai/intellij-community,ivan-fedorov/intellij-community,slisson/intellij-community,amith01994/intellij-community,akosyakov/intellij-community,ThiagoGarciaAlves/intellij-community
,TangHao1987/intellij-community,caot/intellij-community,Lekanich/intellij-community,blademainer/intellij-community,dslomov/intellij-community,orekyuu/intellij-community,akosyakov/intellij-community,alphafoobar/intellij-community,michaelgallacher/intellij-community,lucafavatella/intellij-community,vvv1559/intellij-community,blademainer/intellij-community,Lekanich/intellij-community,da1z/intellij-community,petteyg/intellij-community,vladmm/intellij-community,TangHao1987/intellij-community,MichaelNedzelsky/intellij-community,nicolargo/intellij-community,youdonghai/intellij-community,adedayo/intellij-community,ivan-fedorov/intellij-community,robovm/robovm-studio,ivan-fedorov/intellij-community,signed/intellij-community,asedunov/intellij-community,semonte/intellij-community,idea4bsd/idea4bsd,tmpgit/intellij-community,akosyakov/intellij-community,hurricup/intellij-community,dslomov/intellij-community,kdwink/intellij-community,dslomov/intellij-community,fnouama/intellij-community,blademainer/intellij-community,alphafoobar/intellij-community,diorcety/intellij-community,fengbaicanhe/intellij-community,muntasirsyed/intellij-community,supersven/intellij-community,TangHao1987/intellij-community,consulo/consulo,youdonghai/intellij-community,kool79/intellij-community,slisson/intellij-community,signed/intellij-community,da1z/intellij-community,pwoodworth/intellij-community,MichaelNedzelsky/intellij-community,salguarnieri/intellij-community,pwoodworth/intellij-community,hurricup/intellij-community,youdonghai/intellij-community,MER-GROUP/intellij-community,alphafoobar/intellij-community,ftomassetti/intellij-community,caot/intellij-community,robovm/robovm-studio,Lekanich/intellij-community,mglukhikh/intellij-community,ernestp/consulo,MichaelNedzelsky/intellij-community,lucafavatella/intellij-community,wreckJ/intellij-community,vvv1559/intellij-community,tmpgit/intellij-community,salguarnieri/intellij-community,retomerz/intellij-community,orekyuu/intellij-community,holmes/intellij-com
munity,ibinti/intellij-community,muntasirsyed/intellij-community,Distrotech/intellij-community,asedunov/intellij-community,fnouama/intellij-community,ryano144/intellij-community,youdonghai/intellij-community,apixandru/intellij-community,kdwink/intellij-community,nicolargo/intellij-community,fengbaicanhe/intellij-community,robovm/robovm-studio,fengbaicanhe/intellij-community,muntasirsyed/intellij-community,clumsy/intellij-community,ThiagoGarciaAlves/intellij-community,fengbaicanhe/intellij-community,SerCeMan/intellij-community,tmpgit/intellij-community,TangHao1987/intellij-community,tmpgit/intellij-community,MER-GROUP/intellij-community,amith01994/intellij-community,lucafavatella/intellij-community,ol-loginov/intellij-community,lucafavatella/intellij-community,gnuhub/intellij-community,muntasirsyed/intellij-community,allotria/intellij-community,alphafoobar/intellij-community,Lekanich/intellij-community,fnouama/intellij-community,xfournet/intellij-community,allotria/intellij-community,supersven/intellij-community,ol-loginov/intellij-community,retomerz/intellij-community,michaelgallacher/intellij-community,gnuhub/intellij-community,adedayo/intellij-community,semonte/intellij-community,fitermay/intellij-community,ol-loginov/intellij-community,samthor/intellij-community,izonder/intellij-community,ivan-fedorov/intellij-community,allotria/intellij-community,nicolargo/intellij-community,petteyg/intellij-community,ahb0327/intellij-community,samthor/intellij-community,apixandru/intellij-community,slisson/intellij-community,fnouama/intellij-community,ftomassetti/intellij-community,jagguli/intellij-community,semonte/intellij-community,robovm/robovm-studio,idea4bsd/idea4bsd,MER-GROUP/intellij-community,FHannes/intellij-community,retomerz/intellij-community,youdonghai/intellij-community,holmes/intellij-community,MichaelNedzelsky/intellij-community,ftomassetti/intellij-community,SerCeMan/intellij-community,alphafoobar/intellij-community,supersven/intellij-community,supersven/intel
lij-community,da1z/intellij-community,lucafavatella/intellij-community,fnouama/intellij-community,suncycheng/intellij-community,consulo/consulo,mglukhikh/intellij-community,dslomov/intellij-community,dslomov/intellij-community,diorcety/intellij-community,akosyakov/intellij-community,gnuhub/intellij-community,clumsy/intellij-community,vvv1559/intellij-community,jagguli/intellij-community,petteyg/intellij-community,clumsy/intellij-community,signed/intellij-community,nicolargo/intellij-community,vvv1559/intellij-community,semonte/intellij-community,blademainer/intellij-community,holmes/intellij-community,FHannes/intellij-community,ernestp/consulo,da1z/intellij-community,ryano144/intellij-community,FHannes/intellij-community,alphafoobar/intellij-community,ol-loginov/intellij-community,pwoodworth/intellij-community,robovm/robovm-studio,xfournet/intellij-community,pwoodworth/intellij-community,muntasirsyed/intellij-community,fitermay/intellij-community,ThiagoGarciaAlves/intellij-community,kool79/intellij-community,hurricup/intellij-community,MER-GROUP/intellij-community,fitermay/intellij-community,jagguli/intellij-community,amith01994/intellij-community,kool79/intellij-community,vladmm/intellij-community,suncycheng/intellij-community,wreckJ/intellij-community,ernestp/consulo,MichaelNedzelsky/intellij-community,nicolargo/intellij-community,jagguli/intellij-community,ftomassetti/intellij-community,michaelgallacher/intellij-community,signed/intellij-community,fitermay/intellij-community,retomerz/intellij-community,orekyuu/intellij-community,ibinti/intellij-community,signed/intellij-community,muntasirsyed/intellij-community,supersven/intellij-community,ibinti/intellij-community,robovm/robovm-studio,dslomov/intellij-community,hurricup/intellij-community,asedunov/intellij-community,vvv1559/intellij-community,ol-loginov/intellij-community,blademainer/intellij-community,petteyg/intellij-community,xfournet/intellij-community,nicolargo/intellij-community,muntasirsyed/intellij-commu
nity,petteyg/intellij-community,apixandru/intellij-community,ibinti/intellij-community,tmpgit/intellij-community,holmes/intellij-community,michaelgallacher/intellij-community,semonte/intellij-community,caot/intellij-community,ThiagoGarciaAlves/intellij-community,idea4bsd/idea4bsd,ahb0327/intellij-community,da1z/intellij-community,diorcety/intellij-community,ThiagoGarciaAlves/intellij-community,clumsy/intellij-community,Distrotech/intellij-community,allotria/intellij-community,caot/intellij-community,adedayo/intellij-community,dslomov/intellij-community,samthor/intellij-community,kool79/intellij-community,youdonghai/intellij-community,clumsy/intellij-community,retomerz/intellij-community,Lekanich/intellij-community,ryano144/intellij-community,signed/intellij-community,TangHao1987/intellij-community,slisson/intellij-community,apixandru/intellij-community,jagguli/intellij-community,jagguli/intellij-community,nicolargo/intellij-community,MichaelNedzelsky/intellij-community,mglukhikh/intellij-community,wreckJ/intellij-community,tmpgit/intellij-community,MER-GROUP/intellij-community,fengbaicanhe/intellij-community,fnouama/intellij-community,asedunov/intellij-community,ThiagoGarciaAlves/intellij-community,kdwink/intellij-community,ol-loginov/intellij-community,MER-GROUP/intellij-community,ryano144/intellij-community,holmes/intellij-community,ThiagoGarciaAlves/intellij-community,amith01994/intellij-community,izonder/intellij-community,ryano144/intellij-community,TangHao1987/intellij-community,alphafoobar/intellij-community,allotria/intellij-community,SerCeMan/intellij-community,Distrotech/intellij-community,vladmm/intellij-community,ftomassetti/intellij-community,retomerz/intellij-community,akosyakov/intellij-community,retomerz/intellij-community,hurricup/intellij-community,FHannes/intellij-community,petteyg/intellij-community,Lekanich/intellij-community,michaelgallacher/intellij-community,akosyakov/intellij-community,blademainer/intellij-community,orekyuu/intellij-communit
y,blademainer/intellij-community,vvv1559/intellij-community,amith01994/intellij-community,ol-loginov/intellij-community,gnuhub/intellij-community,kdwink/intellij-community,apixandru/intellij-community,ernestp/consulo,petteyg/intellij-community,ahb0327/intellij-community,dslomov/intellij-community,suncycheng/intellij-community,ahb0327/intellij-community,MER-GROUP/intellij-community,ibinti/intellij-community,fitermay/intellij-community,orekyuu/intellij-community,ivan-fedorov/intellij-community,suncycheng/intellij-community,mglukhikh/intellij-community,MichaelNedzelsky/intellij-community,wreckJ/intellij-community,akosyakov/intellij-community,da1z/intellij-community,Lekanich/intellij-community,diorcety/intellij-community,salguarnieri/intellij-community,ahb0327/intellij-community,fnouama/intellij-community,MER-GROUP/intellij-community,apixandru/intellij-community,gnuhub/intellij-community,ryano144/intellij-community,ol-loginov/intellij-community,semonte/intellij-community,hurricup/intellij-community,asedunov/intellij-community,kool79/intellij-community,holmes/intellij-community,wreckJ/intellij-community,wreckJ/intellij-community,ryano144/intellij-community,adedayo/intellij-community,Distrotech/intellij-community,supersven/intellij-community,semonte/intellij-community,mglukhikh/intellij-community,diorcety/intellij-community,idea4bsd/idea4bsd,Distrotech/intellij-community,pwoodworth/intellij-community,Distrotech/intellij-community,MichaelNedzelsky/intellij-community,ryano144/intellij-community,fengbaicanhe/intellij-community,ThiagoGarciaAlves/intellij-community,ryano144/intellij-community,allotria/intellij-community,muntasirsyed/intellij-community,ThiagoGarciaAlves/intellij-community,caot/intellij-community,SerCeMan/intellij-community,caot/intellij-community,hurricup/intellij-community,allotria/intellij-community,alphafoobar/intellij-community,idea4bsd/idea4bsd,blademainer/intellij-community,vladmm/intellij-community,jagguli/intellij-community,allotria/intellij-community,p
woodworth/intellij-community,salguarnieri/intellij-community,amith01994/intellij-community,adedayo/intellij-community,idea4bsd/idea4bsd,fnouama/intellij-community,suncycheng/intellij-community,salguarnieri/intellij-community,da1z/intellij-community,Distrotech/intellij-community,FHannes/intellij-community,holmes/intellij-community,salguarnieri/intellij-community,supersven/intellij-community,da1z/intellij-community,caot/intellij-community,salguarnieri/intellij-community,ivan-fedorov/intellij-community,adedayo/intellij-community,michaelgallacher/intellij-community,xfournet/intellij-community,nicolargo/intellij-community,suncycheng/intellij-community,asedunov/intellij-community,ahb0327/intellij-community,robovm/robovm-studio,ol-loginov/intellij-community,kdwink/intellij-community,amith01994/intellij-community,wreckJ/intellij-community,orekyuu/intellij-community,ol-loginov/intellij-community,xfournet/intellij-community,idea4bsd/idea4bsd,SerCeMan/intellij-community,tmpgit/intellij-community,jagguli/intellij-community,ivan-fedorov/intellij-community,fnouama/intellij-community,tmpgit/intellij-community,jagguli/intellij-community,ibinti/intellij-community,consulo/consulo,asedunov/intellij-community,gnuhub/intellij-community,gnuhub/intellij-community,jagguli/intellij-community,youdonghai/intellij-community,holmes/intellij-community,michaelgallacher/intellij-community,SerCeMan/intellij-community,dslomov/intellij-community,tmpgit/intellij-community,MER-GROUP/intellij-community,MER-GROUP/intellij-community,samthor/intellij-community,amith01994/intellij-community,Distrotech/intellij-community,pwoodworth/intellij-community,signed/intellij-community,apixandru/intellij-community,mglukhikh/intellij-community,fengbaicanhe/intellij-community,idea4bsd/idea4bsd,pwoodworth/intellij-community,ahb0327/intellij-community,salguarnieri/intellij-community,lucafavatella/intellij-community,TangHao1987/intellij-community,fitermay/intellij-community,TangHao1987/intellij-community,da1z/intellij-comm
unity,TangHao1987/intellij-community,slisson/intellij-community,consulo/consulo,lucafavatella/intellij-community,orekyuu/intellij-community,suncycheng/intellij-community,allotria/intellij-community,alphafoobar/intellij-community,SerCeMan/intellij-community,TangHao1987/intellij-community,ibinti/intellij-community,vvv1559/intellij-community,lucafavatella/intellij-community,xfournet/intellij-community,wreckJ/intellij-community,izonder/intellij-community,supersven/intellij-community,robovm/robovm-studio,xfournet/intellij-community,ahb0327/intellij-community,jagguli/intellij-community,apixandru/intellij-community,mglukhikh/intellij-community,samthor/intellij-community,gnuhub/intellij-community,ahb0327/intellij-community,vladmm/intellij-community,michaelgallacher/intellij-community,MichaelNedzelsky/intellij-community,xfournet/intellij-community,xfournet/intellij-community,signed/intellij-community,ftomassetti/intellij-community,petteyg/intellij-community,hurricup/intellij-community,petteyg/intellij-community,signed/intellij-community,clumsy/intellij-community,asedunov/intellij-community,ibinti/intellij-community,muntasirsyed/intellij-community,fitermay/intellij-community,FHannes/intellij-community,ftomassetti/intellij-community,mglukhikh/intellij-community,Lekanich/intellij-community,da1z/intellij-community,FHannes/intellij-community,blademainer/intellij-community,allotria/intellij-community,orekyuu/intellij-community,asedunov/intellij-community | /*
* Copyright 2000-2012 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* @author max
*/
package com.intellij.codeInsight.daemon.impl;
import com.intellij.codeInsight.daemon.DaemonBundle;
import com.intellij.codeInsight.daemon.impl.actions.ShowErrorDescriptionAction;
import com.intellij.codeInsight.hint.LineTooltipRenderer;
import com.intellij.codeInsight.hint.TooltipLinkHandlerEP;
import com.intellij.codeInsight.hint.TooltipRenderer;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.ex.ErrorStripTooltipRendererProvider;
import com.intellij.openapi.editor.impl.TrafficTooltipRenderer;
import com.intellij.openapi.editor.markup.RangeHighlighter;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.util.SmartList;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ui.UIUtil;
import gnu.trove.THashSet;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.util.Collection;
import java.util.Comparator;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
 * Builds the tooltips shown for error-stripe markers and inspection highlights.
 * Tooltips of several overlapping highlighters are merged into one renderer,
 * de-duplicated, and ordered most-severe-first.
 */
public class DaemonTooltipRendererProvider implements ErrorStripTooltipRendererProvider {
  // Inserted between a problem's own text and its expanded inspection description,
  // so stripDescription() can later cut the description off again.
  @NonNls private static final String END_MARKER = "<!-- end marker -->";
  private final Project myProject;

  public DaemonTooltipRendererProvider(final Project project) {
    myProject = project;
  }

  /**
   * Merges the error-stripe tooltips of all given highlighters into a single renderer.
   * HighlightInfo tooltips are collected, sorted by severity (then tooltip text) and
   * composed on top; any other tooltip objects are appended below as plain text.
   *
   * @param highlighters the highlighters under the error stripe position, not null
   * @return the combined renderer, or null when no highlighter carried a tooltip
   */
  @Override
  public TooltipRenderer calcTooltipRenderer(@NotNull final Collection<RangeHighlighter> highlighters) {
    LineTooltipRenderer bigRenderer = null;
    List<HighlightInfo> infos = new SmartList<HighlightInfo>();
    Collection<String> tooltips = new THashSet<String>(); //do not show same tooltip twice
    for (RangeHighlighter marker : highlighters) {
      final Object tooltipObject = marker.getErrorStripeTooltip();
      if (tooltipObject == null) continue;
      if (tooltipObject instanceof HighlightInfo) {
        HighlightInfo info = (HighlightInfo)tooltipObject;
        if (info.toolTip != null && tooltips.add(info.toolTip)) {
          infos.add(info);
        }
      }
      else {
        // Non-HighlightInfo tooltips are shown verbatim, stacked in encounter order.
        final String text = tooltipObject.toString();
        if (tooltips.add(text)) {
          if (bigRenderer == null) {
            bigRenderer = new MyRenderer(text, new Object[] {highlighters});
          }
          else {
            bigRenderer.addBelow(text);
          }
        }
      }
    }
    if (!infos.isEmpty()) {
      // show errors first
      ContainerUtil.quickSort(infos, new Comparator<HighlightInfo>() {
        @Override
        public int compare(final HighlightInfo o1, final HighlightInfo o2) {
          // Higher severity first; ties broken by tooltip text for a stable order.
          int i = SeverityRegistrar.getInstance(myProject).compare(o2.getSeverity(), o1.getSeverity());
          if (i != 0) return i;
          return o1.toolTip.compareTo(o2.toolTip);
        }
      });
      final HighlightInfoComposite composite = new HighlightInfoComposite(infos);
      if (bigRenderer == null) {
        bigRenderer = new MyRenderer(UIUtil.convertSpace2Nbsp(composite.toolTip), new Object[] {highlighters});
      }
      else {
        // HighlightInfo tooltips go on top; previously collected plain texts are kept below.
        final LineTooltipRenderer renderer = new MyRenderer(UIUtil.convertSpace2Nbsp(composite.toolTip), new Object[] {highlighters});
        renderer.addBelow(bigRenderer.getText());
        bigRenderer = renderer;
      }
    }
    return bigRenderer;
  }

  /** Renders a single fixed tooltip text. */
  @Override
  public TooltipRenderer calcTooltipRenderer(@NotNull final String text) {
    return new MyRenderer(text, new Object[] {text});
  }

  /** Renders a single fixed tooltip text constrained to the given width. */
  @Override
  public TooltipRenderer calcTooltipRenderer(@NotNull final String text, final int width) {
    return new MyRenderer(text, width, new Object[] {text});
  }

  @NotNull
  @Override
  public TrafficTooltipRenderer createTrafficTooltipRenderer(@NotNull Runnable onHide, @NotNull Editor editor) {
    return new TrafficTooltipRendererImpl(onHide, editor);
  }

  /**
   * Tooltip renderer that can expand inspection descriptions in place
   * ({@link #dressDescription}) and collapse them again ({@link #stripDescription}).
   * Problems inside {@code myText} are delimited by {@code BORDER_LINE}.
   */
  private static class MyRenderer extends LineTooltipRenderer {
    public MyRenderer(final String text, Object[] comparable) {
      super(text, comparable);
    }

    public MyRenderer(final String text, final int width, Object[] comparable) {
      super(text, width, comparable);
    }

    @Override
    protected void onHide(final JComponent contentComponent) {
      // Remember the last tooltip width so ShowErrorDescriptionAction can reuse it.
      ShowErrorDescriptionAction.rememberCurrentWidth(contentComponent.getWidth());
    }

    /**
     * Appends the full inspection description to every problem that links to one.
     * Each dressed problem becomes: problem text (with the "extended description"
     * link label swapped for "collapse description") + END_MARKER + "<p>" + description.
     * Problems without a link are carried over unchanged.
     *
     * @return true if {@code myText} was rewritten, false to leave it untouched
     */
    @Override
    protected boolean dressDescription(@NotNull final Editor editor) {
      final List<String> problems = StringUtil.split(UIUtil.getHtmlBody(myText), BORDER_LINE);
      String text = "";
      for (String problem : problems) {
        final String ref = getLinkRef(problem);
        if (ref != null) {
          String description = TooltipLinkHandlerEP.getDescription(ref, editor);
          // NOTE(review): when the link yields no description, this problem is dropped
          // from the rebuilt text — confirm this is intentional.
          if (description != null) {
            description = UIUtil.getHtmlBody(description);
            final int descriptionEnd = description.indexOf("<!-- tooltip end -->");
            if (descriptionEnd < 0) {
              // No explicit end marker: reword references to settings-page controls
              // ("the panel/checkbox/... below") into " inspection settings ".
              final Pattern pattern = Pattern.compile(".*Use.*(the (panel|checkbox|checkboxes|field|button|controls).*below).*", Pattern.DOTALL);
              final Matcher matcher = pattern.matcher(description);
              int startFindIdx = 0;
              while (matcher.find(startFindIdx)) {
                final int end = matcher.end(1);
                startFindIdx = end;
                description = description.substring(0, matcher.start(1)) + " inspection settings " + description.substring(end);
              }
            } else {
              // Keep only the part before the explicit tooltip-end marker.
              description = description.substring(0, descriptionEnd);
            }
            text += UIUtil.getHtmlBody(problem).replace(DaemonBundle.message("inspection.extended.description"),
                                                        DaemonBundle.message("inspection.collapse.description")) +
                    END_MARKER + "<p>" + description + BORDER_LINE;
          }
        }
        else {
          // Problem has no expanded-description link: keep its text as-is.
          text += UIUtil.getHtmlBody(problem) + BORDER_LINE;
        }
      }
      if (!text.isEmpty()) { //otherwise do not change anything
        myText = "<html><body>" + StringUtil.trimEnd(text, BORDER_LINE) + "</body></html>";
        return true;
      }
      return false;
    }

    /**
     * Extracts the href target of the first {@code <a href="...">} in the text,
     * or null when the text contains no such link.
     */
    @Nullable
    private static String getLinkRef(@NonNls String text) {
      final String linkWithRef = "<a href=\"";
      final int linkStartIdx = text.indexOf(linkWithRef);
      if (linkStartIdx >= 0) {
        final String ref = text.substring(linkStartIdx + linkWithRef.length());
        final int quoteIdx = ref.indexOf('"');
        if (quoteIdx > 0) {
          return ref.substring(0, quoteIdx);
        }
      }
      return null;
    }

    /**
     * Inverse of {@link #dressDescription}: keeps only the part of each problem
     * before END_MARKER and swaps the "collapse description" link label back to
     * "extended description".
     */
    @Override
    protected void stripDescription() {
      final List<String> problems = StringUtil.split(UIUtil.getHtmlBody(myText), BORDER_LINE);
      myText = "<html><body>";
      for (int i = 0, size = problems.size(); i < size; i++) {
        final String problem = StringUtil.split(problems.get(i), END_MARKER).get(0);
        myText += UIUtil.getHtmlBody(problem).replace(DaemonBundle.message("inspection.collapse.description"),
                                                      DaemonBundle.message("inspection.extended.description")) + BORDER_LINE;
      }
      myText = StringUtil.trimEnd(myText, BORDER_LINE) + "</body></html>";
    }

    @Override
    protected LineTooltipRenderer createRenderer(final String text, final int width) {
      return new MyRenderer(text, width, getEqualityObjects());
    }
  }
}
* Copyright 2000-2012 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* @author max
*/
package com.intellij.codeInsight.daemon.impl;
import com.intellij.codeInsight.daemon.DaemonBundle;
import com.intellij.codeInsight.daemon.impl.actions.ShowErrorDescriptionAction;
import com.intellij.codeInsight.hint.LineTooltipRenderer;
import com.intellij.codeInsight.hint.TooltipLinkHandlerEP;
import com.intellij.codeInsight.hint.TooltipRenderer;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.ex.ErrorStripTooltipRendererProvider;
import com.intellij.openapi.editor.impl.TrafficTooltipRenderer;
import com.intellij.openapi.editor.markup.RangeHighlighter;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.util.SmartList;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ui.UIUtil;
import gnu.trove.THashSet;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.util.Collection;
import java.util.Comparator;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public class DaemonTooltipRendererProvider implements ErrorStripTooltipRendererProvider {
private final Project myProject;
public DaemonTooltipRendererProvider(final Project project) {
myProject = project;
}
@Override
public TooltipRenderer calcTooltipRenderer(@NotNull final Collection<RangeHighlighter> highlighters) {
LineTooltipRenderer bigRenderer = null;
List<HighlightInfo> infos = new SmartList<HighlightInfo>();
Collection<String> tooltips = new THashSet<String>(); //do not show same tooltip twice
for (RangeHighlighter marker : highlighters) {
final Object tooltipObject = marker.getErrorStripeTooltip();
if (tooltipObject == null) continue;
if (tooltipObject instanceof HighlightInfo) {
HighlightInfo info = (HighlightInfo)tooltipObject;
if (info.toolTip != null && tooltips.add(info.toolTip)) {
infos.add(info);
}
}
else {
final String text = tooltipObject.toString();
if (tooltips.add(text)) {
if (bigRenderer == null) {
bigRenderer = new MyRenderer(text, new Object[] {highlighters});
}
else {
bigRenderer.addBelow(text);
}
}
}
}
if (!infos.isEmpty()) {
// show errors first
ContainerUtil.quickSort(infos, new Comparator<HighlightInfo>() {
@Override
public int compare(final HighlightInfo o1, final HighlightInfo o2) {
int i = SeverityRegistrar.getInstance(myProject).compare(o2.getSeverity(), o1.getSeverity());
if (i != 0) return i;
return o1.toolTip.compareTo(o2.toolTip);
}
});
final HighlightInfoComposite composite = new HighlightInfoComposite(infos);
if (bigRenderer == null) {
bigRenderer = new MyRenderer(UIUtil.convertSpace2Nbsp(composite.toolTip), new Object[] {highlighters});
}
else {
final LineTooltipRenderer renderer = new MyRenderer(UIUtil.convertSpace2Nbsp(composite.toolTip), new Object[] {highlighters});
renderer.addBelow(bigRenderer.getText());
bigRenderer = renderer;
}
}
return bigRenderer;
}
@Override
public TooltipRenderer calcTooltipRenderer(@NotNull final String text) {
return new MyRenderer(text, new Object[] {text});
}
@Override
public TooltipRenderer calcTooltipRenderer(@NotNull final String text, final int width) {
return new MyRenderer(text, width, new Object[] {text});
}
@NotNull
@Override
public TrafficTooltipRenderer createTrafficTooltipRenderer(@NotNull Runnable onHide, @NotNull Editor editor) {
return new TrafficTooltipRendererImpl(onHide, editor);
}
private static class MyRenderer extends LineTooltipRenderer {
public MyRenderer(final String text, Object[] comparable) {
super(text, comparable);
}
public MyRenderer(final String text, final int width, Object[] comparable) {
super(text, width, comparable);
}
@Override
protected void onHide(final JComponent contentComponent) {
ShowErrorDescriptionAction.rememberCurrentWidth(contentComponent.getWidth());
}
@Override
protected boolean dressDescription(@NotNull final Editor editor) {
final List<String> problems = StringUtil.split(UIUtil.getHtmlBody(myText), BORDER_LINE);
String text = "";
for (String problem : problems) {
final String ref = getLinkRef(problem);
if (ref != null) {
String description = TooltipLinkHandlerEP.getDescription(ref, editor);
if (description != null) {
description = UIUtil.getHtmlBody(description);
final int descriptionEnd = description.indexOf("<!-- tooltip end -->");
if (descriptionEnd < 0) {
final Pattern pattern = Pattern.compile(".*Use.*(the (panel|checkbox|checkboxes|field|button|controls).*below).*", Pattern.DOTALL);
final Matcher matcher = pattern.matcher(description);
int startFindIdx = 0;
while (matcher.find(startFindIdx)) {
final int end = matcher.end(1);
startFindIdx = end;
description = description.substring(0, matcher.start(1)) + " inspection settings " + description.substring(end);
}
} else {
description = description.substring(0, descriptionEnd);
}
text += UIUtil.getHtmlBody(problem).replace(DaemonBundle.message("inspection.extended.description"),
DaemonBundle.message("inspection.collapse.description")) +
BORDER_LINE + description + BORDER_LINE;
}
}
}
if (!text.isEmpty()) { //otherwise do not change anything
myText = "<html><body>" + StringUtil.trimEnd(text, BORDER_LINE) + "</body></html>";
return true;
}
return false;
}
@Nullable
private static String getLinkRef(@NonNls String text) {
final String linkWithRef = "<a href=\"";
final int linkStartIdx = text.indexOf(linkWithRef);
if (linkStartIdx >= 0) {
final String ref = text.substring(linkStartIdx + linkWithRef.length());
final int quoteIdx = ref.indexOf('"');
if (quoteIdx > 0) {
return ref.substring(0, quoteIdx);
}
}
return null;
}
@Override
protected void stripDescription() {
final String[] problems = UIUtil.getHtmlBody(myText).split(BORDER_LINE);
myText = "<html><body>";
for (int i = 0; i < problems.length; i++) {
final String problem = problems[i];
if (i % 2 == 0) {
myText += UIUtil.getHtmlBody(problem).replace(DaemonBundle.message("inspection.collapse.description"),
DaemonBundle.message("inspection.extended.description")) + BORDER_LINE;
}
}
myText = StringUtil.trimEnd(myText, BORDER_LINE) + "</body></html>";
}
@Override
protected LineTooltipRenderer createRenderer(final String text, final int width) {
return new MyRenderer(text, width, getEqualityObjects());
}
}
} | better looking inspection tooltips
| platform/lang-impl/src/com/intellij/codeInsight/daemon/impl/DaemonTooltipRendererProvider.java | better looking inspection tooltips | <ide><path>latform/lang-impl/src/com/intellij/codeInsight/daemon/impl/DaemonTooltipRendererProvider.java
<ide> import java.util.regex.Pattern;
<ide>
<ide> public class DaemonTooltipRendererProvider implements ErrorStripTooltipRendererProvider {
<add> @NonNls private static final String END_MARKER = "<!-- end marker -->";
<ide> private final Project myProject;
<ide>
<ide> public DaemonTooltipRendererProvider(final Project project) {
<ide> }
<ide> text += UIUtil.getHtmlBody(problem).replace(DaemonBundle.message("inspection.extended.description"),
<ide> DaemonBundle.message("inspection.collapse.description")) +
<del> BORDER_LINE + description + BORDER_LINE;
<add> END_MARKER + "<p>" + description + BORDER_LINE;
<ide> }
<add> }
<add> else {
<add> text += UIUtil.getHtmlBody(problem) + BORDER_LINE;
<ide> }
<ide> }
<ide> if (!text.isEmpty()) { //otherwise do not change anything
<ide>
<ide> @Override
<ide> protected void stripDescription() {
<del> final String[] problems = UIUtil.getHtmlBody(myText).split(BORDER_LINE);
<add> final List<String> problems = StringUtil.split(UIUtil.getHtmlBody(myText), BORDER_LINE);
<ide> myText = "<html><body>";
<del> for (int i = 0; i < problems.length; i++) {
<del> final String problem = problems[i];
<del> if (i % 2 == 0) {
<del> myText += UIUtil.getHtmlBody(problem).replace(DaemonBundle.message("inspection.collapse.description"),
<del> DaemonBundle.message("inspection.extended.description")) + BORDER_LINE;
<del> }
<add> for (int i = 0, size = problems.size(); i < size; i++) {
<add> final String problem = StringUtil.split(problems.get(i), END_MARKER).get(0);
<add> myText += UIUtil.getHtmlBody(problem).replace(DaemonBundle.message("inspection.collapse.description"),
<add> DaemonBundle.message("inspection.extended.description")) + BORDER_LINE;
<ide> }
<ide> myText = StringUtil.trimEnd(myText, BORDER_LINE) + "</body></html>";
<ide> } |
|
Java | apache-2.0 | 815ee08562aed1a0d6e86e620b8504058f194daa | 0 | jerome79/OG-Platform,jeorme/OG-Platform,DevStreet/FinanceAnalytics,McLeodMoores/starling,codeaudit/OG-Platform,ChinaQuants/OG-Platform,jerome79/OG-Platform,codeaudit/OG-Platform,jeorme/OG-Platform,jerome79/OG-Platform,nssales/OG-Platform,DevStreet/FinanceAnalytics,ChinaQuants/OG-Platform,nssales/OG-Platform,DevStreet/FinanceAnalytics,codeaudit/OG-Platform,DevStreet/FinanceAnalytics,McLeodMoores/starling,nssales/OG-Platform,jeorme/OG-Platform,codeaudit/OG-Platform,jeorme/OG-Platform,McLeodMoores/starling,nssales/OG-Platform,ChinaQuants/OG-Platform,ChinaQuants/OG-Platform,McLeodMoores/starling,jerome79/OG-Platform | /**
* Copyright (C) 2011 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.util.money;
import java.io.Serializable;
import com.opengamma.util.ArgumentChecker;
/**
* An amount of a currency.
* <p>
* This class effectively represents a monetary amount, however it uses a
* {@code double}. This is required for performance reasons.
* This design is acceptable because this is a system designed for the calculation
* of risk (which is an estimate), rather than a system for accurate accounting of money.
* <p>
* This class is immutable and thread-safe.
*/
/**
 * A monetary value expressed in a single currency.
 * <p>
 * The value is held as a {@code double} for performance reasons; this trade-off
 * is acceptable because the system estimates risk rather than performing exact
 * accounting of money.
 * <p>
 * This class is immutable and thread-safe.
 */
public final class CurrencyAmount implements Serializable {

  /** Serialization version. */
  private static final long serialVersionUID = 1L;

  /** The currency the amount is expressed in. */
  private final Currency _currency;
  /** The numeric amount. */
  private final double _amount;

  /**
   * Obtains an instance for the given currency and amount.
   *
   * @param currency the currency the amount is in, not null
   * @param amount the amount of the currency to represent
   * @return the currency amount, not null
   */
  public static CurrencyAmount of(final Currency currency, final double amount) {
    return new CurrencyAmount(currency, amount);
  }

  /**
   * Obtains an instance from an ISO-4217 three letter currency code and an amount.
   * <p>
   * A currency is uniquely identified by its ISO-4217 three letter code; the
   * currency object is created if the code is not yet known.
   *
   * @param currencyCode the three letter currency code, ASCII and upper case, not null
   * @param amount the amount of the currency to represent
   * @return the currency amount, not null
   * @throws IllegalArgumentException if the currency code is invalid
   */
  public static CurrencyAmount of(final String currencyCode, final double amount) {
    final Currency parsed = Currency.of(currencyCode);
    return of(parsed, amount);
  }

  /**
   * Creates a currency amount, returning null when either input is null.
   *
   * @param currency the currency the amount is in, may be null
   * @param amount the amount of the currency to represent, may be null
   * @return the currency amount, may be null
   */
  public static CurrencyAmount create(final Currency currency, final Double amount) {
    if (currency == null || amount == null) {
      return null;
    }
    return new CurrencyAmount(currency, amount);
  }

  //-------------------------------------------------------------------------
  /**
   * Creates an instance.
   *
   * @param currency the currency, not null
   * @param amount the amount
   */
  private CurrencyAmount(final Currency currency, final double amount) {
    ArgumentChecker.notNull(currency, "currency");
    _currency = currency;
    _amount = amount;
  }

  //-------------------------------------------------------------------------
  /**
   * Gets the currency.
   *
   * @return the currency, not null
   */
  public Currency getCurrency() {
    return _currency;
  }

  /**
   * Gets the amount of the currency.
   *
   * @return the amount
   */
  public double getAmount() {
    return _amount;
  }

  //-------------------------------------------------------------------------
  /**
   * Returns a copy of this amount with another amount of the same currency added.
   * <p>
   * The addition uses standard {@code double} arithmetic; this instance is
   * immutable and unaffected.
   *
   * @param amountToAdd the amount to add, in the same currency, not null
   * @return an amount based on this with the specified amount added, not null
   * @throws IllegalArgumentException if the currencies are not equal
   */
  public CurrencyAmount plus(final CurrencyAmount amountToAdd) {
    ArgumentChecker.notNull(amountToAdd, "amountToAdd");
    ArgumentChecker.isTrue(amountToAdd.getCurrency().equals(_currency), "Unable to add amounts in different currencies");
    final double total = _amount + amountToAdd.getAmount();
    return new CurrencyAmount(_currency, total);
  }

  /**
   * Returns a copy of this amount with the specified value added.
   * <p>
   * The addition uses standard {@code double} arithmetic; this instance is
   * immutable and unaffected.
   *
   * @param amountToAdd the amount to add, in the same currency
   * @return an amount based on this with the specified amount added, not null
   */
  public CurrencyAmount plus(final double amountToAdd) {
    final double total = _amount + amountToAdd;
    return new CurrencyAmount(_currency, total);
  }

  /**
   * Returns a copy of this amount scaled by the given factor.
   * <p>
   * The multiplication uses standard {@code double} arithmetic; this instance is
   * immutable and unaffected.
   *
   * @param valueToMultiplyBy the scalar amount to multiply by
   * @return an amount based on this with the amount multiplied, not null
   */
  public CurrencyAmount multipliedBy(final double valueToMultiplyBy) {
    final double scaled = _amount * valueToMultiplyBy;
    return new CurrencyAmount(_currency, scaled);
  }

  //-------------------------------------------------------------------------
  /**
   * Checks if this amount equals another, comparing currency and amount.
   * The amount comparison uses the raw bit pattern, matching {@code Double.equals}.
   *
   * @param obj the other amount, null returns false
   * @return true if equal
   */
  @Override
  public boolean equals(Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof CurrencyAmount)) {
      return false;
    }
    final CurrencyAmount other = (CurrencyAmount) obj;
    if (!_currency.equals(other._currency)) {
      return false;
    }
    return Double.doubleToLongBits(_amount) == Double.doubleToLongBits(other._amount);
  }

  /**
   * Returns a suitable hash code for the amount, consistent with {@link #equals(Object)}.
   *
   * @return the hash code
   */
  @Override
  public int hashCode() {
    final long amountBits = Double.doubleToLongBits(_amount);
    // Same arithmetic as the classic 31-based accumulation over (amount, currency).
    int hash = 31 + (int) (amountBits ^ (amountBits >>> 32));
    hash = hash * 31 + _currency.hashCode();
    return hash;
  }

  //-----------------------------------------------------------------------
  /**
   * Gets the amount as a string: the currency code, a space, then the amount.
   *
   * @return the currency amount, not null
   */
  @Override
  public String toString() {
    return new StringBuilder().append(_currency).append(' ').append(_amount).toString();
  }

}
| projects/OG-Util/src/main/java/com/opengamma/util/money/CurrencyAmount.java | /**
* Copyright (C) 2011 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.util.money;
import java.io.Serializable;
import com.opengamma.util.ArgumentChecker;
/**
* An amount of a currency.
* <p>
* This class effectively represents a monetary amount, however it uses a
* {@code double}. This is required for performance reasons.
* This design is acceptable because this is a system designed for the calculation
* of risk (which is an estimate), rather than a system for accurate accounting of money.
* <p>
* This class is immutable and thread-safe.
*/
public final class CurrencyAmount implements Serializable {
/** Serialization version. */
private static final long serialVersionUID = 1L;
/**
* The currency.
*/
private final Currency _currency;
/**
* The amount.
*/
private final double _amount;
/**
* Obtains an instance of {@code CurrencyAmount} for the specified currency and amount.
*
* @param currency the currency the amount is in, not null
* @param amount the amount of the currency to represent
* @return the currency amount, not null
*/
public static CurrencyAmount of(final Currency currency, final double amount) {
return new CurrencyAmount(currency, amount);
}
/**
* Obtains an instance of {@code CurrencyAmount} for the specified ISO-4217
* three letter currency code and amount.
* <p>
* A currency is uniquely identified by ISO-4217 three letter code.
* This method creates the currency if it is not known.
*
* @param currencyCode the three letter currency code, ASCII and upper case, not null
* @param amount the amount of the currency to represent
* @return the currency amount, not null
* @throws IllegalArgumentException if the currency code is invalid
*/
public static CurrencyAmount of(final String currencyCode, final double amount) {
return of(Currency.of(currencyCode), amount);
}
/**
* Creates an instance.
*
* @param currency the currency, not null
* @param amount the amount
*/
private CurrencyAmount(final Currency currency, final double amount) {
ArgumentChecker.notNull(currency, "currency");
_currency = currency;
_amount = amount;
}
//-------------------------------------------------------------------------
/**
* Gets the currency.
*
* @return the currency, not null
*/
public Currency getCurrency() {
return _currency;
}
/**
* Gets the amount of the currency.
*
* @return the amount
*/
public double getAmount() {
return _amount;
}
//-------------------------------------------------------------------------
/**
* Returns a copy of this {@code CurrencyAmount} with the specified amount added.
* <p>
* This adds the specified amount to this monetary amount, returning a new object.
* The addition simply uses standard {@code double} arithmetic.
* <p>
* This instance is immutable and unaffected by this method.
*
* @param amountToAdd the amount to add, in the same currency, not null
* @return an amount based on this with the specified amount added, not null
* @throws IllegalArgumentException if the currencies are not equal
*/
public CurrencyAmount plus(final CurrencyAmount amountToAdd) {
ArgumentChecker.notNull(amountToAdd, "amountToAdd");
ArgumentChecker.isTrue(amountToAdd.getCurrency().equals(_currency), "Unable to add amounts in different currencies");
return plus(amountToAdd.getAmount());
}
/**
* Returns a copy of this {@code CurrencyAmount} with the specified amount added.
* <p>
* This adds the specified amount to this monetary amount, returning a new object.
* The addition simply uses standard {@code double} arithmetic.
* <p>
* This instance is immutable and unaffected by this method.
*
* @param amountToAdd the amount to add, in the same currency
* @return an amount based on this with the specified amount added, not null
*/
public CurrencyAmount plus(final double amountToAdd) {
return new CurrencyAmount(_currency, _amount + amountToAdd);
}
/**
* Returns a copy of this {@code CurrencyAmount} with the amount multiplied.
* <p>
* This takes this amount and multiplies it by the specified value.
* The multiplication simply uses standard {@code double} arithmetic.
* <p>
* This instance is immutable and unaffected by this method.
*
* @param valueToMultiplyBy the scalar amount to multiply by
* @return an amount based on this with the amount multiplied, not null
*/
public CurrencyAmount multipliedBy(final double valueToMultiplyBy) {
return new CurrencyAmount(_currency, _amount * valueToMultiplyBy);
}
//-------------------------------------------------------------------------
/**
* Checks if this amount equals another amount.
* <p>
* The comparison checks the currency and amount.
*
* @param obj the other amount, null returns false
* @return true if equal
*/
@Override
public boolean equals(Object obj) {
if (obj == this) {
return true;
}
if (obj instanceof CurrencyAmount) {
CurrencyAmount other = (CurrencyAmount) obj;
return _currency.equals(other._currency) &&
Double.doubleToLongBits(_amount) == Double.doubleToLongBits(other._amount);
}
return false;
}
/**
* Returns a suitable hash code for the amount.
*
* @return the hash code
*/
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
long amountBits = Double.doubleToLongBits(_amount);
result = prime * result + (int) (amountBits ^ (amountBits >>> 32));
result = prime * result + _currency.hashCode();
return result;
}
//-----------------------------------------------------------------------
/**
* Gets the amount as a string.
* <p>
* The format is the currency code, followed by a space, followed by the amount.
*
* @return the currency amount, not null
*/
@Override
public String toString() {
return _currency + " " + _amount;
}
}
| Add null-swallowing factory to CurrencyAmount
| projects/OG-Util/src/main/java/com/opengamma/util/money/CurrencyAmount.java | Add null-swallowing factory to CurrencyAmount | <ide><path>rojects/OG-Util/src/main/java/com/opengamma/util/money/CurrencyAmount.java
<ide> }
<ide>
<ide> /**
<add> * Creates a currency amount for the specified currency and amount,
<add> * handling null inputs by returning null.
<add> *
<add> * @param currency the currency the amount is in, may be null
<add> * @param amount the amount of the currency to represent, may be null
<add> * @return the currency amount, may be null
<add> */
<add> public static CurrencyAmount create(final Currency currency, final Double amount) {
<add> if (currency != null && amount != null) {
<add> return new CurrencyAmount(currency, amount);
<add> } else {
<add> return null;
<add> }
<add> }
<add>
<add> //-------------------------------------------------------------------------
<add> /**
<ide> * Creates an instance.
<ide> *
<ide> * @param currency the currency, not null |
|
Java | apache-2.0 | d67a7b093068b977d62cacbb5542999b61a8c347 | 0 | realityforge/arez,realityforge/arez,realityforge/arez | package org.realityforge.arez.processor;
import com.google.auto.service.AutoService;
import com.squareup.javapoet.JavaFile;
import com.squareup.javapoet.TypeSpec;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import javax.annotation.Nonnull;
import javax.annotation.processing.AbstractProcessor;
import javax.annotation.processing.Processor;
import javax.annotation.processing.RoundEnvironment;
import javax.annotation.processing.SupportedAnnotationTypes;
import javax.annotation.processing.SupportedSourceVersion;
import javax.lang.model.SourceVersion;
import javax.lang.model.element.Element;
import javax.lang.model.element.ElementKind;
import javax.lang.model.element.ExecutableElement;
import javax.lang.model.element.Modifier;
import javax.lang.model.element.NestingKind;
import javax.lang.model.element.PackageElement;
import javax.lang.model.element.TypeElement;
import javax.lang.model.type.TypeMirror;
import org.realityforge.arez.annotations.ArezComponent;
import org.realityforge.arez.annotations.Repository;
import static javax.tools.Diagnostic.Kind.ERROR;
/**
* Annotation processor that analyzes Arez annotated source and generates models from the annotations.
*/
@SuppressWarnings( "Duplicates" )
@AutoService( Processor.class )
@SupportedAnnotationTypes( { "org.realityforge.arez.annotations.*", "javax.annotation.PostConstruct" } )
@SupportedSourceVersion( SourceVersion.RELEASE_8 )
public final class ArezProcessor
extends AbstractProcessor
{
/**
* {@inheritDoc}
*/
@Override
public boolean process( final Set<? extends TypeElement> annotations, final RoundEnvironment env )
{
final Set<? extends Element> elements = env.getElementsAnnotatedWith( ArezComponent.class );
processElements( elements );
return false;
}
private void processElements( @Nonnull final Set<? extends Element> elements )
{
for ( final Element element : elements )
{
try
{
process( element );
}
catch ( final IOException ioe )
{
processingEnv.getMessager().printMessage( ERROR, ioe.getMessage(), element );
}
catch ( final ArezProcessorException e )
{
processingEnv.getMessager().printMessage( ERROR, e.getMessage(), e.getElement() );
}
catch ( final Throwable e )
{
final StringWriter sw = new StringWriter();
e.printStackTrace( new PrintWriter( sw ) );
sw.flush();
final String message =
"Unexpected error will running the " + getClass().getName() + " processor. This has " +
"resulted in a failure to process the code and has left the compiler in an invalid " +
"state. Please report the failure to the developers so that it can be fixed.\n" +
" Report the error at: https://github.com/realityforge/arez/issues\n" +
"\n\n" +
sw.toString();
processingEnv.getMessager().printMessage( ERROR, message, element );
}
}
}
private void process( @Nonnull final Element element )
throws IOException, ArezProcessorException
{
final PackageElement packageElement = processingEnv.getElementUtils().getPackageOf( element );
final TypeElement typeElement = (TypeElement) element;
final ComponentDescriptor descriptor = parse( packageElement, typeElement );
emitTypeSpec( descriptor.getPackageName(), descriptor.buildType( processingEnv.getTypeUtils() ) );
if ( descriptor.hasRepository() )
{
emitTypeSpec( descriptor.getPackageName(), descriptor.buildRepository( processingEnv.getTypeUtils() ) );
emitTypeSpec( descriptor.getPackageName(), descriptor.buildRepositoryExtension() );
}
}
@SuppressWarnings( "unchecked" )
@Nonnull
private ComponentDescriptor parse( final PackageElement packageElement, final TypeElement typeElement )
throws ArezProcessorException
{
if ( ElementKind.CLASS != typeElement.getKind() )
{
throw new ArezProcessorException( "@ArezComponent target must be a class", typeElement );
}
else if ( typeElement.getModifiers().contains( Modifier.ABSTRACT ) )
{
throw new ArezProcessorException( "@ArezComponent target must not be abstract", typeElement );
}
else if ( typeElement.getModifiers().contains( Modifier.FINAL ) )
{
throw new ArezProcessorException( "@ArezComponent target must not be final", typeElement );
}
else if ( NestingKind.TOP_LEVEL != typeElement.getNestingKind() &&
!typeElement.getModifiers().contains( Modifier.STATIC ) )
{
throw new ArezProcessorException( "@ArezComponent target must not be a non-static nested class", typeElement );
}
final ArezComponent arezComponent = typeElement.getAnnotation( ArezComponent.class );
final String name =
ProcessorUtil.isSentinelName( arezComponent.name() ) ?
typeElement.getSimpleName().toString() :
arezComponent.name();
final List<ExecutableElement> methods = ProcessorUtil.getMethods( typeElement, processingEnv.getTypeUtils() );
final boolean generateToString = methods.stream()
.noneMatch( m -> m.getSimpleName().toString().equals( "toString" ) &&
m.getParameters().size() == 0 &&
!( m.getEnclosingElement().getSimpleName().toString().equals( "Object" ) &&
"java.lang".equals( processingEnv.getElementUtils().
getPackageOf( m.getEnclosingElement() ).getQualifiedName().toString() ) ) );
final ComponentDescriptor descriptor =
new ComponentDescriptor( name,
arezComponent.singleton(),
arezComponent.disposable(),
arezComponent.allowEmpty(),
generateToString,
packageElement,
typeElement );
descriptor.analyzeCandidateMethods( methods, processingEnv.getTypeUtils() );
descriptor.validate();
for ( final ObservableDescriptor observable : descriptor.getObservables() )
{
if ( observable.expectSetter() )
{
final TypeMirror returnType = observable.getGetterType().getReturnType();
final TypeMirror parameterType = observable.getSetterType().getParameterTypes().get( 0 );
if ( !processingEnv.getTypeUtils().isSameType( parameterType, returnType ) )
{
throw new ArezProcessorException( "@Observable property defines a setter and getter with different types." +
" Getter type: " + returnType + " Setter type: " + parameterType + ".",
observable.getGetter() );
}
}
}
final Repository repository = typeElement.getAnnotation( Repository.class );
if ( null != repository )
{
final List<TypeElement> extensions =
ProcessorUtil.getTypeMirrorsAnnotationParameter( typeElement, "extensions", Repository.class ).stream().
map( typeMirror -> (TypeElement) processingEnv.getTypeUtils().asElement( typeMirror ) ).
collect( Collectors.toList() );
descriptor.configureRepository( repository.name(), extensions );
}
return descriptor;
}
private void emitTypeSpec( @Nonnull final String packageName, @Nonnull final TypeSpec typeSpec )
throws IOException
{
JavaFile.builder( packageName, typeSpec ).
skipJavaLangImports( true ).
build().
writeTo( processingEnv.getFiler() );
}
}
| processor/src/main/java/org/realityforge/arez/processor/ArezProcessor.java | package org.realityforge.arez.processor;
import com.google.auto.service.AutoService;
import com.squareup.javapoet.JavaFile;
import com.squareup.javapoet.TypeSpec;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import javax.annotation.Nonnull;
import javax.annotation.processing.AbstractProcessor;
import javax.annotation.processing.Processor;
import javax.annotation.processing.RoundEnvironment;
import javax.annotation.processing.SupportedAnnotationTypes;
import javax.annotation.processing.SupportedSourceVersion;
import javax.lang.model.SourceVersion;
import javax.lang.model.element.Element;
import javax.lang.model.element.ElementKind;
import javax.lang.model.element.ExecutableElement;
import javax.lang.model.element.Modifier;
import javax.lang.model.element.NestingKind;
import javax.lang.model.element.PackageElement;
import javax.lang.model.element.TypeElement;
import javax.lang.model.type.TypeMirror;
import org.realityforge.arez.annotations.ArezComponent;
import org.realityforge.arez.annotations.Repository;
import static javax.tools.Diagnostic.Kind.ERROR;
/**
* Annotation processor that analyzes Arez annotated source and generates models from the annotations.
*/
@SuppressWarnings( "Duplicates" )
@AutoService( Processor.class )
@SupportedAnnotationTypes( { "org.realityforge.arez.annotations.*", "javax.annotation.PostConstruct" } )
@SupportedSourceVersion( SourceVersion.RELEASE_8 )
public final class ArezProcessor
extends AbstractProcessor
{
/**
* {@inheritDoc}
*/
@Override
public boolean process( final Set<? extends TypeElement> annotations, final RoundEnvironment env )
{
final Set<? extends Element> elements = env.getElementsAnnotatedWith( ArezComponent.class );
processElements( elements );
return false;
}
private void processElements( @Nonnull final Set<? extends Element> elements )
{
for ( final Element element : elements )
{
try
{
process( element );
}
catch ( final IOException ioe )
{
processingEnv.getMessager().printMessage( ERROR, ioe.getMessage(), element );
}
catch ( final ArezProcessorException e )
{
processingEnv.getMessager().printMessage( ERROR, e.getMessage(), e.getElement() );
}
catch ( final Throwable e )
{
final StringWriter sw = new StringWriter();
e.printStackTrace( new PrintWriter( sw ) );
sw.flush();
final String message =
"Unexpected error will running the " + getClass().getName() + " processor. This has " +
"resulted in a failure to process the code and has left the compiler in an invalid " +
"state. Please report the failure to the developers so that it can be fixed.\n" +
" Report the error at: https://github.com/realityforge/arez/issues\n" +
"\n\n" +
sw.toString();
processingEnv.getMessager().printMessage( ERROR, message, element );
}
}
}
private void process( @Nonnull final Element element )
throws IOException, ArezProcessorException
{
final PackageElement packageElement = processingEnv.getElementUtils().getPackageOf( element );
final TypeElement typeElement = (TypeElement) element;
final ComponentDescriptor descriptor = parse( packageElement, typeElement );
emitTypeSpec( descriptor.getPackageName(), descriptor.buildType( processingEnv.getTypeUtils() ) );
if ( descriptor.hasRepository() )
{
emitTypeSpec( descriptor.getPackageName(), descriptor.buildRepository( processingEnv.getTypeUtils() ) );
emitTypeSpec( descriptor.getPackageName(), descriptor.buildRepositoryExtension() );
}
}
@SuppressWarnings( "unchecked" )
@Nonnull
private ComponentDescriptor parse( final PackageElement packageElement, final TypeElement typeElement )
throws ArezProcessorException
{
if ( ElementKind.CLASS != typeElement.getKind() )
{
throw new ArezProcessorException( "@ArezComponent target must be a class", typeElement );
}
else if ( typeElement.getModifiers().contains( Modifier.ABSTRACT ) )
{
throw new ArezProcessorException( "@ArezComponent target must not be abstract", typeElement );
}
else if ( typeElement.getModifiers().contains( Modifier.FINAL ) )
{
throw new ArezProcessorException( "@ArezComponent target must not be final", typeElement );
}
else if ( NestingKind.TOP_LEVEL != typeElement.getNestingKind() &&
!typeElement.getModifiers().contains( Modifier.STATIC ) )
{
throw new ArezProcessorException( "@ArezComponent target must not be a non-static nested class", typeElement );
}
final ArezComponent arezComponent = typeElement.getAnnotation( ArezComponent.class );
final String name =
ProcessorUtil.isSentinelName( arezComponent.name() ) ?
typeElement.getSimpleName().toString() :
arezComponent.name();
final List<ExecutableElement> methods = ProcessorUtil.getMethods( typeElement, processingEnv.getTypeUtils() );
final boolean generateToString = methods.stream()
.noneMatch( m -> m.getSimpleName().toString().equals( "toString" ) &&
m.getParameters().size() == 0 &&
!(m.getEnclosingElement().getSimpleName().toString().equals( "Object" ) &&
"java.lang".equals( processingEnv.getElementUtils().
getPackageOf( m.getEnclosingElement() ).getQualifiedName().toString() ) ));
final ComponentDescriptor descriptor =
new ComponentDescriptor( name,
arezComponent.singleton(),
arezComponent.disposable(),
arezComponent.allowEmpty(),
generateToString,
packageElement,
typeElement );
descriptor.analyzeCandidateMethods( methods, processingEnv.getTypeUtils() );
descriptor.validate();
for ( final ObservableDescriptor observable : descriptor.getObservables() )
{
if ( observable.expectSetter() )
{
final TypeMirror returnType = observable.getGetterType().getReturnType();
final TypeMirror parameterType = observable.getSetterType().getParameterTypes().get( 0 );
if ( !processingEnv.getTypeUtils().isSameType( parameterType, returnType ) )
{
throw new ArezProcessorException( "@Observable property defines a setter and getter with different types." +
" Getter type: " + returnType + " Setter type: " + parameterType + ".",
observable.getGetter() );
}
}
}
final Repository repository = typeElement.getAnnotation( Repository.class );
if ( null != repository )
{
final List<TypeElement> extensions =
ProcessorUtil.getTypeMirrorsAnnotationParameter( typeElement, "extensions", Repository.class ).stream().
map( typeMirror -> (TypeElement) processingEnv.getTypeUtils().asElement( typeMirror ) ).
collect( Collectors.toList() );
descriptor.configureRepository( repository.name(), extensions );
}
return descriptor;
}
private void emitTypeSpec( @Nonnull final String packageName, @Nonnull final TypeSpec typeSpec )
throws IOException
{
JavaFile.builder( packageName, typeSpec ).
skipJavaLangImports( true ).
build().
writeTo( processingEnv.getFiler() );
}
}
| Whitespace
| processor/src/main/java/org/realityforge/arez/processor/ArezProcessor.java | Whitespace | <ide><path>rocessor/src/main/java/org/realityforge/arez/processor/ArezProcessor.java
<ide> final boolean generateToString = methods.stream()
<ide> .noneMatch( m -> m.getSimpleName().toString().equals( "toString" ) &&
<ide> m.getParameters().size() == 0 &&
<del> !(m.getEnclosingElement().getSimpleName().toString().equals( "Object" ) &&
<del> "java.lang".equals( processingEnv.getElementUtils().
<del> getPackageOf( m.getEnclosingElement() ).getQualifiedName().toString() ) ));
<add> !( m.getEnclosingElement().getSimpleName().toString().equals( "Object" ) &&
<add> "java.lang".equals( processingEnv.getElementUtils().
<add> getPackageOf( m.getEnclosingElement() ).getQualifiedName().toString() ) ) );
<ide>
<ide> final ComponentDescriptor descriptor =
<ide> new ComponentDescriptor( name, |
|
JavaScript | mit | 3d7f86fa5c47f2df5b923c620c0d8174f5e87cf2 | 0 | tiltfactor/smorball,tiltfactor/smorball,tiltfactor/smorball,tiltfactor/smorball | /**
* Created by nidhincg on 27/12/14.
*/
(function(){
var CaptchaProcessor = function(config){
this.config = config;
this.wordCount = 0;
this.currentPass = 0;
this.maxPass = 1;
this.captchasOnScreen=[];
this.captchaTextBoxId = "inputText";
this.captchaPassButton = "passButton";
this.init();
cp = this;
}
CaptchaProcessor.prototype.init = function(){
//this.captchaDatasArray = [localData];
this.currentIndex = 0;
if(this.config.gameState.captchaDatasArray.length == 1){
this.callCaptchaFromServer();
}
activateCaptchaSet(this);
loadEvents(this);
}
var activateUI = function(me){
$("#canvasHolder").parent().css({position: 'relative'});
//$("#canvasHolder").css({top: me.config.canvasHeight - $("#canvasHolder").height(), position:'absolute'});
document.getElementById('canvasHolder').style.display = "block";
document.getElementById(me.captchaPassButton).value = 'Pass('+ me.maxPass + ')';
event.preventDefault();
$('#inputText').focus();
disablePassButton(me,false);
}
var loadEvents = function(me){
var pt = function(e){passText(me,e)};
EventBus.addEventListener("passText", pt);
var at = function(e){assistText(me, e.target)};
EventBus.addEventListener("assistText",at);
EventBus.addEventListener("callCaptchaFromServer",this.callCaptchaFromServer);
}
CaptchaProcessor.prototype.getCaptchaPlaceHolder = function(maxWidth,height,laneId){
activateUI(this);
var captchaHolder = new createjs.Bitmap();
captchaHolder.maxHeight = height;
captchaHolder.maxWidth = maxWidth;
captchaHolder.id = laneId;//by me
// activateCaptchaSet(this);
this.load(captchaHolder);
return captchaHolder;
}
CaptchaProcessor.prototype.load = function(captcha){
var captchaData = getCaptchaData(this);
var message = "";
//console.log(captchaData);
captcha.image = captchaData.url;
if(this.captchaDatas.local){
setScale(captcha,captcha.image.width, captcha.image.height);
captcha.texts = [captchaData.ocr1, captchaData.ocr2];
if(this.config.gameState.currentLevel==1){
EventBus.dispatch("showCommentary", captchaData.message);
}
}else{
var myCords = getCaptchaCoordinates(captchaData.coords);
captcha.sourceRect = new createjs.Rectangle(myCords.sPoint.x,myCords.sPoint.y,myCords.width,myCords.height);
setScale(captcha, myCords.width, myCords.height);
captcha.texts =captchaData.texts;
captcha._id = captchaData._id;
}
//captcha.x = 10;
captcha.y = captcha.maxHeight/2 - (captcha.getTransformedBounds().height/2) ;
// captcha.datas = captchaData;
captcha.scaleX =captcha.scaleY = 0;
createjs.Tween.get(captcha).to({scaleX:1,scaleY:1},1000,createjs.Ease.backOut);
this.captchasOnScreen.push(captcha);
// console.log(this.captchasOnScreen);
++this.currentIndex;
};
var getCaptchaData = function(me){
checkCaptchaSetting(me);
var captchaData = me.captchaDatas.differences[me.currentIndex];
var imageId = null;
var myText = null;
if(me.captchaDatas.local){
imageId = captchaData.image.split(".")[0];
myText = captchaData.ocr1;
}else{
imageId = me.captchaDatas._id;
myText = captchaData.texts[0];
}
captchaData.url = me.config.loader.getResult(imageId);
if(captchaData.url == null){
me.currentIndex++;
captchaData = getCaptchaData(me);
}
if(getCaptcha(me,myText) != null){
me.currentIndex++;
captchaData = getCaptchaData(me);
}
return captchaData;
}
CaptchaProcessor.prototype.reset = function(){
for(var i= this.captchasOnScreen.length-1 ; i >= 0; i--) {
var captcha = this.captchasOnScreen[i];
var index = this.captchasOnScreen.indexOf(captcha);
this.captchasOnScreen.splice(index,1);
this.load(captcha);
}
}
var setScale = function(captcha, imgWidth, imgHeight){
var cW = captcha.maxWidth-20;
var cH = captcha.maxHeight - 10;
var sx = cW/imgWidth > 1 ? 1: cW/imgWidth ;
var sy = cH/imgHeight > 1 ? 1 : cH/imgHeight ;
captcha.scaleX = sx;
captcha.scaleY = sy;
}
var checkCaptchaSetting = function(me){
//console.log("index : " + me.currentIndex);
if(me.currentIndex == Math.floor(me.captchaDatas.differences.length/2) && me.config.gameState.currentLevel != 1){
console.log("next load");
me.callCaptchaFromServer();
}
if(me.captchaDatas.local && me.config.loader.localCapthcaSize <= me.currentIndex){
// console.log(me.config.loader.localCapthcaSize +" entering into loop..")
activateCaptchaSet(me);
}
if(me.currentIndex >= me.captchaDatas.differences.length){
//console.log("change");
activateCaptchaSet(me);
}
}
CaptchaProcessor.prototype.compare = function(){
var me = this;
var output = {};
var input = document.getElementById(this.captchaTextBoxId).value;
if(input == ''){
output.pass = false;
output.message = "Enter text";
return output;
}
if(input == "completelevel"){
this.config.gameState.gs.gameLevelPoints[this.config.gameState.currentLevel-1] = 3;
if(this.config.gameState.gs.maxLevel<=this.config.gameState.currentLevel+1){
this.config.gameState.gs.maxLevel = ++this.config.gameState.currentLevel;
}
this.config.gameState.currentState = this.config.gameState.states.GAME_OVER;
output.pass = false;
output.cheated = true;
output.message = "cheat code is accessed";
clearText(this);
return output;
}
if(input=="unlockall"){
this.config.gameState.gs.maxLevel = this.config.gameState.totalLevels;
this.config.gameState.currentState = this.config.gameState.states.GAME_OVER;
output.pass = false;
output.cheated = true;
output.message = "cheat code is accessed";
clearText(this);
return output;
}
var cw = new closestWord(input,this.captchasOnScreen);
if(cw.match){
if(input.length>8){
output.extraDamage = true;
}
output.pass = true;
output.message = "correct";
var captcha = cw.closestOcr;
var index = this.captchasOnScreen.indexOf(captcha);
this.wordCount++;
this.captchasOnScreen.splice(index,1);
output.laneId = captcha.id;
this.load(captcha);
}else{
output.pass = false;
output.message = "incorrect";
var captcha = cw.closestOcr;
var passDisabled = $("#canvasHolder #passButton").prop("disabled");
$("#canvasHolder input").prop("disabled",true);
setTimeout(function(){
$("#canvasHolder input").prop("disabled",false);
if(passDisabled)
$("#canvasHolder #passButton").prop("disabled",true);
me.reset();
$("#inputText").focus();
},me.config.gameState.gs.penalty);
}
var ob = {};
ob._id = captcha._id;
ob.text = input;
if(ob._id){
me.config.gameState.inputTextArr.push(ob);
}
clearText(this);
return output;
}
var getCaptcha = function(me,input){
for(var i = 0 ; i< me.captchasOnScreen.length; i++){
var captcha = me.captchasOnScreen[i];
if(input == captcha.texts[0]||input==captcha.texts[1]){
return captcha;
}
}
return null;
}
var passText = function(me){
clearText(me);
if(++me.currentPass >= me.maxPass){
disablePassButton(me,true);
document.getElementById(me.captchaPassButton).value = "PASS";
$("#inputText").focus();
}
me.reset();
}
var clearText = function(me){
document.getElementById(me.captchaTextBoxId).value = "";
}
var disablePassButton = function(me,status){
document.getElementById(me.captchaPassButton).disabled = status;
}
var getCaptchaCoordinates = function(cord){
var myCords = {};
myCords.sPoint = cord[3];
myCords.width = cord[2].x - cord[3].x;
myCords.height = cord[0].y - cord[3].y;
return myCords;
}
var matchText = function(textArray, input){
for(var i = 0 ; i< textArray.length; i++){
var text = textArray[i];
if(text == input){
return true;
}
}
return false;
}
CaptchaProcessor.prototype.callCaptchaFromServer = function(){
var me = this;
var url = "http://tiltfactor1.dartmouth.edu:8080/api/page";
// setTimeout(function(){
// console.log("call from server");
$.ajax({
dataType: 'json',
url: url,
beforeSend : function(xhr){
xhr.setRequestHeader('x-access-token', 'eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJzdWIiOiJHYW1lIiwiaWF0IjoxNDE1MzQxNjMxMjY4LCJpc3MiOiJCSExTZXJ2ZXIifQ.bwRps5G6lAd8tGZKK7nExzhxFrZmAwud0C2RW26sdRM');
},
error: function(XMLHttpRequest, textStatus, errorThrown){
console.log("error: "+ textStatus);
},
success: function(data){
//console.log(data);
if(data != null)
processCaptchaData(data, me);
}
});
// }, 1);
}
var processCaptchaData = function(data, me){
var myData = {"url" : data.url, "differences" : data.differences, "_id": data._id, "local": false };
var _onImageLoad = function(me){
//console.log("after image load");
me.config.gameState.captchaDatasArray.push(myData);
if((me.captchaDatas == undefined || me.captchaDatas.local) && me.config.gameState.currentLevel != 1 ){
activateCaptchaSet(me);
}
}
me.config.loader.load([{src: myData.url + ".jpeg", id: myData._id}], _onImageLoad, me);
}
var activateCaptchaSet = function(me){
//console.log(me.config.gameState.currentLevel+ " activate captcha set");
if(me.config.gameState.currentLevel == 1){
me.captchaDatas = me.config.gameState.captchaDatasArray[0];
}else{
me.captchaDatas = me.config.gameState.captchaDatasArray[me.config.gameState.captchaDatasArray.length-1];
}
if(!me.captchaDatas.local){
me.config.gameState.captchaDatasArray.pop();
}
me.currentIndex = 0;
// console.log(me.captchaDatas.differences.length);
}
CaptchaProcessor.prototype.getCaptchaImageData = function(){
if(this.config.gameState.captchaDatasArray.length == 1){
var data = this.config.gameState.captchaDatasArray[0];
var url = data.url + ".jpg";
return {"src" : url, "id": data._id };
}
return null;
}
var assistText = function(me,laneId){
for(var i=0;i<me.captchasOnScreen.length;i++){
var captcha = me.captchasOnScreen[i];
if(captcha.id == laneId){
$("#"+me.captchaTextBoxId).val(captcha.texts[0]);
}
}
}
CaptchaProcessor.prototype.hideCaptchas = function(){
for(var i =0;i<this.captchasOnScreen.length;i++){
this.captchasOnScreen[i].alpha=0;
}
}
CaptchaProcessor.prototype.showCaptchas = function(){
for(var i =0;i<this.captchasOnScreen.length;i++){
this.captchasOnScreen[i].alpha=1;
}
};
CaptchaProcessor.prototype.getWordCount = function(){
return this.wordCount;
};
window.CaptchaProcessor = CaptchaProcessor;
}()); | Smorball/js/model/CaptchaProcessor.js | /**
* Created by nidhincg on 27/12/14.
*/
(function(){
var CaptchaProcessor = function(config){
this.config = config;
this.wordCount = 0;
this.currentPass = 0;
this.maxPass = 1;
this.captchasOnScreen=[];
this.captchaTextBoxId = "inputText";
this.captchaPassButton = "passButton";
this.init();
cp = this;
}
CaptchaProcessor.prototype.init = function(){
//this.captchaDatasArray = [localData];
this.currentIndex = 0;
if(this.config.gameState.captchaDatasArray.length == 1){
this.callCaptchaFromServer();
}
activateCaptchaSet(this);
loadEvents(this);
}
var activateUI = function(me){
$("#canvasHolder").parent().css({position: 'relative'});
//$("#canvasHolder").css({top: me.config.canvasHeight - $("#canvasHolder").height(), position:'absolute'});
document.getElementById('canvasHolder').style.display = "block";
document.getElementById(me.captchaPassButton).value = 'Pass('+ me.maxPass + ')';
event.preventDefault();
$('#inputText').focus();
disablePassButton(me,false);
}
var loadEvents = function(me){
var pt = function(e){passText(me,e)};
EventBus.addEventListener("passText", pt);
var at = function(e){assistText(me, e.target)};
EventBus.addEventListener("assistText",at);
EventBus.addEventListener("callCaptchaFromServer",this.callCaptchaFromServer);
}
CaptchaProcessor.prototype.getCaptchaPlaceHolder = function(maxWidth,height,laneId){
activateUI(this);
var captchaHolder = new createjs.Bitmap();
captchaHolder.maxHeight = height;
captchaHolder.maxWidth = maxWidth;
captchaHolder.id = laneId;//by me
// activateCaptchaSet(this);
this.load(captchaHolder);
return captchaHolder;
}
CaptchaProcessor.prototype.load = function(captcha){
var captchaData = getCaptchaData(this);
var message = "";
//console.log(captchaData);
captcha.image = captchaData.url;
if(this.captchaDatas.local){
setScale(captcha,captcha.image.width, captcha.image.height);
captcha.texts = [captchaData.ocr1, captchaData.ocr2];
if(this.config.gameState.currentLevel==1){
EventBus.dispatch("showCommentary", captchaData.message);
}
}else{
var myCords = getCaptchaCoordinates(captchaData.coords);
captcha.sourceRect = new createjs.Rectangle(myCords.sPoint.x,myCords.sPoint.y,myCords.width,myCords.height);
setScale(captcha, myCords.width, myCords.height);
captcha.texts =captchaData.texts;
captcha._id = captchaData._id;
}
//captcha.x = 10;
captcha.y = captcha.maxHeight/2 - (captcha.getTransformedBounds().height/2) ;
// captcha.datas = captchaData;
captcha.scaleX =captcha.scaleY = 0;
createjs.Tween.get(captcha).to({scaleX:1,scaleY:1},1000,createjs.Ease.backOut);
this.captchasOnScreen.push(captcha);
// console.log(this.captchasOnScreen);
++this.currentIndex;
};
var getCaptchaData = function(me){
checkCaptchaSetting(me);
var captchaData = me.captchaDatas.differences[me.currentIndex];
var imageId = null;
var myText = null;
if(me.captchaDatas.local){
imageId = captchaData.image.split(".")[0];
myText = captchaData.ocr1;
}else{
imageId = me.captchaDatas._id;
myText = captchaData.texts[0];
}
captchaData.url = me.config.loader.getResult(imageId);
if(captchaData.url == null){
me.currentIndex++;
captchaData = getCaptchaData(me);
}
if(getCaptcha(me,myText) != null){
me.currentIndex++;
captchaData = getCaptchaData(me);
}
return captchaData;
}
CaptchaProcessor.prototype.reset = function(){
for(var i= this.captchasOnScreen.length-1 ; i >= 0; i--) {
var captcha = this.captchasOnScreen[i];
var index = this.captchasOnScreen.indexOf(captcha);
this.captchasOnScreen.splice(index,1);
this.load(captcha);
}
}
var setScale = function(captcha, imgWidth, imgHeight){
var cW = captcha.maxWidth-20;
var cH = captcha.maxHeight - 10;
var sx = cW/imgWidth > 1 ? 1: cW/imgWidth ;
var sy = cH/imgHeight > 1 ? 1 : cH/imgHeight ;
captcha.scaleX = sx;
captcha.scaleY = sy;
}
var checkCaptchaSetting = function(me){
//console.log("index : " + me.currentIndex);
if(me.currentIndex == Math.floor(me.captchaDatas.differences.length/2) && me.config.gameState.currentLevel != 1){
console.log("next load");
me.callCaptchaFromServer();
}
if(me.captchaDatas.local && me.config.loader.localCapthcaSize <= me.currentIndex){
// console.log(me.config.loader.localCapthcaSize +" entering into loop..")
activateCaptchaSet(me);
}
if(me.currentIndex >= me.captchaDatas.differences.length){
//console.log("change");
activateCaptchaSet(me);
}
}
CaptchaProcessor.prototype.compare = function(){
var me = this;
var output = {};
var input = document.getElementById(this.captchaTextBoxId).value;
if(input == ''){
output.pass = false;
output.message = "Enter text";
return output;
}
if(input == "completelevel"){
this.config.gameState.gs.gameLevelPoints[this.config.gameState.currentLevel-1] = 3;
if(this.config.gameState.gs.maxLevel<=this.config.gameState.currentLevel+1){
this.config.gameState.gs.maxLevel = ++this.config.gameState.currentLevel;
}
this.config.gameState.currentState = this.config.gameState.states.GAME_OVER;
output.pass = false;
output.cheated = true;
output.message = "cheat code is accessed";
clearText(this);
return output;
}
if(input=="unlockall"){
this.config.gameState.gs.maxLevel = this.config.gameState.totalLevels;
this.config.gameState.currentState = this.config.gameState.states.GAME_OVER;
output.pass = false;
output.cheated = true;
output.message = "cheat code is accessed";
clearText(this);
return output;
}
var cw = new closestWord(input,this.captchasOnScreen);
if(cw.match){
if(input.length>8){
output.extraDamage = true;
}
output.pass = true;
output.message = "correct";
var captcha = cw.closestOcr;
var index = this.captchasOnScreen.indexOf(captcha);
this.wordCount++;
this.captchasOnScreen.splice(index,1);
output.laneId = captcha.id;
this.load(captcha);
}else{
output.pass = false;
output.message = "incorrect";
var captcha = cw.closestOcr;
$("#canvasHolder input").prop("disabled",true);
setTimeout(function(){
$("#canvasHolder input").prop("disabled",false);
me.reset();
$("#inputText").focus();
},me.config.gameState.gs.penalty);
}
var ob = {};
ob._id = captcha._id;
ob.text = input;
if(ob._id){
me.config.gameState.inputTextArr.push(ob);
}
clearText(this);
return output;
}
var getCaptcha = function(me,input){
for(var i = 0 ; i< me.captchasOnScreen.length; i++){
var captcha = me.captchasOnScreen[i];
if(input == captcha.texts[0]||input==captcha.texts[1]){
return captcha;
}
}
return null;
}
var passText = function(me){
clearText(me);
if(++me.currentPass >= me.maxPass){
disablePassButton(me,true);
document.getElementById(me.captchaPassButton).value = "PASS";
$("#inputText").focus();
}
me.reset();
}
var clearText = function(me){
document.getElementById(me.captchaTextBoxId).value = "";
}
var disablePassButton = function(me,status){
document.getElementById(me.captchaPassButton).disabled = status;
}
var getCaptchaCoordinates = function(cord){
var myCords = {};
myCords.sPoint = cord[3];
myCords.width = cord[2].x - cord[3].x;
myCords.height = cord[0].y - cord[3].y;
return myCords;
}
var matchText = function(textArray, input){
for(var i = 0 ; i< textArray.length; i++){
var text = textArray[i];
if(text == input){
return true;
}
}
return false;
}
CaptchaProcessor.prototype.callCaptchaFromServer = function(){
var me = this;
var url = "http://tiltfactor1.dartmouth.edu:8080/api/page";
// setTimeout(function(){
// console.log("call from server");
$.ajax({
dataType: 'json',
url: url,
beforeSend : function(xhr){
xhr.setRequestHeader('x-access-token', 'eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJzdWIiOiJHYW1lIiwiaWF0IjoxNDE1MzQxNjMxMjY4LCJpc3MiOiJCSExTZXJ2ZXIifQ.bwRps5G6lAd8tGZKK7nExzhxFrZmAwud0C2RW26sdRM');
},
error: function(XMLHttpRequest, textStatus, errorThrown){
console.log("error: "+ textStatus);
},
success: function(data){
//console.log(data);
if(data != null)
processCaptchaData(data, me);
}
});
// }, 1);
}
var processCaptchaData = function(data, me){
var myData = {"url" : data.url, "differences" : data.differences, "_id": data._id, "local": false };
var _onImageLoad = function(me){
//console.log("after image load");
me.config.gameState.captchaDatasArray.push(myData);
if((me.captchaDatas == undefined || me.captchaDatas.local) && me.config.gameState.currentLevel != 1 ){
activateCaptchaSet(me);
}
}
me.config.loader.load([{src: myData.url + ".jpeg", id: myData._id}], _onImageLoad, me);
}
var activateCaptchaSet = function(me){
//console.log(me.config.gameState.currentLevel+ " activate captcha set");
if(me.config.gameState.currentLevel == 1){
me.captchaDatas = me.config.gameState.captchaDatasArray[0];
}else{
me.captchaDatas = me.config.gameState.captchaDatasArray[me.config.gameState.captchaDatasArray.length-1];
}
if(!me.captchaDatas.local){
me.config.gameState.captchaDatasArray.pop();
}
me.currentIndex = 0;
// console.log(me.captchaDatas.differences.length);
}
CaptchaProcessor.prototype.getCaptchaImageData = function(){
if(this.config.gameState.captchaDatasArray.length == 1){
var data = this.config.gameState.captchaDatasArray[0];
var url = data.url + ".jpg";
return {"src" : url, "id": data._id };
}
return null;
}
var assistText = function(me,laneId){
for(var i=0;i<me.captchasOnScreen.length;i++){
var captcha = me.captchasOnScreen[i];
if(captcha.id == laneId){
$("#"+me.captchaTextBoxId).val(captcha.texts[0]);
}
}
}
CaptchaProcessor.prototype.hideCaptchas = function(){
for(var i =0;i<this.captchasOnScreen.length;i++){
this.captchasOnScreen[i].alpha=0;
}
}
CaptchaProcessor.prototype.showCaptchas = function(){
for(var i =0;i<this.captchasOnScreen.length;i++){
this.captchasOnScreen[i].alpha=1;
}
};
CaptchaProcessor.prototype.getWordCount = function(){
return this.wordCount;
};
window.CaptchaProcessor = CaptchaProcessor;
}()); | - Pass button needs to be disabled if 0 pass left. | Smorball/js/model/CaptchaProcessor.js | - Pass button needs to be disabled if 0 pass left. | <ide><path>morball/js/model/CaptchaProcessor.js
<ide> output.pass = false;
<ide> output.message = "incorrect";
<ide> var captcha = cw.closestOcr;
<add> var passDisabled = $("#canvasHolder #passButton").prop("disabled");
<ide> $("#canvasHolder input").prop("disabled",true);
<ide> setTimeout(function(){
<ide> $("#canvasHolder input").prop("disabled",false);
<add> if(passDisabled)
<add> $("#canvasHolder #passButton").prop("disabled",true);
<ide> me.reset();
<ide> $("#inputText").focus();
<ide> },me.config.gameState.gs.penalty); |
|
Java | mit | f750e6be8450a06a1473a8abe59fc9840f797e00 | 0 | likcoras/Asuka,likcoras/SSBot | package io.github.likcoras.ssbot.core;
import java.io.IOException;
import java.util.Set;
import io.github.likcoras.ssbot.ConfigParser;
import io.github.likcoras.ssbot.auth.AuthHandler;
import io.github.likcoras.ssbot.ignore.IgnoreHandler;
import io.github.likcoras.ssbot.util.BotUtils;
import io.github.likcoras.ssbot.util.TimeDiff;
import org.apache.log4j.Logger;
import org.pircbotx.Channel;
import org.pircbotx.User;
import org.pircbotx.UserLevel;
public class BotCoreHandlers {
private static final Logger LOG = Logger.getLogger(BotCoreHandlers.class);
private static final String[] IGNORE_HELP = {
BotUtils.addBold("%b.ignore usage%b"),
BotUtils.addBold("%b.ignore add [nick]:%b makes the bot ignore the user"),
BotUtils.addBold("%b.ignore rem [nick]:%b removes the ignore from the user"),
BotUtils.addBold("%b.ignore list:%b lists the ignored users") };
private final String botNick;
private final String quitMsg;
private final Long startTime;
private final AuthHandler auth;
private final IgnoreHandler ignore;
public BotCoreHandlers(ConfigParser cfg) {
botNick = cfg.getProperty("ircnick");
quitMsg = cfg.getProperty("quitmsg");
startTime = System.currentTimeMillis();
auth = new AuthHandler();
ignore = new IgnoreHandler();
}
public void initialize() throws IOException {
ignore.loadIgnores();
}
public AuthHandler getAuth() {
return auth;
}
public BotCoreResult handle(User user, Channel chan, String msg) {
if (msg.toLowerCase().startsWith(".ignore") || msg.toLowerCase().startsWith("!ignore")) {
if (auth.checkAuth(UserLevel.OP, user, chan))
ignore(user, chan, msg);
else
failIgnore(user, chan, msg);
return BotCoreResult.IGNORE;
} else if (msg.equalsIgnoreCase(".quit " + botNick) || msg.equalsIgnoreCase("!quit " + botNick)) {
if (auth.checkAuth(UserLevel.OP, user, chan))
return quit(user, chan);
else
return failQuit(user, chan);
} else if (msg.equalsIgnoreCase(".uptime") || msg.equalsIgnoreCase("!uptime")) {
uptime(chan);
return BotCoreResult.IGNORE;
} else if (ignore.isIgnored(user.getNick()))
return BotCoreResult.IGNORE;
return BotCoreResult.HANDLE;
}
private void ignore(final User user, final Channel chan, final String msg) {
LOG.info("Ignore command by " + BotUtils.userIdentifier(user) + ": " + msg);
final String[] command = msg.split("\\s+");
try {
if (command.length < 2)
invalidIgnore(user);
else if (command[1].equalsIgnoreCase("add"))
ignoreAdd(chan, command[2]);
else if (command[1].equalsIgnoreCase("rem"))
ignoreRem(chan, command[2]);
else if (command[1].equalsIgnoreCase("list"))
ignoreList(user);
else if (command[1].equalsIgnoreCase("help"))
ignoreUsage(user);
else
invalidIgnore(user);
} catch (final IOException e) {
LOG.error("Error while handling ignore command '" + msg + "'", e);
}
}
private void invalidIgnore(final User user) {
user.send().notice("Wrong usage! Try '.ignore help'!");
}
private void ignoreUsage(final User user) {
for (String help : IGNORE_HELP)
user.send().notice(help);
}
private void ignoreAdd(final Channel chan, final String target)
throws IOException {
ignore.addIgnore(target);
chan.send().message("User " + target + " was added to the ignore list");
}
private void ignoreRem(final Channel chan, final String target)
throws IOException {
ignore.delIgnore(target);
chan.send().message("User " + target + " removed from ignore list");
}
private void ignoreList(final User user) {
Set<String> ignoreList = ignore.listIgnores();
if (ignoreList.isEmpty()) {
user.send().notice("There are no ignored users!");
return;
}
String ignores = "";
for (final String ignore : ignoreList)
ignores += ignore + ", ";
ignores = ignores.substring(0, ignores.length() - 2);
user.send().notice("Ignored Users: " + ignores);
}
private void failIgnore(final User user, final Channel chan,
final String msg) {
LOG.info("Failed ignore attempt by " + BotUtils.userIdentifier(user));
chan.send().message(user, "Sorry, you're not allowed to do that!");
}
private BotCoreResult quit(final User user, final Channel chan) {
LOG.info("Quit requested by " + BotUtils.userIdentifier(user));
broadcastQuit(user);
return BotCoreResult.QUIT;
}
private void broadcastQuit(User user) {
String msg = String.format(quitMsg, user.getNick());
for (Channel chan : user.getBot().getUserChannelDao().getAllChannels())
chan.send().message(msg);
}
private BotCoreResult failQuit(final User user, final Channel chan) {
LOG.info("Failed quit attempt by " + BotUtils.userIdentifier(user));
chan.send().message(user, "Sorry, you're not allowed to do that!");
return BotCoreResult.IGNORE;
}
private void uptime(Channel chan) {
String uptime = TimeDiff.getTime(System.currentTimeMillis() - startTime).getComplexMessage();
chan.send().message("Uptime: " + uptime);
}
}
| src/main/java/io/github/likcoras/ssbot/core/BotCoreHandlers.java | package io.github.likcoras.ssbot.core;
import java.io.IOException;
import java.util.Set;
import io.github.likcoras.ssbot.ConfigParser;
import io.github.likcoras.ssbot.auth.AuthHandler;
import io.github.likcoras.ssbot.ignore.IgnoreHandler;
import io.github.likcoras.ssbot.util.BotUtils;
import io.github.likcoras.ssbot.util.TimeDiff;
import org.apache.log4j.Logger;
import org.pircbotx.Channel;
import org.pircbotx.User;
import org.pircbotx.UserLevel;
public class BotCoreHandlers {
private static final Logger LOG = Logger.getLogger(BotCoreHandlers.class);
private static final String[] IGNORE_HELP = {
BotUtils.addBold("%b.ignore usage%b"),
BotUtils.addBold("%b.ignore add [nick]:%b makes the bot ignore the user"),
BotUtils.addBold("%b.ignore rem [nick]:%b removes the ignore from the user"),
BotUtils.addBold("%b.ignore list:%b lists the ignored users") };
private final String botNick;
private final String quitMsg;
private final Long startTime;
private final AuthHandler auth;
private final IgnoreHandler ignore;
public BotCoreHandlers(ConfigParser cfg) {
botNick = cfg.getProperty("ircnick");
quitMsg = cfg.getProperty("quitmsg");
startTime = System.currentTimeMillis();
auth = new AuthHandler();
ignore = new IgnoreHandler();
}
public void initialize() throws IOException {
ignore.loadIgnores();
}
public AuthHandler getAuth() {
return auth;
}
public BotCoreResult handle(User user, Channel chan, String msg) {
if (msg.toLowerCase().startsWith(".ignore") || msg.toLowerCase().startsWith("!ignore")) {
if (auth.checkAuth(UserLevel.OP, user, chan))
ignore(user, chan, msg);
else
failIgnore(user, chan, msg);
return BotCoreResult.IGNORE;
} else if (msg.equalsIgnoreCase(".quit " + botNick) || msg.equalsIgnoreCase("!quit " + botNick)) {
if (auth.checkAuth(UserLevel.OP, user, chan))
return quit(user, chan);
else
return failQuit(user, chan);
} else if (msg.equalsIgnoreCase(".uptime") || msg.equalsIgnoreCase("!uptime")) {
uptime(chan);
return BotCoreResult.IGNORE;
} else if (ignore.isIgnored(user.getNick()))
return BotCoreResult.IGNORE;
return BotCoreResult.HANDLE;
}
private void ignore(final User user, final Channel chan, final String msg) {
LOG.info("Ignore command by " + BotUtils.userIdentifier(user) + ": " + msg);
final String[] command = msg.split("\\s+");
try {
if (command.length < 2)
ignoreUsage(user);
else if (command[1].equalsIgnoreCase("add"))
ignoreAdd(chan, command[2]);
else if (command[1].equalsIgnoreCase("rem"))
ignoreRem(chan, command[2]);
else if (command[1].equalsIgnoreCase("list"))
ignoreList(user);
else
ignoreUsage(user);
} catch (final IOException e) {
LOG.error("Error while handling ignore command '" + msg + "'", e);
}
}
private void ignoreUsage(final User user) {
for (String help : IGNORE_HELP)
user.send().notice(help);
}
private void ignoreAdd(final Channel chan, final String target)
throws IOException {
ignore.addIgnore(target);
chan.send().message("User " + target + " was added to the ignore list");
}
private void ignoreRem(final Channel chan, final String target)
throws IOException {
ignore.delIgnore(target);
chan.send().message("User " + target + " removed from ignore list");
}
private void ignoreList(final User user) {
Set<String> ignoreList = ignore.listIgnores();
if (ignoreList.isEmpty()) {
user.send().notice("There are no ignored users!");
return;
}
String ignores = "";
for (final String ignore : ignoreList)
ignores += ignore + ", ";
ignores = ignores.substring(0, ignores.length() - 2);
user.send().notice("Ignored Users: " + ignores);
}
private void failIgnore(final User user, final Channel chan,
final String msg) {
LOG.info("Failed ignore attempt by " + BotUtils.userIdentifier(user));
chan.send().message(user, "Sorry, you're not allowed to do that!");
}
private BotCoreResult quit(final User user, final Channel chan) {
LOG.info("Quit requested by " + BotUtils.userIdentifier(user));
broadcastQuit(user);
return BotCoreResult.QUIT;
}
private void broadcastQuit(User user) {
String msg = String.format(quitMsg, user.getNick());
for (Channel chan : user.getBot().getUserChannelDao().getAllChannels())
chan.send().message(msg);
}
private BotCoreResult failQuit(final User user, final Channel chan) {
LOG.info("Failed quit attempt by " + BotUtils.userIdentifier(user));
chan.send().message(user, "Sorry, you're not allowed to do that!");
return BotCoreResult.IGNORE;
}
private void uptime(Channel chan) {
String uptime = TimeDiff.getTime(System.currentTimeMillis() - startTime).getComplexMessage();
chan.send().message("Uptime: " + uptime);
}
}
| Only send ignore help when explicitly requested | src/main/java/io/github/likcoras/ssbot/core/BotCoreHandlers.java | Only send ignore help when explicitly requested | <ide><path>rc/main/java/io/github/likcoras/ssbot/core/BotCoreHandlers.java
<ide> try {
<ide>
<ide> if (command.length < 2)
<del> ignoreUsage(user);
<add> invalidIgnore(user);
<ide> else if (command[1].equalsIgnoreCase("add"))
<ide> ignoreAdd(chan, command[2]);
<ide> else if (command[1].equalsIgnoreCase("rem"))
<ide> ignoreRem(chan, command[2]);
<ide> else if (command[1].equalsIgnoreCase("list"))
<ide> ignoreList(user);
<add> else if (command[1].equalsIgnoreCase("help"))
<add> ignoreUsage(user);
<ide> else
<del> ignoreUsage(user);
<add> invalidIgnore(user);
<ide>
<ide> } catch (final IOException e) {
<ide>
<ide> LOG.error("Error while handling ignore command '" + msg + "'", e);
<ide>
<ide> }
<add>
<add> }
<add>
<add> private void invalidIgnore(final User user) {
<add>
<add> user.send().notice("Wrong usage! Try '.ignore help'!");
<ide>
<ide> }
<ide> |
|
Java | apache-2.0 | daa7e791ec4f4d2b87ba7dc917a69333f898e522 | 0 | OptimalOrange/CoolTechnologies,OptimalOrange/CoolTechnologies,Bai-Jie/CoolTechnologies,Bai-Jie/CoolTechnologies | package com.optimalorange.cooltechnologies.ui.fragment;
import com.android.volley.Response;
import com.android.volley.VolleyError;
import com.android.volley.toolbox.ImageLoader;
import com.etsy.android.grid.StaggeredGridView;
import com.optimalorange.cooltechnologies.R;
import com.optimalorange.cooltechnologies.entity.Video;
import com.optimalorange.cooltechnologies.ui.view.PullRefreshLayout;
import com.optimalorange.cooltechnologies.util.Utils;
import com.optimalorange.cooltechnologies.util.VideosRequest;
import com.optimalorange.cooltechnologies.util.VolleySingleton;
import android.app.Fragment;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import android.widget.ImageView;
import android.widget.TextView;
import java.util.LinkedList;
import java.util.List;
/**
* Created by WANGZHENGZE on 2014/11/20.
* 热门
*/
public class ListVideosFragment extends Fragment {
// Fragment初始化参数
/**
* 应当显示的Video的genre(类型,示例:手机)<br/>
* Type: String
*
* @see #newInstance(String genre)
*/
public static final String ARGUMENT_KEY_GENRE =
ListVideosFragment.class.getName() + ".argument.KEY_GENRE";
private static final String CATEGORY_LABEL_OF_TECH = "科技";
private String mYoukuClientId;
private VolleySingleton mVolleySingleton;
private String mGenre;
private int mPage = 1;
private StaggeredGridView mGridView;
private PullRefreshLayout mPullRefreshLayout;
private ItemsAdapter mItemsAdapter;
private LinkedList<Video> mListVideos = new LinkedList<Video>();
/**
* 获取Video(见entity包中Video)
*
* @return VideoRequest
*/
private VideosRequest buildQueryVideosRequest() {
VideosRequest.Builder builder = new VideosRequest.Builder()
.setClient_id(mYoukuClientId)
.setCategory(CATEGORY_LABEL_OF_TECH)
.setPage(mPage)
.setPeriod(VideosRequest.Builder.PERIOD.WEEK)
.setOrderby(VideosRequest.Builder.ORDER_BY.VIEW_COUNT)
.setResponseListener(new Response.Listener<List<Video>>() {
@Override
public void onResponse(List<Video> videos) {
for (Video mVideo : videos) {
mListVideos.add(mVideo);
if (mItemsAdapter != null) {
mItemsAdapter.notifyDataSetChanged();
}
if (mPullRefreshLayout != null) {
mPullRefreshLayout.setRefreshing(false);
}
}
}
})
.setErrorListener(new Response.ErrorListener() {
@Override
public void onErrorResponse(VolleyError error) {
error.printStackTrace();
}
});
//为下一次请求获取Video翻页
mPage++;
//如果没设置mGenre就用默认的,如果设置了mGenre就请求相应的类型Video
if (mGenre != null) {
builder.setGenre(mGenre);
}
return builder.build();
}
/**
* 用于 创建设置有指定参数的新{@link ListVideosFragment}实例的 工厂方法
*
* @param genre 应当显示的Video的genre(类型,示例:手机)
* @return 设置有指定参数的新实例
* @see #ARGUMENT_KEY_GENRE
*/
public static ListVideosFragment newInstance(String genre) {
ListVideosFragment fragment = new ListVideosFragment();
Bundle arguments = new Bundle();
arguments.putString(ARGUMENT_KEY_GENRE, genre);
fragment.setArguments(arguments);
return fragment;
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
// 如果设置过Arguments,应用之
if (getArguments() != null) {
mGenre = getArguments().getString(ARGUMENT_KEY_GENRE);
}
mYoukuClientId = getString(R.string.youku_client_id);
mVolleySingleton = VolleySingleton.getInstance(getActivity());
mVolleySingleton.addToRequestQueue(buildQueryVideosRequest());
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
return inflater.inflate(R.layout.fragment_list_videos, container, false);
}
@Override
public void onViewCreated(View view, Bundle savedInstanceState) {
super.onViewCreated(view, savedInstanceState);
mGridView = (StaggeredGridView) view.findViewById(R.id.grid_view);
mPullRefreshLayout = (PullRefreshLayout) view.findViewById(R.id.pull_refresh_layout);
mItemsAdapter = new ItemsAdapter(mListVideos, mVolleySingleton.getImageLoader());
mGridView.setAdapter(mItemsAdapter);
mPullRefreshLayout.setOnRefreshListener(new PullRefreshLayout.OnRefreshListener() {
@Override
public void onRefresh() {
//每次刷新时去除所有Video
mListVideos.clear();
//重新请求第一页的内容
mPage = 1;
mItemsAdapter.notifyDataSetChanged();
//开始请求刷新Video
mVolleySingleton.addToRequestQueue(buildQueryVideosRequest());
}
});
}
/**
* 热门视频的图片墙的适配器
*
* @author Zhou Peican
*/
public class ItemsAdapter extends BaseAdapter {
private LinkedList<Video> mVideos;
private ImageLoader mImageLoader;
public ItemsAdapter(LinkedList<Video> mVideos, ImageLoader mImageLoader) {
super();
this.mVideos = mVideos;
this.mImageLoader = mImageLoader;
}
@Override
public int getCount() {
return mVideos.size();
}
@Override
public Object getItem(int position) {
return mVideos.get(position);
}
@Override
public long getItemId(int position) {
return position;
}
@Override
public View getView(final int position, View convertView, ViewGroup parent) {
ViewHolder vh;
if (convertView == null) {
convertView = LayoutInflater.from(parent.getContext())
.inflate(R.layout.list_item_videos, parent, false);
vh = new ViewHolder();
vh.thumbnail = (ImageView) convertView.findViewById(R.id.thumbnail);
vh.duration = (TextView) convertView.findViewById(R.id.duration);
vh.title = (TextView) convertView.findViewById(R.id.title);
vh.viewCount = (TextView) convertView.findViewById(R.id.view_count);
convertView.setTag(vh);
} else {
vh = (ViewHolder) convertView.getTag();
}
//加载图片
mImageLoader.get(mVideos.get(position).getThumbnail_v2(),
ImageLoader.getImageListener(vh.thumbnail,
R.drawable.ic_launcher, R.drawable.ic_launcher));
//显示播放时长
vh.duration.setText(Utils.getDurationString(mVideos.get(position).getDuration()));
//显示视频标题
vh.title.setText(mVideos.get(position).getTitle());
//显示播放次数(这里使用字符串资源格式化)
vh.viewCount.setText(String.format(getString(R.string.view_count),
Utils.formatViewCount(mVideos.get(position).getView_count(),
parent.getContext())));
//当滑到末尾的位置时加载更多Video
if (position == mListVideos.size() - 2) {
mVolleySingleton.addToRequestQueue(buildQueryVideosRequest());
}
return convertView;
}
private class ViewHolder {
ImageView thumbnail;
TextView duration;
TextView title;
TextView viewCount;
}
}
}
| app/src/main/java/com/optimalorange/cooltechnologies/ui/fragment/ListVideosFragment.java | package com.optimalorange.cooltechnologies.ui.fragment;
import com.android.volley.Response;
import com.android.volley.VolleyError;
import com.android.volley.toolbox.ImageLoader;
import com.etsy.android.grid.StaggeredGridView;
import com.optimalorange.cooltechnologies.R;
import com.optimalorange.cooltechnologies.entity.Video;
import com.optimalorange.cooltechnologies.ui.view.PullRefreshLayout;
import com.optimalorange.cooltechnologies.util.Utils;
import com.optimalorange.cooltechnologies.util.VideosRequest;
import com.optimalorange.cooltechnologies.util.VolleySingleton;
import android.app.Fragment;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import android.widget.ImageView;
import android.widget.TextView;
import java.util.LinkedList;
import java.util.List;
/**
* Created by WANGZHENGZE on 2014/11/20.
* 热门
*/
public class ListVideosFragment extends Fragment {
// Fragment初始化参数
/**
* 应当显示的Video的genre(类型,示例:手机)<br/>
* Type: String
*
* @see #newInstance(String genre)
*/
public static final String ARGUMENT_KEY_GENRE =
ListVideosFragment.class.getName() + ".argument.KEY_GENRE";
private static final String CATEGORY_LABEL_OF_TECH = "科技";
private String mYoukuClientId;
private VolleySingleton mVolleySingleton;
private String mGenre;
private StaggeredGridView mGridView;
private PullRefreshLayout mPullRefreshLayout;
private ItemsAdapter mItemsAdapter;
private LinkedList<Video> mListVideos = new LinkedList<Video>();
/**
* 获取Video(见entity包中Video)
*
* @return VideoRequest
*/
private VideosRequest buildQueryVideosRequest() {
VideosRequest.Builder builder = new VideosRequest.Builder()
.setClient_id(mYoukuClientId)
.setCategory(CATEGORY_LABEL_OF_TECH)
.setPeriod(VideosRequest.Builder.PERIOD.WEEK)
.setResponseListener(new Response.Listener<List<Video>>() {
@Override
public void onResponse(List<Video> videos) {
for (Video mVideo : videos) {
mListVideos.add(mVideo);
mItemsAdapter.notifyDataSetChanged();
}
}
})
.setErrorListener(new Response.ErrorListener() {
@Override
public void onErrorResponse(VolleyError error) {
error.printStackTrace();
}
});
if (mGenre != null) {
builder.setGenre(mGenre);
}
return builder.build();
}
/**
* 用于 创建设置有指定参数的新{@link ListVideosFragment}实例的 工厂方法
*
* @param genre 应当显示的Video的genre(类型,示例:手机)
* @return 设置有指定参数的新实例
* @see #ARGUMENT_KEY_GENRE
*/
public static ListVideosFragment newInstance(String genre) {
ListVideosFragment fragment = new ListVideosFragment();
Bundle arguments = new Bundle();
arguments.putString(ARGUMENT_KEY_GENRE, genre);
fragment.setArguments(arguments);
return fragment;
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
// 如果设置过Arguments,应用之
if (getArguments() != null) {
mGenre = getArguments().getString(ARGUMENT_KEY_GENRE);
}
mYoukuClientId = getString(R.string.youku_client_id);
mVolleySingleton = VolleySingleton.getInstance(getActivity());
mVolleySingleton.addToRequestQueue(buildQueryVideosRequest());
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
return inflater.inflate(R.layout.fragment_list_videos, container, false);
}
@Override
public void onViewCreated(View view, Bundle savedInstanceState) {
super.onViewCreated(view, savedInstanceState);
mGridView = (StaggeredGridView) view.findViewById(R.id.grid_view);
mPullRefreshLayout = (PullRefreshLayout) view.findViewById(R.id.pull_refresh_layout);
mItemsAdapter = new ItemsAdapter(mListVideos, mVolleySingleton.getImageLoader());
mGridView.setAdapter(mItemsAdapter);
mPullRefreshLayout.setOnRefreshListener(new PullRefreshLayout.OnRefreshListener(){
@Override
public void onRefresh() {
mPullRefreshLayout.postDelayed(new Runnable() {
@Override
public void run() {
mPullRefreshLayout.setRefreshing(false);
}
}, 4000);
}
});
}
/**
* 热门视频的图片墙的适配器
*
* @author Zhou Peican
*/
public class ItemsAdapter extends BaseAdapter {
private LinkedList<Video> mVideos;
private ImageLoader mImageLoader;
public ItemsAdapter(LinkedList<Video> mVideos, ImageLoader mImageLoader) {
super();
this.mVideos = mVideos;
this.mImageLoader = mImageLoader;
}
@Override
public int getCount() {
return mVideos.size();
}
@Override
public Object getItem(int position) {
return mVideos.get(position);
}
@Override
public long getItemId(int position) {
return position;
}
@Override
public View getView(final int position, View convertView, ViewGroup parent) {
ViewHolder vh;
if (convertView == null) {
convertView = LayoutInflater.from(parent.getContext())
.inflate(R.layout.list_item_videos, parent, false);
vh = new ViewHolder();
vh.thumbnail = (ImageView) convertView.findViewById(R.id.thumbnail);
vh.duration = (TextView) convertView.findViewById(R.id.duration);
vh.title = (TextView) convertView.findViewById(R.id.title);
vh.viewCount = (TextView) convertView.findViewById(R.id.view_count);
convertView.setTag(vh);
} else {
vh = (ViewHolder) convertView.getTag();
}
//加载图片
mImageLoader.get(mVideos.get(position).getThumbnail_v2(),
ImageLoader.getImageListener(vh.thumbnail,
R.drawable.ic_launcher, R.drawable.ic_launcher));
//显示播放时长
vh.duration.setText(Utils.getDurationString(mVideos.get(position).getDuration()));
//显示视频标题
vh.title.setText(mVideos.get(position).getTitle());
//显示播放次数(这里使用字符串资源格式化)
vh.viewCount.setText(String.format(getString(R.string.view_count),
Utils.formatViewCount(mVideos.get(position).getView_count(),
parent.getContext())));
return convertView;
}
private class ViewHolder {
ImageView thumbnail;
TextView duration;
TextView title;
TextView viewCount;
}
}
}
| 修改ListVideosFragment.java
增加了下拉刷新和上滑自动加载更多的功能
| app/src/main/java/com/optimalorange/cooltechnologies/ui/fragment/ListVideosFragment.java | 修改ListVideosFragment.java 增加了下拉刷新和上滑自动加载更多的功能 | <ide><path>pp/src/main/java/com/optimalorange/cooltechnologies/ui/fragment/ListVideosFragment.java
<ide> public class ListVideosFragment extends Fragment {
<ide>
<ide> // Fragment初始化参数
<add>
<ide> /**
<ide> * 应当显示的Video的genre(类型,示例:手机)<br/>
<ide> * Type: String
<ide> private VolleySingleton mVolleySingleton;
<ide>
<ide> private String mGenre;
<add>
<add> private int mPage = 1;
<ide>
<ide> private StaggeredGridView mGridView;
<ide>
<ide> VideosRequest.Builder builder = new VideosRequest.Builder()
<ide> .setClient_id(mYoukuClientId)
<ide> .setCategory(CATEGORY_LABEL_OF_TECH)
<add> .setPage(mPage)
<ide> .setPeriod(VideosRequest.Builder.PERIOD.WEEK)
<add> .setOrderby(VideosRequest.Builder.ORDER_BY.VIEW_COUNT)
<ide> .setResponseListener(new Response.Listener<List<Video>>() {
<ide> @Override
<ide> public void onResponse(List<Video> videos) {
<ide> for (Video mVideo : videos) {
<ide> mListVideos.add(mVideo);
<del> mItemsAdapter.notifyDataSetChanged();
<add> if (mItemsAdapter != null) {
<add> mItemsAdapter.notifyDataSetChanged();
<add> }
<add> if (mPullRefreshLayout != null) {
<add> mPullRefreshLayout.setRefreshing(false);
<add> }
<ide> }
<ide> }
<ide> })
<ide> error.printStackTrace();
<ide> }
<ide> });
<add>
<add> //为下一次请求获取Video翻页
<add> mPage++;
<add>
<add> //如果没设置mGenre就用默认的,如果设置了mGenre就请求相应的类型Video
<ide> if (mGenre != null) {
<ide> builder.setGenre(mGenre);
<ide> }
<ide> mItemsAdapter = new ItemsAdapter(mListVideos, mVolleySingleton.getImageLoader());
<ide> mGridView.setAdapter(mItemsAdapter);
<ide>
<del> mPullRefreshLayout.setOnRefreshListener(new PullRefreshLayout.OnRefreshListener(){
<add> mPullRefreshLayout.setOnRefreshListener(new PullRefreshLayout.OnRefreshListener() {
<ide> @Override
<ide> public void onRefresh() {
<del> mPullRefreshLayout.postDelayed(new Runnable() {
<del> @Override
<del> public void run() {
<del> mPullRefreshLayout.setRefreshing(false);
<del> }
<del> }, 4000);
<add> //每次刷新时去除所有Video
<add> mListVideos.clear();
<add> //重新请求第一页的内容
<add> mPage = 1;
<add> mItemsAdapter.notifyDataSetChanged();
<add> //开始请求刷新Video
<add> mVolleySingleton.addToRequestQueue(buildQueryVideosRequest());
<ide> }
<ide> });
<ide> }
<ide> Utils.formatViewCount(mVideos.get(position).getView_count(),
<ide> parent.getContext())));
<ide>
<add> //当滑到末尾的位置时加载更多Video
<add> if (position == mListVideos.size() - 2) {
<add> mVolleySingleton.addToRequestQueue(buildQueryVideosRequest());
<add> }
<add>
<ide> return convertView;
<ide> }
<ide> |
|
Java | epl-1.0 | 01408cbe55a0ecf82ee198b2da40a6a7edb50ebe | 0 | tectronics/mercurialeclipse,boa0332/mercurialeclipse,naidu/mercurialeclipse,leinier/mercurialeclipse | /*******************************************************************************
* Copyright (c) 2005-2009 VecTrace (Zingo Andersen) and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Andrei Loskutov (Intland) - implementation
*******************************************************************************/
package com.vectrace.MercurialEclipse.team.cache;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
import org.eclipse.core.resources.IFile;
import org.eclipse.core.resources.IProject;
import org.eclipse.core.resources.IResource;
import org.eclipse.jface.preference.IPreferenceStore;
import com.vectrace.MercurialEclipse.MercurialEclipsePlugin;
import com.vectrace.MercurialEclipse.commands.HgIncomingClient;
import com.vectrace.MercurialEclipse.commands.HgOutgoingClient;
import com.vectrace.MercurialEclipse.exception.HgException;
import com.vectrace.MercurialEclipse.model.ChangeSet;
import com.vectrace.MercurialEclipse.model.HgRoot;
import com.vectrace.MercurialEclipse.model.IHgRepositoryLocation;
import com.vectrace.MercurialEclipse.model.ChangeSet.Direction;
import com.vectrace.MercurialEclipse.team.MercurialTeamProvider;
import com.vectrace.MercurialEclipse.utils.ResourceUtils;
/**
* A base class for remote caches (caching changesets which are either not present
* locally but existing on the server, or existing locally, but not present on the server).
* <p>
* The cache keeps the state automatically (and fetches the data on client request only), to avoid
* unneeded client-server communication.
* <p>
* There is no guarantee that the data in the cache is up-to-date with the server. To get the
* latest data, clients have explicitely refresh or clean the cache before using it.
* <p>
* The cache has empty ("invalid" state) before the first client request and automatically
* retrieves the data on first client request. So it becames "valid" state and does not refresh the
* data until some operation "clears" or explicitely requests a "refresh" of the cache. After the
* "clear" operation the cache is going to the initial "invalid" state again. After "clear" and
* "refresh", a notification is sent to the observing clients.
* <p>
* <b>Implementation note 1</b> this cache <b>automatically</b> keeps the "valid" state for given
* project/repository pair. Before each "get" request the cache validates itself. If the cached
* value is NULL, then the cache state is invalid, and new data is fetched. If the cached value is
* an object (even empty set), then the cache is "valid" (there is simply no data on the server).
* <p>
* <b>Implementation note 2</b> the cache sends different notifications depending on what kind of
* "state change" happened. After "clear", a set with only one "project" object is sent. After
* "refresh", a set with all changed elements is sent, which may also include a project.
*
* @author bastian
* @author Andrei Loskutov
* @author <a href="mailto:[email protected]">Adam Berkes</a>
*/
public abstract class AbstractRemoteCache extends AbstractCache {
/**
* Map hg root -> branch -> repo -> projects -> changeset
*/
protected final Map<HgRoot, Set<RemoteData>> repoDatas;
protected final Map<RemoteKey, RemoteData> fastRepoMap;
protected final Direction direction;
/**
* @param direction non null
*/
public AbstractRemoteCache(Direction direction) {
this.direction = direction;
repoDatas = new HashMap<HgRoot, Set<RemoteData>>();
fastRepoMap = new HashMap<RemoteKey, RemoteData>();
}
/**
* does nothing, clients has to override and update preferences
*/
@Override
protected void configureFromPreferences(IPreferenceStore store) {
// does nothing
}
public void clear(IHgRepositoryLocation repo) {
synchronized (repoDatas) {
Collection<Set<RemoteData>> values = repoDatas.values();
for (Set<RemoteData> set : values) {
Iterator<RemoteData> iterator = set.iterator();
while (iterator.hasNext()) {
RemoteData data = iterator.next();
if(repo.equals(data.getRepo())){
iterator.remove();
fastRepoMap.remove(data.getKey());
}
}
}
notifyChanged(repo, false);
}
}
public void clear(HgRoot root, boolean notify) {
synchronized (repoDatas) {
Set<RemoteData> set = repoDatas.get(root);
if(set == null){
return;
}
for (RemoteData data : set) {
fastRepoMap.remove(data.getKey());
}
set.clear();
}
if(notify) {
notifyChanged(root, false);
}
}
@SuppressWarnings("unchecked")
private void notifyChanged(HgRoot root, boolean expandMembers) {
Set<?> projects = ResourceUtils.getProjects(root);
notifyChanged((Set<IResource>) projects, expandMembers);
}
/**
* @param notify true to send a notification if the cache state changes after this operation,
* false to supress the event notification
*/
public void clear(IHgRepositoryLocation repo, IProject project, boolean notify) {
synchronized (repoDatas) {
HgRoot hgRoot = MercurialTeamProvider.getHgRoot(project);
Set<RemoteData> set = repoDatas.get(hgRoot);
if(set == null){
return;
}
Iterator<RemoteData> iterator = set.iterator();
while(iterator.hasNext()) {
RemoteData data = iterator.next();
if(repo.equals(data.getRepo())){
iterator.remove();
fastRepoMap.remove(data.getKey());
}
}
}
if(notify) {
notifyChanged(repo, false);
}
}
@SuppressWarnings("unchecked")
protected void notifyChanged(IHgRepositoryLocation repo, boolean expandMembers){
Set<?> projects = MercurialEclipsePlugin.getRepoManager().getAllRepoLocationProjects(repo);
notifyChanged((Set<IResource>) projects, expandMembers);
}
@Override
protected void projectDeletedOrClosed(IProject project) {
synchronized (repoDatas) {
for (RemoteData data : fastRepoMap.values()) {
data.clear(project);
}
}
}
/**
* Gets all (in or out) changesets of the given location for the given
* IResource.
*
* @param branch name of branch (default or "" for unnamed) or null if branch unaware
* @return never null
*/
public SortedSet<ChangeSet> getChangeSets(IResource resource,
IHgRepositoryLocation repository, String branch) throws HgException {
IProject project = resource.getProject();
// check if mercurial is team provider and if we're working on an open project
if (!project.isAccessible() || !MercurialTeamProvider.isHgTeamProviderFor(project)){
return EMPTY_SET;
}
HgRoot hgRoot = MercurialTeamProvider.getHgRoot(resource);
RemoteKey key = new RemoteKey(hgRoot, repository, branch);
synchronized (repoDatas){
RemoteData data = fastRepoMap.get(key);
if(data == null){
// lazy loading: refresh cache on demand only.
// lock the cache till update is complete
addResourcesToCache(key);
// XXX not sure if the full repo refresh event need to be sent here
// notifyChanged(key.getRepo(), true);
notifyChanged(hgRoot, true);
}
RemoteData remoteData = fastRepoMap.get(key);
if(remoteData != null) {
return remoteData.getChangeSets(resource);
}
}
return EMPTY_SET;
}
/**
* Gets all (in or out) changesets of the given location for the given
* IResource.
*
* @param branch name of branch (default or "" for unnamed) or null if branch unaware
* @return never null
*/
public SortedSet<ChangeSet> hasChangeSets(IResource resource, IHgRepositoryLocation repository,
String branch) {
IProject project = resource.getProject();
// check if mercurial is team provider and if we're working on an open project
if (!project.isAccessible() || !MercurialTeamProvider.isHgTeamProviderFor(project)){
return EMPTY_SET;
}
HgRoot hgRoot;
try {
hgRoot = MercurialTeamProvider.getHgRoot(resource);
} catch (HgException e) {
MercurialEclipsePlugin.logError(e);
return EMPTY_SET;
}
RemoteKey key = new RemoteKey(hgRoot, repository, branch);
synchronized (repoDatas){
RemoteData data = fastRepoMap.get(key);
if(data == null){
return EMPTY_SET;
}
return data.getChangeSets(resource);
}
}
/**
* Gets all (in or out) changesets of the given hg root
*
* @param branch name of branch (default or "" for unnamed) or null if branch unaware
* @return never null
*/
public SortedSet<ChangeSet> getChangeSets(HgRoot hgRoot,
IHgRepositoryLocation repository, String branch) throws HgException {
RemoteKey key = new RemoteKey(hgRoot, repository, branch);
synchronized (repoDatas){
RemoteData data = fastRepoMap.get(key);
if(data == null){
// lazy loading: refresh cache on demand only.
// lock the cache till update is complete
addResourcesToCache(key);
notifyChanged(hgRoot, true);
}
RemoteData remoteData = fastRepoMap.get(key);
if(remoteData != null) {
return remoteData.getChangeSets();
}
}
return EMPTY_SET;
}
/**
* Gets all (in or out) changesets for given hg root, which doesn't have any relationship to the
* projects inside Eclipse workspace (e.g. changesets with no files or with files which are
* unknown in terms of Eclipse workspace). Specifying an optional 'canIgnore' argument
* may help to optimize the work on huge amount of changesets or files inside.
*
* @param canIgnore
* (may be null) changesets which are already known to be mapped and can be ignored.
*
* @param branch
* name of branch (default or "" for unnamed) or null if branch unaware
* @return never null
*/
public SortedSet<ChangeSet> getUnmappedChangeSets(HgRoot hgRoot,
IHgRepositoryLocation repository, String branch, Set<ChangeSet> canIgnore) throws HgException {
SortedSet<ChangeSet> all = getChangeSets(hgRoot, repository, branch);
if(all.isEmpty()){
return all;
}
if(canIgnore != null && !canIgnore.isEmpty()) {
// 'all' was unmodifiable set, so create a copy here for filtering
all = new TreeSet<ChangeSet>(all);
all.removeAll(canIgnore);
if(all.isEmpty()){
return all;
}
}
TreeSet<ChangeSet> sorted = new TreeSet<ChangeSet>();
for (ChangeSet cs : all) {
if(cs.isEmpty()){
sorted.add(cs);
continue;
}
Set<IFile> files = cs.getFiles();
if(files.isEmpty()){
sorted.add(cs);
}
}
return sorted;
}
/**
* Gets all resources that are changed in (in or out) changesets of given
* repository, even resources not known in local workspace.
*
* @return never null
*/
public Set<IResource> getMembers(IResource resource,
IHgRepositoryLocation repository, String branch) throws HgException {
SortedSet<ChangeSet> changeSets;
synchronized (repoDatas){
// make sure data is there: will refresh (in or out) changesets if needed
changeSets = getChangeSets(resource, repository, branch);
return getMembers(resource, changeSets);
}
}
/**
* @return never null
*/
private static Set<IResource> getMembers(IResource resource,
SortedSet<ChangeSet> changeSets) {
Set<IResource> members = new HashSet<IResource>();
if (changeSets == null) {
return members;
}
for (ChangeSet cs : changeSets) {
members.addAll(cs.getFiles());
}
return members;
}
private void addResourcesToCache(RemoteKey key) throws HgException {
if(debug) {
System.out.println("!fetch " + direction + " for " + key);
}
fastRepoMap.remove(key);
// get changesets from hg
RemoteData data = null;
if (direction == Direction.OUTGOING) {
data = HgOutgoingClient.getOutgoing(key);
} else {
data = HgIncomingClient.getHgIncoming(key);
}
if(debug) {
System.out.println("!got " + data.getChangeSets().size() + " " + direction + " changesets");
}
fastRepoMap.put(key, data);
Set<RemoteData> set = repoDatas.get(key.getRoot());
if(set == null){
set = new HashSet<RemoteData>();
repoDatas.put(key.getRoot(), set);
}
set.add(data);
}
/**
* Get newest revision of resource on given branch
* @param resource Eclipse resource (e.g. a file) to find latest changeset for
* @param branch name of branch (default or "" for unnamed) or null if branch unaware
*/
public ChangeSet getNewestChangeSet(IResource resource,
IHgRepositoryLocation repository, String branch) throws HgException {
if (MercurialStatusCache.getInstance().isSupervised(resource) || !resource.exists()) {
synchronized (repoDatas){
// make sure data is there: will refresh (in or out) changesets if needed
SortedSet<ChangeSet> changeSets = getChangeSets(resource, repository, branch);
if (changeSets != null && changeSets.size() > 0) {
return changeSets.last();
}
}
}
return null;
}
}
| src/com/vectrace/MercurialEclipse/team/cache/AbstractRemoteCache.java | /*******************************************************************************
* Copyright (c) 2005-2009 VecTrace (Zingo Andersen) and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Andrei Loskutov (Intland) - implementation
*******************************************************************************/
package com.vectrace.MercurialEclipse.team.cache;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
import org.eclipse.core.resources.IFile;
import org.eclipse.core.resources.IProject;
import org.eclipse.core.resources.IResource;
import org.eclipse.jface.preference.IPreferenceStore;
import com.vectrace.MercurialEclipse.MercurialEclipsePlugin;
import com.vectrace.MercurialEclipse.commands.HgIncomingClient;
import com.vectrace.MercurialEclipse.commands.HgOutgoingClient;
import com.vectrace.MercurialEclipse.exception.HgException;
import com.vectrace.MercurialEclipse.model.ChangeSet;
import com.vectrace.MercurialEclipse.model.ChangeSet.Direction;
import com.vectrace.MercurialEclipse.model.HgRoot;
import com.vectrace.MercurialEclipse.model.IHgRepositoryLocation;
import com.vectrace.MercurialEclipse.team.MercurialTeamProvider;
import com.vectrace.MercurialEclipse.utils.ResourceUtils;
/**
* A base class for remote caches (caching changesets which are either not present
* locally but existing on the server, or existing locally, but not present on the server).
* <p>
* The cache keeps the state automatically (and fetches the data on client request only), to avoid
* unneeded client-server communication.
* <p>
* There is no guarantee that the data in the cache is up-to-date with the server. To get the
* latest data, clients have explicitely refresh or clean the cache before using it.
* <p>
* The cache has empty ("invalid" state) before the first client request and automatically
* retrieves the data on first client request. So it becames "valid" state and does not refresh the
* data until some operation "clears" or explicitely requests a "refresh" of the cache. After the
* "clear" operation the cache is going to the initial "invalid" state again. After "clear" and
* "refresh", a notification is sent to the observing clients.
* <p>
* <b>Implementation note 1</b> this cache <b>automatically</b> keeps the "valid" state for given
* project/repository pair. Before each "get" request the cache validates itself. If the cached
* value is NULL, then the cache state is invalid, and new data is fetched. If the cached value is
* an object (even empty set), then the cache is "valid" (there is simply no data on the server).
* <p>
* <b>Implementation note 2</b> the cache sends different notifications depending on what kind of
* "state change" happened. After "clear", a set with only one "project" object is sent. After
* "refresh", a set with all changed elements is sent, which may also include a project.
*
* @author bastian
* @author Andrei Loskutov
* @author <a href="mailto:[email protected]">Adam Berkes</a>
*/
public abstract class AbstractRemoteCache extends AbstractCache {
/**
* Map hg root -> branch -> repo -> projects -> changeset
*/
protected final Map<HgRoot, Set<RemoteData>> repoDatas;
protected final Map<RemoteKey, RemoteData> fastRepoMap;
protected final Direction direction;
/**
* @param direction non null
*/
public AbstractRemoteCache(Direction direction) {
this.direction = direction;
repoDatas = new HashMap<HgRoot, Set<RemoteData>>();
fastRepoMap = new HashMap<RemoteKey, RemoteData>();
}
/**
* does nothing, clients has to override and update preferences
*/
@Override
protected void configureFromPreferences(IPreferenceStore store) {
// does nothing
}
public void clear(IHgRepositoryLocation repo) {
synchronized (repoDatas) {
Collection<Set<RemoteData>> values = repoDatas.values();
for (Set<RemoteData> set : values) {
Iterator<RemoteData> iterator = set.iterator();
while (iterator.hasNext()) {
RemoteData data = iterator.next();
if(repo.equals(data.getRepo())){
iterator.remove();
fastRepoMap.remove(data.getKey());
}
}
}
notifyChanged(repo, false);
}
}
public void clear(HgRoot root, boolean notify) {
synchronized (repoDatas) {
Set<RemoteData> set = repoDatas.get(root);
if(set == null){
return;
}
for (RemoteData data : set) {
fastRepoMap.remove(data.getKey());
}
set.clear();
}
if(notify) {
notifyChanged(root, false);
}
}
@SuppressWarnings("unchecked")
private void notifyChanged(HgRoot root, boolean expandMembers) {
Set<?> projects = ResourceUtils.getProjects(root);
notifyChanged((Set<IResource>) projects, expandMembers);
}
/**
* @param notify true to send a notification if the cache state changes after this operation,
* false to supress the event notification
*/
public void clear(IHgRepositoryLocation repo, IProject project, boolean notify) {
synchronized (repoDatas) {
HgRoot hgRoot = MercurialTeamProvider.getHgRoot(project);
Set<RemoteData> set = repoDatas.get(hgRoot);
if(set == null){
return;
}
Iterator<RemoteData> iterator = set.iterator();
while(iterator.hasNext()) {
RemoteData data = iterator.next();
if(repo.equals(data.getRepo())){
iterator.remove();
fastRepoMap.remove(data.getKey());
}
}
}
if(notify) {
notifyChanged(repo, false);
}
}
@SuppressWarnings("unchecked")
protected void notifyChanged(IHgRepositoryLocation repo, boolean expandMembers){
Set<?> projects = MercurialEclipsePlugin.getRepoManager().getAllRepoLocationProjects(repo);
notifyChanged((Set<IResource>) projects, expandMembers);
}
@Override
protected void projectDeletedOrClosed(IProject project) {
synchronized (repoDatas) {
for (RemoteData data : fastRepoMap.values()) {
data.clear(project);
}
}
}
/**
* Gets all (in or out) changesets of the given location for the given
* IResource.
*
* @param branch name of branch (default or "" for unnamed) or null if branch unaware
* @return never null
*/
public SortedSet<ChangeSet> getChangeSets(IResource resource,
IHgRepositoryLocation repository, String branch) throws HgException {
IProject project = resource.getProject();
// check if mercurial is team provider and if we're working on an open project
if (!project.isAccessible() || !MercurialTeamProvider.isHgTeamProviderFor(project)){
return EMPTY_SET;
}
HgRoot hgRoot = MercurialTeamProvider.getHgRoot(project);
RemoteKey key = new RemoteKey(hgRoot, repository, branch);
synchronized (repoDatas){
RemoteData data = fastRepoMap.get(key);
if(data == null){
// lazy loading: refresh cache on demand only.
// lock the cache till update is complete
addResourcesToCache(key);
// XXX not sure if the full repo refresh event need to be sent here
// notifyChanged(key.getRepo(), true);
notifyChanged(hgRoot, true);
}
RemoteData remoteData = fastRepoMap.get(key);
if(remoteData != null) {
return remoteData.getChangeSets(resource);
}
}
return EMPTY_SET;
}
/**
* Gets all (in or out) changesets of the given location for the given
* IResource.
*
* @param branch name of branch (default or "" for unnamed) or null if branch unaware
* @return never null
*/
public SortedSet<ChangeSet> hasChangeSets(IResource resource, IHgRepositoryLocation repository,
String branch) {
IProject project = resource.getProject();
// check if mercurial is team provider and if we're working on an open project
if (!project.isAccessible() || !MercurialTeamProvider.isHgTeamProviderFor(project)){
return EMPTY_SET;
}
HgRoot hgRoot = MercurialTeamProvider.getHgRoot(project);
RemoteKey key = new RemoteKey(hgRoot, repository, branch);
synchronized (repoDatas){
RemoteData data = fastRepoMap.get(key);
if(data == null){
return EMPTY_SET;
}
return data.getChangeSets(resource);
}
}
/**
* Gets all (in or out) changesets of the given hg root
*
* @param branch name of branch (default or "" for unnamed) or null if branch unaware
* @return never null
*/
public SortedSet<ChangeSet> getChangeSets(HgRoot hgRoot,
IHgRepositoryLocation repository, String branch) throws HgException {
RemoteKey key = new RemoteKey(hgRoot, repository, branch);
synchronized (repoDatas){
RemoteData data = fastRepoMap.get(key);
if(data == null){
// lazy loading: refresh cache on demand only.
// lock the cache till update is complete
addResourcesToCache(key);
notifyChanged(hgRoot, true);
}
RemoteData remoteData = fastRepoMap.get(key);
if(remoteData != null) {
return remoteData.getChangeSets();
}
}
return EMPTY_SET;
}
/**
* Gets all (in or out) changesets for given hg root, which doesn't have any relationship to the
* projects inside Eclipse workspace (e.g. changesets with no files or with files which are
* unknown in terms of Eclipse workspace). Specifying an optional 'canIgnore' argument
* may help to optimize the work on huge amount of changesets or files inside.
*
* @param canIgnore
* (may be null) changesets which are already known to be mapped and can be ignored.
*
* @param branch
* name of branch (default or "" for unnamed) or null if branch unaware
* @return never null
*/
public SortedSet<ChangeSet> getUnmappedChangeSets(HgRoot hgRoot,
IHgRepositoryLocation repository, String branch, Set<ChangeSet> canIgnore) throws HgException {
SortedSet<ChangeSet> all = getChangeSets(hgRoot, repository, branch);
if(all.isEmpty()){
return all;
}
if(canIgnore != null && !canIgnore.isEmpty()) {
// 'all' was unmodifiable set, so create a copy here for filtering
all = new TreeSet<ChangeSet>(all);
all.removeAll(canIgnore);
if(all.isEmpty()){
return all;
}
}
TreeSet<ChangeSet> sorted = new TreeSet<ChangeSet>();
for (ChangeSet cs : all) {
if(cs.isEmpty()){
sorted.add(cs);
continue;
}
Set<IFile> files = cs.getFiles();
if(files.isEmpty()){
sorted.add(cs);
}
}
return sorted;
}
/**
* Gets all resources that are changed in (in or out) changesets of given
* repository, even resources not known in local workspace.
*
* @return never null
*/
public Set<IResource> getMembers(IResource resource,
IHgRepositoryLocation repository, String branch) throws HgException {
SortedSet<ChangeSet> changeSets;
synchronized (repoDatas){
// make sure data is there: will refresh (in or out) changesets if needed
changeSets = getChangeSets(resource, repository, branch);
return getMembers(resource, changeSets);
}
}
/**
* @return never null
*/
private static Set<IResource> getMembers(IResource resource,
SortedSet<ChangeSet> changeSets) {
Set<IResource> members = new HashSet<IResource>();
if (changeSets == null) {
return members;
}
for (ChangeSet cs : changeSets) {
members.addAll(cs.getFiles());
}
return members;
}
private void addResourcesToCache(RemoteKey key) throws HgException {
if(debug) {
System.out.println("!fetch " + direction + " for " + key);
}
fastRepoMap.remove(key);
// get changesets from hg
RemoteData data = null;
if (direction == Direction.OUTGOING) {
data = HgOutgoingClient.getOutgoing(key);
} else {
data = HgIncomingClient.getHgIncoming(key);
}
if(debug) {
System.out.println("!got " + data.getChangeSets().size() + " " + direction + " changesets");
}
fastRepoMap.put(key, data);
Set<RemoteData> set = repoDatas.get(key.getRoot());
if(set == null){
set = new HashSet<RemoteData>();
repoDatas.put(key.getRoot(), set);
}
set.add(data);
}
/**
* Get newest revision of resource on given branch
* @param resource Eclipse resource (e.g. a file) to find latest changeset for
* @param branch name of branch (default or "" for unnamed) or null if branch unaware
*/
public ChangeSet getNewestChangeSet(IResource resource,
IHgRepositoryLocation repository, String branch) throws HgException {
if (MercurialStatusCache.getInstance().isSupervised(resource) || !resource.exists()) {
synchronized (repoDatas){
// make sure data is there: will refresh (in or out) changesets if needed
SortedSet<ChangeSet> changeSets = getChangeSets(resource, repository, branch);
if (changeSets != null && changeSets.size() > 0) {
return changeSets.last();
}
}
}
return null;
}
}
| The remote changeset caches should now behave correctly
| src/com/vectrace/MercurialEclipse/team/cache/AbstractRemoteCache.java | The remote changeset caches should now behave correctly | <ide><path>rc/com/vectrace/MercurialEclipse/team/cache/AbstractRemoteCache.java
<ide> import com.vectrace.MercurialEclipse.commands.HgOutgoingClient;
<ide> import com.vectrace.MercurialEclipse.exception.HgException;
<ide> import com.vectrace.MercurialEclipse.model.ChangeSet;
<del>import com.vectrace.MercurialEclipse.model.ChangeSet.Direction;
<ide> import com.vectrace.MercurialEclipse.model.HgRoot;
<ide> import com.vectrace.MercurialEclipse.model.IHgRepositoryLocation;
<add>import com.vectrace.MercurialEclipse.model.ChangeSet.Direction;
<ide> import com.vectrace.MercurialEclipse.team.MercurialTeamProvider;
<ide> import com.vectrace.MercurialEclipse.utils.ResourceUtils;
<ide>
<ide> if (!project.isAccessible() || !MercurialTeamProvider.isHgTeamProviderFor(project)){
<ide> return EMPTY_SET;
<ide> }
<del> HgRoot hgRoot = MercurialTeamProvider.getHgRoot(project);
<add> HgRoot hgRoot = MercurialTeamProvider.getHgRoot(resource);
<ide> RemoteKey key = new RemoteKey(hgRoot, repository, branch);
<ide> synchronized (repoDatas){
<ide> RemoteData data = fastRepoMap.get(key);
<ide> if (!project.isAccessible() || !MercurialTeamProvider.isHgTeamProviderFor(project)){
<ide> return EMPTY_SET;
<ide> }
<del> HgRoot hgRoot = MercurialTeamProvider.getHgRoot(project);
<add> HgRoot hgRoot;
<add> try {
<add> hgRoot = MercurialTeamProvider.getHgRoot(resource);
<add> } catch (HgException e) {
<add> MercurialEclipsePlugin.logError(e);
<add> return EMPTY_SET;
<add> }
<ide> RemoteKey key = new RemoteKey(hgRoot, repository, branch);
<ide> synchronized (repoDatas){
<ide> RemoteData data = fastRepoMap.get(key); |
|
Java | mit | d14a15bf68abcd087a5ff692d96be3670f0051e2 | 0 | KyleCe/ScreenLocker2 | package com.ce.game.screenlocker.util;
import android.content.Context;
import android.view.View;
import android.view.WindowManager;
import android.view.WindowManager.LayoutParams;
import com.ce.game.screenlocker.common.DU;
/**
* Created by KyleCe on 2016/5/25.
*
* @author: KyleCe
*/
final public class LockLayer {
private WindowManager mWindowManager;
private View mLockView;
private LayoutParams mLockViewLayoutParams;
private static LockLayer mLockLayer;
private volatile boolean bIsLocked;
private Context mContext;
public static LockLayer getInstance(Context ctx, View view) {
if (ctx == null || view == null) throw new NullPointerException("Nonnull");
if (mLockLayer == null)
synchronized (ctx) {
if (mLockLayer == null) {
mLockLayer = new LockLayer(ctx, view);
}
}
return mLockLayer;
}
private LockLayer(Context ctx, View v) {
mContext = ctx;
mLockView = v;
init();
}
private void init() {
bIsLocked = false;
mWindowManager = (WindowManager) mContext.getSystemService(Context.WINDOW_SERVICE);
mLockViewLayoutParams = new LayoutParams();
mLockViewLayoutParams.width = LayoutParams.MATCH_PARENT;
mLockViewLayoutParams.height = LayoutParams.MATCH_PARENT;
//实现关键
mLockViewLayoutParams.type = LayoutParams.TYPE_SYSTEM_ERROR;
// 此行代码有时在主界面键按下情况下会出现无法显示和退出,暂时去掉,去掉之后按下主界面键会直接返回主界面
mLockViewLayoutParams.flags = LayoutParams.FLAG_SHOW_WHEN_LOCKED
| LayoutParams.FLAG_DISMISS_KEYGUARD
| LayoutParams.FLAG_KEEP_SCREEN_ON;
mLockViewLayoutParams.alpha = 1f;
mLockViewLayoutParams.screenOrientation = 1;
}
public synchronized void lock() {
if (mLockView != null && !bIsLocked) {
removeIfAttachedAlready();
addViewAndSetFullscreen();
}
bIsLocked = true;
}
public synchronized void unlock() {
if (mWindowManager != null && bIsLocked) {
removeViewAndExitFullscreen();
}
bIsLocked = false;
}
public synchronized void addLockView() {
if (mLockView != null) {
ScreenModeHelper.requestImmersiveFullScreen(mLockView);
if (mLockView.getWindowToken() != null)
mWindowManager.updateViewLayout(mLockView, mLockViewLayoutParams);
else
mWindowManager.addView(mLockView, mLockViewLayoutParams);
}
bIsLocked = true;
}
public synchronized void bringBackLockView() {
if (mLockView == null) return;
addViewAndSetFullscreen();
}
private void removeIfAttachedAlready() {
if (mLockView.getWindowToken() != null || mLockView.getParent() != null) {
DU.sd("window token", mLockView.getWindowToken());
removeViewAndExitFullscreen();
}
}
private void addViewAndSetFullscreen() {
ScreenModeHelper.requestImmersiveFullScreen(mLockView);
try {
mWindowManager.addView(mLockView, mLockViewLayoutParams);
} catch (Exception e) {
// already attach to window, ignore exception
}
}
private void removeViewAndExitFullscreen() {
mWindowManager.removeView(mLockView);
ScreenModeHelper.unsetImmersiveFullScreen(mLockView);
}
public synchronized void removeLockView() {
if (mWindowManager == null || mLockView == null) return;
removeViewAndExitFullscreen();
}
public synchronized void setLockView(View v) {
mLockView = v;
}
public boolean isbIsLocked() {
return bIsLocked ? true : false;
}
}
| app/src/main/java/com/ce/game/screenlocker/util/LockLayer.java | package com.ce.game.screenlocker.util;
import android.content.Context;
import android.view.View;
import android.view.WindowManager;
import android.view.WindowManager.LayoutParams;
import com.ce.game.screenlocker.common.DU;
/**
* Created by KyleCe on 2016/5/25.
*
* @author: KyleCe
*/
final public class LockLayer {
private WindowManager mWindowManager;
private View mLockView;
private LayoutParams mLockViewLayoutParams;
private static LockLayer mLockLayer;
private volatile boolean bIsLocked;
private Context mContext;
public static LockLayer getInstance(Context ctx, View view) {
if (ctx == null || view == null) throw new NullPointerException("Nonnull");
if (mLockLayer == null)
synchronized (ctx) {
if (mLockLayer == null) {
mLockLayer = new LockLayer(ctx, view);
}
}
return mLockLayer;
}
private LockLayer(Context ctx, View v) {
mContext = ctx;
mLockView = v;
init();
}
private void init() {
bIsLocked = false;
mWindowManager = (WindowManager) mContext.getSystemService(Context.WINDOW_SERVICE);
mLockViewLayoutParams = new LayoutParams();
mLockViewLayoutParams.width = LayoutParams.MATCH_PARENT;
mLockViewLayoutParams.height = LayoutParams.MATCH_PARENT;
//实现关键
mLockViewLayoutParams.type = LayoutParams.TYPE_SYSTEM_ERROR;
// 此行代码有时在主界面键按下情况下会出现无法显示和退出,暂时去掉,去掉之后按下主界面键会直接返回主界面
mLockViewLayoutParams.flags = LayoutParams.FLAG_SHOW_WHEN_LOCKED
| LayoutParams.FLAG_DISMISS_KEYGUARD
| LayoutParams.FLAG_KEEP_SCREEN_ON;
}
public synchronized void lock() {
if (mLockView != null && !bIsLocked) {
removeIfAttachedAlready();
addViewAndSetFullscreen();
}
bIsLocked = true;
}
public synchronized void unlock() {
if (mWindowManager != null && bIsLocked) {
removeViewAndExitFullscreen();
}
bIsLocked = false;
}
public synchronized void addLockView() {
if (mLockView != null) {
ScreenModeHelper.requestImmersiveFullScreen(mLockView);
if (mLockView.getWindowToken() != null)
mWindowManager.updateViewLayout(mLockView, mLockViewLayoutParams);
else
mWindowManager.addView(mLockView, mLockViewLayoutParams);
}
bIsLocked = true;
}
public synchronized void bringBackLockView() {
if (mLockView == null) return;
addViewAndSetFullscreen();
}
private void removeIfAttachedAlready() {
if (mLockView.getWindowToken() != null || mLockView.getParent() != null) {
DU.sd("window token", mLockView.getWindowToken());
removeViewAndExitFullscreen();
}
}
private void addViewAndSetFullscreen() {
ScreenModeHelper.requestImmersiveFullScreen(mLockView);
try {
mWindowManager.addView(mLockView, mLockViewLayoutParams);
} catch (Exception e) {
// already attach to window, ignore exception
}
}
private void removeViewAndExitFullscreen() {
mWindowManager.removeView(mLockView);
ScreenModeHelper.unsetImmersiveFullScreen(mLockView);
}
public synchronized void removeLockView() {
if (mWindowManager == null || mLockView == null) return;
removeViewAndExitFullscreen();
}
public synchronized void setLockView(View v) {
mLockView = v;
}
public boolean isbIsLocked() {
return bIsLocked ? true : false;
}
}
| limit the screen orientation
| app/src/main/java/com/ce/game/screenlocker/util/LockLayer.java | limit the screen orientation | <ide><path>pp/src/main/java/com/ce/game/screenlocker/util/LockLayer.java
<ide> mLockViewLayoutParams.flags = LayoutParams.FLAG_SHOW_WHEN_LOCKED
<ide> | LayoutParams.FLAG_DISMISS_KEYGUARD
<ide> | LayoutParams.FLAG_KEEP_SCREEN_ON;
<add>
<add> mLockViewLayoutParams.alpha = 1f;
<add> mLockViewLayoutParams.screenOrientation = 1;
<ide> }
<ide>
<ide> public synchronized void lock() { |
|
JavaScript | apache-2.0 | a8cca7506c2ff3f379c7bcc1386df33c49ed8e42 | 0 | ctamisier/generator-jhipster,liseri/generator-jhipster,vivekmore/generator-jhipster,atomfrede/generator-jhipster,ctamisier/generator-jhipster,atomfrede/generator-jhipster,liseri/generator-jhipster,jhipster/generator-jhipster,liseri/generator-jhipster,jhipster/generator-jhipster,atomfrede/generator-jhipster,jhipster/generator-jhipster,atomfrede/generator-jhipster,vivekmore/generator-jhipster,ctamisier/generator-jhipster,vivekmore/generator-jhipster,jhipster/generator-jhipster,liseri/generator-jhipster,vivekmore/generator-jhipster,ctamisier/generator-jhipster,ctamisier/generator-jhipster,liseri/generator-jhipster,atomfrede/generator-jhipster,jhipster/generator-jhipster,vivekmore/generator-jhipster | /**
* Copyright 2013-2021 the original author or authors from the JHipster project.
*
* This file is part of the JHipster project, see https://www.jhipster.tech/
* for more information.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
const validationOptions = require('../jdl/jhipster/validations');
const databaseTypes = require('../jdl/jhipster/database-types');
const { ANGULAR_X, REACT, VUE } = require('../jdl/jhipster/client-framework-types');
const commonPackageJson = require('./common/templates/package.json');
// Version of Java
const JAVA_VERSION = '11';
const JAVA_COMPATIBLE_VERSIONS = ['11', '12', '13', '14', '15', '16', '17'];
// Version of Node, NPM
const NODE_VERSION = '16.13.1';
const NPM_VERSION = commonPackageJson.devDependencies.npm;
const OPENAPI_GENERATOR_CLI_VERSION = '1.0.13-4.3.1';
const GRADLE_VERSION = '7.3.1';
const JIB_VERSION = '3.1.4';
// Libraries version
const JHIPSTER_DEPENDENCIES_VERSION = '7.5.1-SNAPSHOT';
// The spring-boot version should match the one managed by https://mvnrepository.com/artifact/tech.jhipster/jhipster-dependencies/JHIPSTER_DEPENDENCIES_VERSION
const SPRING_BOOT_VERSION = '2.5.8';
const LIQUIBASE_VERSION = '4.6.1';
// Liquibase XML DTDs are versioned by major.minor only, so strip the patch
// segment (e.g. '4.6.1' -> '4.6'). A split limit of 2 already keeps exactly
// the first two segments, making the previous slice(0, 2) step unnecessary.
const LIQUIBASE_DTD_VERSION = LIQUIBASE_VERSION.split('.', 2).join('.');
const HIBERNATE_VERSION = '5.4.33';
const JACOCO_VERSION = '0.8.7';
const KAFKA_VERSION = '5.5.5';
const JACKSON_DATABIND_NULLABLE_VERSION = '0.2.2';
// Version of docker images
const DOCKER_COMPOSE_FORMAT_VERSION = '3.8';
// const DOCKER_JHIPSTER_REGISTRY = 'ghcr.io/jhipster/jhipster-registry:main';
const DOCKER_JHIPSTER_REGISTRY = 'jhipster/jhipster-registry:v7.1.0';
const DOCKER_JHIPSTER_CONTROL_CENTER = 'jhipster/jhipster-control-center:v0.5.0';
const DOCKER_JAVA_JRE = 'eclipse-temurin:11-jre-focal';
const DOCKER_MYSQL = 'mysql:8.0.27';
const DOCKER_MARIADB = 'mariadb:10.7.1';
const DOCKER_POSTGRESQL = 'postgres:14.1';
const DOCKER_MONGODB = 'mongo:4.4.10';
const DOCKER_COUCHBASE = 'couchbase/server:7.0.0';
const DOCKER_CASSANDRA = 'cassandra:3.11.11';
const DOCKER_MSSQL = 'mcr.microsoft.com/mssql/server:2019-CU13-ubuntu-20.04';
const DOCKER_NEO4J = 'neo4j:4.3.7';
const DOCKER_HAZELCAST_MANAGEMENT_CENTER = 'hazelcast/management-center:4.2021.06';
const DOCKER_MEMCACHED = 'memcached:1.6.12-alpine';
const DOCKER_REDIS = 'redis:6.2.6';
const DOCKER_KEYCLOAK = 'jboss/keycloak:16.1.0'; // The version should match the attribute 'keycloakVersion' from /docker-compose/templates/realm-config/jhipster-realm.json.ejs and /server/templates/src/main/docker/config/realm-config/jhipster-realm.json.ejs
const DOCKER_ELASTICSEARCH = 'docker.elastic.co/elasticsearch/elasticsearch:7.13.3'; // The version should be coherent with the one from spring-data-elasticsearch project
const DOCKER_KAFKA = `confluentinc/cp-kafka:${KAFKA_VERSION}`;
const DOCKER_ZOOKEEPER = `confluentinc/cp-zookeeper:${KAFKA_VERSION}`;
const DOCKER_SONAR = 'sonarqube:9.1.0-community';
const DOCKER_CONSUL = 'consul:1.10.4';
const DOCKER_CONSUL_CONFIG_LOADER = 'jhipster/consul-config-loader:v0.4.1';
const DOCKER_PROMETHEUS = 'prom/prometheus:v2.31.1';
const DOCKER_PROMETHEUS_ALERTMANAGER = 'prom/alertmanager:v0.23.0';
const DOCKER_GRAFANA = 'grafana/grafana:8.2.4';
const DOCKER_JENKINS = 'jenkins/jenkins:lts-jdk11';
const DOCKER_SWAGGER_EDITOR = 'swaggerapi/swagger-editor:latest';
const DOCKER_PROMETHEUS_OPERATOR = 'quay.io/coreos/prometheus-operator:v0.42.1';
const DOCKER_GRAFANA_WATCHER = 'quay.io/coreos/grafana-watcher:v0.0.8';
// Kubernetes versions
const KUBERNETES_CORE_API_VERSION = 'v1';
const KUBERNETES_BATCH_API_VERSION = 'batch/v1';
const KUBERNETES_DEPLOYMENT_API_VERSION = 'apps/v1';
const KUBERNETES_STATEFULSET_API_VERSION = 'apps/v1';
const KUBERNETES_INGRESS_API_VERSION = 'networking.k8s.io/v1beta1';
const KUBERNETES_ISTIO_NETWORKING_API_VERSION = 'networking.istio.io/v1beta1';
const KUBERNETES_RBAC_API_VERSION = 'rbac.authorization.k8s.io/v1';
// Helm versions
const HELM_KAFKA = '^0.20.1';
const HELM_ELASTICSEARCH = '^1.32.0';
const HELM_PROMETHEUS = '^9.2.0';
const HELM_GRAFANA = '^4.0.0';
const HELM_MYSQL = '^1.4.0';
const HELM_MARIADB = '^6.12.2';
const HELM_POSTGRESQL = '^6.5.3';
const HELM_MOGODB_REPLICASET = '^3.10.1';
const HELM_COUCHBASE_OPERATOR = '^2.2.1';
// all constants used throughout all generators
const LOGIN_REGEX = '^(?>[a-zA-Z0-9!$&*+=?^_`{|}~.-]+@[a-zA-Z0-9-]+(?:\\\\.[a-zA-Z0-9-]+)*)|(?>[_.@A-Za-z0-9-]+)$';
// JS does not support atomic groups
const LOGIN_REGEX_JS = '^[a-zA-Z0-9!$&*+=?^_`{|}~.-]+@[a-zA-Z0-9-]+(?:\\\\.[a-zA-Z0-9-]+)*$|^[_.@A-Za-z0-9-]+$';
const MAIN_DIR = 'src/main/';
const TEST_DIR = 'src/test/';
// Note: this will be prepended with 'target/classes' for Maven, or with 'build/resources/main' for Gradle.
const CLIENT_DIST_DIR = 'static/';
// The string values of the validation options are the supported rule names;
// Object.values() replaces the equivalent Object.keys().map() chain, and the
// filter drops any non-string entries.
const SUPPORTED_VALIDATION_RULES = Object.values(validationOptions).filter(e => typeof e === 'string');
const SUPPORTED_CLIENT_FRAMEWORKS = {
ANGULAR: ANGULAR_X,
REACT,
VUE,
};
// documentation constants
const JHIPSTER_DOCUMENTATION_URL = 'https://www.jhipster.tech';
const JHIPSTER_DOCUMENTATION_ARCHIVE_PATH = '/documentation-archive/';
const OFFICIAL_DATABASE_TYPE_NAMES = {
cassandra: 'Cassandra',
couchbase: 'Couchbase',
mongodb: 'MongoDB',
neo4j: 'Neo4j',
sql: 'SQL',
};
const R2DBC_DB_OPTIONS = [
{
value: databaseTypes.POSTGRESQL,
name: 'PostgreSQL',
},
{
value: databaseTypes.MYSQL,
name: 'MySQL',
},
{
value: databaseTypes.MARIADB,
name: 'MariaDB',
},
{
value: databaseTypes.MSSQL,
name: 'Microsoft SQL Server',
},
];
const SQL_DB_OPTIONS = [
{
value: databaseTypes.POSTGRESQL,
name: 'PostgreSQL',
},
{
value: databaseTypes.MYSQL,
name: 'MySQL',
},
{
value: databaseTypes.MARIADB,
name: 'MariaDB',
},
{
value: databaseTypes.ORACLE,
name: 'Oracle',
},
{
value: databaseTypes.MSSQL,
name: 'Microsoft SQL Server',
},
];
const LANGUAGES = [
{
name: 'Albanian',
dispName: 'Shqip',
value: 'al',
dayjsLocaleId: 'sq',
localeId: 'sq',
},
{
name: 'Arabic (Libya)',
dispName: 'العربية',
value: 'ar-ly',
rtl: true,
skipForLocale: true,
localeId: 'ar-LY',
},
{
name: 'Armenian',
dispName: 'Հայերեն',
value: 'hy',
dayjsLocaleId: 'hy-am',
},
{
name: 'Belarusian',
dispName: 'Беларускі',
value: 'by',
dayjsLocaleId: 'be',
localeId: 'be',
},
{
name: 'Bengali',
dispName: 'বাংলা',
value: 'bn',
dayjsLocaleId: 'bn',
},
{ name: 'Bulgarian', dispName: 'Български', value: 'bg' },
{
name: 'Catalan',
dispName: 'Català',
value: 'ca',
},
{
name: 'Chinese (Simplified)',
dispName: '中文(简体)',
value: 'zh-cn',
localeId: 'zh-Hans',
},
{
name: 'Chinese (Traditional)',
dispName: '繁體中文',
value: 'zh-tw',
localeId: 'zh-Hant',
},
{ name: 'Croatian', dispName: 'Hrvatski', value: 'hr' },
{ name: 'Czech', dispName: 'Český', value: 'cs' },
{ name: 'Danish', dispName: 'Dansk', value: 'da' },
{ name: 'Dutch', dispName: 'Nederlands', value: 'nl' },
{ name: 'English', dispName: 'English', value: 'en' },
{ name: 'Estonian', dispName: 'Eesti', value: 'et' },
{
name: 'Farsi',
dispName: 'فارسی',
value: 'fa',
rtl: true,
},
{ name: 'Finnish', dispName: 'Suomi', value: 'fi' },
{ name: 'French', dispName: 'Français', value: 'fr' },
{ name: 'Galician', dispName: 'Galego', value: 'gl' },
{ name: 'German', dispName: 'Deutsch', value: 'de' },
{ name: 'Greek', dispName: 'Ελληνικά', value: 'el' },
{ name: 'Hindi', dispName: 'हिंदी', value: 'hi' },
{ name: 'Hungarian', dispName: 'Magyar', value: 'hu' },
{
name: 'Indonesian',
dispName: 'Bahasa Indonesia',
/*
JDK <17 ("Indonesian Locale does not comply with ISO 639")
The locale is set to "in" for Indonesia
See https://bugs.openjdk.java.net/browse/JDK-6457127
And https://github.com/jhipster/generator-jhipster/issues/9494
Java 17 supports 'id' locale, for compatibility with java 11, we will keep legacy 'in' value while we support java 11.
When running with java 17 users must set 'java.locale.useOldISOCodes=true' environment variable.
See https://bugs.openjdk.java.net/browse/JDK-8267069.
*/
value: 'in',
localeId: 'id',
dayjsLocaleId: 'id',
},
{ name: 'Italian', dispName: 'Italiano', value: 'it' },
{ name: 'Japanese', dispName: '日本語', value: 'ja' },
{ name: 'Korean', dispName: '한국어', value: 'ko' },
{ name: 'Marathi', dispName: 'मराठी', value: 'mr' },
{ name: 'Myanmar', dispName: 'မြန်မာ', value: 'my' },
{ name: 'Polish', dispName: 'Polski', value: 'pl' },
{
name: 'Portuguese (Brazilian)',
dispName: 'Português (Brasil)',
value: 'pt-br',
localeId: 'pt',
},
{
name: 'Portuguese',
dispName: 'Português',
value: 'pt-pt',
localeId: 'pt-PT',
dayjsLocaleId: 'pt',
},
{
name: 'Punjabi',
dispName: 'ਪੰਜਾਬੀ',
value: 'pa',
dayjsLocaleId: 'pa-in',
},
{ name: 'Romanian', dispName: 'Română', value: 'ro' },
{ name: 'Russian', dispName: 'Русский', value: 'ru' },
{ name: 'Slovak', dispName: 'Slovenský', value: 'sk' },
{ name: 'Serbian', dispName: 'Srpski', value: 'sr' },
{ name: 'Sinhala', dispName: 'සිංහල', value: 'si' },
{ name: 'Spanish', dispName: 'Español', value: 'es' },
{ name: 'Swedish', dispName: 'Svenska', value: 'sv' },
{ name: 'Turkish', dispName: 'Türkçe', value: 'tr' },
{ name: 'Tamil', dispName: 'தமிழ்', value: 'ta' },
{ name: 'Telugu', dispName: 'తెలుగు', value: 'te' },
{ name: 'Thai', dispName: 'ไทย', value: 'th' },
{
name: 'Ukrainian',
dispName: 'Українська',
value: 'ua',
localeId: 'uk',
dayjsLocaleId: 'uk',
},
{
name: 'Uzbek (Cyrillic)',
dispName: 'Ўзбекча',
value: 'uz-Cyrl-uz',
localeId: 'uz-Cyrl',
dayjsLocaleId: 'uz',
},
{
name: 'Uzbek (Latin)',
dispName: 'O`zbekcha',
value: 'uz-Latn-uz',
localeId: 'uz-Latn',
dayjsLocaleId: 'uz-latn',
},
{ name: 'Vietnamese', dispName: 'Tiếng Việt', value: 'vi' },
];
const constants = {
GENERATOR_JHIPSTER: 'generator-jhipster',
JHIPSTER_CONFIG_DIR: '.jhipster',
INTERPOLATE_REGEX: /<%:([\s\S]+?)%>/g, // so that tags in templates do not get mistreated as _ templates
DOCKER_DIR: `${MAIN_DIR}docker/`,
LINE_LENGTH: 180,
LANGUAGES,
MAIN_DIR,
TEST_DIR,
LOGIN_REGEX,
LOGIN_REGEX_JS,
// supported client frameworks
SUPPORTED_CLIENT_FRAMEWORKS,
CLIENT_MAIN_SRC_DIR: `${MAIN_DIR}webapp/`,
CLIENT_TEST_SRC_DIR: `${TEST_DIR}javascript/`,
CLIENT_WEBPACK_DIR: 'webpack/',
CLIENT_DIST_DIR,
ANGULAR_DIR: `${MAIN_DIR}webapp/app/`,
REACT_DIR: `${MAIN_DIR}webapp/app/`,
VUE_DIR: `${MAIN_DIR}webapp/app/`,
SERVER_MAIN_SRC_DIR: `${MAIN_DIR}java/`,
SERVER_MAIN_RES_DIR: `${MAIN_DIR}resources/`,
SERVER_TEST_SRC_DIR: `${TEST_DIR}java/`,
SERVER_TEST_RES_DIR: `${TEST_DIR}resources/`,
// server related
OFFICIAL_DATABASE_TYPE_NAMES,
// entity related
SUPPORTED_VALIDATION_RULES,
JHIPSTER_DOCUMENTATION_URL,
JHIPSTER_DOCUMENTATION_ARCHIVE_PATH,
DOCKER_JHIPSTER_REGISTRY,
DOCKER_JHIPSTER_CONTROL_CENTER,
DOCKER_JAVA_JRE,
DOCKER_MYSQL,
DOCKER_MARIADB,
DOCKER_POSTGRESQL,
DOCKER_MONGODB,
DOCKER_COUCHBASE,
DOCKER_CASSANDRA,
DOCKER_MSSQL,
DOCKER_NEO4J,
DOCKER_HAZELCAST_MANAGEMENT_CENTER,
DOCKER_MEMCACHED,
DOCKER_REDIS,
DOCKER_ELASTICSEARCH,
DOCKER_KEYCLOAK,
DOCKER_KAFKA,
DOCKER_ZOOKEEPER,
DOCKER_SONAR,
DOCKER_CONSUL,
DOCKER_CONSUL_CONFIG_LOADER,
DOCKER_PROMETHEUS,
DOCKER_PROMETHEUS_ALERTMANAGER,
DOCKER_GRAFANA,
JAVA_VERSION,
JAVA_COMPATIBLE_VERSIONS,
KAFKA_VERSION,
GRADLE_VERSION,
// Libraries
JIB_VERSION,
JHIPSTER_DEPENDENCIES_VERSION,
SPRING_BOOT_VERSION,
LIQUIBASE_VERSION,
LIQUIBASE_DTD_VERSION,
HIBERNATE_VERSION,
JACOCO_VERSION,
JACKSON_DATABIND_NULLABLE_VERSION,
// NPM
NODE_VERSION,
NPM_VERSION,
OPENAPI_GENERATOR_CLI_VERSION,
DOCKER_JENKINS,
DOCKER_SWAGGER_EDITOR,
R2DBC_DB_OPTIONS,
SQL_DB_OPTIONS,
DOCKER_COMPOSE_FORMAT_VERSION,
DOCKER_PROMETHEUS_OPERATOR,
DOCKER_GRAFANA_WATCHER,
KUBERNETES_CORE_API_VERSION,
KUBERNETES_BATCH_API_VERSION,
KUBERNETES_DEPLOYMENT_API_VERSION,
KUBERNETES_STATEFULSET_API_VERSION,
KUBERNETES_INGRESS_API_VERSION,
KUBERNETES_ISTIO_NETWORKING_API_VERSION,
KUBERNETES_RBAC_API_VERSION,
HELM_KAFKA,
HELM_ELASTICSEARCH,
HELM_PROMETHEUS,
HELM_GRAFANA,
HELM_MYSQL,
HELM_MARIADB,
HELM_POSTGRESQL,
HELM_MOGODB_REPLICASET,
HELM_COUCHBASE_OPERATOR,
};
module.exports = constants;
| generators/generator-constants.js | /**
* Copyright 2013-2021 the original author or authors from the JHipster project.
*
* This file is part of the JHipster project, see https://www.jhipster.tech/
* for more information.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
const validationOptions = require('../jdl/jhipster/validations');
const databaseTypes = require('../jdl/jhipster/database-types');
const { ANGULAR_X, REACT, VUE } = require('../jdl/jhipster/client-framework-types');
const commonPackageJson = require('./common/templates/package.json');
// Version of Java
const JAVA_VERSION = '11';
const JAVA_COMPATIBLE_VERSIONS = ['11', '12', '13', '14', '15', '16', '17'];
// Version of Node, NPM
const NODE_VERSION = '16.13.1';
const NPM_VERSION = commonPackageJson.devDependencies.npm;
const OPENAPI_GENERATOR_CLI_VERSION = '1.0.13-4.3.1';
const GRADLE_VERSION = '7.3.1';
const JIB_VERSION = '3.1.4';
// Libraries version
const JHIPSTER_DEPENDENCIES_VERSION = '7.5.1-SNAPSHOT';
// The spring-boot version should match the one managed by https://mvnrepository.com/artifact/tech.jhipster/jhipster-dependencies/JHIPSTER_DEPENDENCIES_VERSION
const SPRING_BOOT_VERSION = '2.5.8';
const LIQUIBASE_VERSION = '4.6.1';
// Liquibase XML DTDs are versioned by major.minor only, so strip the patch
// segment (e.g. '4.6.1' -> '4.6'). A split limit of 2 already keeps exactly
// the first two segments, making the previous slice(0, 2) step unnecessary.
const LIQUIBASE_DTD_VERSION = LIQUIBASE_VERSION.split('.', 2).join('.');
const HIBERNATE_VERSION = '5.4.33';
const JACOCO_VERSION = '0.8.7';
const KAFKA_VERSION = '5.5.5';
const JACKSON_DATABIND_NULLABLE_VERSION = '0.2.2';
// Version of docker images
const DOCKER_COMPOSE_FORMAT_VERSION = '3.8';
// const DOCKER_JHIPSTER_REGISTRY = 'ghcr.io/jhipster/jhipster-registry:main';
const DOCKER_JHIPSTER_REGISTRY = 'jhipster/jhipster-registry:v7.1.0';
const DOCKER_JHIPSTER_CONTROL_CENTER = 'jhipster/jhipster-control-center:v0.5.0';
const DOCKER_JAVA_JRE = 'eclipse-temurin:11-jre-focal';
const DOCKER_MYSQL = 'mysql:8.0.27';
const DOCKER_MARIADB = 'mariadb:10.7.1';
const DOCKER_POSTGRESQL = 'postgres:13.5';
const DOCKER_MONGODB = 'mongo:4.4.10';
const DOCKER_COUCHBASE = 'couchbase/server:7.0.0';
const DOCKER_CASSANDRA = 'cassandra:3.11.11';
const DOCKER_MSSQL = 'mcr.microsoft.com/mssql/server:2019-CU13-ubuntu-20.04';
const DOCKER_NEO4J = 'neo4j:4.3.7';
const DOCKER_HAZELCAST_MANAGEMENT_CENTER = 'hazelcast/management-center:4.2021.06';
const DOCKER_MEMCACHED = 'memcached:1.6.12-alpine';
const DOCKER_REDIS = 'redis:6.2.6';
const DOCKER_KEYCLOAK = 'jboss/keycloak:16.1.0'; // The version should match the attribute 'keycloakVersion' from /docker-compose/templates/realm-config/jhipster-realm.json.ejs and /server/templates/src/main/docker/config/realm-config/jhipster-realm.json.ejs
const DOCKER_ELASTICSEARCH = 'docker.elastic.co/elasticsearch/elasticsearch:7.13.3'; // The version should be coherent with the one from spring-data-elasticsearch project
const DOCKER_KAFKA = `confluentinc/cp-kafka:${KAFKA_VERSION}`;
const DOCKER_ZOOKEEPER = `confluentinc/cp-zookeeper:${KAFKA_VERSION}`;
const DOCKER_SONAR = 'sonarqube:9.1.0-community';
const DOCKER_CONSUL = 'consul:1.10.4';
const DOCKER_CONSUL_CONFIG_LOADER = 'jhipster/consul-config-loader:v0.4.1';
const DOCKER_PROMETHEUS = 'prom/prometheus:v2.31.1';
const DOCKER_PROMETHEUS_ALERTMANAGER = 'prom/alertmanager:v0.23.0';
const DOCKER_GRAFANA = 'grafana/grafana:8.2.4';
const DOCKER_JENKINS = 'jenkins/jenkins:lts-jdk11';
const DOCKER_SWAGGER_EDITOR = 'swaggerapi/swagger-editor:latest';
const DOCKER_PROMETHEUS_OPERATOR = 'quay.io/coreos/prometheus-operator:v0.42.1';
const DOCKER_GRAFANA_WATCHER = 'quay.io/coreos/grafana-watcher:v0.0.8';
// Kubernetes versions
const KUBERNETES_CORE_API_VERSION = 'v1';
const KUBERNETES_BATCH_API_VERSION = 'batch/v1';
const KUBERNETES_DEPLOYMENT_API_VERSION = 'apps/v1';
const KUBERNETES_STATEFULSET_API_VERSION = 'apps/v1';
const KUBERNETES_INGRESS_API_VERSION = 'networking.k8s.io/v1beta1';
const KUBERNETES_ISTIO_NETWORKING_API_VERSION = 'networking.istio.io/v1beta1';
const KUBERNETES_RBAC_API_VERSION = 'rbac.authorization.k8s.io/v1';
// Helm versions
const HELM_KAFKA = '^0.20.1';
const HELM_ELASTICSEARCH = '^1.32.0';
const HELM_PROMETHEUS = '^9.2.0';
const HELM_GRAFANA = '^4.0.0';
const HELM_MYSQL = '^1.4.0';
const HELM_MARIADB = '^6.12.2';
const HELM_POSTGRESQL = '^6.5.3';
const HELM_MOGODB_REPLICASET = '^3.10.1';
const HELM_COUCHBASE_OPERATOR = '^2.2.1';
// all constants used throughout all generators
const LOGIN_REGEX = '^(?>[a-zA-Z0-9!$&*+=?^_`{|}~.-]+@[a-zA-Z0-9-]+(?:\\\\.[a-zA-Z0-9-]+)*)|(?>[_.@A-Za-z0-9-]+)$';
// JS does not support atomic groups
const LOGIN_REGEX_JS = '^[a-zA-Z0-9!$&*+=?^_`{|}~.-]+@[a-zA-Z0-9-]+(?:\\\\.[a-zA-Z0-9-]+)*$|^[_.@A-Za-z0-9-]+$';
const MAIN_DIR = 'src/main/';
const TEST_DIR = 'src/test/';
// Note: this will be prepended with 'target/classes' for Maven, or with 'build/resources/main' for Gradle.
const CLIENT_DIST_DIR = 'static/';
// The string values of the validation options are the supported rule names;
// Object.values() replaces the equivalent Object.keys().map() chain, and the
// filter drops any non-string entries.
const SUPPORTED_VALIDATION_RULES = Object.values(validationOptions).filter(e => typeof e === 'string');
const SUPPORTED_CLIENT_FRAMEWORKS = {
ANGULAR: ANGULAR_X,
REACT,
VUE,
};
// documentation constants
const JHIPSTER_DOCUMENTATION_URL = 'https://www.jhipster.tech';
const JHIPSTER_DOCUMENTATION_ARCHIVE_PATH = '/documentation-archive/';
const OFFICIAL_DATABASE_TYPE_NAMES = {
cassandra: 'Cassandra',
couchbase: 'Couchbase',
mongodb: 'MongoDB',
neo4j: 'Neo4j',
sql: 'SQL',
};
const R2DBC_DB_OPTIONS = [
{
value: databaseTypes.POSTGRESQL,
name: 'PostgreSQL',
},
{
value: databaseTypes.MYSQL,
name: 'MySQL',
},
{
value: databaseTypes.MARIADB,
name: 'MariaDB',
},
{
value: databaseTypes.MSSQL,
name: 'Microsoft SQL Server',
},
];
const SQL_DB_OPTIONS = [
{
value: databaseTypes.POSTGRESQL,
name: 'PostgreSQL',
},
{
value: databaseTypes.MYSQL,
name: 'MySQL',
},
{
value: databaseTypes.MARIADB,
name: 'MariaDB',
},
{
value: databaseTypes.ORACLE,
name: 'Oracle',
},
{
value: databaseTypes.MSSQL,
name: 'Microsoft SQL Server',
},
];
const LANGUAGES = [
{
name: 'Albanian',
dispName: 'Shqip',
value: 'al',
dayjsLocaleId: 'sq',
localeId: 'sq',
},
{
name: 'Arabic (Libya)',
dispName: 'العربية',
value: 'ar-ly',
rtl: true,
skipForLocale: true,
localeId: 'ar-LY',
},
{
name: 'Armenian',
dispName: 'Հայերեն',
value: 'hy',
dayjsLocaleId: 'hy-am',
},
{
name: 'Belarusian',
dispName: 'Беларускі',
value: 'by',
dayjsLocaleId: 'be',
localeId: 'be',
},
{
name: 'Bengali',
dispName: 'বাংলা',
value: 'bn',
dayjsLocaleId: 'bn',
},
{ name: 'Bulgarian', dispName: 'Български', value: 'bg' },
{
name: 'Catalan',
dispName: 'Català',
value: 'ca',
},
{
name: 'Chinese (Simplified)',
dispName: '中文(简体)',
value: 'zh-cn',
localeId: 'zh-Hans',
},
{
name: 'Chinese (Traditional)',
dispName: '繁體中文',
value: 'zh-tw',
localeId: 'zh-Hant',
},
{ name: 'Croatian', dispName: 'Hrvatski', value: 'hr' },
{ name: 'Czech', dispName: 'Český', value: 'cs' },
{ name: 'Danish', dispName: 'Dansk', value: 'da' },
{ name: 'Dutch', dispName: 'Nederlands', value: 'nl' },
{ name: 'English', dispName: 'English', value: 'en' },
{ name: 'Estonian', dispName: 'Eesti', value: 'et' },
{
name: 'Farsi',
dispName: 'فارسی',
value: 'fa',
rtl: true,
},
{ name: 'Finnish', dispName: 'Suomi', value: 'fi' },
{ name: 'French', dispName: 'Français', value: 'fr' },
{ name: 'Galician', dispName: 'Galego', value: 'gl' },
{ name: 'German', dispName: 'Deutsch', value: 'de' },
{ name: 'Greek', dispName: 'Ελληνικά', value: 'el' },
{ name: 'Hindi', dispName: 'हिंदी', value: 'hi' },
{ name: 'Hungarian', dispName: 'Magyar', value: 'hu' },
{
name: 'Indonesian',
dispName: 'Bahasa Indonesia',
/*
JDK <17 ("Indonesian Locale does not comply with ISO 639")
The locale is set to "in" for Indonesia
See https://bugs.openjdk.java.net/browse/JDK-6457127
And https://github.com/jhipster/generator-jhipster/issues/9494
Java 17 supports 'id' locale, for compatibility with java 11, we will keep legacy 'in' value while we support java 11.
When running with java 17 users must set 'java.locale.useOldISOCodes=true' environment variable.
See https://bugs.openjdk.java.net/browse/JDK-8267069.
*/
value: 'in',
localeId: 'id',
dayjsLocaleId: 'id',
},
{ name: 'Italian', dispName: 'Italiano', value: 'it' },
{ name: 'Japanese', dispName: '日本語', value: 'ja' },
{ name: 'Korean', dispName: '한국어', value: 'ko' },
{ name: 'Marathi', dispName: 'मराठी', value: 'mr' },
{ name: 'Myanmar', dispName: 'မြန်မာ', value: 'my' },
{ name: 'Polish', dispName: 'Polski', value: 'pl' },
{
name: 'Portuguese (Brazilian)',
dispName: 'Português (Brasil)',
value: 'pt-br',
localeId: 'pt',
},
{
name: 'Portuguese',
dispName: 'Português',
value: 'pt-pt',
localeId: 'pt-PT',
dayjsLocaleId: 'pt',
},
{
name: 'Punjabi',
dispName: 'ਪੰਜਾਬੀ',
value: 'pa',
dayjsLocaleId: 'pa-in',
},
{ name: 'Romanian', dispName: 'Română', value: 'ro' },
{ name: 'Russian', dispName: 'Русский', value: 'ru' },
{ name: 'Slovak', dispName: 'Slovenský', value: 'sk' },
{ name: 'Serbian', dispName: 'Srpski', value: 'sr' },
{ name: 'Sinhala', dispName: 'සිංහල', value: 'si' },
{ name: 'Spanish', dispName: 'Español', value: 'es' },
{ name: 'Swedish', dispName: 'Svenska', value: 'sv' },
{ name: 'Turkish', dispName: 'Türkçe', value: 'tr' },
{ name: 'Tamil', dispName: 'தமிழ்', value: 'ta' },
{ name: 'Telugu', dispName: 'తెలుగు', value: 'te' },
{ name: 'Thai', dispName: 'ไทย', value: 'th' },
{
name: 'Ukrainian',
dispName: 'Українська',
value: 'ua',
localeId: 'uk',
dayjsLocaleId: 'uk',
},
{
name: 'Uzbek (Cyrillic)',
dispName: 'Ўзбекча',
value: 'uz-Cyrl-uz',
localeId: 'uz-Cyrl',
dayjsLocaleId: 'uz',
},
{
name: 'Uzbek (Latin)',
dispName: 'O`zbekcha',
value: 'uz-Latn-uz',
localeId: 'uz-Latn',
dayjsLocaleId: 'uz-latn',
},
{ name: 'Vietnamese', dispName: 'Tiếng Việt', value: 'vi' },
];
const constants = {
GENERATOR_JHIPSTER: 'generator-jhipster',
JHIPSTER_CONFIG_DIR: '.jhipster',
INTERPOLATE_REGEX: /<%:([\s\S]+?)%>/g, // so that tags in templates do not get mistreated as _ templates
DOCKER_DIR: `${MAIN_DIR}docker/`,
LINE_LENGTH: 180,
LANGUAGES,
MAIN_DIR,
TEST_DIR,
LOGIN_REGEX,
LOGIN_REGEX_JS,
// supported client frameworks
SUPPORTED_CLIENT_FRAMEWORKS,
CLIENT_MAIN_SRC_DIR: `${MAIN_DIR}webapp/`,
CLIENT_TEST_SRC_DIR: `${TEST_DIR}javascript/`,
CLIENT_WEBPACK_DIR: 'webpack/',
CLIENT_DIST_DIR,
ANGULAR_DIR: `${MAIN_DIR}webapp/app/`,
REACT_DIR: `${MAIN_DIR}webapp/app/`,
VUE_DIR: `${MAIN_DIR}webapp/app/`,
SERVER_MAIN_SRC_DIR: `${MAIN_DIR}java/`,
SERVER_MAIN_RES_DIR: `${MAIN_DIR}resources/`,
SERVER_TEST_SRC_DIR: `${TEST_DIR}java/`,
SERVER_TEST_RES_DIR: `${TEST_DIR}resources/`,
// server related
OFFICIAL_DATABASE_TYPE_NAMES,
// entity related
SUPPORTED_VALIDATION_RULES,
JHIPSTER_DOCUMENTATION_URL,
JHIPSTER_DOCUMENTATION_ARCHIVE_PATH,
DOCKER_JHIPSTER_REGISTRY,
DOCKER_JHIPSTER_CONTROL_CENTER,
DOCKER_JAVA_JRE,
DOCKER_MYSQL,
DOCKER_MARIADB,
DOCKER_POSTGRESQL,
DOCKER_MONGODB,
DOCKER_COUCHBASE,
DOCKER_CASSANDRA,
DOCKER_MSSQL,
DOCKER_NEO4J,
DOCKER_HAZELCAST_MANAGEMENT_CENTER,
DOCKER_MEMCACHED,
DOCKER_REDIS,
DOCKER_ELASTICSEARCH,
DOCKER_KEYCLOAK,
DOCKER_KAFKA,
DOCKER_ZOOKEEPER,
DOCKER_SONAR,
DOCKER_CONSUL,
DOCKER_CONSUL_CONFIG_LOADER,
DOCKER_PROMETHEUS,
DOCKER_PROMETHEUS_ALERTMANAGER,
DOCKER_GRAFANA,
JAVA_VERSION,
JAVA_COMPATIBLE_VERSIONS,
KAFKA_VERSION,
GRADLE_VERSION,
// Libraries
JIB_VERSION,
JHIPSTER_DEPENDENCIES_VERSION,
SPRING_BOOT_VERSION,
LIQUIBASE_VERSION,
LIQUIBASE_DTD_VERSION,
HIBERNATE_VERSION,
JACOCO_VERSION,
JACKSON_DATABIND_NULLABLE_VERSION,
// NPM
NODE_VERSION,
NPM_VERSION,
OPENAPI_GENERATOR_CLI_VERSION,
DOCKER_JENKINS,
DOCKER_SWAGGER_EDITOR,
R2DBC_DB_OPTIONS,
SQL_DB_OPTIONS,
DOCKER_COMPOSE_FORMAT_VERSION,
DOCKER_PROMETHEUS_OPERATOR,
DOCKER_GRAFANA_WATCHER,
KUBERNETES_CORE_API_VERSION,
KUBERNETES_BATCH_API_VERSION,
KUBERNETES_DEPLOYMENT_API_VERSION,
KUBERNETES_STATEFULSET_API_VERSION,
KUBERNETES_INGRESS_API_VERSION,
KUBERNETES_ISTIO_NETWORKING_API_VERSION,
KUBERNETES_RBAC_API_VERSION,
HELM_KAFKA,
HELM_ELASTICSEARCH,
HELM_PROMETHEUS,
HELM_GRAFANA,
HELM_MYSQL,
HELM_MARIADB,
HELM_POSTGRESQL,
HELM_MOGODB_REPLICASET,
HELM_COUCHBASE_OPERATOR,
};
module.exports = constants;
| Update postgres docker image version to 14.1
| generators/generator-constants.js | Update postgres docker image version to 14.1 | <ide><path>enerators/generator-constants.js
<ide> const DOCKER_JAVA_JRE = 'eclipse-temurin:11-jre-focal';
<ide> const DOCKER_MYSQL = 'mysql:8.0.27';
<ide> const DOCKER_MARIADB = 'mariadb:10.7.1';
<del>const DOCKER_POSTGRESQL = 'postgres:13.5';
<add>const DOCKER_POSTGRESQL = 'postgres:14.1';
<ide> const DOCKER_MONGODB = 'mongo:4.4.10';
<ide> const DOCKER_COUCHBASE = 'couchbase/server:7.0.0';
<ide> const DOCKER_CASSANDRA = 'cassandra:3.11.11'; |
|
Java | mit | cda56d06f10f7f26275133b5423a667a3a0c0a71 | 0 | nRo/DataFrame | /*
*
* * Copyright (c) 2017 Alexander Grün
* *
* * Permission is hereby granted, free of charge, to any person obtaining a copy
* * of this software and associated documentation files (the "Software"), to deal
* * in the Software without restriction, including without limitation the rights
* * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* * copies of the Software, and to permit persons to whom the Software is
* * furnished to do so, subject to the following conditions:
* *
* * The above copyright notice and this permission notice shall be included in all
* * copies or substantial portions of the Software.
* *
* * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* * SOFTWARE.
*
*/
package de.unknownreality.dataframe.column;
import de.unknownreality.dataframe.common.NumberUtil;
import de.unknownreality.dataframe.common.math.Quantiles;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Arrays;
import java.util.Comparator;
/**
* Created by Alex on 11.03.2016.
*/
public abstract class NumberColumn<T extends Number & Comparable<T>, C extends NumberColumn<T, C>> extends BasicColumn<T, C> {
private static final Logger log = LoggerFactory.getLogger(NumberColumn.class);
    /**
     * Creates a number column with the given name.
     *
     * @param name column name
     */
    public NumberColumn(String name) {
        super(name);
    }
    /**
     * Creates an unnamed number column.
     */
    public NumberColumn() {
        super(null);
    }
    /**
     * Creates a number column with the given name and initial values.
     *
     * @param name   column name
     * @param values initial values
     */
    public NumberColumn(String name, T[] values) {
        super(name, values);
    }
    /**
     * Creates a number column with the given name and values array.
     *
     * @param name   column name
     * @param values initial values array
     * @param size   size argument forwarded to the {@code BasicColumn}
     *               constructor (presumably the number of used entries —
     *               confirm against {@code BasicColumn})
     */
    public NumberColumn(String name, T[] values, int size) {
        super(name, values, size);
    }
    /**
     * Returns the value stored at the specified row index.
     *
     * @param index row index
     * @return value at the given index
     */
    @Override
    public T get(int index) {
        return super.values[index];
    }
/**
* Returns the median of all values in this column
*
* @return median of all values
*/
public T median() {
return new Quantiles<T>(
getSortedValues(),
getType(), true).median();
}
/**
* returns the specified quantile.
* This calculation requires sorting of the values each time.
* If more than one quantile should be calculated, use {@link #getQuantiles()}.
* @param percent quantile percent
* @return quantile
*/
public T getQuantile(double percent) {
return new Quantiles<T>(
getSortedValues(),
getType(), true)
.getQuantile(percent);
}
    /**
     * Returns a {@link Quantiles} object that can be used to calculate <tt>max</tt>, <tt>min</tt>, <tt>median</tt> and quantiles.
     * The values are sorted only once for this object; create a new
     * {@link Quantiles} object after the column's values have changed.
     *
     * @return quantiles object
     */
    public Quantiles<T> getQuantiles() {
        return new Quantiles<>(getSortedValues(), getType(), true);
    }
/**
* Returns the mean of all values in this column
*
* @return mean of all values
*/
public Double mean() {
int naCount = 0;
Double sum = 0d;
int count = 0;
int size = size();
for (int i = 0; i < size; i++) {
if (isNA(i)) {
naCount++;
continue;
}
count++;
sum += get(i).doubleValue();
}
if (naCount > 0) {
log.warn("mean() ignored {} NA", naCount);
}
return sum / count;
}
/**
* Returns the minimum of all values in this column
*
* @return minimum of all values
*/
public T min() {
Double min = Double.MAX_VALUE;
int naCount = 0;
int size = size();
for (int i = 0; i < size; i++) {
if (isNA(i)) {
naCount++;
continue;
}
min = Math.min(min, get(i).doubleValue());
}
if (naCount > 0) {
log.warn("min() ignored {} NA", naCount);
}
return NumberUtil.convert(min, getType());
}
/**
* Returns the maximum of all values in this column
*
* @return maximum of all values
*/
public T max() {
double max = Double.NEGATIVE_INFINITY;
int naCount = 0;
int size = size();
for (int i = 0; i < size; i++) {
if (isNA(i)) {
naCount++;
continue;
}
max = Math.max(max, get(i).doubleValue());
}
if (naCount > 0) {
log.warn("max() ignored {} NA", naCount);
}
return NumberUtil.convert(max, getType());
}
/**
* Returns the sum of all values in this column
*
* @return sum of all values
*/
public T sum() {
int naCount = 0;
double sum = 0;
int size = size();
for (int i = 0; i < size; i++) {
if (isNA(i)) {
naCount++;
continue;
}
sum += get(i).doubleValue();
}
if (naCount > 0) {
log.warn("sum() ignored {} NA", naCount);
}
return NumberUtil.convert(sum, getType());
}
/**
* Adds the values of another {@link NumberColumn} to the values in this column.
* {@code column[index] += otherColumn[index]}
* <p>Calls {@link #notifyDataFrameValueChanged(int)} to ensure data frame index consistency</p>
*
* @param column column containing the values that are added
* @return <tt>self</tt> for method chaining
*/
public C add(NumberColumn column) {
int naCount = 0;
int size = size();
for (int i = 0; i < size; i++) {
if (!isNA(i) && !column.isNA(i)) {
doSet(i, NumberUtil.add(get(i), column.get(i), getType()));
} else {
naCount++;
}
}
if (naCount > 0) {
log.warn("add() ignored {} NA", naCount);
}
notifyDataFrameColumnChanged();
return getThis();
}
/**
 * Returns a sorted copy of the column values.
 * Null entries (NA) are ordered before all non-null values; non-null values
 * use their natural ordering.
 *
 * @return sorted copy of the values
 */
protected T[] getSortedValues() {
    T[] copy = (T[]) toArray();
    Arrays.sort(copy, new Comparator<T>() {
        @Override
        public int compare(T a, T b) {
            if (a == null) {
                // two nulls are equal; a lone null sorts first
                return b == null ? 0 : -1;
            }
            if (b == null) {
                return 1;
            }
            return a.compareTo(b);
        }
    });
    return copy;
}
/**
 * Subtracts the values of another {@link NumberColumn} from the values in this column.
 * {@code column[index] -= otherColumn[index]}
 * Rows where either column is NA are skipped and a warning is logged.
 * <p>Calls {@link #notifyDataFrameValueChanged(int)} to ensure data frame index consistency</p>
 *
 * @param column column containing the values that are subtracted
 * @return <tt>self</tt> for method chaining
 */
public C subtract(NumberColumn column) {
    if (column.size() != size()) {
        throw new IllegalArgumentException("'subtract' requires column of same size");
    }
    int skipped = 0;
    final int rowCount = size();
    for (int row = 0; row < rowCount; row++) {
        if (isNA(row) || column.isNA(row)) {
            skipped++;
            continue;
        }
        doSet(row, NumberUtil.subtract(get(row), column.get(row), getType()));
    }
    if (skipped > 0) {
        log.warn("subtract() ignored {} NA", skipped);
    }
    notifyDataFrameColumnChanged();
    return getThis();
}
/**
 * Multiplies the values of another {@link NumberColumn} to the values in this column.
 * {@code column[index] *= otherColumn[index]}
 * Rows where either column is NA are skipped and a warning is logged.
 * <p>Calls {@link #notifyDataFrameValueChanged(int)} to ensure data frame index consistency</p>
 *
 * @param column column containing the values that are multiplied
 * @return <tt>self</tt> for method chaining
 */
public C multiply(NumberColumn column) {
    if (column.size() != size()) {
        throw new IllegalArgumentException("'multiply' requires column of same size");
    }
    int skipped = 0;
    final int rowCount = size();
    for (int row = 0; row < rowCount; row++) {
        if (isNA(row) || column.isNA(row)) {
            skipped++;
            continue;
        }
        doSet(row, NumberUtil.multiply(get(row), column.get(row), getType()));
    }
    if (skipped > 0) {
        log.warn("multiply() ignored {} NA", skipped);
    }
    notifyDataFrameColumnChanged();
    return getThis();
}
/**
 * Divides the values of this column by the values of another {@link NumberColumn}.
 * {@code column[index] /= otherColumn[index]}
 * Rows where either column is NA are skipped and a warning is logged.
 * <p>Calls {@link #notifyDataFrameValueChanged(int)} to ensure data frame index consistency</p>
 *
 * @param column column containing the values that are divided
 * @return <tt>self</tt> for method chaining
 */
public C divide(NumberColumn column) {
    if (column.size() != size()) {
        throw new IllegalArgumentException("'divide' requires column of same size");
    }
    int skipped = 0;
    final int rowCount = size();
    for (int row = 0; row < rowCount; row++) {
        if (isNA(row) || column.isNA(row)) {
            skipped++;
            continue;
        }
        doSet(row, NumberUtil.divide(get(row), column.get(row), getType()));
    }
    if (skipped > 0) {
        log.warn("divide() ignored {} NA", skipped);
    }
    notifyDataFrameColumnChanged();
    return getThis();
}
/**
 * Adds a {@link Number} to the values in this column.
 * {@code column[index] += number}
 * NA rows (or a null argument) are skipped and a warning is logged.
 * <p>Calls {@link #notifyDataFrameValueChanged(int)} to ensure data frame index consistency</p>
 *
 * @param value value added to all values in this column
 * @return <tt>self</tt> for method chaining
 */
public C add(Number value) {
    int skipped = 0;
    final int rowCount = size();
    for (int row = 0; row < rowCount; row++) {
        if (isNA(row) || value == null) {
            skipped++;
            continue;
        }
        doSet(row, NumberUtil.add(get(row), value, getType()));
    }
    if (skipped > 0) {
        log.warn("add() ignored {} NA", skipped);
    }
    notifyDataFrameColumnChanged();
    return getThis();
}
/**
 * Subtracts a {@link Number} from the values in this column.
 * {@code column[index] -= number}
 * NA rows (or a null argument) are skipped and a warning is logged.
 * <p>Calls {@link #notifyDataFrameValueChanged(int)} to ensure data frame index consistency</p>
 *
 * @param value value subtracted from all values in this column
 * @return <tt>self</tt> for method chaining
 */
public C subtract(Number value) {
    int skipped = 0;
    final int rowCount = size();
    for (int row = 0; row < rowCount; row++) {
        if (isNA(row) || value == null) {
            skipped++;
            continue;
        }
        doSet(row, NumberUtil.subtract(get(row), value, getType()));
    }
    if (skipped > 0) {
        log.warn("subtract() ignored {} NA", skipped);
    }
    notifyDataFrameColumnChanged();
    return getThis();
}
/**
 * Multiplies a {@link Number} to the values in this column.
 * {@code column[index] *= number}
 * NA rows (or a null argument) are skipped and a warning is logged.
 * <p>Calls {@link #notifyDataFrameValueChanged(int)} to ensure data frame index consistency</p>
 *
 * @param value value multiplied to all values in this column
 * @return <tt>self</tt> for method chaining
 */
public C multiply(Number value) {
    int skipped = 0;
    final int rowCount = size();
    for (int row = 0; row < rowCount; row++) {
        if (isNA(row) || value == null) {
            skipped++;
            continue;
        }
        doSet(row, NumberUtil.multiply(get(row), value, getType()));
    }
    if (skipped > 0) {
        log.warn("multiply() ignored {} NA", skipped);
    }
    notifyDataFrameColumnChanged();
    return getThis();
}
/**
 * Divides all values in this column by a {@link Number}.
 * {@code column[index] /= number}
 * NA rows (or a null argument) are skipped and a warning is logged.
 * <p>Calls {@link #notifyDataFrameValueChanged(int)} to ensure data frame index consistency</p>
 *
 * @param value the value all values in this column are divided by
 * @return <tt>self</tt> for method chaining
 */
public C divide(Number value) {
    int skipped = 0;
    final int rowCount = size();
    for (int row = 0; row < rowCount; row++) {
        if (isNA(row) || value == null) {
            skipped++;
            continue;
        }
        doSet(row, NumberUtil.divide(get(row), value, getType()));
    }
    if (skipped > 0) {
        log.warn("divide() ignored {} NA", skipped);
    }
    notifyDataFrameColumnChanged();
    return getThis();
}
}
| src/main/java/de/unknownreality/dataframe/column/NumberColumn.java | /*
*
* * Copyright (c) 2017 Alexander Grün
* *
* * Permission is hereby granted, free of charge, to any person obtaining a copy
* * of this software and associated documentation files (the "Software"), to deal
* * in the Software without restriction, including without limitation the rights
* * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* * copies of the Software, and to permit persons to whom the Software is
* * furnished to do so, subject to the following conditions:
* *
* * The above copyright notice and this permission notice shall be included in all
* * copies or substantial portions of the Software.
* *
* * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* * SOFTWARE.
*
*/
package de.unknownreality.dataframe.column;
import de.unknownreality.dataframe.common.NumberUtil;
import de.unknownreality.dataframe.common.math.Quantiles;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Arrays;
import java.util.Comparator;
/**
* Created by Alex on 11.03.2016.
*/
public abstract class NumberColumn<T extends Number & Comparable<T>, C extends NumberColumn<T, C>> extends BasicColumn<T, C> {
private static final Logger log = LoggerFactory.getLogger(NumberColumn.class);
public NumberColumn(String name) {
super(name);
}
public NumberColumn() {
super(null);
}
public NumberColumn(String name, T[] values) {
super(name, values);
}
public NumberColumn(String name, T[] values, int size) {
super(name, values, size);
}
@Override
public T get(int index) {
return super.values[index];
}
/**
* Returns the median of all values in this column
*
* @return median of all values
*/
public T median() {
return new Quantiles<T>(
getSortedValues(),
getType(), true).median();
}
/**
* returns the specified quantile.
* This calculation requires sorting of the values each time.
* If more than one quantile should be calculated, use {@link #getQuantiles()}.
* @param percent
* @return quantile
*/
public T getQuantile(double percent) {
return new Quantiles<T>(
getSortedValues(),
getType(), true)
.getQuantile(percent);
}
/**
* Returns a {@link Quantiles} object that can be used to calculate <tt>max</tt>, <tt>min</tt>, , <tt>median</tt> and quantiles.
* The values are sorted only once. When the values in the column have changed. A new {@link Quantiles} object should be created.
* @return quantiles object
*/
public Quantiles<T> getQuantiles() {
return new Quantiles<>(getSortedValues(), getType(), true);
}
/**
* Returns the mean of all values in this column
*
* @return mean of all values
*/
public Double mean() {
int naCount = 0;
Double sum = 0d;
int count = 0;
int size = size();
for (int i = 0; i < size; i++) {
if (isNA(i)) {
naCount++;
continue;
}
count++;
sum += get(i).doubleValue();
}
if (naCount > 0) {
log.warn("mean() ignored {} NA", naCount);
}
return sum / count;
}
/**
* Returns the minimum of all values in this column
*
* @return minimum of all values
*/
public T min() {
Double min = Double.MAX_VALUE;
int naCount = 0;
int size = size();
for (int i = 0; i < size; i++) {
if (isNA(i)) {
naCount++;
continue;
}
min = Math.min(min, get(i).doubleValue());
}
if (naCount > 0) {
log.warn("min() ignored {} NA", naCount);
}
return NumberUtil.convert(min, getType());
}
/**
* Returns the maximum of all values in this column
*
* @return maximum of all values
*/
public T max() {
double max = Double.NEGATIVE_INFINITY;
int naCount = 0;
int size = size();
for (int i = 0; i < size; i++) {
if (isNA(i)) {
naCount++;
continue;
}
max = Math.max(max, get(i).doubleValue());
}
if (naCount > 0) {
log.warn("max() ignored {} NA", naCount);
}
return NumberUtil.convert(max, getType());
}
/**
* Returns the sum of all values in this column
*
* @return sum of all values
*/
public T sum() {
int naCount = 0;
double sum = 0;
int size = size();
for (int i = 0; i < size; i++) {
if (isNA(i)) {
naCount++;
continue;
}
sum += get(i).doubleValue();
}
if (naCount > 0) {
log.warn("sum() ignored {} NA", naCount);
}
return NumberUtil.convert(sum, getType());
}
/**
* Adds the values of another {@link NumberColumn} to the values in this column.
* {@code column[index] += otherColumn[index]}
* <p>Calls {@link #notifyDataFrameValueChanged(int)} to ensure data frame index consistency</p>
*
* @param column column containing the values that are added
* @return <tt>self</tt> for method chaining
*/
public C add(NumberColumn column) {
int naCount = 0;
int size = size();
for (int i = 0; i < size; i++) {
if (!isNA(i) && !column.isNA(i)) {
doSet(i, NumberUtil.add(get(i), column.get(i), getType()));
} else {
naCount++;
}
}
if (naCount > 0) {
log.warn("add() ignored {} NA", naCount);
}
notifyDataFrameColumnChanged();
return getThis();
}
protected T[] getSortedValues() {
T[] sortedValues = (T[]) toArray();
Arrays.sort(sortedValues, new Comparator<T>() {
@Override
public int compare(T o1, T o2) {
if(o1 == null && o2 == null){
return 0;
}
if(o1 == null){
return -1;
}
if(o2 == null){
return 1;
}
return o1.compareTo(o2);
}
});
return sortedValues;
}
/**
* Subtracts the values of another {@link NumberColumn} from the values in this column.
* {@code column[index] -= otherColumn[index]}
* <p>Calls {@link #notifyDataFrameValueChanged(int)} to ensure data frame index consistency</p>
*
* @param column column containing the values that are subtracted
* @return <tt>self</tt> for method chaining
*/
public C subtract(NumberColumn column) {
if (column.size() != size()) {
throw new IllegalArgumentException("'subtract' requires column of same size");
}
int naCount = 0;
int size = size();
for (int i = 0; i < size; i++) {
if (!isNA(i) && !column.isNA(i)) {
doSet(i, NumberUtil.subtract(get(i), column.get(i), getType()));
} else {
naCount++;
}
}
if (naCount > 0) {
log.warn("subtract() ignored {} NA", naCount);
}
notifyDataFrameColumnChanged();
return getThis();
}
/**
* Multiplies the values of another {@link NumberColumn} to the values in this column.
* {@code column[index] *= otherColumn[index]}
* <p>Calls {@link #notifyDataFrameValueChanged(int)} to ensure data frame index consistency</p>
*
* @param column column containing the values that are multiplied
* @return <tt>self</tt> for method chaining
*/
public C multiply(NumberColumn column) {
if (column.size() != size()) {
throw new IllegalArgumentException("'multiply' requires column of same size");
}
int naCount = 0;
int size = size();
for (int i = 0; i < size; i++) {
if (!isNA(i) && !column.isNA(i)) {
doSet(i, NumberUtil.multiply(get(i), column.get(i), getType()));
} else {
naCount++;
}
}
if (naCount > 0) {
log.warn("multiply() ignored {} NA", naCount);
}
notifyDataFrameColumnChanged();
return getThis();
}
/**
* Divides the values of this column by the values of another {@link NumberColumn}.
* {@code column[index] /= otherColumn[index]}
* <p>Calls {@link #notifyDataFrameValueChanged(int)} to ensure data frame index consistency</p>
*
* @param column column containing the values that are divided
* @return <tt>self</tt> for method chaining
*/
public C divide(NumberColumn column) {
if (column.size() != size()) {
throw new IllegalArgumentException("'divide' requires column of same size");
}
int naCount = 0;
int size = size();
for (int i = 0; i < size; i++) {
if (!isNA(i) && !column.isNA(i)) {
doSet(i, NumberUtil.divide(get(i), column.get(i), getType()));
} else {
naCount++;
}
}
if (naCount > 0) {
log.warn("divide() ignored {} NA", naCount);
}
notifyDataFrameColumnChanged();
return getThis();
}
/**
* Adds a {@link Number} to the values in this column.
* {@code column[index] += number}
* <p>Calls {@link #notifyDataFrameValueChanged(int)} to ensure data frame index consistency</p>
*
* @param value value added to all values in this column
* @return <tt>self</tt> for method chaining
*/
public C add(Number value) {
int naCount = 0;
int size = size();
for (int i = 0; i < size; i++) {
if (!isNA(i) && value != null) {
doSet(i, NumberUtil.add(get(i), value, getType()));
} else {
naCount++;
}
}
if (naCount > 0) {
log.warn("add() ignored {} NA", naCount);
}
notifyDataFrameColumnChanged();
return getThis();
}
/**
* Subtracts a {@link Number} to the values in this column.
* {@code column[index] -= number}
* <p>Calls {@link #notifyDataFrameValueChanged(int)} to ensure data frame index consistency</p>
*
* @param value value subtracted from all values in this column
* @return <tt>self</tt> for method chaining
*/
public C subtract(Number value) {
int naCount = 0;
int size = size();
for (int i = 0; i < size; i++) {
if (!isNA(i) && value != null) {
doSet(i, NumberUtil.subtract(get(i), value, getType()));
} else {
naCount++;
}
}
if (naCount > 0) {
log.warn("subtract() ignored {} NA", naCount);
}
notifyDataFrameColumnChanged();
return getThis();
}
/**
* Multiplies a {@link Number} to the values in this column.
* {@code column[index] *= number}
* <p>Calls {@link #notifyDataFrameValueChanged(int)} to ensure data frame index consistency</p>
*
* @param value value multiplied to all values in this column
* @return <tt>self</tt> for method chaining
*/
public C multiply(Number value) {
int naCount = 0;
int size = size();
for (int i = 0; i < size; i++) {
if (!isNA(i) && value != null) {
doSet(i, NumberUtil.multiply(get(i), value, getType()));
} else {
naCount++;
}
}
if (naCount > 0) {
log.warn("multiply() ignored {} NA", naCount);
}
notifyDataFrameColumnChanged();
return getThis();
}
/**
* Divides all values in this column by a {@link Number}.
* {@code column[index] /= number}
* <p>Calls {@link #notifyDataFrameValueChanged(int)} to ensure data frame index consistency</p>
*
* @param value the value all values in this column are divided by
* @return <tt>self</tt> for method chaining
*/
public C divide(Number value) {
int naCount = 0;
int size = size();
for (int i = 0; i < size; i++) {
if (!isNA(i) && value != null) {
doSet(i, NumberUtil.divide(get(i), value, getType()));
} else {
naCount++;
}
}
if (naCount > 0) {
log.warn("divide() ignored {} NA", naCount);
}
notifyDataFrameColumnChanged();
return getThis();
}
}
| javadoc added
| src/main/java/de/unknownreality/dataframe/column/NumberColumn.java | javadoc added | <ide><path>rc/main/java/de/unknownreality/dataframe/column/NumberColumn.java
<ide> * returns the specified quantile.
<ide> * This calculation requires sorting of the values each time.
<ide> * If more than one quantile should be calculated, use {@link #getQuantiles()}.
<del> * @param percent
<add> * @param percent quantile percent
<ide> * @return quantile
<ide> */
<ide> public T getQuantile(double percent) { |
|
Java | apache-2.0 | 5f05de6c0a237c4175cda5f2187692d65de6a76b | 0 | bounswe/bounswe2016group7,bounswe/bounswe2016group7,bounswe/bounswe2016group7 | /*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package com.bounswe.group7.web.controller;
import com.bounswe.group7.api.client.LoginServiceClient;
import com.bounswe.group7.model.Users;
import java.io.IOException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.servlet.ModelAndView;
import org.springframework.web.servlet.mvc.support.RedirectAttributes;
/**
 * Web-layer controller handling login and logout for the front end.
 * Login delegates authentication to {@link LoginServiceClient} and stores
 * the authenticated user's data in the HTTP session.
 *
 * @author ugurbor
 */
@RestController
public class LoginController {

    /**
     * Authenticates the submitted credentials against the backend login
     * service. On success the user, token, username and authorities are
     * stored in the session and the user is redirected to /home; on failure
     * the error message is flashed and the user is redirected back to /.
     *
     * @param request            servlet request carrying username/password parameters
     * @param response           servlet response (unused, kept for signature compatibility)
     * @param redirectAttributes used to flash an error message on failure
     * @return redirect to /home on success, redirect to / on failure
     * @throws IOException propagated from the servlet layer
     */
    @RequestMapping(value = "/login", method = RequestMethod.POST)
    public ModelAndView login(HttpServletRequest request, HttpServletResponse response, RedirectAttributes redirectAttributes) throws IOException {
        String username = request.getParameter("username");
        String password = request.getParameter("password");
        LoginServiceClient client = new LoginServiceClient();
        ModelAndView index;
        try {
            Users user = client.login(new Users(username, password));
            HttpSession session = request.getSession();
            session.setAttribute("user", user);
            session.setAttribute("token", user.getToken());
            session.setAttribute("username", user.getUsername());
            session.setAttribute("authorities", user.getAuthorities());
            index = new ModelAndView("redirect:/home");
        } catch (Exception ex) {
            ex.printStackTrace();
            redirectAttributes.addFlashAttribute("error", ex.getMessage());
            index = new ModelAndView("redirect:/");
        }
        return index;
    }

    /**
     * Logs the current user out and redirects to /.
     *
     * @param request            servlet request holding the session
     * @param response           servlet response (unused, kept for signature compatibility)
     * @param redirectAttributes unused, kept for signature compatibility
     * @return redirect to /
     */
    @RequestMapping(value = "/logout", method = RequestMethod.GET)
    public ModelAndView logout(HttpServletRequest request, HttpServletResponse response, RedirectAttributes redirectAttributes) {
        // Invalidate the whole session instead of removing attributes one by
        // one: guarantees no stale authentication state (e.g. a forgotten
        // attribute added later) survives logout. getSession(false) avoids
        // creating a fresh session just to destroy it.
        HttpSession session = request.getSession(false);
        if (session != null) {
            session.invalidate();
        }
        return new ModelAndView("redirect:/");
    }
}
| utopic/web-application/src/main/java/com/bounswe/group7/web/controller/LoginController.java | /*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package com.bounswe.group7.web.controller;
import com.bounswe.group7.api.client.LoginServiceClient;
import com.bounswe.group7.model.Users;
import java.io.IOException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.servlet.ModelAndView;
import org.springframework.web.servlet.mvc.support.RedirectAttributes;
/**
*
* @author ugurbor
*/
@RestController
public class LoginController {
@RequestMapping(value = "/login", method = RequestMethod.POST)
public ModelAndView login(HttpServletRequest request, HttpServletResponse response, RedirectAttributes redirectAttributes) throws IOException {
String username = request.getParameter("username");
String password = request.getParameter("password");
LoginServiceClient client = new LoginServiceClient();
Users user = null;
try {
user = client.login(new Users(username, password));
HttpSession session = request.getSession();
session.setAttribute("user", user);
session.setAttribute("token", user.getToken());
session.setAttribute("username", user.getUsername());
session.setAttribute("authorities", user.getAuthorities());
ModelAndView index = new ModelAndView("redirect:/home");
} catch (Exception ex) {
ex.printStackTrace();
redirectAttributes.addFlashAttribute("error", ex.getMessage());
ModelAndView index = new ModelAndView("redirect:/");
}
return index;
}
@RequestMapping(value="/logout", method = RequestMethod.GET)
public ModelAndView logout(HttpServletRequest request, HttpServletResponse response, RedirectAttributes redirectAttributes){
HttpSession session = request.getSession();
session.removeAttribute("user");
session.removeAttribute("token");
session.removeAttribute("username");
session.removeAttribute("authorities");
ModelAndView index = new ModelAndView("redirect:/");
return index;
}
}
| Another bug fix
| utopic/web-application/src/main/java/com/bounswe/group7/web/controller/LoginController.java | Another bug fix | <ide><path>topic/web-application/src/main/java/com/bounswe/group7/web/controller/LoginController.java
<ide> String password = request.getParameter("password");
<ide> LoginServiceClient client = new LoginServiceClient();
<ide> Users user = null;
<add> ModelAndView index;
<ide> try {
<ide> user = client.login(new Users(username, password));
<ide> HttpSession session = request.getSession();
<ide> session.setAttribute("token", user.getToken());
<ide> session.setAttribute("username", user.getUsername());
<ide> session.setAttribute("authorities", user.getAuthorities());
<del> ModelAndView index = new ModelAndView("redirect:/home");
<add> index = new ModelAndView("redirect:/home");
<ide> } catch (Exception ex) {
<ide> ex.printStackTrace();
<ide> redirectAttributes.addFlashAttribute("error", ex.getMessage());
<del> ModelAndView index = new ModelAndView("redirect:/");
<add> index = new ModelAndView("redirect:/");
<ide> }
<ide> return index;
<ide> } |
|
JavaScript | mit | 59a09db4d160db9e4ba9a94f1d563f73a078f94c | 0 | goje87/bsafe-rid,goje87/bsafe-rid | var express = require("express");
var router = express.Router();
var http = require('http');
var app = express();
var db = require("./model/db");
var sensorData = require("./model/sensorData");
var rideInfo = require("./model/rideInfo");
var mongoose = require("mongoose");
var bodyParser = require("body-parser");
var methodOverride = require("method-override");
app.set("views", __dirname + "/views");
app.set("view engine", "ejs");
app.engine("html", require("ejs").renderFile);
app.use(function(req, res, next) {
res.header("Access-Control-Allow-Origin", "*");
res.header("Access-Control-Allow-Headers", "Origin, X-Requested-With, Content-Type, Accept");
res.setHeader('Access-Control-Allow-Methods', 'GET,PUT,POST,DELETE');
next();
});
//app.use(bodyParser.urlencoded({ extended: true }));
app.use(bodyParser.json({ extended: true }));
app.use(methodOverride(function(req, res){
if (req.body && typeof req.body === 'object' && '_method' in req.body) {
// look in urlencoded POST bodies and delete it
var method = req.body._method
delete req.body._method
return method
}
}));
//require("./router/crud.js")(app);
// Normalises a flat object in place: real `undefined` values and the literal
// strings "undefined"/"null" (in any letter case) are replaced with `null`.
// All other values — including actual `null` — are left untouched.
// Returns the same (mutated) object for convenient chaining.
function normalise(obj) {
    Object.keys(obj).forEach(function (key) {
        var value = obj[key];
        if (typeof value === 'undefined') {
            obj[key] = null;
        } else if (typeof value === 'string') {
            var lowered = value.toLowerCase();
            if (lowered === 'undefined' || lowered === 'null') {
                obj[key] = null;
            }
        }
    });
    return obj;
}
// GET /sensorData/:rideId/:tagId? — returns all sensorData documents for a
// ride, optionally narrowed to one tag. NOTE(review): when tagId is omitted
// the filter contains `tagId: undefined`; presumably Mongoose drops undefined
// query fields — confirm against the Mongoose version in package.json.
app.get("/sensorData/:rideId/:tagId?", function(req, res){
    mongoose.model('sensorData').find({ "rideId" : req.params.rideId, "tagId" : req.params.tagId }, function (err, result) {
        if (err) {
            // res.send(status, body) is deprecated in Express 4;
            // use the chainable res.status(...).send(...) form instead.
            res.status(404).send({ "success" : false, error : { "message" : err } });
            console.log("ERRORED at GET / , ");
            return console.error(err);
        } else {
            console.log("SUCCESS at GET / , for collection : sensorData");
            console.log("Get results for RIDE ID = " + req.params.rideId );
            res.status(200).send({ "success" : true, "data" : result });
            // Removed the post-send `result = null;` — it had no effect.
        }
    });
});
// GET /rideInfo/:rideId — returns the rideInfo documents matching a ride id.
app.get("/rideInfo/:rideId", function(req, res){
    mongoose.model('rideInfo').find({ "rideId" : req.params.rideId }, function (err, result) {
        if (err) {
            // res.send(status, body) is deprecated in Express 4;
            // use the chainable res.status(...).send(...) form instead.
            res.status(404).send({ "success" : false, error : { "message" : err } });
            console.log("ERRORED at GET / , ");
            return console.error(err);
        } else {
            console.log("SUCCESS at GET / , for collection : rideInfo");
            console.log("Get results for RIDE ID = " + req.params.rideId );
            res.status(200).send({ "success" : true, "data" : result });
            // Removed the post-send `result = null;` — it had no effect.
        }
    });
});
// POST /sensorData — persists one sensor reading (accelerometer, geolocation
// and compass) for a ride. All values arrive as URL query parameters; the
// normalise() helper converts missing values and the literal strings
// "undefined"/"null" to real nulls before insertion.
app.post("/sensorData", function(req, resp){
    // Get values from POST request. These can be done through forms or REST calls. These rely on the "name" attributes for forms
    // Accelerometer axes.
    var acc = normalise({
        "x" : req.query.accX,
        "y" : req.query.accY,
        "z" : req.query.accZ,
    });
    // Geolocation fix with accuracy/altitude/heading/speed metadata.
    var geo = normalise({
        "latitude" : req.query.geoLatitude,
        "longitude" : req.query.geoLongitude,
        "accuracy" : req.query.geoAccuracy,
        "altitude" : req.query.geoAltitude,
        "altitudeAccuracy": req.query.geoAltitudeAccuracy,
        "heading" : req.query.geoHeading,
        "speed" : req.query.geoSpeed
    });
    // Compass headings.
    var comp = normalise({
        "magneticHeading": req.query.compMagneticHeading,
        "trueHeading" : req.query.compTrueHeading,
        "headingAccuracy": req.query.compHeadingAccuracy,
    });
    // Record identity: capture timestamp, owning ride, optional tag, and
    // the client app version that produced the reading.
    var timestamp = req.query.timestamp;
    var rideId = req.query.rideId;
    var tagId = req.query.tagId;
    var version = req.query.ver;
    mongoose.model('sensorData').create({
        "acc" : {
            "x" : acc.x,
            "y" : acc.y,
            "z" : acc.z,
        },
        "geo" : {
            "latitude" : geo.latitude,
            "longitude" : geo.longitude,
            "accuracy" : geo.accuracy,
            "altitude" : geo.altitude,
            "altitudeAccuracy": geo.altitudeAccuracy,
            "heading" : geo.heading,
            "speed" : geo.speed,
        },
        "comp" : {
            "magneticHeading" : comp.magneticHeading,
            "trueHeading" : comp.trueHeading,
            "headingAccuracy" : comp.headingAccuracy,
        },
        "timestamp" : timestamp,
        "rideId" : rideId,
        "tagId" : tagId,
        "version" : version
    }, function (err, sensorData) {
        if (err) {
            // 422: document failed schema validation or insertion.
            console.log("ERRORED at POST / , " + err);
            resp.send(422, { "success" : false, error : { "message" : err } });
        } else {
            // 201: created; echo the stored document back to the client.
            console.log("SUCCESS at POST / for collection sensorData ############################# ");
            console.log('POST creating new entry: ' + sensorData);
            resp.send(201, { "success" : true, "data" : sensorData });
            sensorData = null;
        }
    });
})
// POST /rideInfo — creates a ride record. rideId and startedAt are mandatory;
// status defaults to "inprogress" and analysis status to "pending".
app.post("/rideInfo", function(req, resp){
    if(req.query.rideId && req.query.startedAt){
        var rideStatus;
        if(req.query.status){
            rideStatus = req.query.status;
        } else {
            rideStatus = "inprogress";
        }
        // BUG FIX: analysisStatus was assigned without `var`, creating an
        // implicit global shared across concurrent requests. It is now
        // request-scoped.
        var analysisStatus;
        if(req.query.analysisStatus){
            analysisStatus = req.query.analysisStatus;
        } else {
            analysisStatus = "pending";
        }
        mongoose.model('rideInfo').create({
            "rideId" : req.query.rideId,
            "status" : rideStatus,
            "startedAt" : req.query.startedAt,
            "endedAt" : req.query.endedAt,
            "analysisInfo" : normalise({
                "status" : analysisStatus,
                "version" : req.query.analysisVersion,
                "startedAt" : req.query.analysisStartedAt,
                "endedAt" : req.query.analysisEndedAt
            })
        }, function (err, rideInfo) {
            if (err) {
                console.log("ERRORED at POST / , " + err);
                // Deprecated res.send(status, body) replaced with the
                // chainable res.status(...).send(...) form.
                resp.status(422).send({ "success" : false, error : { "message" : err }});
            } else {
                console.log("SUCCESS at POST / for collection rideInfo ############################# ");
                console.log('POST creating new entry: ' + rideInfo);
                resp.status(201).send({ "success" : true, "data" : rideInfo});
            }
        });
        return ;
    }
    resp.status(422).send({ "success" : false, error : { "message" : "Both rideId and startedAt are REQUIRED" } });
})
// PUT /rideInfo/:rideId — partially updates a ride record. Every query
// parameter is copied verbatim into the update document, so callers control
// exactly which fields change. Responds with the updated document
// ({ new : true }) or 422 on error / missing rideId.
app.put("/rideInfo/:rideId", function(req, resp){
    if(req.params.rideId){
        // Copy all supplied query parameters into the update payload.
        // NOTE(review): no whitelist — any field name in the query string is
        // written to the document; confirm this is intended.
        var updateFields = {};
        for(var i in req.query){
            updateFields[i] = req.query[i];
        }
        mongoose.model('rideInfo').findOneAndUpdate({ rideId : req.params.rideId}, updateFields, { new : true}, function (err, rideInfo) {
            if (err) {
                console.log("ERRORED at PUT / , " + err);
                resp.send(422, { "success" : false, error : { "message" : err }});
            } else {
                console.log("SUCCESS at PUT / for collection rideInfo ############################# ");
                console.log('PUT updating entry: ' + rideInfo);
                resp.send(201, { "success" : true, "data" : rideInfo});
                rideInfo = null;
            }
        });
        return ;
    }
    resp.send(422, { "success" : false, error : { "message" : "rideId is REQUIRED for PUT operation" } });
})
// Start the HTTP server. The port is hard-coded for this prototype.
var server = app.listen(3000, function(){
    console.log("Running on port 3000");
});
| app.js | var express = require("express");
var router = express.Router();
var http = require('http');
var app = express();
var db = require("./model/db");
var sensorData = require("./model/sensorData");
var rideInfo = require("./model/rideInfo");
var mongoose = require("mongoose");
var bodyParser = require("body-parser");
var methodOverride = require("method-override");
app.set("views", __dirname + "/views");
app.set("view engine", "ejs");
app.engine("html", require("ejs").renderFile);
app.use(function(req, res, next) {
res.header("Access-Control-Allow-Origin", "*");
res.header("Access-Control-Allow-Headers", "Origin, X-Requested-With, Content-Type, Accept");
res.setHeader('Access-Control-Allow-Methods', 'GET,PUT,POST,DELETE');
next();
});
//app.use(bodyParser.urlencoded({ extended: true }));
app.use(bodyParser.json({ extended: true }));
app.use(methodOverride(function(req, res){
if (req.body && typeof req.body === 'object' && '_method' in req.body) {
// look in urlencoded POST bodies and delete it
var method = req.body._method
delete req.body._method
return method
}
}));
//require("./router/crud.js")(app);
function normalise(obj) {
for(var i in obj) {
if(obj.hasOwnProperty(i)) {
switch(typeof obj[i]) {
case 'undefined':
obj[i] = null;
break;
case 'string':
if((obj[i].toLowerCase() == "undefined") || (obj[i].toLowerCase() == "null")){
obj[i] = null;
}
break;
}
}
}
return obj;
}
app.get("/sensorData/:rideId/:tagId?", function(req, res){
mongoose.model('sensorData').find({ "rideId" : req.params.rideId, "tagId" : req.params.tagId }, function (err, result) {
if (err) {
//res.status(404).send;
res.send(404, { "success" : false, error : { "message" : err } });
console.log("ERRORED at GET / , ");
return console.error(err);
} else {
console.log("SUCCESS at GET / , for collection : sensorData");
console.log("Get results for RIDE ID = " + req.params.rideId );
res.send(200, { "success" : true, "data" : result });
//res.json({ "success" : true, "data" : result });
//res.render("index.html");
}
});
});
app.get("/rideInfo/:rideId", function(req, res){
mongoose.model('rideInfo').find({ "rideId" : req.params.rideId }, function (err, result) {
if (err) {
//res.status(404).send;
res.send(404, { "success" : false, error : { "message" : err } });
console.log("ERRORED at GET / , ");
return console.error(err);
} else {
console.log("SUCCESS at GET / , for collection : rideInfo");
console.log("Get results for RIDE ID = " + req.params.rideId );
res.send(200, { "success" : true, "data" : result });
//res.json({ "success" : true, "data" : result });
//res.render("index.html");
}
});
});
app.post("/sensorData", function(req, resp){
// Get values from POST request. These can be done through forms or REST calls. These rely on the "name" attributes for forms
var acc = normalise({
"x" : req.query.accX,
"y" : req.query.accY,
"z" : req.query.accZ,
});
var geo = normalise({
"latitude" : req.query.geoLatitude,
"longitude" : req.query.geoLongitude,
"accuracy" : req.query.geoAccuracy,
"altitude" : req.query.geoAltitude,
"altitudeAccuracy": req.query.geoAltitudeAccuracy,
"heading" : req.query.geoHeading,
"speed" : req.query.geoSpeed
});
var comp = normalise({
"magneticHeading": req.query.compMagneticHeading,
"trueHeading" : req.query.compTrueHeading,
"headingAccuracy": req.query.compHeadingAccuracy,
});
var timestamp = req.query.timestamp;
var rideId = req.query.rideId;
var tagId = req.query.tagId;
var version = req.query.ver;
mongoose.model('sensorData').create({
"acc" : {
"x" : acc.x,
"y" : acc.y,
"z" : acc.z,
},
"geo" : {
"latitude" : geo.latitude,
"longitude" : geo.longitude,
"accuracy" : geo.accuracy,
"altitude" : geo.altitude,
"altitudeAccuracy": geo.altitudeAccuracy,
"heading" : geo.heading,
"speed" : geo.speed,
},
"comp" : {
"magneticHeading" : comp.magneticHeading,
"trueHeading" : comp.trueHeading,
"headingAccuracy" : comp.headingAccuracy,
},
"timestamp" : timestamp,
"rideId" : rideId,
"tagId" : tagId,
"version" : version
}, function (err, sensorData) {
if (err) {
console.log("ERRORED at POST / , " + err);
resp.send(422, { "success" : false, error : { "message" : err } });
} else {
console.log("SUCCESS at POST / for collection sensorData ############################# ");
console.log('POST creating new entry: ' + sensorData);
resp.send(201, { "success" : true, "data" : sensorData });
}
});
})
app.post("/rideInfo", function(req, resp){
if(req.query.rideId && req.query.startedAt){
var rideStatus;
if(req.query.status){
rideStatus = req.query.status;
} else {
rideStatus = "inprogress";
}
if(req.query.analysisStatus){
analysisStatus = req.query.analysisStatus;
} else {
analysisStatus = "pending";
}
mongoose.model('rideInfo').create({
"rideId" : req.query.rideId,
"status" : rideStatus,
"startedAt" : req.query.startedAt,
"endedAt" : req.query.endedAt,
"analysisInfo" : normalise({
"status" : analysisStatus,
"version" : req.query.analysisVersion,
"startedAt" : req.query.analysisStartedAt,
"endedAt" : req.query.analysisEndedAt
})
}, function (err, rideInfo) {
if (err) {
console.log("ERRORED at POST / , " + err);
resp.send(422, { "success" : false, error : { "message" : err }});
} else {
console.log("SUCCESS at POST / for collection rideInfo ############################# ");
console.log('POST creating new entry: ' + rideInfo);
resp.send(201, { "success" : true, "data" : rideInfo});
}
});
return ;
}
resp.send(422, { "success" : false, error : { "message" : "Both rideId and startedAt are REQUIRED" } });
})
app.put("/rideInfo/:rideId", function(req, resp){
if(req.params.rideId){
var updateFields = {};
for(var i in req.query){
updateFields[i] = req.query[i];
}
mongoose.model('rideInfo').findOneAndUpdate({ rideId : req.params.rideId}, updateFields, { new : true}, function (err, rideInfo) {
if (err) {
console.log("ERRORED at PUT / , " + err);
resp.send(422, { "success" : false, error : { "message" : err }});
} else {
console.log("SUCCESS at PUT / for collection rideInfo ############################# ");
console.log('PUT updating entry: ' + rideInfo);
resp.send(201, { "success" : true, "data" : rideInfo});
}
});
return ;
}
resp.send(422, { "success" : false, error : { "message" : "rideId is REQUIRED for PUT operation" } });
})
var server = app.listen(3000, function(){
console.log("Running on port 3000");
});
| Clearing variables to preserve memory
| app.js | Clearing variables to preserve memory | <ide><path>pp.js
<ide> }
<ide> break;
<ide> }
<del>
<ide> }
<ide> }
<ide> return obj;
<ide> console.log("SUCCESS at GET / , for collection : sensorData");
<ide> console.log("Get results for RIDE ID = " + req.params.rideId );
<ide> res.send(200, { "success" : true, "data" : result });
<add> result = null;
<ide> //res.json({ "success" : true, "data" : result });
<ide> //res.render("index.html");
<ide> }
<ide> console.log("SUCCESS at GET / , for collection : rideInfo");
<ide> console.log("Get results for RIDE ID = " + req.params.rideId );
<ide> res.send(200, { "success" : true, "data" : result });
<add> result = null;
<ide> //res.json({ "success" : true, "data" : result });
<ide> //res.render("index.html");
<ide> }
<ide> console.log("SUCCESS at POST / for collection sensorData ############################# ");
<ide> console.log('POST creating new entry: ' + sensorData);
<ide> resp.send(201, { "success" : true, "data" : sensorData });
<add> sensorData = null;
<ide> }
<ide> });
<ide> })
<ide> console.log("SUCCESS at POST / for collection rideInfo ############################# ");
<ide> console.log('POST creating new entry: ' + rideInfo);
<ide> resp.send(201, { "success" : true, "data" : rideInfo});
<add> rideInfo = null;
<ide> }
<ide> });
<ide> return ;
<ide> console.log("SUCCESS at PUT / for collection rideInfo ############################# ");
<ide> console.log('PUT updating entry: ' + rideInfo);
<ide> resp.send(201, { "success" : true, "data" : rideInfo});
<add> rideInfo = null;
<ide> }
<ide> });
<ide> return ; |
|
Java | apache-2.0 | error: pathspec 'src/web/onClient/hr/fer/zemris/vhdllab/simulations/GhdlResults.java' did not match any file(s) known to git
| 6ad41597073c9fd9b9eae9bd318357330a5475a6 | 1 | mbezjak/vhdllab,mbezjak/vhdllab,mbezjak/vhdllab | /**
* Klasa sluzi za parsiranje stringa koji sadrzi rezultate simulacije prikazane
* u jednom stringu, internog formata. Parsirane rezultate direktno koristi
* applet za iscrtavanje rezultata simulacije
*
* @author Boris Ozegovic
*/
public class GhdlResults
{
private final String HEAD_LIMITER = "%%%";
private final String LIMITER = "###";
private final String VALUE_LIMITER = "&&&";
private String resultInString;
private String[] splitResults;
/* polje koji sadrzi imena signala nakon parsiranja */
private String[] signalNames;
private String[] defaultSignalNames;
/* svaki redak predstavlja sve vrijednosti pojedinog signala */
private String[][] signalValues;
private String[][] defaultSignalValues;
/* sadrzi tocke u kojima se dogada promjena vrijednosti signala */
private String[] transitionPointsInStrings;
private long[] transitionPoints;
/* broj znakova najduljeg imena signala */
private int maximumSignalNameLength;
/* privremeni string kod mijenjanja poretka signala */
String tempString;
/* privremeno polje stringova kod mijenjanja poretka signala */
String[] tempArrayString;
/**
* Constructor
*
* @param resultInString uzima string dobiven preko HTTP-a
*/
public GhdlResults (String resultInString)
{
this.resultInString = resultInString;
}
/**
* Metoda koja vrsi samo parsiranje stringa zapisanog u internom formatu
*/
public void parseString ()
{
/*
* razdvaja u cetiri stringa, imena signala, vrijednosti, tocke
* promjene vrijednosti signala i konacno broj znakova najduljeg imena
* signala
*/
splitResults = resultInString.split(HEAD_LIMITER);
/* [0] su imena signala, [1] values, [2] tocke promjene, [3] max ime */
signalNames = splitResults[0].split(LIMITER);
/* prvo razdvaja sve signale (0&&&Z&&&1 itd za svaki) */
String[] temp = splitResults[1].split(LIMITER);
String[][] matrica = new String[temp.length][];
/* a onda pojedinacno sve vrijednosti tih signala */
for (int i = 0; i < temp.length; i++)
{
matrica[i] = temp[i].split(VALUE_LIMITER);
}
signalValues = matrica;
/* dobivanje tocaka u kojima se dogada promjena vrijednosti signala */
transitionPointsInStrings = splitResults[2].split(LIMITER);
transitionPoints = new long[transitionPointsInStrings.length];
for (int i = 0; i < transitionPointsInStrings.length; i++)
{
transitionPoints[i] = Long.valueOf(transitionPointsInStrings[i]).longValue();
}
/* broj znakova najduljeg imena signala */
maximumSignalNameLength = Integer.valueOf(splitResults[3]).intValue();
/* postavi defaultni poredak koji ostaje konstantan */
defaultSignalNames = signalNames.clone();
defaultSignalValues = signalValues.clone();
}
/**
* Metoda koja mijenja poredak signala prema gore. Mijenja se poredak imena
* i poredak vrijednosti..
*
* @param index indeks signala koji se pomice prema gore
*/
public void changeSignalOrderUp (int index)
{
/* ako je signal vec na vrhu */
if (index == 0)
{
return;
}
/* promjena poretka imena signala */
tempString = signalNames[index];
signalNames[index] = signalNames[index - 1];
signalNames[index - 1] = tempString;
/* promjena poretka vrijednosti signala */
tempArrayString = signalValues[index];
signalValues[index] = signalValues[index - 1];
signalValues[index - 1] = tempArrayString;
}
/**
* Metoda koja mijenja poredak signala prema dolje.
*
* @param index indeks signala koji se pomice prema dolje
*/
public void changeSignalOrderDown (int index)
{
/* ako je signal vec na dnu */
if (index == (signalNames.length - 1))
{
return;
}
/* promjena poretka imena signala */
tempString = signalNames[index];
signalNames[index] = signalNames[index + 1];
signalNames[index + 1] = tempString;
/* promjena poretka vrijednosti signala */
tempArrayString = signalValues[index];
signalValues[index] = signalValues[index + 1];
signalValues[index + 1] = tempArrayString;
}
/**
* Metoda koja vraca defaultni poredak
*/
public void setDefaultOrder ()
{
signalNames = defaultSignalNames.clone();
signalValues = defaultSignalValues.clone();
}
/**
* Getter vrijednosti po signalima
*/
public String[][] getSignalValues ()
{
return signalValues;
}
/**
* Getter imena signala
*/
public String[] getSignalNames ()
{
return signalNames;
}
/**
* Vraca polje tocaka u kojima se dogada promjena signala
*/
public long[] getTransitionPoints()
{
return transitionPoints;
}
/**
* Vraca boj znakova najduljeg imena signala
*/
public int getMaximumSignalNameLength()
{
return maximumSignalNameLength;
}
/**
* Test metoda
*/
public static void main (String[] args)
{
VcdParser parser = new VcdParser("adder.vcd");
parser.parse();
parser.resultToString();
GhdlResults sParser = new GhdlResults(parser.getResultInString());
sParser.parseString();
for (String s : sParser.getSignalNames())
{
System.out.println(s);
}
for (String[] p : sParser.getSignalValues())
{
for (String s : p)
{
System.out.print(s + " ");
}
System.out.println("");
}
for (long broj : sParser.getTransitionPoints())
{
System.out.println(broj);
}
System.out.println(sParser.getMaximumSignalNameLength());
}
}
| src/web/onClient/hr/fer/zemris/vhdllab/simulations/GhdlResults.java | Klasa koja parsira string dobiven HTTP-om u rezultat simulacije
git-svn-id: 650ffc0a3ae286c3f0a198ebc7b4f99e37498f62@15 cbae1e92-611c-0410-ab7b-b19ac7dee622
| src/web/onClient/hr/fer/zemris/vhdllab/simulations/GhdlResults.java | Klasa koja parsira string dobiven HTTP-om u rezultat simulacije | <ide><path>rc/web/onClient/hr/fer/zemris/vhdllab/simulations/GhdlResults.java
<add>/**
<add> * Klasa sluzi za parsiranje stringa koji sadrzi rezultate simulacije prikazane
<add> * u jednom stringu, internog formata. Parsirane rezultate direktno koristi
<add> * applet za iscrtavanje rezultata simulacije
<add> *
<add> * @author Boris Ozegovic
<add> */
<add>public class GhdlResults
<add>{
<add> private final String HEAD_LIMITER = "%%%";
<add> private final String LIMITER = "###";
<add> private final String VALUE_LIMITER = "&&&";
<add> private String resultInString;
<add> private String[] splitResults;
<add>
<add> /* polje koji sadrzi imena signala nakon parsiranja */
<add> private String[] signalNames;
<add> private String[] defaultSignalNames;
<add>
<add> /* svaki redak predstavlja sve vrijednosti pojedinog signala */
<add> private String[][] signalValues;
<add> private String[][] defaultSignalValues;
<add>
<add> /* sadrzi tocke u kojima se dogada promjena vrijednosti signala */
<add> private String[] transitionPointsInStrings;
<add> private long[] transitionPoints;
<add>
<add> /* broj znakova najduljeg imena signala */
<add> private int maximumSignalNameLength;
<add>
<add> /* privremeni string kod mijenjanja poretka signala */
<add> String tempString;
<add>
<add> /* privremeno polje stringova kod mijenjanja poretka signala */
<add> String[] tempArrayString;
<add>
<add>
<add> /**
<add> * Constructor
<add> *
<add> * @param resultInString uzima string dobiven preko HTTP-a
<add> */
<add> public GhdlResults (String resultInString)
<add> {
<add> this.resultInString = resultInString;
<add> }
<add>
<add>
<add> /**
<add> * Metoda koja vrsi samo parsiranje stringa zapisanog u internom formatu
<add> */
<add> public void parseString ()
<add> {
<add> /*
<add> * razdvaja u cetiri stringa, imena signala, vrijednosti, tocke
<add> * promjene vrijednosti signala i konacno broj znakova najduljeg imena
<add> * signala
<add> */
<add> splitResults = resultInString.split(HEAD_LIMITER);
<add>
<add> /* [0] su imena signala, [1] values, [2] tocke promjene, [3] max ime */
<add> signalNames = splitResults[0].split(LIMITER);
<add>
<add> /* prvo razdvaja sve signale (0&&&Z&&&1 itd za svaki) */
<add> String[] temp = splitResults[1].split(LIMITER);
<add> String[][] matrica = new String[temp.length][];
<add>
<add> /* a onda pojedinacno sve vrijednosti tih signala */
<add> for (int i = 0; i < temp.length; i++)
<add> {
<add> matrica[i] = temp[i].split(VALUE_LIMITER);
<add> }
<add> signalValues = matrica;
<add>
<add> /* dobivanje tocaka u kojima se dogada promjena vrijednosti signala */
<add> transitionPointsInStrings = splitResults[2].split(LIMITER);
<add> transitionPoints = new long[transitionPointsInStrings.length];
<add> for (int i = 0; i < transitionPointsInStrings.length; i++)
<add> {
<add> transitionPoints[i] = Long.valueOf(transitionPointsInStrings[i]).longValue();
<add> }
<add>
<add> /* broj znakova najduljeg imena signala */
<add> maximumSignalNameLength = Integer.valueOf(splitResults[3]).intValue();
<add>
<add> /* postavi defaultni poredak koji ostaje konstantan */
<add> defaultSignalNames = signalNames.clone();
<add> defaultSignalValues = signalValues.clone();
<add> }
<add>
<add>
<add> /**
<add> * Metoda koja mijenja poredak signala prema gore. Mijenja se poredak imena
<add> * i poredak vrijednosti..
<add> *
<add> * @param index indeks signala koji se pomice prema gore
<add> */
<add> public void changeSignalOrderUp (int index)
<add> {
<add> /* ako je signal vec na vrhu */
<add> if (index == 0)
<add> {
<add> return;
<add> }
<add>
<add> /* promjena poretka imena signala */
<add> tempString = signalNames[index];
<add> signalNames[index] = signalNames[index - 1];
<add> signalNames[index - 1] = tempString;
<add>
<add> /* promjena poretka vrijednosti signala */
<add> tempArrayString = signalValues[index];
<add> signalValues[index] = signalValues[index - 1];
<add> signalValues[index - 1] = tempArrayString;
<add> }
<add>
<add>
<add> /**
<add> * Metoda koja mijenja poredak signala prema dolje.
<add> *
<add> * @param index indeks signala koji se pomice prema dolje
<add> */
<add> public void changeSignalOrderDown (int index)
<add> {
<add> /* ako je signal vec na dnu */
<add> if (index == (signalNames.length - 1))
<add> {
<add> return;
<add> }
<add>
<add> /* promjena poretka imena signala */
<add> tempString = signalNames[index];
<add> signalNames[index] = signalNames[index + 1];
<add> signalNames[index + 1] = tempString;
<add>
<add> /* promjena poretka vrijednosti signala */
<add> tempArrayString = signalValues[index];
<add> signalValues[index] = signalValues[index + 1];
<add> signalValues[index + 1] = tempArrayString;
<add> }
<add>
<add>
<add> /**
<add> * Metoda koja vraca defaultni poredak
<add> */
<add> public void setDefaultOrder ()
<add> {
<add> signalNames = defaultSignalNames.clone();
<add> signalValues = defaultSignalValues.clone();
<add> }
<add>
<add>
<add> /**
<add> * Getter vrijednosti po signalima
<add> */
<add> public String[][] getSignalValues ()
<add> {
<add> return signalValues;
<add> }
<add>
<add>
<add> /**
<add> * Getter imena signala
<add> */
<add> public String[] getSignalNames ()
<add> {
<add> return signalNames;
<add> }
<add>
<add>
<add> /**
<add> * Vraca polje tocaka u kojima se dogada promjena signala
<add> */
<add> public long[] getTransitionPoints()
<add> {
<add> return transitionPoints;
<add> }
<add>
<add>
<add> /**
<add> * Vraca boj znakova najduljeg imena signala
<add> */
<add> public int getMaximumSignalNameLength()
<add> {
<add> return maximumSignalNameLength;
<add> }
<add>
<add>
<add> /**
<add> * Test metoda
<add> */
<add> public static void main (String[] args)
<add> {
<add> VcdParser parser = new VcdParser("adder.vcd");
<add> parser.parse();
<add> parser.resultToString();
<add> GhdlResults sParser = new GhdlResults(parser.getResultInString());
<add> sParser.parseString();
<add> for (String s : sParser.getSignalNames())
<add> {
<add> System.out.println(s);
<add> }
<add> for (String[] p : sParser.getSignalValues())
<add> {
<add> for (String s : p)
<add> {
<add> System.out.print(s + " ");
<add> }
<add> System.out.println("");
<add> }
<add> for (long broj : sParser.getTransitionPoints())
<add> {
<add> System.out.println(broj);
<add> }
<add> System.out.println(sParser.getMaximumSignalNameLength());
<add> }
<add>}
<add>
<add>
<add>
<add> |
|
Java | apache-2.0 | 41dc0db52bb570a8a439f24e620193a3e18a855c | 0 | vincent-zurczak/time-sheet-generator,vincent-zurczak/time-sheet-generator | /**
* Copyright 2014 - Vincent Zurczak
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.vzurczak.timesheetgenerator.internal;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.MalformedURLException;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Properties;
import java.util.Random;
import com.itextpdf.text.Chunk;
import com.itextpdf.text.Document;
import com.itextpdf.text.DocumentException;
import com.itextpdf.text.Element;
import com.itextpdf.text.Font;
import com.itextpdf.text.FontFactory;
import com.itextpdf.text.Image;
import com.itextpdf.text.PageSize;
import com.itextpdf.text.Paragraph;
import com.itextpdf.text.Phrase;
import com.itextpdf.text.Rectangle;
import com.itextpdf.text.pdf.PdfPCell;
import com.itextpdf.text.pdf.PdfPTable;
import com.itextpdf.text.pdf.PdfWriter;
/**
* @author Vincent Zurczak
*/
public class PdfGenerator {
final SimpleDateFormat sdf = new SimpleDateFormat( "EEEE d MMMM yyyy" );
/**
* Creates a PDF document.
* @param bean a generation bean (not null)
* @param scheduleProperties the details of the schedule (not null)
* @throws DocumentException
* @throws FileNotFoundException
*/
public void createDocument( GenerationDataBean bean, Properties scheduleProperties )
throws Exception {
// File name
StringBuilder sb = new StringBuilder();
sb.append( "Feuille-De-Temps--s" );
sb.append( String.format( "%02d", bean.getStartWeek()));
sb.append( "--s" );
sb.append( String.format( "%02d", bean.getEndWeek()));
sb.append( "--" );
sb.append( bean.getYear());
sb.append( ".pdf" );
// Create the document
File outputFile = new File( "./pdf/" + sb.toString());
final Document doc = new Document( PageSize.A4.rotate());
PdfWriter.getInstance( doc, new FileOutputStream( outputFile ));
doc.open();
doc.addAuthor( bean.getName());
doc.addCreator( bean.getName());
String s;
if( bean.getEndWeek() - bean.getStartWeek() > 1 )
s = "Feuilles de Temps - Semaines " + bean.getStartWeek() + " à " + bean.getEndWeek();
else
s = "Feuille de Temps - Semaine " + bean.getStartWeek();
doc.addTitle( s );
doc.addSubject( s );
// Add pages
for( int i=bean.getStartWeek(); i<=bean.getEndWeek(); i++ )
addPageForWeek( i, doc, bean, scheduleProperties );
// That's it!
doc.close();
}
/**
* Adds a page for a given week.
* @param i the week number
* @param doc the document to update
* @param bean a generation bean (not null)
* @param scheduleProperties
* @throws DocumentException
* @throws IOException
* @throws MalformedURLException
*/
private void addPageForWeek( int weekNumber, Document doc, GenerationDataBean bean, Properties scheduleProperties )
throws DocumentException, MalformedURLException, IOException {
doc.newPage();
final Font boldFont = FontFactory.getFont( FontFactory.HELVETICA_BOLD );
final Font normalFont = FontFactory.getFont( FontFactory.HELVETICA );
Calendar calendar = Utils.findCalendar( weekNumber, bean.getYear());
// Title
Paragraph paragraph = new Paragraph( "Bordereau de Déclaration des Temps", boldFont );
paragraph.setAlignment( Element.ALIGN_CENTER );
doc.add( paragraph );
doc.add( new Paragraph( " " ));
doc.add( new Paragraph( " " ));
// Meta: week
final PdfPTable metaTable = new PdfPTable( 1 );
paragraph = new Paragraph();
paragraph.add( new Chunk( "Semaine : ", boldFont ));
paragraph.add( new Chunk( String.valueOf( weekNumber ), normalFont ));
PdfPCell c = new PdfPCell( paragraph );
c.setBorder( Rectangle.NO_BORDER );
metaTable.addCell( c );
// Meta: date
Calendar endOfWeekCalendar = ((Calendar) calendar.clone());
endOfWeekCalendar.add( Calendar.DATE, 4 );
String formattedDate = new SimpleDateFormat( "dd/MM/yyyy" ).format( endOfWeekCalendar.getTime());
paragraph = new Paragraph();
paragraph.add( new Chunk( "Date : ", boldFont ));
paragraph.add( new Chunk( formattedDate, normalFont ));
c = new PdfPCell( paragraph );
c.setBorder( Rectangle.NO_BORDER );
metaTable.addCell( c );
doc.add( metaTable );
doc.add( new Paragraph( " " ));
doc.add( new Paragraph( " " ));
// Signatures
final PdfPTable signaturesTable = new PdfPTable( 2 );
paragraph = new Paragraph();
paragraph.add( new Chunk( "Nom : ", boldFont ));
paragraph.add( new Chunk( bean.getName(), normalFont ));
c = new PdfPCell( paragraph );
c.setBorder( Rectangle.NO_BORDER );
signaturesTable.addCell( c );
paragraph = new Paragraph();
paragraph.add( new Chunk( "Responsable : ", boldFont ));
paragraph.add( new Chunk( bean.getManagerName(), normalFont ));
c = new PdfPCell( paragraph );
c.setBorder( Rectangle.NO_BORDER );
signaturesTable.addCell( c );
c = new PdfPCell( new Paragraph( "Signature : ", boldFont ));
c.setBorder( Rectangle.NO_BORDER );
signaturesTable.addCell( c );
c = new PdfPCell( new Paragraph( "Signature : ", boldFont ));
c.setBorder( Rectangle.NO_BORDER );
signaturesTable.addCell( c );
doc.add( signaturesTable );
// Signature image
if( ! bean.signatures.isEmpty()) {
int random = new Random().nextInt( bean.signatures.size());
Image img = Image.getInstance( bean.signatures.get( random ).toURI().toURL());
img.scaleToFit( 200, 100 );
img.setIndentationLeft( 25 + (random * random * 14) % 51 );
doc.add( img );
} else {
doc.add( new Paragraph( " " ));
}
doc.add( new Paragraph( " " ));
doc.add( new Paragraph( " " ));
doc.add( new Paragraph( " " ));
// Calendar
final PdfPTable timeTable = new PdfPTable( 7 );
timeTable.addCell( new PdfPCell());
for( int i=0; i<5; i++ ) {
final String date = this.sdf.format( calendar.getTime());
timeTable.addCell( newCell( date, 10 ));
calendar.add( Calendar.DATE, 1 );
}
timeTable.addCell( newCell( "Total", 10 ));
timeTable.addCell( newCell( "Heures Effectuées", 20 ));
int total = 0;
boolean daysOff = false;
calendar.add( Calendar.DATE, -5 );
for( int i=0; i<5; i++ ) {
String key = new SimpleDateFormat( "dd_MM_yyyy" ).format( calendar.getTime());
String value = scheduleProperties.getProperty( key );
if( ! value.matches( "\\d+" )) {
value = "0\n\n" + value;
daysOff = true;
} else {
total += Integer.parseInt( value );
}
timeTable.addCell( newCell( value, 20 ));
calendar.add( Calendar.DATE, 1 );
}
if( total > bean.getTotalHours())
throw new IOException( "Too many hours, you were supposed to do " + bean.getTotalHours() + " hours..." );
else if( ! daysOff && total != bean.getTotalHours())
throw new IOException( "Wrong schedule, you were supposed to do EXACTLY " + bean.getTotalHours() + " hours..." );
timeTable.addCell( newCell( total + " h", 20 ));
timeTable.completeRow();
doc.add( timeTable );
}
private PdfPCell newCell( String content, int padding ) {
PdfPCell c = new PdfPCell( new Phrase( content ));
c.setHorizontalAlignment( Element.ALIGN_CENTER );
c.setPaddingTop( padding );
c.setPaddingBottom( padding );
c.setPaddingLeft( padding / 2f );
c.setPaddingRight( padding / 2f );
return c;
}
}
| src/main/java/net/vzurczak/timesheetgenerator/internal/PdfGenerator.java | /**
* Copyright 2014 - Vincent Zurczak
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.vzurczak.timesheetgenerator.internal;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.MalformedURLException;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Properties;
import java.util.Random;
import com.itextpdf.text.Chunk;
import com.itextpdf.text.Document;
import com.itextpdf.text.DocumentException;
import com.itextpdf.text.Element;
import com.itextpdf.text.Font;
import com.itextpdf.text.FontFactory;
import com.itextpdf.text.Image;
import com.itextpdf.text.PageSize;
import com.itextpdf.text.Paragraph;
import com.itextpdf.text.Phrase;
import com.itextpdf.text.Rectangle;
import com.itextpdf.text.pdf.PdfPCell;
import com.itextpdf.text.pdf.PdfPTable;
import com.itextpdf.text.pdf.PdfWriter;
/**
* @author Vincent Zurczak
*/
public class PdfGenerator {
final SimpleDateFormat sdf = new SimpleDateFormat( "EEEE d MMMM yyyy" );
/**
* Creates a PDF document.
* @param bean a generation bean (not null)
* @param scheduleProperties the details of the schedule (not null)
* @throws DocumentException
* @throws FileNotFoundException
*/
public void createDocument( GenerationDataBean bean, Properties scheduleProperties )
throws Exception {
// File name
StringBuilder sb = new StringBuilder();
sb.append( "Feuille-De-Temps--s" );
sb.append( bean.getStartWeek());
sb.append( "--s" );
sb.append( bean.getEndWeek());
sb.append( "--" );
sb.append( bean.getYear());
sb.append( ".pdf" );
// Create the document
File outputFile = new File( "./pdf/" + sb.toString());
final Document doc = new Document( PageSize.A4.rotate());
PdfWriter.getInstance( doc, new FileOutputStream( outputFile ));
doc.open();
doc.addAuthor( bean.getName());
doc.addCreator( bean.getName());
String s;
if( bean.getEndWeek() - bean.getStartWeek() > 1 )
s = "Feuilles de Temps - Semaines " + bean.getStartWeek() + " à " + bean.getEndWeek();
else
s = "Feuille de Temps - Semaine " + bean.getStartWeek();
doc.addTitle( s );
doc.addSubject( s );
// Add pages
for( int i=bean.getStartWeek(); i<=bean.getEndWeek(); i++ )
addPageForWeek( i, doc, bean, scheduleProperties );
// That's it!
doc.close();
}
/**
* Adds a page for a given week.
* @param i the week number
* @param doc the document to update
* @param bean a generation bean (not null)
* @param scheduleProperties
* @throws DocumentException
* @throws IOException
* @throws MalformedURLException
*/
private void addPageForWeek( int weekNumber, Document doc, GenerationDataBean bean, Properties scheduleProperties )
throws DocumentException, MalformedURLException, IOException {
doc.newPage();
final Font boldFont = FontFactory.getFont( FontFactory.HELVETICA_BOLD );
final Font normalFont = FontFactory.getFont( FontFactory.HELVETICA );
Calendar calendar = Utils.findCalendar( weekNumber, bean.getYear());
// Title
Paragraph paragraph = new Paragraph( "Bordereau de Déclaration des Temps", boldFont );
paragraph.setAlignment( Element.ALIGN_CENTER );
doc.add( paragraph );
doc.add( new Paragraph( " " ));
doc.add( new Paragraph( " " ));
// Meta: week
final PdfPTable metaTable = new PdfPTable( 1 );
paragraph = new Paragraph();
paragraph.add( new Chunk( "Semaine : ", boldFont ));
paragraph.add( new Chunk( String.valueOf( weekNumber ), normalFont ));
PdfPCell c = new PdfPCell( paragraph );
c.setBorder( Rectangle.NO_BORDER );
metaTable.addCell( c );
// Meta: date
Calendar endOfWeekCalendar = ((Calendar) calendar.clone());
endOfWeekCalendar.add( Calendar.DATE, 4 );
String formattedDate = new SimpleDateFormat( "dd/MM/yyyy" ).format( endOfWeekCalendar.getTime());
paragraph = new Paragraph();
paragraph.add( new Chunk( "Date : ", boldFont ));
paragraph.add( new Chunk( formattedDate, normalFont ));
c = new PdfPCell( paragraph );
c.setBorder( Rectangle.NO_BORDER );
metaTable.addCell( c );
doc.add( metaTable );
doc.add( new Paragraph( " " ));
doc.add( new Paragraph( " " ));
// Signatures
final PdfPTable signaturesTable = new PdfPTable( 2 );
paragraph = new Paragraph();
paragraph.add( new Chunk( "Nom : ", boldFont ));
paragraph.add( new Chunk( bean.getName(), normalFont ));
c = new PdfPCell( paragraph );
c.setBorder( Rectangle.NO_BORDER );
signaturesTable.addCell( c );
paragraph = new Paragraph();
paragraph.add( new Chunk( "Responsable : ", boldFont ));
paragraph.add( new Chunk( bean.getManagerName(), normalFont ));
c = new PdfPCell( paragraph );
c.setBorder( Rectangle.NO_BORDER );
signaturesTable.addCell( c );
c = new PdfPCell( new Paragraph( "Signature : ", boldFont ));
c.setBorder( Rectangle.NO_BORDER );
signaturesTable.addCell( c );
c = new PdfPCell( new Paragraph( "Signature : ", boldFont ));
c.setBorder( Rectangle.NO_BORDER );
signaturesTable.addCell( c );
doc.add( signaturesTable );
// Signature image
if( ! bean.signatures.isEmpty()) {
int random = new Random().nextInt( bean.signatures.size());
Image img = Image.getInstance( bean.signatures.get( random ).toURI().toURL());
img.scaleToFit( 200, 100 );
img.setIndentationLeft( 25 + (random * random * 14) % 51 );
doc.add( img );
} else {
doc.add( new Paragraph( " " ));
}
doc.add( new Paragraph( " " ));
doc.add( new Paragraph( " " ));
doc.add( new Paragraph( " " ));
// Calendar
final PdfPTable timeTable = new PdfPTable( 7 );
timeTable.addCell( new PdfPCell());
for( int i=0; i<5; i++ ) {
final String date = this.sdf.format( calendar.getTime());
timeTable.addCell( newCell( date, 10 ));
calendar.add( Calendar.DATE, 1 );
}
timeTable.addCell( newCell( "Total", 10 ));
timeTable.addCell( newCell( "Heures Effectuées", 20 ));
int total = 0;
boolean daysOff = false;
calendar.add( Calendar.DATE, -5 );
for( int i=0; i<5; i++ ) {
String key = new SimpleDateFormat( "dd_MM_yyyy" ).format( calendar.getTime());
String value = scheduleProperties.getProperty( key );
if( ! value.matches( "\\d+" )) {
value = "0\n\n" + value;
daysOff = true;
} else {
total += Integer.parseInt( value );
}
timeTable.addCell( newCell( value, 20 ));
calendar.add( Calendar.DATE, 1 );
}
if( total > bean.getTotalHours())
throw new IOException( "Too many hours, you were supposed to do " + bean.getTotalHours() + " hours..." );
else if( ! daysOff && total != bean.getTotalHours())
throw new IOException( "Wrong schedule, you were supposed to do EXACTLY " + bean.getTotalHours() + " hours..." );
timeTable.addCell( newCell( total + " h", 20 ));
timeTable.completeRow();
doc.add( timeTable );
}
private PdfPCell newCell( String content, int padding ) {
PdfPCell c = new PdfPCell( new Phrase( content ));
c.setHorizontalAlignment( Element.ALIGN_CENTER );
c.setPaddingTop( padding );
c.setPaddingBottom( padding );
c.setPaddingLeft( padding / 2f );
c.setPaddingRight( padding / 2f );
return c;
}
}
| Format week numbers with 2 digits | src/main/java/net/vzurczak/timesheetgenerator/internal/PdfGenerator.java | Format week numbers with 2 digits | <ide><path>rc/main/java/net/vzurczak/timesheetgenerator/internal/PdfGenerator.java
<ide> // File name
<ide> StringBuilder sb = new StringBuilder();
<ide> sb.append( "Feuille-De-Temps--s" );
<del> sb.append( bean.getStartWeek());
<add> sb.append( String.format( "%02d", bean.getStartWeek()));
<ide> sb.append( "--s" );
<del> sb.append( bean.getEndWeek());
<add> sb.append( String.format( "%02d", bean.getEndWeek()));
<ide> sb.append( "--" );
<ide> sb.append( bean.getYear());
<ide> sb.append( ".pdf" ); |
|
Java | apache-2.0 | 78d2f4105725954418164ae15b53d55e3aee49bf | 0 | dimone-kun/cuba,dimone-kun/cuba,cuba-platform/cuba,cuba-platform/cuba,cuba-platform/cuba,dimone-kun/cuba | /*
* Copyright (c) 2008-2013 Haulmont. All rights reserved.
* Use is subject to license terms, see http://www.cuba-platform.com/license for details.
*/
package com.haulmont.cuba.gui.categories;
import com.haulmont.chile.core.datatypes.Datatypes;
import com.haulmont.chile.core.model.MetaClass;
import com.haulmont.chile.core.model.utils.InstanceUtils;
import com.haulmont.cuba.core.app.DataService;
import com.haulmont.cuba.core.entity.BaseUuidEntity;
import com.haulmont.cuba.core.entity.CategoryAttribute;
import com.haulmont.cuba.core.entity.annotation.SystemLevel;
import com.haulmont.cuba.core.global.LoadContext;
import com.haulmont.cuba.core.global.MessageTools;
import com.haulmont.cuba.core.global.Metadata;
import com.haulmont.cuba.core.global.MetadataTools;
import com.haulmont.cuba.gui.AppConfig;
import com.haulmont.cuba.gui.components.*;
import com.haulmont.cuba.gui.components.validators.DateValidator;
import com.haulmont.cuba.gui.components.validators.DoubleValidator;
import com.haulmont.cuba.gui.components.validators.IntegerValidator;
import com.haulmont.cuba.gui.config.WindowConfig;
import com.haulmont.cuba.gui.config.WindowInfo;
import com.haulmont.cuba.gui.data.CollectionDatasource;
import com.haulmont.cuba.gui.data.Datasource;
import com.haulmont.cuba.gui.data.RuntimePropsDatasource;
import com.haulmont.cuba.gui.data.ValueListener;
import com.haulmont.cuba.gui.data.impl.DatasourceImplementation;
import com.haulmont.cuba.gui.xml.layout.ComponentsFactory;
import org.apache.commons.lang.BooleanUtils;
import javax.annotation.Nullable;
import javax.inject.Inject;
import java.util.*;
/**
* Class that encapsulates editing of {@link com.haulmont.cuba.core.entity.CategoryAttribute} entities.
* <p/>
*
* @author devyatkin
* @version $Id$
*/
public class AttributeEditor extends AbstractEditor<CategoryAttribute> {

    protected Container fieldsContainer;
    protected TextField nameField;
    protected TextField codeField;
    protected CheckBox requiredField;
    protected LookupField screenField;
    protected CheckBox lookupField;
    protected LookupField dataTypeField;

    protected CategoryAttribute attribute;
    // False until dataTypeField fires its first value-change event (which happens
    // programmatically in postInit()); the generate* methods use the negation to
    // decide whether to pre-fill the default value from the edited attribute.
    protected boolean dataTypeFieldInited = false;

    protected DataService dataService;

    @Inject
    protected Datasource attributeDs;

    @Inject
    protected ComponentsFactory factory;

    @Inject
    protected WindowConfig windowConfig;

    @Inject
    protected Metadata metadata;

    @Inject
    protected MetadataTools metadataTools;

    @Inject
    protected MessageTools messageTools;

    protected static final String FIELD_WIDTH = "200px";

    /**
     * Builds the static part of the form: name, code, "required" flag,
     * the data type selector and the "lookup" flag. Type-specific default-value
     * editors are created later by the generate* methods when a type is chosen.
     */
    @Override
    public void init(Map<String, Object> params) {
        getDialogParams().setWidth(251);
        dataService = getDsContext().getDataSupplier();

        fieldsContainer = getComponent("attributeProperties");

        // Attribute name: mandatory, mirrored into the edited entity on every change.
        nameField = factory.createComponent(TextField.NAME);
        nameField.setId("name");
        nameField.setRequired(true);
        nameField.setRequiredMessage(getMessage("nameRequired"));
        nameField.setCaption(getMessage("name"));
        nameField.setWidth(FIELD_WIDTH);
        nameField.setFrame(frame);
        nameField.requestFocus();
        nameField.addListener(new ValueListener() {
            @Override
            public void valueChanged(Object source, String property, Object prevValue, Object value) {
                attribute.setName((String) value);
            }
        });
        fieldsContainer.add(nameField);

        // Optional code, limited to the entity's column length.
        codeField = factory.createComponent(TextField.NAME);
        codeField.setId("code");
        codeField.setCaption(getMessage("code"));
        codeField.setWidth(FIELD_WIDTH);
        codeField.setFrame(frame);
        codeField.addListener(new ValueListener() {
            @Override
            public void valueChanged(Object source, String property, Object prevValue, Object value) {
                attribute.setCode((String) value);
            }
        });
        codeField.setMaxLength(CategoryAttribute.CODE_FIELD_LENGTH);
        fieldsContainer.add(codeField);

        requiredField = factory.createComponent(CheckBox.NAME);
        requiredField.setId("required");
        requiredField.setCaption(getMessage("required"));
        requiredField.setWidth(FIELD_WIDTH);
        requiredField.setFrame(frame);
        requiredField.addListener(new ValueListener() {
            @Override
            public void valueChanged(Object source, String property, Object prevValue, Object value) {
                attribute.setRequired((Boolean) value);
            }
        });
        fieldsContainer.add(requiredField);

        // Data type selector: options are localized names of PropertyType values.
        dataTypeField = factory.createComponent(LookupField.NAME);
        Map<String, Object> options = new HashMap<>();
        RuntimePropsDatasource.PropertyType[] types = RuntimePropsDatasource.PropertyType.values();
        for (RuntimePropsDatasource.PropertyType propertyType : types) {
            options.put(getMessage(propertyType.toString()), propertyType);
        }
        dataTypeField.setWidth(FIELD_WIDTH);
        dataTypeField.setNewOptionAllowed(false);
        dataTypeField.setRequired(true);
        dataTypeField.setRequiredMessage(getMessage("dataTypeRequired"));
        dataTypeField.setOptionsMap(options);
        dataTypeField.setCaption(getMessage("dataType"));
        dataTypeField.setFrame(frame);
        dataTypeField.addListener(new ValueListener() {
            @Override
            public void valueChanged(Object source, String property, Object prevValue, Object value) {
                // Switching from another type: the previously entered default value
                // no longer makes sense, drop it.
                if (prevValue != null) {
                    clearValue(attribute);
                }
                lookupField.setVisible(false);
                // "Required" is meaningless for BOOLEAN (a checkbox always has a
                // value); hide it for BOOLEAN and restore it for every other type.
                requiredField.setVisible(!RuntimePropsDatasource.PropertyType.BOOLEAN.equals(value));
                if (RuntimePropsDatasource.PropertyType.ENTITY.equals(value)) {
                    attribute.setIsEntity(true);
                    generateDefaultEntityValueField(!dataTypeFieldInited);
                    lookupField.setVisible(true);
                } else if (RuntimePropsDatasource.PropertyType.ENUMERATION.equals(value)) {
                    attribute.setIsEntity(false);
                    attribute.setDataType(value.toString());
                    generateDefaultEnumValueField(!dataTypeFieldInited);
                } else {
                    attribute.setDataType(value.toString());
                    attribute.setIsEntity(false);
                    generateDefaultValueField((Enum<RuntimePropsDatasource.PropertyType>) value, !dataTypeFieldInited);
                }
                dataTypeFieldInited = true;
            }
        });
        fieldsContainer.add(dataTypeField);

        // "Lookup" flag: only shown for ENTITY attributes (see listener above).
        lookupField = factory.createComponent(CheckBox.NAME);
        lookupField.setId("lookup");
        lookupField.setCaption(getMessage("lookup"));
        lookupField.setWidth(FIELD_WIDTH);
        lookupField.setFrame(frame);
        lookupField.addListener(new ValueListener() {
            @Override
            public void valueChanged(Object source, String property, Object prevValue, Object value) {
                attribute.setLookup((Boolean) value);
            }
        });
        lookupField.setVisible(false);
        fieldsContainer.add(lookupField);
    }

    /**
     * Rejects the attribute if its name or code duplicates another attribute
     * of the same category (siblings come from the parent datasource).
     */
    @Override
    public void postValidate(ValidationErrors errors) {
        CollectionDatasource parent = (CollectionDatasource) ((DatasourceImplementation) attributeDs).getParent();
        if (parent != null) {
            CategoryAttribute categoryAttribute = getItem();
            for (Object id : parent.getItemIds()) {
                CategoryAttribute ca = (CategoryAttribute) parent.getItem(id);
                if (ca.getName().equals(categoryAttribute.getName())
                        && (!ca.equals(categoryAttribute))) {
                    errors.add(getMessage("uniqueName"));
                    return;
                } else if (ca.getCode() != null && ca.getCode().equals(categoryAttribute.getCode())
                        && (!ca.equals(categoryAttribute))) {
                    errors.add(getMessage("uniqueCode"));
                    return;
                }
            }
        }
    }

    /**
     * Creates the default-value editor matching a simple (non-entity,
     * non-enumeration) data type, replacing any previously generated editor.
     *
     * @param dataType the selected property type
     * @param setValue whether to pre-fill the editor from the edited attribute
     */
    protected void generateDefaultValueField(Enum<RuntimePropsDatasource.PropertyType> dataType, boolean setValue) {
        boolean hasValue = attribute.getDefaultValue() != null && setValue;
        clearComponents();
        if (RuntimePropsDatasource.PropertyType.STRING.equals(dataType)) {
            TextField textField = factory.createComponent(TextField.NAME);
            textField.setId("stringDefaultValueField");
            textField.setCaption(getMessage("defaultValue"));
            textField.setDatatype(Datatypes.getNN(String.class));
            textField.setWidth(FIELD_WIDTH);
            textField.setFrame(frame);
            textField.addListener(new ValueListener() {
                @Override
                public void valueChanged(Object source, String property, Object prevValue, Object value) {
                    attribute.setDefaultString((String) value);
                }
            });
            if (hasValue)
                textField.setValue(attribute.getDefaultString());
            fieldsContainer.add(textField);
        }
        if (RuntimePropsDatasource.PropertyType.DATE.equals(dataType)) {
            // A box holding both the "current date" flag and the explicit date
            // picker; clearComponents() removes the whole box by its id.
            BoxLayout boxLayout = factory.createComponent(BoxLayout.VBOX);
            boxLayout.setId("defaultDateBox");
            CheckBox checkBox = factory.createComponent(CheckBox.NAME);
            checkBox.setId("defaultDateIsCurrent");
            checkBox.setCaption(getMessage("currentDate"));

            final DateField dateField = factory.createComponent(DateField.NAME);
            dateField.setId("dateDefaultValueField");
            dateField.setCaption(getMessage("defaultValue"));
            dateField.setFrame(frame);

            boxLayout.add(checkBox);
            boxLayout.add(dateField);
            fieldsContainer.add(boxLayout);

            // "Current date" and an explicit default date are mutually exclusive.
            checkBox.addListener(new ValueListener() {
                @Override
                public void valueChanged(Object source, String property, Object prevValue, Object value) {
                    if (BooleanUtils.isTrue((Boolean) value)) {
                        dateField.setVisible(false);
                        attribute.setDefaultDateIsCurrent(true);
                    } else {
                        dateField.setVisible(true);
                        attribute.setDefaultDateIsCurrent(false);
                    }
                }
            });

            dateField.addValidator(new DateValidator(messages.getMessage(AppConfig.getMessagesPack(), "validation.invalidDate")));
            dateField.addListener(new ValueListener() {
                @Override
                public void valueChanged(Object source, String property, Object prevValue, Object value) {
                    attribute.setDefaultDate((Date) value);
                }
            });

            if (BooleanUtils.isTrue(attribute.getDefaultDateIsCurrent())) {
                checkBox.setValue(true);
                dateField.setVisible(false);
            }
            if (hasValue)
                dateField.setValue(attribute.getDefaultDate());
        } else if (RuntimePropsDatasource.PropertyType.INTEGER.equals(dataType)) {
            TextField textField = factory.createComponent(TextField.NAME);
            textField.setId("intDefaultValueField");
            textField.setCaption(getMessage("defaultValue"));
            textField.addValidator(new IntegerValidator(messages.getMessage(AppConfig.getMessagesPack(),
                    "validation.invalidNumber")));
            textField.setDatatype(Datatypes.get(Integer.class));
            textField.setWidth(FIELD_WIDTH);
            textField.setFrame(frame);
            textField.addListener(new ValueListener() {
                @Override
                public void valueChanged(Object source, String property, Object prevValue, Object value) {
                    attribute.setDefaultInt((Integer) value);
                }
            });
            if (hasValue)
                textField.setValue(attribute.getDefaultInt());
            fieldsContainer.add(textField);
        } else if (RuntimePropsDatasource.PropertyType.DOUBLE.equals(dataType)) {
            TextField textField = factory.createComponent(TextField.NAME);
            textField.setId("doubleDefaultValueField");
            textField.setCaption(getMessage("defaultValue"));
            textField.setDatatype(Datatypes.get(Double.class));
            textField.setWidth(FIELD_WIDTH);
            textField.setFrame(frame);
            textField.addValidator(new DoubleValidator(
                    messages.getMessage(AppConfig.getMessagesPack(),
                            "validation.invalidNumber")));
            textField.addListener(new ValueListener() {
                @Override
                public void valueChanged(Object source, String property, Object prevValue, Object value) {
                    attribute.setDefaultDouble((Double) value);
                }
            });
            if (hasValue)
                textField.setValue(attribute.getDefaultDouble());
            fieldsContainer.add(textField);
        } else if (RuntimePropsDatasource.PropertyType.BOOLEAN.equals(dataType)) {
            CheckBox checkBox = factory.createComponent(CheckBox.NAME);
            checkBox.setId("booleanDefaultValueField");
            checkBox.setCaption(getMessage("defaultValue"));
            checkBox.setFrame(frame);
            checkBox.addListener(new ValueListener() {
                @Override
                public void valueChanged(Object source, String property, Object prevValue, Object value) {
                    attribute.setDefaultBoolean((Boolean) value);
                }
            });
            if (hasValue)
                checkBox.setValue(attribute.getDefaultBoolean());
            fieldsContainer.add(checkBox);
        }
    }

    /**
     * Creates the editors for an ENUMERATION attribute: the mandatory list of
     * enumeration values and the default value, replacing any previously
     * generated editor.
     *
     * @param setValue whether to pre-fill the editors from the edited attribute
     */
    protected void generateDefaultEnumValueField(boolean setValue) {
        clearComponents();
        boolean hasValue = attribute.getDataType() != null && setValue;
        TextField textField = factory.createComponent(TextField.NAME);
        textField.setId("enumeration");
        textField.setCaption(getMessage("ENUMERATION"));
        textField.setWidth(FIELD_WIDTH);
        textField.setRequired(true);
        textField.setRequiredMessage(getMessage("enumRequired"));
        textField.setFrame(frame);
        textField.addListener(new ValueListener() {
            @Override
            public void valueChanged(Object source, String property, Object prevValue, Object value) {
                attribute.setEnumeration((String) value);
            }
        });
        if (hasValue)
            textField.setValue(attribute.getEnumeration());
        fieldsContainer.add(textField);

        final TextField defaultValueField = factory.createComponent(TextField.NAME);
        defaultValueField.setId("enumDefaultValueField");
        defaultValueField.setCaption(getMessage("defaultValue"));
        defaultValueField.setWidth(FIELD_WIDTH);
        defaultValueField.setFrame(frame);
        fieldsContainer.add(defaultValueField);
        defaultValueField.addListener(new ValueListener() {
            @Override
            public void valueChanged(Object source, String property, Object prevValue, Object value) {
                attribute.setDefaultString((String) value);
            }
        });
        defaultValueField.setValue(attribute.getDefaultValue());
    }

    /**
     * Creates the editors for an ENTITY attribute: the target entity type,
     * the default entity instance and the lookup/browse screen selector,
     * replacing any previously generated editor.
     *
     * @param setValue whether to pre-select the entity type from the edited attribute
     */
    protected void generateDefaultEntityValueField(boolean setValue) {
        clearComponents();
        boolean hasValue = attribute.getDataType() != null && setValue;

        LookupField entityTypeField = factory.createComponent(LookupField.NAME);
        entityTypeField.setId("entityType");
        entityTypeField.setCaption(getMessage("entityType"));
        entityTypeField.setRequired(true);
        entityTypeField.setRequiredMessage(getMessage("entityTypeRequired"));
        entityTypeField.setWidth(FIELD_WIDTH);
        entityTypeField.setFrame(frame);

        // Offer every persistent, non-system-level meta class, keyed by a
        // human-readable caption; TreeMap keeps the options sorted by caption.
        Map<String, Object> options = new TreeMap<>();
        MetaClass entityType = null;
        for (MetaClass metaClass : metadataTools.getAllPersistentMetaClasses()) {
            if (!BooleanUtils.isTrue((Boolean) metaClass.getAnnotations().get(SystemLevel.class.getName()))) {
                options.put(messageTools.getEntityCaption(metaClass) + " (" + metaClass.getName() + ")", metaClass);
                if (hasValue && metaClass.getJavaClass().getName().equals(attribute.getDataType())) {
                    entityType = metaClass;
                }
            }
        }
        entityTypeField.setOptionsMap(options);
        fieldsContainer.add(entityTypeField);

        final LookupField entityField = factory.createComponent(LookupField.NAME);
        entityField.setId("entityDefaultValueField");
        entityField.setCaption(getMessage("defaultValue"));
        entityField.setWidth(FIELD_WIDTH);
        entityField.setFrame(frame);
        fieldsContainer.add(entityField);

        entityTypeField.addListener(new ValueListener() {
            @Override
            public void valueChanged(Object source, String property, Object prevValue, Object value) {
                attribute.setDataType(((MetaClass) value).getJavaClass().getName());
                fillEntities(entityField, ((MetaClass) value).getJavaClass());
            }
        });
        entityTypeField.setValue(entityType);

        screenField = factory.createComponent(LookupField.NAME);
        screenField.setId("screenField");
        screenField.setCaption(getMessage("screen"));
        screenField.setWidth(FIELD_WIDTH);
        screenField.setRequired(true);
        screenField.setRequiredMessage(getMessage("entityScreenRequired"));
        screenField.setFrame(frame);
        fieldsContainer.add(screenField);

        // A lookup attribute uses a generic lookup action, so an explicit screen
        // must not be chosen.
        // NOTE(review): this registers a new listener on lookupField on every
        // invocation; listeners accumulate if the user toggles the type repeatedly.
        lookupField.addListener(new ValueListener() {
            @Override
            public void valueChanged(Object source, String property, @Nullable Object prevValue, Object value) {
                if ((Boolean) value) {
                    screenField.setValue(null);
                    screenField.setEnabled(false);
                } else {
                    screenField.setEnabled(true);
                }
            }
        });

        // Only top-level screens plus *.browse / *.lookup screens are offered.
        Collection<WindowInfo> windowInfoCollection = windowConfig.getWindows();
        List<String> screensList = new ArrayList<>();
        for (WindowInfo windowInfo : windowInfoCollection) {
            if (!windowInfo.getId().contains(".") || windowInfo.getId().contains(".browse") || windowInfo.getId().contains(".lookup"))
                screensList.add(windowInfo.getId());
        }

        screenField.setOptionsList(screensList);
        screenField.addListener(new ValueListener() {
            @Override
            public void valueChanged(Object source, String property, Object prevValue, Object value) {
                attribute.setScreen(value != null ? value.toString() : null);
            }
        });
        screenField.setValue(attribute.getScreen());
    }

    /**
     * Loads all instances of the given entity class (with the _minimal view)
     * into the default-value lookup field and pre-selects the attribute's
     * current default entity, if any.
     *
     * @param entityField the lookup field to populate
     * @param clazz       the target entity class
     */
    protected void fillEntities(LookupField entityField, Class clazz) {
        Map<String, Object> entitiesMap = new HashMap<>();
        String entityClassName = metadata.getClass(clazz).getName();
        LoadContext entitiesContext = new LoadContext(clazz);
        entitiesContext.setQueryString("select a from " + entityClassName + " a");
        entitiesContext.setView("_minimal");
        List<BaseUuidEntity> list = dataService.loadList(entitiesContext);
        for (BaseUuidEntity entity : list) {
            entitiesMap.put(InstanceUtils.getInstanceName(entity), entity);
        }
        entityField.setOptionsMap(entitiesMap);

        if (attribute.getDefaultEntityId() != null) {
            LoadContext entityContext = new LoadContext(clazz);
            LoadContext.Query query = entityContext.setQueryString("select a from " + entityClassName + " a where a.id =:e");
            query.setParameter("e", attribute.getDefaultEntityId());
            entityContext.setView("_minimal");
            BaseUuidEntity entity = dataService.load(entityContext);
            if (entity != null) {
                entityField.setValue(entity);
            } else {
                entityField.setValue(null);
            }
        }

        // NOTE(review): called on every entity-type change, so listeners
        // accumulate on entityField; harmless here because they all write the
        // same value, but worth cleaning up.
        entityField.addListener(new ValueListener() {
            @Override
            public void valueChanged(Object source, String property, Object prevValue, Object value) {
                if (value != null)
                    attribute.setDefaultEntityId(((BaseUuidEntity) value).getId());
                else
                    attribute.setDefaultEntityId(null);
            }
        });
    }

    /**
     * Pushes the edited attribute's state into the form; setting dataTypeField
     * triggers the listener that builds the type-specific editors.
     */
    @Override
    protected void postInit() {
        attribute = getItem();
        nameField.setValue(attribute.getName());
        codeField.setValue(attribute.getCode());
        requiredField.setValue(attribute.getRequired());
        lookupField.setValue(attribute.getLookup());
        if (BooleanUtils.isTrue(attribute.getIsEntity())) {
            dataTypeField.setValue(RuntimePropsDatasource.PropertyType.ENTITY);
        } else {
            if (attribute.getDataType() != null) {
                RuntimePropsDatasource.PropertyType type = RuntimePropsDatasource.PropertyType.valueOf(attribute.getDataType());
                dataTypeField.setValue(type);
            }
        }
        if (screenField != null) {
            screenField.setEnabled(!attribute.getLookup());
        }
        if (dataTypeField.getValue() != null && dataTypeField.getValue().equals(RuntimePropsDatasource.PropertyType.BOOLEAN)) {
            requiredField.setVisible(false);
        }
    }

    /**
     * Removes every dynamically generated component so the next generate*
     * call starts from a clean form.
     * The ids listed here must stay in sync with those assigned in
     * generateDefaultValueField / generateDefaultEnumValueField /
     * generateDefaultEntityValueField; previously this method looked up the
     * obsolete ids "defaultValue" and "entityField", so stale editors piled up
     * whenever the data type was changed.
     */
    protected void clearComponents() {
        String[] generatedComponentIds = {
                "stringDefaultValueField",
                "intDefaultValueField",
                "doubleDefaultValueField",
                "booleanDefaultValueField",
                "enumDefaultValueField",
                "enumeration",
                "entityType",
                "entityDefaultValueField",
                "screenField",
                "defaultDateBox"    // contains both the checkbox and the date picker
        };
        for (String id : generatedComponentIds) {
            Component component = fieldsContainer.getComponent(id);
            if (component != null) {
                fieldsContainer.remove(component);
            }
        }
    }

    /**
     * Drops every type-specific value of the attribute; invoked when the user
     * switches the data type, because the old default no longer applies.
     */
    protected void clearValue(CategoryAttribute attribute) {
        attribute.setDefaultString(null);
        attribute.setDefaultInt(null);
        attribute.setDefaultDouble(null);
        attribute.setDefaultBoolean(null);
        attribute.setDefaultDate(null);
        attribute.setDefaultEntityId(null);
        attribute.setEnumeration(null);
    }
}
| modules/gui/src/com/haulmont/cuba/gui/categories/AttributeEditor.java | /*
* Copyright (c) 2008-2013 Haulmont. All rights reserved.
* Use is subject to license terms, see http://www.cuba-platform.com/license for details.
*/
package com.haulmont.cuba.gui.categories;
import com.haulmont.chile.core.datatypes.Datatypes;
import com.haulmont.chile.core.model.MetaClass;
import com.haulmont.chile.core.model.utils.InstanceUtils;
import com.haulmont.cuba.core.app.DataService;
import com.haulmont.cuba.core.entity.BaseUuidEntity;
import com.haulmont.cuba.core.entity.CategoryAttribute;
import com.haulmont.cuba.core.entity.annotation.SystemLevel;
import com.haulmont.cuba.core.global.LoadContext;
import com.haulmont.cuba.core.global.MessageTools;
import com.haulmont.cuba.core.global.Metadata;
import com.haulmont.cuba.core.global.MetadataTools;
import com.haulmont.cuba.gui.AppConfig;
import com.haulmont.cuba.gui.components.*;
import com.haulmont.cuba.gui.components.validators.DateValidator;
import com.haulmont.cuba.gui.components.validators.DoubleValidator;
import com.haulmont.cuba.gui.components.validators.IntegerValidator;
import com.haulmont.cuba.gui.config.WindowConfig;
import com.haulmont.cuba.gui.config.WindowInfo;
import com.haulmont.cuba.gui.data.CollectionDatasource;
import com.haulmont.cuba.gui.data.Datasource;
import com.haulmont.cuba.gui.data.RuntimePropsDatasource;
import com.haulmont.cuba.gui.data.ValueListener;
import com.haulmont.cuba.gui.data.impl.DatasourceImplementation;
import com.haulmont.cuba.gui.xml.layout.ComponentsFactory;
import org.apache.commons.lang.BooleanUtils;
import javax.annotation.Nullable;
import javax.inject.Inject;
import java.util.*;
/**
* Class that encapsulates editing of {@link com.haulmont.cuba.core.entity.CategoryAttribute} entities.
* <p/>
*
* @author devyatkin
* @version $Id$
*/
public class AttributeEditor extends AbstractEditor<CategoryAttribute> {
protected Container fieldsContainer;
protected TextField nameField;
protected TextField codeField;
protected CheckBox requiredField;
protected LookupField screenField;
protected CheckBox lookupField;
protected LookupField dataTypeField;
protected CategoryAttribute attribute;
protected boolean dataTypeFieldInited = false;
protected DataService dataService;
@Inject
protected Datasource attributeDs;
@Inject
protected ComponentsFactory factory;
@Inject
protected WindowConfig windowConfig;
@Inject
protected Metadata metadata;
@Inject
protected MetadataTools metadataTools;
@Inject
protected MessageTools messageTools;
protected static final String FIELD_WIDTH = "200px";
@Override
public void init(Map<String, Object> params) {
getDialogParams().setWidth(251);
dataService = getDsContext().getDataSupplier();
fieldsContainer = getComponent("attributeProperties");
nameField = factory.createComponent(TextField.NAME);
nameField.setId("name");
nameField.setRequired(true);
nameField.setRequiredMessage(getMessage("nameRequired"));
nameField.setCaption(getMessage("name"));
nameField.setWidth(FIELD_WIDTH);
nameField.setFrame(frame);
nameField.requestFocus();
nameField.addListener(new ValueListener() {
@Override
public void valueChanged(Object source, String property, Object prevValue, Object value) {
attribute.setName((String) value);
}
});
fieldsContainer.add(nameField);
codeField = factory.createComponent(TextField.NAME);
codeField.setId("code");
codeField.setCaption(getMessage("code"));
codeField.setWidth(FIELD_WIDTH);
codeField.setFrame(frame);
codeField.addListener(new ValueListener() {
@Override
public void valueChanged(Object source, String property, Object prevValue, Object value) {
attribute.setCode((String) value);
}
});
codeField.setMaxLength(CategoryAttribute.CODE_FIELD_LENGTH);
fieldsContainer.add(codeField);
requiredField = factory.createComponent(CheckBox.NAME);
requiredField.setId("required");
requiredField.setCaption(getMessage("required"));
requiredField.setWidth(FIELD_WIDTH);
requiredField.setFrame(frame);
requiredField.addListener(new ValueListener() {
@Override
public void valueChanged(Object source, String property, Object prevValue, Object value) {
attribute.setRequired((Boolean) value);
}
});
fieldsContainer.add(requiredField);
dataTypeField = factory.createComponent(LookupField.NAME);
Map<String, Object> options = new HashMap<>();
RuntimePropsDatasource.PropertyType[] types = RuntimePropsDatasource.PropertyType.values();
for (RuntimePropsDatasource.PropertyType propertyType : types) {
options.put(getMessage(propertyType.toString()), propertyType);
}
dataTypeField.setWidth(FIELD_WIDTH);
dataTypeField.setNewOptionAllowed(false);
dataTypeField.setRequired(true);
dataTypeField.setRequiredMessage(getMessage("dataTypeRequired"));
dataTypeField.setOptionsMap(options);
dataTypeField.setCaption(getMessage("dataType"));
dataTypeField.setFrame(frame);
dataTypeField.addListener(new ValueListener() {
@Override
public void valueChanged(Object source, String property, Object prevValue, Object value) {
if (prevValue != null) {
clearValue(attribute);
}
lookupField.setVisible(false);
if (RuntimePropsDatasource.PropertyType.ENTITY.equals(value)) {
attribute.setIsEntity(true);
generateDefaultEntityValueField(!dataTypeFieldInited);
lookupField.setVisible(true);
} else if (RuntimePropsDatasource.PropertyType.ENUMERATION.equals(value)) {
attribute.setIsEntity(false);
attribute.setDataType(value.toString());
generateDefaultEnumValueField(!dataTypeFieldInited);
} else {
if (RuntimePropsDatasource.PropertyType.BOOLEAN.equals(value)) {
requiredField.setVisible(false);
}
attribute.setDataType(value.toString());
attribute.setIsEntity(false);
generateDefaultValueField((Enum<RuntimePropsDatasource.PropertyType>) value, !dataTypeFieldInited);
}
dataTypeFieldInited = true;
}
});
fieldsContainer.add(dataTypeField);
lookupField = factory.createComponent(CheckBox.NAME);
lookupField.setId("lookup");
lookupField.setCaption(getMessage("lookup"));
lookupField.setWidth(FIELD_WIDTH);
lookupField.setFrame(frame);
lookupField.addListener(new ValueListener() {
@Override
public void valueChanged(Object source, String property, Object prevValue, Object value) {
attribute.setLookup((Boolean) value);
}
});
lookupField.setVisible(false);
fieldsContainer.add(lookupField);
}
@Override
public void postValidate(ValidationErrors errors) {
CollectionDatasource parent = (CollectionDatasource) ((DatasourceImplementation) attributeDs).getParent();
if (parent != null) {
CategoryAttribute categoryAttribute = getItem();
for (Object id : parent.getItemIds()) {
CategoryAttribute ca = (CategoryAttribute) parent.getItem(id);
if (ca.getName().equals(categoryAttribute.getName())
&& (!ca.equals(categoryAttribute))) {
errors.add(getMessage("uniqueName"));
return;
} else if (ca.getCode() != null && ca.getCode().equals(categoryAttribute.getCode())
&& (!ca.equals(categoryAttribute))) {
errors.add(getMessage("uniqueCode"));
return;
}
}
}
}
protected void generateDefaultValueField(Enum<RuntimePropsDatasource.PropertyType> dataType, boolean setValue) {
boolean hasValue = (attribute.getDefaultValue() == null || !setValue) ? (false) : (true);
clearComponents();
if (RuntimePropsDatasource.PropertyType.STRING.equals(dataType)) {
TextField textField = factory.createComponent(TextField.NAME);
textField.setId("defaultValue");
textField.setCaption(getMessage("defaultValue"));
textField.setDatatype(Datatypes.getNN(String.class));
textField.setWidth(FIELD_WIDTH);
textField.setFrame(frame);
textField.addListener(new ValueListener() {
@Override
public void valueChanged(Object source, String property, Object prevValue, Object value) {
attribute.setDefaultString((String) value);
}
});
if (hasValue)
textField.setValue(attribute.getDefaultString());
fieldsContainer.add(textField);
}
if (RuntimePropsDatasource.PropertyType.DATE.equals(dataType)) {
BoxLayout boxLayout = factory.createComponent(BoxLayout.VBOX);
boxLayout.setId("defaultDateBox");
CheckBox checkBox = factory.createComponent(CheckBox.NAME);
checkBox.setId("defaultDateIsCurrent");
checkBox.setCaption(getMessage("currentDate"));
final DateField dateField = factory.createComponent(DateField.NAME);
dateField.setId("defaultValue");
dateField.setCaption(getMessage("defaultValue"));
dateField.setFrame(frame);
boxLayout.add(checkBox);
boxLayout.add(dateField);
fieldsContainer.add(boxLayout);
checkBox.addListener(new ValueListener() {
@Override
public void valueChanged(Object source, String property, Object prevValue, Object value) {
if (BooleanUtils.isTrue((Boolean) value)) {
dateField.setVisible(false);
attribute.setDefaultDateIsCurrent(true);
} else {
dateField.setVisible(true);
attribute.setDefaultDateIsCurrent(false);
}
}
});
dateField.addValidator(new DateValidator(messages.getMessage(AppConfig.getMessagesPack(), "validation.invalidDate")));
dateField.addListener(new ValueListener() {
@Override
public void valueChanged(Object source, String property, Object prevValue, Object value) {
attribute.setDefaultDate((Date) value);
}
});
if (BooleanUtils.isTrue(attribute.getDefaultDateIsCurrent())) {
checkBox.setValue(true);
dateField.setVisible(false);
}
if (hasValue)
dateField.setValue(attribute.getDefaultDate());
} else if (RuntimePropsDatasource.PropertyType.INTEGER.equals(dataType)) {
TextField textField = factory.createComponent(TextField.NAME);
textField.setId("defaultValue");
textField.setCaption(getMessage("defaultValue"));
textField.addValidator(new IntegerValidator(messages.getMessage(AppConfig.getMessagesPack(),
"validation.invalidNumber")));
textField.setDatatype(Datatypes.get(Integer.class));
textField.setWidth(FIELD_WIDTH);
textField.setFrame(frame);
textField.addListener(new ValueListener() {
@Override
public void valueChanged(Object source, String property, Object prevValue, Object value) {
attribute.setDefaultInt((Integer) value);
}
});
if (hasValue)
textField.setValue(attribute.getDefaultInt());
fieldsContainer.add(textField);
} else if (RuntimePropsDatasource.PropertyType.DOUBLE.equals(dataType)) {
TextField textField = factory.createComponent(TextField.NAME);
textField.setId("defaultValue");
textField.setCaption(getMessage("defaultValue"));
textField.setDatatype(Datatypes.get(Double.class));
textField.setWidth(FIELD_WIDTH);
textField.setFrame(frame);
textField.addValidator(new DoubleValidator(
messages.getMessage(AppConfig.getMessagesPack(),
"validation.invalidNumber")));
textField.addListener(new ValueListener() {
@Override
public void valueChanged(Object source, String property, Object prevValue, Object value) {
attribute.setDefaultDouble((Double) value);
}
});
if (hasValue)
textField.setValue(attribute.getDefaultDouble());
fieldsContainer.add(textField);
} else if (RuntimePropsDatasource.PropertyType.BOOLEAN.equals(dataType)) {
CheckBox checkBox = factory.createComponent(CheckBox.NAME);
checkBox.setId("defaultValue");
checkBox.setCaption(getMessage("defaultValue"));
checkBox.setFrame(frame);
checkBox.addListener(new ValueListener() {
@Override
public void valueChanged(Object source, String property, Object prevValue, Object value) {
attribute.setDefaultBoolean((Boolean) value);
}
});
if (hasValue)
checkBox.setValue(attribute.getDefaultBoolean());
fieldsContainer.add(checkBox);
}
}
protected void generateDefaultEnumValueField(boolean setValue) {
clearComponents();
boolean hasValue = (attribute.getDataType() == null || !setValue) ? (false) : (true);
TextField textField = factory.createComponent(TextField.NAME);
textField.setId("enumeration");
textField.setCaption(getMessage("ENUMERATION"));
textField.setWidth(FIELD_WIDTH);
textField.setRequired(true);
textField.setRequiredMessage(getMessage("enumRequired"));
textField.setFrame(frame);
textField.addListener(new ValueListener() {
@Override
public void valueChanged(Object source, String property, Object prevValue, Object value) {
attribute.setEnumeration((String) value);
}
});
if (hasValue)
textField.setValue(attribute.getEnumeration());
fieldsContainer.add(textField);
final TextField defaultValueField = factory.createComponent(TextField.NAME);
defaultValueField.setId("defaultValue");
defaultValueField.setCaption(getMessage("defaultValue"));
defaultValueField.setWidth(FIELD_WIDTH);
defaultValueField.setFrame(frame);
fieldsContainer.add(defaultValueField);
defaultValueField.addListener(new ValueListener() {
@Override
public void valueChanged(Object source, String property, Object prevValue, Object value) {
attribute.setDefaultString((String) value);
}
});
defaultValueField.setValue(attribute.getDefaultValue());
}
protected void generateDefaultEntityValueField(boolean setValue) {
clearComponents();
boolean hasValue = (attribute.getDataType() == null || !setValue) ? (false) : (true);
LookupField entityTypeField = factory.createComponent(LookupField.NAME);
entityTypeField.setId("entityType");
entityTypeField.setCaption(getMessage("entityType"));
entityTypeField.setRequired(true);
entityTypeField.setRequiredMessage(getMessage("entityTypeRequired"));
entityTypeField.setWidth(FIELD_WIDTH);
entityTypeField.setFrame(frame);
Map<String, Object> options = new TreeMap<>();
MetaClass entityType = null;
for (MetaClass metaClass : metadataTools.getAllPersistentMetaClasses()) {
if (!BooleanUtils.isTrue((Boolean) metaClass.getAnnotations().get(SystemLevel.class.getName()))) {
options.put(messageTools.getEntityCaption(metaClass) + " (" + metaClass.getName() + ")", metaClass);
if (hasValue && metaClass.getJavaClass().getName().equals(attribute.getDataType())) {
entityType = metaClass;
}
}
}
entityTypeField.setOptionsMap(options);
fieldsContainer.add(entityTypeField);
final LookupField entityField = factory.createComponent(LookupField.NAME);
entityField.setId("entityField");
entityField.setCaption(getMessage("defaultValue"));
entityField.setWidth(FIELD_WIDTH);
fieldsContainer.add(entityField);
entityTypeField.addListener(new ValueListener() {
@Override
public void valueChanged(Object source, String property, Object prevValue, Object value) {
attribute.setDataType(((MetaClass) value).getJavaClass().getName());
fillEntities(entityField, ((MetaClass) value).getJavaClass());
}
});
entityTypeField.setValue(entityType);
screenField = factory.createComponent(LookupField.NAME);
screenField.setId("screenField");
screenField.setCaption(getMessage("screen"));
screenField.setWidth(FIELD_WIDTH);
screenField.setRequired(true);
screenField.setRequiredMessage(getMessage("entityScreenRequired"));
screenField.setFrame(frame);
fieldsContainer.add(screenField);
lookupField.addListener(new ValueListener() {
@Override
public void valueChanged(Object source, String property, @Nullable Object prevValue, Object value) {
if ((Boolean) value) {
screenField.setValue(null);
screenField.setEnabled(false);
} else {
screenField.setEnabled(true);
}
}
});
Collection<WindowInfo> windowInfoCollection = windowConfig.getWindows();
List screensList = new ArrayList();
for (WindowInfo windowInfo : windowInfoCollection) {
if (!windowInfo.getId().contains(".") || windowInfo.getId().contains(".browse") || windowInfo.getId().contains(".lookup"))
screensList.add(windowInfo.getId());
}
screenField.setOptionsList(screensList);
screenField.addListener(new ValueListener() {
@Override
public void valueChanged(Object source, String property, Object prevValue, Object value) {
attribute.setScreen(value != null ? value.toString() : null);
}
});
screenField.setValue(attribute.getScreen());
}
protected void fillEntities(LookupField entityField, Class clazz) {
Map<String, Object> entitiesMap = new HashMap<>();
String entityClassName = metadata.getClass(clazz).getName();
LoadContext entitiesContext = new LoadContext(clazz);
LoadContext.Query query = entitiesContext.setQueryString("select a from " + entityClassName + " a");
entitiesContext.setView("_minimal");
List<BaseUuidEntity> list = dataService.loadList(entitiesContext);
for (BaseUuidEntity entity : list) {
entitiesMap.put(InstanceUtils.getInstanceName(entity), entity);
}
entityField.setOptionsMap(entitiesMap);
if (attribute.getDefaultEntityId() != null) {
LoadContext entityContext = new LoadContext(clazz);
LoadContext.Query query2 = entityContext.setQueryString("select a from " + entityClassName + " a where a.id =:e");
query2.setParameter("e", attribute.getDefaultEntityId());
entityContext.setView("_minimal");
BaseUuidEntity entity = dataService.load(entityContext);
if (entity != null) {
entityField.setValue(entity);
} else {
entityField.setValue(null);
}
}
entityField.addListener(new ValueListener() {
@Override
public void valueChanged(Object source, String property, Object prevValue, Object value) {
if (value != null)
attribute.setDefaultEntityId(((BaseUuidEntity) value).getId());
else
attribute.setDefaultEntityId(null);
}
});
}
@Override
protected void postInit() {
attribute = getItem();
nameField.setValue(attribute.getName());
codeField.setValue(attribute.getCode());
requiredField.setValue(attribute.getRequired());
lookupField.setValue(attribute.getLookup());
if (BooleanUtils.isTrue(attribute.getIsEntity())) {
dataTypeField.setValue(RuntimePropsDatasource.PropertyType.ENTITY);
} else {
if (attribute.getDataType() != null) {
RuntimePropsDatasource.PropertyType type = RuntimePropsDatasource.PropertyType.valueOf(attribute.getDataType());
dataTypeField.setValue(type);
}
}
if (screenField != null) {
screenField.setEnabled(!attribute.getLookup());
}
if (dataTypeField.getValue() != null && dataTypeField.getValue().equals(RuntimePropsDatasource.PropertyType.BOOLEAN)) {
requiredField.setVisible(false);
}
}
protected void clearComponents() {
Component component = fieldsContainer.getComponent("defaultValue");
if (component != null)
fieldsContainer.remove(component);
Component component2 = fieldsContainer.getComponent("entityType");
if (component2 != null)
fieldsContainer.remove(component2);
Component component3 = fieldsContainer.getComponent("entityField");
if (component3 != null) {
fieldsContainer.remove(component3);
}
Component component4 = fieldsContainer.getComponent("enumeration");
if (component4 != null) {
fieldsContainer.remove(component4);
}
Component component5 = fieldsContainer.getComponent("screenField");
if (component5 != null) {
fieldsContainer.remove(component5);
}
Component component6 = fieldsContainer.getComponent("defaultDateBox");
if (component6 != null) {
fieldsContainer.remove(component6);
}
}
protected void clearValue(CategoryAttribute attribute) {
attribute.setDefaultString(null);
attribute.setDefaultInt(null);
attribute.setDefaultDouble(null);
attribute.setDefaultBoolean(null);
attribute.setDefaultDate(null);
attribute.setDefaultEntityId(null);
attribute.setEnumeration(null);
}
}
| New test Id mechanism for web, attribute editor field Ids #PL-2809
| modules/gui/src/com/haulmont/cuba/gui/categories/AttributeEditor.java | New test Id mechanism for web, attribute editor field Ids #PL-2809 | <ide><path>odules/gui/src/com/haulmont/cuba/gui/categories/AttributeEditor.java
<ide> clearComponents();
<ide> if (RuntimePropsDatasource.PropertyType.STRING.equals(dataType)) {
<ide> TextField textField = factory.createComponent(TextField.NAME);
<del> textField.setId("defaultValue");
<add> textField.setId("stringDefaultValueField");
<ide> textField.setCaption(getMessage("defaultValue"));
<ide> textField.setDatatype(Datatypes.getNN(String.class));
<ide> textField.setWidth(FIELD_WIDTH);
<ide> checkBox.setId("defaultDateIsCurrent");
<ide> checkBox.setCaption(getMessage("currentDate"));
<ide> final DateField dateField = factory.createComponent(DateField.NAME);
<del> dateField.setId("defaultValue");
<add> dateField.setId("dateDefaultValueField");
<ide> dateField.setCaption(getMessage("defaultValue"));
<ide> dateField.setFrame(frame);
<ide> boxLayout.add(checkBox);
<ide> dateField.setValue(attribute.getDefaultDate());
<ide> } else if (RuntimePropsDatasource.PropertyType.INTEGER.equals(dataType)) {
<ide> TextField textField = factory.createComponent(TextField.NAME);
<del> textField.setId("defaultValue");
<add> textField.setId("intDefaultValueField");
<ide> textField.setCaption(getMessage("defaultValue"));
<ide> textField.addValidator(new IntegerValidator(messages.getMessage(AppConfig.getMessagesPack(),
<ide> "validation.invalidNumber")));
<ide> fieldsContainer.add(textField);
<ide> } else if (RuntimePropsDatasource.PropertyType.DOUBLE.equals(dataType)) {
<ide> TextField textField = factory.createComponent(TextField.NAME);
<del> textField.setId("defaultValue");
<add> textField.setId("doubleDefaultValueField");
<ide> textField.setCaption(getMessage("defaultValue"));
<ide> textField.setDatatype(Datatypes.get(Double.class));
<ide> textField.setWidth(FIELD_WIDTH);
<ide> fieldsContainer.add(textField);
<ide> } else if (RuntimePropsDatasource.PropertyType.BOOLEAN.equals(dataType)) {
<ide> CheckBox checkBox = factory.createComponent(CheckBox.NAME);
<del> checkBox.setId("defaultValue");
<add> checkBox.setId("booleanDefaultValueField");
<ide> checkBox.setCaption(getMessage("defaultValue"));
<ide> checkBox.setFrame(frame);
<ide> checkBox.addListener(new ValueListener() {
<ide> fieldsContainer.add(textField);
<ide>
<ide> final TextField defaultValueField = factory.createComponent(TextField.NAME);
<del> defaultValueField.setId("defaultValue");
<add> defaultValueField.setId("enumDefaultValueField");
<ide> defaultValueField.setCaption(getMessage("defaultValue"));
<ide> defaultValueField.setWidth(FIELD_WIDTH);
<ide> defaultValueField.setFrame(frame);
<ide> fieldsContainer.add(entityTypeField);
<ide>
<ide> final LookupField entityField = factory.createComponent(LookupField.NAME);
<del> entityField.setId("entityField");
<add> entityField.setId("entityDefaultValueField");
<ide> entityField.setCaption(getMessage("defaultValue"));
<ide> entityField.setWidth(FIELD_WIDTH);
<add> entityField.setFrame(frame);
<ide> fieldsContainer.add(entityField);
<ide>
<ide> entityTypeField.addListener(new ValueListener() { |
|
Java | mit | 2e90bc965f4a079623196e31ce8b17bf7625f889 | 0 | SpongePowered/Sponge,SpongePowered/Sponge,SpongePowered/Sponge | /*
* This file is part of Sponge, licensed under the MIT License (MIT).
*
* Copyright (c) SpongePowered <https://www.spongepowered.org>
* Copyright (c) contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.spongepowered.common.data.provider.item.stack;
import com.google.common.collect.ImmutableSet;
import com.mojang.datafixers.util.Pair;
import net.kyori.adventure.text.serializer.legacy.LegacyComponentSerializer;
import org.spongepowered.api.block.BlockType;
import org.spongepowered.api.data.Keys;
import org.spongepowered.api.effect.potion.PotionEffect;
import org.spongepowered.api.item.ItemRarity;
import org.spongepowered.api.item.ItemType;
import org.spongepowered.api.util.weighted.ChanceTable;
import org.spongepowered.api.util.weighted.NestedTableEntry;
import org.spongepowered.api.util.weighted.WeightedTable;
import org.spongepowered.common.SpongeCommon;
import org.spongepowered.common.accessor.world.item.DiggerItemAccessor;
import org.spongepowered.common.adventure.SpongeAdventure;
import org.spongepowered.common.data.provider.DataProviderRegistrator;
import org.spongepowered.common.util.Constants;
import org.spongepowered.common.util.NBTCollectors;
import java.util.List;
import java.util.Set;
import net.minecraft.core.Registry;
import net.minecraft.nbt.CompoundTag;
import net.minecraft.nbt.ListTag;
import net.minecraft.nbt.StringTag;
import net.minecraft.world.effect.MobEffectInstance;
import net.minecraft.world.food.FoodProperties;
import net.minecraft.world.item.Item;
import net.minecraft.world.item.ItemStack;
import net.minecraft.world.item.Items;
import net.minecraft.world.item.PickaxeItem;
import net.minecraft.world.level.block.Block;
import net.minecraft.world.level.block.entity.AbstractFurnaceBlockEntity;
@SuppressWarnings({"unchecked", "UnstableApiUsage"})
public final class ItemStackData {
private ItemStackData() {
}
// @formatter:off
public static void register(final DataProviderRegistrator registrator) {
registrator
.asMutable(ItemStack.class)
.create(Keys.APPLICABLE_POTION_EFFECTS)
.get(h -> {
if (h.isEdible()) {
final List<Pair<MobEffectInstance,Float>> itemEffects = h.getItem().getFoodProperties().getEffects();
final WeightedTable<PotionEffect> effects = new WeightedTable<>();
final ChanceTable<PotionEffect> chance = new ChanceTable<>();
for (final Pair<MobEffectInstance,Float> effect : itemEffects) {
chance.add((PotionEffect) effect.getFirst(), effect.getSecond());
}
effects.add(new NestedTableEntry<>(1, chance));
return effects;
}
return null;
})
.create(Keys.BURN_TIME)
.get(h -> {
final Integer burnTime = AbstractFurnaceBlockEntity.getFuel().get(h.getItem());
if (burnTime != null && burnTime > 0) {
return burnTime;
}
return null;
})
.create(Keys.CAN_HARVEST)
.get(h -> {
final Item item = h.getItem();
if (item instanceof DiggerItemAccessor && !(item instanceof PickaxeItem)) {
final Set<Block> blocks = ((DiggerItemAccessor) item).accessor$blocks();
return ImmutableSet.copyOf((Set<BlockType>) (Object) blocks);
}
final Set<BlockType> blockTypes = Registry.BLOCK.stream()
.filter(b -> item.isCorrectToolForDrops(b.defaultBlockState()))
.map(BlockType.class::cast)
.collect(ImmutableSet.toImmutableSet());
return blockTypes.isEmpty() ? null : blockTypes;
})
.create(Keys.CONTAINER_ITEM)
.get(h -> (ItemType) h.getItem().getCraftingRemainingItem())
.create(Keys.DISPLAY_NAME)
.get(h -> SpongeAdventure.asAdventure(h.getDisplayName()))
.create(Keys.CUSTOM_MODEL_DATA)
.get(h -> {
final CompoundTag tag = h.getTag();
if (tag == null || !tag.contains(Constants.Item.CUSTOM_MODEL_DATA, Constants.NBT.TAG_INT)) {
return null;
}
return tag.getInt(Constants.Item.CUSTOM_MODEL_DATA);
})
.set((h, v) -> {
final CompoundTag tag = h.getOrCreateTag();
tag.putInt(Constants.Item.CUSTOM_MODEL_DATA, v);
})
.delete(h -> {
final CompoundTag tag = h.getTag();
if (tag != null) {
tag.remove(Constants.Item.CUSTOM_MODEL_DATA);
}
})
.create(Keys.CUSTOM_NAME)
.get(h -> {
if (h.hasCustomHoverName()) {
return SpongeAdventure.asAdventure(h.getHoverName());
}
if (h.getItem() == Items.WRITTEN_BOOK) {
// When no custom name is set on a written book fallback to its title
// The custom name has a higher priority than the title so no setter is needed.
final CompoundTag tag = h.getTag();
if (tag != null) {
final String title = tag.getString(Constants.Item.Book.ITEM_BOOK_TITLE);
return LegacyComponentSerializer.legacySection().deserialize(title);
}
}
return null;
})
.set((h, v) -> h.setHoverName(SpongeAdventure.asVanilla(v)))
.delete(ItemStack::resetHoverName)
.create(Keys.IS_UNBREAKABLE)
.get(h -> {
final CompoundTag tag = h.getTag();
if (tag == null || !tag.contains(Constants.Item.ITEM_UNBREAKABLE, Constants.NBT.TAG_BYTE)) {
return false;
}
return tag.getBoolean(Constants.Item.ITEM_UNBREAKABLE);
})
.set(ItemStackData::setIsUnbrekable)
.delete(h -> ItemStackData.setIsUnbrekable(h, false))
.create(Keys.LORE)
.get(h -> {
final CompoundTag tag = h.getTag();
if (tag == null || !tag.contains(Constants.Item.ITEM_DISPLAY)) {
return null;
}
final CompoundTag displayCompound = tag.getCompound(Constants.Item.ITEM_DISPLAY);
final ListTag list = displayCompound.getList(Constants.Item.ITEM_LORE, Constants.NBT.TAG_STRING);
return list.isEmpty() ? null : SpongeAdventure.json(list.stream().collect(NBTCollectors.toStringList()));
})
.set((h, v) -> {
if (v.isEmpty()) {
ItemStackData.deleteLore(h);
return;
}
final ListTag list = SpongeAdventure.listTagJson(v);
h.getOrCreateTagElement(Constants.Item.ITEM_DISPLAY).put(Constants.Item.ITEM_LORE, list);
})
.delete(ItemStackData::deleteLore)
.create(Keys.MAX_DURABILITY)
.get(h -> h.getItem().canBeDepleted() ? h.getItem().getMaxDamage() : null)
.supports(h -> h.getItem().canBeDepleted())
.create(Keys.ITEM_DURABILITY)
.get(stack -> stack.getMaxDamage() - stack.getDamageValue())
.set((stack, durability) -> stack.setDamageValue(stack.getMaxDamage() - durability))
.supports(h -> h.getItem().canBeDepleted())
.create(Keys.ITEM_RARITY)
.get(stack -> (ItemRarity) (Object) stack.getRarity())
.create(Keys.REPLENISHED_FOOD)
.get(h -> {
if (h.getItem().isEdible()) {
final FoodProperties food = h.getItem().getFoodProperties();
return food == null ? null : food.getNutrition();
}
return null;
})
.supports(h -> h.getItem().isEdible())
.create(Keys.REPLENISHED_SATURATION)
.get(h -> {
if (h.getItem().isEdible()) {
final FoodProperties food = h.getItem().getFoodProperties();
if (food != null) {
// Translate's Minecraft's weird internal value to the actual saturation value
return food.getSaturationModifier() * food.getNutrition() * 2.0;
}
}
return null;
})
.supports(h -> h.getItem().isEdible());
}
// @formatter:on
private static void setIsUnbrekable(final ItemStack stack, final Boolean value) {
if (value == null || (!value && !stack.hasTag())) {
return;
}
final CompoundTag tag = stack.getOrCreateTag();
if (value) {
tag.putBoolean(Constants.Item.ITEM_UNBREAKABLE, true);
} else {
tag.remove(Constants.Item.ITEM_UNBREAKABLE);
}
}
private static void deleteLore(final ItemStack stack) {
final CompoundTag tag = stack.getTag();
if (tag != null && tag.contains(Constants.Item.ITEM_DISPLAY)) {
tag.getCompound(Constants.Item.ITEM_DISPLAY).remove(Constants.Item.ITEM_LORE);
}
}
}
| src/main/java/org/spongepowered/common/data/provider/item/stack/ItemStackData.java | /*
* This file is part of Sponge, licensed under the MIT License (MIT).
*
* Copyright (c) SpongePowered <https://www.spongepowered.org>
* Copyright (c) contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.spongepowered.common.data.provider.item.stack;
import com.google.common.collect.ImmutableSet;
import com.mojang.datafixers.util.Pair;
import net.kyori.adventure.text.serializer.legacy.LegacyComponentSerializer;
import org.spongepowered.api.block.BlockType;
import org.spongepowered.api.data.Keys;
import org.spongepowered.api.effect.potion.PotionEffect;
import org.spongepowered.api.item.ItemRarity;
import org.spongepowered.api.item.ItemType;
import org.spongepowered.api.util.weighted.ChanceTable;
import org.spongepowered.api.util.weighted.NestedTableEntry;
import org.spongepowered.api.util.weighted.WeightedTable;
import org.spongepowered.common.accessor.world.item.DiggerItemAccessor;
import org.spongepowered.common.adventure.SpongeAdventure;
import org.spongepowered.common.data.provider.DataProviderRegistrator;
import org.spongepowered.common.util.Constants;
import org.spongepowered.common.util.NBTCollectors;
import java.util.List;
import java.util.Set;
import net.minecraft.core.Registry;
import net.minecraft.nbt.CompoundTag;
import net.minecraft.nbt.ListTag;
import net.minecraft.nbt.StringTag;
import net.minecraft.world.effect.MobEffectInstance;
import net.minecraft.world.food.FoodProperties;
import net.minecraft.world.item.Item;
import net.minecraft.world.item.ItemStack;
import net.minecraft.world.item.Items;
import net.minecraft.world.item.PickaxeItem;
import net.minecraft.world.level.block.Block;
import net.minecraft.world.level.block.entity.AbstractFurnaceBlockEntity;
@SuppressWarnings({"unchecked", "UnstableApiUsage"})
public final class ItemStackData {
private ItemStackData() {
}
// @formatter:off
public static void register(final DataProviderRegistrator registrator) {
registrator
.asMutable(ItemStack.class)
.create(Keys.APPLICABLE_POTION_EFFECTS)
.get(h -> {
if (h.isEdible()) {
final List<Pair<MobEffectInstance,Float>> itemEffects = h.getItem().getFoodProperties().getEffects();
final WeightedTable<PotionEffect> effects = new WeightedTable<>();
final ChanceTable<PotionEffect> chance = new ChanceTable<>();
for (final Pair<MobEffectInstance,Float> effect : itemEffects) {
chance.add((PotionEffect) effect.getFirst(), effect.getSecond());
}
effects.add(new NestedTableEntry<>(1, chance));
return effects;
}
return null;
})
.create(Keys.BURN_TIME)
.get(h -> {
final Integer burnTime = AbstractFurnaceBlockEntity.getFuel().get(h.getItem());
if (burnTime != null && burnTime > 0) {
return burnTime;
}
return null;
})
.create(Keys.CAN_HARVEST)
.get(h -> {
final Item item = h.getItem();
if (item instanceof DiggerItemAccessor && !(item instanceof PickaxeItem)) {
final Set<Block> blocks = ((DiggerItemAccessor) item).accessor$blocks();
return ImmutableSet.copyOf((Set<BlockType>) (Object) blocks);
}
final Set<BlockType> blockTypes = Registry.BLOCK.stream()
.filter(b -> item.isCorrectToolForDrops(b.defaultBlockState()))
.map(BlockType.class::cast)
.collect(ImmutableSet.toImmutableSet());
return blockTypes.isEmpty() ? null : blockTypes;
})
.create(Keys.CONTAINER_ITEM)
.get(h -> (ItemType) h.getItem().getCraftingRemainingItem())
.create(Keys.DISPLAY_NAME)
.get(h -> SpongeAdventure.asAdventure(h.getDisplayName()))
.create(Keys.CUSTOM_MODEL_DATA)
.get(h -> {
final CompoundTag tag = h.getTag();
if (tag == null || !tag.contains(Constants.Item.CUSTOM_MODEL_DATA, Constants.NBT.TAG_INT)) {
return null;
}
return tag.getInt(Constants.Item.CUSTOM_MODEL_DATA);
})
.set((h, v) -> {
final CompoundTag tag = h.getOrCreateTag();
tag.putInt(Constants.Item.CUSTOM_MODEL_DATA, v);
})
.delete(h -> {
final CompoundTag tag = h.getTag();
if (tag != null) {
tag.remove(Constants.Item.CUSTOM_MODEL_DATA);
}
})
.create(Keys.CUSTOM_NAME)
.get(h -> {
if (h.hasCustomHoverName()) {
return SpongeAdventure.asAdventure(h.getHoverName());
}
if (h.getItem() == Items.WRITTEN_BOOK) {
// When no custom name is set on a written book fallback to its title
// The custom name has a higher priority than the title so no setter is needed.
final CompoundTag tag = h.getTag();
if (tag != null) {
final String title = tag.getString(Constants.Item.Book.ITEM_BOOK_TITLE);
return LegacyComponentSerializer.legacySection().deserialize(title);
}
}
return null;
})
.set((h, v) -> h.setHoverName(SpongeAdventure.asVanilla(v)))
.delete(ItemStack::resetHoverName)
.create(Keys.IS_UNBREAKABLE)
.get(h -> {
final CompoundTag tag = h.getTag();
if (tag == null || !tag.contains(Constants.Item.ITEM_UNBREAKABLE, Constants.NBT.TAG_BYTE)) {
return false;
}
return tag.getBoolean(Constants.Item.ITEM_UNBREAKABLE);
})
.set(ItemStackData::setIsUnbrekable)
.delete(h -> ItemStackData.setIsUnbrekable(h, false))
.create(Keys.LORE)
.get(h -> {
final CompoundTag tag = h.getTag();
if (tag == null || tag.contains(Constants.Item.ITEM_DISPLAY)) {
return null;
}
final ListTag list = tag.getList(Constants.Item.ITEM_LORE, Constants.NBT.TAG_STRING);
return list.isEmpty() ? null : SpongeAdventure.json(list.stream().collect(NBTCollectors.toStringList()));
})
.set((h, v) -> {
if (v.isEmpty()) {
ItemStackData.deleteLore(h);
return;
}
final ListTag list = SpongeAdventure.listTagJson(v);
h.getOrCreateTagElement(Constants.Item.ITEM_DISPLAY).put(Constants.Item.ITEM_LORE, list);
})
.delete(ItemStackData::deleteLore)
.create(Keys.MAX_DURABILITY)
.get(h -> h.getItem().canBeDepleted() ? h.getItem().getMaxDamage() : null)
.supports(h -> h.getItem().canBeDepleted())
.create(Keys.ITEM_DURABILITY)
.get(stack -> stack.getMaxDamage() - stack.getDamageValue())
.set((stack, durability) -> stack.setDamageValue(stack.getMaxDamage() - durability))
.supports(h -> h.getItem().canBeDepleted())
.create(Keys.ITEM_RARITY)
.get(stack -> (ItemRarity) (Object) stack.getRarity())
.create(Keys.REPLENISHED_FOOD)
.get(h -> {
if (h.getItem().isEdible()) {
final FoodProperties food = h.getItem().getFoodProperties();
return food == null ? null : food.getNutrition();
}
return null;
})
.supports(h -> h.getItem().isEdible())
.create(Keys.REPLENISHED_SATURATION)
.get(h -> {
if (h.getItem().isEdible()) {
final FoodProperties food = h.getItem().getFoodProperties();
if (food != null) {
// Translate's Minecraft's weird internal value to the actual saturation value
return food.getSaturationModifier() * food.getNutrition() * 2.0;
}
}
return null;
})
.supports(h -> h.getItem().isEdible());
}
// @formatter:on
private static void setIsUnbrekable(final ItemStack stack, final Boolean value) {
if (value == null || (!value && !stack.hasTag())) {
return;
}
final CompoundTag tag = stack.getOrCreateTag();
if (value) {
tag.putBoolean(Constants.Item.ITEM_UNBREAKABLE, true);
} else {
tag.remove(Constants.Item.ITEM_UNBREAKABLE);
}
}
private static void deleteLore(final ItemStack stack) {
final CompoundTag tag = stack.getTag();
if (tag != null && tag.contains(Constants.Item.ITEM_DISPLAY)) {
tag.getCompound(Constants.Item.ITEM_DISPLAY).remove(Constants.Item.ITEM_LORE);
}
}
}
| Fix data accessing code for Keys.LORE
| src/main/java/org/spongepowered/common/data/provider/item/stack/ItemStackData.java | Fix data accessing code for Keys.LORE | <ide><path>rc/main/java/org/spongepowered/common/data/provider/item/stack/ItemStackData.java
<ide> import org.spongepowered.api.util.weighted.ChanceTable;
<ide> import org.spongepowered.api.util.weighted.NestedTableEntry;
<ide> import org.spongepowered.api.util.weighted.WeightedTable;
<add>import org.spongepowered.common.SpongeCommon;
<ide> import org.spongepowered.common.accessor.world.item.DiggerItemAccessor;
<ide> import org.spongepowered.common.adventure.SpongeAdventure;
<ide> import org.spongepowered.common.data.provider.DataProviderRegistrator;
<ide> .create(Keys.LORE)
<ide> .get(h -> {
<ide> final CompoundTag tag = h.getTag();
<del> if (tag == null || tag.contains(Constants.Item.ITEM_DISPLAY)) {
<add> if (tag == null || !tag.contains(Constants.Item.ITEM_DISPLAY)) {
<ide> return null;
<ide> }
<del>
<del> final ListTag list = tag.getList(Constants.Item.ITEM_LORE, Constants.NBT.TAG_STRING);
<add> final CompoundTag displayCompound = tag.getCompound(Constants.Item.ITEM_DISPLAY);
<add> final ListTag list = displayCompound.getList(Constants.Item.ITEM_LORE, Constants.NBT.TAG_STRING);
<ide> return list.isEmpty() ? null : SpongeAdventure.json(list.stream().collect(NBTCollectors.toStringList()));
<ide> })
<ide> .set((h, v) -> { |
|
Java | apache-2.0 | 23fa2663cea01ef294a67a9e3c05a2add635c6a4 | 0 | francisliu/hbase_namespace,cloudera/hbase,matteobertozzi/hbase,francisliu/hbase_namespace,centiteo/hbase,matteobertozzi/hbase,francisliu/hbase_namespace,Shmuma/hbase-trunk,optimizely/hbase,ryanobjc/hbase,bcopeland/hbase-thrift,centiteo/hbase,lichongxin/hbase-snapshot,matteobertozzi/hbase,jyates/hbase,francisliu/hbase_namespace,jyates/hbase,centiteo/hbase,bcopeland/hbase-thrift,jyates/hbase,lichongxin/hbase-snapshot,StumbleUponArchive/hbase,jyates/hbase,StumbleUponArchive/hbase,lichongxin/hbase-snapshot,ryanobjc/hbase,Shmuma/hbase-trunk,jyates/hbase,Shmuma/hbase-trunk,centiteo/hbase,matteobertozzi/hbase,matteobertozzi/hbase,StumbleUponArchive/hbase,optimizely/hbase,ryanobjc/hbase,matteobertozzi/hbase,StumbleUponArchive/hbase,centiteo/hbase,lichongxin/hbase-snapshot,cloudera/hbase,centiteo/hbase,optimizely/hbase,francisliu/hbase_namespace,matteobertozzi/hbase,francisliu/hbase_namespace,cloudera/hbase,optimizely/hbase,Shmuma/hbase-trunk,jyates/hbase,bcopeland/hbase-thrift,francisliu/hbase_namespace,cloudera/hbase,ryanobjc/hbase,bcopeland/hbase-thrift,Shmuma/hbase-trunk,jyates/hbase,bcopeland/hbase-thrift,StumbleUponArchive/hbase,ryanobjc/hbase,Shmuma/hbase-trunk,centiteo/hbase,lichongxin/hbase-snapshot,Shmuma/hbase-trunk,StumbleUponArchive/hbase,jyates/hbase,matteobertozzi/hbase,optimizely/hbase,francisliu/hbase_namespace,cloudera/hbase,francisliu/hbase_namespace,matteobertozzi/hbase,bcopeland/hbase-thrift,jyates/hbase,optimizely/hbase,cloudera/hbase,bcopeland/hbase-thrift,ryanobjc/hbase,lichongxin/hbase-snapshot,StumbleUponArchive/hbase | /**
* Copyright 2010 The Apache Software Foundation
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.regionserver;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.DroppedSnapshotException;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.NotServingRegionException;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.io.HeapSize;
import org.apache.hadoop.hbase.io.Reference.Range;
import org.apache.hadoop.hbase.io.hfile.BlockCache;
import org.apache.hadoop.hbase.ipc.HRegionInterface;
import org.apache.hadoop.hbase.regionserver.wal.HLog;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.Writables;
import org.apache.hadoop.util.Progressable;
import org.apache.hadoop.util.StringUtils;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.NavigableSet;
import java.util.TreeMap;
import java.util.TreeSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Random;
import java.util.concurrent.ConcurrentSkipListMap;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.ReentrantReadWriteLock;
/**
* HRegion stores data for a certain region of a table. It stores all columns
* for each row. A given table consists of one or more HRegions.
*
* <p>We maintain multiple HStores for a single HRegion.
*
* <p>An Store is a set of rows with some column data; together,
* they make up all the data for the rows.
*
* <p>Each HRegion has a 'startKey' and 'endKey'.
* <p>The first is inclusive, the second is exclusive (except for
* the final region) The endKey of region 0 is the same as
* startKey for region 1 (if it exists). The startKey for the
* first region is null. The endKey for the final region is null.
*
* <p>Locking at the HRegion level serves only one purpose: preventing the
* region from being closed (and consequently split) while other operations
* are ongoing. Each row level operation obtains both a row lock and a region
* read lock for the duration of the operation. While a scanner is being
* constructed, getScanner holds a read lock. If the scanner is successfully
* constructed, it holds a read lock until it is closed. A close takes out a
* write lock and consequently will block for ongoing operations and will block
* new operations from starting while the close is in progress.
*
* <p>An HRegion is defined by its table and its key extent.
*
* <p>It consists of at least one Store. The number of Stores should be
* configurable, so that data which is accessed together is stored in the same
* Store. Right now, we approximate that by building a single Store for
* each column family. (This config info will be communicated via the
* tabledesc.)
*
* <p>The HTableDescriptor contains metainfo about the HRegion's table.
* regionName is a unique identifier for this HRegion. (startKey, endKey]
* defines the keyspace for this HRegion.
*/
public class HRegion implements HConstants, HeapSize { // , Writable{
  static final Log LOG = LogFactory.getLog(HRegion.class);
  // Temporary subdirectories of the region directory used during split/merge.
  static final String SPLITDIR = "splits";
  static final String MERGEDIR = "merges";
  // Set once the region has finished closing; never cleared afterwards.
  final AtomicBoolean closed = new AtomicBoolean(false);
  /* Closing can take some time; use the closing flag if there is stuff we don't
   * want to do while in closing state; e.g. like offer this region up to the
   * master as a region to close if the carrying regionserver is overloaded.
   * Once set, it is never cleared.
   */
  final AtomicBoolean closing = new AtomicBoolean(false);

  //////////////////////////////////////////////////////////////////////////////
  // Members
  //////////////////////////////////////////////////////////////////////////////

  // Rows that currently hold a row lock, ordered by raw byte comparison.
  private final Set<byte[]> lockedRows =
    new TreeSet<byte[]>(Bytes.BYTES_COMPARATOR);
  // Maps client-visible lock ids back to the row they lock.
  private final Map<Integer, byte []> lockIds =
    new HashMap<Integer, byte []>();
  private int lockIdGenerator = 1;
  static private Random rand = new Random();
  // One Store per column family, keyed by family name.
  protected final Map<byte [], Store> stores =
    new ConcurrentSkipListMap<byte [], Store>(Bytes.BYTES_RAWCOMPARATOR);

  //These variables are just used for getting data out of the region, to test on
  //client side
  // private int numStores = 0;
  // private int [] storeSize = null;
  // private byte [] name = null;

  // Running total, in bytes, of all memstores in this region.
  final AtomicLong memstoreSize = new AtomicLong(0);

  // This is the table subdirectory.
  final Path basedir;
  final HLog log;
  final FileSystem fs;
  final Configuration conf;
  final HRegionInfo regionInfo;
  final Path regiondir;
  private final Path regionCompactionDir;
  KeyValue.KVComparator comparator;

  /*
   * Set this when scheduling compaction if want the next compaction to be a
   * major compaction.  Cleared each time through compaction code.
   */
  private volatile boolean forceMajorCompaction = false;
  /*
   * Data structure of write state flags used coordinating flushes,
   * compactions and closes.  Instances are also used as a monitor: waiters
   * block on the object and are woken via notifyAll() when flags change.
   */
  static class WriteState {
    // Set while a memstore flush is happening.
    volatile boolean flushing = false;
    // Set when a flush has been requested.
    volatile boolean flushRequested = false;
    // Set while a compaction is running.
    volatile boolean compacting = false;
    // Gets set in close. If set, cannot compact or flush again.
    volatile boolean writesEnabled = true;
    // Set if region is read-only
    volatile boolean readOnly = false;

    /**
     * Set flags that make this region read-only.
     * @param onOff true to put the region into read-only mode (also disables
     * writes); false to re-enable writes
     */
    synchronized void setReadOnly(final boolean onOff) {
      this.writesEnabled = !onOff;
      this.readOnly = onOff;
    }

    // @return true if the region is in read-only mode.
    boolean isReadOnly() {
      return this.readOnly;
    }

    // @return true if a flush has been requested but not yet cleared.
    boolean isFlushRequested() {
      return this.flushRequested;
    }
  }
  // Shared flag object coordinating flushes, compactions and close.
  private volatile WriteState writestate = new WriteState();

  // Memstore size (bytes) at which a flush is requested.
  final long memstoreFlushSize;
  // Time of the most recent cache flush (millis).
  private volatile long lastFlushTime;
  // Notified when this region wants a flush; may be null.
  final FlushRequester flushListener;
  // Memstore size at which updates are blocked until a flush catches up.
  private final long blockingMemStoreSize;
  final long threadWakeFrequency;
  // Used to guard splits and closes
  private final ReentrantReadWriteLock splitsAndClosesLock =
    new ReentrantReadWriteLock();
  // Read-held while constructing scanners; write-held during close.
  private final ReentrantReadWriteLock newScannerLock =
    new ReentrantReadWriteLock();
  // Stop updates lock
  private final ReentrantReadWriteLock updatesLock =
    new ReentrantReadWriteLock();
  // Serializes split operations against close.
  private final Object splitLock = new Object();
  // Sequence id new edits must be at or above; set in initialize().
  private long minSequenceId;
  private boolean splitRequest;

  /**
   * Name of the region info file that resides just under the region directory.
   */
  public final static String REGIONINFO_FILE = ".regioninfo";

  /**
   * REGIONINFO_FILE as byte array.
   */
  public final static byte [] REGIONINFO_FILE_BYTES =
    Bytes.toBytes(REGIONINFO_FILE);
/**
* Should only be used for testing purposes
*/
public HRegion(){
this.basedir = null;
this.blockingMemStoreSize = 0L;
this.conf = null;
this.flushListener = null;
this.fs = null;
this.memstoreFlushSize = 0L;
this.log = null;
this.regionCompactionDir = null;
this.regiondir = null;
this.regionInfo = null;
this.threadWakeFrequency = 0L;
}
  /**
   * HRegion constructor.
   *
   * @param basedir qualified path of directory where region should be located,
   * usually the table directory.
   * @param log The HLog is the outbound log for any updates to the HRegion
   * (There's a single HLog for all the HRegions on a single HRegionServer.)
   * The log file is a logfile from the previous execution that's
   * custom-computed for this HRegion. The HRegionServer computes and sorts the
   * appropriate log info for this HRegion. If there is a previous log file
   * (implying that the HRegion has been written-to before), then read it from
   * the supplied path.
   * @param fs is the filesystem.
   * @param conf is global configuration settings.
   * @param regionInfo - HRegionInfo that describes the region
   * is new), then read them from the supplied path.
   * @param flushListener an object that implements CacheFlushListener or null
   * making progress to master -- otherwise master might think region deploy
   * failed. Can be null.
   */
  public HRegion(Path basedir, HLog log, FileSystem fs, Configuration conf,
      HRegionInfo regionInfo, FlushRequester flushListener) {
    this.basedir = basedir;
    this.comparator = regionInfo.getComparator();
    this.log = log;
    this.fs = fs;
    this.conf = conf;
    this.regionInfo = regionInfo;
    this.flushListener = flushListener;
    this.threadWakeFrequency = conf.getLong(THREAD_WAKE_FREQUENCY, 10 * 1000);
    // Region directory is <basedir>/<encodedRegionName>.
    String encodedNameStr = Integer.toString(this.regionInfo.getEncodedName());
    this.regiondir = new Path(basedir, encodedNameStr);
    if (LOG.isDebugEnabled()) {
      // Write out region name as string and its encoded name.
      LOG.debug("Creating region " + this + ", encoded=" +
        this.regionInfo.getEncodedName());
    }
    this.regionCompactionDir =
      new Path(getCompactionDir(basedir), encodedNameStr);
    // Flush threshold comes from the table descriptor; if it is just the
    // default, the configuration value (if set) wins instead.
    long flushSize = regionInfo.getTableDesc().getMemStoreFlushSize();
    if (flushSize == HTableDescriptor.DEFAULT_MEMSTORE_FLUSH_SIZE) {
      flushSize = conf.getLong("hbase.hregion.memstore.flush.size",
        HTableDescriptor.DEFAULT_MEMSTORE_FLUSH_SIZE);
    }
    this.memstoreFlushSize = flushSize;
    // Updates block once the memstore reaches this multiple of the flush size.
    this.blockingMemStoreSize = this.memstoreFlushSize *
      conf.getLong("hbase.hregion.memstore.block.multiplier", 2);
  }
/**
* Initialize this region and get it ready to roll.
* Called after construction.
*
* @param initialFiles
* @param reporter
* @throws IOException
*/
public void initialize(Path initialFiles, final Progressable reporter)
throws IOException {
Path oldLogFile = new Path(regiondir, HREGION_OLDLOGFILE_NAME);
moveInitialFilesIntoPlace(this.fs, initialFiles, this.regiondir);
// Write HRI to a file in case we need to recover .META.
checkRegioninfoOnFilesystem();
// Load in all the HStores.
long maxSeqId = -1;
long minSeqIdToRecover = Integer.MAX_VALUE;
for (HColumnDescriptor c : this.regionInfo.getTableDesc().getFamilies()) {
Store store = instantiateHStore(this.basedir, c, oldLogFile, reporter);
this.stores.put(c.getName(), store);
long storeSeqId = store.getMaxSequenceId();
if (storeSeqId > maxSeqId) {
maxSeqId = storeSeqId;
}
long storeSeqIdBeforeRecovery = store.getMaxSeqIdBeforeLogRecovery();
if (storeSeqIdBeforeRecovery < minSeqIdToRecover) {
minSeqIdToRecover = storeSeqIdBeforeRecovery;
}
}
// Play log if one. Delete when done.
doReconstructionLog(oldLogFile, minSeqIdToRecover, maxSeqId, reporter);
if (fs.exists(oldLogFile)) {
if (LOG.isDebugEnabled()) {
LOG.debug("Deleting old log file: " + oldLogFile);
}
fs.delete(oldLogFile, false);
}
// Add one to the current maximum sequence id so new edits are beyond.
this.minSequenceId = maxSeqId + 1;
// Get rid of any splits or merges that were lost in-progress. Clean out
// these directories here on open. We may be opening a region that was
// being split but we crashed in the middle of it all.
FSUtils.deleteDirectory(this.fs, new Path(regiondir, SPLITDIR));
FSUtils.deleteDirectory(this.fs, new Path(regiondir, MERGEDIR));
// See if region is meant to run read-only.
if (this.regionInfo.getTableDesc().isReadOnly()) {
this.writestate.setReadOnly(true);
}
// HRegion is ready to go!
this.writestate.compacting = false;
this.lastFlushTime = System.currentTimeMillis();
LOG.info("region " + this + "/" + this.regionInfo.getEncodedName() +
" available; sequence id is " + this.minSequenceId);
}
/*
* Move any passed HStore files into place (if any). Used to pick up split
* files and any merges from splits and merges dirs.
* @param initialFiles
* @throws IOException
*/
private static void moveInitialFilesIntoPlace(final FileSystem fs,
final Path initialFiles, final Path regiondir)
throws IOException {
if (initialFiles != null && fs.exists(initialFiles)) {
fs.rename(initialFiles, regiondir);
}
}
/**
* @return True if this region has references.
*/
boolean hasReferences() {
for (Map.Entry<byte [], Store> e: this.stores.entrySet()) {
for (Map.Entry<Long, StoreFile> ee:
e.getValue().getStorefiles().entrySet()) {
// Found a reference, return.
if (ee.getValue().isReference()) return true;
}
}
return false;
}
  /*
   * Write out an info file under the region directory.  Useful recovering
   * mangled regions.  The file holds the serialized HRegionInfo followed by
   * two newlines and its toString() rendering for human readability.
   * @throws IOException
   */
  private void checkRegioninfoOnFilesystem() throws IOException {
    // Name of this file has two leading and trailing underscores so it doesn't
    // clash w/ a store/family name.  There is possibility, but assumption is
    // that its slim (don't want to use control character in filename).
    // NOTE(review): the comment above looks stale -- REGIONINFO_FILE is
    // currently ".regioninfo", which has no underscores; confirm intent.
    Path regioninfo = new Path(this.regiondir, REGIONINFO_FILE);
    // If the file already exists with content, there is nothing to do.
    if (this.fs.exists(regioninfo) &&
        this.fs.getFileStatus(regioninfo).getLen() > 0) {
      return;
    }
    FSDataOutputStream out = this.fs.create(regioninfo, true);
    try {
      this.regionInfo.write(out);
      out.write('\n');
      out.write('\n');
      // Append a human-readable form after the serialized record.
      out.write(Bytes.toBytes(this.regionInfo.toString()));
    } finally {
      out.close();
    }
  }
  /**
   * @return Updates to this region need to have a sequence id that is >= to
   * this number.
   */
  long getMinSequenceId() {
    return this.minSequenceId;
  }

  /** @return a HRegionInfo object for this region */
  public HRegionInfo getRegionInfo() {
    return this.regionInfo;
  }

  /** @return true if region is closed */
  public boolean isClosed() {
    return this.closed.get();
  }

  /**
   * @return True if closing process has started.
   */
  public boolean isClosing() {
    return this.closing.get();
  }
  /**
   * Close down this HRegion.  Flush the cache, shut down each HStore, don't
   * service any more calls.
   *
   * <p>This method could take some time to execute, so don't call it from a
   * time-sensitive thread.
   *
   * @return Vector of all the storage files that the HRegion's component
   * HStores make use of.  It's a list of all HStoreFile objects. Returns empty
   * vector if already closed and null if judged that it should not close.
   *
   * @throws IOException
   */
  public List<StoreFile> close() throws IOException {
    // Non-abort close: the memstore is flushed before stores are shut down.
    return close(false);
  }
  /**
   * Close down this HRegion.  Flush the cache unless abort parameter is true,
   * Shut down each HStore, don't service any more calls.
   *
   * This method could take some time to execute, so don't call it from a
   * time-sensitive thread.
   *
   * @param abort true if server is aborting (only during testing)
   * @return Vector of all the storage files that the HRegion's component
   * HStores make use of.  It's a list of HStoreFile objects.  Can be null if
   * we are not to close at this time or we are already closed.
   *
   * @throws IOException
   */
  public List<StoreFile> close(final boolean abort) throws IOException {
    if (isClosed()) {
      LOG.warn("region " + this + " already closed");
      return null;
    }
    this.closing.set(true);
    synchronized (splitLock) {
      synchronized (writestate) {
        // Disable compacting and flushing by background threads for this
        // region.
        writestate.writesEnabled = false;
        LOG.debug("Closing " + this + ": compactions & flushes disabled ");
        // Wait out any in-flight compaction or flush; they notifyAll() on
        // writestate when they complete.
        while (writestate.compacting || writestate.flushing) {
          LOG.debug("waiting for" +
            (writestate.compacting ? " compaction" : "") +
            (writestate.flushing ?
              (writestate.compacting ? "," : "") + " cache flush" :
              "") + " to complete for region " + this);
          try {
            writestate.wait();
          } catch (InterruptedException iex) {
            // continue
          }
        }
      }
      // Block creation of new scanners for the duration of the close.
      newScannerLock.writeLock().lock();
      try {
        splitsAndClosesLock.writeLock().lock();
        LOG.debug("Updates disabled for region, no outstanding scanners on " +
          this);
        try {
          // Write lock means no more row locks can be given out.  Wait on
          // outstanding row locks to come in before we close so we do not drop
          // outstanding updates.
          waitOnRowLocks();
          LOG.debug("No more row locks outstanding on region " + this);
          // Don't flush the cache if we are aborting
          if (!abort) {
            internalFlushcache();
          }
          List<StoreFile> result = new ArrayList<StoreFile>();
          for (Store store: stores.values()) {
            result.addAll(store.close());
          }
          this.closed.set(true);
          LOG.info("Closed " + this);
          return result;
        } finally {
          splitsAndClosesLock.writeLock().unlock();
        }
      } finally {
        newScannerLock.writeLock().unlock();
      }
    }
  }
  //////////////////////////////////////////////////////////////////////////////
  // HRegion accessors
  //////////////////////////////////////////////////////////////////////////////

  /** @return start key for region (inclusive) */
  public byte [] getStartKey() {
    return this.regionInfo.getStartKey();
  }

  /** @return end key for region (exclusive, except for the final region) */
  public byte [] getEndKey() {
    return this.regionInfo.getEndKey();
  }

  /** @return region id */
  public long getRegionId() {
    return this.regionInfo.getRegionId();
  }

  /** @return region name */
  public byte [] getRegionName() {
    return this.regionInfo.getRegionName();
  }

  /** @return region name as string for logging */
  public String getRegionNameAsString() {
    return this.regionInfo.getRegionNameAsString();
  }

  /** @return HTableDescriptor for this region */
  public HTableDescriptor getTableDesc() {
    return this.regionInfo.getTableDesc();
  }

  /** @return HLog in use for this region */
  public HLog getLog() {
    return this.log;
  }

  /** @return Configuration object */
  public Configuration getConf() {
    return this.conf;
  }

  /** @return region directory Path */
  public Path getRegionDir() {
    return this.regiondir;
  }

  /** @return FileSystem being used by this region */
  public FileSystem getFilesystem() {
    return this.fs;
  }

  /** @return the last time the region was flushed */
  public long getLastFlushTime() {
    return this.lastFlushTime;
  }

  //////////////////////////////////////////////////////////////////////////////
  // HRegion maintenance.
  //
  // These methods are meant to be called periodically by the HRegionServer for
  // upkeep.
  //////////////////////////////////////////////////////////////////////////////
/** @return returns size of largest HStore. */
public long getLargestHStoreSize() {
long size = 0;
for (Store h: stores.values()) {
long storeSize = h.getSize();
if (storeSize > size) {
size = storeSize;
}
}
return size;
}
  /*
   * Split the HRegion to create two brand-new ones.  This also closes
   * current HRegion.  Split should be fast since we don't rewrite store files
   * but instead create new 'reference' store files that read off the top and
   * bottom ranges of parent store files.
   * @param splitRow row on which to split region
   * @return two brand-new HRegions or null if a split is not needed
   * @throws IOException
   */
  HRegion [] splitRegion(final byte [] splitRow) throws IOException {
    prepareToSplit();
    synchronized (splitLock) {
      if (closed.get()) {
        return null;
      }
      // Add start/end key checking: hbase-428.
      byte [] startKey = this.regionInfo.getStartKey();
      byte [] endKey = this.regionInfo.getEndKey();
      if (this.comparator.matchingRows(startKey, 0, startKey.length,
          splitRow, 0, splitRow.length)) {
        LOG.debug("Startkey and midkey are same, not splitting");
        return null;
      }
      if (this.comparator.matchingRows(splitRow, 0, splitRow.length,
          endKey, 0, endKey.length)) {
        LOG.debug("Endkey and midkey are same, not splitting");
        return null;
      }
      LOG.info("Starting split of region " + this);
      Path splits = new Path(this.regiondir, SPLITDIR);
      if(!this.fs.exists(splits)) {
        this.fs.mkdirs(splits);
      }
      // Calculate regionid to use.  Can't be less than that of parent else
      // it'll insert into wrong location over in .META. table: HBASE-710.
      long rid = System.currentTimeMillis();
      if (rid < this.regionInfo.getRegionId()) {
        LOG.warn("Clock skew; parent regions id is " +
          this.regionInfo.getRegionId() + " but current time here is " + rid);
        rid = this.regionInfo.getRegionId() + 1;
      }
      // Daughter A covers [startKey, splitRow); daughter B [splitRow, endKey).
      HRegionInfo regionAInfo = new HRegionInfo(this.regionInfo.getTableDesc(),
        startKey, splitRow, false, rid);
      Path dirA = getSplitDirForDaughter(splits, regionAInfo);
      HRegionInfo regionBInfo = new HRegionInfo(this.regionInfo.getTableDesc(),
        splitRow, endKey, false, rid);
      Path dirB = getSplitDirForDaughter(splits, regionBInfo);
      // Now close the HRegion.  Close returns all store files or null if not
      // supposed to close (? What to do in this case? Implement abort of close?)
      // Close also does wait on outstanding rows and calls a flush just-in-case.
      List<StoreFile> hstoreFilesToSplit = close(false);
      if (hstoreFilesToSplit == null) {
        LOG.warn("Close came back null (Implement abort of close?)");
        throw new RuntimeException("close returned empty vector of HStoreFiles");
      }
      // Split each store file: a 'bottom' reference for daughter A, a 'top'
      // reference for daughter B.
      for(StoreFile h: hstoreFilesToSplit) {
        StoreFile.split(fs,
          Store.getStoreHomedir(splits, regionAInfo.getEncodedName(),
            h.getFamily()),
          h, splitRow, Range.bottom);
        StoreFile.split(fs,
          Store.getStoreHomedir(splits, regionBInfo.getEncodedName(),
            h.getFamily()),
          h, splitRow, Range.top);
      }
      // Create a region instance and then move the splits into place under
      // regionA and regionB.
      HRegion regionA =
        new HRegion(basedir, log, fs, conf, regionAInfo, null);
      moveInitialFilesIntoPlace(this.fs, dirA, regionA.getRegionDir());
      HRegion regionB =
        new HRegion(basedir, log, fs, conf, regionBInfo, null);
      moveInitialFilesIntoPlace(this.fs, dirB, regionB.getRegionDir());
      HRegion regions[] = new HRegion [] {regionA, regionB};
      return regions;
    }
  }
  /*
   * Get the daughter directories in the splits dir.  The splits dir is under
   * the parent regions' directory.
   * @param splits the parent region's splits directory
   * @param hri region info of the daughter region
   * @return Path to split dir.
   * @throws IOException if the target directory unexpectedly already exists
   */
  private Path getSplitDirForDaughter(final Path splits, final HRegionInfo hri)
  throws IOException {
    Path d =
      new Path(splits, Integer.toString(hri.getEncodedName()));
    if (fs.exists(d)) {
      // This should never happen; the splits dir will be newly made when we
      // come in here.  Even if we crashed midway through a split, the reopen
      // of the parent region clears out the dir in its initialize method.
      throw new IOException("Cannot split; target file collision at " + d);
    }
    return d;
  }
  /**
   * Hook called just before a split; the default implementation does nothing.
   * Subclasses may override to do pre-split work.
   */
  protected void prepareToSplit() {
    // nothing
  }

  /*
   * @param dir
   * @return compaction directory for the passed in <code>dir</code>
   */
  static Path getCompactionDir(final Path dir) {
    return new Path(dir, HREGION_COMPACTIONDIR_NAME);
  }

  /*
   * Do preparation for pending compaction.
   * Clean out any vestiges of previous failed compactions.
   * @throws IOException
   */
  private void doRegionCompactionPrep() throws IOException {
    doRegionCompactionCleanup();
  }

  /*
   * Removes the compaction directory for this Store.
   * @throws IOException
   */
  private void doRegionCompactionCleanup() throws IOException {
    FSUtils.deleteDirectory(this.fs, this.regionCompactionDir);
  }

  // Request (or clear a request) that the next compaction be a major one.
  void setForceMajorCompaction(final boolean b) {
    this.forceMajorCompaction = b;
  }

  // @return whether the next compaction has been forced to be major.
  boolean getForceMajorCompaction() {
    return this.forceMajorCompaction;
  }
  /**
   * Called by compaction thread and after region is opened to compact the
   * HStores if necessary.
   *
   * <p>This operation could block for a long time, so don't call it from a
   * time-sensitive thread.
   *
   * Note that no locking is necessary at this level because compaction only
   * conflicts with a region split, and that cannot happen because the region
   * server does them sequentially and not in parallel.
   *
   * @return mid key if split is needed
   * @throws IOException
   */
  public byte [] compactStores() throws IOException {
    // Consume the force-major flag: it applies to this compaction only.
    boolean majorCompaction = this.forceMajorCompaction;
    this.forceMajorCompaction = false;
    return compactStores(majorCompaction);
  }
  /*
   * Called by compaction thread and after region is opened to compact the
   * HStores if necessary.
   *
   * <p>This operation could block for a long time, so don't call it from a
   * time-sensitive thread.
   *
   * Note that no locking is necessary at this level because compaction only
   * conflicts with a region split, and that cannot happen because the region
   * server does them sequentially and not in parallel.
   *
   * @param majorCompaction True to force a major compaction regardless of thresholds
   * @return split row if split is needed
   * @throws IOException
   */
  byte [] compactStores(final boolean majorCompaction)
  throws IOException {
    if (this.closing.get() || this.closed.get()) {
      LOG.debug("Skipping compaction on " + this + " because closing/closed");
      return null;
    }
    splitsAndClosesLock.readLock().lock();
    try {
      byte [] splitRow = null;
      if (this.closed.get()) {
        return splitRow;
      }
      try {
        synchronized (writestate) {
          // Only one compaction at a time; also honor the writes-enabled flag
          // which is cleared by close().
          if (!writestate.compacting && writestate.writesEnabled) {
            writestate.compacting = true;
          } else {
            LOG.info("NOT compacting region " + this +
                ": compacting=" + writestate.compacting + ", writesEnabled=" +
                writestate.writesEnabled);
            return splitRow;
          }
        }
        LOG.info("Starting" + (majorCompaction? " major " : " ") +
            "compaction on region " + this);
        long startTime = System.currentTimeMillis();
        doRegionCompactionPrep();
        // Track the split row of the biggest store; that is the row returned
        // as the suggested split point.
        long maxSize = -1;
        for (Store store: stores.values()) {
          final Store.StoreSize ss = store.compact(majorCompaction);
          if (ss != null && ss.getSize() > maxSize) {
            maxSize = ss.getSize();
            splitRow = ss.getSplitRow();
          }
        }
        doRegionCompactionCleanup();
        String timeTaken = StringUtils.formatTimeDiff(System.currentTimeMillis(),
            startTime);
        LOG.info("compaction completed on region " + this + " in " + timeTaken);
      } finally {
        synchronized (writestate) {
          // Wake anyone (e.g. close()) waiting for the compaction to finish.
          writestate.compacting = false;
          writestate.notifyAll();
        }
      }
      return splitRow;
    } finally {
      splitsAndClosesLock.readLock().unlock();
    }
  }
  /**
   * Flush the cache.
   *
   * When this method is called the cache will be flushed unless:
   * <ol>
   *   <li>the cache is empty</li>
   *   <li>the region is closed.</li>
   *   <li>a flush is already in progress</li>
   *   <li>writes are disabled</li>
   * </ol>
   *
   * <p>This method may block for some time, so it should not be called from a
   * time-sensitive thread.
   *
   * @return true if cache was flushed
   *
   * @throws IOException
   * @throws DroppedSnapshotException Thrown when replay of hlog is required
   * because a Snapshot was not properly persisted.
   */
  public boolean flushcache() throws IOException {
    if (this.closed.get()) {
      return false;
    }
    synchronized (writestate) {
      // Claim the flushing flag; bail out if another flush is running or
      // writes have been disabled by close().
      if (!writestate.flushing && writestate.writesEnabled) {
        this.writestate.flushing = true;
      } else {
        if(LOG.isDebugEnabled()) {
          LOG.debug("NOT flushing memstore for region " + this +
            ", flushing=" +
              writestate.flushing + ", writesEnabled=" +
              writestate.writesEnabled);
        }
        return false;
      }
    }
    try {
      // Prevent splits and closes
      splitsAndClosesLock.readLock().lock();
      try {
        return internalFlushcache();
      } finally {
        splitsAndClosesLock.readLock().unlock();
      }
    } finally {
      synchronized (writestate) {
        // Clear the flags and wake anyone waiting (e.g. close()).
        writestate.flushing = false;
        this.writestate.flushRequested = false;
        writestate.notifyAll();
      }
    }
  }
  /**
   * Flushing the cache is a little tricky.  We have a lot of updates in the
   * memstore, all of which have also been written to the log.  We need to
   * write those updates in the memstore out to disk, while being able to
   * process reads/writes as much as possible during the flush operation.  Also,
   * the log has to state clearly the point in time at which the memstore was
   * flushed.  (That way, during recovery, we know when we can rely on the
   * on-disk flushed structures and when we have to recover the memstore from
   * the log.)
   *
   * <p>So, we have a three-step process:
   *
   * <ul><li>A. Flush the memstore to the on-disk stores, noting the current
   * sequence ID for the log.<li>
   *
   * <li>B. Write a FLUSHCACHE-COMPLETE message to the log, using the sequence
   * ID that was current at the time of memstore-flush.</li>
   *
   * <li>C. Get rid of the memstore structures that are now redundant, as
   * they've been flushed to the on-disk HStores.</li>
   * </ul>
   * <p>This method is protected, but can be accessed via several public
   * routes.
   *
   * <p> This method may block for some time.
   *
   * @return true if the region needs compacting
   *
   * @throws IOException
   * @throws DroppedSnapshotException Thrown when replay of hlog is required
   * because a Snapshot was not properly persisted.
   */
  private boolean internalFlushcache() throws IOException {
    final long startTime = System.currentTimeMillis();
    // Clear flush flag.
    // Record latest flush time
    this.lastFlushTime = startTime;
    // If nothing to flush, return and avoid logging start/stop flush.
    if (this.memstoreSize.get() <= 0) {
      return false;
    }
    if (LOG.isDebugEnabled()) {
      LOG.debug("Started memstore flush for region " + this +
        ". Current region memstore size " +
        StringUtils.humanReadableInt(this.memstoreSize.get()));
    }
    // Stop updates while we snapshot the memstore of all stores. We only have
    // to do this for a moment.  Its quick.  The subsequent sequence id that
    // goes into the HLog after we've flushed all these snapshots also goes
    // into the info file that sits beside the flushed files.
    // We also set the memstore size to zero here before we allow updates
    // again so its value will represent the size of the updates received
    // during the flush
    long sequenceId = -1L;
    long completeSequenceId = -1L;
    this.updatesLock.writeLock().lock();
    // Get current size of memstores.
    final long currentMemStoreSize = this.memstoreSize.get();
    try {
      for (Store s: stores.values()) {
        s.snapshot();
      }
      sequenceId = log.startCacheFlush();
      completeSequenceId = this.getCompleteCacheFlushSequenceId(sequenceId);
    } finally {
      this.updatesLock.writeLock().unlock();
    }
    // Any failure from here on out will be catastrophic requiring server
    // restart so hlog content can be replayed and put back into the memstore.
    // Otherwise, the snapshot content while backed up in the hlog, it will not
    // be part of the current running servers state.
    boolean compactionRequested = false;
    try {
      // A.  Flush memstore to all the HStores.
      // Keep running vector of all store files that includes both old and the
      // just-made new flush store file.
      for (Store hstore: stores.values()) {
        boolean needsCompaction = hstore.flushCache(completeSequenceId);
        if (needsCompaction) {
          compactionRequested = true;
        }
      }
      // Set down the memstore size by amount of flush.
      this.memstoreSize.addAndGet(-currentMemStoreSize);
    } catch (Throwable t) {
      // An exception here means that the snapshot was not persisted.
      // The hlog needs to be replayed so its content is restored to memstore.
      // Currently, only a server restart will do this.
      // We used to only catch IOEs but its possible that we'd get other
      // exceptions -- e.g. HBASE-659 was about an NPE -- so now we catch
      // all and sundry.
      this.log.abortCacheFlush();
      DroppedSnapshotException dse = new DroppedSnapshotException("region: " +
          Bytes.toStringBinary(getRegionName()));
      dse.initCause(t);
      throw dse;
    }
    // If we get to here, the HStores have been written. If we get an
    // error in completeCacheFlush it will release the lock it is holding
    // B.  Write a FLUSHCACHE-COMPLETE message to the log.
    // This tells future readers that the HStores were emitted correctly,
    // and that all updates to the log for this regionName that have lower
    // log-sequence-ids can be safely ignored.
    this.log.completeCacheFlush(getRegionName(),
      regionInfo.getTableDesc().getName(), completeSequenceId);
    // C. Finally notify anyone waiting on memstore to clear:
    // e.g. checkResources().
    synchronized (this) {
      notifyAll(); // FindBugs NN_NAKED_NOTIFY
    }
    if (LOG.isDebugEnabled()) {
      long now = System.currentTimeMillis();
      LOG.debug("Finished memstore flush of ~" +
        StringUtils.humanReadableInt(currentMemStoreSize) + " for region " +
        this + " in " + (now - startTime) + "ms, sequence id=" + sequenceId +
        ", compaction requested=" + compactionRequested);
    }
    return compactionRequested;
  }
  /**
   * Get the sequence number to be associated with this cache flush.  Used by
   * TransactionalRegion to not complete pending transactions.
   *
   * @param currentSequenceId sequence id the flush would otherwise use
   * @return sequence id to complete the cache flush with
   */
  protected long getCompleteCacheFlushSequenceId(long currentSequenceId) {
    // Base implementation: complete the flush at the current sequence id.
    return currentSequenceId;
  }
  //////////////////////////////////////////////////////////////////////////////
  // get() methods for client use.
  //////////////////////////////////////////////////////////////////////////////

  /**
   * Return all the data for the row that matches <i>row</i> exactly,
   * or the one that immediately precedes it, consulting only the catalog
   * family.
   *
   * @param row row key
   * @return map of values
   * @throws IOException
   */
  Result getClosestRowBefore(final byte [] row)
  throws IOException{
    return getClosestRowBefore(row, HConstants.CATALOG_FAMILY);
  }
/**
* Return all the data for the row that matches <i>row</i> exactly,
* or the one that immediately preceeds it, at or immediately before
* <i>ts</i>.
*
* @param row row key
* @param family
* @return map of values
* @throws IOException
*/
public Result getClosestRowBefore(final byte [] row, final byte [] family)
throws IOException {
// look across all the HStores for this region and determine what the
// closest key is across all column families, since the data may be sparse
KeyValue key = null;
checkRow(row);
splitsAndClosesLock.readLock().lock();
try {
Store store = getStore(family);
KeyValue kv = new KeyValue(row, HConstants.LATEST_TIMESTAMP);
// get the closest key. (HStore.getRowKeyAtOrBefore can return null)
key = store.getRowKeyAtOrBefore(kv);
if (key == null) {
return null;
}
// This will get all results for this store. TODO: Do we need to do this?
Get get = new Get(key.getRow());
List<KeyValue> results = new ArrayList<KeyValue>();
store.get(get, null, results);
return new Result(results);
} finally {
splitsAndClosesLock.readLock().unlock();
}
}
  /**
   * Return an iterator that scans over the HRegion, returning the indicated
   * columns and rows specified by the {@link Scan}.
   * <p>
   * This Iterator must be closed by the caller.
   *
   * @param scan configured {@link Scan}
   * @return InternalScanner
   * @throws IOException
   */
  public InternalScanner getScanner(Scan scan)
  throws IOException {
    // No additional scanners beyond what the stores provide.
    return getScanner(scan, null);
  }
protected InternalScanner getScanner(Scan scan, List<KeyValueScanner> additionalScanners) throws IOException {
newScannerLock.readLock().lock();
try {
if (this.closed.get()) {
throw new IOException("Region " + this + " closed");
}
// Verify families are all valid
if(scan.hasFamilies()) {
for(byte [] family : scan.getFamilyMap().keySet()) {
checkFamily(family);
}
} else { // Adding all families to scanner
for(byte[] family: regionInfo.getTableDesc().getFamiliesKeys()){
scan.addFamily(family);
}
}
return new RegionScanner(scan, additionalScanners);
} finally {
newScannerLock.readLock().unlock();
}
}
  //////////////////////////////////////////////////////////////////////////////
  // set() methods for client use.
  //////////////////////////////////////////////////////////////////////////////

  /**
   * Apply the given delete to this region.
   * @param delete the delete to apply
   * @param lockid id of an already-held row lock, or null to acquire a new
   * one for the duration of the call
   * @param writeToWAL whether the edits should be written to the WAL
   * @throws IOException
   */
  public void delete(Delete delete, Integer lockid, boolean writeToWAL)
  throws IOException {
    checkReadOnly();
    checkResources();
    splitsAndClosesLock.readLock().lock();
    Integer lid = null;
    try {
      byte [] row = delete.getRow();
      // If we did not pass an existing row lock, obtain a new one
      lid = getLock(lockid, row);
      //Check to see if this is a deleteRow insert
      if(delete.getFamilyMap().isEmpty()){
        // No families specified: delete the row across every family.
        for(byte [] family : regionInfo.getTableDesc().getFamiliesKeys()){
          // Don't eat the timestamp
          delete.deleteFamily(family, delete.getTimeStamp());
        }
      } else {
        for(byte [] family : delete.getFamilyMap().keySet()) {
          if(family == null) {
            throw new NoSuchColumnFamilyException("Empty family is invalid");
          }
          checkFamily(family);
        }
      }
      for(Map.Entry<byte[], List<KeyValue>> e: delete.getFamilyMap().entrySet()) {
        byte [] family = e.getKey();
        delete(family, e.getValue(), writeToWAL);
      }
    } finally {
      // Only release the row lock if this call acquired it.
      if(lockid == null) releaseRowLock(lid);
      splitsAndClosesLock.readLock().unlock();
    }
  }
  /**
   * Delete the given list of KeyValues from the named family's store.
   * Entries carrying the LATEST timestamp are rewritten to the timestamp of
   * the most recent matching cell, or dropped when no such cell exists.
   * @param family column family the KeyValues belong to
   * @param kvs delete markers to apply (may be mutated in place)
   * @param writeToWAL whether the edits should be appended to the WAL
   * @throws IOException
   */
  public void delete(byte [] family, List<KeyValue> kvs, boolean writeToWAL)
  throws IOException {
    long now = System.currentTimeMillis();
    byte [] byteNow = Bytes.toBytes(now);
    boolean flush = false;
    this.updatesLock.readLock().lock();
    try {
      long size = 0;
      Store store = getStore(family);
      Iterator<KeyValue> kvIterator = kvs.iterator();
      while(kvIterator.hasNext()) {
        KeyValue kv = kvIterator.next();
        // Check if time is LATEST, change to time of most recent addition if so
        // This is expensive.
        if (kv.isLatestTimestamp() && kv.isDeleteType()) {
          List<KeyValue> result = new ArrayList<KeyValue>(1);
          Get g = new Get(kv.getRow());
          NavigableSet<byte []> qualifiers =
            new TreeSet<byte []>(Bytes.BYTES_COMPARATOR);
          byte [] q = kv.getQualifier();
          if(q == null) q = HConstants.EMPTY_BYTE_ARRAY;
          qualifiers.add(q);
          get(store, g, qualifiers, result);
          if (result.isEmpty()) {
            // Nothing to delete
            kvIterator.remove();
            continue;
          }
          if (result.size() > 1) {
            throw new RuntimeException("Unexpected size: " + result.size());
          }
          // Overwrite the LATEST timestamp in-place with the found cell's.
          KeyValue getkv = result.get(0);
          Bytes.putBytes(kv.getBuffer(), kv.getTimestampOffset(),
            getkv.getBuffer(), getkv.getTimestampOffset(), Bytes.SIZEOF_LONG);
        } else {
          kv.updateLatestStamp(byteNow);
        }
        // We must do this in this loop because it could affect
        // the above get to find the next timestamp to remove.
        // This is the case when there are multiple deletes for the same column.
        size = this.memstoreSize.addAndGet(store.delete(kv));
      }
      if (writeToWAL) {
        this.log.append(regionInfo,
          regionInfo.getTableDesc().getName(), kvs, now);
      }
      flush = isFlushSize(size);
    } finally {
      this.updatesLock.readLock().unlock();
    }
    if (flush) {
      // Request a cache flush.  Do it outside update lock.
      requestFlush();
    }
  }
/**
 * Put a cell update into this region, acquiring a fresh row lock and using
 * the put's own writeToWAL preference.
 * @param put cells to add
 * @throws IOException on validation or write failure
 */
public void put(Put put) throws IOException {
this.put(put, null, put.getWriteToWAL());
}
/**
 * Put a cell update into this region, acquiring a fresh row lock.
 * @param put cells to add
 * @param writeToWAL if true, append the edits to the write-ahead log
 * @throws IOException on validation or write failure
 */
public void put(Put put, boolean writeToWAL) throws IOException {
this.put(put, null, writeToWAL);
}
/**
 * Put a cell update into this region under an existing row lock, using
 * the put's own writeToWAL preference.
 * @param put cells to add
 * @param lockid existing row lock id, or null to acquire one here
 * @throws IOException on validation or write failure
 */
public void put(Put put, Integer lockid) throws IOException {
this.put(put, lockid, put.getWriteToWAL());
}
/**
 * Put a cell update into this region. Validates each family, stamps
 * LATEST_TIMESTAMP edits with the current time, then writes per-family.
 * @param put cells to add
 * @param lockid existing row lock id, or null to acquire one here
 * @param writeToWAL if true, append the edits to the write-ahead log
 * @throws IOException if read-only, a family is invalid, or the lock id
 * is not held
 */
public void put(Put put, Integer lockid, boolean writeToWAL)
throws IOException {
checkReadOnly();
// Do a rough check that we have resources to accept a write. The check is
// 'rough' in that between the resource check and the call to obtain a
// read lock, resources may run out. For now, the thought is that this
// will be extremely rare; we'll deal with it when it happens.
checkResources();
splitsAndClosesLock.readLock().lock();
try {
// We obtain a per-row lock, so other clients will block while one client
// performs an update. The read lock is released by the client calling
// #commit or #abort or if the HRegionServer lease on the lock expires.
// See HRegionServer#RegionListener for how the expire on HRegionServer
// invokes a HRegion#abort.
byte [] row = put.getRow();
// If we did not pass an existing row lock, obtain a new one
Integer lid = getLock(lockid, row);
byte [] now = Bytes.toBytes(System.currentTimeMillis());
try {
for (Map.Entry<byte[], List<KeyValue>> entry:
put.getFamilyMap().entrySet()) {
byte [] family = entry.getKey();
checkFamily(family);
List<KeyValue> puts = entry.getValue();
// updateKeys returns false for a null/empty edit list; skip those.
if (updateKeys(puts, now)) {
put(family, puts, writeToWAL);
}
}
} finally {
// Only release the lock if we acquired it ourselves.
if(lockid == null) releaseRowLock(lid);
}
} finally {
splitsAndClosesLock.readLock().unlock();
}
}
//TODO, Think that gets/puts and deletes should be refactored a bit so that
//the getting of the lock happens before, so that you would just pass it into
//the methods. So in the case of checkAndPut you could just do lockRow,
//get, put, unlockRow or something
/**
 * Atomically check a single cell's current value against
 * <code>expectedValue</code> and, if it matches, apply <code>put</code>.
 * A missing cell matches an empty (zero-length) expected value.
 * All of this happens under the row lock, so the check-then-put is atomic
 * with respect to other writers of the same row.
 * @param row
 * @param family
 * @param qualifier
 * @param expectedValue value the cell must currently hold (empty = absent)
 * @param put
 * @param lockId existing row lock id, or null to acquire one here
 * @param writeToWAL
 * @throws IOException
 * @return true if the new put was execute, false otherwise
 */
public boolean checkAndPut(byte [] row, byte [] family, byte [] qualifier,
byte [] expectedValue, Put put, Integer lockId, boolean writeToWAL)
throws IOException{
checkReadOnly();
//TODO, add check for value length or maybe even better move this to the
//client if this becomes a global setting
checkResources();
splitsAndClosesLock.readLock().lock();
try {
Get get = new Get(row, put.getRowLock());
checkFamily(family);
get.addColumn(family, qualifier);
byte [] now = Bytes.toBytes(System.currentTimeMillis());
// Lock row
Integer lid = getLock(lockId, get.getRow());
List<KeyValue> result = new ArrayList<KeyValue>();
try {
//Getting data
for(Map.Entry<byte[],NavigableSet<byte[]>> entry:
get.getFamilyMap().entrySet()) {
get(this.stores.get(entry.getKey()), get, entry.getValue(), result);
}
boolean matches = false;
if (result.size() == 0 && expectedValue.length == 0) {
matches = true;
} else if(result.size() == 1) {
//Compare the expected value with the actual value
byte [] actualValue = result.get(0).getValue();
matches = Bytes.equals(expectedValue, actualValue);
}
//If matches put the new put
if(matches) {
for(Map.Entry<byte[], List<KeyValue>> entry :
put.getFamilyMap().entrySet()) {
byte [] fam = entry.getKey();
checkFamily(fam);
List<KeyValue> puts = entry.getValue();
if(updateKeys(puts, now)) {
put(fam, puts, writeToWAL);
}
}
return true;
}
return false;
} finally {
// Only release the lock if we acquired it ourselves.
if(lockId == null) releaseRowLock(lid);
}
} finally {
splitsAndClosesLock.readLock().unlock();
}
}
/**
 * Replace any LATEST_TIMESTAMP stamps in the given edits with
 * <code>now</code>.
 * <p>
 * This acts to replace LATEST_TIMESTAMP with now.
 * @param keys edits to adjust; may be null or empty
 * @param now current time, as bytes, to substitute
 * @return <code>true</code> when there was at least one key to process,
 * <code>false</code> for a null or empty list
 */
private boolean updateKeys(List<KeyValue> keys, byte [] now) {
  if (keys == null) {
    return false;
  }
  if (keys.isEmpty()) {
    return false;
  }
  for (KeyValue kv : keys) {
    if (kv.getTimestamp() == HConstants.LATEST_TIMESTAMP) {
      kv.updateLatestStamp(now);
    }
  }
  return true;
}
// /*
// * Utility method to verify values length.
// * @param batchUpdate The update to verify
// * @throws IOException Thrown if a value is too long
// */
// private void validateValuesLength(Put put)
// throws IOException {
// Map<byte[], List<KeyValue>> families = put.getFamilyMap();
// for(Map.Entry<byte[], List<KeyValue>> entry : families.entrySet()) {
// HColumnDescriptor hcd =
// this.regionInfo.getTableDesc().getFamily(entry.getKey());
// int maxLen = hcd.getMaxValueLength();
// for(KeyValue kv : entry.getValue()) {
// if(kv.getValueLength() > maxLen) {
// throw new ValueOverMaxLengthException("Value in column "
// + Bytes.toString(kv.getColumn()) + " is too long. "
// + kv.getValueLength() + " > " + maxLen);
// }
// }
// }
// }
/*
 * Check if resources to support an update.
 *
 * Blocks the calling thread (wait/notify on 'this') while the memstore is
 * over the blocking threshold, requesting a flush each time around the loop.
 *
 * Here we synchronize on HRegion, a broad scoped lock. Its appropriate
 * given we're figuring in here whether this region is able to take on
 * writes. This is only method with a synchronize (at time of writing),
 * this and the synchronize on 'this' inside in internalFlushCache to send
 * the notify.
 */
private void checkResources() {
boolean blocked = false;
while (this.memstoreSize.get() > this.blockingMemStoreSize) {
requestFlush();
// Log only once per blocking episode.
if (!blocked) {
LOG.info("Blocking updates for '" + Thread.currentThread().getName() +
"' on region " + Bytes.toStringBinary(getRegionName()) +
": memstore size " +
StringUtils.humanReadableInt(this.memstoreSize.get()) +
" is >= than blocking " +
StringUtils.humanReadableInt(this.blockingMemStoreSize) + " size");
}
blocked = true;
synchronized(this) {
try {
// Woken by the flusher's notify, or re-check after the wake frequency.
wait(threadWakeFrequency);
} catch (InterruptedException e) {
// continue;
}
}
}
if (blocked) {
LOG.info("Unblocking updates for region " + this + " '"
+ Thread.currentThread().getName() + "'");
}
}
/**
 * Guard used by mutation paths (put/delete/checkAndPut) before writing.
 * @throws IOException Throws exception if region is in read-only mode.
 */
protected void checkReadOnly() throws IOException {
if (this.writestate.isReadOnly()) {
throw new IOException("region is read only");
}
}
/**
 * Add updates first to the hlog and then add values to memstore.
 * Warning: Assumption is caller has lock on passed in row.
 * Convenience overload: always writes to the WAL.
 * @param family column family the edits belong to
 * @param edits Cell updates by column
 * @throws IOException
 */
private void put(final byte [] family, final List<KeyValue> edits)
throws IOException {
this.put(family, edits, true);
}
/**
 * Add updates first to the hlog (if writeToWal) and then add values to memstore.
 * Warning: Assumption is caller has lock on passed in row.
 * May trigger a cache flush request if the memstore grows past the flush size.
 * @param family
 * @param edits cell updates; a null or empty list is a no-op
 * @param writeToWAL if true, then we should write to the log
 * @throws IOException
 */
private void put(final byte [] family, final List<KeyValue> edits,
boolean writeToWAL) throws IOException {
if (edits == null || edits.isEmpty()) {
return;
}
boolean flush = false;
this.updatesLock.readLock().lock();
try {
// WAL first: the edit must be durable before it is visible in memstore.
if (writeToWAL) {
long now = System.currentTimeMillis();
this.log.append(regionInfo,
regionInfo.getTableDesc().getName(), edits, now);
}
long size = 0;
Store store = getStore(family);
for (KeyValue kv: edits) {
size = this.memstoreSize.addAndGet(store.add(kv));
}
flush = isFlushSize(size);
} finally {
this.updatesLock.readLock().unlock();
}
if (flush) {
// Request a cache flush. Do it outside update lock.
requestFlush();
}
}
// Ask the flush listener (if any) to flush this region's memstore.
// Idempotent: does nothing if a flush has already been requested.
private void requestFlush() {
if (this.flushListener == null) {
return;
}
synchronized (writestate) {
if (this.writestate.isFlushRequested()) {
return;
}
writestate.flushRequested = true;
}
// Make request outside of synchronize block; HBASE-818.
this.flushListener.request(this);
if (LOG.isDebugEnabled()) {
LOG.debug("Flush requested on " + this);
}
}
/*
 * @param size current aggregate memstore size in bytes
 * @return True if size is over the flush threshold
 */
private boolean isFlushSize(final long size) {
return size > this.memstoreFlushSize;
}
// Do any reconstruction needed from the log.
// Intentionally a no-op here; kept as an extension hook.
protected void doReconstructionLog(Path oldLogFile, long minSeqId, long maxSeqId,
Progressable reporter)
throws UnsupportedEncodingException, IOException {
// Nothing to do (Replaying is done in HStores)
// Used by subclasses; e.g. THBase.
}
// Factory hook for creating a Store; subclasses may override to supply a
// different Store implementation.
protected Store instantiateHStore(Path baseDir,
HColumnDescriptor c, Path oldLogFile, Progressable reporter)
throws IOException {
return new Store(baseDir, this, c, this.fs, oldLogFile,
this.conf, reporter);
}
/**
 * Return HStore instance.
 * Use with caution. Exposed for use of fixup utilities.
 * @param column Name of column family hosted by this region.
 * @return Store that goes with the family on passed <code>column</code>,
 * or null if no such family exists in this region.
 * TODO: Make this lookup faster.
 */
public Store getStore(final byte [] column) {
return this.stores.get(column);
}
//////////////////////////////////////////////////////////////////////////////
// Support code
//////////////////////////////////////////////////////////////////////////////
/** Make sure this is a valid row for the HRegion; throws
 * WrongRegionException if the row falls outside [startKey, endKey). */
private void checkRow(final byte [] row) throws IOException {
if(!rowIsInRange(regionInfo, row)) {
throw new WrongRegionException("Requested row out of range for " +
"HRegion " + this + ", startKey='" +
Bytes.toStringBinary(regionInfo.getStartKey()) + "', getEndKey()='" +
Bytes.toStringBinary(regionInfo.getEndKey()) + "', row='" +
Bytes.toStringBinary(row) + "'");
}
}
/**
 * Obtain a lock on the given row. Blocks until success.
 *
 * I know it's strange to have two mappings:
 * <pre>
 * ROWS ==> LOCKS
 * </pre>
 * as well as
 * <pre>
 * LOCKS ==> ROWS
 * </pre>
 *
 * But it acts as a guard on the client; a miswritten client just can't
 * submit the name of a row and start writing to it; it must know the correct
 * lockid, which matches the lock list in memory.
 *
 * <p>It would be more memory-efficient to assume a correctly-written client,
 * which maybe we'll do in the future.
 *
 * @param row Name of row to lock.
 * @throws IOException if the region has been closed
 * @return The id of the held lock.
 */
public Integer obtainRowLock(final byte [] row) throws IOException {
  checkRow(row);
  splitsAndClosesLock.readLock().lock();
  try {
    if (this.closed.get()) {
      throw new NotServingRegionException("Region " + this + " closed");
    }
    synchronized (lockedRows) {
      // Wait until no other client holds a lock on this row.
      while (lockedRows.contains(row)) {
        try {
          lockedRows.wait();
        } catch (InterruptedException ie) {
          // Ignored; loop re-checks the condition.
        }
      }
      // generate a new lockid. Attempt to insert the new [lockid, row].
      // if this lockid already exists in the map then revert and retry
      // We could have first done a lockIds.get, and if it does not exist only
      // then do a lockIds.put, but the hope is that the lockIds.put will
      // mostly return null the first time itself because there won't be
      // too many lockId collisions.
      byte [] prev = null;
      Integer lockId = null;
      do {
        // Integer.valueOf instead of the deprecated new Integer(...):
        // the map is keyed by equals(), so boxed identity is irrelevant,
        // and valueOf avoids a needless allocation for small ids.
        lockId = Integer.valueOf(lockIdGenerator++);
        prev = lockIds.put(lockId, row);
        if (prev != null) {
          lockIds.put(lockId, prev); // revert old value
          lockIdGenerator = rand.nextInt(); // generate new start point
        }
      } while (prev != null);
      lockedRows.add(row);
      lockedRows.notifyAll();
      return lockId;
    }
  } finally {
    splitsAndClosesLock.readLock().unlock();
  }
}
/**
 * Used by unit tests.
 * @param lockid
 * @return Row that goes with <code>lockid</code>, or null if the lock id
 * is not currently held
 */
byte [] getRowFromLock(final Integer lockid) {
synchronized (lockedRows) {
return lockIds.get(lockid);
}
}
/**
 * Release the row lock!
 * Removes both sides of the lockid/row mapping and wakes any threads
 * blocked in {@link #obtainRowLock(byte[])} waiting on this row.
 * @param lockid The lock ID to release.
 */
void releaseRowLock(final Integer lockid) {
synchronized (lockedRows) {
byte[] row = lockIds.remove(lockid);
lockedRows.remove(row);
lockedRows.notifyAll();
}
}
/**
 * See if the given lock id is currently held.
 * @param lockid lock id to test
 * @return true when a row is registered under <code>lockid</code>
 */
private boolean isRowLocked(final Integer lockid) {
  synchronized (lockedRows) {
    return lockIds.get(lockid) != null;
  }
}
/**
 * Returns existing row lock if found, otherwise
 * obtains a new row lock and returns it.
 * @param lockid existing lock id, or null to acquire a fresh lock
 * @param row row to lock when no lock id is supplied
 * @return a valid lock id for the row
 * @throws IOException if the supplied lock id is not actually held
 */
private Integer getLock(Integer lockid, byte [] row)
throws IOException {
  if (lockid != null) {
    // Caller claims to already hold a lock; verify before trusting it.
    if (!isRowLocked(lockid)) {
      throw new IOException("Invalid row lock");
    }
    return lockid;
  }
  return obtainRowLock(row);
}
// Block until every outstanding row lock has been released.
// Relies on releaseRowLock's notifyAll to wake us for each release.
private void waitOnRowLocks() {
synchronized (lockedRows) {
while (!this.lockedRows.isEmpty()) {
if (LOG.isDebugEnabled()) {
LOG.debug("Waiting on " + this.lockedRows.size() + " row locks");
}
try {
this.lockedRows.wait();
} catch (InterruptedException e) {
// Catch. Let while test determine loop-end.
}
}
}
}
/**
 * Two HRegions are equal iff they serve the same region, i.e. carry the
 * same region name. The previous implementation compared hash codes,
 * which would report two distinct regions as equal on a hash collision;
 * comparing the names directly is both correct and still consistent with
 * {@link #hashCode()} (which hashes the region name).
 */
@Override
public boolean equals(Object o) {
  if (this == o) {
    return true;
  }
  if (!(o instanceof HRegion)) {
    return false;
  }
  return Bytes.equals(this.getRegionName(), ((HRegion) o).getRegionName());
}
// Hash of the region name; keeps equals/hashCode consistent since
// equality is also based on the region name.
@Override
public int hashCode() {
return Bytes.hashCode(this.regionInfo.getRegionName());
}
// Human-readable region name, e.g. for log messages.
@Override
public String toString() {
return this.regionInfo.getRegionNameAsString();
}
/** @return Path of region base directory */
public Path getBaseDir() {
return this.basedir;
}
/**
 * RegionScanner is an iterator through a bunch of rows in an HRegion.
 * <p>
 * It is used to combine scanners from multiple Stores (aka column families).
 * Rows come back in comparator order off a {@link KeyValueHeap} built over
 * one scanner per requested family (plus any caller-supplied scanners),
 * with the optional {@link Filter} applied per row key and per row.
 */
class RegionScanner implements InternalScanner {
private final KeyValueHeap storeHeap;
// Exclusive upper bound on rows; null means "scan to end of region".
private final byte [] stopRow;
private Filter filter;
// Accumulates the current row's KeyValues between next() calls.
private List<KeyValue> results = new ArrayList<KeyValue>();
// Max KeyValues to return per next() call; 0/negative means unlimited.
private int batch;
RegionScanner(Scan scan, List<KeyValueScanner> additionalScanners) {
this.filter = scan.getFilter();
this.batch = scan.getBatch();
if (Bytes.equals(scan.getStopRow(), HConstants.EMPTY_END_ROW)) {
this.stopRow = null;
} else {
this.stopRow = scan.getStopRow();
}
List<KeyValueScanner> scanners = new ArrayList<KeyValueScanner>();
if (additionalScanners != null) {
scanners.addAll(additionalScanners);
}
// One store scanner per requested family, merged into a single heap.
for (Map.Entry<byte[], NavigableSet<byte[]>> entry :
scan.getFamilyMap().entrySet()) {
Store store = stores.get(entry.getKey());
scanners.add(store.getScanner(scan, entry.getValue()));
}
this.storeHeap =
new KeyValueHeap(scanners.toArray(new KeyValueScanner[0]), comparator);
}
RegionScanner(Scan scan) {
this(scan, null);
}
// Reset the filter's per-row state before starting a new row.
private void resetFilters() {
if (filter != null) {
filter.reset();
}
}
/**
 * Fetch up to <code>limit</code> KeyValues of the next row into
 * <code>outResults</code>.
 * @return true if there may be more rows, false when the scan is done
 * @throws NotServingRegionException if the region is closing/closed
 */
public boolean next(List<KeyValue> outResults, int limit) throws IOException {
if (closing.get() || closed.get()) {
close();
throw new NotServingRegionException(regionInfo.getRegionNameAsString() +
" is closing=" + closing.get() + " or closed=" + closed.get());
}
results.clear();
boolean returnResult = nextInternal(limit);
// filterRow() may veto the whole row after all its cells were gathered.
if (!returnResult && filter != null && filter.filterRow()) {
results.clear();
}
outResults.addAll(results);
resetFilters();
if (isFilterDone()) {
return false;
}
return returnResult;
}
public boolean next(List<KeyValue> outResults) throws IOException {
// apply the batching limit by default
return next(outResults, batch);
}
/*
 * @return True if a filter rules the scanner is over, done.
 */
boolean isFilterDone() {
return this.filter != null && this.filter.filterAllRemaining();
}
/*
 * Advance the heap to the next returnable row, honoring stopRow, the
 * row-key filter, and the batch limit.
 * @return true if there are more rows, false if scanner is done
 * @throws IOException
 */
private boolean nextInternal(int limit) throws IOException {
byte [] currentRow = null;
boolean filterCurrentRow = false;
while (true) {
KeyValue kv = this.storeHeap.peek();
if (kv == null) return false;
byte [] row = kv.getRow();
boolean samerow = Bytes.equals(currentRow, row);
if (samerow && filterCurrentRow) {
// Filter all columns until row changes
readAndDumpCurrentResult();
continue;
}
if (!samerow) {
// Continue on the next row:
currentRow = row;
filterCurrentRow = false;
// See if we passed stopRow
if (this.stopRow != null &&
comparator.compareRows(this.stopRow, 0, this.stopRow.length,
currentRow, 0, currentRow.length) <= 0) {
return false;
}
// Finished the previous row: return it if it survived filtering.
if (hasResults()) return true;
}
// See if current row should be filtered based on row key
if (this.filter != null && this.filter.filterRowKey(row, 0, row.length)) {
readAndDumpCurrentResult();
resetFilters();
filterCurrentRow = true;
currentRow = row;
continue;
}
this.storeHeap.next(results, limit);
if (limit > 0 && results.size() == limit) {
return true;
}
}
}
// Consume and discard the remainder of the current row from the heap.
private void readAndDumpCurrentResult() throws IOException {
this.storeHeap.next(this.results);
this.results.clear();
}
/*
 * Do we have results to return or should we continue. Call when we get to
 * the end of a row. Does house cleaning -- clearing results and resetting
 * filters -- if we are to continue.
 * @return True if we should return else false if need to keep going.
 */
private boolean hasResults() {
if (this.results.isEmpty() ||
this.filter != null && this.filter.filterRow()) {
// Make sure results is empty, reset filters
this.results.clear();
resetFilters();
return false;
}
return true;
}
public void close() {
storeHeap.close();
}
/**
 * Close a single scanner, swallowing a null scanner quietly.
 * @param scanner to be closed
 */
public void close(KeyValueScanner scanner) {
try {
scanner.close();
} catch(NullPointerException npe) {}
}
/**
 * @return the current storeHeap
 */
public KeyValueHeap getStoreHeap() {
return this.storeHeap;
}
}
// Utility methods
/**
 * Convenience method creating new HRegions. Used by createTable and by the
 * bootstrap code in the HMaster constructor.
 * Note, this method creates an {@link HLog} for the created region. It
 * needs to be closed explicitly. Use {@link HRegion#getLog()} to get
 * access.
 * @param info Info for region to create.
 * @param rootDir Root directory for HBase instance
 * @param conf
 * @return new, initialized HRegion
 *
 * @throws IOException
 */
public static HRegion createHRegion(final HRegionInfo info, final Path rootDir,
final Configuration conf)
throws IOException {
Path tableDir =
HTableDescriptor.getTableDir(rootDir, info.getTableDesc().getName());
Path regionDir = HRegion.getRegionDir(tableDir, info.getEncodedName());
FileSystem fs = FileSystem.get(conf);
fs.mkdirs(regionDir);
// The new region gets its own private HLog under its region directory.
HRegion region = new HRegion(tableDir,
new HLog(fs, new Path(regionDir, HREGION_LOGDIR_NAME),
new Path(regionDir, HREGION_OLDLOGDIR_NAME), conf, null),
fs, conf, info, null);
region.initialize(null, null);
return region;
}
/**
 * Convenience method to open a HRegion outside of an HRegionServer context.
 * @param info Info for region to be opened; must not be null.
 * @param rootDir Root directory for HBase instance
 * @param log HLog for region to use. This method will call
 * HLog#setSequenceNumber(long) passing the result of the call to
 * HRegion#getMinSequenceId() to ensure the log id is properly kept
 * up. HRegionStore does this every time it opens a new region.
 * @param conf
 * @return new HRegion
 *
 * @throws IOException
 * @throws NullPointerException if <code>info</code> is null
 */
public static HRegion openHRegion(final HRegionInfo info, final Path rootDir,
final HLog log, final Configuration conf)
throws IOException {
  // Fail fast: validate before logging so we never emit
  // "Opening region: null" and then throw.
  if (info == null) {
    throw new NullPointerException("Passed region info is null");
  }
  if (LOG.isDebugEnabled()) {
    LOG.debug("Opening region: " + info);
  }
  HRegion r = new HRegion(
    HTableDescriptor.getTableDir(rootDir, info.getTableDesc().getName()),
    log, FileSystem.get(conf), conf, info, null);
  r.initialize(null, null);
  // Keep the shared log's sequence number ahead of this region's edits.
  if (log != null) {
    log.setSequenceNumber(r.getMinSequenceId());
  }
  return r;
}
/**
 * Inserts a new region's meta information into the passed
 * <code>meta</code> region. Used by the HMaster bootstrap code adding
 * new table to ROOT table.
 *
 * @param meta META HRegion to be updated
 * @param r HRegion to add to <code>meta</code>
 *
 * @throws IOException
 */
public static void addRegionToMETA(HRegion meta, HRegion r)
throws IOException {
meta.checkResources();
// The row key is the region name
byte [] row = r.getRegionName();
Integer lid = meta.obtainRowLock(row);
try {
List<KeyValue> edits = new ArrayList<KeyValue>();
edits.add(new KeyValue(row, CATALOG_FAMILY, REGIONINFO_QUALIFIER,
System.currentTimeMillis(), Writables.getBytes(r.getRegionInfo())));
meta.put(HConstants.CATALOG_FAMILY, edits);
} finally {
// Always release the lock we took above.
meta.releaseRowLock(lid);
}
}
/**
 * Delete a region's meta information from the passed
 * <code>meta</code> region. Deletes the row.
 * @param srvr META server to be updated
 * @param metaRegionName Meta region name
 * @param regionName HRegion to remove from <code>meta</code>
 *
 * @throws IOException
 */
public static void removeRegionFromMETA(final HRegionInterface srvr,
final byte [] metaRegionName, final byte [] regionName)
throws IOException {
// Deleting the whole row removes every catalog column for the region.
Delete delete = new Delete(regionName);
srvr.delete(metaRegionName, delete);
}
/**
 * Utility method used by HMaster marking regions offlined.
 * Writes the region info back with the offline flag set, then clears the
 * server/startcode columns.
 * @param srvr META server to be updated
 * @param metaRegionName Meta region name
 * @param info HRegion to update in <code>meta</code>
 *
 * @throws IOException
 */
public static void offlineRegionInMETA(final HRegionInterface srvr,
final byte [] metaRegionName, final HRegionInfo info)
throws IOException {
// Puts and Deletes used to be "atomic" here. We can use row locks if
// we need to keep that property, or we can expand Puts and Deletes to
// allow them to be committed at once.
byte [] row = info.getRegionName();
Put put = new Put(row);
info.setOffline(true);
put.add(CATALOG_FAMILY, REGIONINFO_QUALIFIER, Writables.getBytes(info));
srvr.put(metaRegionName, put);
cleanRegionInMETA(srvr, metaRegionName, info);
}
/**
 * Clean COL_SERVER and COL_STARTCODE for passed <code>info</code> in
 * <code>.META.</code>
 * @param srvr
 * @param metaRegionName
 * @param info
 * @throws IOException
 */
public static void cleanRegionInMETA(final HRegionInterface srvr,
final byte [] metaRegionName, final HRegionInfo info)
throws IOException {
Delete del = new Delete(info.getRegionName());
del.deleteColumns(CATALOG_FAMILY, SERVER_QUALIFIER);
del.deleteColumns(CATALOG_FAMILY, STARTCODE_QUALIFIER);
srvr.delete(metaRegionName, del);
}
/**
 * Deletes all the files for a HRegion
 *
 * @param fs the file system object
 * @param rootdir qualified path of HBase root directory
 * @param info HRegionInfo for region to be deleted
 * @throws IOException
 */
public static void deleteRegion(FileSystem fs, Path rootdir, HRegionInfo info)
throws IOException {
deleteRegion(fs, HRegion.getRegionDir(rootdir, info));
}
/*
 * Recursively remove a region directory from the filesystem.
 * Logs a warning (rather than throwing) when the delete reports failure.
 */
private static void deleteRegion(FileSystem fs, Path regiondir)
throws IOException {
  if (LOG.isDebugEnabled()) {
    LOG.debug("DELETING region " + regiondir.toString());
  }
  boolean deleted = fs.delete(regiondir, true);
  if (!deleted) {
    LOG.warn("Failed delete of " + regiondir);
  }
}
/**
 * Computes the Path of the HRegion
 *
 * @param tabledir qualified path for table
 * @param name ENCODED region name
 * @return Path of HRegion directory (tabledir/name)
 */
public static Path getRegionDir(final Path tabledir, final int name) {
return new Path(tabledir, Integer.toString(name));
}
/**
 * Computes the Path of the HRegion
 *
 * @param rootdir qualified path of HBase root directory
 * @param info HRegionInfo for the region
 * @return qualified path of region directory
 * (rootdir/tablename/encodedRegionName)
 */
public static Path getRegionDir(final Path rootdir, final HRegionInfo info) {
return new Path(
HTableDescriptor.getTableDir(rootdir, info.getTableDesc().getName()),
Integer.toString(info.getEncodedName()));
}
/**
 * Determines if the specified row is within the row range specified by the
 * specified HRegionInfo
 *
 * @param info HRegionInfo that specifies the row range
 * @param row row to be checked
 * @return true if the row is within the range specified by the HRegionInfo
 */
public static boolean rowIsInRange(HRegionInfo info, final byte [] row) {
  byte [] startKey = info.getStartKey();
  byte [] endKey = info.getEndKey();
  // Empty start key means "from the beginning of the table".
  boolean onOrAfterStart =
    startKey.length == 0 || Bytes.compareTo(startKey, row) <= 0;
  // Empty end key means "to the end of the table"; end key is exclusive.
  boolean beforeEnd =
    endKey.length == 0 || Bytes.compareTo(endKey, row) > 0;
  return onOrAfterStart && beforeEnd;
}
/**
 * Make the directories for a specific column family
 *
 * @param fs the file system
 * @param tabledir base directory where region will live (usually the table dir)
 * @param hri
 * @param colFamily the column family
 * @throws IOException
 */
public static void makeColumnFamilyDirs(FileSystem fs, Path tabledir,
final HRegionInfo hri, byte [] colFamily)
throws IOException {
Path dir = Store.getStoreHomedir(tabledir, hri.getEncodedName(), colFamily);
// Non-fatal: mkdirs failure is logged, not thrown.
if (!fs.mkdirs(dir)) {
LOG.warn("Failed to create " + dir);
}
}
/**
 * Merge two HRegions. The regions must be adjacent and must not overlap.
 * Normalizes argument order (lower-keyed region first) before checking
 * adjacency and delegating to {@link #merge(HRegion, HRegion)}.
 *
 * @param srcA
 * @param srcB
 * @return new merged HRegion
 * @throws IOException if both start keys are null or the regions are
 * not adjacent
 */
public static HRegion mergeAdjacent(final HRegion srcA, final HRegion srcB)
throws IOException {
HRegion a = srcA;
HRegion b = srcB;
// Make sure that srcA comes first; important for key-ordering during
// write of the merged file.
if (srcA.getStartKey() == null) {
if (srcB.getStartKey() == null) {
throw new IOException("Cannot merge two regions with null start key");
}
// A's start key is null but B's isn't. Assume A comes before B
} else if ((srcB.getStartKey() == null) ||
(Bytes.compareTo(srcA.getStartKey(), srcB.getStartKey()) > 0)) {
a = srcB;
b = srcA;
}
// Adjacent means a's end key is exactly b's start key.
if (!(Bytes.compareTo(a.getEndKey(), b.getStartKey()) == 0)) {
throw new IOException("Cannot merge non-adjacent regions");
}
return merge(a, b);
}
/**
 * Merge two regions whether they are adjacent or not.
 * Flushes and major-compacts both regions, computes the union key range,
 * moves both regions' store files into a fresh region directory, then
 * opens, compacts and returns the new region. Both source regions are
 * closed and their directories deleted.
 *
 * @param a region a
 * @param b region b
 * @return new merged region
 * @throws IOException
 */
public static HRegion merge(HRegion a, HRegion b) throws IOException {
if (!a.getRegionInfo().getTableDesc().getNameAsString().equals(
b.getRegionInfo().getTableDesc().getNameAsString())) {
throw new IOException("Regions do not belong to the same table");
}
FileSystem fs = a.getFilesystem();
// Make sure each region's cache is empty
a.flushcache();
b.flushcache();
// Compact each region so we only have one store file per family
a.compactStores(true);
if (LOG.isDebugEnabled()) {
LOG.debug("Files for region: " + a);
listPaths(fs, a.getRegionDir());
}
b.compactStores(true);
if (LOG.isDebugEnabled()) {
LOG.debug("Files for region: " + b);
listPaths(fs, b.getRegionDir());
}
Configuration conf = a.getConf();
HTableDescriptor tabledesc = a.getTableDesc();
HLog log = a.getLog();
Path basedir = a.getBaseDir();
// Presume both are of same region type -- i.e. both user or catalog
// table regions. This way can use comparator.
// Merged start key: empty if either side starts at the table start,
// else the smaller of the two start keys.
final byte [] startKey = a.comparator.matchingRows(a.getStartKey(), 0,
a.getStartKey().length,
EMPTY_BYTE_ARRAY, 0, EMPTY_BYTE_ARRAY.length) ||
b.comparator.matchingRows(b.getStartKey(), 0, b.getStartKey().length,
EMPTY_BYTE_ARRAY, 0, EMPTY_BYTE_ARRAY.length)?
EMPTY_BYTE_ARRAY:
a.comparator.compareRows(a.getStartKey(), 0, a.getStartKey().length,
b.getStartKey(), 0, b.getStartKey().length) <= 0?
a.getStartKey(): b.getStartKey();
// Merged end key: empty if either side runs to the table end, else the
// larger of the two end keys.
// NOTE(review): b's end key is checked with a.comparator here, whereas
// the startKey expression above used b.comparator for b's key -- the
// comparators are presumably identical for same-type regions; confirm.
final byte [] endKey = a.comparator.matchingRows(a.getEndKey(), 0,
a.getEndKey().length, EMPTY_BYTE_ARRAY, 0, EMPTY_BYTE_ARRAY.length) ||
a.comparator.matchingRows(b.getEndKey(), 0, b.getEndKey().length,
EMPTY_BYTE_ARRAY, 0, EMPTY_BYTE_ARRAY.length)?
EMPTY_BYTE_ARRAY:
a.comparator.compareRows(a.getEndKey(), 0, a.getEndKey().length,
b.getEndKey(), 0, b.getEndKey().length) <= 0?
b.getEndKey(): a.getEndKey();
HRegionInfo newRegionInfo = new HRegionInfo(tabledesc, startKey, endKey);
LOG.info("Creating new region " + newRegionInfo.toString());
int encodedName = newRegionInfo.getEncodedName();
Path newRegionDir = HRegion.getRegionDir(a.getBaseDir(), encodedName);
if(fs.exists(newRegionDir)) {
throw new IOException("Cannot merge; target file collision at " +
newRegionDir);
}
fs.mkdirs(newRegionDir);
LOG.info("starting merge of regions: " + a + " and " + b +
" into new region " + newRegionInfo.toString() +
" with start key <" + Bytes.toString(startKey) + "> and end key <" +
Bytes.toString(endKey) + ">");
// Move HStoreFiles under new region directory
Map<byte [], List<StoreFile>> byFamily =
new TreeMap<byte [], List<StoreFile>>(Bytes.BYTES_COMPARATOR);
byFamily = filesByFamily(byFamily, a.close());
byFamily = filesByFamily(byFamily, b.close());
for (Map.Entry<byte [], List<StoreFile>> es : byFamily.entrySet()) {
byte [] colFamily = es.getKey();
makeColumnFamilyDirs(fs, basedir, newRegionInfo, colFamily);
// Because we compacted the source regions we should have no more than two
// HStoreFiles per family and there will be no reference store
List<StoreFile> srcFiles = es.getValue();
if (srcFiles.size() == 2) {
long seqA = srcFiles.get(0).getMaxSequenceId();
long seqB = srcFiles.get(1).getMaxSequenceId();
if (seqA == seqB) {
// Can't have same sequenceid since on open of a store, this is what
// distinguishes the files (see the map of stores how its keyed by
// sequenceid).
throw new IOException("Files have same sequenceid: " + seqA);
}
}
for (StoreFile hsf: srcFiles) {
StoreFile.rename(fs, hsf.getPath(),
StoreFile.getUniqueFile(fs, Store.getStoreHomedir(basedir,
newRegionInfo.getEncodedName(), colFamily)));
}
}
if (LOG.isDebugEnabled()) {
LOG.debug("Files for new region");
listPaths(fs, newRegionDir);
}
HRegion dstRegion = new HRegion(basedir, log, fs, conf, newRegionInfo, null);
dstRegion.initialize(null, null);
dstRegion.compactStores();
if (LOG.isDebugEnabled()) {
LOG.debug("Files for new region");
listPaths(fs, dstRegion.getRegionDir());
}
// Source regions are gone; remove their directories.
deleteRegion(fs, a.getRegionDir());
deleteRegion(fs, b.getRegionDir());
LOG.info("merge completed. New region is " + dstRegion);
return dstRegion;
}
/*
 * Fills a map with a vector of store files keyed by column family.
 * @param byFamily Map to fill.
 * @param storeFiles Store files to process.
 * @return Returns <code>byFamily</code>
 */
private static Map<byte [], List<StoreFile>> filesByFamily(
    Map<byte [], List<StoreFile>> byFamily, List<StoreFile> storeFiles) {
  for (int i = 0; i < storeFiles.size(); i++) {
    StoreFile sf = storeFiles.get(i);
    byte [] fam = sf.getFamily();
    // Lazily create the per-family list on first sighting.
    List<StoreFile> files = byFamily.get(fam);
    if (files == null) {
      files = new ArrayList<StoreFile>();
      byFamily.put(fam, files);
    }
    files.add(sf);
  }
  return byFamily;
}
/**
 * @return True if any of this region's stores needs a major compaction.
 * @throws IOException
 */
boolean isMajorCompaction() throws IOException {
  boolean needed = false;
  for (Store s : this.stores.values()) {
    if (s.isMajorCompaction()) {
      needed = true;
      break;
    }
  }
  return needed;
}
/*
 * List the files under the specified directory
 *
 * Recursively logs every file (with its size) and directory at debug
 * level; a no-op when debug logging is off or the directory is empty.
 * @param fs
 * @param dir
 * @throws IOException
 */
private static void listPaths(FileSystem fs, Path dir) throws IOException {
  if (!LOG.isDebugEnabled()) {
    return;
  }
  FileStatus [] stats = fs.listStatus(dir);
  if (stats == null || stats.length == 0) {
    return;
  }
  for (FileStatus stat : stats) {
    String path = stat.getPath().toString();
    if (stat.isDir()) {
      LOG.debug("d " + path);
      listPaths(fs, stat.getPath());
    } else {
      LOG.debug("f " + path + " size=" + stat.getLen());
    }
  }
}
//
// HBASE-880
//
/**
 * Read the cells selected by <code>get</code> under a row lock.
 * If the get names no families, all families in the table are read.
 * @param get
 * @param lockid existing row lock id, or null to acquire one here
 * @return result (never null; may be empty)
 * @throws IOException if a family is invalid or the lock id is not held
 */
public Result get(final Get get, final Integer lockid) throws IOException {
// Verify families are all valid
if (get.hasFamilies()) {
for (byte [] family: get.familySet()) {
checkFamily(family);
}
} else { // Adding all families to scanner
for (byte[] family: regionInfo.getTableDesc().getFamiliesKeys()) {
get.addFamily(family);
}
}
// Lock row
Integer lid = getLock(lockid, get.getRow());
List<KeyValue> result = new ArrayList<KeyValue>();
try {
for (Map.Entry<byte[],NavigableSet<byte[]>> entry:
get.getFamilyMap().entrySet()) {
get(this.stores.get(entry.getKey()), get, entry.getValue(), result);
}
} finally {
// Only release the lock if we acquired it ourselves.
if(lockid == null) releaseRowLock(lid);
}
return new Result(result);
}
/*
 * Delegates a single-family fetch to the given store, appending matching
 * KeyValues to <code>result</code>.
 * @param store store holding the family's data
 * @param get the overall Get specification
 * @param qualifiers qualifiers to fetch within this family
 * @param result accumulator for matched KeyValues
 */
private void get(final Store store, final Get get,
    final NavigableSet<byte []> qualifiers, List<KeyValue> result)
throws IOException {
  store.get(get, qualifiers, result);
}
/**
 * Atomically increments (under the row lock) the long value stored at
 * row/family/qualifier by <code>amount</code> and writes the new value back.
 *
 * @param row row to update
 * @param family column family of the counter cell
 * @param qualifier qualifier of the counter cell
 * @param amount delta to add (may be negative)
 * @param writeToWAL true to append the new value to the write-ahead log
 * before updating the store
 * @return The new value.
 * @throws IOException
 */
public long incrementColumnValue(byte [] row, byte [] family,
    byte [] qualifier, long amount, boolean writeToWAL)
throws IOException {
  checkRow(row);
  boolean flush = false;
  // Lock row
  Integer lid = obtainRowLock(row);
  long result = amount;
  try {
    Store store = stores.get(family);
    // Get the old value:
    Get get = new Get(row);
    get.addColumn(family, qualifier);
    List<KeyValue> results = new ArrayList<KeyValue>();
    NavigableSet<byte[]> qualifiers = new TreeSet<byte[]>(Bytes.BYTES_COMPARATOR);
    qualifiers.add(qualifier);
    store.get(get, qualifiers, results);
    if (!results.isEmpty()) {
      // Existing value found: add it to the delta to get the new value.
      KeyValue kv = results.get(0);
      byte [] buffer = kv.getBuffer();
      int valueOffset = kv.getValueOffset();
      result += Bytes.toLong(buffer, valueOffset, Bytes.SIZEOF_LONG);
    }
    // build the KeyValue now:
    KeyValue newKv = new KeyValue(row, family,
      qualifier, System.currentTimeMillis(),
      Bytes.toBytes(result));
    // now log it (WAL first, so the edit is durable before the store sees it):
    if (writeToWAL) {
      long now = System.currentTimeMillis();
      List<KeyValue> edits = new ArrayList<KeyValue>(1);
      edits.add(newKv);
      this.log.append(regionInfo,
        regionInfo.getTableDesc().getName(), edits, now);
    }
    // Now request the ICV to the store, this will set the timestamp
    // appropriately depending on if there is a value in memcache or not.
    // returns the heap-size delta caused by the update.
    long size = store.updateColumnValue(row, family, qualifier, result);
    size = this.memstoreSize.addAndGet(size);
    flush = isFlushSize(size);
  } finally {
    releaseRowLock(lid);
  }
  if (flush) {
    // Request a cache flush. Do it outside update lock.
    requestFlush();
  }
  return result;
}
//
// New HBASE-880 Helpers
//
/*
 * Verifies that the given column family exists in this region's table
 * schema.
 * @param family family name to check
 * @throws NoSuchColumnFamilyException if the schema has no such family
 */
private void checkFamily(final byte [] family)
throws NoSuchColumnFamilyException {
  if (regionInfo.getTableDesc().hasFamily(family)) {
    return;
  }
  throw new NoSuchColumnFamilyException("Column family " +
    Bytes.toString(family) + " does not exist in region " + this
    + " in table " + regionInfo.getTableDesc());
}
// Approximate heap cost of a bare HRegion instance: its primitive fields
// plus one reference slot per member, aligned.
public static final long FIXED_OVERHEAD = ClassSize.align(
    (5 * Bytes.SIZEOF_LONG) + Bytes.SIZEOF_BOOLEAN +
    (19 * ClassSize.REFERENCE) + ClassSize.OBJECT);
// FIXED_OVERHEAD plus the objects the region always allocates: atomics,
// the locks-to-rows concurrent hash map (sized at its default 16
// entries/segments), the stores skip-list map, the WriteState object and
// the three reentrant locks.
public static final long DEEP_OVERHEAD = ClassSize.align(FIXED_OVERHEAD +
    ClassSize.OBJECT + (2 * ClassSize.ATOMIC_BOOLEAN) +
    ClassSize.ATOMIC_LONG + ClassSize.ATOMIC_INTEGER +
    ClassSize.CONCURRENT_HASHMAP +
    (16 * ClassSize.CONCURRENT_HASHMAP_ENTRY) +
    (16 * ClassSize.CONCURRENT_HASHMAP_SEGMENT) +
    ClassSize.CONCURRENT_SKIPLISTMAP + ClassSize.CONCURRENT_SKIPLISTMAP_ENTRY +
    ClassSize.align(ClassSize.OBJECT + (5 * Bytes.SIZEOF_BOOLEAN)) +
    (3 * ClassSize.REENTRANT_LOCK));
/**
 * @return Approximate heap occupied by this region: the fixed region
 * overhead plus the heap size reported by each of its stores.
 */
@Override
public long heapSize() {
  long total = DEEP_OVERHEAD;
  for (Store s : this.stores.values()) {
    total += s.heapSize();
  }
  return total;
}
/*
 * Prints command-line usage for the {@link #main(String[])} tool and
 * terminates the JVM. This method calls System.exit and never returns.
 * @param message Message to print out. May be null.
 */
private static void printUsageAndExit(final String message) {
  if (message != null && message.length() > 0) System.out.println(message);
  // Fixed typo in the usage string: CATLALOG -> CATALOG.
  System.out.println("Usage: HRegion CATALOG_TABLE_DIR [major_compact]");
  System.out.println("Options:");
  System.out.println(" major_compact  Pass this option to major compact " +
    "passed region.");
  System.out.println("Default outputs scan of passed region.");
  System.exit(1);
}
/*
 * Process table.
 * Do major compaction or list content.
 * Opens the single region of a catalog table (-ROOT- or .META.) directly
 * off the filesystem, then either major-compacts it or logs a full scan
 * of its contents. The region is always closed before returning.
 * @param fs filesystem holding the table
 * @param p table directory; its name must start with the root or meta
 * table name
 * @param log write-ahead log to attach to the region
 * @param c configuration
 * @param majorCompact true to major compact instead of scanning
 * @throws IOException if the path is not a known catalog table or any
 * region operation fails
 */
private static void processTable(final FileSystem fs, final Path p,
    final HLog log, final Configuration c,
    final boolean majorCompact)
throws IOException {
  HRegion region = null;
  String rootStr = Bytes.toString(HConstants.ROOT_TABLE_NAME);
  String metaStr = Bytes.toString(HConstants.META_TABLE_NAME);
  // Currently expects tables have one region only.
  if (p.getName().startsWith(rootStr)) {
    region = new HRegion(p, log, fs, c, HRegionInfo.ROOT_REGIONINFO, null);
  } else if (p.getName().startsWith(metaStr)) {
    region = new HRegion(p, log, fs, c, HRegionInfo.FIRST_META_REGIONINFO,
      null);
  } else {
    throw new IOException("Not a known catalog table: " + p.toString());
  }
  try {
    region.initialize(null, null);
    if (majorCompact) {
      region.compactStores(true);
    } else {
      // Default behavior
      Scan scan = new Scan();
      // scan.addFamily(HConstants.CATALOG_FAMILY);
      InternalScanner scanner = region.getScanner(scan);
      try {
        List<KeyValue> kvs = new ArrayList<KeyValue>();
        boolean done = false;
        do {
          // Reuse the list across iterations; next() refills it and
          // returns false once the scan is exhausted.
          kvs.clear();
          done = scanner.next(kvs);
          if (kvs.size() > 0) LOG.info(kvs);
        } while (done);
      } finally {
        scanner.close();
      }
      // System.out.println(region.getClosestRowBefore(Bytes.toBytes("GeneratedCSVContent2,E3652782193BC8D66A0BA1629D0FAAAB,9993372036854775807")));
    }
  } finally {
    region.close();
  }
}
/**
 * For internal use in forcing splits ahead of file size limit.
 * Sets the split-request flag and reports its prior state.
 * @param b new value for the split-request flag
 * @return previous value
 */
public boolean shouldSplit(boolean b) {
  boolean old = this.splitRequest;
  this.splitRequest = b;
  return old;
}
/**
 * Checks every store to see if one has too many
 * store files.
 * @return true if any store has too many store files
 */
public boolean hasTooManyStoreFiles() {
  boolean tooMany = false;
  // Short-circuit as soon as one store crosses the threshold.
  for (Iterator<Store> it = stores.values().iterator();
      !tooMany && it.hasNext();) {
    tooMany = it.next().hasTooManyStoreFiles();
  }
  return tooMany;
}
/**
 * Facility for dumping and compacting catalog tables.
 * Only does catalog tables since these are only tables we for sure know
 * schema on. For usage run:
 * <pre>
 *   ./bin/hbase org.apache.hadoop.hbase.regionserver.HRegion
 * </pre>
 * @param args args[0] is the catalog table directory; optional args[1]
 * starting with "major" requests a major compaction instead of a scan
 * @throws IOException
 */
public static void main(String[] args) throws IOException {
  if (args.length < 1) {
    printUsageAndExit(null);
  }
  boolean majorCompact = false;
  if (args.length > 1) {
    // Any second argument must be the major-compact option; otherwise
    // printUsageAndExit terminates the JVM before the flag is set.
    if (!args[1].toLowerCase().startsWith("major")) {
      printUsageAndExit("ERROR: Unrecognized option <" + args[1] + ">");
    }
    majorCompact = true;
  }
  Path tableDir  = new Path(args[0]);
  Configuration c = HBaseConfiguration.create();
  FileSystem fs = FileSystem.get(c);
  // Use a throwaway, timestamped log directory so runs do not collide.
  Path logdir = new Path(c.get("hbase.tmp.dir"),
    "hlog" + tableDir.getName() + System.currentTimeMillis());
  Path oldLogDir = new Path(c.get("hbase.tmp.dir"), HREGION_OLDLOGDIR_NAME);
  HLog log = new HLog(fs, logdir, oldLogDir, c, null);
  try {
    processTable(fs, tableDir, log, c, majorCompact);
  } finally {
    log.close();
    // Shut down the shared block cache so its eviction thread exits.
    BlockCache bc = StoreFile.getBlockCache(c);
    if (bc != null) bc.shutdown();
  }
}
}
/**
* Copyright 2010 The Apache Software Foundation
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.regionserver;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.DroppedSnapshotException;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.NotServingRegionException;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.io.HeapSize;
import org.apache.hadoop.hbase.io.Reference.Range;
import org.apache.hadoop.hbase.io.hfile.BlockCache;
import org.apache.hadoop.hbase.ipc.HRegionInterface;
import org.apache.hadoop.hbase.regionserver.wal.HLog;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.Writables;
import org.apache.hadoop.util.Progressable;
import org.apache.hadoop.util.StringUtils;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.NavigableSet;
import java.util.TreeMap;
import java.util.TreeSet;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentSkipListMap;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.ReentrantReadWriteLock;
/**
* HRegion stores data for a certain region of a table. It stores all columns
* for each row. A given table consists of one or more HRegions.
*
* <p>We maintain multiple HStores for a single HRegion.
*
* <p>An Store is a set of rows with some column data; together,
* they make up all the data for the rows.
*
* <p>Each HRegion has a 'startKey' and 'endKey'.
* <p>The first is inclusive, the second is exclusive (except for
* the final region) The endKey of region 0 is the same as
* startKey for region 1 (if it exists). The startKey for the
* first region is null. The endKey for the final region is null.
*
* <p>Locking at the HRegion level serves only one purpose: preventing the
* region from being closed (and consequently split) while other operations
* are ongoing. Each row level operation obtains both a row lock and a region
* read lock for the duration of the operation. While a scanner is being
* constructed, getScanner holds a read lock. If the scanner is successfully
* constructed, it holds a read lock until it is closed. A close takes out a
* write lock and consequently will block for ongoing operations and will block
* new operations from starting while the close is in progress.
*
* <p>An HRegion is defined by its table and its key extent.
*
* <p>It consists of at least one Store. The number of Stores should be
* configurable, so that data which is accessed together is stored in the same
* Store. Right now, we approximate that by building a single Store for
* each column family. (This config info will be communicated via the
* tabledesc.)
*
* <p>The HTableDescriptor contains metainfo about the HRegion's table.
* regionName is a unique identifier for this HRegion. (startKey, endKey]
* defines the keyspace for this HRegion.
*/
public class HRegion implements HConstants, HeapSize { // , Writable{
  static final Log LOG = LogFactory.getLog(HRegion.class);
  // Name of the region subdirectory that holds in-progress split daughters.
  static final String SPLITDIR = "splits";
  // Name of the region subdirectory used while merging regions.
  static final String MERGEDIR = "merges";
  // Set once close() completes; a closed region serves no further requests.
  final AtomicBoolean closed = new AtomicBoolean(false);
  /* Closing can take some time; use the closing flag if there is stuff we don't
   * want to do while in closing state; e.g. like offer this region up to the
   * master as a region to close if the carrying regionserver is overloaded.
   * Once set, it is never cleared.
   */
  final AtomicBoolean closing = new AtomicBoolean(false);

  //////////////////////////////////////////////////////////////////////////////
  // Members
  //////////////////////////////////////////////////////////////////////////////

  // Outstanding row locks: lock id -> row the lock covers.
  private final Map<Integer, byte []> locksToRows =
    new ConcurrentHashMap<Integer, byte []>();
  // One Store per column family, keyed (and ordered) by family name.
  protected final Map<byte [], Store> stores =
    new ConcurrentSkipListMap<byte [], Store>(Bytes.BYTES_RAWCOMPARATOR);
  //These variable are just used for getting data out of the region, to test on
  //client side
  // private int numStores = 0;
  // private int [] storeSize = null;
  // private byte [] name = null;

  // Running total of memstore bytes across all stores; drives flush decisions.
  final AtomicLong memstoreSize = new AtomicLong(0);

  // This is the table subdirectory.
  final Path basedir;
  final HLog log;
  final FileSystem fs;
  final Configuration conf;
  final HRegionInfo regionInfo;
  final Path regiondir;
  private final Path regionCompactionDir;
  KeyValue.KVComparator comparator;

  /*
   * Set this when scheduling compaction if want the next compaction to be a
   * major compaction.  Cleared each time through compaction code.
   */
  private volatile boolean forceMajorCompaction = false;
  /*
   * Data structure of write state flags used coordinating flushes,
   * compactions and closes. Callers synchronize on the WriteState instance
   * itself when reading or changing these flags together.
   */
  static class WriteState {
    // Set while a memstore flush is happening.
    volatile boolean flushing = false;
    // Set when a flush has been requested.
    volatile boolean flushRequested = false;
    // Set while a compaction is running.
    volatile boolean compacting = false;
    // Gets set in close. If set, cannot compact or flush again.
    volatile boolean writesEnabled = true;
    // Set if region is read-only
    volatile boolean readOnly = false;

    /**
     * Set flags that make this region read-only.
     * @param onOff true to make the region read-only
     */
    synchronized void setReadOnly(final boolean onOff) {
      this.writesEnabled = !onOff;
      this.readOnly = onOff;
    }

    boolean isReadOnly() {
      return this.readOnly;
    }

    boolean isFlushRequested() {
      return this.flushRequested;
    }
  }
  private volatile WriteState writestate = new WriteState();

  // Memstore size (bytes) at which a flush is triggered.
  final long memstoreFlushSize;
  private volatile long lastFlushTime;
  // Callback used to ask the region server for a flush; may be null.
  final FlushRequester flushListener;
  // Memstore size (bytes) at which updates are blocked until a flush frees space.
  private final long blockingMemStoreSize;
  final long threadWakeFrequency;
  // Used to guard splits and closes
  private final ReentrantReadWriteLock splitsAndClosesLock =
    new ReentrantReadWriteLock();
  // Held (read) while constructing scanners; close takes the write side.
  private final ReentrantReadWriteLock newScannerLock =
    new ReentrantReadWriteLock();

  // Stop updates lock
  private final ReentrantReadWriteLock updatesLock =
    new ReentrantReadWriteLock();
  private final Object splitLock = new Object();
  private long minSequenceId;
  private boolean splitRequest;

  /**
   * Name of the region info file that resides just under the region directory.
   */
  public final static String REGIONINFO_FILE = ".regioninfo";

  /**
   * REGIONINFO_FILE as byte array.
   */
  public final static byte [] REGIONINFO_FILE_BYTES =
    Bytes.toBytes(REGIONINFO_FILE);
  /**
   * Should only be used for testing purposes.
   * Leaves every member null/zero; the resulting instance is not usable as
   * a real region.
   */
  public HRegion(){
    this.basedir = null;
    this.blockingMemStoreSize = 0L;
    this.conf = null;
    this.flushListener = null;
    this.fs = null;
    this.memstoreFlushSize = 0L;
    this.log = null;
    this.regionCompactionDir = null;
    this.regiondir = null;
    this.regionInfo = null;
    this.threadWakeFrequency = 0L;
  }
  /**
   * HRegion constructor. Only wires up members and computes directory
   * paths and flush thresholds; {@link #initialize(Path, Progressable)}
   * must be called afterwards before the region can serve requests.
   *
   * @param basedir qualified path of directory where region should be located,
   * usually the table directory.
   * @param log The HLog is the outbound log for any updates to the HRegion
   * (There's a single HLog for all the HRegions on a single HRegionServer.)
   * The log file is a logfile from the previous execution that's
   * custom-computed for this HRegion. The HRegionServer computes and sorts the
   * appropriate log info for this HRegion. If there is a previous log file
   * (implying that the HRegion has been written-to before), then read it from
   * the supplied path.
   * @param fs is the filesystem.
   * @param conf is global configuration settings.
   * @param regionInfo - HRegionInfo that describes the region
   * is new), then read them from the supplied path.
   * @param flushListener an object that implements CacheFlushListener or null
   * making progress to master -- otherwise master might think region deploy
   * failed.  Can be null.
   */
  public HRegion(Path basedir, HLog log, FileSystem fs, Configuration conf,
      HRegionInfo regionInfo, FlushRequester flushListener) {
    this.basedir = basedir;
    this.comparator = regionInfo.getComparator();
    this.log = log;
    this.fs = fs;
    this.conf = conf;
    this.regionInfo = regionInfo;
    this.flushListener = flushListener;
    this.threadWakeFrequency = conf.getLong(THREAD_WAKE_FREQUENCY, 10 * 1000);
    String encodedNameStr = Integer.toString(this.regionInfo.getEncodedName());
    this.regiondir = new Path(basedir, encodedNameStr);
    if (LOG.isDebugEnabled()) {
      // Write out region name as string and its encoded name.
      LOG.debug("Creating region " + this + ", encoded=" +
        this.regionInfo.getEncodedName());
    }
    this.regionCompactionDir =
      new Path(getCompactionDir(basedir), encodedNameStr);
    // Prefer the table's own flush size; fall back to the site-wide
    // configuration only when the table carries the default.
    long flushSize = regionInfo.getTableDesc().getMemStoreFlushSize();
    if (flushSize == HTableDescriptor.DEFAULT_MEMSTORE_FLUSH_SIZE) {
      flushSize = conf.getLong("hbase.hregion.memstore.flush.size",
        HTableDescriptor.DEFAULT_MEMSTORE_FLUSH_SIZE);
    }
    this.memstoreFlushSize = flushSize;
    // Updates block once the memstore grows past this multiple of flush size.
    this.blockingMemStoreSize = this.memstoreFlushSize *
      conf.getLong("hbase.hregion.memstore.block.multiplier", 2);
  }
  /**
   * Initialize this region and get it ready to roll.
   * Called after construction. Moves any initial (split/merge) files into
   * place, writes the .regioninfo file, instantiates a Store per family,
   * replays the reconstruction log, and clears out leftover split/merge
   * directories.
   *
   * @param initialFiles path of files to move under the region dir, or null
   * @param reporter progress callback passed to store construction; may be null
   * @throws IOException
   */
  public void initialize(Path initialFiles, final Progressable reporter)
  throws IOException {
    Path oldLogFile = new Path(regiondir, HREGION_OLDLOGFILE_NAME);

    moveInitialFilesIntoPlace(this.fs, initialFiles, this.regiondir);

    // Write HRI to a file in case we need to recover .META.
    checkRegioninfoOnFilesystem();

    // Load in all the HStores.
    long maxSeqId = -1;
    long minSeqIdToRecover = Integer.MAX_VALUE;

    for (HColumnDescriptor c : this.regionInfo.getTableDesc().getFamilies()) {
      Store store = instantiateHStore(this.basedir, c, oldLogFile, reporter);
      this.stores.put(c.getName(), store);
      long storeSeqId = store.getMaxSequenceId();
      if (storeSeqId > maxSeqId) {
        maxSeqId = storeSeqId;
      }
      long storeSeqIdBeforeRecovery = store.getMaxSeqIdBeforeLogRecovery();
      if (storeSeqIdBeforeRecovery < minSeqIdToRecover) {
        minSeqIdToRecover = storeSeqIdBeforeRecovery;
      }
    }

    // Play log if one.  Delete when done.
    doReconstructionLog(oldLogFile, minSeqIdToRecover, maxSeqId, reporter);
    if (fs.exists(oldLogFile)) {
      if (LOG.isDebugEnabled()) {
        LOG.debug("Deleting old log file: " + oldLogFile);
      }
      fs.delete(oldLogFile, false);
    }

    // Add one to the current maximum sequence id so new edits are beyond.
    this.minSequenceId = maxSeqId + 1;

    // Get rid of any splits or merges that were lost in-progress.  Clean out
    // these directories here on open.  We may be opening a region that was
    // being split but we crashed in the middle of it all.
    FSUtils.deleteDirectory(this.fs, new Path(regiondir, SPLITDIR));
    FSUtils.deleteDirectory(this.fs, new Path(regiondir, MERGEDIR));

    // See if region is meant to run read-only.
    if (this.regionInfo.getTableDesc().isReadOnly()) {
      this.writestate.setReadOnly(true);
    }

    // HRegion is ready to go!
    this.writestate.compacting = false;
    this.lastFlushTime = System.currentTimeMillis();
    LOG.info("region " + this + "/" + this.regionInfo.getEncodedName() +
      " available; sequence id is " + this.minSequenceId);
  }
/*
* Move any passed HStore files into place (if any). Used to pick up split
* files and any merges from splits and merges dirs.
* @param initialFiles
* @throws IOException
*/
private static void moveInitialFilesIntoPlace(final FileSystem fs,
final Path initialFiles, final Path regiondir)
throws IOException {
if (initialFiles != null && fs.exists(initialFiles)) {
fs.rename(initialFiles, regiondir);
}
}
/**
* @return True if this region has references.
*/
boolean hasReferences() {
for (Map.Entry<byte [], Store> e: this.stores.entrySet()) {
for (Map.Entry<Long, StoreFile> ee:
e.getValue().getStorefiles().entrySet()) {
// Found a reference, return.
if (ee.getValue().isReference()) return true;
}
}
return false;
}
  /*
   * Write out an info file under the region directory.  Useful recovering
   * mangled regions. No-op if a non-empty .regioninfo file already exists.
   * @throws IOException
   */
  private void checkRegioninfoOnFilesystem() throws IOException {
    // Name of this file has two leading and trailing underscores so it doesn't
    // clash w/ a store/family name.  There is possibility, but assumption is
    // that its slim (don't want to use control character in filename because
    // it makes the file hard to work with from a shell).
    Path regioninfo = new Path(this.regiondir, REGIONINFO_FILE);
    if (this.fs.exists(regioninfo) &&
        this.fs.getFileStatus(regioninfo).getLen() > 0) {
      return;
    }
    FSDataOutputStream out = this.fs.create(regioninfo, true);
    try {
      // Serialized HRegionInfo first, then a human-readable rendering.
      this.regionInfo.write(out);
      out.write('\n');
      out.write('\n');
      out.write(Bytes.toBytes(this.regionInfo.toString()));
    } finally {
      out.close();
    }
  }
  /**
   * @return Updates to this region need to have a sequence id that is >= to
   * the this number.
   */
  long getMinSequenceId() {
    return this.minSequenceId;
  }

  /** @return a HRegionInfo object for this region */
  public HRegionInfo getRegionInfo() {
    return this.regionInfo;
  }

  /** @return true if region is closed (close() has completed) */
  public boolean isClosed() {
    return this.closed.get();
  }

  /**
   * @return True if closing process has started. Once set this is never
   * cleared.
   */
  public boolean isClosing() {
    return this.closing.get();
  }
  /**
   * Close down this HRegion.  Flush the cache, shut down each HStore, don't
   * service any more calls. Equivalent to {@code close(false)} (non-abort).
   *
   * <p>This method could take some time to execute, so don't call it from a
   * time-sensitive thread.
   *
   * @return Vector of all the storage files that the HRegion's component
   * HStores make use of.  It's a list of all HStoreFile objects. Returns empty
   * vector if already closed and null if judged that it should not close.
   *
   * @throws IOException
   */
  public List<StoreFile> close() throws IOException {
    return close(false);
  }
  /**
   * Close down this HRegion.  Flush the cache unless abort parameter is true,
   * Shut down each HStore, don't service any more calls.
   *
   * This method could take some time to execute, so don't call it from a
   * time-sensitive thread.
   *
   * @param abort true if server is aborting (only during testing)
   * @return Vector of all the storage files that the HRegion's component
   * HStores make use of.  It's a list of HStoreFile objects.  Can be null if
   * we are not to close at this time or we are already closed.
   *
   * @throws IOException
   */
  public List<StoreFile> close(final boolean abort) throws IOException {
    if (isClosed()) {
      LOG.warn("region " + this + " already closed");
      return null;
    }
    this.closing.set(true);
    // Lock acquisition order here matters: splitLock, then writestate, then
    // the scanner write lock, then splitsAndClosesLock.
    synchronized (splitLock) {
      synchronized (writestate) {
        // Disable compacting and flushing by background threads for this
        // region.
        writestate.writesEnabled = false;
        LOG.debug("Closing " + this + ": compactions & flushes disabled ");
        // Wait for any in-flight compaction/flush to finish; those threads
        // notify on writestate when done.
        while (writestate.compacting || writestate.flushing) {
          LOG.debug("waiting for" +
              (writestate.compacting ? " compaction" : "") +
              (writestate.flushing ?
                  (writestate.compacting ? "," : "") + " cache flush" :
                    "") + " to complete for region " + this);
          try {
            writestate.wait();
          } catch (InterruptedException iex) {
            // continue
          }
        }
      }
      newScannerLock.writeLock().lock();
      try {
        splitsAndClosesLock.writeLock().lock();
        LOG.debug("Updates disabled for region, no outstanding scanners on " +
          this);
        try {
          // Write lock means no more row locks can be given out.  Wait on
          // outstanding row locks to come in before we close so we do not drop
          // outstanding updates.
          waitOnRowLocks();
          LOG.debug("No more row locks outstanding on region " + this);

          // Don't flush the cache if we are aborting
          if (!abort) {
            internalFlushcache();
          }

          List<StoreFile> result = new ArrayList<StoreFile>();
          for (Store store: stores.values()) {
            result.addAll(store.close());
          }
          this.closed.set(true);
          LOG.info("Closed " + this);
          return result;
        } finally {
          splitsAndClosesLock.writeLock().unlock();
        }
      } finally {
        newScannerLock.writeLock().unlock();
      }
    }
  }
  //////////////////////////////////////////////////////////////////////////////
  // HRegion accessors
  //////////////////////////////////////////////////////////////////////////////

  /** @return start key for region (inclusive) */
  public byte [] getStartKey() {
    return this.regionInfo.getStartKey();
  }

  /** @return end key for region (exclusive, except for the final region) */
  public byte [] getEndKey() {
    return this.regionInfo.getEndKey();
  }

  /** @return region id */
  public long getRegionId() {
    return this.regionInfo.getRegionId();
  }

  /** @return region name */
  public byte [] getRegionName() {
    return this.regionInfo.getRegionName();
  }

  /** @return region name as string for logging */
  public String getRegionNameAsString() {
    return this.regionInfo.getRegionNameAsString();
  }

  /** @return HTableDescriptor for this region */
  public HTableDescriptor getTableDesc() {
    return this.regionInfo.getTableDesc();
  }

  /** @return HLog in use for this region */
  public HLog getLog() {
    return this.log;
  }

  /** @return Configuration object */
  public Configuration getConf() {
    return this.conf;
  }

  /** @return region directory Path */
  public Path getRegionDir() {
    return this.regiondir;
  }

  /** @return FileSystem being used by this region */
  public FileSystem getFilesystem() {
    return this.fs;
  }

  /** @return the last time the region was flushed (epoch millis) */
  public long getLastFlushTime() {
    return this.lastFlushTime;
  }
//////////////////////////////////////////////////////////////////////////////
// HRegion maintenance.
//
// These methods are meant to be called periodically by the HRegionServer for
// upkeep.
//////////////////////////////////////////////////////////////////////////////
/** @return returns size of largest HStore. */
public long getLargestHStoreSize() {
long size = 0;
for (Store h: stores.values()) {
long storeSize = h.getSize();
if (storeSize > size) {
size = storeSize;
}
}
return size;
}
  /*
   * Split the HRegion to create two brand-new ones.  This also closes
   * current HRegion.  Split should be fast since we don't rewrite store files
   * but instead create new 'reference' store files that read off the top and
   * bottom ranges of parent store files.
   * @param splitRow row on which to split region
   * @return two brand-new HRegions or null if a split is not needed
   * @throws IOException
   */
  HRegion [] splitRegion(final byte [] splitRow) throws IOException {
    prepareToSplit();
    synchronized (splitLock) {
      if (closed.get()) {
        return null;
      }
      // Add start/end key checking: hbase-428. A split row equal to either
      // boundary would produce an empty daughter.
      byte [] startKey = this.regionInfo.getStartKey();
      byte [] endKey = this.regionInfo.getEndKey();
      if (this.comparator.matchingRows(startKey, 0, startKey.length,
          splitRow, 0, splitRow.length)) {
        LOG.debug("Startkey and midkey are same, not splitting");
        return null;
      }
      if (this.comparator.matchingRows(splitRow, 0, splitRow.length,
          endKey, 0, endKey.length)) {
        LOG.debug("Endkey and midkey are same, not splitting");
        return null;
      }
      LOG.info("Starting split of region " + this);
      Path splits = new Path(this.regiondir, SPLITDIR);
      if(!this.fs.exists(splits)) {
        this.fs.mkdirs(splits);
      }
      // Calculate regionid to use.  Can't be less than that of parent else
      // it'll insert into wrong location over in .META. table: HBASE-710.
      long rid = System.currentTimeMillis();
      if (rid < this.regionInfo.getRegionId()) {
        LOG.warn("Clock skew; parent regions id is " +
          this.regionInfo.getRegionId() + " but current time here is " + rid);
        rid = this.regionInfo.getRegionId() + 1;
      }
      HRegionInfo regionAInfo = new HRegionInfo(this.regionInfo.getTableDesc(),
        startKey, splitRow, false, rid);
      Path dirA = getSplitDirForDaughter(splits, regionAInfo);
      HRegionInfo regionBInfo = new HRegionInfo(this.regionInfo.getTableDesc(),
        splitRow, endKey, false, rid);
      Path dirB = getSplitDirForDaughter(splits, regionBInfo);

      // Now close the HRegion.  Close returns all store files or null if not
      // supposed to close (? What to do in this case? Implement abort of close?)
      // Close also does wait on outstanding rows and calls a flush just-in-case.
      List<StoreFile> hstoreFilesToSplit = close(false);
      if (hstoreFilesToSplit == null) {
        LOG.warn("Close came back null (Implement abort of close?)");
        throw new RuntimeException("close returned empty vector of HStoreFiles");
      }

      // Split each store file. Each parent file yields a bottom-half
      // reference for daughter A and a top-half reference for daughter B.
      for(StoreFile h: hstoreFilesToSplit) {
        StoreFile.split(fs,
          Store.getStoreHomedir(splits, regionAInfo.getEncodedName(),
            h.getFamily()),
          h, splitRow, Range.bottom);
        StoreFile.split(fs,
          Store.getStoreHomedir(splits, regionBInfo.getEncodedName(),
            h.getFamily()),
          h, splitRow, Range.top);
      }

      // Create a region instance and then move the splits into place under
      // regionA and regionB.
      HRegion regionA =
        new HRegion(basedir, log, fs, conf, regionAInfo, null);
      moveInitialFilesIntoPlace(this.fs, dirA, regionA.getRegionDir());
      HRegion regionB =
        new HRegion(basedir, log, fs, conf, regionBInfo, null);
      moveInitialFilesIntoPlace(this.fs, dirB, regionB.getRegionDir());

      HRegion regions[] = new HRegion [] {regionA, regionB};
      return regions;
    }
  }
  /*
   * Get the daughter directories in the splits dir.  The splits dir is under
   * the parent regions' directory.
   * @param splits parent region's splits directory
   * @param hri daughter region's info (its encoded name names the dir)
   * @return Path to split dir.
   * @throws IOException if the directory already exists, which should be
   * impossible (see below)
   */
  private Path getSplitDirForDaughter(final Path splits, final HRegionInfo hri)
  throws IOException {
    Path d =
      new Path(splits, Integer.toString(hri.getEncodedName()));
    if (fs.exists(d)) {
      // This should never happen; the splits dir will be newly made when we
      // come in here.  Even if we crashed midway through a split, the reopen
      // of the parent region clears out the dir in its initialize method.
      throw new IOException("Cannot split; target file collision at " + d);
    }
    return d;
  }
  // Hook invoked at the start of splitRegion(); subclasses (e.g. tests) may
  // override to interpose behavior before a split.
  protected void prepareToSplit() {
    // nothing
  }

  /*
   * @param dir base (table) directory
   * @return compaction directory for the passed in <code>dir</code>
   */
  static Path getCompactionDir(final Path dir) {
    return new Path(dir, HREGION_COMPACTIONDIR_NAME);
  }
  /*
   * Do preparation for pending compaction.
   * Clean out any vestiges of previous failed compactions.
   * @throws IOException
   */
  private void doRegionCompactionPrep() throws IOException {
    doRegionCompactionCleanup();
  }

  /*
   * Removes the compaction directory for this Store.
   * @throws IOException
   */
  private void doRegionCompactionCleanup() throws IOException {
    FSUtils.deleteDirectory(this.fs, this.regionCompactionDir);
  }

  // Requests that the next compaction be a major one; cleared each time
  // through compactStores().
  void setForceMajorCompaction(final boolean b) {
    this.forceMajorCompaction = b;
  }

  boolean getForceMajorCompaction() {
    return this.forceMajorCompaction;
  }
  /**
   * Called by compaction thread and after region is opened to compact the
   * HStores if necessary.
   *
   * <p>This operation could block for a long time, so don't call it from a
   * time-sensitive thread.
   *
   * Note that no locking is necessary at this level because compaction only
   * conflicts with a region split, and that cannot happen because the region
   * server does them sequentially and not in parallel.
   *
   * @return mid key if split is needed
   * @throws IOException
   */
  public byte [] compactStores() throws IOException {
    // Consume the force-major flag: it applies to exactly one compaction.
    boolean majorCompaction = this.forceMajorCompaction;
    this.forceMajorCompaction = false;
    return compactStores(majorCompaction);
  }
/*
 * Called by compaction thread and after region is opened to compact the
 * HStores if necessary.
 *
 * <p>This operation could block for a long time, so don't call it from a
 * time-sensitive thread.
 *
 * Note that no locking is necessary at this level because compaction only
 * conflicts with a region split, and that cannot happen because the region
 * server does them sequentially and not in parallel.
 *
 * @param majorCompaction True to force a major compaction regardless of thresholds
 * @return split row if split is needed
 * @throws IOException
 */
byte [] compactStores(final boolean majorCompaction)
throws IOException {
if (this.closing.get() || this.closed.get()) {
LOG.debug("Skipping compaction on " + this + " because closing/closed");
return null;
}
// Hold the split/close read lock so the region cannot be split or closed
// out from under the running compaction.
splitsAndClosesLock.readLock().lock();
try {
byte [] splitRow = null;
// Re-check closed now that we hold the lock.
if (this.closed.get()) {
return splitRow;
}
try {
// Only one compaction may run at a time; writestate guards the flag.
synchronized (writestate) {
if (!writestate.compacting && writestate.writesEnabled) {
writestate.compacting = true;
} else {
LOG.info("NOT compacting region " + this +
": compacting=" + writestate.compacting + ", writesEnabled=" +
writestate.writesEnabled);
return splitRow;
}
}
LOG.info("Starting" + (majorCompaction? " major " : " ") +
"compaction on region " + this);
long startTime = System.currentTimeMillis();
doRegionCompactionPrep();
// Track the largest store; its split row (if any) is the one returned.
long maxSize = -1;
for (Store store: stores.values()) {
final Store.StoreSize ss = store.compact(majorCompaction);
if (ss != null && ss.getSize() > maxSize) {
maxSize = ss.getSize();
splitRow = ss.getSplitRow();
}
}
doRegionCompactionCleanup();
String timeTaken = StringUtils.formatTimeDiff(System.currentTimeMillis(),
startTime);
LOG.info("compaction completed on region " + this + " in " + timeTaken);
} finally {
// Always clear the compacting flag and wake any waiters, even on error.
synchronized (writestate) {
writestate.compacting = false;
writestate.notifyAll();
}
}
return splitRow;
} finally {
splitsAndClosesLock.readLock().unlock();
}
}
/**
 * Flush the cache.
 *
 * When this method is called the cache will be flushed unless:
 * <ol>
 * <li>the cache is empty</li>
 * <li>the region is closed.</li>
 * <li>a flush is already in progress</li>
 * <li>writes are disabled</li>
 * </ol>
 *
 * <p>This method may block for some time, so it should not be called from a
 * time-sensitive thread.
 *
 * @return true if cache was flushed
 *
 * @throws IOException
 * @throws DroppedSnapshotException Thrown when replay of hlog is required
 * because a Snapshot was not properly persisted.
 */
public boolean flushcache() throws IOException {
if (this.closed.get()) {
return false;
}
// Claim the flushing flag under writestate; only one flush runs at a time.
synchronized (writestate) {
if (!writestate.flushing && writestate.writesEnabled) {
this.writestate.flushing = true;
} else {
if(LOG.isDebugEnabled()) {
LOG.debug("NOT flushing memstore for region " + this +
", flushing=" +
writestate.flushing + ", writesEnabled=" +
writestate.writesEnabled);
}
return false;
}
}
try {
// Prevent splits and closes
splitsAndClosesLock.readLock().lock();
try {
return internalFlushcache();
} finally {
splitsAndClosesLock.readLock().unlock();
}
} finally {
// Clear both the flushing and flushRequested flags and wake waiters,
// regardless of whether the flush succeeded.
synchronized (writestate) {
writestate.flushing = false;
this.writestate.flushRequested = false;
writestate.notifyAll();
}
}
}
/**
 * Flushing the cache is a little tricky. We have a lot of updates in the
 * memstore, all of which have also been written to the log. We need to
 * write those updates in the memstore out to disk, while being able to
 * process reads/writes as much as possible during the flush operation. Also,
 * the log has to state clearly the point in time at which the memstore was
 * flushed. (That way, during recovery, we know when we can rely on the
 * on-disk flushed structures and when we have to recover the memstore from
 * the log.)
 *
 * <p>So, we have a three-step process:
 *
 * <ul><li>A. Flush the memstore to the on-disk stores, noting the current
 * sequence ID for the log.<li>
 *
 * <li>B. Write a FLUSHCACHE-COMPLETE message to the log, using the sequence
 * ID that was current at the time of memstore-flush.</li>
 *
 * <li>C. Get rid of the memstore structures that are now redundant, as
 * they've been flushed to the on-disk HStores.</li>
 * </ul>
 * <p>This method is protected, but can be accessed via several public
 * routes.
 *
 * <p> This method may block for some time.
 *
 * @return true if the region needs compacting
 *
 * @throws IOException
 * @throws DroppedSnapshotException Thrown when replay of hlog is required
 * because a Snapshot was not properly persisted.
 */
private boolean internalFlushcache() throws IOException {
final long startTime = System.currentTimeMillis();
// Clear flush flag.
// Record latest flush time. Note this is updated even if the memstore is
// empty and we return without flushing anything.
this.lastFlushTime = startTime;
// If nothing to flush, return and avoid logging start/stop flush.
if (this.memstoreSize.get() <= 0) {
return false;
}
if (LOG.isDebugEnabled()) {
LOG.debug("Started memstore flush for region " + this +
". Current region memstore size " +
StringUtils.humanReadableInt(this.memstoreSize.get()));
}
// Stop updates while we snapshot the memstore of all stores. We only have
// to do this for a moment. Its quick. The subsequent sequence id that
// goes into the HLog after we've flushed all these snapshots also goes
// into the info file that sits beside the flushed files.
// We also set the memstore size to zero here before we allow updates
// again so its value will represent the size of the updates received
// during the flush
long sequenceId = -1L;
long completeSequenceId = -1L;
this.updatesLock.writeLock().lock();
// Get current size of memstores.
final long currentMemStoreSize = this.memstoreSize.get();
try {
for (Store s: stores.values()) {
s.snapshot();
}
sequenceId = log.startCacheFlush();
completeSequenceId = this.getCompleteCacheFlushSequenceId(sequenceId);
} finally {
this.updatesLock.writeLock().unlock();
}
// Any failure from here on out will be catastrophic requiring server
// restart so hlog content can be replayed and put back into the memstore.
// Otherwise, the snapshot content while backed up in the hlog, it will not
// be part of the current running servers state.
boolean compactionRequested = false;
try {
// A. Flush memstore to all the HStores.
// Keep running vector of all store files that includes both old and the
// just-made new flush store file.
for (Store hstore: stores.values()) {
boolean needsCompaction = hstore.flushCache(completeSequenceId);
if (needsCompaction) {
compactionRequested = true;
}
}
// Set down the memstore size by amount of flush.
this.memstoreSize.addAndGet(-currentMemStoreSize);
} catch (Throwable t) {
// An exception here means that the snapshot was not persisted.
// The hlog needs to be replayed so its content is restored to memstore.
// Currently, only a server restart will do this.
// We used to only catch IOEs but its possible that we'd get other
// exceptions -- e.g. HBASE-659 was about an NPE -- so now we catch
// all and sundry.
this.log.abortCacheFlush();
DroppedSnapshotException dse = new DroppedSnapshotException("region: " +
Bytes.toStringBinary(getRegionName()));
dse.initCause(t);
throw dse;
}
// If we get to here, the HStores have been written. If we get an
// error in completeCacheFlush it will release the lock it is holding
// B. Write a FLUSHCACHE-COMPLETE message to the log.
// This tells future readers that the HStores were emitted correctly,
// and that all updates to the log for this regionName that have lower
// log-sequence-ids can be safely ignored.
this.log.completeCacheFlush(getRegionName(),
regionInfo.getTableDesc().getName(), completeSequenceId);
// C. Finally notify anyone waiting on memstore to clear:
// e.g. checkResources().
synchronized (this) {
notifyAll(); // FindBugs NN_NAKED_NOTIFY
}
if (LOG.isDebugEnabled()) {
long now = System.currentTimeMillis();
LOG.debug("Finished memstore flush of ~" +
StringUtils.humanReadableInt(currentMemStoreSize) + " for region " +
this + " in " + (now - startTime) + "ms, sequence id=" + sequenceId +
", compaction requested=" + compactionRequested);
}
return compactionRequested;
}
/**
 * Get the sequence number to be associated with this cache flush. Used by
 * TransactionalRegion to not complete pending transactions.
 *
 * The default implementation returns the id unchanged; subclasses may
 * substitute a lower id to protect not-yet-committed edits.
 *
 * @param currentSequenceId
 * @return sequence id to complete the cache flush with
 */
protected long getCompleteCacheFlushSequenceId(long currentSequenceId) {
return currentSequenceId;
}
//////////////////////////////////////////////////////////////////////////////
// get() methods for client use.
//////////////////////////////////////////////////////////////////////////////
/**
 * Return all the data for the row that matches <i>row</i> exactly,
 * or the one that immediately precedes it.
 *
 * Convenience overload that searches the catalog family.
 *
 * @param row row key
 * @return map of values
 * @throws IOException
 */
Result getClosestRowBefore(final byte [] row)
throws IOException{
return getClosestRowBefore(row, HConstants.CATALOG_FAMILY);
}
/**
 * Return all the data for the row that matches <i>row</i> exactly,
 * or the one that immediately precedes it, within the given family.
 *
 * @param row row key
 * @param family
 * @return map of values, or null if no row is at or before <i>row</i>
 * @throws IOException
 */
public Result getClosestRowBefore(final byte [] row, final byte [] family)
throws IOException {
// look across all the HStores for this region and determine what the
// closest key is across all column families, since the data may be sparse
KeyValue key = null;
checkRow(row);
// Hold the read lock so the region cannot split/close during the lookup.
splitsAndClosesLock.readLock().lock();
try {
Store store = getStore(family);
KeyValue kv = new KeyValue(row, HConstants.LATEST_TIMESTAMP);
// get the closest key. (HStore.getRowKeyAtOrBefore can return null)
key = store.getRowKeyAtOrBefore(kv);
if (key == null) {
return null;
}
// This will get all results for this store. TODO: Do we need to do this?
Get get = new Get(key.getRow());
List<KeyValue> results = new ArrayList<KeyValue>();
store.get(get, null, results);
return new Result(results);
} finally {
splitsAndClosesLock.readLock().unlock();
}
}
/**
 * Return an iterator that scans over the HRegion, returning the indicated
 * columns and rows specified by the {@link Scan}.
 * <p>
 * This Iterator must be closed by the caller.
 *
 * @param scan configured {@link Scan}
 * @return InternalScanner
 * @throws IOException
 */
public InternalScanner getScanner(Scan scan)
throws IOException {
// Delegate with no additional scanners beyond the per-store ones.
return getScanner(scan, null);
}
/**
 * Build an {@link InternalScanner} for the given scan, optionally merging in
 * extra scanners supplied by the caller (used by subclasses).
 * If the scan names no families, all of the table's families are added.
 * @param scan configured {@link Scan}
 * @param additionalScanners extra scanners to merge into the heap, may be null
 * @return InternalScanner; must be closed by the caller
 * @throws IOException if the region is closed or a family is invalid
 */
protected InternalScanner getScanner(Scan scan, List<KeyValueScanner> additionalScanners) throws IOException {
// Block region close while a new scanner is being constructed.
newScannerLock.readLock().lock();
try {
if (this.closed.get()) {
throw new IOException("Region " + this + " closed");
}
// Verify families are all valid
if(scan.hasFamilies()) {
for(byte [] family : scan.getFamilyMap().keySet()) {
checkFamily(family);
}
} else { // Adding all families to scanner
for(byte[] family: regionInfo.getTableDesc().getFamiliesKeys()){
scan.addFamily(family);
}
}
return new RegionScanner(scan, additionalScanners);
} finally {
newScannerLock.readLock().unlock();
}
}
//////////////////////////////////////////////////////////////////////////////
// set() methods for client use.
//////////////////////////////////////////////////////////////////////////////
/**
 * Apply the given Delete to this region, first resolving an empty family map
 * to a delete of every family.
 * @param delete
 * @param lockid existing row lock id, or null to acquire one here
 * @param writeToWAL
 * @throws IOException
 */
public void delete(Delete delete, Integer lockid, boolean writeToWAL)
throws IOException {
checkReadOnly();
checkResources();
splitsAndClosesLock.readLock().lock();
Integer lid = null;
try {
byte [] row = delete.getRow();
// If we did not pass an existing row lock, obtain a new one
lid = getLock(lockid, row);
//Check to see if this is a deleteRow insert
if(delete.getFamilyMap().isEmpty()){
for(byte [] family : regionInfo.getTableDesc().getFamiliesKeys()){
// Don't eat the timestamp
delete.deleteFamily(family, delete.getTimeStamp());
}
} else {
for(byte [] family : delete.getFamilyMap().keySet()) {
if(family == null) {
throw new NoSuchColumnFamilyException("Empty family is invalid");
}
checkFamily(family);
}
}
for(Map.Entry<byte[], List<KeyValue>> e: delete.getFamilyMap().entrySet()) {
byte [] family = e.getKey();
delete(family, e.getValue(), writeToWAL);
}
} finally {
// Only release the lock if we acquired it here.
if(lockid == null) releaseRowLock(lid);
splitsAndClosesLock.readLock().unlock();
}
}
/**
 * Apply a list of delete KeyValues for one family to the memstore (and
 * optionally the WAL). Deletes stamped LATEST_TIMESTAMP are resolved to the
 * timestamp of the most recent matching cell before being applied.
 * @param family
 * @param kvs
 * @param writeToWAL
 * @throws IOException
 */
public void delete(byte [] family, List<KeyValue> kvs, boolean writeToWAL)
throws IOException {
long now = System.currentTimeMillis();
byte [] byteNow = Bytes.toBytes(now);
boolean flush = false;
this.updatesLock.readLock().lock();
try {
long size = 0;
Store store = getStore(family);
Iterator<KeyValue> kvIterator = kvs.iterator();
while(kvIterator.hasNext()) {
KeyValue kv = kvIterator.next();
// Check if time is LATEST, change to time of most recent addition if so
// This is expensive.
if (kv.isLatestTimestamp() && kv.isDeleteType()) {
List<KeyValue> result = new ArrayList<KeyValue>(1);
Get g = new Get(kv.getRow());
NavigableSet<byte []> qualifiers =
new TreeSet<byte []>(Bytes.BYTES_COMPARATOR);
byte [] q = kv.getQualifier();
if(q == null) q = HConstants.EMPTY_BYTE_ARRAY;
qualifiers.add(q);
get(store, g, qualifiers, result);
if (result.isEmpty()) {
// Nothing to delete
kvIterator.remove();
continue;
}
if (result.size() > 1) {
throw new RuntimeException("Unexpected size: " + result.size());
}
KeyValue getkv = result.get(0);
// Copy the found cell's timestamp bytes into this delete in place.
Bytes.putBytes(kv.getBuffer(), kv.getTimestampOffset(),
getkv.getBuffer(), getkv.getTimestampOffset(), Bytes.SIZEOF_LONG);
} else {
kv.updateLatestStamp(byteNow);
}
// We must do this in this loop because it could affect
// the above get to find the next timestamp to remove.
// This is the case when there are multiple deletes for the same column.
size = this.memstoreSize.addAndGet(store.delete(kv));
}
if (writeToWAL) {
this.log.append(regionInfo,
regionInfo.getTableDesc().getName(), kvs, now);
}
flush = isFlushSize(size);
} finally {
this.updatesLock.readLock().unlock();
}
if (flush) {
// Request a cache flush. Do it outside update lock.
requestFlush();
}
}
/**
 * Apply the given Put, acquiring a fresh row lock and honoring the Put's
 * own writeToWAL setting.
 * @param put
 * @throws IOException
 */
public void put(Put put) throws IOException {
this.put(put, null, put.getWriteToWAL());
}
/**
 * Apply the given Put with an explicit WAL setting, acquiring a fresh
 * row lock.
 * @param put
 * @param writeToWAL
 * @throws IOException
 */
public void put(Put put, boolean writeToWAL) throws IOException {
this.put(put, null, writeToWAL);
}
/**
 * Apply the given Put under an existing row lock, honoring the Put's own
 * writeToWAL setting.
 * @param put
 * @param lockid existing row lock id, or null to acquire one
 * @throws IOException
 */
public void put(Put put, Integer lockid) throws IOException {
this.put(put, lockid, put.getWriteToWAL());
}
/**
 * Apply the given Put to this region: per-family edits are timestamped
 * (LATEST_TIMESTAMP resolved to now) and written to the WAL/memstore.
 * @param put
 * @param lockid existing row lock id, or null to acquire one here
 * @param writeToWAL
 * @throws IOException
 */
public void put(Put put, Integer lockid, boolean writeToWAL)
throws IOException {
checkReadOnly();
// Do a rough check that we have resources to accept a write. The check is
// 'rough' in that between the resource check and the call to obtain a
// read lock, resources may run out. For now, the thought is that this
// will be extremely rare; we'll deal with it when it happens.
checkResources();
splitsAndClosesLock.readLock().lock();
try {
// We obtain a per-row lock, so other clients will block while one client
// performs an update. The read lock is released by the client calling
// #commit or #abort or if the HRegionServer lease on the lock expires.
// See HRegionServer#RegionListener for how the expire on HRegionServer
// invokes a HRegion#abort.
byte [] row = put.getRow();
// If we did not pass an existing row lock, obtain a new one
Integer lid = getLock(lockid, row);
byte [] now = Bytes.toBytes(System.currentTimeMillis());
try {
for (Map.Entry<byte[], List<KeyValue>> entry:
put.getFamilyMap().entrySet()) {
byte [] family = entry.getKey();
checkFamily(family);
List<KeyValue> puts = entry.getValue();
if (updateKeys(puts, now)) {
put(family, puts, writeToWAL);
}
}
} finally {
// Only release the lock if we acquired it in this call.
if(lockid == null) releaseRowLock(lid);
}
} finally {
splitsAndClosesLock.readLock().unlock();
}
}
//TODO, Think that gets/puts and deletes should be refactored a bit so that
//the getting of the lock happens before, so that you would just pass it into
//the methods. So in the case of checkAndPut you could just do lockRow,
//get, put, unlockRow or something
/**
 * Atomically (under the row lock) compare the current value of a cell with
 * <code>expectedValue</code> and apply <code>put</code> only on a match.
 * An absent cell matches a zero-length expected value.
 * @param row
 * @param family
 * @param qualifier
 * @param expectedValue
 * @param put
 * @param lockId existing row lock id, or null to acquire one here
 * @param writeToWAL
 * @throws IOException
 * @return true if the new put was execute, false otherwise
 */
public boolean checkAndPut(byte [] row, byte [] family, byte [] qualifier,
byte [] expectedValue, Put put, Integer lockId, boolean writeToWAL)
throws IOException{
checkReadOnly();
//TODO, add check for value length or maybe even better move this to the
//client if this becomes a global setting
checkResources();
splitsAndClosesLock.readLock().lock();
try {
Get get = new Get(row, put.getRowLock());
checkFamily(family);
get.addColumn(family, qualifier);
byte [] now = Bytes.toBytes(System.currentTimeMillis());
// Lock row
Integer lid = getLock(lockId, get.getRow());
List<KeyValue> result = new ArrayList<KeyValue>();
try {
//Getting data
for(Map.Entry<byte[],NavigableSet<byte[]>> entry:
get.getFamilyMap().entrySet()) {
get(this.stores.get(entry.getKey()), get, entry.getValue(), result);
}
boolean matches = false;
if (result.size() == 0 && expectedValue.length == 0) {
// No existing cell and caller expects "no value": treat as a match.
matches = true;
} else if(result.size() == 1) {
//Compare the expected value with the actual value
byte [] actualValue = result.get(0).getValue();
matches = Bytes.equals(expectedValue, actualValue);
}
//If matches put the new put
if(matches) {
for(Map.Entry<byte[], List<KeyValue>> entry :
put.getFamilyMap().entrySet()) {
byte [] fam = entry.getKey();
checkFamily(fam);
List<KeyValue> puts = entry.getValue();
if(updateKeys(puts, now)) {
put(fam, puts, writeToWAL);
}
}
return true;
}
return false;
} finally {
if(lockId == null) releaseRowLock(lid);
}
} finally {
splitsAndClosesLock.readLock().unlock();
}
}
/**
 * Replace LATEST_TIMESTAMP stamps in the given keys with <code>now</code>.
 * @param keys edits to stamp; may be null or empty
 * @param now current time, already serialized to bytes
 * @return <code>true</code> when updating the time stamp completed,
 * <code>false</code> when there was nothing to update.
 */
private boolean updateKeys(List<KeyValue> keys, byte [] now) {
  if (keys == null || keys.isEmpty()) {
    return false;
  }
  for (KeyValue kv : keys) {
    if (kv.getTimestamp() == HConstants.LATEST_TIMESTAMP) {
      kv.updateLatestStamp(now);
    }
  }
  return true;
}
// /*
// * Utility method to verify values length.
// * @param batchUpdate The update to verify
// * @throws IOException Thrown if a value is too long
// */
// private void validateValuesLength(Put put)
// throws IOException {
// Map<byte[], List<KeyValue>> families = put.getFamilyMap();
// for(Map.Entry<byte[], List<KeyValue>> entry : families.entrySet()) {
// HColumnDescriptor hcd =
// this.regionInfo.getTableDesc().getFamily(entry.getKey());
// int maxLen = hcd.getMaxValueLength();
// for(KeyValue kv : entry.getValue()) {
// if(kv.getValueLength() > maxLen) {
// throw new ValueOverMaxLengthException("Value in column "
// + Bytes.toString(kv.getColumn()) + " is too long. "
// + kv.getValueLength() + " > " + maxLen);
// }
// }
// }
// }
/*
 * Check if resources to support an update.
 *
 * Here we synchronize on HRegion, a broad scoped lock. Its appropriate
 * given we're figuring in here whether this region is able to take on
 * writes. This is only method with a synchronize (at time of writing),
 * this and the synchronize on 'this' inside in internalFlushCache to send
 * the notify.
 */
private void checkResources() {
boolean blocked = false;
// Block the writer while the memstore is over the blocking threshold;
// internalFlushcache() notifies on 'this' once memory has been released.
while (this.memstoreSize.get() > this.blockingMemStoreSize) {
requestFlush();
if (!blocked) {
LOG.info("Blocking updates for '" + Thread.currentThread().getName() +
"' on region " + Bytes.toStringBinary(getRegionName()) +
": memstore size " +
StringUtils.humanReadableInt(this.memstoreSize.get()) +
" is >= than blocking " +
StringUtils.humanReadableInt(this.blockingMemStoreSize) + " size");
}
blocked = true;
synchronized(this) {
try {
// Bounded wait so we re-check the size even if a notify is missed.
wait(threadWakeFrequency);
} catch (InterruptedException e) {
// continue;
}
}
}
if (blocked) {
LOG.info("Unblocking updates for region " + this + " '"
+ Thread.currentThread().getName() + "'");
}
}
/**
 * Ensure the region accepts writes.
 * @throws IOException Throws exception if region is in read-only mode.
 */
protected void checkReadOnly() throws IOException {
  if (!this.writestate.isReadOnly()) {
    return;
  }
  throw new IOException("region is read only");
}
/**
 * Add updates first to the hlog and then add values to memstore.
 * Warning: Assumption is caller has lock on passed in row.
 * @param family column family the edits belong to
 * @param edits Cell updates by column
 * @throws IOException
 */
private void put(final byte [] family, final List<KeyValue> edits)
throws IOException {
  // Always write to the WAL for this convenience overload.
  this.put(family, edits, true);
}
/**
 * Add updates first to the hlog (if writeToWal) and then add values to memstore.
 * Warning: Assumption is caller has lock on passed in row.
 * @param family
 * @param edits
 * @param writeToWAL if true, then we should write to the log
 * @throws IOException
 */
private void put(final byte [] family, final List<KeyValue> edits,
boolean writeToWAL) throws IOException {
if (edits == null || edits.isEmpty()) {
return;
}
boolean flush = false;
this.updatesLock.readLock().lock();
try {
// WAL first: edits must be durable before they are visible in memstore.
if (writeToWAL) {
long now = System.currentTimeMillis();
this.log.append(regionInfo,
regionInfo.getTableDesc().getName(), edits, now);
}
long size = 0;
Store store = getStore(family);
for (KeyValue kv: edits) {
size = this.memstoreSize.addAndGet(store.add(kv));
}
flush = isFlushSize(size);
} finally {
this.updatesLock.readLock().unlock();
}
if (flush) {
// Request a cache flush. Do it outside update lock.
requestFlush();
}
}
// Ask the flush listener (if any) to schedule a flush of this region.
// De-duplicates: a second request while one is pending is a no-op.
private void requestFlush() {
if (this.flushListener == null) {
return;
}
synchronized (writestate) {
if (this.writestate.isFlushRequested()) {
return;
}
writestate.flushRequested = true;
}
// Make request outside of synchronize block; HBASE-818.
this.flushListener.request(this);
if (LOG.isDebugEnabled()) {
LOG.debug("Flush requested on " + this);
}
}
/*
 * @param size current memstore size in bytes
 * @return True if size is over the flush threshold
 */
private boolean isFlushSize(final long size) {
return size > this.memstoreFlushSize;
}
// Do any reconstruction needed from the log. Hook for subclasses; the base
// implementation is intentionally empty.
protected void doReconstructionLog(Path oldLogFile, long minSeqId, long maxSeqId,
Progressable reporter)
throws UnsupportedEncodingException, IOException {
// Nothing to do (Replaying is done in HStores)
// Used by subclasses; e.g. THBase.
}
// Factory for per-family Store instances; subclasses may override to supply
// a specialized Store implementation.
protected Store instantiateHStore(Path baseDir,
HColumnDescriptor c, Path oldLogFile, Progressable reporter)
throws IOException {
return new Store(baseDir, this, c, this.fs, oldLogFile,
this.conf, reporter);
}
/**
 * Return HStore instance.
 * Use with caution. Exposed for use of fixup utilities.
 * @param column Name of column family hosted by this region.
 * @return Store that goes with the family on passed <code>column</code>,
 * or null if no such family exists.
 * TODO: Make this lookup faster.
 */
public Store getStore(final byte [] column) {
return this.stores.get(column);
}
//////////////////////////////////////////////////////////////////////////////
// Support code
//////////////////////////////////////////////////////////////////////////////
/** Make sure this is a valid row for the HRegion */
private void checkRow(final byte [] row) throws IOException {
  // Guard clause: in-range rows pass straight through.
  if (rowIsInRange(regionInfo, row)) {
    return;
  }
  throw new WrongRegionException("Requested row out of range for " +
    "HRegion " + this + ", startKey='" +
    Bytes.toStringBinary(regionInfo.getStartKey()) + "', getEndKey()='" +
    Bytes.toStringBinary(regionInfo.getEndKey()) + "', row='" +
    Bytes.toStringBinary(row) + "'");
}
/**
 * Obtain a lock on the given row. Blocks until success.
 *
 * I know it's strange to have two mappings:
 * <pre>
 * ROWS ==> LOCKS
 * </pre>
 * as well as
 * <pre>
 * LOCKS ==> ROWS
 * </pre>
 *
 * But it acts as a guard on the client; a miswritten client just can't
 * submit the name of a row and start writing to it; it must know the correct
 * lockid, which matches the lock list in memory.
 *
 * <p>It would be more memory-efficient to assume a correctly-written client,
 * which maybe we'll do in the future.
 *
 * @param row Name of row to lock.
 * @throws IOException
 * @return The id of the held lock.
 */
public Integer obtainRowLock(final byte [] row) throws IOException {
checkRow(row);
splitsAndClosesLock.readLock().lock();
try {
if (this.closed.get()) {
throw new NotServingRegionException("Region " + this + " closed");
}
// The lock id is the hash of the row bytes; wait until no other holder
// has that key, then claim it.
Integer key = Bytes.mapKey(row);
synchronized (locksToRows) {
while (locksToRows.containsKey(key)) {
try {
locksToRows.wait();
} catch (InterruptedException ie) {
// Empty
}
}
locksToRows.put(key, row);
locksToRows.notifyAll();
return key;
}
} finally {
splitsAndClosesLock.readLock().unlock();
}
}
/**
 * Used by unit tests.
 * @param lockid
 * @return Row that goes with <code>lockid</code>, or null if not held
 */
byte [] getRowFromLock(final Integer lockid) {
return locksToRows.get(lockid);
}
/**
 * Release the row lock!
 * Wakes any threads blocked in obtainRowLock waiting on this row.
 * @param lockid The lock ID to release.
 */
void releaseRowLock(final Integer lockid) {
synchronized (locksToRows) {
locksToRows.remove(lockid);
locksToRows.notifyAll();
}
}
/**
 * See if row is currently locked.
 * @param lockid lock id to check
 * @return true if the given lock id is currently held
 */
private boolean isRowLocked(final Integer lockid) {
  synchronized (locksToRows) {
    // containsKey already yields the boolean we want; the original
    // if/return-true/return-false dance was redundant.
    return locksToRows.containsKey(lockid);
  }
}
/**
 * Returns existing row lock if found, otherwise
 * obtains a new row lock and returns it.
 * @param lockid caller-supplied lock id, or null to acquire a fresh lock
 * @param row row to lock when no lock id was supplied
 * @return a valid lock id for the row
 * @throws IOException if the supplied lock id is not currently held
 */
private Integer getLock(Integer lockid, byte [] row)
throws IOException {
  if (lockid == null) {
    return obtainRowLock(row);
  }
  if (!isRowLocked(lockid)) {
    throw new IOException("Invalid row lock");
  }
  return lockid;
}
// Block until every outstanding row lock has been released. Used when the
// region needs exclusive access (e.g. close/split paths call through here).
private void waitOnRowLocks() {
synchronized (locksToRows) {
while (this.locksToRows.size() > 0) {
LOG.debug("waiting for " + this.locksToRows.size() + " row locks");
try {
this.locksToRows.wait();
} catch (InterruptedException e) {
// Catch. Let while test determine loop-end.
}
}
}
}
/**
 * Two HRegions are equal when they serve the same region, i.e. have the
 * same region name.
 */
@Override
public boolean equals(Object o) {
  if (this == o) {
    return true;
  }
  if (!(o instanceof HRegion)) {
    return false;
  }
  // Compare the region names directly instead of comparing hashCode()
  // values: two distinct regions could collide on the hash, which would
  // wrongly report them equal. Name equality implies hash equality, so
  // the equals/hashCode contract is preserved.
  return Bytes.equals(this.regionInfo.getRegionName(),
    ((HRegion) o).regionInfo.getRegionName());
}
// Hash is derived from the region name, matching the identity used by equals.
@Override
public int hashCode() {
return Bytes.hashCode(this.regionInfo.getRegionName());
}
// Render as the human-readable region name; used throughout logging.
@Override
public String toString() {
return this.regionInfo.getRegionNameAsString();
}
/** @return Path of region base directory */
public Path getBaseDir() {
return this.basedir;
}
/**
 * RegionScanner is an iterator through a bunch of rows in an HRegion.
 * <p>
 * It is used to combine scanners from multiple Stores (aka column families).
 */
class RegionScanner implements InternalScanner {
// Heap merging the per-store scanners into a single sorted stream.
private final KeyValueHeap storeHeap;
// Exclusive scan end row, or null for "scan to end of region".
private final byte [] stopRow;
private Filter filter;
// Accumulator for the current row's cells, reused across next() calls.
private List<KeyValue> results = new ArrayList<KeyValue>();
private int batch;
RegionScanner(Scan scan, List<KeyValueScanner> additionalScanners) {
this.filter = scan.getFilter();
this.batch = scan.getBatch();
if (Bytes.equals(scan.getStopRow(), HConstants.EMPTY_END_ROW)) {
this.stopRow = null;
} else {
this.stopRow = scan.getStopRow();
}
// Merge caller-supplied scanners with one scanner per requested family.
List<KeyValueScanner> scanners = new ArrayList<KeyValueScanner>();
if (additionalScanners != null) {
scanners.addAll(additionalScanners);
}
for (Map.Entry<byte[], NavigableSet<byte[]>> entry :
scan.getFamilyMap().entrySet()) {
Store store = stores.get(entry.getKey());
scanners.add(store.getScanner(scan, entry.getValue()));
}
this.storeHeap =
new KeyValueHeap(scanners.toArray(new KeyValueScanner[0]), comparator);
}
RegionScanner(Scan scan) {
this(scan, null);
}
// Reset the filter's per-row state between rows.
private void resetFilters() {
if (filter != null) {
filter.reset();
}
}
/**
 * Fetch the next row (or up to <code>limit</code> cells of it) into
 * <code>outResults</code>.
 * @return false when the scanner is exhausted or the filter says stop.
 */
public boolean next(List<KeyValue> outResults, int limit) throws IOException {
if (closing.get() || closed.get()) {
close();
throw new NotServingRegionException(regionInfo.getRegionNameAsString() +
" is closing=" + closing.get() + " or closed=" + closed.get());
}
results.clear();
boolean returnResult = nextInternal(limit);
if (!returnResult && filter != null && filter.filterRow()) {
results.clear();
}
outResults.addAll(results);
resetFilters();
if (isFilterDone()) {
return false;
}
return returnResult;
}
public boolean next(List<KeyValue> outResults) throws IOException {
// apply the batching limit by default
return next(outResults, batch);
}
/*
 * @return True if a filter rules the scanner is over, done.
 */
boolean isFilterDone() {
return this.filter != null && this.filter.filterAllRemaining();
}
/*
 * Core row-advancement loop: skips filtered rows, honors stopRow
 * (exclusive), and fills this.results for the current row.
 * @return true if there are more rows, false if scanner is done
 * @throws IOException
 */
private boolean nextInternal(int limit) throws IOException {
byte [] currentRow = null;
boolean filterCurrentRow = false;
while (true) {
KeyValue kv = this.storeHeap.peek();
if (kv == null) return false;
byte [] row = kv.getRow();
boolean samerow = Bytes.equals(currentRow, row);
if (samerow && filterCurrentRow) {
// Filter all columns until row changes
readAndDumpCurrentResult();
continue;
}
if (!samerow) {
// Continue on the next row:
currentRow = row;
filterCurrentRow = false;
// See if we passed stopRow
if (this.stopRow != null &&
comparator.compareRows(this.stopRow, 0, this.stopRow.length,
currentRow, 0, currentRow.length) <= 0) {
return false;
}
if (hasResults()) return true;
}
// See if current row should be filtered based on row key
if (this.filter != null && this.filter.filterRowKey(row, 0, row.length)) {
readAndDumpCurrentResult();
resetFilters();
filterCurrentRow = true;
currentRow = row;
continue;
}
this.storeHeap.next(results, limit);
if (limit > 0 && results.size() == limit) {
return true;
}
}
}
// Consume and discard the heap's current batch of cells.
private void readAndDumpCurrentResult() throws IOException {
this.storeHeap.next(this.results);
this.results.clear();
}
/*
 * Do we have results to return or should we continue. Call when we get to
 * the end of a row. Does house cleaning -- clearing results and resetting
 * filters -- if we are to continue.
 * @return True if we should return else false if need to keep going.
 */
private boolean hasResults() {
if (this.results.isEmpty() ||
this.filter != null && this.filter.filterRow()) {
// Make sure results is empty, reset filters
this.results.clear();
resetFilters();
return false;
}
return true;
}
public void close() {
storeHeap.close();
}
/**
 *
 * @param scanner to be closed
 */
public void close(KeyValueScanner scanner) {
try {
scanner.close();
// NOTE(review): swallowing NPE here presumably tolerates a null or
// already-torn-down scanner -- confirm this is intentional.
} catch(NullPointerException npe) {}
}
/**
 * @return the current storeHeap
 */
public KeyValueHeap getStoreHeap() {
return this.storeHeap;
}
}
// Utility methods
/**
 * Convenience method creating new HRegions. Used by createTable and by the
 * bootstrap code in the HMaster constructor.
 * Note, this method creates an {@link HLog} for the created region. It
 * needs to be closed explicitly. Use {@link HRegion#getLog()} to get
 * access.
 * @param info Info for region to create.
 * @param rootDir Root directory for HBase instance
 * @param conf
 * @return new HRegion
 *
 * @throws IOException
 */
public static HRegion createHRegion(final HRegionInfo info, final Path rootDir,
final Configuration conf)
throws IOException {
Path tableDir =
HTableDescriptor.getTableDir(rootDir, info.getTableDesc().getName());
Path regionDir = HRegion.getRegionDir(tableDir, info.getEncodedName());
FileSystem fs = FileSystem.get(conf);
fs.mkdirs(regionDir);
// Each bootstrap region gets its own HLog rooted under the region dir.
HRegion region = new HRegion(tableDir,
new HLog(fs, new Path(regionDir, HREGION_LOGDIR_NAME),
new Path(regionDir, HREGION_OLDLOGDIR_NAME), conf, null),
fs, conf, info, null);
region.initialize(null, null);
return region;
}
/**
 * Convenience method to open a HRegion outside of an HRegionServer context.
 * @param info Info for region to be opened.
 * @param rootDir Root directory for HBase instance
 * @param log HLog for region to use. This method will call
 * HLog#setSequenceNumber(long) passing the result of the call to
 * HRegion#getMinSequenceId() to ensure the log id is properly kept
 * up. HRegionStore does this every time it opens a new region.
 * May be null.
 * @param conf
 * @return new HRegion
 *
 * @throws IOException
 * @throws NullPointerException if <code>info</code> is null
 */
public static HRegion openHRegion(final HRegionInfo info, final Path rootDir,
final HLog log, final Configuration conf)
throws IOException {
if (LOG.isDebugEnabled()) {
LOG.debug("Opening region: " + info);
}
if (info == null) {
throw new NullPointerException("Passed region info is null");
}
HRegion r = new HRegion(
HTableDescriptor.getTableDir(rootDir, info.getTableDesc().getName()),
log, FileSystem.get(conf), conf, info, null);
r.initialize(null, null);
if (log != null) {
// Keep the shared log's sequence number ahead of this region's edits.
log.setSequenceNumber(r.getMinSequenceId());
}
return r;
}
  /**
   * Inserts a new region's meta information into the passed
   * <code>meta</code> region. Used by the HMaster bootstrap code adding
   * new table to ROOT table.
   *
   * @param meta META HRegion to be updated
   * @param r HRegion to add to <code>meta</code>
   *
   * @throws IOException
   */
  public static void addRegionToMETA(HRegion meta, HRegion r)
  throws IOException {
    meta.checkResources();
    // The row key is the region name
    byte [] row = r.getRegionName();
    // Hold the row lock so the info column is written atomically with
    // respect to other writers of this row.
    Integer lid = meta.obtainRowLock(row);
    try {
      List<KeyValue> edits = new ArrayList<KeyValue>();
      edits.add(new KeyValue(row, CATALOG_FAMILY, REGIONINFO_QUALIFIER,
        System.currentTimeMillis(), Writables.getBytes(r.getRegionInfo())));
      meta.put(HConstants.CATALOG_FAMILY, edits);
    } finally {
      // Always release the row lock, even if the put fails.
      meta.releaseRowLock(lid);
    }
  }
/**
* Delete a region's meta information from the passed
* <code>meta</code> region. Deletes the row.
* @param srvr META server to be updated
* @param metaRegionName Meta region name
* @param regionName HRegion to remove from <code>meta</code>
*
* @throws IOException
*/
public static void removeRegionFromMETA(final HRegionInterface srvr,
final byte [] metaRegionName, final byte [] regionName)
throws IOException {
Delete delete = new Delete(regionName);
srvr.delete(metaRegionName, delete);
}
  /**
   * Utility method used by HMaster marking regions offlined.
   * Writes the region info back with the offline flag set, then clears the
   * server and startcode columns via {@link #cleanRegionInMETA}.
   * @param srvr META server to be updated
   * @param metaRegionName Meta region name
   * @param info HRegion to update in <code>meta</code>; note this method
   * mutates the passed-in info by setting its offline flag
   *
   * @throws IOException
   */
  public static void offlineRegionInMETA(final HRegionInterface srvr,
    final byte [] metaRegionName, final HRegionInfo info)
  throws IOException {
    // Puts and Deletes used to be "atomic" here. We can use row locks if
    // we need to keep that property, or we can expand Puts and Deletes to
    // allow them to be committed at once.
    byte [] row = info.getRegionName();
    Put put = new Put(row);
    info.setOffline(true);
    put.add(CATALOG_FAMILY, REGIONINFO_QUALIFIER, Writables.getBytes(info));
    srvr.put(metaRegionName, put);
    cleanRegionInMETA(srvr, metaRegionName, info);
  }
/**
* Clean COL_SERVER and COL_STARTCODE for passed <code>info</code> in
* <code>.META.</code>
* @param srvr
* @param metaRegionName
* @param info
* @throws IOException
*/
public static void cleanRegionInMETA(final HRegionInterface srvr,
final byte [] metaRegionName, final HRegionInfo info)
throws IOException {
Delete del = new Delete(info.getRegionName());
del.deleteColumns(CATALOG_FAMILY, SERVER_QUALIFIER);
del.deleteColumns(CATALOG_FAMILY, STARTCODE_QUALIFIER);
srvr.delete(metaRegionName, del);
}
  /**
   * Deletes all the files for a HRegion
   *
   * @param fs the file system object
   * @param rootdir qualified path of HBase root directory
   * @param info HRegionInfo for region to be deleted
   * @throws IOException
   */
  public static void deleteRegion(FileSystem fs, Path rootdir, HRegionInfo info)
  throws IOException {
    // Resolve the region directory from root dir + region info, then delete.
    deleteRegion(fs, HRegion.getRegionDir(rootdir, info));
  }
private static void deleteRegion(FileSystem fs, Path regiondir)
throws IOException {
if (LOG.isDebugEnabled()) {
LOG.debug("DELETING region " + regiondir.toString());
}
if (!fs.delete(regiondir, true)) {
LOG.warn("Failed delete of " + regiondir);
}
}
/**
* Computes the Path of the HRegion
*
* @param tabledir qualified path for table
* @param name ENCODED region name
* @return Path of HRegion directory
*/
public static Path getRegionDir(final Path tabledir, final int name) {
return new Path(tabledir, Integer.toString(name));
}
/**
* Computes the Path of the HRegion
*
* @param rootdir qualified path of HBase root directory
* @param info HRegionInfo for the region
* @return qualified path of region directory
*/
public static Path getRegionDir(final Path rootdir, final HRegionInfo info) {
return new Path(
HTableDescriptor.getTableDir(rootdir, info.getTableDesc().getName()),
Integer.toString(info.getEncodedName()));
}
/**
* Determines if the specified row is within the row range specified by the
* specified HRegionInfo
*
* @param info HRegionInfo that specifies the row range
* @param row row to be checked
* @return true if the row is within the range specified by the HRegionInfo
*/
public static boolean rowIsInRange(HRegionInfo info, final byte [] row) {
return ((info.getStartKey().length == 0) ||
(Bytes.compareTo(info.getStartKey(), row) <= 0)) &&
((info.getEndKey().length == 0) ||
(Bytes.compareTo(info.getEndKey(), row) > 0));
}
/**
* Make the directories for a specific column family
*
* @param fs the file system
* @param tabledir base directory where region will live (usually the table dir)
* @param hri
* @param colFamily the column family
* @throws IOException
*/
public static void makeColumnFamilyDirs(FileSystem fs, Path tabledir,
final HRegionInfo hri, byte [] colFamily)
throws IOException {
Path dir = Store.getStoreHomedir(tabledir, hri.getEncodedName(), colFamily);
if (!fs.mkdirs(dir)) {
LOG.warn("Failed to create " + dir);
}
}
  /**
   * Merge two HRegions.  The regions must be adjacent and must not overlap.
   *
   * @param srcA
   * @param srcB
   * @return new merged HRegion
   * @throws IOException if both regions have a null start key or the
   * regions are not adjacent
   */
  public static HRegion mergeAdjacent(final HRegion srcA, final HRegion srcB)
  throws IOException {
    HRegion a = srcA;
    HRegion b = srcB;
    // Make sure that srcA comes first; important for key-ordering during
    // write of the merged file.
    if (srcA.getStartKey() == null) {
      if (srcB.getStartKey() == null) {
        throw new IOException("Cannot merge two regions with null start key");
      }
      // A's start key is null but B's isn't. Assume A comes before B
    } else if ((srcB.getStartKey() == null) ||
      (Bytes.compareTo(srcA.getStartKey(), srcB.getStartKey()) > 0)) {
      // B sorts before A: swap so `a` is always the lower-keyed region.
      a = srcB;
      b = srcA;
    }
    // Adjacency check: the lower region must end exactly where the upper
    // region begins.
    if (!(Bytes.compareTo(a.getEndKey(), b.getStartKey()) == 0)) {
      throw new IOException("Cannot merge non-adjacent regions");
    }
    return merge(a, b);
  }
  /**
   * Merge two regions whether they are adjacent or not.
   * Both source regions are flushed, major-compacted, closed, and their
   * store files moved under a freshly created destination region; the
   * source region directories are deleted at the end.
   *
   * @param a region a
   * @param b region b
   * @return new merged region, initialized and compacted
   * @throws IOException if the regions belong to different tables, the
   * target region directory already exists, or two store files of a family
   * share a sequence id
   */
  public static HRegion merge(HRegion a, HRegion b) throws IOException {
    if (!a.getRegionInfo().getTableDesc().getNameAsString().equals(
        b.getRegionInfo().getTableDesc().getNameAsString())) {
      throw new IOException("Regions do not belong to the same table");
    }
    FileSystem fs = a.getFilesystem();
    // Make sure each region's cache is empty
    a.flushcache();
    b.flushcache();
    // Compact each region so we only have one store file per family
    a.compactStores(true);
    if (LOG.isDebugEnabled()) {
      LOG.debug("Files for region: " + a);
      listPaths(fs, a.getRegionDir());
    }
    b.compactStores(true);
    if (LOG.isDebugEnabled()) {
      LOG.debug("Files for region: " + b);
      listPaths(fs, b.getRegionDir());
    }
    Configuration conf = a.getConf();
    HTableDescriptor tabledesc = a.getTableDesc();
    HLog log = a.getLog();
    Path basedir = a.getBaseDir();
    // Presume both are of same region type -- i.e. both user or catalog
    // table regions.  This way can use comparator.
    // Merged start key: EMPTY (table start) if either input starts at the
    // empty key, otherwise the lesser of the two start keys.
    final byte [] startKey = a.comparator.matchingRows(a.getStartKey(), 0,
          a.getStartKey().length,
        EMPTY_BYTE_ARRAY, 0, EMPTY_BYTE_ARRAY.length) ||
      b.comparator.matchingRows(b.getStartKey(), 0, b.getStartKey().length,
        EMPTY_BYTE_ARRAY, 0, EMPTY_BYTE_ARRAY.length)?
        EMPTY_BYTE_ARRAY:
          a.comparator.compareRows(a.getStartKey(), 0, a.getStartKey().length,
        b.getStartKey(), 0, b.getStartKey().length) <= 0?
          a.getStartKey(): b.getStartKey();
    // Merged end key: EMPTY (table end) if either input ends at the empty
    // key, otherwise the greater of the two end keys.
    // NOTE(review): the second matchingRows call uses a.comparator on b's
    // end key -- presumably fine since both regions belong to the same
    // table and share a comparator type; confirm.
    final byte [] endKey = a.comparator.matchingRows(a.getEndKey(), 0,
          a.getEndKey().length, EMPTY_BYTE_ARRAY, 0, EMPTY_BYTE_ARRAY.length) ||
        a.comparator.matchingRows(b.getEndKey(), 0, b.getEndKey().length,
          EMPTY_BYTE_ARRAY, 0, EMPTY_BYTE_ARRAY.length)?
        EMPTY_BYTE_ARRAY:
        a.comparator.compareRows(a.getEndKey(), 0, a.getEndKey().length,
            b.getEndKey(), 0, b.getEndKey().length) <= 0?
                b.getEndKey(): a.getEndKey();
    HRegionInfo newRegionInfo = new HRegionInfo(tabledesc, startKey, endKey);
    LOG.info("Creating new region " + newRegionInfo.toString());
    int encodedName = newRegionInfo.getEncodedName();
    Path newRegionDir = HRegion.getRegionDir(a.getBaseDir(), encodedName);
    if(fs.exists(newRegionDir)) {
      throw new IOException("Cannot merge; target file collision at " +
        newRegionDir);
    }
    fs.mkdirs(newRegionDir);
    LOG.info("starting merge of regions: " + a + " and " + b +
      " into new region " + newRegionInfo.toString() +
      " with start key <" + Bytes.toString(startKey) + "> and end key <" +
      Bytes.toString(endKey) + ">");
    // Move HStoreFiles under new region directory
    Map<byte [], List<StoreFile>> byFamily =
      new TreeMap<byte [], List<StoreFile>>(Bytes.BYTES_COMPARATOR);
    // close() returns the region's store files; both regions are now closed.
    byFamily = filesByFamily(byFamily, a.close());
    byFamily = filesByFamily(byFamily, b.close());
    for (Map.Entry<byte [], List<StoreFile>> es : byFamily.entrySet()) {
      byte [] colFamily = es.getKey();
      makeColumnFamilyDirs(fs, basedir, newRegionInfo, colFamily);
      // Because we compacted the source regions we should have no more than two
      // HStoreFiles per family and there will be no reference store
      List<StoreFile> srcFiles = es.getValue();
      if (srcFiles.size() == 2) {
        long seqA = srcFiles.get(0).getMaxSequenceId();
        long seqB = srcFiles.get(1).getMaxSequenceId();
        if (seqA == seqB) {
          // Can't have same sequenceid since on open of a store, this is what
          // distingushes the files (see the map of stores how its keyed by
          // sequenceid).
          throw new IOException("Files have same sequenceid: " + seqA);
        }
      }
      for (StoreFile hsf: srcFiles) {
        StoreFile.rename(fs, hsf.getPath(),
          StoreFile.getUniqueFile(fs, Store.getStoreHomedir(basedir,
            newRegionInfo.getEncodedName(), colFamily)));
      }
    }
    if (LOG.isDebugEnabled()) {
      LOG.debug("Files for new region");
      listPaths(fs, newRegionDir);
    }
    // Open the merged region, then compact to tidy the moved-in files.
    HRegion dstRegion = new HRegion(basedir, log, fs, conf, newRegionInfo, null);
    dstRegion.initialize(null, null);
    dstRegion.compactStores();
    if (LOG.isDebugEnabled()) {
      LOG.debug("Files for new region");
      listPaths(fs, dstRegion.getRegionDir());
    }
    // Source regions were closed above; their directories can now go.
    deleteRegion(fs, a.getRegionDir());
    deleteRegion(fs, b.getRegionDir());
    LOG.info("merge completed. New region is " + dstRegion);
    return dstRegion;
  }
/*
* Fills a map with a vector of store files keyed by column family.
* @param byFamily Map to fill.
* @param storeFiles Store files to process.
* @param family
* @return Returns <code>byFamily</code>
*/
private static Map<byte [], List<StoreFile>> filesByFamily(
Map<byte [], List<StoreFile>> byFamily, List<StoreFile> storeFiles) {
for (StoreFile src: storeFiles) {
byte [] family = src.getFamily();
List<StoreFile> v = byFamily.get(family);
if (v == null) {
v = new ArrayList<StoreFile>();
byFamily.put(family, v);
}
v.add(src);
}
return byFamily;
}
  /**
   * @return True if any store of this region needs a major compaction.
   * @throws IOException
   */
  boolean isMajorCompaction() throws IOException {
    // One store wanting a major compaction is enough for the whole region.
    for (Store store: this.stores.values()) {
      if (store.isMajorCompaction()) {
        return true;
      }
    }
    return false;
  }
/*
* List the files under the specified directory
*
* @param fs
* @param dir
* @throws IOException
*/
private static void listPaths(FileSystem fs, Path dir) throws IOException {
if (LOG.isDebugEnabled()) {
FileStatus[] stats = fs.listStatus(dir);
if (stats == null || stats.length == 0) {
return;
}
for (int i = 0; i < stats.length; i++) {
String path = stats[i].getPath().toString();
if (stats[i].isDir()) {
LOG.debug("d " + path);
listPaths(fs, stats[i].getPath());
} else {
LOG.debug("f " + path + " size=" + stats[i].getLen());
}
}
}
}
//
// HBASE-880
//
  /**
   * Perform a get against this region.  If the Get names no families, all
   * families of the table are added to it first.
   * @param get
   * @param lockid row lock id obtained by a previous lock call, or null to
   * have this method acquire (and release) the row lock itself
   * @return result
   * @throws IOException
   */
  public Result get(final Get get, final Integer lockid) throws IOException {
    // Verify families are all valid
    if (get.hasFamilies()) {
      for (byte [] family: get.familySet()) {
        checkFamily(family);
      }
    } else { // Adding all families to scanner
      for (byte[] family: regionInfo.getTableDesc().getFamiliesKeys()) {
        get.addFamily(family);
      }
    }
    // Lock row
    Integer lid = getLock(lockid, get.getRow());
    List<KeyValue> result = new ArrayList<KeyValue>();
    try {
      // Collect matches family by family into one result list.
      for (Map.Entry<byte[],NavigableSet<byte[]>> entry:
          get.getFamilyMap().entrySet()) {
        get(this.stores.get(entry.getKey()), get, entry.getValue(), result);
      }
    } finally {
      // Only release the lock if we acquired it here (caller passed null).
      if(lockid == null) releaseRowLock(lid);
    }
    return new Result(result);
  }
  /*
   * Fetch matching values for <code>get</code> from a single store,
   * appending them to <code>result</code>.
   */
  private void get(final Store store, final Get get,
      final NavigableSet<byte []> qualifiers, List<KeyValue> result)
  throws IOException {
    store.get(get, qualifiers, result);
  }
  /**
   * Atomically (under the row lock) add <code>amount</code> to the current
   * long value stored at row/family/qualifier, writing the new value back.
   * A missing cell is treated as 0.
   *
   * @param row
   * @param family
   * @param qualifier
   * @param amount delta to add to the current value
   * @param writeToWAL if true, append the new value to the write-ahead log
   * before updating the memstore
   * @return The new value.
   * @throws IOException
   */
  public long incrementColumnValue(byte [] row, byte [] family,
      byte [] qualifier, long amount, boolean writeToWAL)
  throws IOException {
    checkRow(row);
    boolean flush = false;
    // Lock row
    Integer lid = obtainRowLock(row);
    long result = amount;
    try {
      Store store = stores.get(family);
      // Get the old value:
      Get get = new Get(row);
      get.addColumn(family, qualifier);
      List<KeyValue> results = new ArrayList<KeyValue>();
      NavigableSet<byte[]> qualifiers = new TreeSet<byte[]>(Bytes.BYTES_COMPARATOR);
      qualifiers.add(qualifier);
      store.get(get, qualifiers, results);
      if (!results.isEmpty()) {
        KeyValue kv = results.get(0);
        // Read the stored long straight out of the KeyValue's backing buffer.
        byte [] buffer = kv.getBuffer();
        int valueOffset = kv.getValueOffset();
        result += Bytes.toLong(buffer, valueOffset, Bytes.SIZEOF_LONG);
      }
      // build the KeyValue now:
      KeyValue newKv = new KeyValue(row, family,
          qualifier, System.currentTimeMillis(),
          Bytes.toBytes(result));
      // now log it:
      if (writeToWAL) {
        long now = System.currentTimeMillis();
        List<KeyValue> edits = new ArrayList<KeyValue>(1);
        edits.add(newKv);
        this.log.append(regionInfo,
            regionInfo.getTableDesc().getName(), edits, now);
      }
      // Now request the ICV to the store, this will set the timestamp
      // appropriately depending on if there is a value in memcache or not.
      // returns the
      long size = store.updateColumnValue(row, family, qualifier, result);
      // Track the memstore growth to decide whether a flush is needed.
      size = this.memstoreSize.addAndGet(size);
      flush = isFlushSize(size);
    } finally {
      releaseRowLock(lid);
    }
    if (flush) {
      // Request a cache flush.  Do it outside update lock.
      requestFlush();
    }
    return result;
  }
//
// New HBASE-880 Helpers
//
private void checkFamily(final byte [] family)
throws NoSuchColumnFamilyException {
if(!regionInfo.getTableDesc().hasFamily(family)) {
throw new NoSuchColumnFamilyException("Column family " +
Bytes.toString(family) + " does not exist in region " + this
+ " in table " + regionInfo.getTableDesc());
}
}
  // Shallow heap size of an HRegion instance: primitive fields plus object
  // references, aligned.  NOTE(review): the counts below must be kept in
  // sync with the declared fields of HRegion -- confirm when fields change.
  public static final long FIXED_OVERHEAD = ClassSize.align(
      (5 * Bytes.SIZEOF_LONG) + Bytes.SIZEOF_BOOLEAN +
      (19 * ClassSize.REFERENCE) + ClassSize.OBJECT);
  // Deep heap size: shallow size plus the collections, atomics and locks an
  // HRegion retains (concurrent maps, skip-list map, reentrant locks, etc.).
  public static final long DEEP_OVERHEAD = ClassSize.align(FIXED_OVERHEAD +
      ClassSize.OBJECT + (2 * ClassSize.ATOMIC_BOOLEAN) +
      ClassSize.ATOMIC_LONG + ClassSize.ATOMIC_INTEGER +
      ClassSize.CONCURRENT_HASHMAP +
      (16 * ClassSize.CONCURRENT_HASHMAP_ENTRY) +
      (16 * ClassSize.CONCURRENT_HASHMAP_SEGMENT) +
      ClassSize.CONCURRENT_SKIPLISTMAP + ClassSize.CONCURRENT_SKIPLISTMAP_ENTRY +
      ClassSize.align(ClassSize.OBJECT + (5 * Bytes.SIZEOF_BOOLEAN)) +
      (3 * ClassSize.REENTRANT_LOCK));
@Override
public long heapSize() {
long heapSize = DEEP_OVERHEAD;
for(Store store : this.stores.values()) {
heapSize += store.heapSize();
}
return heapSize;
}
/*
* This method calls System.exit.
* @param message Message to print out. May be null.
*/
private static void printUsageAndExit(final String message) {
if (message != null && message.length() > 0) System.out.println(message);
System.out.println("Usage: HRegion CATLALOG_TABLE_DIR [major_compact]");
System.out.println("Options:");
System.out.println(" major_compact Pass this option to major compact " +
"passed region.");
System.out.println("Default outputs scan of passed region.");
System.exit(1);
}
  /*
   * Process table.
   * Do major compaction or list content.
   * Expects <code>p</code> to name a ROOT or META table directory holding a
   * single region.
   * @param fs
   * @param p table directory
   * @param log
   * @param c
   * @param majorCompact if true, major-compact the region; otherwise scan
   * it and log its contents
   * @throws IOException if the directory is not a known catalog table
   */
  private static void processTable(final FileSystem fs, final Path p,
      final HLog log, final Configuration c,
      final boolean majorCompact)
  throws IOException {
    HRegion region = null;
    String rootStr = Bytes.toString(HConstants.ROOT_TABLE_NAME);
    String metaStr = Bytes.toString(HConstants.META_TABLE_NAME);
    // Currently expects tables have one region only.
    if (p.getName().startsWith(rootStr)) {
      region = new HRegion(p, log, fs, c, HRegionInfo.ROOT_REGIONINFO, null);
    } else if (p.getName().startsWith(metaStr)) {
      region = new HRegion(p, log, fs, c, HRegionInfo.FIRST_META_REGIONINFO,
          null);
    } else {
      throw new IOException("Not a known catalog table: " + p.toString());
    }
    try {
      region.initialize(null, null);
      if (majorCompact) {
        region.compactStores(true);
      } else {
        // Default behavior
        Scan scan = new Scan();
        // scan.addFamily(HConstants.CATALOG_FAMILY);
        InternalScanner scanner = region.getScanner(scan);
        try {
          List<KeyValue> kvs = new ArrayList<KeyValue>();
          boolean done = false;
          do {
            kvs.clear();
            // next() returns true while there are more rows to scan.
            done = scanner.next(kvs);
            if (kvs.size() > 0) LOG.info(kvs);
          } while (done);
        } finally {
          scanner.close();
        }
        // System.out.println(region.getClosestRowBefore(Bytes.toBytes("GeneratedCSVContent2,E3652782193BC8D66A0BA1629D0FAAAB,9993372036854775807")));
      }
    } finally {
      // Always close the region, even if the scan or compaction fails.
      region.close();
    }
  }
/**
* For internal use in forcing splits ahead of file size limit.
* @param b
* @return previous value
*/
public boolean shouldSplit(boolean b) {
boolean old = this.splitRequest;
this.splitRequest = b;
return old;
}
/**
* Checks every store to see if one has too many
* store files
* @return true if any store has too many store files
*/
public boolean hasTooManyStoreFiles() {
for(Store store : stores.values()) {
if(store.hasTooManyStoreFiles()) {
return true;
}
}
return false;
}
  /**
   * Facility for dumping and compacting catalog tables.
   * Only does catalog tables since these are only tables we for sure know
   * schema on.  For usage run:
   * <pre>
   *   ./bin/hbase org.apache.hadoop.hbase.regionserver.HRegion
   * </pre>
   * @param args first argument is the catalog table directory; an optional
   * second argument starting with "major" requests a major compaction
   * @throws IOException
   */
  public static void main(String[] args) throws IOException {
    if (args.length < 1) {
      printUsageAndExit(null);
    }
    boolean majorCompact = false;
    if (args.length > 1) {
      // Any second argument must begin with "major" (case-insensitive).
      if (!args[1].toLowerCase().startsWith("major")) {
        printUsageAndExit("ERROR: Unrecognized option <" + args[1] + ">");
      }
      majorCompact = true;
    }
    Path tableDir = new Path(args[0]);
    Configuration c = HBaseConfiguration.create();
    FileSystem fs = FileSystem.get(c);
    // Throwaway log dir under hbase.tmp.dir, timestamped to be unique.
    Path logdir = new Path(c.get("hbase.tmp.dir"),
        "hlog" + tableDir.getName() + System.currentTimeMillis());
    Path oldLogDir = new Path(c.get("hbase.tmp.dir"), HREGION_OLDLOGDIR_NAME);
    HLog log = new HLog(fs, logdir, oldLogDir, c, null);
    try {
      processTable(fs, tableDir, log, c, majorCompact);
    } finally {
      log.close();
      // Shut down the shared block cache, if one was created.
      BlockCache bc = StoreFile.getBlockCache(c);
      if (bc != null) bc.shutdown();
    }
  }
}
| HBASE-2295 Row locks may deadlock with themselves
git-svn-id: 25ca64b629f24bdef6d1cceac138f74b13f55e41@921098 13f79535-47bb-0310-9956-ffa450edef68
| core/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java | HBASE-2295 Row locks may deadlock with themselves | <ide><path>ore/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
<ide> import java.util.Iterator;
<ide> import java.util.List;
<ide> import java.util.Map;
<add> import java.util.Set;
<ide> import java.util.NavigableSet;
<ide> import java.util.TreeMap;
<ide> import java.util.TreeSet;
<del> import java.util.concurrent.ConcurrentHashMap;
<add> import java.util.HashMap;
<add> import java.util.HashSet;
<add> import java.util.Random;
<ide> import java.util.concurrent.ConcurrentSkipListMap;
<ide> import java.util.concurrent.atomic.AtomicBoolean;
<ide> import java.util.concurrent.atomic.AtomicLong;
<ide> // Members
<ide> //////////////////////////////////////////////////////////////////////////////
<ide>
<del> private final Map<Integer, byte []> locksToRows =
<del> new ConcurrentHashMap<Integer, byte []>();
<add> private final Set<byte[]> lockedRows =
<add> new TreeSet<byte[]>(Bytes.BYTES_COMPARATOR);
<add> private final Map<Integer, byte []> lockIds =
<add> new HashMap<Integer, byte []>();
<add> private int lockIdGenerator = 1;
<add> static private Random rand = new Random();
<add>
<ide> protected final Map<byte [], Store> stores =
<ide> new ConcurrentSkipListMap<byte [], Store>(Bytes.BYTES_RAWCOMPARATOR);
<ide>
<ide> if (this.closed.get()) {
<ide> throw new NotServingRegionException("Region " + this + " closed");
<ide> }
<del> Integer key = Bytes.mapKey(row);
<del> synchronized (locksToRows) {
<del> while (locksToRows.containsKey(key)) {
<add> synchronized (lockedRows) {
<add> while (lockedRows.contains(row)) {
<ide> try {
<del> locksToRows.wait();
<add> lockedRows.wait();
<ide> } catch (InterruptedException ie) {
<ide> // Empty
<ide> }
<ide> }
<del> locksToRows.put(key, row);
<del> locksToRows.notifyAll();
<del> return key;
<add> // generate a new lockid. Attempt to insert the new [lockid, row].
<add> // if this lockid already exists in the map then revert and retry
<add> // We could have first done a lockIds.get, and if it does not exist only
<add> // then do a lockIds.put, but the hope is that the lockIds.put will
<add> // mostly return null the first time itself because there won't be
<add> // too many lockId collisions.
<add> byte [] prev = null;
<add> Integer lockId = null;
<add> do {
<add> lockId = new Integer(lockIdGenerator++);
<add> prev = lockIds.put(lockId, row);
<add> if (prev != null) {
<add> lockIds.put(lockId, prev); // revert old value
<add> lockIdGenerator = rand.nextInt(); // generate new start point
<add> }
<add> } while (prev != null);
<add>
<add> lockedRows.add(row);
<add> lockedRows.notifyAll();
<add> return lockId;
<ide> }
<ide> } finally {
<ide> splitsAndClosesLock.readLock().unlock();
<ide> * @return Row that goes with <code>lockid</code>
<ide> */
<ide> byte [] getRowFromLock(final Integer lockid) {
<del> return locksToRows.get(lockid);
<add> synchronized (lockedRows) {
<add> return lockIds.get(lockid);
<add> }
<ide> }
<ide>
<ide> /**
<ide> * @param lockid The lock ID to release.
<ide> */
<ide> void releaseRowLock(final Integer lockid) {
<del> synchronized (locksToRows) {
<del> locksToRows.remove(lockid);
<del> locksToRows.notifyAll();
<add> synchronized (lockedRows) {
<add> byte[] row = lockIds.remove(lockid);
<add> lockedRows.remove(row);
<add> lockedRows.notifyAll();
<ide> }
<ide> }
<ide>
<ide> * @return boolean
<ide> */
<ide> private boolean isRowLocked(final Integer lockid) {
<del> synchronized (locksToRows) {
<del> if(locksToRows.containsKey(lockid)) {
<add> synchronized (lockedRows) {
<add> if (lockIds.get(lockid) != null) {
<ide> return true;
<ide> }
<ide> return false;
<ide> }
<ide>
<ide> private void waitOnRowLocks() {
<del> synchronized (locksToRows) {
<del> while (this.locksToRows.size() > 0) {
<del> LOG.debug("waiting for " + this.locksToRows.size() + " row locks");
<add> synchronized (lockedRows) {
<add> while (!this.lockedRows.isEmpty()) {
<add> if (LOG.isDebugEnabled()) {
<add> LOG.debug("Waiting on " + this.lockedRows.size() + " row locks");
<add> }
<ide> try {
<del> this.locksToRows.wait();
<add> this.lockedRows.wait();
<ide> } catch (InterruptedException e) {
<ide> // Catch. Let while test determine loop-end.
<ide> } |
|
Java | mit | 8f06523d82b94cf5bd586f57a7faecb3d7af23be | 0 | bcgit/bc-java,bcgit/bc-java,bcgit/bc-java | package org.bouncycastle.jce.provider.test;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.math.BigInteger;
import java.security.InvalidKeyException;
import java.security.KeyFactory;
import java.security.KeyPair;
import java.security.KeyPairGenerator;
import java.security.KeyStore;
import java.security.KeyStoreException;
import java.security.NoSuchAlgorithmException;
import java.security.NoSuchProviderException;
import java.security.PrivateKey;
import java.security.PublicKey;
import java.security.SecureRandom;
import java.security.Security;
import java.security.Signature;
import java.security.SignatureException;
import java.security.UnrecoverableKeyException;
import java.security.cert.Certificate;
import java.security.cert.CertificateException;
import java.security.cert.X509Certificate;
import java.security.spec.PKCS8EncodedKeySpec;
import java.security.spec.X509EncodedKeySpec;
import java.util.Date;
import org.bouncycastle.asn1.cryptopro.CryptoProObjectIdentifiers;
import org.bouncycastle.crypto.CipherParameters;
import org.bouncycastle.crypto.params.ECDomainParameters;
import org.bouncycastle.crypto.params.ECPublicKeyParameters;
import org.bouncycastle.crypto.params.ParametersWithRandom;
import org.bouncycastle.crypto.signers.ECGOST3410_2012Signer;
import org.bouncycastle.jcajce.provider.asymmetric.util.ECUtil;
import org.bouncycastle.jce.X509Principal;
import org.bouncycastle.jce.interfaces.ECPrivateKey;
import org.bouncycastle.jce.interfaces.ECPublicKey;
import org.bouncycastle.jce.interfaces.GOST3410PrivateKey;
import org.bouncycastle.jce.interfaces.GOST3410PublicKey;
import org.bouncycastle.jce.interfaces.PKCS12BagAttributeCarrier;
import org.bouncycastle.jce.provider.BouncyCastleProvider;
import org.bouncycastle.jce.spec.ECNamedCurveGenParameterSpec;
import org.bouncycastle.jce.spec.ECParameterSpec;
import org.bouncycastle.jce.spec.ECPrivateKeySpec;
import org.bouncycastle.jce.spec.ECPublicKeySpec;
import org.bouncycastle.jce.spec.GOST3410ParameterSpec;
import org.bouncycastle.math.ec.ECCurve;
import org.bouncycastle.util.BigIntegers;
import org.bouncycastle.util.Strings;
import org.bouncycastle.util.test.SimpleTest;
import org.bouncycastle.util.test.TestRandomBigInteger;
import org.bouncycastle.x509.X509V3CertificateGenerator;
public class GOST3410Test
extends SimpleTest
{
    /**
     * Signs the message "abc" with ECGOST3410 over a fixed prime-field
     * curve using a fixed private key and a deterministic k, verifies the
     * signature, then checks the decoded (r, s) components against the
     * expected values.
     * NOTE(review): the curve, key and k constants appear to be the worked
     * example from the GOST R 34.10-2001 standard -- confirm against spec.
     */
    private void ecGOST3410Test()
        throws Exception
    {
        // Expected signature components for the fixed key/k below.
        BigInteger r = new BigInteger("29700980915817952874371204983938256990422752107994319651632687982059210933395");
        BigInteger s = new BigInteger("46959264877825372965922731380059061821746083849389763294914877353246631700866");

        // Deterministic "random" so the produced signature is reproducible.
        byte[] kData = BigIntegers.asUnsignedByteArray(new BigInteger("53854137677348463731403841147996619241504003434302020712960838528893196233395"));

        SecureRandom k = new TestRandomBigInteger(kData);

        BigInteger mod_p = new BigInteger("57896044618658097711785492504343953926634992332820282019728792003956564821041"); //p

        ECCurve curve = new ECCurve.Fp(
            mod_p, // p
            new BigInteger("7"), // a
            new BigInteger("43308876546767276905765904595650931995942111794451039583252968842033849580414")); // b

        ECParameterSpec spec = new ECParameterSpec(
            curve,
            curve.createPoint(
                new BigInteger("2"), // x
                new BigInteger("4018974056539037503335449422937059775635739389905545080690979365213431566280")), // y
            new BigInteger("57896044618658097711785492504343953927082934583725450622380973592137631069619")); // q

        ECPrivateKeySpec priKey = new ECPrivateKeySpec(
            new BigInteger("55441196065363246126355624130324183196576709222340016572108097750006097525544"), // d
            spec);

        ECPublicKeySpec pubKey = new ECPublicKeySpec(
            curve.createPoint(
                new BigInteger("57520216126176808443631405023338071176630104906313632182896741342206604859403"), // x
                new BigInteger("17614944419213781543809391949654080031942662045363639260709847859438286763994")), // y
            spec);

        Signature sgr = Signature.getInstance("ECGOST3410", "BC");
        KeyFactory f = KeyFactory.getInstance("ECGOST3410", "BC");
        PrivateKey sKey = f.generatePrivate(priKey);
        PublicKey vKey = f.generatePublic(pubKey);

        // Sign with the deterministic k so (r, s) match the expected values.
        sgr.initSign(sKey, k);

        byte[] message = new byte[]{(byte)'a', (byte)'b', (byte)'c'};

        sgr.update(message);

        byte[] sigBytes = sgr.sign();

        sgr.initVerify(vKey);

        sgr.update(message);

        if (!sgr.verify(sigBytes))
        {
            fail("ECGOST3410 verification failed");
        }

        // Decode the raw signature and compare each component.
        BigInteger[] sig = decode(sigBytes);

        if (!r.equals(sig[0]))
        {
            fail(
                ": r component wrong." + Strings.lineSeparator()
                    + " expecting: " + r + Strings.lineSeparator()
                    + " got      : " + sig[0]);
        }

        if (!s.equals(sig[1]))
        {
            fail(
                ": s component wrong." + Strings.lineSeparator()
                    + " expecting: " + s + Strings.lineSeparator()
                    + " got      : " + sig[1]);
        }
    }
    /**
     * Round-trip key generation / sign / verify / encode / serialize tests
     * for both GOST3410 (GOST R 34.10-94 style keys) and ECGOST3410.
     * For each algorithm: generate a key pair, sign and verify sample data,
     * then check X.509/PKCS#8 re-encoding and Java serialization preserve
     * the key material and parameters.
     */
    private void generationTest()
        throws Exception
    {
        Signature s = Signature.getInstance("GOST3410", "BC");
        KeyPairGenerator g = KeyPairGenerator.getInstance("GOST3410", "BC");
        byte[] data = {1, 2, 3, 4, 5, 6, 7, 8, 9, 0};

        // Generate with the named CryptoPro-A parameter set.
        GOST3410ParameterSpec gost3410P = new GOST3410ParameterSpec(CryptoProObjectIdentifiers.gostR3410_94_CryptoPro_A.getId());

        g.initialize(gost3410P, new SecureRandom());

        KeyPair p = g.generateKeyPair();

        PrivateKey sKey = p.getPrivate();
        PublicKey vKey = p.getPublic();

        s.initSign(sKey);

        s.update(data);

        byte[] sigBytes = s.sign();

        s = Signature.getInstance("GOST3410", "BC");

        s.initVerify(vKey);

        s.update(data);

        if (!s.verify(sigBytes))
        {
            fail("GOST3410 verification failed");
        }

        //
        // default initialisation test
        //
        s = Signature.getInstance("GOST3410", "BC");
        g = KeyPairGenerator.getInstance("GOST3410", "BC");

        p = g.generateKeyPair();

        sKey = p.getPrivate();
        vKey = p.getPublic();

        s.initSign(sKey);

        s.update(data);

        sigBytes = s.sign();

        s = Signature.getInstance("GOST3410", "BC");

        s.initVerify(vKey);

        s.update(data);

        if (!s.verify(sigBytes))
        {
            fail("GOST3410 verification failed");
        }

        //
        // encoded test
        //
        KeyFactory f = KeyFactory.getInstance("GOST3410", "BC");

        // Public key must survive an X.509 encode/decode round trip.
        X509EncodedKeySpec x509s = new X509EncodedKeySpec(vKey.getEncoded());

        GOST3410PublicKey k1 = (GOST3410PublicKey)f.generatePublic(x509s);

        if (!k1.getY().equals(((GOST3410PublicKey)vKey).getY()))
        {
            fail("public number not decoded properly");
        }

        if (!k1.getParameters().equals(((GOST3410PublicKey)vKey).getParameters()))
        {
            fail("public parameters not decoded properly");
        }

        // Private key must survive a PKCS#8 encode/decode round trip.
        PKCS8EncodedKeySpec pkcs8 = new PKCS8EncodedKeySpec(sKey.getEncoded());

        GOST3410PrivateKey k2 = (GOST3410PrivateKey)f.generatePrivate(pkcs8);

        if (!k2.getX().equals(((GOST3410PrivateKey)sKey).getX()))
        {
            fail("private number not decoded properly");
        }

        if (!k2.getParameters().equals(((GOST3410PrivateKey)sKey).getParameters()))
        {
            fail("private number not decoded properly");
        }

        // Java serialization round trip for the private key.
        k2 = (GOST3410PrivateKey)serializeDeserialize(sKey);
        if (!k2.getX().equals(((GOST3410PrivateKey)sKey).getX()))
        {
            fail("private number not deserialised properly");
        }

        if (!k2.getParameters().equals(((GOST3410PrivateKey)sKey).getParameters()))
        {
            fail("private number not deserialised properly");
        }

        checkEquals(k2, sKey);

        if (!(k2 instanceof PKCS12BagAttributeCarrier))
        {
            fail("private key not implementing PKCS12 attribute carrier");
        }

        // Java serialization round trip for the public key.
        k1 = (GOST3410PublicKey)serializeDeserialize(vKey);
        if (!k1.getY().equals(((GOST3410PublicKey)vKey).getY()))
        {
            fail("public number not deserialised properly");
        }

        if (!k1.getParameters().equals(((GOST3410PublicKey)vKey).getParameters()))
        {
            fail("public parameters not deserialised properly");
        }

        checkEquals(k1, vKey);

        //
        // ECGOST3410 generation test
        //
        s = Signature.getInstance("ECGOST3410", "BC");
        g = KeyPairGenerator.getInstance("ECGOST3410", "BC");

        //        BigInteger mod_p = new BigInteger("57896044618658097711785492504343953926634992332820282019728792003956564821041"); //p
        //
        //        ECCurve curve = new ECCurve.Fp(
        //            mod_p, // p
        //            new BigInteger("7"), // a
        //            new BigInteger("43308876546767276905765904595650931995942111794451039583252968842033849580414")); // b
        //
        //        ECParameterSpec ecSpec = new ECParameterSpec(
        //                curve,
        //                    new ECPoint.Fp(curve,
        //                                   new ECFieldElement.Fp(mod_p,new BigInteger("2")), // x
        //                                   new ECFieldElement.Fp(mod_p,new BigInteger("4018974056539037503335449422937059775635739389905545080690979365213431566280"))), // y
        //                    new BigInteger("57896044618658097711785492504343953927082934583725450622380973592137631069619")); // q

        // Use the named CryptoPro-A curve for EC key generation.
        g.initialize(new ECNamedCurveGenParameterSpec("GostR3410-2001-CryptoPro-A"), new SecureRandom());

        p = g.generateKeyPair();

        sKey = p.getPrivate();
        vKey = p.getPublic();

        s.initSign(sKey);

        s.update(data);

        sigBytes = s.sign();

        s = Signature.getInstance("ECGOST3410", "BC");

        s.initVerify(vKey);

        s.update(data);

        if (!s.verify(sigBytes))
        {
            fail("ECGOST3410 verification failed");
        }

        //
        // encoded test
        //
        f = KeyFactory.getInstance("ECGOST3410", "BC");

        // EC public key X.509 round trip.
        x509s = new X509EncodedKeySpec(vKey.getEncoded());

        ECPublicKey eck1 = (ECPublicKey)f.generatePublic(x509s);

        if (!eck1.getQ().equals(((ECPublicKey)vKey).getQ()))
        {
            fail("public number not decoded properly");
        }

        if (!eck1.getParameters().equals(((ECPublicKey)vKey).getParameters()))
        {
            fail("public parameters not decoded properly");
        }

        // EC private key PKCS#8 round trip.
        pkcs8 = new PKCS8EncodedKeySpec(sKey.getEncoded());

        ECPrivateKey eck2 = (ECPrivateKey)f.generatePrivate(pkcs8);

        if (!eck2.getD().equals(((ECPrivateKey)sKey).getD()))
        {
            fail("private number not decoded properly");
        }

        if (!eck2.getParameters().equals(((ECPrivateKey)sKey).getParameters()))
        {
            fail("private number not decoded properly");
        }

        // EC private key Java serialization round trip.
        eck2 = (ECPrivateKey)serializeDeserialize(sKey);
        if (!eck2.getD().equals(((ECPrivateKey)sKey).getD()))
        {
            fail("private number not decoded properly");
        }

        if (!eck2.getParameters().equals(((ECPrivateKey)sKey).getParameters()))
        {
            fail("private number not decoded properly");
        }

        checkEquals(eck2, sKey);

        if (!(eck2 instanceof PKCS12BagAttributeCarrier))
        {
            fail("private key not implementing PKCS12 attribute carrier");
        }

        // EC public key Java serialization round trip.
        eck1 = (ECPublicKey)serializeDeserialize(vKey);
        if (!eck1.getQ().equals(((ECPublicKey)vKey).getQ()))
        {
            fail("public number not decoded properly");
        }

        if (!eck1.getParameters().equals(((ECPublicKey)vKey).getParameters()))
        {
            fail("public parameters not decoded properly");
        }

        checkEquals(eck1, vKey);
    }
/**
 * Known-answer test for ECGOST3410-2012 with a 256-bit key: signs a fixed
 * digest value with a fixed nonce k and checks the exact (r, s) output,
 * then runs signature and encoding round-trip tests on freshly generated
 * key pairs for the Tc26 256- and 512-bit parameter sets.
 *
 * @throws Exception on any provider lookup, key generation or signing failure.
 */
private void ecGOST34102012256Test()
    throws Exception
{
    // expected signature components (r, s) and the digest value e from the test vector
    BigInteger r = new BigInteger("29700980915817952874371204983938256990422752107994319651632687982059210933395");
    BigInteger s = new BigInteger("574973400270084654178925310019147038455227042649098563933718999175515839552");
    BigInteger e = new BigInteger("20798893674476452017134061561508270130637142515379653289952617252661468872421");
    // fixed nonce fed through TestRandomBigInteger makes the signature deterministic
    byte[] kData = BigIntegers.asUnsignedByteArray(new BigInteger("53854137677348463731403841147996619241504003434302020712960838528893196233395"));
    SecureRandom k = new TestRandomBigInteger(kData);
    BigInteger mod_p = new BigInteger("57896044618658097711785492504343953926634992332820282019728792003956564821041"); //p
    ECCurve curve = new ECCurve.Fp(
        mod_p, // p
        new BigInteger("7"), // a
        new BigInteger("43308876546767276905765904595650931995942111794451039583252968842033849580414")); // b
    ECParameterSpec spec = new ECParameterSpec(
        curve,
        curve.createPoint(
            new BigInteger("2"), // x
            new BigInteger("4018974056539037503335449422937059775635739389905545080690979365213431566280")), // y
        new BigInteger("57896044618658097711785492504343953927082934583725450622380973592137631069619")); // q
    ECPrivateKeySpec priKey = new ECPrivateKeySpec(
        new BigInteger("55441196065363246126355624130324183196576709222340016572108097750006097525544"), // d
        spec);
    ECPublicKeySpec pubKey = new ECPublicKeySpec(
        curve.createPoint(
            new BigInteger("57520216126176808443631405023338071176630104906313632182896741342206604859403"), // x
            new BigInteger("17614944419213781543809391949654080031942662045363639260709847859438286763994")), // y
        spec);
    KeyFactory f = KeyFactory.getInstance("ECGOST3410-2012", "BC");
    PrivateKey sKey = f.generatePrivate(priKey);
    PublicKey vKey = f.generatePublic(pubKey);
    ECGOST3410_2012Signer signer = new ECGOST3410_2012Signer();
    CipherParameters param = ECUtil.generatePrivateKeyParameter(sKey);
    signer.init(true, new ParametersWithRandom(param, k));
    // the signer consumes the digest in reversed (little-endian) byte order
    byte[] rev = e.toByteArray();
    byte[] message = new byte[rev.length];
    for (int i = 0; i != rev.length; i++)
    {
        message[i] = rev[rev.length - 1 - i];
    }
    BigInteger[] sig = signer.generateSignature(message);
    ECPublicKey ecPublicKey = (ECPublicKey)vKey;
    param = new ECPublicKeyParameters(
        ecPublicKey.getQ(),
        new ECDomainParameters(spec.getCurve(), spec.getG(), spec.getN()));
    signer.init(false, param);
    if (!signer.verifySignature(message, sig[0], sig[1]))
    {
        fail("ECGOST3410 2012 verification failed");
    }
    // with the fixed nonce the signature must match the reference values exactly
    if (!r.equals(sig[0]))
    {
        fail(
            ": r component wrong." + Strings.lineSeparator()
            + " expecting: " + r + Strings.lineSeparator()
            + " got : " + sig[0]);
    }
    if (!s.equals(sig[1]))
    {
        fail(
            ": s component wrong." + Strings.lineSeparator()
            + " expecting: " + s + Strings.lineSeparator()
            + " got : " + sig[1]);
    }
    // generation round trips on the Tc26 named parameter sets (256- then 512-bit)
    KeyPairGenerator g = KeyPairGenerator.getInstance("ECGOST3410-2012", "BC");
    g.initialize(new ECNamedCurveGenParameterSpec("Tc26-Gost-3410-12-256-paramSetA"), new SecureRandom());
    KeyPair p = g.generateKeyPair();
    signatureGost12Test("ECGOST3410-2012-256", 64, p);
    encodedGost12Test(p);
    g.initialize(new ECNamedCurveGenParameterSpec("Tc26-Gost-3410-12-512-paramSetA"), new SecureRandom());
    p = g.generateKeyPair();
    signatureGost12Test("ECGOST3410-2012-512", 128, p);
    encodedGost12Test(p);
}
/**
 * Known-answer test for ECGOST3410-2012 with a 512-bit key: signs a fixed
 * digest value with a fixed nonce k and checks the exact (r, s) output
 * against the reference vector.
 *
 * @throws Exception on any provider lookup, key creation or signing failure.
 */
private void ecGOST34102012512Test()
    throws Exception
{
    // expected signature components (r, s) and the digest value e from the test vector
    BigInteger r = new BigInteger("2489204477031349265072864643032147753667451319282131444027498637357611092810221795101871412928823716805959828708330284243653453085322004442442534151761462");
    BigInteger s = new BigInteger("864523221707669519038849297382936917075023735848431579919598799313385180564748877195639672460179421760770893278030956807690115822709903853682831835159370");
    BigInteger e = new BigInteger("2897963881682868575562827278553865049173745197871825199562947419041388950970536661109553499954248733088719748844538964641281654463513296973827706272045964");
    // fixed nonce (hex) fed through TestRandomBigInteger for a deterministic signature
    byte[] kData = BigIntegers.asUnsignedByteArray(new BigInteger("359E7F4B1410FEACC570456C6801496946312120B39D019D455986E364F365886748ED7A44B3E794434006011842286212273A6D14CF70EA3AF71BB1AE679F1", 16));
    SecureRandom k = new TestRandomBigInteger(kData);
    BigInteger mod_p = new BigInteger("3623986102229003635907788753683874306021320925534678605086546150450856166624002482588482022271496854025090823603058735163734263822371964987228582907372403"); //p
    ECCurve curve = new ECCurve.Fp(
        mod_p, // p
        new BigInteger("7"), // a
        new BigInteger("1518655069210828534508950034714043154928747527740206436194018823352809982443793732829756914785974674866041605397883677596626326413990136959047435811826396")); // b
    ECParameterSpec spec = new ECParameterSpec(
        curve,
        curve.createPoint(
            new BigInteger("1928356944067022849399309401243137598997786635459507974357075491307766592685835441065557681003184874819658004903212332884252335830250729527632383493573274"), // x
            new BigInteger("2288728693371972859970012155529478416353562327329506180314497425931102860301572814141997072271708807066593850650334152381857347798885864807605098724013854")), // y
        new BigInteger("3623986102229003635907788753683874306021320925534678605086546150450856166623969164898305032863068499961404079437936585455865192212970734808812618120619743")); // q
    ECPrivateKeySpec priKey = new ECPrivateKeySpec(
        new BigInteger("610081804136373098219538153239847583006845519069531562982388135354890606301782255383608393423372379057665527595116827307025046458837440766121180466875860"), // d
        spec);
    ECPublicKeySpec pubKey = new ECPublicKeySpec(
        curve.createPoint(
            new BigInteger("909546853002536596556690768669830310006929272546556281596372965370312498563182320436892870052842808608262832456858223580713780290717986855863433431150561"), // x
            new BigInteger("2921457203374425620632449734248415455640700823559488705164895837509539134297327397380287741428246088626609329139441895016863758984106326600572476822372076")), // y
        spec);
    KeyFactory f = KeyFactory.getInstance("ECGOST3410-2012", "BC");
    PrivateKey sKey = f.generatePrivate(priKey);
    PublicKey vKey = f.generatePublic(pubKey);
    ECGOST3410_2012Signer signer = new ECGOST3410_2012Signer();
    CipherParameters param = ECUtil.generatePrivateKeyParameter(sKey);
    signer.init(true, new ParametersWithRandom(param, k));
    // the signer consumes the digest in reversed (little-endian) byte order
    byte[] rev = e.toByteArray();
    byte[] message = new byte[rev.length];
    for (int i = 0; i != rev.length; i++)
    {
        message[i] = rev[rev.length - 1 - i];
    }
    BigInteger[] sig = signer.generateSignature(message);
    ECPublicKey ecPublicKey = (ECPublicKey)vKey;
    param = new ECPublicKeyParameters(
        ecPublicKey.getQ(),
        new ECDomainParameters(spec.getCurve(), spec.getG(), spec.getN()));
    signer.init(false, param);
    if (!signer.verifySignature(message, sig[0], sig[1]))
    {
        fail("ECGOST3410 2012 verification failed");
    }
    // with the fixed nonce the signature must match the reference values exactly
    if (!r.equals(sig[0]))
    {
        fail(
            ": r component wrong." + Strings.lineSeparator()
            + " expecting: " + r + Strings.lineSeparator()
            + " got : " + sig[0]);
    }
    if (!s.equals(sig[1]))
    {
        fail(
            ": s component wrong." + Strings.lineSeparator()
            + " expecting: " + s + Strings.lineSeparator()
            + " got : " + sig[1]);
    }
}
/**
 * Signs a small fixed message with the given GOST 2012 signature algorithm
 * and verifies it, also checking the produced signature has the expected
 * byte length (64 for the 256-bit variant, 128 for the 512-bit one).
 *
 * @param signatureAlg    JCA signature algorithm name to exercise.
 * @param expectedSignLen expected length in bytes of the raw signature.
 * @param p               key pair providing the signing and verifying keys.
 * @throws Exception on any provider lookup or signing failure.
 */
private void signatureGost12Test(String signatureAlg, int expectedSignLen, KeyPair p)
    throws Exception
{
    byte[] data = {1, 2, 3, 4, 5, 6, 7, 8, 9, 0};

    Signature signingEngine = Signature.getInstance(signatureAlg, "BC");

    signingEngine.initSign(p.getPrivate());
    signingEngine.update(data);

    byte[] sigBytes = signingEngine.sign();

    // GOST signatures are fixed-size; a wrong length means a broken encoding
    if (sigBytes.length != expectedSignLen)
    {
        fail(signatureAlg + " signature failed");
    }

    // verify with a fresh engine instance to avoid any shared state
    Signature verifyingEngine = Signature.getInstance(signatureAlg, "BC");

    verifyingEngine.initVerify(p.getPublic());
    verifyingEngine.update(data);

    if (!verifyingEngine.verify(sigBytes))
    {
        fail(signatureAlg + " verification failed");
    }
}
/**
 * Round-trips an ECGOST3410-2012 key pair through its standard encodings
 * and Java serialization: X.509 for the public key, PKCS#8 for the private
 * key, then serialize/deserialize for both, checking key material (Q / D)
 * and domain parameters survive each trip.
 *
 * Fix: the failure messages previously copy-pasted "private number not
 * decoded properly" for the private-parameters check and used "decoded"
 * for the serialization checks; they now name the value and round trip
 * actually being tested, matching the wording used in generationTest.
 *
 * @param p key pair to round-trip.
 * @throws Exception on any encoding, decoding or serialization failure.
 */
private void encodedGost12Test(KeyPair p)
    throws Exception
{
    PrivateKey sKey = p.getPrivate();
    PublicKey vKey = p.getPublic();
    KeyFactory f = KeyFactory.getInstance("ECGOST3410-2012", "BC");
    // X.509 encode/decode round trip for the public key
    X509EncodedKeySpec x509s = new X509EncodedKeySpec(vKey.getEncoded());
    ECPublicKey eck1 = (ECPublicKey)f.generatePublic(x509s);
    if (!eck1.getQ().equals(((ECPublicKey)vKey).getQ()))
    {
        fail("public number not decoded properly");
    }
    if (!eck1.getParameters().equals(((ECPublicKey)vKey).getParameters()))
    {
        fail("public parameters not decoded properly");
    }
    // PKCS#8 encode/decode round trip for the private key
    PKCS8EncodedKeySpec pkcs8 = new PKCS8EncodedKeySpec(sKey.getEncoded());
    ECPrivateKey eck2 = (ECPrivateKey)f.generatePrivate(pkcs8);
    if (!eck2.getD().equals(((ECPrivateKey)sKey).getD()))
    {
        fail("private number not decoded properly");
    }
    if (!eck2.getParameters().equals(((ECPrivateKey)sKey).getParameters()))
    {
        fail("private parameters not decoded properly");
    }
    // Java serialization round trip for the private key
    eck2 = (ECPrivateKey)serializeDeserialize(sKey);
    if (!eck2.getD().equals(((ECPrivateKey)sKey).getD()))
    {
        fail("private number not deserialised properly");
    }
    if (!eck2.getParameters().equals(((ECPrivateKey)sKey).getParameters()))
    {
        fail("private parameters not deserialised properly");
    }
    checkEquals(eck2, sKey);
    if (!(eck2 instanceof PKCS12BagAttributeCarrier))
    {
        fail("private key not implementing PKCS12 attribute carrier");
    }
    // Java serialization round trip for the public key
    eck1 = (ECPublicKey)serializeDeserialize(vKey);
    if (!eck1.getQ().equals(((ECPublicKey)vKey).getQ()))
    {
        fail("public number not deserialised properly");
    }
    if (!eck1.getParameters().equals(((ECPublicKey)vKey).getParameters()))
    {
        fail("public parameters not deserialised properly");
    }
    checkEquals(eck1, vKey);
}
/**
 * Stores a GOST private key in a JKS keystore under a self-signed
 * certificate, serializes the keystore to memory, reloads it and
 * retrieves the key, exercising the full keystore round trip.
 *
 * @param sKey private key to store (also signs the throwaway certificate).
 * @param vKey matching public key, embedded in the certificate.
 */
private void keyStoreTest(PrivateKey sKey, PublicKey vKey)
    throws KeyStoreException, IOException, NoSuchAlgorithmException, CertificateException, NoSuchProviderException, SignatureException, InvalidKeyException, UnrecoverableKeyException
{
    //
    // keystore test
    //
    KeyStore ks = KeyStore.getInstance("JKS");
    ks.load(null, null);
    //
    // create the certificate - version 3
    //
    // a key entry needs a certificate chain, so build a minimal self-signed cert
    X509V3CertificateGenerator certGen = new X509V3CertificateGenerator();
    certGen.setSerialNumber(BigInteger.valueOf(1));
    certGen.setIssuerDN(new X509Principal("CN=Test"));
    certGen.setNotBefore(new Date(System.currentTimeMillis() - 50000));
    certGen.setNotAfter(new Date(System.currentTimeMillis() + 50000));
    certGen.setSubjectDN(new X509Principal("CN=Test"));
    certGen.setPublicKey(vKey);
    certGen.setSignatureAlgorithm("GOST3411withGOST3410");
    X509Certificate cert = certGen.generate(sKey, "BC");
    ks.setKeyEntry("gost", sKey, "gost".toCharArray(), new Certificate[]{cert});
    // write the keystore out and read it back from the produced bytes
    ByteArrayOutputStream bOut = new ByteArrayOutputStream();
    ks.store(bOut, "gost".toCharArray());
    ks = KeyStore.getInstance("JKS");
    ks.load(new ByteArrayInputStream(bOut.toByteArray()), "gost".toCharArray());
    // NOTE(review): gKey is retrieved only to prove the entry is recoverable;
    // its value is never compared to sKey — consider adding checkEquals(gKey, sKey)
    PrivateKey gKey = (PrivateKey)ks.getKey("gost", "gost".toCharArray());
}
/**
 * Asserts the equals/hashCode contract between a round-tripped key and
 * its original: the objects must compare equal and share a hash code.
 *
 * @param o1 round-tripped object.
 * @param o2 original object.
 */
private void checkEquals(Object o1, Object o2)
{
    if (!o1.equals(o2))
    {
        fail("comparison test failed");
    }
    // equal objects must produce equal hash codes
    if (o1.hashCode() != o2.hashCode())
    {
        fail("hashCode test failed");
    }
}
/**
 * Generates a GOST3410-94 key pair from the named CryptoPro-B parameter
 * set, signs and verifies a fixed message, then runs the keystore round
 * trip on the resulting keys. The commented-out section is a historical
 * AlgorithmParameters encode/decode test kept for reference.
 *
 * @throws Exception on any provider lookup, generation or signing failure.
 */
private void parametersTest()
    throws Exception
{
    // AlgorithmParameterGenerator a = AlgorithmParameterGenerator.getInstance("GOST3410", "BC");
    // a.init(512, random);
    // AlgorithmParameters params = a.generateParameters();
    //
    // byte[] encodeParams = params.getEncoded();
    //
    // AlgorithmParameters a2 = AlgorithmParameters.getInstance("GOST3410", "BC");
    // a2.init(encodeParams);
    //
    // // a and a2 should be equivalent!
    // byte[] encodeParams_2 = a2.getEncoded();
    //
    // if (!arrayEquals(encodeParams, encodeParams_2))
    // {
    //     fail("encode/decode parameters failed");
    // }
    // select the CryptoPro-B parameter set by OID
    GOST3410ParameterSpec gost3410P = new GOST3410ParameterSpec(CryptoProObjectIdentifiers.gostR3410_94_CryptoPro_B.getId());
    KeyPairGenerator g = KeyPairGenerator.getInstance("GOST3410", "BC");
    g.initialize(gost3410P, new SecureRandom());
    KeyPair p = g.generateKeyPair();
    PrivateKey sKey = p.getPrivate();
    PublicKey vKey = p.getPublic();
    Signature s = Signature.getInstance("GOST3410", "BC");
    byte[] data = {1, 2, 3, 4, 5, 6, 7, 8, 9, 0};
    s.initSign(sKey);
    s.update(data);
    byte[] sigBytes = s.sign();
    // verify with a fresh Signature instance
    s = Signature.getInstance("GOST3410", "BC");
    s.initVerify(vKey);
    s.update(data);
    if (!s.verify(sigBytes))
    {
        fail("GOST3410 verification failed");
    }
    keyStoreTest(sKey, vKey);
}
/**
 * Splits a raw 64-byte GOST3410 signature blob into its two components.
 * The encoding carries s in the first 32 bytes and r in the last 32;
 * both are interpreted as unsigned (positive) big-endian integers.
 *
 * @param encoding raw 64-byte signature.
 * @return {@code [r, s]}.
 */
private BigInteger[] decode(
    byte[] encoding)
{
    byte[] rBytes = new byte[32];
    byte[] sBytes = new byte[32];

    // note the swap: s occupies the leading half of the encoding
    System.arraycopy(encoding, 0, sBytes, 0, 32);
    System.arraycopy(encoding, 32, rBytes, 0, 32);

    return new BigInteger[]
    {
        new BigInteger(1, rBytes),
        new BigInteger(1, sBytes)
    };
}
/**
 * Round-trips an object through Java serialization in memory and returns
 * the reconstructed copy, so tests can check keys survive serialization.
 *
 * @param o object to serialize; must implement {@link java.io.Serializable}.
 * @return the deserialized copy.
 * @throws Exception on serialization or class-resolution failure.
 */
private Object serializeDeserialize(Object o)
    throws Exception
{
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    ObjectOutputStream out = new ObjectOutputStream(buffer);
    out.writeObject(o);
    out.close();

    ObjectInputStream in = new ObjectInputStream(
        new ByteArrayInputStream(buffer.toByteArray()));

    return in.readObject();
}
/**
 * @return the display name for this test, covering the GOST3410,
 *         ECGOST3410 and ECGOST3410-2012 algorithm families.
 */
public String getName()
{
    return "GOST3410/ECGOST3410/ECGOST3410 2012";
}
/**
 * Runs every test stage in order. The 2012 tests run only when the
 * installed "BC" provider actually registers the ECGOST3410-2012 key
 * factory (Provider extends Properties, so containsKey probes the
 * service registry directly).
 *
 * @throws Exception if any stage fails or a provider lookup errors.
 */
public void performTest()
    throws Exception
{
    ecGOST3410Test();
    if (Security.getProvider("BC").containsKey("KeyFactory.ECGOST3410-2012"))
    {
        ecGOST34102012256Test();
        ecGOST34102012512Test();
    }
    generationTest();
    parametersTest();
}
/**
 * Converts a string to bytes by truncating each char to its low 8 bits
 * (i.e. ISO-8859-1 semantics for characters below U+0100).
 *
 * @param input string to convert.
 * @return one byte per character of {@code input}.
 */
protected byte[] toByteArray(String input)
{
    int length = input.length();
    byte[] result = new byte[length];

    for (int index = 0; index < length; index++)
    {
        result[index] = (byte)input.charAt(index);
    }

    return result;
}
public static void main(
String[] args)
{
Security.addProvider(new BouncyCastleProvider());
runTest(new GOST3410Test());
}
}
| prov/src/test/java/org/bouncycastle/jce/provider/test/GOST3410Test.java | package org.bouncycastle.jce.provider.test;
import org.bouncycastle.asn1.cryptopro.CryptoProObjectIdentifiers;
import org.bouncycastle.crypto.CipherParameters;
import org.bouncycastle.crypto.params.ECDomainParameters;
import org.bouncycastle.crypto.params.ECPublicKeyParameters;
import org.bouncycastle.crypto.params.ParametersWithRandom;
import org.bouncycastle.crypto.signers.ECGOST3410_2012Signer;
import org.bouncycastle.jcajce.provider.asymmetric.util.ECUtil;
import org.bouncycastle.jce.X509Principal;
import org.bouncycastle.jce.interfaces.*;
import org.bouncycastle.jce.provider.BouncyCastleProvider;
import org.bouncycastle.jce.spec.*;
import org.bouncycastle.math.ec.ECCurve;
import org.bouncycastle.util.BigIntegers;
import org.bouncycastle.util.Strings;
import org.bouncycastle.util.test.SimpleTest;
import org.bouncycastle.util.test.TestRandomBigInteger;
import org.bouncycastle.x509.X509V3CertificateGenerator;
import java.io.*;
import java.math.BigInteger;
import java.security.*;
import java.security.cert.Certificate;
import java.security.cert.CertificateException;
import java.security.cert.X509Certificate;
import java.security.spec.PKCS8EncodedKeySpec;
import java.security.spec.X509EncodedKeySpec;
import java.util.Date;
public class GOST3410Test
extends SimpleTest {
private void ecGOST3410Test()
throws Exception {
BigInteger r = new BigInteger("29700980915817952874371204983938256990422752107994319651632687982059210933395");
BigInteger s = new BigInteger("46959264877825372965922731380059061821746083849389763294914877353246631700866");
byte[] kData = BigIntegers.asUnsignedByteArray(new BigInteger("53854137677348463731403841147996619241504003434302020712960838528893196233395"));
SecureRandom k = new TestRandomBigInteger(kData);
BigInteger mod_p = new BigInteger("57896044618658097711785492504343953926634992332820282019728792003956564821041"); //p
ECCurve curve = new ECCurve.Fp(
mod_p, // p
new BigInteger("7"), // a
new BigInteger("43308876546767276905765904595650931995942111794451039583252968842033849580414")); // b
ECParameterSpec spec = new ECParameterSpec(
curve,
curve.createPoint(
new BigInteger("2"), // x
new BigInteger("4018974056539037503335449422937059775635739389905545080690979365213431566280")), // y
new BigInteger("57896044618658097711785492504343953927082934583725450622380973592137631069619")); // q
ECPrivateKeySpec priKey = new ECPrivateKeySpec(
new BigInteger("55441196065363246126355624130324183196576709222340016572108097750006097525544"), // d
spec);
ECPublicKeySpec pubKey = new ECPublicKeySpec(
curve.createPoint(
new BigInteger("57520216126176808443631405023338071176630104906313632182896741342206604859403"), // x
new BigInteger("17614944419213781543809391949654080031942662045363639260709847859438286763994")), // y
spec);
Signature sgr = Signature.getInstance("ECGOST3410", "BC");
KeyFactory f = KeyFactory.getInstance("ECGOST3410", "BC");
PrivateKey sKey = f.generatePrivate(priKey);
PublicKey vKey = f.generatePublic(pubKey);
sgr.initSign(sKey, k);
byte[] message = new byte[]{(byte) 'a', (byte) 'b', (byte) 'c'};
sgr.update(message);
byte[] sigBytes = sgr.sign();
sgr.initVerify(vKey);
sgr.update(message);
if (!sgr.verify(sigBytes)) {
fail("ECGOST3410 verification failed");
}
BigInteger[] sig = decode(sigBytes);
if (!r.equals(sig[0])) {
fail(
": r component wrong." + Strings.lineSeparator()
+ " expecting: " + r + Strings.lineSeparator()
+ " got : " + sig[0]);
}
if (!s.equals(sig[1])) {
fail(
": s component wrong." + Strings.lineSeparator()
+ " expecting: " + s + Strings.lineSeparator()
+ " got : " + sig[1]);
}
}
private void generationTest()
throws Exception {
Signature s = Signature.getInstance("GOST3410", "BC");
KeyPairGenerator g = KeyPairGenerator.getInstance("GOST3410", "BC");
byte[] data = {1, 2, 3, 4, 5, 6, 7, 8, 9, 0};
GOST3410ParameterSpec gost3410P = new GOST3410ParameterSpec(CryptoProObjectIdentifiers.gostR3410_94_CryptoPro_A.getId());
g.initialize(gost3410P, new SecureRandom());
KeyPair p = g.generateKeyPair();
PrivateKey sKey = p.getPrivate();
PublicKey vKey = p.getPublic();
s.initSign(sKey);
s.update(data);
byte[] sigBytes = s.sign();
s = Signature.getInstance("GOST3410", "BC");
s.initVerify(vKey);
s.update(data);
if (!s.verify(sigBytes)) {
fail("GOST3410 verification failed");
}
//
// default initialisation test
//
s = Signature.getInstance("GOST3410", "BC");
g = KeyPairGenerator.getInstance("GOST3410", "BC");
p = g.generateKeyPair();
sKey = p.getPrivate();
vKey = p.getPublic();
s.initSign(sKey);
s.update(data);
sigBytes = s.sign();
s = Signature.getInstance("GOST3410", "BC");
s.initVerify(vKey);
s.update(data);
if (!s.verify(sigBytes)) {
fail("GOST3410 verification failed");
}
//
// encoded test
//
KeyFactory f = KeyFactory.getInstance("GOST3410", "BC");
X509EncodedKeySpec x509s = new X509EncodedKeySpec(vKey.getEncoded());
GOST3410PublicKey k1 = (GOST3410PublicKey) f.generatePublic(x509s);
if (!k1.getY().equals(((GOST3410PublicKey) vKey).getY())) {
fail("public number not decoded properly");
}
if (!k1.getParameters().equals(((GOST3410PublicKey) vKey).getParameters())) {
fail("public parameters not decoded properly");
}
PKCS8EncodedKeySpec pkcs8 = new PKCS8EncodedKeySpec(sKey.getEncoded());
GOST3410PrivateKey k2 = (GOST3410PrivateKey) f.generatePrivate(pkcs8);
if (!k2.getX().equals(((GOST3410PrivateKey) sKey).getX())) {
fail("private number not decoded properly");
}
if (!k2.getParameters().equals(((GOST3410PrivateKey) sKey).getParameters())) {
fail("private number not decoded properly");
}
k2 = (GOST3410PrivateKey) serializeDeserialize(sKey);
if (!k2.getX().equals(((GOST3410PrivateKey) sKey).getX())) {
fail("private number not deserialised properly");
}
if (!k2.getParameters().equals(((GOST3410PrivateKey) sKey).getParameters())) {
fail("private number not deserialised properly");
}
checkEquals(k2, sKey);
if (!(k2 instanceof PKCS12BagAttributeCarrier)) {
fail("private key not implementing PKCS12 attribute carrier");
}
k1 = (GOST3410PublicKey) serializeDeserialize(vKey);
if (!k1.getY().equals(((GOST3410PublicKey) vKey).getY())) {
fail("public number not deserialised properly");
}
if (!k1.getParameters().equals(((GOST3410PublicKey) vKey).getParameters())) {
fail("public parameters not deserialised properly");
}
checkEquals(k1, vKey);
//
// ECGOST3410 generation test
//
s = Signature.getInstance("ECGOST3410", "BC");
g = KeyPairGenerator.getInstance("ECGOST3410", "BC");
// BigInteger mod_p = new BigInteger("57896044618658097711785492504343953926634992332820282019728792003956564821041"); //p
//
// ECCurve curve = new ECCurve.Fp(
// mod_p, // p
// new BigInteger("7"), // a
// new BigInteger("43308876546767276905765904595650931995942111794451039583252968842033849580414")); // b
//
// ECParameterSpec ecSpec = new ECParameterSpec(
// curve,
// new ECPoint.Fp(curve,
// new ECFieldElement.Fp(mod_p,new BigInteger("2")), // x
// new ECFieldElement.Fp(mod_p,new BigInteger("4018974056539037503335449422937059775635739389905545080690979365213431566280"))), // y
// new BigInteger("57896044618658097711785492504343953927082934583725450622380973592137631069619")); // q
g.initialize(new ECNamedCurveGenParameterSpec("GostR3410-2001-CryptoPro-A"), new SecureRandom());
p = g.generateKeyPair();
sKey = p.getPrivate();
vKey = p.getPublic();
s.initSign(sKey);
s.update(data);
sigBytes = s.sign();
s = Signature.getInstance("ECGOST3410", "BC");
s.initVerify(vKey);
s.update(data);
if (!s.verify(sigBytes)) {
fail("ECGOST3410 verification failed");
}
//
// encoded test
//
f = KeyFactory.getInstance("ECGOST3410", "BC");
x509s = new X509EncodedKeySpec(vKey.getEncoded());
ECPublicKey eck1 = (ECPublicKey) f.generatePublic(x509s);
if (!eck1.getQ().equals(((ECPublicKey) vKey).getQ())) {
fail("public number not decoded properly");
}
if (!eck1.getParameters().equals(((ECPublicKey) vKey).getParameters())) {
fail("public parameters not decoded properly");
}
pkcs8 = new PKCS8EncodedKeySpec(sKey.getEncoded());
ECPrivateKey eck2 = (ECPrivateKey) f.generatePrivate(pkcs8);
if (!eck2.getD().equals(((ECPrivateKey) sKey).getD())) {
fail("private number not decoded properly");
}
if (!eck2.getParameters().equals(((ECPrivateKey) sKey).getParameters())) {
fail("private number not decoded properly");
}
eck2 = (ECPrivateKey) serializeDeserialize(sKey);
if (!eck2.getD().equals(((ECPrivateKey) sKey).getD())) {
fail("private number not decoded properly");
}
if (!eck2.getParameters().equals(((ECPrivateKey) sKey).getParameters())) {
fail("private number not decoded properly");
}
checkEquals(eck2, sKey);
if (!(eck2 instanceof PKCS12BagAttributeCarrier)) {
fail("private key not implementing PKCS12 attribute carrier");
}
eck1 = (ECPublicKey) serializeDeserialize(vKey);
if (!eck1.getQ().equals(((ECPublicKey) vKey).getQ())) {
fail("public number not decoded properly");
}
if (!eck1.getParameters().equals(((ECPublicKey) vKey).getParameters())) {
fail("public parameters not decoded properly");
}
checkEquals(eck1, vKey);
}
private void ecGOST34102012256Test() throws Exception {
BigInteger r = new BigInteger("29700980915817952874371204983938256990422752107994319651632687982059210933395");
BigInteger s = new BigInteger("574973400270084654178925310019147038455227042649098563933718999175515839552");
BigInteger e = new BigInteger("20798893674476452017134061561508270130637142515379653289952617252661468872421");
byte[] kData = BigIntegers.asUnsignedByteArray(new BigInteger("53854137677348463731403841147996619241504003434302020712960838528893196233395"));
SecureRandom k = new TestRandomBigInteger(kData);
BigInteger mod_p = new BigInteger("57896044618658097711785492504343953926634992332820282019728792003956564821041"); //p
ECCurve curve = new ECCurve.Fp(
mod_p, // p
new BigInteger("7"), // a
new BigInteger("43308876546767276905765904595650931995942111794451039583252968842033849580414")); // b
ECParameterSpec spec = new ECParameterSpec(
curve,
curve.createPoint(
new BigInteger("2"), // x
new BigInteger("4018974056539037503335449422937059775635739389905545080690979365213431566280")), // y
new BigInteger("57896044618658097711785492504343953927082934583725450622380973592137631069619")); // q
ECPrivateKeySpec priKey = new ECPrivateKeySpec(
new BigInteger("55441196065363246126355624130324183196576709222340016572108097750006097525544"), // d
spec);
ECPublicKeySpec pubKey = new ECPublicKeySpec(
curve.createPoint(
new BigInteger("57520216126176808443631405023338071176630104906313632182896741342206604859403"), // x
new BigInteger("17614944419213781543809391949654080031942662045363639260709847859438286763994")), // y
spec);
KeyFactory f = KeyFactory.getInstance("ECGOST3410-2012", "BC");
PrivateKey sKey = f.generatePrivate(priKey);
PublicKey vKey = f.generatePublic(pubKey);
ECGOST3410_2012Signer signer = new ECGOST3410_2012Signer();
CipherParameters param = ECUtil.generatePrivateKeyParameter(sKey);
signer.init(true, new ParametersWithRandom(param, k));
byte[] rev = e.toByteArray();
byte[] message = new byte[rev.length];
for (int i = 0; i != rev.length; i++) {
message[i] = rev[rev.length - 1 - i];
}
BigInteger[] sig = signer.generateSignature(message);
ECPublicKey ecPublicKey = (ECPublicKey)vKey;
param = new ECPublicKeyParameters(
ecPublicKey.getQ(),
new ECDomainParameters(spec.getCurve(), spec.getG(), spec.getN()));
signer.init(false, param );
if(!signer.verifySignature(message, sig[0], sig[1])){
fail("ECGOST3410 2012 verification failed");
}
if (!r.equals(sig[0])) {
fail(
": r component wrong." + Strings.lineSeparator()
+ " expecting: " + r + Strings.lineSeparator()
+ " got : " + sig[0]);
}
if (!s.equals(sig[1])) {
fail(
": s component wrong." + Strings.lineSeparator()
+ " expecting: " + s + Strings.lineSeparator()
+ " got : " + sig[1]);
}
KeyPairGenerator g = KeyPairGenerator.getInstance("ECGOST3410-2012", "BC");
g.initialize(new ECNamedCurveGenParameterSpec("Tc26-Gost-3410-12-256-paramSetA"), new SecureRandom());
KeyPair p = g.generateKeyPair();
signatureGost12Test("ECGOST3410-2012-256", 64, p);
encodedGost12Test(p);
g.initialize(new ECNamedCurveGenParameterSpec("Tc26-Gost-3410-12-512-paramSetA"), new SecureRandom());
p = g.generateKeyPair();
signatureGost12Test("ECGOST3410-2012-512", 128, p);
encodedGost12Test(p);
}
private void ecGOST34102012512Test() throws Exception {
BigInteger r = new BigInteger("2489204477031349265072864643032147753667451319282131444027498637357611092810221795101871412928823716805959828708330284243653453085322004442442534151761462");
BigInteger s = new BigInteger("864523221707669519038849297382936917075023735848431579919598799313385180564748877195639672460179421760770893278030956807690115822709903853682831835159370");
BigInteger e = new BigInteger("2897963881682868575562827278553865049173745197871825199562947419041388950970536661109553499954248733088719748844538964641281654463513296973827706272045964");
byte[] kData = BigIntegers.asUnsignedByteArray(new BigInteger("359E7F4B1410FEACC570456C6801496946312120B39D019D455986E364F365886748ED7A44B3E794434006011842286212273A6D14CF70EA3AF71BB1AE679F1", 16));
SecureRandom k = new TestRandomBigInteger(kData);
BigInteger mod_p = new BigInteger("3623986102229003635907788753683874306021320925534678605086546150450856166624002482588482022271496854025090823603058735163734263822371964987228582907372403"); //p
ECCurve curve = new ECCurve.Fp(
mod_p, // p
new BigInteger("7"), // a
new BigInteger("1518655069210828534508950034714043154928747527740206436194018823352809982443793732829756914785974674866041605397883677596626326413990136959047435811826396")); // b
ECParameterSpec spec = new ECParameterSpec(
curve,
curve.createPoint(
new BigInteger("1928356944067022849399309401243137598997786635459507974357075491307766592685835441065557681003184874819658004903212332884252335830250729527632383493573274"), // x
new BigInteger("2288728693371972859970012155529478416353562327329506180314497425931102860301572814141997072271708807066593850650334152381857347798885864807605098724013854")), // y
new BigInteger("3623986102229003635907788753683874306021320925534678605086546150450856166623969164898305032863068499961404079437936585455865192212970734808812618120619743")); // q
ECPrivateKeySpec priKey = new ECPrivateKeySpec(
new BigInteger("610081804136373098219538153239847583006845519069531562982388135354890606301782255383608393423372379057665527595116827307025046458837440766121180466875860"), // d
spec);
ECPublicKeySpec pubKey = new ECPublicKeySpec(
curve.createPoint(
new BigInteger("909546853002536596556690768669830310006929272546556281596372965370312498563182320436892870052842808608262832456858223580713780290717986855863433431150561"), // x
new BigInteger("2921457203374425620632449734248415455640700823559488705164895837509539134297327397380287741428246088626609329139441895016863758984106326600572476822372076")), // y
spec);
KeyFactory f = KeyFactory.getInstance("ECGOST3410-2012", "BC");
PrivateKey sKey = f.generatePrivate(priKey);
PublicKey vKey = f.generatePublic(pubKey);
ECGOST3410_2012Signer signer = new ECGOST3410_2012Signer();
CipherParameters param = ECUtil.generatePrivateKeyParameter(sKey);
signer.init(true, new ParametersWithRandom(param, k));
byte[] rev = e.toByteArray();
byte[] message = new byte[rev.length];
for (int i = 0; i != rev.length; i++) {
message[i] = rev[rev.length - 1 - i];
}
BigInteger[] sig = signer.generateSignature(message);
ECPublicKey ecPublicKey = (ECPublicKey)vKey;
param = new ECPublicKeyParameters(
ecPublicKey.getQ(),
new ECDomainParameters(spec.getCurve(), spec.getG(), spec.getN()));
signer.init(false, param );
if(!signer.verifySignature(message, sig[0], sig[1])){
fail("ECGOST3410 2012 verification failed");
}
if (!r.equals(sig[0])) {
fail(
": r component wrong." + Strings.lineSeparator()
+ " expecting: " + r + Strings.lineSeparator()
+ " got : " + sig[0]);
}
if (!s.equals(sig[1])) {
fail(
": s component wrong." + Strings.lineSeparator()
+ " expecting: " + s + Strings.lineSeparator()
+ " got : " + sig[1]);
}
}
private void signatureGost12Test(String signatureAlg, int expectedSignLen, KeyPair p)
throws Exception {
byte[] data = {1, 2, 3, 4, 5, 6, 7, 8, 9, 0};
PrivateKey sKey = p.getPrivate();
PublicKey vKey = p.getPublic();
Signature s = Signature.getInstance(signatureAlg, "BC");
s.initSign(sKey);
s.update(data);
byte[] sigBytes = s.sign();
if (sigBytes.length != expectedSignLen) {
fail(signatureAlg + " signature failed");
}
s = Signature.getInstance(signatureAlg, "BC");
s.initVerify(vKey);
s.update(data);
if (!s.verify(sigBytes)) {
fail(signatureAlg + " verification failed");
}
}
private void encodedGost12Test(KeyPair p) throws Exception {
PrivateKey sKey = p.getPrivate();
PublicKey vKey = p.getPublic();
KeyFactory f = KeyFactory.getInstance("ECGOST3410-2012", "BC");
X509EncodedKeySpec x509s = new X509EncodedKeySpec(vKey.getEncoded());
ECPublicKey eck1 = (ECPublicKey) f.generatePublic(x509s);
if (!eck1.getQ().equals(((ECPublicKey) vKey).getQ())) {
fail("public number not decoded properly");
}
if (!eck1.getParameters().equals(((ECPublicKey) vKey).getParameters())) {
fail("public parameters not decoded properly");
}
PKCS8EncodedKeySpec pkcs8 = new PKCS8EncodedKeySpec(sKey.getEncoded());
ECPrivateKey eck2 = (ECPrivateKey) f.generatePrivate(pkcs8);
if (!eck2.getD().equals(((ECPrivateKey) sKey).getD())) {
fail("private number not decoded properly");
}
if (!eck2.getParameters().equals(((ECPrivateKey) sKey).getParameters())) {
fail("private number not decoded properly");
}
eck2 = (ECPrivateKey) serializeDeserialize(sKey);
if (!eck2.getD().equals(((ECPrivateKey) sKey).getD())) {
fail("private number not decoded properly");
}
if (!eck2.getParameters().equals(((ECPrivateKey) sKey).getParameters())) {
fail("private number not decoded properly");
}
checkEquals(eck2, sKey);
if (!(eck2 instanceof PKCS12BagAttributeCarrier)) {
fail("private key not implementing PKCS12 attribute carrier");
}
eck1 = (ECPublicKey) serializeDeserialize(vKey);
if (!eck1.getQ().equals(((ECPublicKey) vKey).getQ())) {
fail("public number not decoded properly");
}
if (!eck1.getParameters().equals(((ECPublicKey) vKey).getParameters())) {
fail("public parameters not decoded properly");
}
checkEquals(eck1, vKey);
}
// Stores a GOST key pair in a JKS keystore under a self-signed certificate,
// writes the keystore to a byte buffer, reloads it, and reads the private
// key back out — exercising keystore round-tripping of GOST keys.
private void keyStoreTest(PrivateKey sKey, PublicKey vKey)
    throws KeyStoreException, IOException, NoSuchAlgorithmException, CertificateException, NoSuchProviderException, SignatureException, InvalidKeyException, UnrecoverableKeyException {
    KeyStore store = KeyStore.getInstance("JKS");
    store.load(null, null);

    // Build a minimal self-signed v3 certificate (CN=Test -> CN=Test),
    // valid 50 seconds either side of now, signed with GOST3411withGOST3410.
    X509V3CertificateGenerator gen = new X509V3CertificateGenerator();
    gen.setSerialNumber(BigInteger.valueOf(1));
    gen.setIssuerDN(new X509Principal("CN=Test"));
    gen.setNotBefore(new Date(System.currentTimeMillis() - 50000));
    gen.setNotAfter(new Date(System.currentTimeMillis() + 50000));
    gen.setSubjectDN(new X509Principal("CN=Test"));
    gen.setPublicKey(vKey);
    gen.setSignatureAlgorithm("GOST3411withGOST3410");
    X509Certificate selfSigned = gen.generate(sKey, "BC");

    // Store the private key under alias "gost" and serialize the keystore.
    store.setKeyEntry("gost", sKey, "gost".toCharArray(), new Certificate[]{selfSigned});
    ByteArrayOutputStream serialized = new ByteArrayOutputStream();
    store.store(serialized, "gost".toCharArray());

    // Reload from the serialized bytes and recover the key.
    store = KeyStore.getInstance("JKS");
    store.load(new ByteArrayInputStream(serialized.toByteArray()), "gost".toCharArray());
    PrivateKey recovered = (PrivateKey) store.getKey("gost", "gost".toCharArray());
}
// Asserts the equals()/hashCode() contract between a decoded/deserialized key
// and its original: the two must compare equal and report the same hash code.
private void checkEquals(Object o1, Object o2) {
if (!o1.equals(o2)) {
fail("comparison test failed");
}
// Objects that are equal must have equal hash codes (Object contract).
if (o1.hashCode() != o2.hashCode()) {
fail("hashCode test failed");
}
}
// Generates a GOST R 34.10-94 key pair using an explicitly named CryptoPro
// parameter set (set B), signs and verifies a fixed 10-byte message, and then
// runs the keystore round trip on the resulting pair.
private void parametersTest()
throws Exception {
// NOTE(review): AlgorithmParameters support for GOST3410 appears to be
// unimplemented/disabled — this encode/decode check is kept for reference.
// AlgorithmParameterGenerator a = AlgorithmParameterGenerator.getInstance("GOST3410", "BC");
// a.init(512, random);
// AlgorithmParameters params = a.generateParameters();
//
// byte[] encodeParams = params.getEncoded();
//
// AlgorithmParameters a2 = AlgorithmParameters.getInstance("GOST3410", "BC");
// a2.init(encodeParams);
//
// // a and a2 should be equivalent!
// byte[] encodeParams_2 = a2.getEncoded();
//
// if (!arrayEquals(encodeParams, encodeParams_2))
// {
// fail("encode/decode parameters failed");
// }
// Select the CryptoPro "B" domain parameters by OID.
GOST3410ParameterSpec gost3410P = new GOST3410ParameterSpec(CryptoProObjectIdentifiers.gostR3410_94_CryptoPro_B.getId());
KeyPairGenerator g = KeyPairGenerator.getInstance("GOST3410", "BC");
g.initialize(gost3410P, new SecureRandom());
KeyPair p = g.generateKeyPair();
PrivateKey sKey = p.getPrivate();
PublicKey vKey = p.getPublic();
// Sign a fixed message with the fresh key, then verify with the public key.
Signature s = Signature.getInstance("GOST3410", "BC");
byte[] data = {1, 2, 3, 4, 5, 6, 7, 8, 9, 0};
s.initSign(sKey);
s.update(data);
byte[] sigBytes = s.sign();
s = Signature.getInstance("GOST3410", "BC");
s.initVerify(vKey);
s.update(data);
if (!s.verify(sigBytes)) {
fail("GOST3410 verification failed");
}
keyStoreTest(sKey, vKey);
}
/**
 * Splits a raw GOST3410 signature into its two components.
 * <p>
 * The raw encoding is the concatenation s || r, with both halves the same
 * fixed width (32 bytes each for GOST R 34.10-2001 / 2012-256). The split
 * point is derived from the encoding length rather than hard-coded at 32,
 * so 64-byte-component (2012-512) signatures decode correctly as well;
 * behaviour for the original 64-byte input is unchanged.
 *
 * @param encoding raw signature bytes, s followed by r, equal widths
 * @return a two-element array: index 0 is r, index 1 is s, both treated as
 *         unsigned big-endian magnitudes
 */
private BigInteger[] decode(
    byte[] encoding) {
    int half = encoding.length / 2;

    byte[] r = new byte[half];
    byte[] s = new byte[half];

    // s occupies the first half of the encoding, r the second half.
    System.arraycopy(encoding, 0, s, 0, half);
    System.arraycopy(encoding, half, r, 0, half);

    BigInteger[] sig = new BigInteger[2];

    // signum 1: interpret each half as a non-negative magnitude.
    sig[0] = new BigInteger(1, r);
    sig[1] = new BigInteger(1, s);

    return sig;
}
/**
 * Round-trips an object through Java serialization.
 * <p>
 * Fix: the original left the {@code ObjectInputStream} unclosed; it is now
 * closed in a {@code finally} block so the read side is always released
 * (explicit close rather than try-with-resources, matching the file's
 * pre-Java-7 style).
 *
 * @param o the object to serialize; must implement {@code Serializable}
 * @return a deep copy of {@code o} produced by deserialization
 * @throws Exception on any serialization or deserialization failure
 */
private Object serializeDeserialize(Object o)
    throws Exception {
    ByteArrayOutputStream bOut = new ByteArrayOutputStream();
    ObjectOutputStream oOut = new ObjectOutputStream(bOut);

    oOut.writeObject(o);
    oOut.close();

    ObjectInputStream oIn = new ObjectInputStream(new ByteArrayInputStream(bOut.toByteArray()));
    try {
        return oIn.readObject();
    } finally {
        oIn.close();
    }
}
// Display name reported by the test framework for this test case.
public String getName() {
return "GOST3410/ECGOST3410/ECGOST3410 2012";
}
// Test-framework entry point: runs every GOST3410 / ECGOST3410 (2001 and
// 2012) sub-test in sequence; any sub-test failure propagates via fail().
public void performTest()
throws Exception {
ecGOST3410Test();
ecGOST34102012256Test();
ecGOST34102012512Test();
generationTest();
parametersTest();
}
// Converts a string to bytes by truncating each char to its low 8 bits
// (i.e. ISO-8859-1-style narrowing; no charset involved).
protected byte[] toByteArray(String input) {
    int len = input.length();
    byte[] out = new byte[len];

    for (int i = 0; i < len; i++) {
        out[i] = (byte) input.charAt(i);
    }

    return out;
}
// Standalone runner: registers the Bouncy Castle provider ("BC"), which every
// sub-test requests by name, then executes this test case.
public static void main(
String[] args) {
Security.addProvider(new BouncyCastleProvider());
runTest(new GOST3410Test());
}
}
| Java 1.4 updates
| prov/src/test/java/org/bouncycastle/jce/provider/test/GOST3410Test.java | Java 1.4 updates | <ide><path>rov/src/test/java/org/bouncycastle/jce/provider/test/GOST3410Test.java
<ide> package org.bouncycastle.jce.provider.test;
<add>
<add>import java.io.ByteArrayInputStream;
<add>import java.io.ByteArrayOutputStream;
<add>import java.io.IOException;
<add>import java.io.ObjectInputStream;
<add>import java.io.ObjectOutputStream;
<add>import java.math.BigInteger;
<add>import java.security.InvalidKeyException;
<add>import java.security.KeyFactory;
<add>import java.security.KeyPair;
<add>import java.security.KeyPairGenerator;
<add>import java.security.KeyStore;
<add>import java.security.KeyStoreException;
<add>import java.security.NoSuchAlgorithmException;
<add>import java.security.NoSuchProviderException;
<add>import java.security.PrivateKey;
<add>import java.security.PublicKey;
<add>import java.security.SecureRandom;
<add>import java.security.Security;
<add>import java.security.Signature;
<add>import java.security.SignatureException;
<add>import java.security.UnrecoverableKeyException;
<add>import java.security.cert.Certificate;
<add>import java.security.cert.CertificateException;
<add>import java.security.cert.X509Certificate;
<add>import java.security.spec.PKCS8EncodedKeySpec;
<add>import java.security.spec.X509EncodedKeySpec;
<add>import java.util.Date;
<ide>
<ide> import org.bouncycastle.asn1.cryptopro.CryptoProObjectIdentifiers;
<ide> import org.bouncycastle.crypto.CipherParameters;
<ide> import org.bouncycastle.crypto.signers.ECGOST3410_2012Signer;
<ide> import org.bouncycastle.jcajce.provider.asymmetric.util.ECUtil;
<ide> import org.bouncycastle.jce.X509Principal;
<del>import org.bouncycastle.jce.interfaces.*;
<add>import org.bouncycastle.jce.interfaces.ECPrivateKey;
<add>import org.bouncycastle.jce.interfaces.ECPublicKey;
<add>import org.bouncycastle.jce.interfaces.GOST3410PrivateKey;
<add>import org.bouncycastle.jce.interfaces.GOST3410PublicKey;
<add>import org.bouncycastle.jce.interfaces.PKCS12BagAttributeCarrier;
<ide> import org.bouncycastle.jce.provider.BouncyCastleProvider;
<del>import org.bouncycastle.jce.spec.*;
<add>import org.bouncycastle.jce.spec.ECNamedCurveGenParameterSpec;
<add>import org.bouncycastle.jce.spec.ECParameterSpec;
<add>import org.bouncycastle.jce.spec.ECPrivateKeySpec;
<add>import org.bouncycastle.jce.spec.ECPublicKeySpec;
<add>import org.bouncycastle.jce.spec.GOST3410ParameterSpec;
<ide> import org.bouncycastle.math.ec.ECCurve;
<ide> import org.bouncycastle.util.BigIntegers;
<ide> import org.bouncycastle.util.Strings;
<ide> import org.bouncycastle.util.test.TestRandomBigInteger;
<ide> import org.bouncycastle.x509.X509V3CertificateGenerator;
<ide>
<del>import java.io.*;
<del>import java.math.BigInteger;
<del>import java.security.*;
<del>import java.security.cert.Certificate;
<del>import java.security.cert.CertificateException;
<del>import java.security.cert.X509Certificate;
<del>import java.security.spec.PKCS8EncodedKeySpec;
<del>import java.security.spec.X509EncodedKeySpec;
<del>import java.util.Date;
<del>
<ide> public class GOST3410Test
<del> extends SimpleTest {
<add> extends SimpleTest
<add>{
<ide> private void ecGOST3410Test()
<del> throws Exception {
<add> throws Exception
<add> {
<ide>
<ide> BigInteger r = new BigInteger("29700980915817952874371204983938256990422752107994319651632687982059210933395");
<ide> BigInteger s = new BigInteger("46959264877825372965922731380059061821746083849389763294914877353246631700866");
<ide> BigInteger mod_p = new BigInteger("57896044618658097711785492504343953926634992332820282019728792003956564821041"); //p
<ide>
<ide> ECCurve curve = new ECCurve.Fp(
<del> mod_p, // p
<del> new BigInteger("7"), // a
<del> new BigInteger("43308876546767276905765904595650931995942111794451039583252968842033849580414")); // b
<add> mod_p, // p
<add> new BigInteger("7"), // a
<add> new BigInteger("43308876546767276905765904595650931995942111794451039583252968842033849580414")); // b
<ide>
<ide> ECParameterSpec spec = new ECParameterSpec(
<del> curve,
<del> curve.createPoint(
<del> new BigInteger("2"), // x
<del> new BigInteger("4018974056539037503335449422937059775635739389905545080690979365213431566280")), // y
<del> new BigInteger("57896044618658097711785492504343953927082934583725450622380973592137631069619")); // q
<add> curve,
<add> curve.createPoint(
<add> new BigInteger("2"), // x
<add> new BigInteger("4018974056539037503335449422937059775635739389905545080690979365213431566280")), // y
<add> new BigInteger("57896044618658097711785492504343953927082934583725450622380973592137631069619")); // q
<ide>
<ide> ECPrivateKeySpec priKey = new ECPrivateKeySpec(
<del> new BigInteger("55441196065363246126355624130324183196576709222340016572108097750006097525544"), // d
<del> spec);
<add> new BigInteger("55441196065363246126355624130324183196576709222340016572108097750006097525544"), // d
<add> spec);
<ide>
<ide> ECPublicKeySpec pubKey = new ECPublicKeySpec(
<del> curve.createPoint(
<del> new BigInteger("57520216126176808443631405023338071176630104906313632182896741342206604859403"), // x
<del> new BigInteger("17614944419213781543809391949654080031942662045363639260709847859438286763994")), // y
<del> spec);
<add> curve.createPoint(
<add> new BigInteger("57520216126176808443631405023338071176630104906313632182896741342206604859403"), // x
<add> new BigInteger("17614944419213781543809391949654080031942662045363639260709847859438286763994")), // y
<add> spec);
<ide>
<ide> Signature sgr = Signature.getInstance("ECGOST3410", "BC");
<ide> KeyFactory f = KeyFactory.getInstance("ECGOST3410", "BC");
<ide>
<ide> sgr.initSign(sKey, k);
<ide>
<del> byte[] message = new byte[]{(byte) 'a', (byte) 'b', (byte) 'c'};
<add> byte[] message = new byte[]{(byte)'a', (byte)'b', (byte)'c'};
<ide>
<ide> sgr.update(message);
<ide>
<ide>
<ide> sgr.update(message);
<ide>
<del> if (!sgr.verify(sigBytes)) {
<add> if (!sgr.verify(sigBytes))
<add> {
<ide> fail("ECGOST3410 verification failed");
<ide> }
<ide>
<ide> BigInteger[] sig = decode(sigBytes);
<ide>
<del> if (!r.equals(sig[0])) {
<add> if (!r.equals(sig[0]))
<add> {
<ide> fail(
<del> ": r component wrong." + Strings.lineSeparator()
<del> + " expecting: " + r + Strings.lineSeparator()
<del> + " got : " + sig[0]);
<del> }
<del>
<del> if (!s.equals(sig[1])) {
<add> ": r component wrong." + Strings.lineSeparator()
<add> + " expecting: " + r + Strings.lineSeparator()
<add> + " got : " + sig[0]);
<add> }
<add>
<add> if (!s.equals(sig[1]))
<add> {
<ide> fail(
<del> ": s component wrong." + Strings.lineSeparator()
<del> + " expecting: " + s + Strings.lineSeparator()
<del> + " got : " + sig[1]);
<add> ": s component wrong." + Strings.lineSeparator()
<add> + " expecting: " + s + Strings.lineSeparator()
<add> + " got : " + sig[1]);
<ide> }
<ide> }
<ide>
<ide> private void generationTest()
<del> throws Exception {
<add> throws Exception
<add> {
<ide> Signature s = Signature.getInstance("GOST3410", "BC");
<ide> KeyPairGenerator g = KeyPairGenerator.getInstance("GOST3410", "BC");
<ide> byte[] data = {1, 2, 3, 4, 5, 6, 7, 8, 9, 0};
<ide>
<ide> s.update(data);
<ide>
<del> if (!s.verify(sigBytes)) {
<add> if (!s.verify(sigBytes))
<add> {
<ide> fail("GOST3410 verification failed");
<ide> }
<ide>
<ide>
<ide> s.update(data);
<ide>
<del> if (!s.verify(sigBytes)) {
<add> if (!s.verify(sigBytes))
<add> {
<ide> fail("GOST3410 verification failed");
<ide> }
<ide>
<ide> KeyFactory f = KeyFactory.getInstance("GOST3410", "BC");
<ide>
<ide> X509EncodedKeySpec x509s = new X509EncodedKeySpec(vKey.getEncoded());
<del> GOST3410PublicKey k1 = (GOST3410PublicKey) f.generatePublic(x509s);
<del>
<del> if (!k1.getY().equals(((GOST3410PublicKey) vKey).getY())) {
<add> GOST3410PublicKey k1 = (GOST3410PublicKey)f.generatePublic(x509s);
<add>
<add> if (!k1.getY().equals(((GOST3410PublicKey)vKey).getY()))
<add> {
<ide> fail("public number not decoded properly");
<ide> }
<ide>
<del> if (!k1.getParameters().equals(((GOST3410PublicKey) vKey).getParameters())) {
<add> if (!k1.getParameters().equals(((GOST3410PublicKey)vKey).getParameters()))
<add> {
<ide> fail("public parameters not decoded properly");
<ide> }
<ide>
<ide> PKCS8EncodedKeySpec pkcs8 = new PKCS8EncodedKeySpec(sKey.getEncoded());
<del> GOST3410PrivateKey k2 = (GOST3410PrivateKey) f.generatePrivate(pkcs8);
<del>
<del> if (!k2.getX().equals(((GOST3410PrivateKey) sKey).getX())) {
<del> fail("private number not decoded properly");
<del> }
<del>
<del> if (!k2.getParameters().equals(((GOST3410PrivateKey) sKey).getParameters())) {
<del> fail("private number not decoded properly");
<del> }
<del>
<del> k2 = (GOST3410PrivateKey) serializeDeserialize(sKey);
<del> if (!k2.getX().equals(((GOST3410PrivateKey) sKey).getX())) {
<add> GOST3410PrivateKey k2 = (GOST3410PrivateKey)f.generatePrivate(pkcs8);
<add>
<add> if (!k2.getX().equals(((GOST3410PrivateKey)sKey).getX()))
<add> {
<add> fail("private number not decoded properly");
<add> }
<add>
<add> if (!k2.getParameters().equals(((GOST3410PrivateKey)sKey).getParameters()))
<add> {
<add> fail("private number not decoded properly");
<add> }
<add>
<add> k2 = (GOST3410PrivateKey)serializeDeserialize(sKey);
<add> if (!k2.getX().equals(((GOST3410PrivateKey)sKey).getX()))
<add> {
<ide> fail("private number not deserialised properly");
<ide> }
<ide>
<del> if (!k2.getParameters().equals(((GOST3410PrivateKey) sKey).getParameters())) {
<add> if (!k2.getParameters().equals(((GOST3410PrivateKey)sKey).getParameters()))
<add> {
<ide> fail("private number not deserialised properly");
<ide> }
<ide>
<ide> checkEquals(k2, sKey);
<ide>
<del> if (!(k2 instanceof PKCS12BagAttributeCarrier)) {
<add> if (!(k2 instanceof PKCS12BagAttributeCarrier))
<add> {
<ide> fail("private key not implementing PKCS12 attribute carrier");
<ide> }
<ide>
<del> k1 = (GOST3410PublicKey) serializeDeserialize(vKey);
<del>
<del> if (!k1.getY().equals(((GOST3410PublicKey) vKey).getY())) {
<add> k1 = (GOST3410PublicKey)serializeDeserialize(vKey);
<add>
<add> if (!k1.getY().equals(((GOST3410PublicKey)vKey).getY()))
<add> {
<ide> fail("public number not deserialised properly");
<ide> }
<ide>
<del> if (!k1.getParameters().equals(((GOST3410PublicKey) vKey).getParameters())) {
<add> if (!k1.getParameters().equals(((GOST3410PublicKey)vKey).getParameters()))
<add> {
<ide> fail("public parameters not deserialised properly");
<ide> }
<ide>
<ide>
<ide> s.update(data);
<ide>
<del> if (!s.verify(sigBytes)) {
<add> if (!s.verify(sigBytes))
<add> {
<ide> fail("ECGOST3410 verification failed");
<ide> }
<ide>
<ide> f = KeyFactory.getInstance("ECGOST3410", "BC");
<ide>
<ide> x509s = new X509EncodedKeySpec(vKey.getEncoded());
<del> ECPublicKey eck1 = (ECPublicKey) f.generatePublic(x509s);
<del>
<del> if (!eck1.getQ().equals(((ECPublicKey) vKey).getQ())) {
<add> ECPublicKey eck1 = (ECPublicKey)f.generatePublic(x509s);
<add>
<add> if (!eck1.getQ().equals(((ECPublicKey)vKey).getQ()))
<add> {
<ide> fail("public number not decoded properly");
<ide> }
<ide>
<del> if (!eck1.getParameters().equals(((ECPublicKey) vKey).getParameters())) {
<add> if (!eck1.getParameters().equals(((ECPublicKey)vKey).getParameters()))
<add> {
<ide> fail("public parameters not decoded properly");
<ide> }
<ide>
<ide> pkcs8 = new PKCS8EncodedKeySpec(sKey.getEncoded());
<del> ECPrivateKey eck2 = (ECPrivateKey) f.generatePrivate(pkcs8);
<del>
<del> if (!eck2.getD().equals(((ECPrivateKey) sKey).getD())) {
<del> fail("private number not decoded properly");
<del> }
<del>
<del> if (!eck2.getParameters().equals(((ECPrivateKey) sKey).getParameters())) {
<del> fail("private number not decoded properly");
<del> }
<del>
<del> eck2 = (ECPrivateKey) serializeDeserialize(sKey);
<del> if (!eck2.getD().equals(((ECPrivateKey) sKey).getD())) {
<del> fail("private number not decoded properly");
<del> }
<del>
<del> if (!eck2.getParameters().equals(((ECPrivateKey) sKey).getParameters())) {
<add> ECPrivateKey eck2 = (ECPrivateKey)f.generatePrivate(pkcs8);
<add>
<add> if (!eck2.getD().equals(((ECPrivateKey)sKey).getD()))
<add> {
<add> fail("private number not decoded properly");
<add> }
<add>
<add> if (!eck2.getParameters().equals(((ECPrivateKey)sKey).getParameters()))
<add> {
<add> fail("private number not decoded properly");
<add> }
<add>
<add> eck2 = (ECPrivateKey)serializeDeserialize(sKey);
<add> if (!eck2.getD().equals(((ECPrivateKey)sKey).getD()))
<add> {
<add> fail("private number not decoded properly");
<add> }
<add>
<add> if (!eck2.getParameters().equals(((ECPrivateKey)sKey).getParameters()))
<add> {
<ide> fail("private number not decoded properly");
<ide> }
<ide>
<ide> checkEquals(eck2, sKey);
<ide>
<del> if (!(eck2 instanceof PKCS12BagAttributeCarrier)) {
<add> if (!(eck2 instanceof PKCS12BagAttributeCarrier))
<add> {
<ide> fail("private key not implementing PKCS12 attribute carrier");
<ide> }
<ide>
<del> eck1 = (ECPublicKey) serializeDeserialize(vKey);
<del>
<del> if (!eck1.getQ().equals(((ECPublicKey) vKey).getQ())) {
<add> eck1 = (ECPublicKey)serializeDeserialize(vKey);
<add>
<add> if (!eck1.getQ().equals(((ECPublicKey)vKey).getQ()))
<add> {
<ide> fail("public number not decoded properly");
<ide> }
<ide>
<del> if (!eck1.getParameters().equals(((ECPublicKey) vKey).getParameters())) {
<add> if (!eck1.getParameters().equals(((ECPublicKey)vKey).getParameters()))
<add> {
<ide> fail("public parameters not decoded properly");
<ide> }
<ide>
<ide> }
<ide>
<ide>
<del> private void ecGOST34102012256Test() throws Exception {
<add> private void ecGOST34102012256Test()
<add> throws Exception
<add> {
<ide>
<ide> BigInteger r = new BigInteger("29700980915817952874371204983938256990422752107994319651632687982059210933395");
<ide> BigInteger s = new BigInteger("574973400270084654178925310019147038455227042649098563933718999175515839552");
<ide> BigInteger mod_p = new BigInteger("57896044618658097711785492504343953926634992332820282019728792003956564821041"); //p
<ide>
<ide> ECCurve curve = new ECCurve.Fp(
<del> mod_p, // p
<del> new BigInteger("7"), // a
<del> new BigInteger("43308876546767276905765904595650931995942111794451039583252968842033849580414")); // b
<add> mod_p, // p
<add> new BigInteger("7"), // a
<add> new BigInteger("43308876546767276905765904595650931995942111794451039583252968842033849580414")); // b
<ide>
<ide> ECParameterSpec spec = new ECParameterSpec(
<del> curve,
<del> curve.createPoint(
<del> new BigInteger("2"), // x
<del> new BigInteger("4018974056539037503335449422937059775635739389905545080690979365213431566280")), // y
<del> new BigInteger("57896044618658097711785492504343953927082934583725450622380973592137631069619")); // q
<add> curve,
<add> curve.createPoint(
<add> new BigInteger("2"), // x
<add> new BigInteger("4018974056539037503335449422937059775635739389905545080690979365213431566280")), // y
<add> new BigInteger("57896044618658097711785492504343953927082934583725450622380973592137631069619")); // q
<ide>
<ide> ECPrivateKeySpec priKey = new ECPrivateKeySpec(
<del> new BigInteger("55441196065363246126355624130324183196576709222340016572108097750006097525544"), // d
<del> spec);
<add> new BigInteger("55441196065363246126355624130324183196576709222340016572108097750006097525544"), // d
<add> spec);
<ide>
<ide> ECPublicKeySpec pubKey = new ECPublicKeySpec(
<del> curve.createPoint(
<del> new BigInteger("57520216126176808443631405023338071176630104906313632182896741342206604859403"), // x
<del> new BigInteger("17614944419213781543809391949654080031942662045363639260709847859438286763994")), // y
<del> spec);
<add> curve.createPoint(
<add> new BigInteger("57520216126176808443631405023338071176630104906313632182896741342206604859403"), // x
<add> new BigInteger("17614944419213781543809391949654080031942662045363639260709847859438286763994")), // y
<add> spec);
<ide>
<ide> KeyFactory f = KeyFactory.getInstance("ECGOST3410-2012", "BC");
<ide> PrivateKey sKey = f.generatePrivate(priKey);
<ide>
<ide> byte[] rev = e.toByteArray();
<ide> byte[] message = new byte[rev.length];
<del> for (int i = 0; i != rev.length; i++) {
<add> for (int i = 0; i != rev.length; i++)
<add> {
<ide> message[i] = rev[rev.length - 1 - i];
<ide> }
<ide> BigInteger[] sig = signer.generateSignature(message);
<ide>
<ide> ECPublicKey ecPublicKey = (ECPublicKey)vKey;
<ide> param = new ECPublicKeyParameters(
<del> ecPublicKey.getQ(),
<del> new ECDomainParameters(spec.getCurve(), spec.getG(), spec.getN()));
<del> signer.init(false, param );
<del>
<del> if(!signer.verifySignature(message, sig[0], sig[1])){
<add> ecPublicKey.getQ(),
<add> new ECDomainParameters(spec.getCurve(), spec.getG(), spec.getN()));
<add> signer.init(false, param);
<add>
<add> if (!signer.verifySignature(message, sig[0], sig[1]))
<add> {
<ide> fail("ECGOST3410 2012 verification failed");
<ide> }
<ide>
<del> if (!r.equals(sig[0])) {
<add> if (!r.equals(sig[0]))
<add> {
<ide> fail(
<del> ": r component wrong." + Strings.lineSeparator()
<del> + " expecting: " + r + Strings.lineSeparator()
<del> + " got : " + sig[0]);
<del> }
<del>
<del> if (!s.equals(sig[1])) {
<add> ": r component wrong." + Strings.lineSeparator()
<add> + " expecting: " + r + Strings.lineSeparator()
<add> + " got : " + sig[0]);
<add> }
<add>
<add> if (!s.equals(sig[1]))
<add> {
<ide> fail(
<del> ": s component wrong." + Strings.lineSeparator()
<del> + " expecting: " + s + Strings.lineSeparator()
<del> + " got : " + sig[1]);
<add> ": s component wrong." + Strings.lineSeparator()
<add> + " expecting: " + s + Strings.lineSeparator()
<add> + " got : " + sig[1]);
<ide> }
<ide>
<ide> KeyPairGenerator g = KeyPairGenerator.getInstance("ECGOST3410-2012", "BC");
<ide>
<ide> }
<ide>
<del> private void ecGOST34102012512Test() throws Exception {
<add> private void ecGOST34102012512Test()
<add> throws Exception
<add> {
<ide>
<ide> BigInteger r = new BigInteger("2489204477031349265072864643032147753667451319282131444027498637357611092810221795101871412928823716805959828708330284243653453085322004442442534151761462");
<ide> BigInteger s = new BigInteger("864523221707669519038849297382936917075023735848431579919598799313385180564748877195639672460179421760770893278030956807690115822709903853682831835159370");
<ide> BigInteger mod_p = new BigInteger("3623986102229003635907788753683874306021320925534678605086546150450856166624002482588482022271496854025090823603058735163734263822371964987228582907372403"); //p
<ide>
<ide> ECCurve curve = new ECCurve.Fp(
<del> mod_p, // p
<del> new BigInteger("7"), // a
<del> new BigInteger("1518655069210828534508950034714043154928747527740206436194018823352809982443793732829756914785974674866041605397883677596626326413990136959047435811826396")); // b
<add> mod_p, // p
<add> new BigInteger("7"), // a
<add> new BigInteger("1518655069210828534508950034714043154928747527740206436194018823352809982443793732829756914785974674866041605397883677596626326413990136959047435811826396")); // b
<ide>
<ide> ECParameterSpec spec = new ECParameterSpec(
<del> curve,
<del> curve.createPoint(
<del> new BigInteger("1928356944067022849399309401243137598997786635459507974357075491307766592685835441065557681003184874819658004903212332884252335830250729527632383493573274"), // x
<del> new BigInteger("2288728693371972859970012155529478416353562327329506180314497425931102860301572814141997072271708807066593850650334152381857347798885864807605098724013854")), // y
<del> new BigInteger("3623986102229003635907788753683874306021320925534678605086546150450856166623969164898305032863068499961404079437936585455865192212970734808812618120619743")); // q
<add> curve,
<add> curve.createPoint(
<add> new BigInteger("1928356944067022849399309401243137598997786635459507974357075491307766592685835441065557681003184874819658004903212332884252335830250729527632383493573274"), // x
<add> new BigInteger("2288728693371972859970012155529478416353562327329506180314497425931102860301572814141997072271708807066593850650334152381857347798885864807605098724013854")), // y
<add> new BigInteger("3623986102229003635907788753683874306021320925534678605086546150450856166623969164898305032863068499961404079437936585455865192212970734808812618120619743")); // q
<ide>
<ide> ECPrivateKeySpec priKey = new ECPrivateKeySpec(
<del> new BigInteger("610081804136373098219538153239847583006845519069531562982388135354890606301782255383608393423372379057665527595116827307025046458837440766121180466875860"), // d
<del> spec);
<add> new BigInteger("610081804136373098219538153239847583006845519069531562982388135354890606301782255383608393423372379057665527595116827307025046458837440766121180466875860"), // d
<add> spec);
<ide>
<ide> ECPublicKeySpec pubKey = new ECPublicKeySpec(
<del> curve.createPoint(
<del> new BigInteger("909546853002536596556690768669830310006929272546556281596372965370312498563182320436892870052842808608262832456858223580713780290717986855863433431150561"), // x
<del> new BigInteger("2921457203374425620632449734248415455640700823559488705164895837509539134297327397380287741428246088626609329139441895016863758984106326600572476822372076")), // y
<del> spec);
<add> curve.createPoint(
<add> new BigInteger("909546853002536596556690768669830310006929272546556281596372965370312498563182320436892870052842808608262832456858223580713780290717986855863433431150561"), // x
<add> new BigInteger("2921457203374425620632449734248415455640700823559488705164895837509539134297327397380287741428246088626609329139441895016863758984106326600572476822372076")), // y
<add> spec);
<ide>
<ide> KeyFactory f = KeyFactory.getInstance("ECGOST3410-2012", "BC");
<ide> PrivateKey sKey = f.generatePrivate(priKey);
<ide>
<ide> byte[] rev = e.toByteArray();
<ide> byte[] message = new byte[rev.length];
<del> for (int i = 0; i != rev.length; i++) {
<add> for (int i = 0; i != rev.length; i++)
<add> {
<ide> message[i] = rev[rev.length - 1 - i];
<ide> }
<ide> BigInteger[] sig = signer.generateSignature(message);
<ide>
<ide> ECPublicKey ecPublicKey = (ECPublicKey)vKey;
<ide> param = new ECPublicKeyParameters(
<del> ecPublicKey.getQ(),
<del> new ECDomainParameters(spec.getCurve(), spec.getG(), spec.getN()));
<del> signer.init(false, param );
<del>
<del> if(!signer.verifySignature(message, sig[0], sig[1])){
<add> ecPublicKey.getQ(),
<add> new ECDomainParameters(spec.getCurve(), spec.getG(), spec.getN()));
<add> signer.init(false, param);
<add>
<add> if (!signer.verifySignature(message, sig[0], sig[1]))
<add> {
<ide> fail("ECGOST3410 2012 verification failed");
<ide> }
<ide>
<del> if (!r.equals(sig[0])) {
<add> if (!r.equals(sig[0]))
<add> {
<ide> fail(
<del> ": r component wrong." + Strings.lineSeparator()
<del> + " expecting: " + r + Strings.lineSeparator()
<del> + " got : " + sig[0]);
<del> }
<del>
<del> if (!s.equals(sig[1])) {
<add> ": r component wrong." + Strings.lineSeparator()
<add> + " expecting: " + r + Strings.lineSeparator()
<add> + " got : " + sig[0]);
<add> }
<add>
<add> if (!s.equals(sig[1]))
<add> {
<ide> fail(
<del> ": s component wrong." + Strings.lineSeparator()
<del> + " expecting: " + s + Strings.lineSeparator()
<del> + " got : " + sig[1]);
<del> }
<del>
<del>
<del>
<del> }
<del>
<add> ": s component wrong." + Strings.lineSeparator()
<add> + " expecting: " + s + Strings.lineSeparator()
<add> + " got : " + sig[1]);
<add> }
<add>
<add>
<add> }
<ide>
<ide>
<ide> private void signatureGost12Test(String signatureAlg, int expectedSignLen, KeyPair p)
<del> throws Exception {
<add> throws Exception
<add> {
<ide> byte[] data = {1, 2, 3, 4, 5, 6, 7, 8, 9, 0};
<ide>
<ide> PrivateKey sKey = p.getPrivate();
<ide>
<ide> byte[] sigBytes = s.sign();
<ide>
<del> if (sigBytes.length != expectedSignLen) {
<add> if (sigBytes.length != expectedSignLen)
<add> {
<ide> fail(signatureAlg + " signature failed");
<ide> }
<ide>
<ide>
<ide> s.update(data);
<ide>
<del> if (!s.verify(sigBytes)) {
<add> if (!s.verify(sigBytes))
<add> {
<ide> fail(signatureAlg + " verification failed");
<ide> }
<ide>
<ide> }
<ide>
<del> private void encodedGost12Test(KeyPair p) throws Exception {
<add> private void encodedGost12Test(KeyPair p)
<add> throws Exception
<add> {
<ide> PrivateKey sKey = p.getPrivate();
<ide> PublicKey vKey = p.getPublic();
<ide>
<ide> KeyFactory f = KeyFactory.getInstance("ECGOST3410-2012", "BC");
<ide> X509EncodedKeySpec x509s = new X509EncodedKeySpec(vKey.getEncoded());
<del> ECPublicKey eck1 = (ECPublicKey) f.generatePublic(x509s);
<del>
<del> if (!eck1.getQ().equals(((ECPublicKey) vKey).getQ())) {
<add> ECPublicKey eck1 = (ECPublicKey)f.generatePublic(x509s);
<add>
<add> if (!eck1.getQ().equals(((ECPublicKey)vKey).getQ()))
<add> {
<ide> fail("public number not decoded properly");
<ide> }
<ide>
<del> if (!eck1.getParameters().equals(((ECPublicKey) vKey).getParameters())) {
<add> if (!eck1.getParameters().equals(((ECPublicKey)vKey).getParameters()))
<add> {
<ide> fail("public parameters not decoded properly");
<ide> }
<ide>
<ide> PKCS8EncodedKeySpec pkcs8 = new PKCS8EncodedKeySpec(sKey.getEncoded());
<del> ECPrivateKey eck2 = (ECPrivateKey) f.generatePrivate(pkcs8);
<del>
<del> if (!eck2.getD().equals(((ECPrivateKey) sKey).getD())) {
<del> fail("private number not decoded properly");
<del> }
<del>
<del> if (!eck2.getParameters().equals(((ECPrivateKey) sKey).getParameters())) {
<del> fail("private number not decoded properly");
<del> }
<del>
<del> eck2 = (ECPrivateKey) serializeDeserialize(sKey);
<del> if (!eck2.getD().equals(((ECPrivateKey) sKey).getD())) {
<del> fail("private number not decoded properly");
<del> }
<del>
<del> if (!eck2.getParameters().equals(((ECPrivateKey) sKey).getParameters())) {
<add> ECPrivateKey eck2 = (ECPrivateKey)f.generatePrivate(pkcs8);
<add>
<add> if (!eck2.getD().equals(((ECPrivateKey)sKey).getD()))
<add> {
<add> fail("private number not decoded properly");
<add> }
<add>
<add> if (!eck2.getParameters().equals(((ECPrivateKey)sKey).getParameters()))
<add> {
<add> fail("private number not decoded properly");
<add> }
<add>
<add> eck2 = (ECPrivateKey)serializeDeserialize(sKey);
<add> if (!eck2.getD().equals(((ECPrivateKey)sKey).getD()))
<add> {
<add> fail("private number not decoded properly");
<add> }
<add>
<add> if (!eck2.getParameters().equals(((ECPrivateKey)sKey).getParameters()))
<add> {
<ide> fail("private number not decoded properly");
<ide> }
<ide>
<ide> checkEquals(eck2, sKey);
<ide>
<del> if (!(eck2 instanceof PKCS12BagAttributeCarrier)) {
<add> if (!(eck2 instanceof PKCS12BagAttributeCarrier))
<add> {
<ide> fail("private key not implementing PKCS12 attribute carrier");
<ide> }
<ide>
<del> eck1 = (ECPublicKey) serializeDeserialize(vKey);
<del>
<del> if (!eck1.getQ().equals(((ECPublicKey) vKey).getQ())) {
<add> eck1 = (ECPublicKey)serializeDeserialize(vKey);
<add>
<add> if (!eck1.getQ().equals(((ECPublicKey)vKey).getQ()))
<add> {
<ide> fail("public number not decoded properly");
<ide> }
<ide>
<del> if (!eck1.getParameters().equals(((ECPublicKey) vKey).getParameters())) {
<add> if (!eck1.getParameters().equals(((ECPublicKey)vKey).getParameters()))
<add> {
<ide> fail("public parameters not decoded properly");
<ide> }
<ide>
<ide> }
<ide>
<ide> private void keyStoreTest(PrivateKey sKey, PublicKey vKey)
<del> throws KeyStoreException, IOException, NoSuchAlgorithmException, CertificateException, NoSuchProviderException, SignatureException, InvalidKeyException, UnrecoverableKeyException {
<add> throws KeyStoreException, IOException, NoSuchAlgorithmException, CertificateException, NoSuchProviderException, SignatureException, InvalidKeyException, UnrecoverableKeyException
<add> {
<ide> //
<ide> // keystore test
<ide> //
<ide>
<ide> ks.load(new ByteArrayInputStream(bOut.toByteArray()), "gost".toCharArray());
<ide>
<del> PrivateKey gKey = (PrivateKey) ks.getKey("gost", "gost".toCharArray());
<del> }
<del>
<del> private void checkEquals(Object o1, Object o2) {
<del> if (!o1.equals(o2)) {
<add> PrivateKey gKey = (PrivateKey)ks.getKey("gost", "gost".toCharArray());
<add> }
<add>
<add> private void checkEquals(Object o1, Object o2)
<add> {
<add> if (!o1.equals(o2))
<add> {
<ide> fail("comparison test failed");
<ide> }
<ide>
<del> if (o1.hashCode() != o2.hashCode()) {
<add> if (o1.hashCode() != o2.hashCode())
<add> {
<ide> fail("hashCode test failed");
<ide> }
<ide> }
<ide>
<ide> private void parametersTest()
<del> throws Exception {
<add> throws Exception
<add> {
<ide> // AlgorithmParameterGenerator a = AlgorithmParameterGenerator.getInstance("GOST3410", "BC");
<ide> // a.init(512, random);
<ide> // AlgorithmParameters params = a.generateParameters();
<ide>
<ide> s.update(data);
<ide>
<del> if (!s.verify(sigBytes)) {
<add> if (!s.verify(sigBytes))
<add> {
<ide> fail("GOST3410 verification failed");
<ide> }
<ide>
<ide> }
<ide>
<ide> private BigInteger[] decode(
<del> byte[] encoding) {
<add> byte[] encoding)
<add> {
<ide> byte[] r = new byte[32];
<ide> byte[] s = new byte[32];
<ide>
<ide> }
<ide>
<ide> private Object serializeDeserialize(Object o)
<del> throws Exception {
<add> throws Exception
<add> {
<ide> ByteArrayOutputStream bOut = new ByteArrayOutputStream();
<ide> ObjectOutputStream oOut = new ObjectOutputStream(bOut);
<ide>
<ide> return oIn.readObject();
<ide> }
<ide>
<del> public String getName() {
<add> public String getName()
<add> {
<ide> return "GOST3410/ECGOST3410/ECGOST3410 2012";
<ide> }
<ide>
<ide> public void performTest()
<del> throws Exception {
<add> throws Exception
<add> {
<ide> ecGOST3410Test();
<del> ecGOST34102012256Test();
<del> ecGOST34102012512Test();
<add>
<add> if (Security.getProvider("BC").containsKey("KeyFactory.ECGOST3410-2012"))
<add> {
<add> ecGOST34102012256Test();
<add> ecGOST34102012512Test();
<add> }
<add>
<ide> generationTest();
<ide> parametersTest();
<ide>
<ide> }
<ide>
<del> protected byte[] toByteArray(String input) {
<add> protected byte[] toByteArray(String input)
<add> {
<ide> byte[] bytes = new byte[input.length()];
<ide>
<del> for (int i = 0; i != bytes.length; i++) {
<del> bytes[i] = (byte) input.charAt(i);
<add> for (int i = 0; i != bytes.length; i++)
<add> {
<add> bytes[i] = (byte)input.charAt(i);
<ide> }
<ide>
<ide> return bytes;
<ide> }
<ide>
<ide> public static void main(
<del> String[] args) {
<add> String[] args)
<add> {
<ide> Security.addProvider(new BouncyCastleProvider());
<ide>
<ide> runTest(new GOST3410Test()); |
|
JavaScript | bsd-3-clause | 589960ef2e91960b8d817d8621c789991f6fcf73 | 0 | no-context/moo,tjvr/moo,no-context/moo,tjvr/moo |
const fs = require('fs')
const moo = require('../moo')
const compile = moo.compile
const python = require('./python')
// Drain a lexer: walk its iterator to the end and collect every token.
function lexAll(lexer) {
  const tokens = []
  for (const token of lexer) {
    tokens.push(token)
  }
  return tokens
}
// Compiler-level checks: rule validation happens in compile()/states(),
// before any input is ever lexed.
describe('moo compiler', () => {
  // moo builds one combined sticky regexp internally, so user-supplied
  // flags on individual rules must be rejected (the bare /foo/ is fine).
  test("warns for /g, /y, /i, /m", () => {
    expect(() => compile({ word: /foo/ })).not.toThrow()
    expect(() => compile({ word: /foo/g })).toThrow()
    expect(() => compile({ word: /foo/i })).toThrow()
    expect(() => compile({ word: /foo/y })).toThrow()
    expect(() => compile({ word: /foo/m })).toThrow()
  })

  // TODO warns for multiple capture groups
  // TODO wraps zero capture groups
  // TODO warns if no lineBreaks: true

  // Alternatives within one rule are ordered longest-literal first, so
  // 'token' is tried before 'tok' (inspects the compiled regexp source).
  test('sorts regexps and strings', () => {
    let lexer = moo.compile({
      tok: [/t[ok]+/, /\w/, 'tok', 'token']
    })
    expect(lexer.re.source.replace(/[(?:)]/g, '')).toBe('token|tok|t[ok]+|\\w')
  })

  // next:/push: targets must name a declared state; the error message
  // pinpoints the offending token and state.
  test('warns about missing states', () => {
    const rules = [
      {match: '=', next: 'missing'},
      {match: '=', push: 'missing'},
    ]
    for (const rule of rules) {
      expect(() => moo.states({start: {thing: rule}}))
        .toThrow("Missing state 'missing' (in token 'thing' of state 'start')")
    }
  })

  // next:/push:/pop: only make sense with moo.states(); plain compile()
  // must reject them.
  test('warns about inappropriate state-switching options', () => {
    const rules = [
      {match: '=', next: 'state'},
      {match: '=', push: 'state'},
      {match: '=', pop: true},
    ]
    for (const rule of rules) {
      expect(() => moo.compile({thing: rule}))
        .toThrow("State-switching options are not allowed in stateless lexers (for token 'thing')")
    }
  })
})
// Capture-group behavior: a single capturing group in a rule makes the
// token's `value` the captured text, while `size` stays the full match length.
describe('capturing groups', () => {
  test('compiles list of capturing RegExps', () => {
    expect(() => moo.compile({
      tok: [/(foo)/, /(bar)/]
    })).not.toThrow()
  })

  // e.g. 'fubar' matches 5 chars (size: 5) but only 'bar' is captured (value).
  test('captures & reports correct size', () => {
    let lexer = moo.compile({
      fubar: /fu(bar)/,
      string: /"(.*?)"/,
      full: /(quxx)/,
      moo: /moo(moo)*moo/,
      space: / +/,
    })
    lexer.reset('fubar "yes" quxx moomoomoomoo')
    let tokens = lexAll(lexer).filter(t => t.type !== 'space')
    expect(tokens.shift()).toMatchObject({ type: 'fubar', value: 'bar', size: 5 })
    expect(tokens.shift()).toMatchObject({ type: 'string', value: 'yes', size: 5 })
    expect(tokens.shift()).toMatchObject({ value: 'quxx', size: 4 })
    expect(tokens.shift()).toMatchObject({ value: 'moo', size: 12 })
  })
})
describe('moo lexer', () => {
var simpleLexer = compile({
word: /[a-z]+/,
number: /[0-9]+/,
ws: / +/,
})
test('vaguely works', () => {
simpleLexer.reset('ducks are 123 bad')
expect(simpleLexer.next()).toMatchObject({ type: 'word', value: 'ducks' })
expect(simpleLexer.next()).toMatchObject({ type: 'ws', value: ' ' })
expect(simpleLexer.next()).toMatchObject({ type: 'word', value: 'are' })
})
test('is iterable', () => {
simpleLexer.reset('only 321 cows')
const toks = [['word', 'only'], ['ws', ' '], ['number', '321'], ['ws', ' '], ['word', 'cows']]
for (const t of simpleLexer) {
const [type, value] = toks.shift()
expect(t).toMatchObject({type, value})
}
expect(simpleLexer.next()).not.toBeTruthy()
})
test('accepts rules in an object', () => {
const lexer = compile({
word: /[a-z]+/,
number: /[0-9]+/,
space: / +/,
})
lexer.reset('ducks are 123 bad')
expect(lexer.next()).toMatchObject({type: 'word', value: 'ducks'})
expect(lexer.next()).toMatchObject({type: 'space', value: ' '})
})
test('accepts a list of regexps', () => {
const lexer = compile({
number: [
/[0-9]+\.[0-9]+/,
/[0-9]+/,
],
space: / +/,
})
lexer.reset('12.04 123 3.14')
var tokens = lexAll(lexer).filter(t => t.type !== 'space')
expect(tokens.shift()).toMatchObject({type: 'number', value: '12.04'})
expect(tokens.shift()).toMatchObject({type: 'number', value: '123'})
expect(tokens.shift()).toMatchObject({type: 'number', value: '3.14'})
})
test('no capture groups', () => {
let lexer = compile({
a: /a+/,
b: /b|c/,
})
lexer.reset('aaaaabcbcbcbc')
expect(lexer.next().value).toEqual('aaaaa')
expect(lexer.next().value).toEqual('b')
expect(lexer.next().value).toEqual('c')
expect(lexer.next().value).toEqual('b')
})
test('multiline', () => {
var lexer = compile({
file: { match: /([^]+)/, lineBreaks: true },
}).reset('I like to moo\na lot')
expect(lexer.next().value).toBe('I like to moo\na lot')
})
test('match EOL $', () => {
var lexer = compile({
x_eol: /x$/,
x: /x/,
WS: / +/,
NL: { match: /\n/, lineBreaks: true },
other: /[^ \n]+/,
}).reset('x \n x\n yz x')
let tokens = lexAll(lexer).filter(t => t.type !== 'WS')
expect(tokens.map(t => [t.type, t.value])).toEqual([
['x', 'x'],
['NL', '\n'],
['x_eol', 'x'],
['NL', '\n'],
['other', 'yz'],
['x_eol', 'x'],
])
})
test('match BOL ^', () => {
var lexer = compile({
x_bol: /^x/,
x: /x/,
WS: / +/,
NL: { match: /\n/, lineBreaks: true },
other: /[^ \n]+/,
}).reset('x \n x\nx yz')
let tokens = lexAll(lexer).filter(t => t.type !== 'WS')
expect(tokens.map(t => [t.type, t.value])).toEqual([
['x_bol', 'x'],
['NL', '\n'],
['x', 'x'],
['NL', '\n'],
['x_bol', 'x'],
['other', 'yz'],
])
})
test('token to string conversion', () => {
const lexer = compile({
apples: /()a/,
pears: /p/,
}).reset('ap')
expect(String(lexer.next())).toBe('apples')
expect(String(lexer.next())).toBe('p')
})
// TODO test / design API for errors
// - check the reported error location
test('kurt tokens', () => {
let pythonLexer = compile(python.rules)
let tokens = lexAll(pythonLexer.reset(fs.readFileSync('test/kurt.py', 'utf-8')))
expect(tokens.length).toBe(14513)
})
// TODO test clone()
})
describe('moo stateful lexer', () => {
test('switches states', () => {
const lexer = moo.states({
start: {
word: /\w+/,
eq: {match: '=', next: 'ab'},
},
ab: {
a: 'a',
b: 'b',
semi: {match: ';', next: 'start'},
},
})
lexer.reset('one=ab;two=')
expect(lexAll(lexer).map(({type, value}) => [type, value])).toEqual([
['word', 'one'],
['eq', '='],
['a', 'a'],
['b', 'b'],
['semi', ';'],
['word', 'two'],
['eq', '='],
])
})
const parens = moo.states({
start: {
word: /\w+/,
lpar: {match: '(', push: 'inner'},
rpar: ')',
},
inner: {
thing: /\w+/,
lpar: {match: '(', push: 'inner'},
rpar: {match: ')', pop: true},
},
})
test('maintains a stack', () => {
parens.reset('a(b(c)d)e')
expect(lexAll(parens).map(({type, value}) => [type, value])).toEqual([
['word', 'a'],
['lpar', '('],
['thing', 'b'],
['lpar', '('],
['thing', 'c'],
['rpar', ')'],
['thing', 'd'],
['rpar', ')'],
['word', 'e'],
])
})
test('allows popping too many times', () => {
parens.reset(')e')
expect(lexAll(parens).map(({type, value}) => [type, value])).toEqual([
['rpar', ')'],
['word', 'e'],
])
})
test('lexes interpolation example', () => {
let lexer = moo.states({
main: {
strstart: {match: '`', push: 'lit'},
ident: /\w+/,
lbrace: {match: '{', push: 'main'},
rbrace: {match: '}', pop: 1},
colon: ':',
space: {match: /\s+/, lineBreaks: true},
},
lit: {
interp: {match: '${', push: 'main'},
escape: /\\./,
strend: {match: '`', pop: 1},
const: {match: /(?:[^$`]|\$(?!\{))+/, lineBreaks: true},
},
}).feed('`a${{c: d}}e`')
expect(lexAll(lexer).map(t => t.type).join(' ')).toBe('strstart const interp lbrace ident colon space ident rbrace rbrace const strend')
})
})
describe('line numbers', () => {
var testLexer = compile({
WS: / +/,
word: /[a-z]+/,
NL: { match: /\n/, lineBreaks: true },
})
test('counts line numbers', () => {
var tokens = lexAll(testLexer.reset('cow\nfarm\ngrass'))
expect(tokens.map(t => t.value)).toEqual(['cow', '\n', 'farm', '\n', 'grass'])
expect(tokens.map(t => t.lineBreaks)).toEqual([0, 1, 0, 1, 0])
expect(tokens.map(t => t.size)).toEqual([3, 1, 4, 1, 5])
expect(tokens.map(t => t.line)).toEqual([1, 1, 2, 2, 3])
expect(tokens.map(t => t.col)).toEqual([1, 4, 1, 5, 1])
})
test('tracks columns', () => {
var lexer = compile({
WS: / +/,
thing: { match: /[a-z\n]+/, lineBreaks: true },
})
lexer.reset('pie cheese\nsalad what\n ')
expect(lexer.next()).toMatchObject({ value: 'pie', col: 1 })
expect(lexer.next()).toMatchObject({ value: ' ', col: 4 })
expect(lexer.next()).toMatchObject({ value: 'cheese\nsalad', col: 5, line: 1 })
expect(lexer.next()).toMatchObject({ value: ' ', col: 6, line: 2 })
expect(lexer.next()).toMatchObject({ value: 'what\n', col: 7, line: 2 })
expect(lexer.next()).toMatchObject({ value: ' ', col: 1, line: 3 })
})
test('tries to warn if rule matches \\n', () => {
expect(() => compile([['whitespace', /\s+/]])).toThrow()
expect(() => compile([['multiline', /q[^]*/]])).not.toThrow()
})
test('resets state', () => {
var lexer = compile({
WS: / +/,
word: /[a-z]+/,
})
lexer.reset('potatoes\nsalad')
expect(lexer).toMatchObject({buffer: 'potatoes\nsalad', line: 1, col: 1})
lexAll(lexer)
expect(lexer).toMatchObject({line: 2, col: 6})
lexer.reset('cheesecake')
expect(lexer).toMatchObject({buffer: 'cheesecake', line: 1, col: 1})
})
// TODO test clone()
})
// save()/reset(buffer, state): a lexer's line/col position can be snapshotted
// and later fed back into reset() to resume lexing a follow-up chunk.
describe('save/restore', () => {
  const testLexer = compile({
    word: /[a-z]+/,
    NL: { match: '\n', lineBreaks: true },
  })
  // After lexing 'one\ntwo' the cursor sits on line 2, column 4.
  test('can be saved', () => {
    testLexer.reset('one\ntwo')
    lexAll(testLexer)
    expect(testLexer.save()).toEqual({line: 2, col: 4})
  })
  // Passing a saved state to reset() restores position alongside the new buffer.
  test('can be restored', () => {
    testLexer.reset('\nthree', {line: 2, col: 4})
    expect(testLexer).toMatchObject({line: 2, col: 4, buffer: '\nthree'})
  })
})
describe('python tokenizer', () => {
test("1 + 2", () => {
expect(python.outputTokens("1 + 2")).toEqual([
'NUMBER "1"',
'OP "+"',
'NUMBER "2"',
'ENDMARKER ""',
])
})
// use non-greedy matching
test('triple-quoted strings', () => {
let example = '"""abc""" 1+1 """def"""'
let pythonLexer = compile(python.rules)
expect(lexAll(pythonLexer.reset(example)).map(t => t.value)).toEqual(
['"""abc"""', " ", "1", "+", "1", " ", '"""def"""']
)
})
test('example python file', () => {
expect(python.outputTokens(python.pythonFile)).toEqual([
// 'ENCODING "utf-8"',
'COMMENT "#!/usr/local/bin/python3"',
'NL "\\n"',
'NAME "import"',
'NAME "sys"',
'NEWLINE "\\n"',
'NAME "from"',
'NAME "tokenize"',
'NAME "import"',
'NAME "tokenize"',
'OP ","',
'NAME "tok_name"',
'NEWLINE "\\n"',
'NAME "import"',
'NAME "json"',
'NEWLINE "\\n"',
'NAME "from"',
'NAME "io"',
'NAME "import"',
'NAME "BytesIO"',
'NEWLINE "\\n"',
'NL "\\n"',
'NAME "path"',
'OP "="',
'NAME "sys"',
'OP "."',
'NAME "argv"',
'OP "["',
'NUMBER "1"',
'OP "]"',
'NEWLINE "\\n"',
'NAME "for"',
'NAME "info"',
'NAME "in"',
'NAME "tokenize"',
'OP "("',
'NAME "open"',
'OP "("',
'NAME "path"',
'OP ","',
'STRING "rb"',
'OP ")"',
'OP "."',
'NAME "readline"',
'OP ")"',
'OP ":"',
'NEWLINE "\\n"',
'INDENT " "',
'NAME "print"',
'OP "("',
'NAME "tok_name"',
'OP "["',
'NAME "info"',
'OP "."',
'NAME "type"',
'OP "]"',
'OP ","',
'NAME "json"',
'OP "."',
'NAME "dumps"',
'OP "("',
'NAME "info"',
'OP "."',
'NAME "string"',
'OP ")"',
'OP ")"',
'NEWLINE "\\n"',
// 'NL "\\n"',
'DEDENT ""',
'ENDMARKER ""',
])
})
test("kurt python", () => {
let tokens = python.outputTokens(fs.readFileSync('test/kurt.py', 'utf-8'))
expect(tokens[100]).toBe('NAME "def"')
expect(tokens.pop()).toBe('ENDMARKER ""')
tokens.pop()
expect(tokens.pop()).not.toBe('ERRORTOKEN ""')
expect(tokens.length).toBe(11616)
// let expected = fs.readFileSync('test/kurt-tokens.txt', 'utf-8').split('\n')
// expect(tokens).toEqual(expected)
})
})
// Regression check: the moo-based tosh tokenizer must produce exactly the
// same token stream as the legacy hand-written tokenizer on the sample file.
describe('tosh tokenizer', () => {
  const tosh = require('./tosh')

  test('tosh', () => {
    let oldTokens = tosh.oldTokenizer(tosh.exampleFile)
    expect(tosh.tokenize(tosh.exampleFile)).toEqual(oldTokens)
  })
})
| test/test.js |
const fs = require('fs')
const moo = require('../moo')
const compile = moo.compile
const python = require('./python')
// Collect tokens by calling next() repeatedly until it yields a falsy value.
function lexAll(lexer) {
  const result = []
  for (let token = lexer.next(); token; token = lexer.next()) {
    result.push(token)
  }
  return result
}
describe('moo compiler', () => {
test("warns for /g, /y, /i, /m", () => {
expect(() => compile({ word: /foo/ })).not.toThrow()
expect(() => compile({ word: /foo/g })).toThrow()
expect(() => compile({ word: /foo/i })).toThrow()
expect(() => compile({ word: /foo/y })).toThrow()
expect(() => compile({ word: /foo/m })).toThrow()
})
// TODO warns for multiple capture groups
// TODO wraps zero capture groups
// TODO warns if no lineBreaks: true
test('sorts regexps and strings', () => {
let lexer = moo.compile({
tok: [/t[ok]+/, /\w/, 'tok', 'token']
})
expect(lexer.re.source.replace(/[(?:)]/g, '')).toBe('token|tok|t[ok]+|\\w')
})
test('warns about missing states', () => {
const rules = [
{match: '=', next: 'missing'},
{match: '=', push: 'missing'},
]
for (const rule of rules) {
expect(() => moo.states({start: {thing: rule}}))
.toThrow("Missing state 'missing' (in token 'thing' of state 'start')")
}
})
test('warns about inappropriate state-switching options', () => {
const rules = [
{match: '=', next: 'state'},
{match: '=', push: 'state'},
{match: '=', pop: true},
]
for (const rule of rules) {
expect(() => moo.compile({thing: rule}))
.toThrow("State-switching options are not allowed in stateless lexers (for token 'thing')")
}
})
})
describe('capturing groups', () => {
test('compiles list of capturing RegExps', () => {
expect(() => moo.compile({
tok: [/(foo)/, /(bar)/]
})).not.toThrow()
})
test('captures & reports correct size', () => {
let lexer = moo.compile({
fubar: /fu(bar)/,
string: /"(.*?)"/,
full: /(quxx)/,
moo: /moo(moo)*moo/,
space: / +/,
})
lexer.reset('fubar "yes" quxx moomoomoomoo')
let tokens = lexAll(lexer).filter(t => t.type !== 'space')
expect(tokens.shift()).toMatchObject({ type: 'fubar', value: 'bar', size: 5 })
expect(tokens.shift()).toMatchObject({ type: 'string', value: 'yes', size: 5 })
expect(tokens.shift()).toMatchObject({ value: 'quxx', size: 4 })
expect(tokens.shift()).toMatchObject({ value: 'moo', size: 12 })
})
})
describe('moo lexer', () => {
var simpleLexer = compile({
word: /[a-z]+/,
number: /[0-9]+/,
ws: / +/,
})
test('vaguely works', () => {
simpleLexer.reset('ducks are 123 bad')
expect(simpleLexer.next()).toMatchObject({ type: 'word', value: 'ducks' })
expect(simpleLexer.next()).toMatchObject({ type: 'ws', value: ' ' })
expect(simpleLexer.next()).toMatchObject({ type: 'word', value: 'are' })
})
test('is iterable', () => {
simpleLexer.reset('only 321 cows')
const toks = [['word', 'only'], ['ws', ' '], ['number', '321'], ['ws', ' '], ['word', 'cows']]
for (const t of simpleLexer) {
const [type, value] = toks.shift()
expect(t).toMatchObject({type, value})
}
expect(simpleLexer.next()).not.toBeTruthy()
})
test('accepts rules in an object', () => {
const lexer = compile({
word: /[a-z]+/,
number: /[0-9]+/,
space: / +/,
})
lexer.reset('ducks are 123 bad')
expect(lexer.next()).toMatchObject({type: 'word', value: 'ducks'})
expect(lexer.next()).toMatchObject({type: 'space', value: ' '})
})
test('accepts a list of regexps', () => {
const lexer = compile({
number: [
/[0-9]+\.[0-9]+/,
/[0-9]+/,
],
space: / +/,
})
lexer.reset('12.04 123 3.14')
var tokens = lexAll(lexer).filter(t => t.type !== 'space')
expect(tokens.shift()).toMatchObject({type: 'number', value: '12.04'})
expect(tokens.shift()).toMatchObject({type: 'number', value: '123'})
expect(tokens.shift()).toMatchObject({type: 'number', value: '3.14'})
})
test('no capture groups', () => {
let lexer = compile({
a: /a+/,
b: /b|c/,
})
lexer.reset('aaaaabcbcbcbc')
expect(lexer.next().value).toEqual('aaaaa')
expect(lexer.next().value).toEqual('b')
expect(lexer.next().value).toEqual('c')
expect(lexer.next().value).toEqual('b')
})
test('multiline', () => {
var lexer = compile({
file: { match: /([^]+)/, lineBreaks: true },
}).reset('I like to moo\na lot')
expect(lexer.next().value).toBe('I like to moo\na lot')
})
test('match EOL $', () => {
var lexer = compile({
x_eol: /x$/,
x: /x/,
WS: / +/,
NL: { match: /\n/, lineBreaks: true },
other: /[^ \n]+/,
}).reset('x \n x\n yz x')
let tokens = lexAll(lexer).filter(t => t.type !== 'WS')
expect(tokens.map(t => [t.type, t.value])).toEqual([
['x', 'x'],
['NL', '\n'],
['x_eol', 'x'],
['NL', '\n'],
['other', 'yz'],
['x_eol', 'x'],
])
})
test('match BOL ^', () => {
var lexer = compile({
x_bol: /^x/,
x: /x/,
WS: / +/,
NL: { match: /\n/, lineBreaks: true },
other: /[^ \n]+/,
}).reset('x \n x\nx yz')
let tokens = lexAll(lexer).filter(t => t.type !== 'WS')
expect(tokens.map(t => [t.type, t.value])).toEqual([
['x_bol', 'x'],
['NL', '\n'],
['x', 'x'],
['NL', '\n'],
['x_bol', 'x'],
['other', 'yz'],
])
})
test('token to string conversion', () => {
const lexer = compile({
apples: /()a/,
pears: /p/,
}).reset('ap')
expect(String(lexer.next())).toBe('apples')
expect(String(lexer.next())).toBe('p')
})
// TODO test / design API for errors
// - check the reported error location
test('kurt tokens', () => {
let pythonLexer = compile(python.rules)
let tokens = lexAll(pythonLexer.reset(fs.readFileSync('test/kurt.py', 'utf-8')))
expect(tokens.length).toBe(14513)
})
// TODO test clone()
})
describe('moo stateful lexer', () => {
test('switches states', () => {
const lexer = moo.states({
start: {
word: /\w+/,
eq: {match: '=', next: 'ab'},
},
ab: {
a: 'a',
b: 'b',
semi: {match: ';', next: 'start'},
},
})
lexer.reset('one=ab;two=')
expect(lexAll(lexer).map(({type, value}) => [type, value])).toEqual([
['word', 'one'],
['eq', '='],
['a', 'a'],
['b', 'b'],
['semi', ';'],
['word', 'two'],
['eq', '='],
])
})
const parens = moo.states({
start: {
word: /\w+/,
lpar: {match: '(', push: 'inner'},
rpar: ')',
},
inner: {
thing: /\w+/,
lpar: {match: '(', push: 'inner'},
rpar: {match: ')', pop: true},
},
})
test('maintains a stack', () => {
parens.reset('a(b(c)d)e')
expect(lexAll(parens).map(({type, value}) => [type, value])).toEqual([
['word', 'a'],
['lpar', '('],
['thing', 'b'],
['lpar', '('],
['thing', 'c'],
['rpar', ')'],
['thing', 'd'],
['rpar', ')'],
['word', 'e'],
])
})
test('allows popping too many times', () => {
parens.reset(')e')
expect(lexAll(parens).map(({type, value}) => [type, value])).toEqual([
['rpar', ')'],
['word', 'e'],
])
})
test('lexes interpolation example', () => {
let lexer = moo.states({
main: {
strstart: {match: '`', push: 'lit'},
ident: /\w+/,
lbrace: {match: '{', push: 'main'},
rbrace: {match: '}', pop: 1},
colon: ':',
space: {match: /\s+/, lineBreaks: true},
},
lit: {
interp: {match: '${', push: 'main'},
escape: /\\./,
strend: {match: '`', pop: 1},
const: {match: /(?:[^$`]|\$(?!\{))+/, lineBreaks: true},
},
}).feed('`a${{c: d}}e`')
expect(lexAll(lexer).map(t => t.type).join(' ')).toBe('strstart const interp lbrace ident colon space ident rbrace rbrace const strend')
})
})
describe('line numbers', () => {
var testLexer = compile({
WS: / +/,
word: /[a-z]+/,
NL: { match: /\n/, lineBreaks: true },
})
test('counts line numbers', () => {
var tokens = lexAll(testLexer.reset('cow\nfarm\ngrass'))
expect(tokens.map(t => t.value)).toEqual(['cow', '\n', 'farm', '\n', 'grass'])
expect(tokens.map(t => t.lineBreaks)).toEqual([0, 1, 0, 1, 0])
expect(tokens.map(t => t.size)).toEqual([3, 1, 4, 1, 5])
expect(tokens.map(t => t.line)).toEqual([1, 1, 2, 2, 3])
expect(tokens.map(t => t.col)).toEqual([1, 4, 1, 5, 1])
})
test('tracks columns', () => {
var lexer = compile({
WS: / +/,
thing: { match: /[a-z\n]+/, lineBreaks: true },
})
lexer.reset('pie cheese\nsalad what\n ')
expect(lexer.next()).toMatchObject({ value: 'pie', col: 1 })
expect(lexer.next()).toMatchObject({ value: ' ', col: 4 })
expect(lexer.next()).toMatchObject({ value: 'cheese\nsalad', col: 5, line: 1 })
expect(lexer.next()).toMatchObject({ value: ' ', col: 6, line: 2 })
expect(lexer.next()).toMatchObject({ value: 'what\n', col: 7, line: 2 })
expect(lexer.next()).toMatchObject({ value: ' ', col: 1, line: 3 })
})
test('tries to warn if rule matches \\n', () => {
expect(() => compile([['whitespace', /\s+/]])).toThrow()
expect(() => compile([['multiline', /q[^]*/]])).not.toThrow()
})
test('resets state', () => {
var lexer = compile({
WS: / +/,
word: /[a-z]+/,
})
lexer.reset('potatoes\nsalad')
expect(lexer).toMatchObject({buffer: 'potatoes\nsalad', line: 1, col: 1})
lexAll(lexer)
expect(lexer).toMatchObject({line: 2, col: 6})
lexer.reset('cheesecake')
expect(lexer).toMatchObject({buffer: 'cheesecake', line: 1, col: 1})
})
// TODO test clone()
})
describe('save/restore', () => {
const testLexer = compile({
word: /[a-z]+/,
NL: { match: '\n', lineBreaks: true },
})
test('can be saved', () => {
testLexer.reset('one\ntwo')
lexAll(testLexer)
expect(testLexer.save()).toEqual({line: 2, col: 4})
})
test('can be restored', () => {
testLexer.reset('\nthree', {line: 2, col: 4})
expect(testLexer).toMatchObject({line: 2, col: 4, buffer: '\nthree'})
})
})
describe('python tokenizer', () => {
test("1 + 2", () => {
expect(python.outputTokens("1 + 2")).toEqual([
'NUMBER "1"',
'OP "+"',
'NUMBER "2"',
'ENDMARKER ""',
])
})
// use non-greedy matching
test('triple-quoted strings', () => {
let example = '"""abc""" 1+1 """def"""'
let pythonLexer = compile(python.rules)
expect(lexAll(pythonLexer.reset(example)).map(t => t.value)).toEqual(
['"""abc"""', " ", "1", "+", "1", " ", '"""def"""']
)
})
test('example python file', () => {
expect(python.outputTokens(python.pythonFile)).toEqual([
// 'ENCODING "utf-8"',
'COMMENT "#!/usr/local/bin/python3"',
'NL "\\n"',
'NAME "import"',
'NAME "sys"',
'NEWLINE "\\n"',
'NAME "from"',
'NAME "tokenize"',
'NAME "import"',
'NAME "tokenize"',
'OP ","',
'NAME "tok_name"',
'NEWLINE "\\n"',
'NAME "import"',
'NAME "json"',
'NEWLINE "\\n"',
'NAME "from"',
'NAME "io"',
'NAME "import"',
'NAME "BytesIO"',
'NEWLINE "\\n"',
'NL "\\n"',
'NAME "path"',
'OP "="',
'NAME "sys"',
'OP "."',
'NAME "argv"',
'OP "["',
'NUMBER "1"',
'OP "]"',
'NEWLINE "\\n"',
'NAME "for"',
'NAME "info"',
'NAME "in"',
'NAME "tokenize"',
'OP "("',
'NAME "open"',
'OP "("',
'NAME "path"',
'OP ","',
'STRING "rb"',
'OP ")"',
'OP "."',
'NAME "readline"',
'OP ")"',
'OP ":"',
'NEWLINE "\\n"',
'INDENT " "',
'NAME "print"',
'OP "("',
'NAME "tok_name"',
'OP "["',
'NAME "info"',
'OP "."',
'NAME "type"',
'OP "]"',
'OP ","',
'NAME "json"',
'OP "."',
'NAME "dumps"',
'OP "("',
'NAME "info"',
'OP "."',
'NAME "string"',
'OP ")"',
'OP ")"',
'NEWLINE "\\n"',
// 'NL "\\n"',
'DEDENT ""',
'ENDMARKER ""',
])
})
test("kurt python", () => {
let tokens = python.outputTokens(fs.readFileSync('test/kurt.py', 'utf-8'))
expect(tokens[100]).toBe('NAME "def"')
expect(tokens.pop()).toBe('ENDMARKER ""')
tokens.pop()
expect(tokens.pop()).not.toBe('ERRORTOKEN ""')
expect(tokens.length).toBe(11616)
// let expected = fs.readFileSync('test/kurt-tokens.txt', 'utf-8').split('\n')
// expect(tokens).toEqual(expected)
})
})
describe('tosh tokenizer', () => {
const tosh = require('./tosh')
test('tosh', () => {
let oldTokens = tosh.oldTokenizer(tosh.exampleFile)
expect(tosh.tokenize(tosh.exampleFile)).toEqual(oldTokens)
})
})
| Implement lexAll the cool way
| test/test.js | Implement lexAll the cool way | <ide><path>est/test.js
<ide> const compile = moo.compile
<ide> const python = require('./python')
<ide>
<del>
<del>function lexAll(lexer) {
<del> var tokens = []
<del> var token
<del> while ((token = lexer.next())) {
<del> tokens.push(token)
<del> }
<del> return tokens
<del>}
<del>
<add>function lexAll(lexer) {return Array.from(lexer)}
<ide>
<ide>
<ide> describe('moo compiler', () => { |
|
JavaScript | isc | 7f61f1bdc9f57c9f6144b086c163fdaecc30916a | 0 | braddunbar/ozymandias,braddunbar/ozymandias | 'use strict'
const assets = require('./assets')
const qs = require('querystring')
const React = require('react')
const ReactDOM = require('react-dom/server')
const toJSON = require('object-tojson')
const url = require('url')
// Express middleware: attaches res._react(view, locals). The view is rendered
// only to produce a state object; that state is then served either as JSON or
// as server-side rendered React HTML inside the 'layout' template, chosen by
// content negotiation (res.format).
module.exports = (req, res, next) => {
  res._react = (view, locals) => {
    // The view itself must not wrap in a layout; 'layout' is rendered below.
    locals.layout = false
    res.render(view, locals, (e, state) => {
      if (e) return res.error(e)
      // Fold query-string parameters plus request path/url and the current
      // asset version into the state handed to the client/component.
      const location = url.parse(req.originalUrl)
      const params = qs.parse((location.search || '').slice(1))
      Object.assign(state, params, {
        path: location.pathname,
        url: req.originalUrl,
        version: assets.version
      })
      res.format({
        // JSON clients get the raw state object.
        json: () => res.json(state),
        // HTML clients get the app component rendered to a string and
        // embedded in the layout, with the serialized state alongside.
        html: () => {
          state = toJSON(state)
          // A per-request component (req.component) overrides the app default.
          const component = req.component || req.app.get('component')
          const element = React.createElement(component, state)
          const html = ReactDOM.renderToString(element)
          res.render('layout', {
            layout: false,
            state: state,
            content: `<div id='root'>${html}</div>`
          })
        }
      })
    })
  }
  next()
}
| react.js | 'use strict'
const assets = require('./assets')
const qs = require('querystring')
const React = require('react')
const ReactDOM = require('react-dom/server')
const toJSON = require('object-tojson')
const url = require('url')
module.exports = (req, res, next) => {
res._react = (view, locals) => {
locals.layout = false
res.render(view, locals, (e, state) => {
if (e) return res.error(e)
const location = url.parse(req.originalUrl)
const params = qs.parse((location.search || '').slice(1))
Object.assign(state, params, {
path: location.pathname,
url: req.originalUrl,
version: assets.version
})
if (req.accepts('json')) return res.json(state)
state = toJSON(state)
const component = req.component || req.app.get('component')
const element = React.createElement(component, state)
const html = ReactDOM.renderToString(element)
res.render('layout', {
layout: false,
state: state,
content: `<div id='root'>${html}</div>`
})
})
}
next()
}
| Use res.format
| react.js | Use res.format | <ide><path>eact.js
<ide> version: assets.version
<ide> })
<ide>
<del> if (req.accepts('json')) return res.json(state)
<add> res.format({
<ide>
<del> state = toJSON(state)
<add> json: () => res.json(state),
<ide>
<del> const component = req.component || req.app.get('component')
<del> const element = React.createElement(component, state)
<del> const html = ReactDOM.renderToString(element)
<add> html: () => {
<add> state = toJSON(state)
<ide>
<del> res.render('layout', {
<del> layout: false,
<del> state: state,
<del> content: `<div id='root'>${html}</div>`
<add> const component = req.component || req.app.get('component')
<add> const element = React.createElement(component, state)
<add> const html = ReactDOM.renderToString(element)
<add>
<add> res.render('layout', {
<add> layout: false,
<add> state: state,
<add> content: `<div id='root'>${html}</div>`
<add> })
<add> }
<add>
<ide> })
<ide> })
<ide> } |
|
JavaScript | apache-2.0 | 68065e71bc41609d0467d5ddfc278440452c91de | 0 | o2r-project/o2r-muncher,o2r-project/o2r-muncher,nuest/o2r-muncher,o2r-project/o2r-muncher,o2r-project/o2r-muncher,nuest/o2r-muncher,nuest/o2r-muncher,nuest/o2r-muncher,nuest/o2r-muncher,o2r-project/o2r-muncher | /*
* (C) Copyright 2016 o2r project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
/* eslint-env mocha */
const assert = require('chai').assert;
const request = require('request');
const config = require('../config/config');
const chai = require('chai');
chai.use(require('chai-datetime'));
const createCompendiumPostRequest = require('./util').createCompendiumPostRequest;
require("./setup")
const cookie_o2r = 's:C0LIrsxGtHOGHld8Nv2jedjL4evGgEHo.GMsWD5Vveq0vBt7/4rGeoH5Xx7Dd2pgZR9DvhKCyDTY';
const cookie_plain = 's:yleQfdYnkh-sbj9Ez--_TWHVhXeXNEgq.qRmINNdkRuJ+iHGg5woRa9ydziuJ+DzFG9GnAZRvaaM';
const cookie_admin = 's:hJRjapOTVCEvlMYCb8BXovAOi2PEOC4i.IEPb0lmtGojn2cVk2edRuomIEanX6Ddz87egE5Pe8UM';
const cookie_editor = 's:xWHihqZq6jEAObwbfowO5IwdnBxohM7z.VxqsRC5A1VqJVspChcxVPuzEKtRE+aKLF8k3nvCcZ8g';
// Integration tests against a running muncher instance (global.test_host):
// upload one compendium, then verify the extracted metadata exposed by the API.
describe('Reading compendium metadata', () => {
  let compendium_id = '';
  // Upload the 'metatainer' test compendium once; its id is reused by every
  // test below (tests in this suite are order-dependent).
  before(function (done) {
    let req = createCompendiumPostRequest('./test/bagtainers/metatainer', cookie_o2r);
    this.timeout(10000);
    request(req, (err, res, body) => {
      compendium_id = JSON.parse(body).id;
      done();
    });
  });

  describe('GET /api/v1/compendium/<id of loaded compendium>', () => {
    // NOTE(review): this and the next test hit the listing endpoint
    // /api/v1/compendium without the id, despite the describe title — confirm
    // whether they should use compendium_id like the third test.
    it('should respond with HTTP 200 OK', (done) => {
      request(global.test_host + '/api/v1/compendium', (err, res) => {
        assert.ifError(err);
        assert.equal(res.statusCode, 200);
        done();
      });
    });
    it('should respond with a valid JSON document', (done) => {
      request(global.test_host + '/api/v1/compendium', (err, res, body) => {
        assert.ifError(err);
        assert.isObject(JSON.parse(body));
        done();
      });
    });
    it('should respond with document containing metadata properties', (done) => {
      request(global.test_host + '/api/v1/compendium/' + compendium_id, (err, res, body) => {
        assert.ifError(err);
        let response = JSON.parse(body);
        assert.property(response, 'metadata');
        done();
      });
    });
  });

  describe('Metadata objects contents for compendium', () => {
    // Filled by the first test below; the remaining tests assert against it,
    // so they rely on mocha's sequential execution order.
    var metadata = {};
    // NOTE(review): typo in the test name — "should response" → "should respond".
    it('should response with document', (done) => {
      request(global.test_host + '/api/v1/compendium/' + compendium_id, (err, res, body) => {
        assert.ifError(err);
        let response = JSON.parse(body);
        // Pick the configured target element (e.g. the o2r metadata subtree).
        metadata = response.metadata[config.meta.extract.targetElement];
        done();
        //console.log(JSON.stringify(metadata));
      });
    });
    it('should contain non-empty title', (done) => {
      assert.property(metadata, 'title');
      assert.propertyNotVal(metadata, 'title', '');
      done();
    });
    it('should contain correct title', (done) => {
      assert.property(metadata, 'title');
      assert.include(metadata.title, 'This is the title');
      done();
    });
    it('should contain correct abstract', (done) => {
      assert.property(metadata, 'abstract');
      assert.include(metadata.abstract, 'Suspendisse ac ornare ligula.');
      done();
    });
    // The main document of the metatainer fixture.
    let main_file = 'document.Rmd';
    it('should contain non-empty paperSource', (done) => {
      assert.property(metadata, 'paperSource');
      assert.propertyVal(metadata, 'paperSource', main_file);
      done();
    });
    it('should contain filepath information', (done) => {
      assert.property(metadata, 'file');
      done();
    });
    // filepath is relative to the compendium root: <id>/data/<file>.
    it('should contain correct filepath', (done) => {
      assert.property(metadata.file, 'filepath');
      assert.propertyVal(metadata.file, 'filepath', compendium_id + '/data/' + main_file);
      done();
    });
    it('should contain correct file', (done) => {
      assert.property(metadata.file, 'filename');
      assert.propertyVal(metadata.file, 'filename', main_file);
      done();
    });
    // The ERC identifier must equal the compendium id assigned on upload.
    it('should contain the correct erc identifier', (done) => {
      assert.property(metadata, 'ercIdentifier');
      assert.propertyVal(metadata, 'ercIdentifier', compendium_id);
      done();
    });
    it('should contain author array with all author names', (done) => {
      assert.property(metadata, 'author');
      assert.isArray(metadata.author);
      let authorNames = metadata.author.map(function (author) { return author.name; });
      assert.include(authorNames, 'Ted Tester');
      assert.include(authorNames, 'Carl Connauthora');
      done();
    });
  });
});
describe('Updating compendium metadata', () => {
let compendium_id = '';
before(function (done) {
let req = createCompendiumPostRequest('./test/bagtainers/metatainer', cookie_o2r);
this.timeout(10000);
request(req, (err, res, body) => {
compendium_id = JSON.parse(body).id;
done();
});
});
describe('GET /api/v1/compendium/<id of loaded compendium>/metadata', () => {
it('should respond with HTTP 200 OK', (done) => {
request(global.test_host + '/api/v1/compendium/' + compendium_id + '/metadata', (err, res, body) => {
assert.ifError(err);
assert.equal(res.statusCode, 200);
done();
});
});
it('should respond with a valid JSON document', (done) => {
request(global.test_host + '/api/v1/compendium/' + compendium_id + '/metadata', (err, res, body) => {
assert.ifError(err);
assert.isObject(JSON.parse(body));
done();
});
});
it('should respond with document containing _only_ the o2r metadata properties', (done) => {
request(global.test_host + '/api/v1/compendium/' + compendium_id + '/metadata', (err, res, body) => {
assert.ifError(err);
let response = JSON.parse(body);
assert.property(response, 'metadata');
assert.property(response, 'id');
assert.notProperty(response, 'raw');
assert.property(response.metadata, 'o2r');
assert.notProperty(response.metadata, 'raw');
assert.notProperty(response.metadata, 'zenodo');
assert.notProperty(response.metadata, 'orcid');
assert.notProperty(response.metadata, 'cris');
assert.propertyVal(response.metadata.o2r, 'title', 'This is the title: it contains a colon');
done();
});
});
});
let data = {
'o2r': {
'title': 'New title on the block',
'author': 'npm test!'
}
};
let j = request.jar();
let ck = request.cookie('connect.sid=' + cookie_plain);
j.setCookie(ck, global.test_host);
let req_doc_plain = {
method: 'PUT',
jar: j,
json: data,
timeout: 10000
};
let j2 = request.jar();
let ck2 = request.cookie('connect.sid=' + cookie_o2r);
j2.setCookie(ck2, global.test_host);
let req_doc_o2r = {
method: 'PUT',
jar: j2,
json: data,
timeout: 10000
};
let j3 = request.jar();
let ck3 = request.cookie('connect.sid=' + cookie_editor);
j3.setCookie(ck3, global.test_host);
let req_doc_editor = {
method: 'PUT',
jar: j3,
json: {
'o2r': {
'title': 'New edited title on the block',
'author': 'editor!'
}
},
timeout: 10000
};
describe('PUT /api/v1/compendium/<id of loaded compendium>/metadata with wrong user', () => {
it('should respond with HTTP 401', (done) => {
req_doc_plain.uri = global.test_host + '/api/v1/compendium/' + compendium_id + '/metadata';
request(req_doc_plain, (err, res, body) => {
assert.ifError(err);
assert.equal(res.statusCode, 401);
done();
});
}).timeout(20000);
it('should respond with a valid JSON document with error message', (done) => {
req_doc_plain.uri = global.test_host + '/api/v1/compendium/' + compendium_id + '/metadata';
request(req_doc_plain, (err, res, body) => {
assert.ifError(err);
assert.isObject(body);
assert.propertyVal(body, 'error', 'not authorized to edit metadata of ' + compendium_id);
done();
});
}).timeout(20000);
});
describe('PUT /api/v1/compendium/<id of loaded compendium>/metadata with *author* user', () => {
it('should respond with HTTP 200', (done) => {
req_doc_o2r.uri = global.test_host + '/api/v1/compendium/' + compendium_id + '/metadata';
request(req_doc_o2r, (err, res, body) => {
assert.ifError(err);
assert.equal(res.statusCode, 200);
done();
});
}).timeout(20000);
it('should respond with a valid JSON document with the updated metadata', (done) => {
req_doc_o2r.uri = global.test_host + '/api/v1/compendium/' + compendium_id + '/metadata';
request(req_doc_o2r, (err, res, body) => {
assert.ifError(err);
assert.isObject(body);
assert.include(body.metadata.o2r.title, 'New title on the block');
done();
});
}).timeout(20000);
it('should have the updated metadata in the metadata section', (done) => {
request(global.test_host + '/api/v1/compendium/' + compendium_id, (err, res, body) => {
assert.ifError(err);
let response = JSON.parse(body);
assert.property(response, 'metadata');
assert.property(response.metadata, 'o2r');
assert.property(response.metadata, 'raw');
assert.property(response.metadata.o2r, 'title');
assert.property(response.metadata.o2r, 'author');
assert.propertyVal(response.metadata.o2r, 'title', 'New title on the block');
assert.propertyVal(response.metadata.o2r, 'author', 'npm test!');
assert.notProperty(response.metadata.o2r, 'abstract');
assert.notProperty(response.metadata.o2r, 'file');
done();
});
}).timeout(20000);
});
describe('PUT /api/v1/compendium/<id of loaded compendium>/metadata with *editor* user', () => {
it('should respond with a valid JSON document with the updated metadata', (done) => {
req_doc_editor.uri = global.test_host + '/api/v1/compendium/' + compendium_id + '/metadata';
request(req_doc_editor, (err, res, body) => {
assert.ifError(err);
assert.isObject(body);
assert.include(body.metadata.o2r.title, 'New edited title on the block');
done();
});
}).timeout(20000);
it('should have the updated metadata in the metadata section', (done) => {
request(global.test_host + '/api/v1/compendium/' + compendium_id, (err, res, body) => {
assert.ifError(err);
let response = JSON.parse(body);
assert.propertyVal(response.metadata.o2r, 'title', 'New edited title on the block');
done();
});
}).timeout(20000);
});
describe('PUT /api/v1/compendium/<id of loaded compendium>/metadata with invalid payload', () => {
let data = "{ \
'o2r': { \
[] \
'title': // yes this is invalid by purpose \
} \
}";
let j = request.jar();
let ck = request.cookie('connect.sid=' + cookie_o2r);
j.setCookie(ck, global.test_host);
let req = {
method: 'PUT',
jar: j,
json: data,
timeout: 10000
};
it('should respond with HTTP 400', (done) => {
req.uri = global.test_host + '/api/v1/compendium/' + compendium_id + '/metadata';
request(req, (err, res, body) => {
assert.ifError(err);
assert.equal(res.statusCode, 400);
done();
});
}).timeout(20000);
it('should respond with a valid JSON document and error message', (done) => {
req.uri = global.test_host + '/api/v1/compendium/' + compendium_id + '/metadata';
request(req, (err, res, body) => {
assert.ifError(err);
assert.include(body, 'SyntaxError');
done();
});
}).timeout(20000);
});
describe('PUT /api/v1/compendium/<id of loaded compendium>/metadata with invalid payload structure', () => {
let data = {
'not_o2r': {
'title': 'New title on the block (NTOTB)'
}
};
let j = request.jar();
let ck = request.cookie('connect.sid=' + cookie_o2r);
j.setCookie(ck, global.test_host);
let req = {
method: 'PUT',
jar: j,
json: data,
timeout: 10000
};
it('should respond with HTTP 422', (done) => {
req.uri = global.test_host + '/api/v1/compendium/' + compendium_id + '/metadata';
request(req, (err, res, body) => {
assert.ifError(err);
assert.equal(res.statusCode, 422);
done();
});
});
it('should respond with a valid JSON document and error message', (done) => {
req.uri = global.test_host + '/api/v1/compendium/' + compendium_id + '/metadata';
request(req, (err, res, body) => {
assert.ifError(err);
assert.isObject(body);
assert.property(body, 'error');
assert.propertyVal(body, 'error', "JSON with root element 'o2r' required");
done();
});
});
});
});
describe('Brokering updated compendium metadata', () => {
let compendium_id = '';
before(function (done) {
let req = createCompendiumPostRequest('./test/bagtainers/metatainer', cookie_o2r);
this.timeout(10000);
request(req, (err, res, body) => {
compendium_id = JSON.parse(body).id;
let data = {
'o2r': {
'title': 'New brokered title on the block'
}
};
let j2 = request.jar();
let ck2 = request.cookie('connect.sid=' + cookie_o2r);
j2.setCookie(ck2, global.test_host);
let req_doc_o2r = {
method: 'PUT',
jar: j2,
json: data,
timeout: 10000
};
req_doc_o2r.uri = global.test_host + '/api/v1/compendium/' + compendium_id + '/metadata';
request(req_doc_o2r, (err, res, body) => {
assert.ifError(err);
done();
});
});
});
describe('PUT /api/v1/compendium/<id of loaded compendium>/metadata with author user', () => {
it('should have the brokered metadata in the respective section', (done) => {
console.log(global.test_host + '/api/v1/compendium/' + compendium_id);
request(global.test_host + '/api/v1/compendium/' + compendium_id, (err, res, body) => {
assert.ifError(err);
let response = JSON.parse(body);
assert.property(response, 'metadata');
assert.property(response.metadata, 'zenodo');
assert.property(response.metadata.zenodo, 'metadata');
assert.property(response.metadata.zenodo.metadata, 'title');
assert.propertyVal(response.metadata.zenodo.metadata, 'title', 'New brokered title on the block');
done();
});
}).timeout(20000);
});
});
| test/meta.js | /*
* (C) Copyright 2016 o2r project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
/* eslint-env mocha */
const assert = require('chai').assert;
const request = require('request');
const config = require('../config/config');
const chai = require('chai');
chai.use(require('chai-datetime'));
const createCompendiumPostRequest = require('./util').createCompendiumPostRequest;
require("./setup")
const cookie_o2r = 's:C0LIrsxGtHOGHld8Nv2jedjL4evGgEHo.GMsWD5Vveq0vBt7/4rGeoH5Xx7Dd2pgZR9DvhKCyDTY';
const cookie_plain = 's:yleQfdYnkh-sbj9Ez--_TWHVhXeXNEgq.qRmINNdkRuJ+iHGg5woRa9ydziuJ+DzFG9GnAZRvaaM';
const cookie_admin = 's:hJRjapOTVCEvlMYCb8BXovAOi2PEOC4i.IEPb0lmtGojn2cVk2edRuomIEanX6Ddz87egE5Pe8UM';
const cookie_editor = 's:xWHihqZq6jEAObwbfowO5IwdnBxohM7z.VxqsRC5A1VqJVspChcxVPuzEKtRE+aKLF8k3nvCcZ8g';
describe('Reading compendium metadata', () => {
let compendium_id = '';
before(function (done) {
let req = createCompendiumPostRequest('./test/bagtainers/metatainer', cookie_o2r);
this.timeout(10000);
request(req, (err, res, body) => {
compendium_id = JSON.parse(body).id;
done();
});
});
describe('GET /api/v1/compendium/<id of loaded compendium>', () => {
it('should respond with HTTP 200 OK', (done) => {
request(global.test_host + '/api/v1/compendium', (err, res) => {
assert.ifError(err);
assert.equal(res.statusCode, 200);
done();
});
});
it('should respond with a valid JSON document', (done) => {
request(global.test_host + '/api/v1/compendium', (err, res, body) => {
assert.ifError(err);
assert.isObject(JSON.parse(body));
done();
});
});
it('should respond with document containing metadata properties', (done) => {
request(global.test_host + '/api/v1/compendium/' + compendium_id, (err, res, body) => {
assert.ifError(err);
let response = JSON.parse(body);
assert.property(response, 'metadata');
done();
});
});
});
describe('Metadata objects contents for compendium', () => {
var metadata = {};
it('should response with document', (done) => {
request(global.test_host + '/api/v1/compendium/' + compendium_id, (err, res, body) => {
assert.ifError(err);
let response = JSON.parse(body);
metadata = response.metadata[config.meta.extract.targetElement];
done();
//console.log(JSON.stringify(metadata));
});
});
it('should contain non-empty title', (done) => {
assert.property(metadata, 'title');
assert.propertyNotVal(metadata, 'title', '');
done();
});
it('should contain correct title', (done) => {
assert.property(metadata, 'title');
assert.include(metadata.title, 'This is the title');
done();
});
it('should contain correct abstract', (done) => {
assert.property(metadata, 'abstract');
assert.include(metadata.abstract, 'Suspendisse ac ornare ligula.');
done();
});
let main_file = 'document.Rmd';
it('should contain non-empty paperSource', (done) => {
assert.property(metadata, 'paperSource');
assert.propertyVal(metadata, 'paperSource', main_file);
done();
});
it('should contain filepath information', (done) => {
assert.property(metadata, 'file');
done();
});
it('should contain correct filepath', (done) => {
assert.property(metadata.file, 'filepath');
assert.propertyVal(metadata.file, 'filepath', compendium_id + '/data/' + main_file);
done();
});
it('should contain correct file', (done) => {
assert.property(metadata.file, 'filename');
assert.propertyVal(metadata.file, 'filename', main_file);
done();
});
it('should contain the correct erc identifier', (done) => {
assert.property(metadata, 'ercIdentifier');
assert.propertyVal(metadata, 'ercIdentifier', compendium_id);
done();
});
it('should contain author array with all author names', (done) => {
assert.property(metadata, 'author');
assert.isArray(metadata.author);
let authorNames = metadata.author.map(function (author) { return author.name; });
assert.include(authorNames, 'Ted Tester');
assert.include(authorNames, 'Carl Connauthora');
done();
});
});
});
describe('Updating compendium metadata', () => {
let compendium_id = '';
before(function (done) {
let req = createCompendiumPostRequest('./test/bagtainers/metatainer', cookie_o2r);
this.timeout(10000);
request(req, (err, res, body) => {
compendium_id = JSON.parse(body).id;
done();
});
});
describe('GET /api/v1/compendium/<id of loaded compendium>/metadata', () => {
it('should respond with HTTP 200 OK', (done) => {
request(global.test_host + '/api/v1/compendium/' + compendium_id + '/metadata', (err, res, body) => {
assert.ifError(err);
assert.equal(res.statusCode, 200);
done();
});
});
it('should respond with a valid JSON document', (done) => {
request(global.test_host + '/api/v1/compendium/' + compendium_id + '/metadata', (err, res, body) => {
assert.ifError(err);
assert.isObject(JSON.parse(body));
done();
});
});
it('should respond with document containing _only_ the o2r metadata properties', (done) => {
request(global.test_host + '/api/v1/compendium/' + compendium_id + '/metadata', (err, res, body) => {
assert.ifError(err);
let response = JSON.parse(body);
assert.property(response, 'metadata');
assert.property(response, 'id');
assert.notProperty(response, 'raw');
assert.property(response.metadata, 'o2r');
assert.notProperty(response.metadata, 'raw');
assert.notProperty(response.metadata, 'zenodo');
assert.notProperty(response.metadata, 'orcid');
assert.notProperty(response.metadata, 'cris');
assert.propertyVal(response.metadata.o2r, 'title', 'This is the title: it contains a colon');
done();
});
});
});
let data = {
'o2r': {
'title': 'New title on the block',
'author': 'npm test!'
}
};
let j = request.jar();
let ck = request.cookie('connect.sid=' + cookie_plain);
j.setCookie(ck, global.test_host);
let req_doc_plain = {
method: 'PUT',
jar: j,
json: data,
timeout: 10000
};
let j2 = request.jar();
let ck2 = request.cookie('connect.sid=' + cookie_o2r);
j2.setCookie(ck2, global.test_host);
let req_doc_o2r = {
method: 'PUT',
jar: j2,
json: data,
timeout: 10000
};
let j3 = request.jar();
let ck3 = request.cookie('connect.sid=' + cookie_editor);
j3.setCookie(ck3, global.test_host);
let req_doc_editor = {
method: 'PUT',
jar: j3,
json: {
'o2r': {
'title': 'New edited title on the block',
'author': 'editor!'
}
},
timeout: 10000
};
describe('PUT /api/v1/compendium/<id of loaded compendium>/metadata with wrong user', () => {
it('should respond with HTTP 401', (done) => {
req_doc_plain.uri = global.test_host + '/api/v1/compendium/' + compendium_id + '/metadata';
request(req_doc_plain, (err, res, body) => {
assert.ifError(err);
assert.equal(res.statusCode, 401);
done();
});
}).timeout(20000);
it('should respond with a valid JSON document with error message', (done) => {
req_doc_plain.uri = global.test_host + '/api/v1/compendium/' + compendium_id + '/metadata';
request(req_doc_plain, (err, res, body) => {
assert.ifError(err);
assert.isObject(body);
assert.propertyVal(body, 'error', 'not authorized');
done();
});
}).timeout(20000);
});
describe('PUT /api/v1/compendium/<id of loaded compendium>/metadata with *author* user', () => {
it('should respond with HTTP 200', (done) => {
req_doc_o2r.uri = global.test_host + '/api/v1/compendium/' + compendium_id + '/metadata';
request(req_doc_o2r, (err, res, body) => {
assert.ifError(err);
assert.equal(res.statusCode, 200);
done();
});
}).timeout(20000);
it('should respond with a valid JSON document with the updated metadata', (done) => {
req_doc_o2r.uri = global.test_host + '/api/v1/compendium/' + compendium_id + '/metadata';
request(req_doc_o2r, (err, res, body) => {
assert.ifError(err);
assert.isObject(body);
assert.include(body.metadata.o2r.title, 'New title on the block');
done();
});
}).timeout(20000);
it('should have the updated metadata in the metadata section', (done) => {
request(global.test_host + '/api/v1/compendium/' + compendium_id, (err, res, body) => {
assert.ifError(err);
let response = JSON.parse(body);
assert.property(response, 'metadata');
assert.property(response.metadata, 'o2r');
assert.property(response.metadata, 'raw');
assert.property(response.metadata.o2r, 'title');
assert.property(response.metadata.o2r, 'author');
assert.propertyVal(response.metadata.o2r, 'title', 'New title on the block');
assert.propertyVal(response.metadata.o2r, 'author', 'npm test!');
assert.notProperty(response.metadata.o2r, 'abstract');
assert.notProperty(response.metadata.o2r, 'file');
done();
});
}).timeout(20000);
});
describe('PUT /api/v1/compendium/<id of loaded compendium>/metadata with *editor* user', () => {
it('should respond with a valid JSON document with the updated metadata', (done) => {
req_doc_editor.uri = global.test_host + '/api/v1/compendium/' + compendium_id + '/metadata';
request(req_doc_editor, (err, res, body) => {
assert.ifError(err);
assert.isObject(body);
assert.include(body.metadata.o2r.title, 'New edited title on the block');
done();
});
}).timeout(20000);
it('should have the updated metadata in the metadata section', (done) => {
request(global.test_host + '/api/v1/compendium/' + compendium_id, (err, res, body) => {
assert.ifError(err);
let response = JSON.parse(body);
assert.propertyVal(response.metadata.o2r, 'title', 'New edited title on the block');
done();
});
}).timeout(20000);
});
describe('PUT /api/v1/compendium/<id of loaded compendium>/metadata with invalid payload', () => {
let data = "{ \
'o2r': { \
[] \
'title': // yes this is invalid by purpose \
} \
}";
let j = request.jar();
let ck = request.cookie('connect.sid=' + cookie_o2r);
j.setCookie(ck, global.test_host);
let req = {
method: 'PUT',
jar: j,
json: data,
timeout: 10000
};
it('should respond with HTTP 400', (done) => {
req.uri = global.test_host + '/api/v1/compendium/' + compendium_id + '/metadata';
request(req, (err, res, body) => {
assert.ifError(err);
assert.equal(res.statusCode, 400);
done();
});
}).timeout(20000);
it('should respond with a valid JSON document and error message', (done) => {
req.uri = global.test_host + '/api/v1/compendium/' + compendium_id + '/metadata';
request(req, (err, res, body) => {
assert.ifError(err);
assert.include(body, 'SyntaxError');
done();
});
}).timeout(20000);
});
describe('PUT /api/v1/compendium/<id of loaded compendium>/metadata with invalid payload structure', () => {
let data = {
'not_o2r': {
'title': 'New title on the block (NTOTB)'
}
};
let j = request.jar();
let ck = request.cookie('connect.sid=' + cookie_o2r);
j.setCookie(ck, global.test_host);
let req = {
method: 'PUT',
jar: j,
json: data,
timeout: 10000
};
it('should respond with HTTP 422', (done) => {
req.uri = global.test_host + '/api/v1/compendium/' + compendium_id + '/metadata';
request(req, (err, res, body) => {
assert.ifError(err);
assert.equal(res.statusCode, 422);
done();
});
});
it('should respond with a valid JSON document and error message', (done) => {
req.uri = global.test_host + '/api/v1/compendium/' + compendium_id + '/metadata';
request(req, (err, res, body) => {
assert.ifError(err);
assert.isObject(body);
assert.property(body, 'error');
assert.propertyVal(body, 'error', "JSON with root element 'o2r' required");
done();
});
});
});
});
describe('Brokering updated compendium metadata', () => {
let compendium_id = '';
before(function (done) {
let req = createCompendiumPostRequest('./test/bagtainers/metatainer', cookie_o2r);
this.timeout(10000);
request(req, (err, res, body) => {
compendium_id = JSON.parse(body).id;
let data = {
'o2r': {
'title': 'New brokered title on the block'
}
};
let j2 = request.jar();
let ck2 = request.cookie('connect.sid=' + cookie_o2r);
j2.setCookie(ck2, global.test_host);
let req_doc_o2r = {
method: 'PUT',
jar: j2,
json: data,
timeout: 10000
};
req_doc_o2r.uri = global.test_host + '/api/v1/compendium/' + compendium_id + '/metadata';
request(req_doc_o2r, (err, res, body) => {
assert.ifError(err);
done();
});
});
});
describe('PUT /api/v1/compendium/<id of loaded compendium>/metadata with author user', () => {
it('should have the brokered metadata in the respective section', (done) => {
console.log(global.test_host + '/api/v1/compendium/' + compendium_id);
request(global.test_host + '/api/v1/compendium/' + compendium_id, (err, res, body) => {
assert.ifError(err);
let response = JSON.parse(body);
assert.property(response, 'metadata');
assert.property(response.metadata, 'zenodo');
assert.property(response.metadata.zenodo, 'metadata');
assert.property(response.metadata.zenodo.metadata, 'title');
assert.propertyVal(response.metadata.zenodo.metadata, 'title', 'New brokered title on the block');
done();
});
}).timeout(20000);
});
});
| fix test with new error message
| test/meta.js | fix test with new error message | <ide><path>est/meta.js
<ide> request(req_doc_plain, (err, res, body) => {
<ide> assert.ifError(err);
<ide> assert.isObject(body);
<del> assert.propertyVal(body, 'error', 'not authorized');
<add> assert.propertyVal(body, 'error', 'not authorized to edit metadata of ' + compendium_id);
<ide> done();
<ide> });
<ide> }).timeout(20000); |
|
JavaScript | agpl-3.0 | 9eec37d4e61bb7d56c680defde10037022d3a1b2 | 0 | frankrousseau/cozy-controller-old,frankrousseau/cozy-controller-old | /*
* git.js: Implementation of the repository pattern for remote git repositories.
*
* (C) 2010, Nodejitsu Inc.
*
*/
var util = require('util'),
path = require('path'),
exec = require('child_process').exec,
haibu = require('../../haibu'),
Repository = require('./repository').Repository;
//
// ### function Git (app, options)
// #### @app {App} Application manifest to wrap
// #### @options {Object} Options for this instance
// Constructor function for the Git repository object. Responsible
// for cloning, and updating Git repositories.
//
var Git = exports.Git = function (app, options) {
return Repository.call(this, app, options);
};
// Inherit from Repository
util.inherits(Git, Repository);
//
// ### function validate ()
// #### @keys {Array||String} The keys to check in app. (i.e. 'scripts.start')
// #### @app {Object} (optional) The app object to check. if not given this.app will be used.
// #### @return {Error|| undefined} undefined if valid, Error if not
// Checks Application configuration attributes used by this repository type
//
Git.prototype.validate = function (keys, app) {
keys = keys || [];
return Repository.prototype.validate.call(this, keys.concat('repository.url'), app);
}
//
// ### function init (callback)
// #### @callback {function} Continuation to respond to.
// Initializes the git repository associated with the application
// and this instance. Checks out the specific branch in `app.repository.branch`
// if it exists. Initializes and updates git submodules. Initializes npm dependencies
// through calling `self.installDependencies`.
//
Git.prototype.init = function (callback) {
var self = this;
function installNpm () {
self.installDependencies(function (err, packages) {
return err ? callback(err) : callback(null, true, packages);
});
}
haibu.emit('git:clone', 'info', {
type: 'git',
user: self.app.user,
name: self.app.name,
from: self.app.repository.url,
to: self.appDir
});
// TODO (indexzero): Validate the security of this regular expression since it is on the command line.
var commands, match = self.app.repository.url.match(/\/([\w\-_\.]+)\.git$/);
if (!match) {
var err = new Error('Invalid git url: ' + self.app.repository.url);
err.blame = {
type: 'user',
message: 'Repository configuration present but provides invalid Git URL'
};
return callback(err);
}
// Set the home directory of the app managed by this instance.
self._setHome(match[1]);
// Setup the git commands to be executed
commands = [
'cd ' + self.appDir + ' && git clone ' + self.app.repository.url,
'cd ' + path.join(self.appDir, match[1])
];
if (self.app.repository.branch) {
commands[1] += ' && git checkout ' + self.app.repository.branch;
}
commands[1] += ' && git submodule update --init --recursive';
function executeUntilEmpty() {
var command = commands.shift();
// Remark: Using 'exec' here because chaining 'spawn' is not effective here
exec(command, function (err, stdout, stderr) {
if (err !== null) {
haibu.emit('git:clone', 'error', {
url: self.app.repository.url,
dir: self.appDir,
app: self.app.name,
error: err.message,
command: command,
type: 'git',
user: self.app.user
});
callback(err, false);
}
else if (commands.length > 0) {
executeUntilEmpty();
}
else if (commands.length === 0) {
installNpm();
}
});
}
executeUntilEmpty();
};
var _spawnOptions = haibu.getSpawnOptions;
haibu.getSpawnOptions = function getSpawnOptions(target) {
var options = _spawnOptions.apply(this, arguments);
options.env.USER = haibu.config.get('useraccounts:prefix') + target.user;
options.env.HOME = path.join(haibu.config.get('directories:apps'), target.user);
options.env.TEMP = path.join(options.env.HOME, target.name, '.tmp');
options.env.TMPDIR = path.join(options.env.HOME, target.name, '.tmp');
return options;
}
//
// ### function update (callback)
// #### @callback {function} Continuation to respond to.
// Updates the git repository associated with the application
// and this instance. Checks out the specific branch in `app.repository.branch`
// if it exists. Updates git submodules. Updates npm dependencies
// through calling `self.installDependencies`.
//
Git.prototype.update = function (callback) {
var self = this;
function installNpm () {
self.installDependencies(function (err, packages) {
return err ? callback(err) : callback(null, true, packages);
});
}
haibu.emit('git:pull', 'info', {
type: 'git',
user: self.app.user,
name: self.app.name,
from: self.app.repository.url,
to: self.appDir
});
// TODO (indexzero): Validate the security of this regular expression since it is on the command line.
var commands, match = self.app.repository.url.match(/\/([\w\-_\.]+)\.git$/);
if (!match) {
var err = new Error('Invalid git url: ' + self.app.repository.url);
err.blame = {
type: 'user',
message: 'Repository configuration present but provides invalid Git URL'
};
return callback(err);
}
// Set the home directory of the app managed by this instance.
self._setHome(match[1]);
// Setup the git commands to be executed
if (self.app.repository.branch) {
commands = [
'cd ' + path.join(self.appDir, match[1]) + ' && git pull origin ' + self.app.repository.branch,
'cd ' + path.join(self.appDir, match[1])
];
} else {
commands = [
'cd ' + path.join(self.appDir, match[1]) + ' && git pull',
'cd ' + path.join(self.appDir, match[1])
];
}
commands[1] += ' && git submodule update --recursive';
var options = haibu.getSpawnOptions(self.app);
options.cwd = path.join(self.appDir, match[1]);
function executeUntilEmpty() {
var command = commands.shift();
// Remark: Using 'exec' here because chaining 'spawn' is not effective her
exec(command, options, function (err, stdout, stderr) {
if (err !== null) {
haibu.emit('git:pull', 'error', {
url: self.app.repository.url,
dir: self.appDir,
app: self.app.name,
error: err.message,
command: command,
type: 'git',
user: self.app.user
});
callback(err, false);
}
else if (commands.length > 0) {
executeUntilEmpty();
}
else if (commands.length === 0) {
installNpm();
}
});
}
executeUntilEmpty();
}; | lib/haibu/repositories/git.js | /*
* git.js: Implementation of the repository pattern for remote git repositories.
*
* (C) 2010, Nodejitsu Inc.
*
*/
var util = require('util'),
path = require('path'),
exec = require('child_process').exec,
haibu = require('../../haibu'),
Repository = require('./repository').Repository;
//
// ### function Git (app, options)
// #### @app {App} Application manifest to wrap
// #### @options {Object} Options for this instance
// Constructor function for the Git repository object. Responsible
// for cloning, and updating Git repositories.
//
var Git = exports.Git = function (app, options) {
return Repository.call(this, app, options);
};
// Inherit from Repository
util.inherits(Git, Repository);
//
// ### function validate ()
// #### @keys {Array||String} The keys to check in app. (i.e. 'scripts.start')
// #### @app {Object} (optional) The app object to check. if not given this.app will be used.
// #### @return {Error|| undefined} undefined if valid, Error if not
// Checks Application configuration attributes used by this repository type
//
Git.prototype.validate = function (keys, app) {
keys = keys || [];
return Repository.prototype.validate.call(this, keys.concat('repository.url'), app);
}
//
// ### function init (callback)
// #### @callback {function} Continuation to respond to.
// Initializes the git repository associated with the application
// and this instance. Checks out the specific branch in `app.repository.branch`
// if it exists. Initializes and updates git submodules. Initializes npm dependencies
// through calling `self.installDependencies`.
//
Git.prototype.init = function (callback) {
var self = this;
function installNpm () {
self.installDependencies(function (err, packages) {
return err ? callback(err) : callback(null, true, packages);
});
}
function installRequirementsPython() {
self.installDependenciesPython(function (err, packages) {
return err ? callback(err) : callback(null, true, packages);
});
}
function installRequirements () {
server = self.app.scripts.start;
if (server.slice(server.lastIndexOf("."),server.length) === ".py") {
installRequirementsPython();
} else {
installNpm();
}
}
haibu.emit('git:clone', 'info', {
type: 'git',
user: self.app.user,
name: self.app.name,
from: self.app.repository.url,
to: self.appDir
});
// TODO (indexzero): Validate the security of this regular expression since it is on the command line.
var commands, match = self.app.repository.url.match(/\/([\w\-_\.]+)\.git$/);
if (!match) {
var err = new Error('Invalid git url: ' + self.app.repository.url);
err.blame = {
type: 'user',
message: 'Repository configuration present but provides invalid Git URL'
};
return callback(err);
}
// Set the home directory of the app managed by this instance.
self._setHome(match[1]);
// Setup the git commands to be executed
commands = [
'cd ' + self.appDir + ' && git clone ' + self.app.repository.url,
'cd ' + path.join(self.appDir, match[1])
];
if (self.app.repository.branch) {
commands[1] += ' && git checkout ' + self.app.repository.branch;
}
commands[1] += ' && git submodule update --init --recursive';
function executeUntilEmpty() {
var command = commands.shift();
// Remark: Using 'exec' here because chaining 'spawn' is not effective here
exec(command, function (err, stdout, stderr) {
if (err !== null) {
haibu.emit('git:clone', 'error', {
url: self.app.repository.url,
dir: self.appDir,
app: self.app.name,
error: err.message,
command: command,
type: 'git',
user: self.app.user
});
callback(err, false);
}
else if (commands.length > 0) {
executeUntilEmpty();
}
else if (commands.length === 0) {
installRequirements();
}
});
}
executeUntilEmpty();
};
var _spawnOptions = haibu.getSpawnOptions;
haibu.getSpawnOptions = function getSpawnOptions(target) {
var options = _spawnOptions.apply(this, arguments);
options.env.USER = haibu.config.get('useraccounts:prefix') + target.user;
options.env.HOME = path.join(haibu.config.get('directories:apps'), target.user);
options.env.TEMP = path.join(options.env.HOME, target.name, '.tmp');
options.env.TMPDIR = path.join(options.env.HOME, target.name, '.tmp');
return options;
}
//
// ### function update (callback)
// #### @callback {function} Continuation to respond to.
// Updates the git repository associated with the application
// and this instance. Checks out the specific branch in `app.repository.branch`
// if it exists. Updates git submodules. Updates npm dependencies
// through calling `self.installDependencies` (or python requirements through
// `self.installDependenciesPython` when the start script ends in `.py`).
//
Git.prototype.update = function (callback) {
  var self = this;

  // Install node.js dependencies and hand the result to the caller.
  function installNpm () {
    self.installDependencies(function (err, packages) {
      return err ? callback(err) : callback(null, true, packages);
    });
  }

  // Install python dependencies and hand the result to the caller.
  function installRequirementsPython() {
    self.installDependenciesPython(function (err, packages) {
      return err ? callback(err) : callback(null, true, packages);
    });
  }

  // Choose the dependency installer based on the app's start script:
  // a `.py` extension means a python app, anything else is node.js.
  function installRequirements () {
    // Declared with `var`: this previously leaked as an implicit global.
    var server = self.app.scripts.start;
    if (server.slice(server.lastIndexOf("."), server.length) === ".py") {
      installRequirementsPython();
    } else {
      installNpm();
    }
  }

  haibu.emit('git:pull', 'info', {
    type: 'git',
    user: self.app.user,
    name: self.app.name,
    from: self.app.repository.url,
    to: self.appDir
  });

  // TODO (indexzero): Validate the security of this regular expression since it is on the command line.
  var commands, match = self.app.repository.url.match(/\/([\w\-_\.]+)\.git$/);
  if (!match) {
    var err = new Error('Invalid git url: ' + self.app.repository.url);
    err.blame = {
      type: 'user',
      message: 'Repository configuration present but provides invalid Git URL'
    };
    return callback(err);
  }

  // Set the home directory of the app managed by this instance.
  self._setHome(match[1]);

  // Setup the git commands to be executed: first a pull (optionally from a
  // specific branch), then a recursive submodule update.
  if (self.app.repository.branch) {
    commands = [
      'cd ' + path.join(self.appDir, match[1]) + ' && git pull origin ' + self.app.repository.branch,
      'cd ' + path.join(self.appDir, match[1])
    ];
  } else {
    commands = [
      'cd ' + path.join(self.appDir, match[1]) + ' && git pull',
      'cd ' + path.join(self.appDir, match[1])
    ];
  }
  commands[1] += ' && git submodule update --recursive';

  var options = haibu.getSpawnOptions(self.app);
  options.cwd = path.join(self.appDir, match[1]);

  // Run the queued shell commands in order; on the first error report it
  // through `haibu.emit` and the callback, otherwise install dependencies
  // once every command has succeeded.
  function executeUntilEmpty() {
    var command = commands.shift();

    // Remark: Using 'exec' here because chaining 'spawn' is not effective here
    exec(command, options, function (err, stdout, stderr) {
      if (err !== null) {
        haibu.emit('git:pull', 'error', {
          url: self.app.repository.url,
          dir: self.appDir,
          app: self.app.name,
          error: err.message,
          command: command,
          type: 'git',
          user: self.app.user
        });
        callback(err, false);
      }
      else if (commands.length > 0) {
        executeUntilEmpty();
      }
      else if (commands.length === 0) {
        installRequirements();
      }
    });
  }
  executeUntilEmpty();
};
| lib/haibu/repositories/git.js | code cleaning | <ide><path>ib/haibu/repositories/git.js
<ide> self.installDependencies(function (err, packages) {
<ide> return err ? callback(err) : callback(null, true, packages);
<ide> });
<del> }
<del>
<del> function installRequirementsPython() {
<del> self.installDependenciesPython(function (err, packages) {
<del> return err ? callback(err) : callback(null, true, packages);
<del> });
<del> }
<del>
<del> function installRequirements () {
<del> server = self.app.scripts.start;
<del> if (server.slice(server.lastIndexOf("."),server.length) === ".py") {
<del> installRequirementsPython();
<del> } else {
<del> installNpm();
<del> }
<ide> }
<ide>
<ide> haibu.emit('git:clone', 'info', {
<ide> executeUntilEmpty();
<ide> }
<ide> else if (commands.length === 0) {
<del> installRequirements();
<add> installNpm();
<ide> }
<ide> });
<ide> }
<ide> self.installDependencies(function (err, packages) {
<ide> return err ? callback(err) : callback(null, true, packages);
<ide> });
<del> }
<del>
<del> function installRequirementsPython() {
<del> self.installDependenciesPython(function (err, packages) {
<del> return err ? callback(err) : callback(null, true, packages);
<del> });
<del> }
<del>
<del> function installRequirements () {
<del> server = self.app.scripts.start;
<del> if (server.slice(server.lastIndexOf("."),server.length) === ".py") {
<del> installRequirementsPython();
<del> } else {
<del> installNpm();
<del> }
<ide> }
<ide>
<ide> haibu.emit('git:pull', 'info', {
<ide> var command = commands.shift();
<ide>
<ide> // Remark: Using 'exec' here because chaining 'spawn' is not effective her
<del> console.log(command);
<ide> exec(command, options, function (err, stdout, stderr) {
<ide> if (err !== null) {
<ide> haibu.emit('git:pull', 'error', {
<ide> executeUntilEmpty();
<ide> }
<ide> else if (commands.length === 0) {
<del> installRequirements();
<add> installNpm();
<ide> }
<ide> });
<ide> } |
|
Java | bsd-3-clause | a8acce1d816b0540b972e8ff7780350ca718829b | 0 | depryf/naaccr-xml,depryf/naaccr-xml | /*
* Copyright (C) 2015 Information Management Services, Inc.
*/
package com.imsweb.naaccrxml;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.Reader;
import java.io.Writer;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.regex.Pattern;
import org.junit.Assert;
import org.junit.Test;
import com.imsweb.naaccrxml.entity.dictionary.NaaccrDictionary;
import com.imsweb.naaccrxml.entity.dictionary.NaaccrDictionaryItem;
import static org.junit.Assert.fail;
public class NaaccrXmlDictionaryUtilsTest {
@Test
public void testInternalDictionaries() throws IOException {
for (String version : NaaccrFormat.getSupportedVersions()) {
List<NaaccrDictionaryItem> items = new ArrayList<>();
// make sure internal base dictionaries are valid
try (Reader reader = new InputStreamReader(Thread.currentThread().getContextClassLoader().getResourceAsStream("naaccr-dictionary-" + version + ".xml"))) {
NaaccrDictionary dict = NaaccrXmlDictionaryUtils.readDictionary(reader);
Assert.assertTrue(version, NaaccrXmlDictionaryUtils.validateBaseDictionary(dict).isEmpty());
Assert.assertTrue(version, NaaccrXmlDictionaryUtils.BASE_DICTIONARY_URI_PATTERN.matcher(dict.getDictionaryUri()).matches());
items.addAll(dict.getItems());
}
// make sure internal default user dictionaries are valid
try (Reader reader = new InputStreamReader(Thread.currentThread().getContextClassLoader().getResourceAsStream("user-defined-naaccr-dictionary-" + version + ".xml"))) {
NaaccrDictionary dict = NaaccrXmlDictionaryUtils.readDictionary(reader);
Assert.assertTrue(version, NaaccrXmlDictionaryUtils.validateUserDictionary(dict).isEmpty());
Assert.assertTrue(version, NaaccrXmlDictionaryUtils.DEFAULT_USER_DICTIONARY_URI_PATTERN.matcher(dict.getDictionaryUri()).matches());
items.addAll(dict.getItems());
}
// make sure the combination of fields doesn't leave any gaps
items.sort(Comparator.comparing(NaaccrDictionaryItem::getStartColumn));
for (int i = 0; i < items.size() - 1; i++)
if (items.get(i).getStartColumn() + items.get(i).getLength() != items.get(i + 1).getStartColumn())
fail("Found a gap after item " + items.get(i).getNaaccrId());
// make sure IDs are no longer than 50 characters (this will be enforced by the standard in a future version)
for (NaaccrDictionaryItem item : items)
if (item.getNaaccrId().length() > 50)
fail("Found item with ID too long: " + item.getNaaccrId());
}
// clear the caches, force other tests to reload them again
NaaccrXmlDictionaryUtils.clearCachedDictionaries();
}
@Test
public void testReadDictionary() throws IOException {
// get a base dictionary
NaaccrDictionary baseDictionary1 = NaaccrXmlDictionaryUtils.getBaseDictionaryByVersion(NaaccrFormat.NAACCR_VERSION_140);
NaaccrDictionary baseDictionary2 = NaaccrXmlDictionaryUtils.getBaseDictionaryByUri(baseDictionary1.getDictionaryUri());
Assert.assertEquals(baseDictionary1.getDictionaryUri(), baseDictionary2.getDictionaryUri());
Assert.assertEquals(baseDictionary1.getNaaccrVersion(), baseDictionary2.getNaaccrVersion());
Assert.assertEquals(baseDictionary1.getSpecificationVersion(), baseDictionary2.getSpecificationVersion());
Assert.assertEquals(baseDictionary1.getItems().size(), baseDictionary2.getItems().size());
Assert.assertEquals(NaaccrXmlUtils.CURRENT_SPECIFICATION_VERSION, baseDictionary1.getSpecificationVersion());
// get a default user dictionary
NaaccrDictionary defaultUserDictionary1 = NaaccrXmlDictionaryUtils.getDefaultUserDictionaryByVersion(NaaccrFormat.NAACCR_VERSION_140);
NaaccrDictionary defaultUserDictionary2 = NaaccrXmlDictionaryUtils.getDefaultUserDictionaryByUri(baseDictionary1.getDictionaryUri());
Assert.assertEquals(defaultUserDictionary1.getDictionaryUri(), defaultUserDictionary2.getDictionaryUri());
Assert.assertEquals(defaultUserDictionary1.getNaaccrVersion(), defaultUserDictionary2.getNaaccrVersion());
Assert.assertEquals(defaultUserDictionary1.getItems().size(), defaultUserDictionary2.getItems().size());
Assert.assertEquals(NaaccrXmlUtils.CURRENT_SPECIFICATION_VERSION, defaultUserDictionary1.getSpecificationVersion());
// read a provided user dictionary
try (Reader reader = new InputStreamReader(Thread.currentThread().getContextClassLoader().getResourceAsStream("data/dictionary/testing-user-dictionary-140.xml"))) {
NaaccrDictionary defaultUserDictionary = NaaccrXmlDictionaryUtils.readDictionary(reader);
Assert.assertEquals(SpecificationVersion.SPEC_1_0, defaultUserDictionary.getSpecificationVersion());
Assert.assertEquals(4, defaultUserDictionary.getItems().size());
}
// try to read a user dictionary with an error (bad start column)
boolean exceptionAppend = false;
try (Reader reader = new InputStreamReader(Thread.currentThread().getContextClassLoader().getResourceAsStream("data/dictionary/testing-user-dictionary-140-bad1.xml"))) {
NaaccrXmlDictionaryUtils.readDictionary(reader);
}
catch (IOException e) {
exceptionAppend = true;
}
Assert.assertTrue(exceptionAppend);
// try to read a user dictionary with another error (NPCR item definition redefines the NAACCR number)
exceptionAppend = false;
try (Reader reader = new InputStreamReader(Thread.currentThread().getContextClassLoader().getResourceAsStream("data/dictionary/testing-user-dictionary-140-bad2.xml"))) {
NaaccrXmlDictionaryUtils.readDictionary(reader);
}
catch (IOException e) {
exceptionAppend = true;
}
Assert.assertTrue(exceptionAppend);
// this one defines an item in a bad location, but it doesn't define a NAACCR version, so no exception, but if a NAACCR version is provided, the validation should fail
try (Reader reader = new InputStreamReader(Thread.currentThread().getContextClassLoader().getResourceAsStream("data/dictionary/testing-user-dictionary-140-bad3.xml"))) {
NaaccrDictionary dict = NaaccrXmlDictionaryUtils.readDictionary(reader);
Assert.assertTrue(NaaccrXmlDictionaryUtils.validateUserDictionary(dict).isEmpty());
Assert.assertFalse(NaaccrXmlDictionaryUtils.validateUserDictionary(dict, "140").isEmpty());
Assert.assertFalse(NaaccrXmlDictionaryUtils.validateUserDictionary(dict, "160").isEmpty());
}
// try to read a user dictionary with another error (missing dictionaryUri attribute)
exceptionAppend = false;
try (Reader reader = new InputStreamReader(Thread.currentThread().getContextClassLoader().getResourceAsStream("data/dictionary/testing-user-dictionary-140-bad4.xml"))) {
NaaccrXmlDictionaryUtils.readDictionary(reader);
}
catch (IOException e) {
exceptionAppend = true;
}
Assert.assertTrue(exceptionAppend);
}
@Test
public void testWriteDictionary() throws IOException {
NaaccrDictionary dict = new NaaccrDictionary();
dict.setNaaccrVersion("140");
dict.setDictionaryUri("whatever");
dict.setDescription("Another whatever");
NaaccrDictionaryItem item = new NaaccrDictionaryItem();
item.setNaaccrId("myVariable");
item.setParentXmlElement(NaaccrXmlUtils.NAACCR_XML_TAG_TUMOR);
item.setNaaccrNum(10000);
item.setRecordTypes("A,M,C,I");
item.setDataType(NaaccrXmlDictionaryUtils.NAACCR_DATA_TYPE_NUMERIC);
item.setLength(2);
item.setStartColumn(2340);
item.setNaaccrName("My Variable");
item.setSourceOfStandard("ME");
item.setPadding(NaaccrXmlDictionaryUtils.NAACCR_PADDING_RIGHT_BLANK);
item.setTrim(NaaccrXmlDictionaryUtils.NAACCR_TRIM_NONE);
item.setRegexValidation("0[0-8]");
dict.addItem(item);
// write using a writer
File file = TestingUtils.createFile("dict-write-test.xml", false);
try (Writer writer = new FileWriter(file)) {
NaaccrXmlDictionaryUtils.writeDictionary(dict, writer);
}
NaaccrDictionary newDict = NaaccrXmlDictionaryUtils.readDictionary(file);
Assert.assertEquals("140", newDict.getNaaccrVersion());
Assert.assertEquals("whatever", newDict.getDictionaryUri());
Assert.assertEquals("Another whatever", newDict.getDescription());
Assert.assertEquals(1, newDict.getItems().size());
Assert.assertNotNull(newDict.getItemByNaaccrId("myVariable"));
Assert.assertNotNull(newDict.getItemByNaaccrNum(10000));
// write using a file
NaaccrXmlDictionaryUtils.writeDictionary(dict, file);
newDict = NaaccrXmlDictionaryUtils.readDictionary(file);
Assert.assertEquals("140", newDict.getNaaccrVersion());
Assert.assertEquals("whatever", newDict.getDictionaryUri());
Assert.assertEquals("Another whatever", newDict.getDescription());
Assert.assertEquals(1, newDict.getItems().size());
Assert.assertNotNull(newDict.getItemByNaaccrId("myVariable"));
Assert.assertNotNull(newDict.getItemByNaaccrNum(10000));
}
@Test
public void testValidateUserDictionary() {
NaaccrDictionary dict = new NaaccrDictionary();
dict.setNaaccrVersion("160");
dict.setDictionaryUri("whatever");
dict.setSpecificationVersion(NaaccrXmlUtils.CURRENT_SPECIFICATION_VERSION);
// validate good dictionary
NaaccrDictionaryItem item = new NaaccrDictionaryItem();
item.setNaaccrId("myVariable");
item.setNaaccrName("My Variable");
item.setParentXmlElement(NaaccrXmlUtils.NAACCR_XML_TAG_PATIENT);
item.setNaaccrNum(10000);
item.setLength(1);
dict.setItems(Collections.singletonList(item));
Assert.assertTrue(NaaccrXmlDictionaryUtils.validateUserDictionary(dict).isEmpty());
// this one re-defines the NPCR field but with a different number, which is not allowed
item = new NaaccrDictionaryItem();
item.setNaaccrId("npcrSpecificField");
item.setNaaccrName("NPCR Specific Field");
item.setParentXmlElement(NaaccrXmlUtils.NAACCR_XML_TAG_TUMOR);
item.setNaaccrNum(10000);
item.setLength(75);
item.setRecordTypes("A,M,C,I");
dict.setItems(Collections.singletonList(item));
Assert.assertNotNull(NaaccrXmlDictionaryUtils.validateUserDictionary(dict));
// this one defines an item that has the same number as the base
item = new NaaccrDictionaryItem();
item.setNaaccrId("myVariable");
item.setNaaccrName("My Variable");
item.setParentXmlElement(NaaccrXmlUtils.NAACCR_XML_TAG_TUMOR);
item.setNaaccrNum(240);
item.setLength(1);
dict.setItems(Collections.singletonList(item));
Assert.assertNotNull(NaaccrXmlDictionaryUtils.validateUserDictionary(dict));
item.setNaaccrNum(999999);
Assert.assertTrue(NaaccrXmlDictionaryUtils.validateUserDictionary(dict).isEmpty());
// this one defines an item that is too long
item = new NaaccrDictionaryItem();
item.setNaaccrId("myVariableWithSomeVeryVeryVeryVeryVeryVeryVeryVeryVeryVeryVeryVeryVeryVeryVeryVeryVeryVeryVeryVeryVeryLongId!");
item.setNaaccrName("My Variable");
item.setParentXmlElement(NaaccrXmlUtils.NAACCR_XML_TAG_TUMOR);
item.setNaaccrNum(10000);
item.setLength(1);
dict.setItems(Collections.singletonList(item));
Assert.assertNotNull(NaaccrXmlDictionaryUtils.validateUserDictionary(dict));
item.setNaaccrId("myVariable");
Assert.assertTrue(NaaccrXmlDictionaryUtils.validateUserDictionary(dict).isEmpty());
}
@Test
public void testValidateDictionaries() {
// base dictionary by itself is valid
NaaccrDictionary baseDictionary = NaaccrXmlDictionaryUtils.getBaseDictionaryByVersion("160");
Assert.assertTrue(NaaccrXmlDictionaryUtils.validateDictionaries(baseDictionary, Collections.emptyList()).isEmpty());
// add two valid user-defined dictionaries
List<NaaccrDictionary> userDictionaries = new ArrayList<>();
NaaccrDictionary userDictionary1 = new NaaccrDictionary();
userDictionary1.setNaaccrVersion("160");
userDictionary1.setDictionaryUri("whatever1");
userDictionary1.setSpecificationVersion(NaaccrXmlUtils.CURRENT_SPECIFICATION_VERSION);
NaaccrDictionaryItem item1 = new NaaccrDictionaryItem();
item1.setNaaccrId("myVariable1");
item1.setNaaccrName("My Variable1");
item1.setParentXmlElement(NaaccrXmlUtils.NAACCR_XML_TAG_PATIENT);
item1.setNaaccrNum(10000);
item1.setLength(1);
userDictionary1.setItems(Collections.singletonList(item1));
userDictionaries.add(userDictionary1);
NaaccrDictionary userDictionary2 = new NaaccrDictionary();
userDictionary2.setDictionaryUri("whatever2");
userDictionary2.setSpecificationVersion(NaaccrXmlUtils.CURRENT_SPECIFICATION_VERSION);
NaaccrDictionaryItem item2 = new NaaccrDictionaryItem();
item2.setNaaccrId("myVariable2");
item2.setNaaccrName("My Variable2");
item2.setParentXmlElement(NaaccrXmlUtils.NAACCR_XML_TAG_PATIENT);
item2.setNaaccrNum(10001);
item2.setLength(1);
userDictionary2.setItems(Collections.singletonList(item2));
userDictionaries.add(userDictionary2);
Assert.assertTrue(NaaccrXmlDictionaryUtils.validateDictionaries(baseDictionary, userDictionaries).isEmpty());
// NAACCR ID repeats a base one
item2.setNaaccrId("vitalStatus");
Assert.assertFalse(NaaccrXmlDictionaryUtils.validateDictionaries(baseDictionary, userDictionaries).isEmpty());
item2.setNaaccrId("myVariable2");
// NAACCR ID repeats a user-defined ones
item2.setNaaccrId("myVariable1");
Assert.assertFalse(NaaccrXmlDictionaryUtils.validateDictionaries(baseDictionary, userDictionaries).isEmpty());
item2.setNaaccrId("myVariable2");
// NAACCR number repeats a base one
item2.setNaaccrNum(10);
Assert.assertFalse(NaaccrXmlDictionaryUtils.validateDictionaries(baseDictionary, userDictionaries).isEmpty());
item2.setNaaccrNum(10001);
// NAACCR number repeats a user-defined one
item2.setNaaccrNum(10000);
Assert.assertFalse(NaaccrXmlDictionaryUtils.validateDictionaries(baseDictionary, userDictionaries).isEmpty());
item2.setNaaccrNum(10001);
// items overlap
item1.setStartColumn(2340);
item2.setStartColumn(2340);
Assert.assertFalse(NaaccrXmlDictionaryUtils.validateDictionaries(baseDictionary, userDictionaries).isEmpty());
}
@Test
public void testValidationRegex() {
// "alpha": uppercase letters, A-Z, no spaces, full length needs to be filled in
Pattern pattern = NaaccrXmlDictionaryUtils.getDataTypePattern(NaaccrXmlDictionaryUtils.NAACCR_DATA_TYPE_ALPHA);
Assert.assertTrue(pattern.matcher("A").matches());
Assert.assertTrue(pattern.matcher("AVALUE").matches());
Assert.assertFalse(pattern.matcher("A VALUE").matches());
Assert.assertFalse(pattern.matcher(" A").matches());
Assert.assertFalse(pattern.matcher("A ").matches());
Assert.assertFalse(pattern.matcher("a").matches());
Assert.assertFalse(pattern.matcher("a value").matches());
Assert.assertFalse(pattern.matcher("123").matches());
Assert.assertFalse(pattern.matcher("A123").matches());
Assert.assertFalse(pattern.matcher("123A").matches());
Assert.assertFalse(pattern.matcher("A!").matches());
// "digits": digits, 0-9, no spaces, full length needs to be filled in
pattern = NaaccrXmlDictionaryUtils.getDataTypePattern(NaaccrXmlDictionaryUtils.NAACCR_DATA_TYPE_DIGITS);
Assert.assertTrue(pattern.matcher("1").matches());
Assert.assertTrue(pattern.matcher("123").matches());
Assert.assertFalse(pattern.matcher("12 3").matches());
Assert.assertFalse(pattern.matcher(" 1").matches());
Assert.assertFalse(pattern.matcher("1 ").matches());
Assert.assertFalse(pattern.matcher("a value").matches());
Assert.assertFalse(pattern.matcher("1A23").matches());
Assert.assertFalse(pattern.matcher("A123").matches());
Assert.assertFalse(pattern.matcher("123A").matches());
Assert.assertFalse(pattern.matcher("1!").matches());
// "mixed": uppercase letters or digits, A-Z,0-9, no spaces, full length needs to be filled in
pattern = NaaccrXmlDictionaryUtils.getDataTypePattern(NaaccrXmlDictionaryUtils.NAACCR_DATA_TYPE_MIXED);
Assert.assertTrue(pattern.matcher("A").matches());
Assert.assertTrue(pattern.matcher("AVALUE").matches());
Assert.assertFalse(pattern.matcher("A VALUE").matches());
Assert.assertFalse(pattern.matcher(" A").matches());
Assert.assertFalse(pattern.matcher("A ").matches());
Assert.assertFalse(pattern.matcher("a").matches());
Assert.assertFalse(pattern.matcher("a value").matches());
Assert.assertTrue(pattern.matcher("123").matches());
Assert.assertTrue(pattern.matcher("A123").matches());
Assert.assertTrue(pattern.matcher("123A").matches());
Assert.assertFalse(pattern.matcher("A!").matches());
// "numeric": digits, 0-9 with optional period, no spaces but value can be smaller than the length
pattern = NaaccrXmlDictionaryUtils.getDataTypePattern(NaaccrXmlDictionaryUtils.NAACCR_DATA_TYPE_NUMERIC);
Assert.assertTrue(pattern.matcher("1").matches());
Assert.assertTrue(pattern.matcher("123").matches());
Assert.assertFalse(pattern.matcher("12 3").matches());
Assert.assertFalse(pattern.matcher(" 1").matches());
Assert.assertFalse(pattern.matcher("1 ").matches());
Assert.assertFalse(pattern.matcher("a value").matches());
Assert.assertFalse(pattern.matcher("1A23").matches());
Assert.assertFalse(pattern.matcher("A123").matches());
Assert.assertFalse(pattern.matcher("123A").matches());
Assert.assertFalse(pattern.matcher("1!").matches());
Assert.assertTrue(pattern.matcher("1.0").matches());
Assert.assertTrue(pattern.matcher("0.123").matches());
Assert.assertFalse(pattern.matcher(".123").matches());
Assert.assertFalse(pattern.matcher("1.").matches());
// "text": no checking on this value
pattern = NaaccrXmlDictionaryUtils.getDataTypePattern(NaaccrXmlDictionaryUtils.NAACCR_DATA_TYPE_TEXT);
Assert.assertTrue(pattern.matcher("A").matches());
Assert.assertTrue(pattern.matcher("AVALUE").matches());
Assert.assertTrue(pattern.matcher("A VALUE").matches());
Assert.assertTrue(pattern.matcher(" A").matches());
Assert.assertTrue(pattern.matcher("A ").matches());
Assert.assertTrue(pattern.matcher("a").matches());
Assert.assertTrue(pattern.matcher("a value").matches());
Assert.assertTrue(pattern.matcher("123").matches());
Assert.assertTrue(pattern.matcher("A123").matches());
Assert.assertTrue(pattern.matcher("123A").matches());
Assert.assertTrue(pattern.matcher("A!").matches());
// "date": digits, YYYY or YYYYMM or YYYYMMDD
pattern = NaaccrXmlDictionaryUtils.getDataTypePattern(NaaccrXmlDictionaryUtils.NAACCR_DATA_TYPE_DATE);
Assert.assertTrue(pattern.matcher("20100615").matches());
Assert.assertTrue(pattern.matcher("201006").matches());
Assert.assertTrue(pattern.matcher("2010").matches());
Assert.assertFalse(pattern.matcher("201006 ").matches());
Assert.assertFalse(pattern.matcher("2010 15").matches());
Assert.assertFalse(pattern.matcher(" 0615").matches());
Assert.assertFalse(pattern.matcher("0615").matches());
Assert.assertFalse(pattern.matcher("15").matches());
Assert.assertFalse(pattern.matcher("A").matches());
Assert.assertFalse(pattern.matcher("20100615!").matches());
Assert.assertFalse(pattern.matcher("17000615").matches());
Assert.assertFalse(pattern.matcher("20101315").matches());
Assert.assertFalse(pattern.matcher("20100632").matches());
}
@Test
public void testCreateNaaccrIdFromItemName() {
Assert.assertEquals("", NaaccrXmlDictionaryUtils.createNaaccrIdFromItemName(""));
Assert.assertEquals("testTestTest", NaaccrXmlDictionaryUtils.createNaaccrIdFromItemName("Test Test Test"));
Assert.assertEquals("testSomeThingElse123", NaaccrXmlDictionaryUtils.createNaaccrIdFromItemName("test: (ignored); some_thing # else --123!!!"));
Assert.assertEquals("phase1NumberOfFractions", NaaccrXmlDictionaryUtils.createNaaccrIdFromItemName("Phase I Number of Fractions"));
Assert.assertEquals("lnHeadAndNeckLevels6To7", NaaccrXmlDictionaryUtils.createNaaccrIdFromItemName("LN Head and Neck Levels VI-VII"));
}
@Test
public void testGetMergedDictionaries() {
Assert.assertNotNull(NaaccrXmlDictionaryUtils.getMergedDictionaries(NaaccrFormat.NAACCR_VERSION_160));
}
@Test
public void testStandardDictionaries() throws IOException {
for (String version : NaaccrFormat.getSupportedVersions()) {
Path path1 = Paths.get(TestingUtils.getWorkingDirectory() + "/src/main/resources/naaccr-dictionary-" + version + ".xml");
Path path2 = Paths.get("build/tmp-dictionary-" + version + ".xml");
NaaccrXmlDictionaryUtils.writeDictionary(NaaccrXmlDictionaryUtils.getBaseDictionaryByVersion(version), path2.toFile());
if (!TestingUtils.readFileAsOneString(path1.toFile()).replace("\r", "").equals(TestingUtils.readFileAsOneString(path2.toFile()).replace("\r", "")))
Assert.fail("Dictionary for version " + version + " needs to be re-created, it contains differences from what would be created by the library!");
path1 = Paths.get(TestingUtils.getWorkingDirectory() + "/src/main/resources/user-defined-naaccr-dictionary-" + version + ".xml");
path2 = Paths.get("build/tmp-dictionary-" + version + ".xml");
NaaccrXmlDictionaryUtils.writeDictionary(NaaccrXmlDictionaryUtils.getDefaultUserDictionaryByVersion(version), path2.toFile());
if (!TestingUtils.readFileAsOneString(path1.toFile()).replace("\r", "").equals(TestingUtils.readFileAsOneString(path2.toFile()).replace("\r", "")))
Assert.fail("User dictionary for version " + version + " needs to be re-created, it contains differences from what would be created by the library!");
}
}
}
| src/test/java/com/imsweb/naaccrxml/NaaccrXmlDictionaryUtilsTest.java | /*
* Copyright (C) 2015 Information Management Services, Inc.
*/
package com.imsweb.naaccrxml;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.Reader;
import java.io.Writer;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.regex.Pattern;
import org.junit.Assert;
import org.junit.Test;
import com.imsweb.naaccrxml.entity.dictionary.NaaccrDictionary;
import com.imsweb.naaccrxml.entity.dictionary.NaaccrDictionaryItem;
import static org.junit.Assert.fail;
public class NaaccrXmlDictionaryUtilsTest {
@Test
public void testInternalDictionaries() throws IOException {
for (String version : NaaccrFormat.getSupportedVersions()) {
List<NaaccrDictionaryItem> items = new ArrayList<>();
// make sure internal base dictionaries are valid
try (Reader reader = new InputStreamReader(Thread.currentThread().getContextClassLoader().getResourceAsStream("naaccr-dictionary-" + version + ".xml"))) {
NaaccrDictionary dict = NaaccrXmlDictionaryUtils.readDictionary(reader);
Assert.assertNull(version, NaaccrXmlDictionaryUtils.validateBaseDictionary(dict));
Assert.assertTrue(version, NaaccrXmlDictionaryUtils.BASE_DICTIONARY_URI_PATTERN.matcher(dict.getDictionaryUri()).matches());
items.addAll(dict.getItems());
}
// make sure internal default user dictionaries are valid
try (Reader reader = new InputStreamReader(Thread.currentThread().getContextClassLoader().getResourceAsStream("user-defined-naaccr-dictionary-" + version + ".xml"))) {
NaaccrDictionary dict = NaaccrXmlDictionaryUtils.readDictionary(reader);
Assert.assertNull(version, NaaccrXmlDictionaryUtils.validateUserDictionary(dict));
Assert.assertTrue(version, NaaccrXmlDictionaryUtils.DEFAULT_USER_DICTIONARY_URI_PATTERN.matcher(dict.getDictionaryUri()).matches());
items.addAll(dict.getItems());
}
// make sure the combination of fields doesn't leave any gaps
items.sort(Comparator.comparing(NaaccrDictionaryItem::getStartColumn));
for (int i = 0; i < items.size() - 1; i++)
if (items.get(i).getStartColumn() + items.get(i).getLength() != items.get(i + 1).getStartColumn())
fail("Found a gap after item " + items.get(i).getNaaccrId());
// make sure IDs are no longer than 50 characters (this will be enforced by the standard in a future version)
for (NaaccrDictionaryItem item : items)
if (item.getNaaccrId().length() > 50)
fail("Found item with ID too long: " + item.getNaaccrId());
}
// clear the caches, force other tests to reload them again
NaaccrXmlDictionaryUtils.clearCachedDictionaries();
}
@Test
public void testReadDictionary() throws IOException {
// get a base dictionary
NaaccrDictionary baseDictionary1 = NaaccrXmlDictionaryUtils.getBaseDictionaryByVersion(NaaccrFormat.NAACCR_VERSION_140);
NaaccrDictionary baseDictionary2 = NaaccrXmlDictionaryUtils.getBaseDictionaryByUri(baseDictionary1.getDictionaryUri());
Assert.assertEquals(baseDictionary1.getDictionaryUri(), baseDictionary2.getDictionaryUri());
Assert.assertEquals(baseDictionary1.getNaaccrVersion(), baseDictionary2.getNaaccrVersion());
Assert.assertEquals(baseDictionary1.getSpecificationVersion(), baseDictionary2.getSpecificationVersion());
Assert.assertEquals(baseDictionary1.getItems().size(), baseDictionary2.getItems().size());
Assert.assertEquals(NaaccrXmlUtils.CURRENT_SPECIFICATION_VERSION, baseDictionary1.getSpecificationVersion());
// get a default user dictionary
NaaccrDictionary defaultUserDictionary1 = NaaccrXmlDictionaryUtils.getDefaultUserDictionaryByVersion(NaaccrFormat.NAACCR_VERSION_140);
NaaccrDictionary defaultUserDictionary2 = NaaccrXmlDictionaryUtils.getDefaultUserDictionaryByUri(baseDictionary1.getDictionaryUri());
Assert.assertEquals(defaultUserDictionary1.getDictionaryUri(), defaultUserDictionary2.getDictionaryUri());
Assert.assertEquals(defaultUserDictionary1.getNaaccrVersion(), defaultUserDictionary2.getNaaccrVersion());
Assert.assertEquals(defaultUserDictionary1.getItems().size(), defaultUserDictionary2.getItems().size());
Assert.assertEquals(NaaccrXmlUtils.CURRENT_SPECIFICATION_VERSION, defaultUserDictionary1.getSpecificationVersion());
// read a provided user dictionary
try (Reader reader = new InputStreamReader(Thread.currentThread().getContextClassLoader().getResourceAsStream("data/dictionary/testing-user-dictionary-140.xml"))) {
NaaccrDictionary defaultUserDictionary = NaaccrXmlDictionaryUtils.readDictionary(reader);
Assert.assertEquals(SpecificationVersion.SPEC_1_0, defaultUserDictionary.getSpecificationVersion());
Assert.assertEquals(4, defaultUserDictionary.getItems().size());
}
// try to read a user dictionary with an error (bad start column)
boolean exceptionAppend = false;
try (Reader reader = new InputStreamReader(Thread.currentThread().getContextClassLoader().getResourceAsStream("data/dictionary/testing-user-dictionary-140-bad1.xml"))) {
NaaccrXmlDictionaryUtils.readDictionary(reader);
}
catch (IOException e) {
exceptionAppend = true;
}
Assert.assertTrue(exceptionAppend);
// try to read a user dictionary with another error (NPCR item definition redefines the NAACCR number)
exceptionAppend = false;
try (Reader reader = new InputStreamReader(Thread.currentThread().getContextClassLoader().getResourceAsStream("data/dictionary/testing-user-dictionary-140-bad2.xml"))) {
NaaccrXmlDictionaryUtils.readDictionary(reader);
}
catch (IOException e) {
exceptionAppend = true;
}
Assert.assertTrue(exceptionAppend);
// this one defines an item in a bad location, but it doesn't define a NAACCR version, so no exception, but if a NAACCR version is provided, the validation should fail
try (Reader reader = new InputStreamReader(Thread.currentThread().getContextClassLoader().getResourceAsStream("data/dictionary/testing-user-dictionary-140-bad3.xml"))) {
NaaccrDictionary dict = NaaccrXmlDictionaryUtils.readDictionary(reader);
Assert.assertNull(NaaccrXmlDictionaryUtils.validateUserDictionary(dict));
Assert.assertNotNull(NaaccrXmlDictionaryUtils.validateUserDictionary(dict, "140"));
Assert.assertNotNull(NaaccrXmlDictionaryUtils.validateUserDictionary(dict, "160"));
}
// try to read a user dictionary with another error (missing dictionaryUri attribute)
exceptionAppend = false;
try (Reader reader = new InputStreamReader(Thread.currentThread().getContextClassLoader().getResourceAsStream("data/dictionary/testing-user-dictionary-140-bad4.xml"))) {
NaaccrXmlDictionaryUtils.readDictionary(reader);
}
catch (IOException e) {
exceptionAppend = true;
}
Assert.assertTrue(exceptionAppend);
}
@Test
public void testWriteDictionary() throws IOException {
NaaccrDictionary dict = new NaaccrDictionary();
dict.setNaaccrVersion("140");
dict.setDictionaryUri("whatever");
dict.setDescription("Another whatever");
NaaccrDictionaryItem item = new NaaccrDictionaryItem();
item.setNaaccrId("myVariable");
item.setParentXmlElement(NaaccrXmlUtils.NAACCR_XML_TAG_TUMOR);
item.setNaaccrNum(10000);
item.setRecordTypes("A,M,C,I");
item.setDataType(NaaccrXmlDictionaryUtils.NAACCR_DATA_TYPE_NUMERIC);
item.setLength(2);
item.setStartColumn(2340);
item.setNaaccrName("My Variable");
item.setSourceOfStandard("ME");
item.setPadding(NaaccrXmlDictionaryUtils.NAACCR_PADDING_RIGHT_BLANK);
item.setTrim(NaaccrXmlDictionaryUtils.NAACCR_TRIM_NONE);
item.setRegexValidation("0[0-8]");
dict.addItem(item);
// write using a writer
File file = TestingUtils.createFile("dict-write-test.xml", false);
try (Writer writer = new FileWriter(file)) {
NaaccrXmlDictionaryUtils.writeDictionary(dict, writer);
}
NaaccrDictionary newDict = NaaccrXmlDictionaryUtils.readDictionary(file);
Assert.assertEquals("140", newDict.getNaaccrVersion());
Assert.assertEquals("whatever", newDict.getDictionaryUri());
Assert.assertEquals("Another whatever", newDict.getDescription());
Assert.assertEquals(1, newDict.getItems().size());
Assert.assertNotNull(newDict.getItemByNaaccrId("myVariable"));
Assert.assertNotNull(newDict.getItemByNaaccrNum(10000));
// write using a file
NaaccrXmlDictionaryUtils.writeDictionary(dict, file);
newDict = NaaccrXmlDictionaryUtils.readDictionary(file);
Assert.assertEquals("140", newDict.getNaaccrVersion());
Assert.assertEquals("whatever", newDict.getDictionaryUri());
Assert.assertEquals("Another whatever", newDict.getDescription());
Assert.assertEquals(1, newDict.getItems().size());
Assert.assertNotNull(newDict.getItemByNaaccrId("myVariable"));
Assert.assertNotNull(newDict.getItemByNaaccrNum(10000));
}
@Test
public void testValidateUserDictionary() {
NaaccrDictionary dict = new NaaccrDictionary();
dict.setNaaccrVersion("160");
dict.setDictionaryUri("whatever");
dict.setSpecificationVersion(NaaccrXmlUtils.CURRENT_SPECIFICATION_VERSION);
// validate good dictionary
NaaccrDictionaryItem item = new NaaccrDictionaryItem();
item.setNaaccrId("myVariable");
item.setNaaccrName("My Variable");
item.setParentXmlElement(NaaccrXmlUtils.NAACCR_XML_TAG_PATIENT);
item.setNaaccrNum(10000);
item.setLength(1);
dict.setItems(Collections.singletonList(item));
Assert.assertNull(NaaccrXmlDictionaryUtils.validateUserDictionary(dict));
// this one re-defines the NPCR field but with a different number, which is not allowed
item = new NaaccrDictionaryItem();
item.setNaaccrId("npcrSpecificField");
item.setNaaccrName("NPCR Specific Field");
item.setParentXmlElement(NaaccrXmlUtils.NAACCR_XML_TAG_TUMOR);
item.setNaaccrNum(10000);
item.setLength(75);
item.setRecordTypes("A,M,C,I");
dict.setItems(Collections.singletonList(item));
Assert.assertNotNull(NaaccrXmlDictionaryUtils.validateUserDictionary(dict));
// this one defines an item that has the same number as the base
item = new NaaccrDictionaryItem();
item.setNaaccrId("myVariable");
item.setNaaccrName("My Variable");
item.setParentXmlElement(NaaccrXmlUtils.NAACCR_XML_TAG_TUMOR);
item.setNaaccrNum(240);
item.setLength(1);
dict.setItems(Collections.singletonList(item));
Assert.assertNotNull(NaaccrXmlDictionaryUtils.validateUserDictionary(dict));
item.setNaaccrNum(999999);
Assert.assertNull(NaaccrXmlDictionaryUtils.validateUserDictionary(dict));
// this one defines an item that is too long
item = new NaaccrDictionaryItem();
item.setNaaccrId("myVariableWithSomeVeryVeryVeryVeryVeryVeryVeryVeryVeryVeryVeryVeryVeryVeryVeryVeryVeryVeryVeryVeryVeryLongId!");
item.setNaaccrName("My Variable");
item.setParentXmlElement(NaaccrXmlUtils.NAACCR_XML_TAG_TUMOR);
item.setNaaccrNum(10000);
item.setLength(1);
dict.setItems(Collections.singletonList(item));
Assert.assertNotNull(NaaccrXmlDictionaryUtils.validateUserDictionary(dict));
item.setNaaccrId("myVariable");
Assert.assertNull(NaaccrXmlDictionaryUtils.validateUserDictionary(dict));
}
@Test
public void testValidateDictionaries() {
// base dictionary by itself is valid
NaaccrDictionary baseDictionary = NaaccrXmlDictionaryUtils.getBaseDictionaryByVersion("160");
Assert.assertNull(NaaccrXmlDictionaryUtils.validateDictionaries(baseDictionary, Collections.emptyList()));
// add two valid user-defined dictionaries
List<NaaccrDictionary> userDictionaries = new ArrayList<>();
NaaccrDictionary userDictionary1 = new NaaccrDictionary();
userDictionary1.setNaaccrVersion("160");
userDictionary1.setDictionaryUri("whatever1");
userDictionary1.setSpecificationVersion(NaaccrXmlUtils.CURRENT_SPECIFICATION_VERSION);
NaaccrDictionaryItem item1 = new NaaccrDictionaryItem();
item1.setNaaccrId("myVariable1");
item1.setNaaccrName("My Variable1");
item1.setParentXmlElement(NaaccrXmlUtils.NAACCR_XML_TAG_PATIENT);
item1.setNaaccrNum(10000);
item1.setLength(1);
userDictionary1.setItems(Collections.singletonList(item1));
userDictionaries.add(userDictionary1);
NaaccrDictionary userDictionary2 = new NaaccrDictionary();
userDictionary2.setDictionaryUri("whatever2");
userDictionary2.setSpecificationVersion(NaaccrXmlUtils.CURRENT_SPECIFICATION_VERSION);
NaaccrDictionaryItem item2 = new NaaccrDictionaryItem();
item2.setNaaccrId("myVariable2");
item2.setNaaccrName("My Variable2");
item2.setParentXmlElement(NaaccrXmlUtils.NAACCR_XML_TAG_PATIENT);
item2.setNaaccrNum(10001);
item2.setLength(1);
userDictionary2.setItems(Collections.singletonList(item2));
userDictionaries.add(userDictionary2);
Assert.assertNull(NaaccrXmlDictionaryUtils.validateDictionaries(baseDictionary, userDictionaries));
// NAACCR ID repeats a base one
item2.setNaaccrId("vitalStatus");
Assert.assertNotNull(NaaccrXmlDictionaryUtils.validateDictionaries(baseDictionary, userDictionaries));
item2.setNaaccrId("myVariable2");
// NAACCR ID repeats a user-defined ones
item2.setNaaccrId("myVariable1");
Assert.assertNotNull(NaaccrXmlDictionaryUtils.validateDictionaries(baseDictionary, userDictionaries));
item2.setNaaccrId("myVariable2");
// NAACCR number repeats a base one
item2.setNaaccrNum(10);
Assert.assertNotNull(NaaccrXmlDictionaryUtils.validateDictionaries(baseDictionary, userDictionaries));
item2.setNaaccrNum(10001);
// NAACCR number repeats a user-defined one
item2.setNaaccrNum(10000);
Assert.assertNotNull(NaaccrXmlDictionaryUtils.validateDictionaries(baseDictionary, userDictionaries));
item2.setNaaccrNum(10001);
// items overlap
item1.setStartColumn(2340);
item2.setStartColumn(2340);
Assert.assertNotNull(NaaccrXmlDictionaryUtils.validateDictionaries(baseDictionary, userDictionaries));
}
@Test
public void testValidationRegex() {
// "alpha": uppercase letters, A-Z, no spaces, full length needs to be filled in
Pattern pattern = NaaccrXmlDictionaryUtils.getDataTypePattern(NaaccrXmlDictionaryUtils.NAACCR_DATA_TYPE_ALPHA);
Assert.assertTrue(pattern.matcher("A").matches());
Assert.assertTrue(pattern.matcher("AVALUE").matches());
Assert.assertFalse(pattern.matcher("A VALUE").matches());
Assert.assertFalse(pattern.matcher(" A").matches());
Assert.assertFalse(pattern.matcher("A ").matches());
Assert.assertFalse(pattern.matcher("a").matches());
Assert.assertFalse(pattern.matcher("a value").matches());
Assert.assertFalse(pattern.matcher("123").matches());
Assert.assertFalse(pattern.matcher("A123").matches());
Assert.assertFalse(pattern.matcher("123A").matches());
Assert.assertFalse(pattern.matcher("A!").matches());
// "digits": digits, 0-9, no spaces, full length needs to be filled in
pattern = NaaccrXmlDictionaryUtils.getDataTypePattern(NaaccrXmlDictionaryUtils.NAACCR_DATA_TYPE_DIGITS);
Assert.assertTrue(pattern.matcher("1").matches());
Assert.assertTrue(pattern.matcher("123").matches());
Assert.assertFalse(pattern.matcher("12 3").matches());
Assert.assertFalse(pattern.matcher(" 1").matches());
Assert.assertFalse(pattern.matcher("1 ").matches());
Assert.assertFalse(pattern.matcher("a value").matches());
Assert.assertFalse(pattern.matcher("1A23").matches());
Assert.assertFalse(pattern.matcher("A123").matches());
Assert.assertFalse(pattern.matcher("123A").matches());
Assert.assertFalse(pattern.matcher("1!").matches());
// "mixed": uppercase letters or digits, A-Z,0-9, no spaces, full length needs to be filled in
pattern = NaaccrXmlDictionaryUtils.getDataTypePattern(NaaccrXmlDictionaryUtils.NAACCR_DATA_TYPE_MIXED);
Assert.assertTrue(pattern.matcher("A").matches());
Assert.assertTrue(pattern.matcher("AVALUE").matches());
Assert.assertFalse(pattern.matcher("A VALUE").matches());
Assert.assertFalse(pattern.matcher(" A").matches());
Assert.assertFalse(pattern.matcher("A ").matches());
Assert.assertFalse(pattern.matcher("a").matches());
Assert.assertFalse(pattern.matcher("a value").matches());
Assert.assertTrue(pattern.matcher("123").matches());
Assert.assertTrue(pattern.matcher("A123").matches());
Assert.assertTrue(pattern.matcher("123A").matches());
Assert.assertFalse(pattern.matcher("A!").matches());
// "numeric": digits, 0-9 with optional period, no spaces but value can be smaller than the length
pattern = NaaccrXmlDictionaryUtils.getDataTypePattern(NaaccrXmlDictionaryUtils.NAACCR_DATA_TYPE_NUMERIC);
Assert.assertTrue(pattern.matcher("1").matches());
Assert.assertTrue(pattern.matcher("123").matches());
Assert.assertFalse(pattern.matcher("12 3").matches());
Assert.assertFalse(pattern.matcher(" 1").matches());
Assert.assertFalse(pattern.matcher("1 ").matches());
Assert.assertFalse(pattern.matcher("a value").matches());
Assert.assertFalse(pattern.matcher("1A23").matches());
Assert.assertFalse(pattern.matcher("A123").matches());
Assert.assertFalse(pattern.matcher("123A").matches());
Assert.assertFalse(pattern.matcher("1!").matches());
Assert.assertTrue(pattern.matcher("1.0").matches());
Assert.assertTrue(pattern.matcher("0.123").matches());
Assert.assertFalse(pattern.matcher(".123").matches());
Assert.assertFalse(pattern.matcher("1.").matches());
// "text": no checking on this value
pattern = NaaccrXmlDictionaryUtils.getDataTypePattern(NaaccrXmlDictionaryUtils.NAACCR_DATA_TYPE_TEXT);
Assert.assertTrue(pattern.matcher("A").matches());
Assert.assertTrue(pattern.matcher("AVALUE").matches());
Assert.assertTrue(pattern.matcher("A VALUE").matches());
Assert.assertTrue(pattern.matcher(" A").matches());
Assert.assertTrue(pattern.matcher("A ").matches());
Assert.assertTrue(pattern.matcher("a").matches());
Assert.assertTrue(pattern.matcher("a value").matches());
Assert.assertTrue(pattern.matcher("123").matches());
Assert.assertTrue(pattern.matcher("A123").matches());
Assert.assertTrue(pattern.matcher("123A").matches());
Assert.assertTrue(pattern.matcher("A!").matches());
// "date": digits, YYYY or YYYYMM or YYYYMMDD
pattern = NaaccrXmlDictionaryUtils.getDataTypePattern(NaaccrXmlDictionaryUtils.NAACCR_DATA_TYPE_DATE);
Assert.assertTrue(pattern.matcher("20100615").matches());
Assert.assertTrue(pattern.matcher("201006").matches());
Assert.assertTrue(pattern.matcher("2010").matches());
Assert.assertFalse(pattern.matcher("201006 ").matches());
Assert.assertFalse(pattern.matcher("2010 15").matches());
Assert.assertFalse(pattern.matcher(" 0615").matches());
Assert.assertFalse(pattern.matcher("0615").matches());
Assert.assertFalse(pattern.matcher("15").matches());
Assert.assertFalse(pattern.matcher("A").matches());
Assert.assertFalse(pattern.matcher("20100615!").matches());
Assert.assertFalse(pattern.matcher("17000615").matches());
Assert.assertFalse(pattern.matcher("20101315").matches());
Assert.assertFalse(pattern.matcher("20100632").matches());
}
@Test
public void testCreateNaaccrIdFromItemName() {
Assert.assertEquals("", NaaccrXmlDictionaryUtils.createNaaccrIdFromItemName(""));
Assert.assertEquals("testTestTest", NaaccrXmlDictionaryUtils.createNaaccrIdFromItemName("Test Test Test"));
Assert.assertEquals("testSomeThingElse123", NaaccrXmlDictionaryUtils.createNaaccrIdFromItemName("test: (ignored); some_thing # else --123!!!"));
Assert.assertEquals("phase1NumberOfFractions", NaaccrXmlDictionaryUtils.createNaaccrIdFromItemName("Phase I Number of Fractions"));
Assert.assertEquals("lnHeadAndNeckLevels6To7", NaaccrXmlDictionaryUtils.createNaaccrIdFromItemName("LN Head and Neck Levels VI-VII"));
}
@Test
public void testGetMergedDictionaries() {
Assert.assertNotNull(NaaccrXmlDictionaryUtils.getMergedDictionaries(NaaccrFormat.NAACCR_VERSION_160));
}
@Test
public void testStandardDictionaries() throws IOException {
for (String version : NaaccrFormat.getSupportedVersions()) {
Path path1 = Paths.get(TestingUtils.getWorkingDirectory() + "/src/main/resources/naaccr-dictionary-" + version + ".xml");
Path path2 = Paths.get("build/tmp-dictionary-" + version + ".xml");
NaaccrXmlDictionaryUtils.writeDictionary(NaaccrXmlDictionaryUtils.getBaseDictionaryByVersion(version), path2.toFile());
if (!TestingUtils.readFileAsOneString(path1.toFile()).replace("\r", "").equals(TestingUtils.readFileAsOneString(path2.toFile()).replace("\r", "")))
Assert.fail("Dictionary for version " + version + " needs to be re-created, it contains differences from what would be created by the library!");
path1 = Paths.get(TestingUtils.getWorkingDirectory() + "/src/main/resources/user-defined-naaccr-dictionary-" + version + ".xml");
path2 = Paths.get("build/tmp-dictionary-" + version + ".xml");
NaaccrXmlDictionaryUtils.writeDictionary(NaaccrXmlDictionaryUtils.getDefaultUserDictionaryByVersion(version), path2.toFile());
if (!TestingUtils.readFileAsOneString(path1.toFile()).replace("\r", "").equals(TestingUtils.readFileAsOneString(path2.toFile()).replace("\r", "")))
Assert.fail("User dictionary for version " + version + " needs to be re-created, it contains differences from what would be created by the library!");
}
}
}
| Fixed unit tests (#125)
| src/test/java/com/imsweb/naaccrxml/NaaccrXmlDictionaryUtilsTest.java | Fixed unit tests (#125) | <ide><path>rc/test/java/com/imsweb/naaccrxml/NaaccrXmlDictionaryUtilsTest.java
<ide> // make sure internal base dictionaries are valid
<ide> try (Reader reader = new InputStreamReader(Thread.currentThread().getContextClassLoader().getResourceAsStream("naaccr-dictionary-" + version + ".xml"))) {
<ide> NaaccrDictionary dict = NaaccrXmlDictionaryUtils.readDictionary(reader);
<del> Assert.assertNull(version, NaaccrXmlDictionaryUtils.validateBaseDictionary(dict));
<add> Assert.assertTrue(version, NaaccrXmlDictionaryUtils.validateBaseDictionary(dict).isEmpty());
<ide> Assert.assertTrue(version, NaaccrXmlDictionaryUtils.BASE_DICTIONARY_URI_PATTERN.matcher(dict.getDictionaryUri()).matches());
<ide> items.addAll(dict.getItems());
<ide> }
<ide> // make sure internal default user dictionaries are valid
<ide> try (Reader reader = new InputStreamReader(Thread.currentThread().getContextClassLoader().getResourceAsStream("user-defined-naaccr-dictionary-" + version + ".xml"))) {
<ide> NaaccrDictionary dict = NaaccrXmlDictionaryUtils.readDictionary(reader);
<del> Assert.assertNull(version, NaaccrXmlDictionaryUtils.validateUserDictionary(dict));
<add> Assert.assertTrue(version, NaaccrXmlDictionaryUtils.validateUserDictionary(dict).isEmpty());
<ide> Assert.assertTrue(version, NaaccrXmlDictionaryUtils.DEFAULT_USER_DICTIONARY_URI_PATTERN.matcher(dict.getDictionaryUri()).matches());
<ide> items.addAll(dict.getItems());
<ide> }
<ide> // this one defines an item in a bad location, but it doesn't define a NAACCR version, so no exception, but if a NAACCR version is provided, the validation should fail
<ide> try (Reader reader = new InputStreamReader(Thread.currentThread().getContextClassLoader().getResourceAsStream("data/dictionary/testing-user-dictionary-140-bad3.xml"))) {
<ide> NaaccrDictionary dict = NaaccrXmlDictionaryUtils.readDictionary(reader);
<del> Assert.assertNull(NaaccrXmlDictionaryUtils.validateUserDictionary(dict));
<del> Assert.assertNotNull(NaaccrXmlDictionaryUtils.validateUserDictionary(dict, "140"));
<del> Assert.assertNotNull(NaaccrXmlDictionaryUtils.validateUserDictionary(dict, "160"));
<add> Assert.assertTrue(NaaccrXmlDictionaryUtils.validateUserDictionary(dict).isEmpty());
<add> Assert.assertFalse(NaaccrXmlDictionaryUtils.validateUserDictionary(dict, "140").isEmpty());
<add> Assert.assertFalse(NaaccrXmlDictionaryUtils.validateUserDictionary(dict, "160").isEmpty());
<ide> }
<ide>
<ide> // try to read a user dictionary with another error (missing dictionaryUri attribute)
<ide> item.setNaaccrNum(10000);
<ide> item.setLength(1);
<ide> dict.setItems(Collections.singletonList(item));
<del> Assert.assertNull(NaaccrXmlDictionaryUtils.validateUserDictionary(dict));
<add> Assert.assertTrue(NaaccrXmlDictionaryUtils.validateUserDictionary(dict).isEmpty());
<ide>
<ide> // this one re-defines the NPCR field but with a different number, which is not allowed
<ide> item = new NaaccrDictionaryItem();
<ide> dict.setItems(Collections.singletonList(item));
<ide> Assert.assertNotNull(NaaccrXmlDictionaryUtils.validateUserDictionary(dict));
<ide> item.setNaaccrNum(999999);
<del> Assert.assertNull(NaaccrXmlDictionaryUtils.validateUserDictionary(dict));
<add> Assert.assertTrue(NaaccrXmlDictionaryUtils.validateUserDictionary(dict).isEmpty());
<ide>
<ide> // this one defines an item that is too long
<ide> item = new NaaccrDictionaryItem();
<ide> dict.setItems(Collections.singletonList(item));
<ide> Assert.assertNotNull(NaaccrXmlDictionaryUtils.validateUserDictionary(dict));
<ide> item.setNaaccrId("myVariable");
<del> Assert.assertNull(NaaccrXmlDictionaryUtils.validateUserDictionary(dict));
<add> Assert.assertTrue(NaaccrXmlDictionaryUtils.validateUserDictionary(dict).isEmpty());
<ide> }
<ide>
<ide> @Test
<ide>
<ide> // base dictionary by itself is valid
<ide> NaaccrDictionary baseDictionary = NaaccrXmlDictionaryUtils.getBaseDictionaryByVersion("160");
<del> Assert.assertNull(NaaccrXmlDictionaryUtils.validateDictionaries(baseDictionary, Collections.emptyList()));
<add> Assert.assertTrue(NaaccrXmlDictionaryUtils.validateDictionaries(baseDictionary, Collections.emptyList()).isEmpty());
<ide>
<ide> // add two valid user-defined dictionaries
<ide> List<NaaccrDictionary> userDictionaries = new ArrayList<>();
<ide> item2.setLength(1);
<ide> userDictionary2.setItems(Collections.singletonList(item2));
<ide> userDictionaries.add(userDictionary2);
<del> Assert.assertNull(NaaccrXmlDictionaryUtils.validateDictionaries(baseDictionary, userDictionaries));
<add> Assert.assertTrue(NaaccrXmlDictionaryUtils.validateDictionaries(baseDictionary, userDictionaries).isEmpty());
<ide>
<ide> // NAACCR ID repeats a base one
<ide> item2.setNaaccrId("vitalStatus");
<del> Assert.assertNotNull(NaaccrXmlDictionaryUtils.validateDictionaries(baseDictionary, userDictionaries));
<add> Assert.assertFalse(NaaccrXmlDictionaryUtils.validateDictionaries(baseDictionary, userDictionaries).isEmpty());
<ide> item2.setNaaccrId("myVariable2");
<ide>
<ide> // NAACCR ID repeats a user-defined ones
<ide> item2.setNaaccrId("myVariable1");
<del> Assert.assertNotNull(NaaccrXmlDictionaryUtils.validateDictionaries(baseDictionary, userDictionaries));
<add> Assert.assertFalse(NaaccrXmlDictionaryUtils.validateDictionaries(baseDictionary, userDictionaries).isEmpty());
<ide> item2.setNaaccrId("myVariable2");
<ide>
<ide> // NAACCR number repeats a base one
<ide> item2.setNaaccrNum(10);
<del> Assert.assertNotNull(NaaccrXmlDictionaryUtils.validateDictionaries(baseDictionary, userDictionaries));
<add> Assert.assertFalse(NaaccrXmlDictionaryUtils.validateDictionaries(baseDictionary, userDictionaries).isEmpty());
<ide> item2.setNaaccrNum(10001);
<ide>
<ide> // NAACCR number repeats a user-defined one
<ide> item2.setNaaccrNum(10000);
<del> Assert.assertNotNull(NaaccrXmlDictionaryUtils.validateDictionaries(baseDictionary, userDictionaries));
<add> Assert.assertFalse(NaaccrXmlDictionaryUtils.validateDictionaries(baseDictionary, userDictionaries).isEmpty());
<ide> item2.setNaaccrNum(10001);
<ide>
<ide> // items overlap
<ide> item1.setStartColumn(2340);
<ide> item2.setStartColumn(2340);
<del> Assert.assertNotNull(NaaccrXmlDictionaryUtils.validateDictionaries(baseDictionary, userDictionaries));
<add> Assert.assertFalse(NaaccrXmlDictionaryUtils.validateDictionaries(baseDictionary, userDictionaries).isEmpty());
<ide> }
<ide>
<ide> @Test |
|
Java | apache-2.0 | 2c41df6f087390d3c07b30be452f8c4a5ff2e091 | 0 | libris/librisxl,libris/librisxl,libris/librisxl | package whelk.export.marc;
import org.apache.commons.lang3.StringUtils;
import se.kb.libris.export.ExportProfile;
import se.kb.libris.util.marc.MarcRecord;
import se.kb.libris.util.marc.io.Iso2709MarcRecordWriter;
import se.kb.libris.util.marc.io.MarcRecordWriter;
import se.kb.libris.util.marc.io.MarcXmlRecordWriter;
import whelk.Document;
import whelk.Whelk;
import whelk.converter.marc.JsonLD2MarcXMLConverter;
import whelk.util.MarcExport;
import whelk.util.ThreadPool;
import java.io.File;
import java.io.IOException;
import java.io.PrintWriter;
import java.sql.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Vector;
public class TotalExport
{
private final int BATCH_SIZE = 200;
private JsonLD2MarcXMLConverter m_toMarcXmlConverter;
private Whelk m_whelk;
public TotalExport(Whelk whelk)
{
m_whelk = whelk;
m_toMarcXmlConverter = new JsonLD2MarcXMLConverter(whelk.createMarcFrameConverter());
}
class Batch
{
public Batch(ExportProfile profile, MarcRecordWriter output)
{
bibUrisToConvert = new ArrayList<>(BATCH_SIZE);
this.profile = profile;
this.output = output;
}
List<String> bibUrisToConvert;
ExportProfile profile;
MarcRecordWriter output;
}
public static void main(String[] args)
throws IOException, SQLException, InterruptedException
{
if (args.length != 3)
printUsageAndExit();
ExportProfile profile = new ExportProfile(new File(args[0]));
long size = Long.parseLong(args[1]);
long segment = Long.parseLong(args[2]);
String encoding = profile.getProperty("characterencoding");
if (encoding.equals("Latin1Strip")) {
encoding = "ISO-8859-1";
}
MarcRecordWriter output = null;
if (profile.getProperty("format", "ISO2709").equalsIgnoreCase("MARCXML"))
output = new MarcXmlRecordWriter(System.out, encoding);
else
output = new Iso2709MarcRecordWriter(System.out, encoding);
new TotalExport(Whelk.createLoadedCoreWhelk()).dump(profile, size, segment, output);
output.close();
}
private static void printUsageAndExit()
{
System.out.println("Usage: java -Dxl.secret.properties=SECRETPROPSFILE -jar marc_export.jar PROFILE-FILE SEGMENT-SIZE SEGMENT");
System.out.println("");
System.out.println(" PROFILE-FILE should be a Java-properties file with the export-profile settings.");
System.out.println(" SEGMENT-SIZE is the number of records to dump in each segment.");
System.out.println(" SEGMENT is the number of the segment to be dumped.");
System.out.println("");
System.out.println("For example:");
System.out.println(" java -jar marc_export.jar export.properties 1000 1");
System.out.println("Would generate the second segment (each consisting of 1000 records) of all records held by whatever");
System.out.println("is in location=[] in export.properties.");
System.out.println("");
System.out.println("To not use segmentation, both SEGMENT and SEGMENT-SIZE should be '0'.");
System.exit(1);
}
private void dump(ExportProfile profile, long size, long segment, MarcRecordWriter output)
throws SQLException, InterruptedException
{
ThreadPool threadPool = new ThreadPool(4 * Runtime.getRuntime().availableProcessors());
Batch batch = new Batch(profile, output);
try (Connection connection = getConnection();
PreparedStatement statement = getAllHeldURIsStatement(profile, size, size*segment, connection);
ResultSet resultSet = statement.executeQuery())
{
while (resultSet.next())
{
String bibMainEntityUri = resultSet.getString(1);
batch.bibUrisToConvert.add(bibMainEntityUri);
if (batch.bibUrisToConvert.size() >= BATCH_SIZE)
{
threadPool.executeOnThread(batch, this::executeBatch);
batch = new Batch(profile, output);
}
}
if (!batch.bibUrisToConvert.isEmpty())
threadPool.executeOnThread(batch, this::executeBatch);
}
threadPool.joinAll();
}
private void executeBatch(Batch batch, int threadIndex)
{
try (Connection connection = m_whelk.getStorage().getConnection())
{
for (String bibUri : batch.bibUrisToConvert)
{
String systemID = m_whelk.getStorage().getSystemIdByIri(bibUri, connection);
if (systemID == null)
continue;
Document document = m_whelk.getStorage().loadEmbellished(systemID, m_whelk.getJsonld());
Vector<MarcRecord> result = MarcExport.compileVirtualMarcRecord(batch.profile, document, m_whelk, m_toMarcXmlConverter);
if (result == null) // A conversion error will already have been logged.
continue;
for (MarcRecord mr : result)
{
try
{
batch.output.writeRecord(mr);
} catch (Exception e)
{
throw new RuntimeException(e);
}
}
}
} catch (SQLException e)
{
throw new RuntimeException(e);
}
}
private Connection getConnection()
throws SQLException
{
Connection connection = m_whelk.getStorage().getConnection();
connection.setAutoCommit(false);
return connection;
}
private PreparedStatement getAllHeldURIsStatement(ExportProfile profile, long limit, long offset, Connection connection)
throws SQLException
{
String locations = profile.getProperty("locations", "");
if (isObviouslyBadSql(locations)) // Not watertight, but this is not user-input. It's admin-input.
throw new RuntimeException("SQL INJECTION SUSPECTED.");
List<String> libraryUriList = Arrays.asList(locations.split(" "));
String stringList = "'https://libris.kb.se/library/" + String.join("', 'https://libris.kb.se/library/", libraryUriList) + "'";
String sql = "SELECT data#>>'{@graph,1,itemOf,@id}' FROM lddb WHERE collection = 'hold' AND data#>>'{@graph,1,heldBy,@id}' IN (£)";
sql = sql.replace("£", stringList);
PreparedStatement preparedStatement = connection.prepareStatement(sql);
if (limit != 0)
{
sql += " ORDER BY created LIMIT ? OFFSET ?";
preparedStatement = connection.prepareStatement(sql);
preparedStatement.setLong(1, limit);
preparedStatement.setLong(2, offset);
}
preparedStatement.setFetchSize(100);
return preparedStatement;
}
private boolean isObviouslyBadSql(String sql)
{
String[] badWords =
{
"DROP",
"TRUNCATE",
"MODIFY",
"ALTER",
"UPDATE",
};
for (String word : badWords)
if (StringUtils.containsIgnoreCase(sql, word))
return true;
return false;
}
}
| marc_export/src/main/java/whelk/export/marc/TotalExport.java | package whelk.export.marc;
import org.apache.commons.lang3.StringUtils;
import se.kb.libris.export.ExportProfile;
import se.kb.libris.util.marc.MarcRecord;
import se.kb.libris.util.marc.io.Iso2709MarcRecordWriter;
import se.kb.libris.util.marc.io.MarcRecordWriter;
import se.kb.libris.util.marc.io.MarcXmlRecordWriter;
import whelk.Document;
import whelk.Whelk;
import whelk.converter.marc.JsonLD2MarcXMLConverter;
import whelk.util.MarcExport;
import whelk.util.ThreadPool;
import java.io.File;
import java.io.IOException;
import java.io.PrintWriter;
import java.sql.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Vector;
public class TotalExport
{
private final int BATCH_SIZE = 200;
private JsonLD2MarcXMLConverter m_toMarcXmlConverter;
private Whelk m_whelk;
public TotalExport(Whelk whelk)
{
m_whelk = whelk;
m_toMarcXmlConverter = new JsonLD2MarcXMLConverter(whelk.createMarcFrameConverter());
}
class Batch
{
public Batch(ExportProfile profile, MarcRecordWriter output)
{
bibUrisToConvert = new ArrayList<>(BATCH_SIZE);
this.profile = profile;
this.output = output;
}
List<String> bibUrisToConvert;
ExportProfile profile;
MarcRecordWriter output;
}
public static void main(String[] args)
throws IOException, SQLException, InterruptedException
{
if (args.length != 3)
printUsageAndExit();
ExportProfile profile = new ExportProfile(new File(args[0]));
long size = Long.parseLong(args[1]);
long segment = Long.parseLong(args[2]);
String encoding = profile.getProperty("characterencoding");
if (encoding.equals("Latin1Strip")) {
encoding = "ISO-8859-1";
}
MarcRecordWriter output = null;
if (profile.getProperty("format", "ISO2709").equalsIgnoreCase("MARCXML"))
output = new MarcXmlRecordWriter(System.out, encoding);
else
output = new Iso2709MarcRecordWriter(System.out, encoding);
new TotalExport(Whelk.createLoadedCoreWhelk()).dump(profile, size, segment, output);
}
private static void printUsageAndExit()
{
System.out.println("Usage: java -Dxl.secret.properties=SECRETPROPSFILE -jar marc_export.jar PROFILE-FILE SEGMENT-SIZE SEGMENT");
System.out.println("");
System.out.println(" PROFILE-FILE should be a Java-properties file with the export-profile settings.");
System.out.println(" SEGMENT-SIZE is the number of records to dump in each segment.");
System.out.println(" SEGMENT is the number of the segment to be dumped.");
System.out.println("");
System.out.println("For example:");
System.out.println(" java -jar marc_export.jar export.properties 1000 1");
System.out.println("Would generate the second segment (each consisting of 1000 records) of all records held by whatever");
System.out.println("is in location=[] in export.properties.");
System.out.println("");
System.out.println("To not use segmentation, both SEGMENT and SEGMENT-SIZE should be '0'.");
System.exit(1);
}
private void dump(ExportProfile profile, long size, long segment, MarcRecordWriter output)
throws SQLException, InterruptedException
{
ThreadPool threadPool = new ThreadPool(4 * Runtime.getRuntime().availableProcessors());
Batch batch = new Batch(profile, output);
try (Connection connection = getConnection();
PreparedStatement statement = getAllHeldURIsStatement(profile, size, size*segment, connection);
ResultSet resultSet = statement.executeQuery())
{
while (resultSet.next())
{
String bibMainEntityUri = resultSet.getString(1);
batch.bibUrisToConvert.add(bibMainEntityUri);
if (batch.bibUrisToConvert.size() >= BATCH_SIZE)
{
threadPool.executeOnThread(batch, this::executeBatch);
batch = new Batch(profile, output);
}
}
if (!batch.bibUrisToConvert.isEmpty())
threadPool.executeOnThread(batch, this::executeBatch);
}
threadPool.joinAll();
}
private void executeBatch(Batch batch, int threadIndex)
{
try (Connection connection = m_whelk.getStorage().getConnection())
{
for (String bibUri : batch.bibUrisToConvert)
{
String systemID = m_whelk.getStorage().getSystemIdByIri(bibUri, connection);
if (systemID == null)
continue;
Document document = m_whelk.getStorage().loadEmbellished(systemID, m_whelk.getJsonld());
Vector<MarcRecord> result = MarcExport.compileVirtualMarcRecord(batch.profile, document, m_whelk, m_toMarcXmlConverter);
if (result == null) // A conversion error will already have been logged.
continue;
for (MarcRecord mr : result)
{
try
{
batch.output.writeRecord(mr);
} catch (Exception e)
{
throw new RuntimeException(e);
}
}
}
} catch (SQLException e)
{
throw new RuntimeException(e);
}
}
private Connection getConnection()
throws SQLException
{
Connection connection = m_whelk.getStorage().getConnection();
connection.setAutoCommit(false);
return connection;
}
private PreparedStatement getAllHeldURIsStatement(ExportProfile profile, long limit, long offset, Connection connection)
throws SQLException
{
String locations = profile.getProperty("locations", "");
if (isObviouslyBadSql(locations)) // Not watertight, but this is not user-input. It's admin-input.
throw new RuntimeException("SQL INJECTION SUSPECTED.");
List<String> libraryUriList = Arrays.asList(locations.split(" "));
String stringList = "'https://libris.kb.se/library/" + String.join("', 'https://libris.kb.se/library/", libraryUriList) + "'";
String sql = "SELECT data#>>'{@graph,1,itemOf,@id}' FROM lddb WHERE collection = 'hold' AND data#>>'{@graph,1,heldBy,@id}' IN (£)";
sql = sql.replace("£", stringList);
PreparedStatement preparedStatement = connection.prepareStatement(sql);
if (limit != 0)
{
sql += " ORDER BY created LIMIT ? OFFSET ?";
preparedStatement = connection.prepareStatement(sql);
preparedStatement.setLong(1, limit);
preparedStatement.setLong(2, offset);
}
preparedStatement.setFetchSize(100);
return preparedStatement;
}
private boolean isObviouslyBadSql(String sql)
{
String[] badWords =
{
"DROP",
"TRUNCATE",
"MODIFY",
"ALTER",
"UPDATE",
};
for (String word : badWords)
if (StringUtils.containsIgnoreCase(sql, word))
return true;
return false;
}
}
| Close output before exiting
| marc_export/src/main/java/whelk/export/marc/TotalExport.java | Close output before exiting | <ide><path>arc_export/src/main/java/whelk/export/marc/TotalExport.java
<ide> output = new Iso2709MarcRecordWriter(System.out, encoding);
<ide>
<ide> new TotalExport(Whelk.createLoadedCoreWhelk()).dump(profile, size, segment, output);
<add> output.close();
<ide> }
<ide>
<ide> private static void printUsageAndExit() |
|
Java | bsd-3-clause | acce2bdb7b01133cec4d63555a7ed432ace3d0c1 | 0 | sabitaacharya/semanticvectors,Lucky-Dhakad/semanticvectors,anhth12/semanticvectors,Lucky-Dhakad/semanticvectors,anhth12/semanticvectors,anhth12/semanticvectors,anhth12/semanticvectors,Lucky-Dhakad/semanticvectors,sabitaacharya/semanticvectors,sabitaacharya/semanticvectors,sabitaacharya/semanticvectors,Lucky-Dhakad/semanticvectors | /**
Copyright (c) 2008, University of Pittsburgh
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
* Neither the name of the University of Pittsburgh nor the names
of its contributors may be used to endorse or promote products
derived from this software without specific prior written
permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
**/
package pitt.search.semanticvectors;
import java.util.Arrays;
import java.util.Hashtable;
import java.util.Enumeration;
import java.util.Random;
import java.io.File;
import java.io.IOException;
import java.lang.RuntimeException;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermDocs;
import org.apache.lucene.index.TermEnum;
import org.apache.lucene.index.TermPositionVector;
import org.apache.lucene.store.FSDirectory;
/**
* Implementation of vector store that creates term by term
* cooccurence vectors by iterating through all the documents in a
* Lucene index. This class implements a sliding context window
* approach, as used by Burgess and Lund (HAL) and Schutze amongst
* others Uses a sparse representation for the basic document vectors,
* which saves considerable space for collections with many individual
* documents.
*
* @author Trevor Cohen, Dominic Widdows.
*/
public class TermTermVectorsFromLucene implements VectorStore {
private boolean retraining = false;
private VectorStoreRAM termVectors;
private VectorStore indexVectors;
private IndexReader indexReader;
private int seedLength;
private String[] fieldsToIndex;
private int minFreq;
private int windowSize;
private float[][] localindexvectors;
private short[][] localsparseindexvectors;
private LuceneUtils lUtils;
private int nonAlphabet;
static final short NONEXISTENT = -1;
/**
* @return The object's indexReader.
*/
public IndexReader getIndexReader(){ return this.indexReader; }
/**
* @return The object's basicTermVectors.
*/
public VectorStore getBasicTermVectors(){ return this.termVectors; }
public String[] getFieldsToIndex(){ return this.fieldsToIndex; }
// Basic VectorStore interface methods implemented through termVectors.
public float[] getVector(Object term) {
return termVectors.getVector(term);
}
public Enumeration getAllVectors() {
return termVectors.getAllVectors();
}
public int getNumVectors() {
return termVectors.getNumVectors();
}
/**
* @param indexDir Directory containing Lucene index.
* @param seedLength Number of +1 or -1 entries in basic
* vectors. Should be even to give same number of each.
* @param minFreq The minimum term frequency for a term to be indexed.
* @param windowSize The size of the sliding context window.
* @param fieldsToIndex These fields will be indexed.
*/
public TermTermVectorsFromLucene(String indexDir,
int seedLength,
int minFreq,
int nonAlphabet,
int windowSize,
VectorStore basicTermVectors,
String[] fieldsToIndex)
throws IOException, RuntimeException {
this.minFreq = minFreq;
this.nonAlphabet = nonAlphabet;
this.fieldsToIndex = fieldsToIndex;
this.seedLength = seedLength;
this.windowSize = windowSize;
// Check that the Lucene index contains Term Positions.
LuceneUtils.CompressIndex(indexDir);
this.indexReader = IndexReader.open(FSDirectory.open(new File(indexDir)));
java.util.Collection fields_with_positions =
indexReader.getFieldNames(IndexReader.FieldOption.TERMVECTOR_WITH_POSITION);
if (fields_with_positions.isEmpty()) {
System.err.println("Term-term indexing requires a Lucene index containing TermPositionVectors");
System.err.println("Try rebuilding Lucene index using pitt.search.lucene.IndexFilePositions");
throw new IOException("Lucene indexes not built correctly.");
}
lUtils = new LuceneUtils(indexDir);
// If basicTermVectors was passed in, set state accordingly.
if (basicTermVectors != null) {
retraining = true;
this.indexVectors = basicTermVectors;
System.out.println("Reusing basic term vectors; number of terms: "
+ basicTermVectors.getNumVectors());
} else {
this.indexVectors = new VectorStoreSparseRAM();
}
Random random = new Random();
this.termVectors = new VectorStoreRAM();
// Iterate through an enumeration of terms and allocate termVector memory.
// If not retraining, create random elemental vectors as well.
System.err.println("Creating basic term vectors ...");
TermEnum terms = this.indexReader.terms();
int tc = 0;
while(terms.next()){
Term term = terms.term();
// Skip terms that don't pass the filter.
if (!lUtils.termFilter(terms.term(), fieldsToIndex)) {
continue;
}
tc++;
float[] termVector = new float[Flags.dimension];
// Place each term vector in the vector store.
this.termVectors.putVector(term.text(), termVector);
// Do the same for random index vectors unless retraining with trained term vectors
if (!retraining) {
short[] indexVector = VectorUtils.generateRandomVector(seedLength, random);
((VectorStoreSparseRAM) this.indexVectors).putVector(term.text(), indexVector);
}
}
System.err.println("There are " + tc + " terms (and " + indexReader.numDocs() + " docs)");
// Iterate through documents.
int numdocs = this.indexReader.numDocs();
for (int dc = 0; dc < numdocs; ++dc) {
/* output progress counter */
if ((dc % 10000 == 0) || (dc < 10000 && dc % 1000 == 0)) {
System.err.print(dc + " ... ");
}
try {
for (String field: fieldsToIndex) {
TermPositionVector vex = (TermPositionVector) indexReader.getTermFreqVector(dc, field);
if (vex != null) processTermPositionVector(vex);
}
}
catch (Exception e)
{
System.err.println("\nFailed to process document "+indexReader.document(dc).get("path")+"\n");
}
}
System.err.println("\nCreated " + termVectors.getNumVectors() + " term vectors ...");
System.err.println("\nNormalizing term vectors");
Enumeration e = termVectors.getAllVectors();
while (e.hasMoreElements()) {
ObjectVector temp = (ObjectVector) e.nextElement();
float[] next = temp.getVector();
next = VectorUtils.getNormalizedVector(next);
temp.setVector(next);
}
// If building a permutation index, these need to be written out to be reused.
if ((Flags.positionalmethod.equals("permutation") || (Flags.positionalmethod.equals("permutation_plus_basic") ))&& !retraining) {
String randFile = "randomvectors.bin";
System.err.println("\nWriting random vectors to "+randFile);
System.err.println("\nNormalizing random vectors");
Enumeration f = indexVectors.getAllVectors();
while (f.hasMoreElements()) {
ObjectVector temp = (ObjectVector) f.nextElement();
float[] next = temp.getVector();
next = VectorUtils.getNormalizedVector(next);
temp.setVector(next);
}
new VectorStoreWriter().WriteVectors(randFile, this.indexVectors);
}
}
/**
For each term, add term index vector
* for any term occurring within a window of size windowSize such
* that for example if windowSize = 5 with the window over the
* phrase "your life is your life" the index vectors for terms
* "your" and "life" would each be added to the term vector for
* "is" twice.
*
* TermPositionVectors contain arrays of (1) terms as text (2)
* term frequencies and (3) term positions within a
* document. The index of a particular term within this array
* will be referred to as the 'local index' in comments.
*/
private void processTermPositionVector(TermPositionVector vex) throws java.lang.ArrayIndexOutOfBoundsException {
int[] freqs = vex.getTermFrequencies();
// Find number of positions in document (across all terms).
int numwords = freqs.length;
int numpositions = 0;
for (short tcn = 0; tcn < numwords; ++tcn) {
int[] posns = vex.getTermPositions(tcn);
for (int pc = 0; pc < posns.length; ++pc) {
numpositions = Math.max(numpositions, posns[pc]);
}
}
numpositions += 1; //convert from zero-based index to count
// Create local random index and term vectors for relevant terms.
if (retraining)
localindexvectors = new float[numwords][Flags.dimension];
else
localsparseindexvectors = new short[numwords][seedLength];
float[][] localtermvectors = new float[numwords][Flags.dimension];
// Create index with one space for each position.
short[] positions = new short[numpositions];
Arrays.fill(positions, NONEXISTENT);
String[] docterms = vex.getTerms();
for (short tcn = 0; tcn < numwords; ++tcn) {
// Insert local term indices in position vector.
int[] posns = vex.getTermPositions(tcn); // Get all positions of term in document
for (int pc = 0; pc < posns.length; ++pc) {
// Set position of index vector to local
// (document-specific) index of term in this position.
int position = posns[pc];
positions[position] = tcn;
}
// Only terms that have passed the term filter are included in the VectorStores.
if (this.indexVectors.getVector(docterms[tcn]) != null) {
// Retrieve relevant random index vectors.
if (retraining)
localindexvectors[tcn] = indexVectors.getVector(docterms[tcn]);
else
localsparseindexvectors[tcn] =
((VectorStoreSparseRAM) indexVectors).getSparseVector(docterms[tcn]);
// Retrieve the float[] arrays of relevant term vectors.
localtermvectors[tcn] = termVectors.getVector(docterms[tcn]);
}
}
/** Iterate through positions adding index vectors of terms
* occurring within window to term vector for focus term
**/
int w2 = windowSize / 2;
for (int p = 0; p < positions.length; ++p) {
int focusposn = p;
int focusterm = positions[focusposn];
if (focusterm == NONEXISTENT) continue;
int windowstart = Math.max(0, p - w2);
int windowend = Math.min(focusposn + w2, positions.length - 1);
/* add random vector (in condensed (signed index + 1)
* representation) to term vector by adding -1 or +1 to the
* location (index - 1) according to the sign of the index.
* (The -1 and +1 are necessary because there is no signed
* version of 0, so we'd have no way of telling that the
* zeroth position in the array should be plus or minus 1.)
* See also generateRandomVector method below.
*/
for (int w = windowstart; w <= windowend; w++) {
if (w == focusposn) continue;
int coterm = positions[w];
if (coterm == NONEXISTENT) continue;
// calculate permutation required for either Sahlgren (2008) implementation
// encoding word order, or encoding direction as in Burgess and Lund's HAL
float[] localindex= new float[0];
short[] localsparseindex = new short[0];
if (retraining) localindex = localindexvectors[coterm].clone();
else localsparseindex = localsparseindexvectors[coterm].clone();
//combine 'content' and 'order' information - first add the unpermuted vector
if (Flags.positionalmethod.equals("permutation_plus_basic"))
{
// docterms[coterm] contains the term in position[w] in this document.
if (this.indexVectors.getVector(docterms[coterm]) != null && localtermvectors[focusterm] != null) {
if (retraining)
VectorUtils.addVectors(localtermvectors[focusterm],localindex,1);
else
VectorUtils.addVectors(localtermvectors[focusterm],localsparseindex,1);
}
}
if (Flags.positionalmethod.equals("permutation") || Flags.positionalmethod.equals("permutation_plus_basic")) {
int permutation = w - focusposn;
if (retraining)
localindex = VectorUtils.permuteVector(localindex , permutation);
else localsparseindex = VectorUtils.permuteVector(localsparseindex, permutation);
} else if (Flags.positionalmethod.equals("directional")) {
if (retraining)
localindex = VectorUtils.permuteVector(localindex, new Float(Math.signum(w-focusposn)).intValue());
else localsparseindex = VectorUtils.permuteVector(localsparseindex, new Float(Math.signum(w-focusposn)).intValue());
}
// docterms[coterm] contains the term in position[w] in this document.
if (this.indexVectors.getVector(docterms[coterm]) != null && localtermvectors[focusterm] != null) {
if (retraining)
VectorUtils.addVectors(localtermvectors[focusterm],localindex,1);
else
VectorUtils.addVectors(localtermvectors[focusterm],localsparseindex,1);
}
}
}
}
}
| src/pitt/search/semanticvectors/TermTermVectorsFromLucene.java | /**
Copyright (c) 2008, University of Pittsburgh
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
* Neither the name of the University of Pittsburgh nor the names
of its contributors may be used to endorse or promote products
derived from this software without specific prior written
permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
**/
package pitt.search.semanticvectors;
import java.util.Arrays;
import java.util.Hashtable;
import java.util.Enumeration;
import java.util.Random;
import java.io.File;
import java.io.IOException;
import java.lang.RuntimeException;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermDocs;
import org.apache.lucene.index.TermEnum;
import org.apache.lucene.index.TermPositionVector;
import org.apache.lucene.store.FSDirectory;
/**
* Implementation of vector store that creates term by term
* cooccurence vectors by iterating through all the documents in a
* Lucene index. This class implements a sliding context window
* approach, as used by Burgess and Lund (HAL) and Schutze amongst
* others Uses a sparse representation for the basic document vectors,
* which saves considerable space for collections with many individual
* documents.
*
* @author Trevor Cohen, Dominic Widdows.
*/
public class TermTermVectorsFromLucene implements VectorStore {
private boolean retraining = false;
private VectorStoreRAM termVectors;
private VectorStore indexVectors;
private IndexReader indexReader;
private int seedLength;
private String[] fieldsToIndex;
private int minFreq;
private int windowSize;
private float[][] localindexvectors;
private short[][] localsparseindexvectors;
private LuceneUtils lUtils;
private int nonAlphabet;
static final short NONEXISTENT = -1;
/**
* @return The object's indexReader.
*/
public IndexReader getIndexReader(){ return this.indexReader; }
/**
* @return The object's basicTermVectors.
*/
public VectorStore getBasicTermVectors(){ return this.termVectors; }
public String[] getFieldsToIndex(){ return this.fieldsToIndex; }
// Basic VectorStore interface methods implemented through termVectors.
public float[] getVector(Object term) {
return termVectors.getVector(term);
}
public Enumeration getAllVectors() {
return termVectors.getAllVectors();
}
public int getNumVectors() {
return termVectors.getNumVectors();
}
/**
* @param indexDir Directory containing Lucene index.
* @param seedLength Number of +1 or -1 entries in basic
* vectors. Should be even to give same number of each.
* @param minFreq The minimum term frequency for a term to be indexed.
* @param windowSize The size of the sliding context window.
* @param fieldsToIndex These fields will be indexed.
*/
public TermTermVectorsFromLucene(String indexDir,
int seedLength,
int minFreq,
int nonAlphabet,
int windowSize,
VectorStore basicTermVectors,
String[] fieldsToIndex)
throws IOException, RuntimeException {
this.minFreq = minFreq;
this.nonAlphabet = nonAlphabet;
this.fieldsToIndex = fieldsToIndex;
this.seedLength = seedLength;
this.windowSize = windowSize;
// Check that the Lucene index contains Term Positions.
LuceneUtils.CompressIndex(indexDir);
this.indexReader = IndexReader.open(FSDirectory.open(new File(indexDir)));
java.util.Collection fields_with_positions =
indexReader.getFieldNames(IndexReader.FieldOption.TERMVECTOR_WITH_POSITION);
if (fields_with_positions.isEmpty()) {
System.err.println("Term-term indexing requires a Lucene index containing TermPositionVectors");
System.err.println("Try rebuilding Lucene index using pitt.search.lucene.IndexFilePositions");
throw new IOException("Lucene indexes not built correctly.");
}
lUtils = new LuceneUtils(indexDir);
// If basicTermVectors was passed in, set state accordingly.
if (basicTermVectors != null) {
retraining = true;
this.indexVectors = basicTermVectors;
System.out.println("Reusing basic term vectors; number of terms: "
+ basicTermVectors.getNumVectors());
} else {
this.indexVectors = new VectorStoreSparseRAM();
}
Random random = new Random();
this.termVectors = new VectorStoreRAM();
// Iterate through an enumeration of terms and allocate termVector memory.
// If not retraining, create random elemental vectors as well.
System.err.println("Creating basic term vectors ...");
TermEnum terms = this.indexReader.terms();
int tc = 0;
while(terms.next()){
Term term = terms.term();
// Skip terms that don't pass the filter.
if (!lUtils.termFilter(terms.term(), fieldsToIndex)) {
continue;
}
tc++;
float[] termVector = new float[Flags.dimension];
// Place each term vector in the vector store.
this.termVectors.putVector(term.text(), termVector);
// Do the same for random index vectors unless retraining with trained term vectors
if (!retraining) {
short[] indexVector = VectorUtils.generateRandomVector(seedLength, random);
((VectorStoreSparseRAM) this.indexVectors).putVector(term.text(), indexVector);
}
}
System.err.println("There are " + tc + " terms (and " + indexReader.numDocs() + " docs)");
// Iterate through documents.
int numdocs = this.indexReader.numDocs();
for (int dc = 0; dc < numdocs; ++dc) {
/* output progress counter */
if ((dc % 10000 == 0) || (dc < 10000 && dc % 1000 == 0)) {
System.err.print(dc + " ... ");
}
try {
for (String field: fieldsToIndex) {
TermPositionVector vex = (TermPositionVector) indexReader.getTermFreqVector(dc, field);
if (vex != null) processTermPositionVector(vex);
}
}
catch (Exception e)
{
System.err.println("\nFailed to process document "+indexReader.document(dc).get("path")+"\n");
}
}
System.err.println("\nCreated " + termVectors.getNumVectors() + " term vectors ...");
System.err.println("\nNormalizing term vectors");
Enumeration e = termVectors.getAllVectors();
while (e.hasMoreElements()) {
ObjectVector temp = (ObjectVector) e.nextElement();
float[] next = temp.getVector();
next = VectorUtils.getNormalizedVector(next);
temp.setVector(next);
}
// If building a permutation index, these need to be written out to be reused.
if ((Flags.positionalmethod.equals("permutation") | (Flags.positionalmethod.equals("permutation_plus_basic") ))&& !retraining) {
String randFile = "randomvectors.bin";
System.err.println("\nWriting random vectors to "+randFile);
System.err.println("\nNormalizing random vectors");
Enumeration f = indexVectors.getAllVectors();
while (f.hasMoreElements()) {
ObjectVector temp = (ObjectVector) f.nextElement();
float[] next = temp.getVector();
next = VectorUtils.getNormalizedVector(next);
temp.setVector(next);
}
new VectorStoreWriter().WriteVectors(randFile, this.indexVectors);
}
}
/**
For each term, add term index vector
* for any term occurring within a window of size windowSize such
* that for example if windowSize = 5 with the window over the
* phrase "your life is your life" the index vectors for terms
* "your" and "life" would each be added to the term vector for
* "is" twice.
*
* TermPositionVectors contain arrays of (1) terms as text (2)
* term frequencies and (3) term positions within a
* document. The index of a particular term within this array
* will be referred to as the 'local index' in comments.
*/
private void processTermPositionVector(TermPositionVector vex) throws java.lang.ArrayIndexOutOfBoundsException {
int[] freqs = vex.getTermFrequencies();
// Find number of positions in document (across all terms).
int numwords = freqs.length;
int numpositions = 0;
for (short tcn = 0; tcn < numwords; ++tcn) {
int[] posns = vex.getTermPositions(tcn);
for (int pc = 0; pc < posns.length; ++pc) {
numpositions = Math.max(numpositions, posns[pc]);
}
}
numpositions += 1; //convert from zero-based index to count
// Create local random index and term vectors for relevant terms.
if (retraining)
localindexvectors = new float[numwords][Flags.dimension];
else
localsparseindexvectors = new short[numwords][seedLength];
float[][] localtermvectors = new float[numwords][Flags.dimension];
// Create index with one space for each position.
short[] positions = new short[numpositions];
Arrays.fill(positions, NONEXISTENT);
String[] docterms = vex.getTerms();
for (short tcn = 0; tcn < numwords; ++tcn) {
// Insert local term indices in position vector.
int[] posns = vex.getTermPositions(tcn); // Get all positions of term in document
for (int pc = 0; pc < posns.length; ++pc) {
// Set position of index vector to local
// (document-specific) index of term in this position.
int position = posns[pc];
positions[position] = tcn;
}
// Only terms that have passed the term filter are included in the VectorStores.
if (this.indexVectors.getVector(docterms[tcn]) != null) {
// Retrieve relevant random index vectors.
if (retraining)
localindexvectors[tcn] = indexVectors.getVector(docterms[tcn]);
else
localsparseindexvectors[tcn] =
((VectorStoreSparseRAM) indexVectors).getSparseVector(docterms[tcn]);
// Retrieve the float[] arrays of relevant term vectors.
localtermvectors[tcn] = termVectors.getVector(docterms[tcn]);
}
}
/** Iterate through positions adding index vectors of terms
* occurring within window to term vector for focus term
**/
int w2 = windowSize / 2;
for (int p = 0; p < positions.length; ++p) {
int focusposn = p;
int focusterm = positions[focusposn];
if (focusterm == NONEXISTENT) continue;
int windowstart = Math.max(0, p - w2);
int windowend = Math.min(focusposn + w2, positions.length - 1);
/* add random vector (in condensed (signed index + 1)
* representation) to term vector by adding -1 or +1 to the
* location (index - 1) according to the sign of the index.
* (The -1 and +1 are necessary because there is no signed
* version of 0, so we'd have no way of telling that the
* zeroth position in the array should be plus or minus 1.)
* See also generateRandomVector method below.
*/
for (int w = windowstart; w <= windowend; w++) {
if (w == focusposn) continue;
int coterm = positions[w];
if (coterm == NONEXISTENT) continue;
// calculate permutation required for either Sahlgren (2008) implementation
// encoding word order, or encoding direction as in Burgess and Lund's HAL
float[] localindex= new float[0];
short[] localsparseindex = new short[0];
if (retraining) localindex = localindexvectors[coterm].clone();
else localsparseindex = localsparseindexvectors[coterm].clone();
//combine 'content' and 'order' information - first add the unpermuted vector
if (Flags.positionalmethod.equals("permutation_plus_basic"))
{
// docterms[coterm] contains the term in position[w] in this document.
if (this.indexVectors.getVector(docterms[coterm]) != null && localtermvectors[focusterm] != null) {
if (retraining)
VectorUtils.addVectors(localtermvectors[focusterm],localindex,1);
else
VectorUtils.addVectors(localtermvectors[focusterm],localsparseindex,1);
}
}
if (Flags.positionalmethod.equals("permutation") | Flags.positionalmethod.equals("permutation_plus_basic")) {
int permutation = w - focusposn;
if (retraining)
localindex = VectorUtils.permuteVector(localindex , permutation);
else localsparseindex = VectorUtils.permuteVector(localsparseindex, permutation);
} else if (Flags.positionalmethod.equals("directional")) {
if (retraining)
localindex = VectorUtils.permuteVector(localindex, new Float(Math.signum(w-focusposn)).intValue());
else localsparseindex = VectorUtils.permuteVector(localsparseindex, new Float(Math.signum(w-focusposn)).intValue());
}
// docterms[coterm] contains the term in position[w] in this document.
if (this.indexVectors.getVector(docterms[coterm]) != null && localtermvectors[focusterm] != null) {
if (retraining)
VectorUtils.addVectors(localtermvectors[focusterm],localindex,1);
else
VectorUtils.addVectors(localtermvectors[focusterm],localsparseindex,1);
}
}
}
}
}
| Changed two bitwise OR operators (ie '|') into conditional OR operators (ie '||'). This allows short-circuiting the test when the first condition is true and it also reads easier because there is no expectation that the operands have bitwise meaning.
| src/pitt/search/semanticvectors/TermTermVectorsFromLucene.java | Changed two bitwise OR operators (ie '|') into conditional OR operators (ie '||'). This allows short-circuiting the test when the first condition is true and it also reads easier because there is no expectation that the operands have bitwise meaning. | <ide><path>rc/pitt/search/semanticvectors/TermTermVectorsFromLucene.java
<ide> }
<ide>
<ide> // If building a permutation index, these need to be written out to be reused.
<del> if ((Flags.positionalmethod.equals("permutation") | (Flags.positionalmethod.equals("permutation_plus_basic") ))&& !retraining) {
<add> if ((Flags.positionalmethod.equals("permutation") || (Flags.positionalmethod.equals("permutation_plus_basic") ))&& !retraining) {
<ide> String randFile = "randomvectors.bin";
<ide> System.err.println("\nWriting random vectors to "+randFile);
<ide> System.err.println("\nNormalizing random vectors");
<ide> }
<ide> }
<ide>
<del> if (Flags.positionalmethod.equals("permutation") | Flags.positionalmethod.equals("permutation_plus_basic")) {
<add> if (Flags.positionalmethod.equals("permutation") || Flags.positionalmethod.equals("permutation_plus_basic")) {
<ide> int permutation = w - focusposn;
<ide> if (retraining)
<ide> localindex = VectorUtils.permuteVector(localindex , permutation); |
|
Java | isc | 1b7a94424f177a410ab454aac39e6f8f99f6aebe | 0 | io7m/jparasol,io7m/jparasol | /*
* Copyright © 2013 <[email protected]> http://io7m.com
*
* Permission to use, copy, modify, and/or distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
* SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
* IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
package com.io7m.jparasol.parser;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import javax.annotation.Nonnull;
import com.io7m.jaux.Constraints;
import com.io7m.jaux.Constraints.ConstraintError;
import com.io7m.jaux.UnreachableCodeException;
import com.io7m.jaux.functional.Option;
import com.io7m.jaux.functional.Option.None;
import com.io7m.jparasol.ModulePath;
import com.io7m.jparasol.PackagePath;
import com.io7m.jparasol.lexer.Lexer;
import com.io7m.jparasol.lexer.LexerError;
import com.io7m.jparasol.lexer.Token;
import com.io7m.jparasol.lexer.Token.TokenDiscard;
import com.io7m.jparasol.lexer.Token.TokenIdentifierLower;
import com.io7m.jparasol.lexer.Token.TokenIdentifierUpper;
import com.io7m.jparasol.lexer.Token.TokenLet;
import com.io7m.jparasol.lexer.Token.TokenLiteralBoolean;
import com.io7m.jparasol.lexer.Token.TokenLiteralInteger;
import com.io7m.jparasol.lexer.Token.TokenLiteralIntegerDecimal;
import com.io7m.jparasol.lexer.Token.TokenLiteralReal;
import com.io7m.jparasol.lexer.Token.Type;
import com.io7m.jparasol.untyped.ast.initial.UASTIDeclaration.UASTIDFunction;
import com.io7m.jparasol.untyped.ast.initial.UASTIDeclaration.UASTIDFunctionArgument;
import com.io7m.jparasol.untyped.ast.initial.UASTIDeclaration.UASTIDFunctionDefined;
import com.io7m.jparasol.untyped.ast.initial.UASTIDeclaration.UASTIDFunctionExternal;
import com.io7m.jparasol.untyped.ast.initial.UASTIDeclaration.UASTIDImport;
import com.io7m.jparasol.untyped.ast.initial.UASTIDeclaration.UASTIDModule;
import com.io7m.jparasol.untyped.ast.initial.UASTIDeclaration.UASTIDPackage;
import com.io7m.jparasol.untyped.ast.initial.UASTIDeclaration.UASTIDShader;
import com.io7m.jparasol.untyped.ast.initial.UASTIDeclaration.UASTIDShaderFragment;
import com.io7m.jparasol.untyped.ast.initial.UASTIDeclaration.UASTIDShaderFragmentInput;
import com.io7m.jparasol.untyped.ast.initial.UASTIDeclaration.UASTIDShaderFragmentLocal;
import com.io7m.jparasol.untyped.ast.initial.UASTIDeclaration.UASTIDShaderFragmentLocalDiscard;
import com.io7m.jparasol.untyped.ast.initial.UASTIDeclaration.UASTIDShaderFragmentLocalValue;
import com.io7m.jparasol.untyped.ast.initial.UASTIDeclaration.UASTIDShaderFragmentOutput;
import com.io7m.jparasol.untyped.ast.initial.UASTIDeclaration.UASTIDShaderFragmentOutputAssignment;
import com.io7m.jparasol.untyped.ast.initial.UASTIDeclaration.UASTIDShaderFragmentParameter;
import com.io7m.jparasol.untyped.ast.initial.UASTIDeclaration.UASTIDShaderFragmentParameters;
import com.io7m.jparasol.untyped.ast.initial.UASTIDeclaration.UASTIDShaderProgram;
import com.io7m.jparasol.untyped.ast.initial.UASTIDeclaration.UASTIDShaderVertex;
import com.io7m.jparasol.untyped.ast.initial.UASTIDeclaration.UASTIDShaderVertexInput;
import com.io7m.jparasol.untyped.ast.initial.UASTIDeclaration.UASTIDShaderVertexOutput;
import com.io7m.jparasol.untyped.ast.initial.UASTIDeclaration.UASTIDShaderVertexOutputAssignment;
import com.io7m.jparasol.untyped.ast.initial.UASTIDeclaration.UASTIDShaderVertexParameter;
import com.io7m.jparasol.untyped.ast.initial.UASTIDeclaration.UASTIDShaderVertexParameters;
import com.io7m.jparasol.untyped.ast.initial.UASTIDeclaration.UASTIDType;
import com.io7m.jparasol.untyped.ast.initial.UASTIDeclaration.UASTIDTypeRecord;
import com.io7m.jparasol.untyped.ast.initial.UASTIDeclaration.UASTIDTypeRecordField;
import com.io7m.jparasol.untyped.ast.initial.UASTIDeclaration.UASTIDValue;
import com.io7m.jparasol.untyped.ast.initial.UASTIDeclaration.UASTIDValueLocal;
import com.io7m.jparasol.untyped.ast.initial.UASTIDeclaration.UASTIDeclarationModuleLevel;
import com.io7m.jparasol.untyped.ast.initial.UASTIExpression;
import com.io7m.jparasol.untyped.ast.initial.UASTIExpression.UASTIEApplication;
import com.io7m.jparasol.untyped.ast.initial.UASTIExpression.UASTIEBoolean;
import com.io7m.jparasol.untyped.ast.initial.UASTIExpression.UASTIEConditional;
import com.io7m.jparasol.untyped.ast.initial.UASTIExpression.UASTIEInteger;
import com.io7m.jparasol.untyped.ast.initial.UASTIExpression.UASTIELet;
import com.io7m.jparasol.untyped.ast.initial.UASTIExpression.UASTIENew;
import com.io7m.jparasol.untyped.ast.initial.UASTIExpression.UASTIEReal;
import com.io7m.jparasol.untyped.ast.initial.UASTIExpression.UASTIERecord;
import com.io7m.jparasol.untyped.ast.initial.UASTIExpression.UASTIERecordProjection;
import com.io7m.jparasol.untyped.ast.initial.UASTIExpression.UASTIESwizzle;
import com.io7m.jparasol.untyped.ast.initial.UASTIExpression.UASTIEVariable;
import com.io7m.jparasol.untyped.ast.initial.UASTIExpression.UASTIRecordFieldAssignment;
import com.io7m.jparasol.untyped.ast.initial.UASTIShaderPath;
import com.io7m.jparasol.untyped.ast.initial.UASTITypePath;
import com.io7m.jparasol.untyped.ast.initial.UASTIUnchecked;
import com.io7m.jparasol.untyped.ast.initial.UASTIUnit;
import com.io7m.jparasol.untyped.ast.initial.UASTIValuePath;
public final class Parser
{
public static @Nonnull Parser newInternalParser(
final @Nonnull Lexer lexer)
throws ConstraintError,
IOException,
LexerError
{
return new Parser(true, lexer);
}
public static @Nonnull Parser newParser(
final @Nonnull Lexer lexer)
throws ConstraintError,
IOException,
LexerError
{
return new Parser(false, lexer);
}
  // True if this parser accepts internal-only syntax (see declarationFunction,
  // which only allows "external" function bodies in internal mode).
  private final boolean internal;
  // The source of tokens.
  private final @Nonnull Lexer lexer;
  // Scratch buffer; presumably used to build parse error messages in the
  // parserExpect*/parserConsume* helpers (not visible in this chunk) — TODO confirm.
  private final @Nonnull StringBuilder message;
  // The single token of lookahead; advanced by the parserConsume* helpers.
  private @Nonnull Token token;
  /**
   * Construct a parser reading from the given lexer. The first token is
   * read eagerly so that {@code this.token} is always valid lookahead.
   *
   * @param internal Whether internal-only syntax (such as {@code external}
   *          function bodies) is accepted
   * @param lexer The source of tokens; must not be null
   */
  private Parser(
    final boolean internal,
    final @Nonnull Lexer lexer)
    throws ConstraintError,
      IOException,
      LexerError
  {
    this.internal = internal;
    this.lexer = Constraints.constrainNotNull(lexer, "Lexer");
    this.message = new StringBuilder();
    this.token = lexer.token();
  }
  /**
   * Parse a fragment shader declaration:
   * {@code fragment name is <params> [with <locals>] as <assignments> end}.
   * Parameter declarations are partitioned into inputs, outputs, and
   * parameters; the {@code with} section of local values is optional.
   */
  public @Nonnull
    UASTIDShaderFragment<UASTIUnchecked>
    declarationFragmentShader()
      throws ParserError,
        IOException,
        LexerError,
        ConstraintError
  {
    this.parserConsumeExact(Type.TOKEN_FRAGMENT);
    this.parserExpectExact(Type.TOKEN_IDENTIFIER_LOWER);
    final TokenIdentifierLower name = (TokenIdentifierLower) this.token;
    this.parserConsumeExact(Type.TOKEN_IDENTIFIER_LOWER);
    this.parserConsumeExact(Type.TOKEN_IS);

    // Parse all in/out/parameter declarations, then sort them into the
    // three homogeneous lists required by the AST constructor.
    final List<UASTIDShaderFragmentParameters<UASTIUnchecked>> decls =
      this.declarationFragmentShaderParameterDeclarations();
    final List<UASTIDShaderFragmentInput<UASTIUnchecked>> inputs =
      new ArrayList<UASTIDShaderFragmentInput<UASTIUnchecked>>();
    final List<UASTIDShaderFragmentOutput<UASTIUnchecked>> outputs =
      new ArrayList<UASTIDShaderFragmentOutput<UASTIUnchecked>>();
    final List<UASTIDShaderFragmentParameter<UASTIUnchecked>> parameters =
      new ArrayList<UASTIDShaderFragmentParameter<UASTIUnchecked>>();

    for (int index = 0; index < decls.size(); ++index) {
      final UASTIDShaderFragmentParameters<UASTIUnchecked> d =
        decls.get(index);
      if (d instanceof UASTIDShaderFragmentInput<?>) {
        inputs.add((UASTIDShaderFragmentInput<UASTIUnchecked>) d);
        continue;
      }
      if (d instanceof UASTIDShaderFragmentOutput<?>) {
        outputs.add((UASTIDShaderFragmentOutput<UASTIUnchecked>) d);
        continue;
      }
      if (d instanceof UASTIDShaderFragmentParameter<?>) {
        parameters.add((UASTIDShaderFragmentParameter<UASTIUnchecked>) d);
        continue;
      }
    }

    // The "with <locals>" section is optional: either "with" or "as"
    // must follow the parameter declarations.
    this.parserExpectOneOf(new Type[] { Type.TOKEN_WITH, Type.TOKEN_AS });
    final List<UASTIDShaderFragmentLocal<UASTIUnchecked>> values;
    switch (this.token.getType()) {
      case TOKEN_WITH:
      {
        this.parserConsumeExact(Type.TOKEN_WITH);
        values = this.declarationFragmentShaderLocals();
        break;
      }
      // $CASES-OMITTED$
      default:
      {
        values = new ArrayList<UASTIDShaderFragmentLocal<UASTIUnchecked>>();
        break;
      }
    }

    this.parserConsumeExact(Type.TOKEN_AS);
    final List<UASTIDShaderFragmentOutputAssignment<UASTIUnchecked>> assigns =
      this.declarationFragmentShaderOutputAssignments();
    this.parserConsumeExact(Type.TOKEN_END);
    return new UASTIDShaderFragment<UASTIUnchecked>(
      name,
      inputs,
      outputs,
      parameters,
      values,
      assigns);
  }
public
UASTIDShaderFragmentInput<UASTIUnchecked>
declarationFragmentShaderInput()
throws ParserError,
IOException,
LexerError,
ConstraintError
{
this.parserConsumeExact(Type.TOKEN_IN);
this.parserExpectExact(Type.TOKEN_IDENTIFIER_LOWER);
final TokenIdentifierLower name = (TokenIdentifierLower) this.token;
this.parserConsumeExact(Type.TOKEN_IDENTIFIER_LOWER);
this.parserConsumeExact(Type.TOKEN_COLON);
return new UASTIDShaderFragmentInput<UASTIUnchecked>(
name,
this.declarationTypePath());
}
  /**
   * Parse a fragment shader {@code discard ( expression )} local
   * declaration. The original {@code discard} token is retained in the AST
   * (presumably for error reporting positions — confirm against the AST type).
   */
  public @Nonnull
    UASTIDShaderFragmentLocalDiscard<UASTIUnchecked>
    declarationFragmentShaderLocalDiscard()
      throws ParserError,
        ConstraintError,
        IOException,
        LexerError
  {
    this.parserExpectExact(Type.TOKEN_DISCARD);
    final TokenDiscard discard = (TokenDiscard) this.token;
    this.parserConsumeExact(Type.TOKEN_DISCARD);
    this.parserConsumeExact(Type.TOKEN_ROUND_LEFT);
    final UASTIExpression<UASTIUnchecked> expr = this.expression();
    this.parserConsumeExact(Type.TOKEN_ROUND_RIGHT);
    return new UASTIDShaderFragmentLocalDiscard<UASTIUnchecked>(discard, expr);
  }
public
List<UASTIDShaderFragmentLocal<UASTIUnchecked>>
declarationFragmentShaderLocals()
throws ParserError,
IOException,
LexerError,
ConstraintError
{
final ArrayList<UASTIDShaderFragmentLocal<UASTIUnchecked>> locals =
new ArrayList<UASTIDShaderFragmentLocal<UASTIUnchecked>>();
for (;;) {
switch (this.token.getType()) {
case TOKEN_VALUE:
locals.add(new UASTIDShaderFragmentLocalValue<UASTIUnchecked>(this
.declarationValueLocal()));
this.parserConsumeExact(Type.TOKEN_SEMICOLON);
break;
case TOKEN_DISCARD:
locals.add(this.declarationFragmentShaderLocalDiscard());
this.parserConsumeExact(Type.TOKEN_SEMICOLON);
break;
// $CASES-OMITTED$
default:
return locals;
}
}
}
public
UASTIDShaderFragmentOutput<UASTIUnchecked>
declarationFragmentShaderOutput()
throws ParserError,
IOException,
LexerError,
ConstraintError
{
this.parserConsumeExact(Type.TOKEN_OUT);
this.parserExpectExact(Type.TOKEN_IDENTIFIER_LOWER);
final TokenIdentifierLower name = (TokenIdentifierLower) this.token;
this.parserConsumeExact(Type.TOKEN_IDENTIFIER_LOWER);
this.parserConsumeExact(Type.TOKEN_COLON);
final UASTITypePath type = this.declarationTypePath();
this.parserConsumeExact(Type.TOKEN_AS);
this.parserExpectExact(Type.TOKEN_LITERAL_INTEGER_DECIMAL);
final TokenLiteralIntegerDecimal index =
(TokenLiteralIntegerDecimal) this.token;
this.parserConsumeExact(Type.TOKEN_LITERAL_INTEGER_DECIMAL);
return new UASTIDShaderFragmentOutput<UASTIUnchecked>(name, type, index
.getValue()
.intValue());
}
  /**
   * Parse a single fragment shader output assignment:
   * {@code out name = value.path}. The right-hand side is restricted to a
   * variable reference (not a general expression).
   */
  public @Nonnull
    UASTIDShaderFragmentOutputAssignment<UASTIUnchecked>
    declarationFragmentShaderOutputAssignment()
      throws ParserError,
        IOException,
        LexerError,
        ConstraintError
  {
    this.parserConsumeExact(Type.TOKEN_OUT);
    this.parserExpectExact(Type.TOKEN_IDENTIFIER_LOWER);
    final TokenIdentifierLower name = (TokenIdentifierLower) this.token;
    this.parserConsumeExact(Type.TOKEN_IDENTIFIER_LOWER);
    this.parserConsumeExact(Type.TOKEN_EQUALS);
    final UASTIEVariable<UASTIUnchecked> value =
      new UASTIEVariable<UASTIUnchecked>(this.declarationValuePath());
    return new UASTIDShaderFragmentOutputAssignment<UASTIUnchecked>(
      name,
      value);
  }
  /**
   * Parse one or more semicolon-terminated fragment shader output
   * assignments. At least one assignment is required; parsing stops at the
   * first token that is not {@code out}.
   */
  public @Nonnull
    List<UASTIDShaderFragmentOutputAssignment<UASTIUnchecked>>
    declarationFragmentShaderOutputAssignments()
      throws ParserError,
        IOException,
        LexerError,
        ConstraintError
  {
    final List<UASTIDShaderFragmentOutputAssignment<UASTIUnchecked>> assigns =
      new ArrayList<UASTIDShaderFragmentOutputAssignment<UASTIUnchecked>>();

    // Mandatory first assignment.
    assigns.add(this.declarationFragmentShaderOutputAssignment());
    this.parserConsumeExact(Type.TOKEN_SEMICOLON);

    for (;;) {
      switch (this.token.getType()) {
        case TOKEN_OUT:
          assigns.add(this.declarationFragmentShaderOutputAssignment());
          this.parserConsumeExact(Type.TOKEN_SEMICOLON);
          break;
        // $CASES-OMITTED$
        default:
          return assigns;
      }
    }
  }
  /**
   * Parse a fragment shader parameter declaration:
   * {@code parameter name : type.path}.
   */
  public @Nonnull
    UASTIDShaderFragmentParameter<UASTIUnchecked>
    declarationFragmentShaderParameter()
      throws ParserError,
        IOException,
        LexerError,
        ConstraintError
  {
    this.parserConsumeExact(Type.TOKEN_PARAMETER);
    this.parserExpectExact(Type.TOKEN_IDENTIFIER_LOWER);
    final TokenIdentifierLower name = (TokenIdentifierLower) this.token;
    this.parserConsumeExact(Type.TOKEN_IDENTIFIER_LOWER);
    this.parserConsumeExact(Type.TOKEN_COLON);
    return new UASTIDShaderFragmentParameter<UASTIUnchecked>(
      name,
      this.declarationTypePath());
  }
  /**
   * Parse exactly one fragment shader parameter declaration, dispatching on
   * the leading keyword: {@code in}, {@code out}, or {@code parameter}.
   * Any other token raises a parse error via parserExpectOneOf.
   */
  public @Nonnull
    UASTIDShaderFragmentParameters<UASTIUnchecked>
    declarationFragmentShaderParameterDeclaration()
      throws ParserError,
        IOException,
        LexerError,
        ConstraintError
  {
    this.parserExpectOneOf(new Type[] {
      Type.TOKEN_IN,
      Type.TOKEN_OUT,
      Type.TOKEN_PARAMETER });

    switch (this.token.getType()) {
      case TOKEN_IN:
        return this.declarationFragmentShaderInput();
      case TOKEN_OUT:
        return this.declarationFragmentShaderOutput();
      case TOKEN_PARAMETER:
        return this.declarationFragmentShaderParameter();
        // $CASES-OMITTED$
      default:
        // Unreachable: parserExpectOneOf has already rejected other tokens.
        throw new UnreachableCodeException();
    }
  }
  /**
   * Parse zero or more semicolon-terminated fragment shader parameter
   * declarations ({@code in}/{@code out}/{@code parameter}), stopping at the
   * first token that starts none of them.
   */
  public @Nonnull
    List<UASTIDShaderFragmentParameters<UASTIUnchecked>>
    declarationFragmentShaderParameterDeclarations()
      throws ParserError,
        IOException,
        LexerError,
        ConstraintError
  {
    final List<UASTIDShaderFragmentParameters<UASTIUnchecked>> declarations =
      new ArrayList<UASTIDShaderFragmentParameters<UASTIUnchecked>>();

    for (;;) {
      switch (this.token.getType()) {
        case TOKEN_IN:
        case TOKEN_OUT:
        case TOKEN_PARAMETER:
          declarations.add(this
            .declarationFragmentShaderParameterDeclaration());
          this.parserConsumeExact(Type.TOKEN_SEMICOLON);
          break;
        // $CASES-OMITTED$
        default:
          return declarations;
      }
    }
  }
  /**
   * Parse a function declaration:
   * {@code function name (args) : type = body}. In internal mode the body
   * may be {@code external name}, producing an external function; otherwise
   * the body is a general expression.
   */
  public @Nonnull UASTIDFunction<UASTIUnchecked> declarationFunction()
    throws ParserError,
      IOException,
      LexerError,
      ConstraintError
  {
    this.parserConsumeExact(Type.TOKEN_FUNCTION);
    this.parserExpectExact(Type.TOKEN_IDENTIFIER_LOWER);
    final TokenIdentifierLower name = (TokenIdentifierLower) this.token;
    this.parserConsumeExact(Type.TOKEN_IDENTIFIER_LOWER);
    final List<UASTIDFunctionArgument<UASTIUnchecked>> args =
      this.declarationFunctionArguments();
    this.parserConsumeExact(Type.TOKEN_COLON);
    final UASTITypePath type = this.declarationTypePath();
    this.parserConsumeExact(Type.TOKEN_EQUALS);

    switch (this.token.getType()) {
      case TOKEN_EXTERNAL:

        /**
         * If parsing an "internal" unit, then allow "external" functions.
         */

        if (this.internal) {
          this.parserConsumeExact(Type.TOKEN_EXTERNAL);
          this.parserExpectExact(Type.TOKEN_IDENTIFIER_LOWER);
          final TokenIdentifierLower ext = (TokenIdentifierLower) this.token;
          this.parserConsumeExact(Type.TOKEN_IDENTIFIER_LOWER);
          return new UASTIDFunctionExternal<UASTIUnchecked>(
            name,
            args,
            type,
            ext);
        }

        /**
         * Otherwise, attempt to parse "external" as an expression, which will
         * result in an error.
         */

        // Deliberately falls into the same expression parse as the default
        // case so that the error is reported by the expression parser.
        return new UASTIDFunctionDefined<UASTIUnchecked>(
          name,
          args,
          type,
          this.expression());

        // $CASES-OMITTED$
      default:
        return new UASTIDFunctionDefined<UASTIUnchecked>(
          name,
          args,
          type,
          this.expression());
    }
  }
  /**
   * Parse a single function argument declaration: {@code name : type.path}.
   */
  public @Nonnull
    UASTIDFunctionArgument<UASTIUnchecked>
    declarationFunctionArgument()
      throws ParserError,
        ConstraintError,
        IOException,
        LexerError
  {
    this.parserExpectExact(Type.TOKEN_IDENTIFIER_LOWER);
    final TokenIdentifierLower name = (TokenIdentifierLower) this.token;
    this.parserConsumeExact(Type.TOKEN_IDENTIFIER_LOWER);
    this.parserConsumeExact(Type.TOKEN_COLON);
    return new UASTIDFunctionArgument<UASTIUnchecked>(
      name,
      this.declarationTypePath());
  }
  /**
   * Parse a parenthesized, comma-separated function argument list:
   * {@code ( arg , arg , ... )}. Note that at least one argument is
   * required by this grammar — empty argument lists are a parse error.
   */
  public @Nonnull
    List<UASTIDFunctionArgument<UASTIUnchecked>>
    declarationFunctionArguments()
      throws ParserError,
        IOException,
        LexerError,
        ConstraintError
  {
    this.parserConsumeExact(Type.TOKEN_ROUND_LEFT);

    final ArrayList<UASTIDFunctionArgument<UASTIUnchecked>> args =
      new ArrayList<UASTIDFunctionArgument<UASTIUnchecked>>();
    // Mandatory first argument; further arguments follow commas.
    args.add(this.declarationFunctionArgument());

    boolean done = false;
    while (done == false) {
      switch (this.token.getType()) {
        case TOKEN_COMMA:
          this.parserConsumeExact(Type.TOKEN_COMMA);
          args.add(this.declarationFunctionArgument());
          break;
        // $CASES-OMITTED$
        default:
          done = true;
      }
    }

    this.parserConsumeExact(Type.TOKEN_ROUND_RIGHT);
    return args;
  }
  /**
   * Parse an import declaration:
   * {@code import package.path ModuleName [as Rename]}. The optional
   * {@code as} clause gives the module a local alias.
   */
  public @Nonnull UASTIDImport<UASTIUnchecked> declarationImport()
    throws ParserError,
      IOException,
      LexerError,
      ConstraintError
  {
    this.parserConsumeExact(Type.TOKEN_IMPORT);
    final PackagePath path = this.declarationPackagePath();
    this.parserExpectExact(Type.TOKEN_IDENTIFIER_UPPER);
    final TokenIdentifierUpper name = (TokenIdentifierUpper) this.token;
    this.parserConsumeExact(Type.TOKEN_IDENTIFIER_UPPER);

    switch (this.token.getType()) {
      case TOKEN_AS:
      {
        this.parserConsumeExact(Type.TOKEN_AS);
        this.parserExpectExact(Type.TOKEN_IDENTIFIER_UPPER);
        final TokenIdentifierUpper rename = (TokenIdentifierUpper) this.token;
        this.parserConsumeExact(Type.TOKEN_IDENTIFIER_UPPER);
        return new UASTIDImport<UASTIUnchecked>(
          new ModulePath(path, name),
          Option.some(rename));
      }
      // $CASES-OMITTED$
      default:
        // No "as" clause: no rename.
        final None<TokenIdentifierUpper> none = Option.none();
        return new UASTIDImport<UASTIUnchecked>(
          new ModulePath(path, name),
          none);
    }
  }
  /**
   * Parse zero or more semicolon-terminated import declarations, stopping
   * at the first token that is not {@code import}.
   */
  public @Nonnull List<UASTIDImport<UASTIUnchecked>> declarationImports()
    throws ParserError,
      IOException,
      LexerError,
      ConstraintError
  {
    final ArrayList<UASTIDImport<UASTIUnchecked>> imports =
      new ArrayList<UASTIDImport<UASTIUnchecked>>();

    for (;;) {
      switch (this.token.getType()) {
        case TOKEN_IMPORT:
          imports.add(this.declarationImport());
          this.parserConsumeExact(Type.TOKEN_SEMICOLON);
          break;
        // $CASES-OMITTED$
        default:
          return imports;
      }
    }
  }
  /**
   * Parse a module declaration:
   * {@code module Name is <imports> <declarations> end}. Imports must
   * precede all other module-level declarations.
   */
  public @Nonnull UASTIDModule<UASTIUnchecked> declarationModule()
    throws ParserError,
      IOException,
      LexerError,
      ConstraintError
  {
    this.parserConsumeExact(Type.TOKEN_MODULE);
    this.parserExpectExact(Type.TOKEN_IDENTIFIER_UPPER);
    final TokenIdentifierUpper name = (TokenIdentifierUpper) this.token;
    this.parserConsumeExact(Type.TOKEN_IDENTIFIER_UPPER);
    this.parserConsumeExact(Type.TOKEN_IS);

    final List<UASTIDImport<UASTIUnchecked>> imports =
      this.declarationImports();
    final List<UASTIDeclarationModuleLevel<UASTIUnchecked>> declarations =
      this.declarationModuleLevels();

    this.parserConsumeExact(Type.TOKEN_END);
    return new UASTIDModule<UASTIUnchecked>(name, imports, declarations);
  }
  /**
   * Parse exactly one module-level declaration, dispatching on the leading
   * keyword: {@code value}, {@code function}, {@code type}, or
   * {@code shader}. Any other token raises a parse error.
   */
  public @Nonnull
    UASTIDeclarationModuleLevel<UASTIUnchecked>
    declarationModuleLevel()
      throws ParserError,
        IOException,
        LexerError,
        ConstraintError
  {
    this.parserExpectOneOf(new Type[] {
      Type.TOKEN_VALUE,
      Type.TOKEN_FUNCTION,
      Type.TOKEN_TYPE,
      Type.TOKEN_SHADER });

    switch (this.token.getType()) {
      case TOKEN_VALUE:
        return this.declarationValue();
      case TOKEN_FUNCTION:
        return this.declarationFunction();
      case TOKEN_TYPE:
        return this.declarationType();
      case TOKEN_SHADER:
        return this.declarationShader();
        // $CASES-OMITTED$
      default:
        // Unreachable: parserExpectOneOf has already rejected other tokens.
        throw new UnreachableCodeException();
    }
  }
  /**
   * Parse zero or more semicolon-terminated module-level declarations,
   * stopping at the first token that starts none of them.
   */
  public @Nonnull
    List<UASTIDeclarationModuleLevel<UASTIUnchecked>>
    declarationModuleLevels()
      throws ParserError,
        IOException,
        LexerError,
        ConstraintError
  {
    final ArrayList<UASTIDeclarationModuleLevel<UASTIUnchecked>> decls =
      new ArrayList<UASTIDeclarationModuleLevel<UASTIUnchecked>>();

    for (;;) {
      switch (this.token.getType()) {
        case TOKEN_VALUE:
        case TOKEN_FUNCTION:
        case TOKEN_TYPE:
        case TOKEN_SHADER:
          decls.add(this.declarationModuleLevel());
          this.parserConsumeExact(Type.TOKEN_SEMICOLON);
          break;
        // $CASES-OMITTED$
        default:
          return decls;
      }
    }
  }
  /**
   * Parse zero or more semicolon-terminated module declarations, stopping
   * at the first token that is not {@code module}.
   */
  public @Nonnull List<UASTIDModule<UASTIUnchecked>> declarationModules()
    throws ParserError,
      IOException,
      LexerError,
      ConstraintError
  {
    final List<UASTIDModule<UASTIUnchecked>> modules =
      new ArrayList<UASTIDModule<UASTIUnchecked>>();

    for (;;) {
      switch (this.token.getType()) {
        case TOKEN_MODULE:
          modules.add(this.declarationModule());
          this.parserConsumeExact(Type.TOKEN_SEMICOLON);
          break;
        // $CASES-OMITTED$
        default:
          return modules;
      }
    }
  }
public @Nonnull UASTIDPackage<UASTIUnchecked> declarationPackage()
throws ParserError,
IOException,
LexerError,
ConstraintError
{
this.parserConsumeExact(Type.TOKEN_PACKAGE);
return new UASTIDPackage<UASTIUnchecked>(this.declarationPackagePath());
}
  /**
   * Parse a dot-separated package path of lowercase identifiers, such as
   * {@code com.io7m.example}. An empty path (no leading identifier) yields
   * a PackagePath with no components rather than an error.
   */
  public @Nonnull PackagePath declarationPackagePath()
    throws ConstraintError,
      ParserError,
      IOException,
      LexerError
  {
    final ArrayList<TokenIdentifierLower> components =
      new ArrayList<TokenIdentifierLower>();

    boolean done = false;
    while (done == false) {
      switch (this.token.getType()) {
        case TOKEN_IDENTIFIER_LOWER:
        {
          components.add((TokenIdentifierLower) this.token);
          this.parserConsumeExact(Type.TOKEN_IDENTIFIER_LOWER);
          // A dot means another component follows; anything else ends
          // the path.
          switch (this.token.getType()) {
            case TOKEN_DOT:
              this.parserConsumeExact(Type.TOKEN_DOT);
              break;
            // $CASES-OMITTED$
            default:
              done = true;
              break;
          }
          break;
        }
        // $CASES-OMITTED$
        default:
          done = true;
          break;
      }
    }

    return new PackagePath(components);
  }
  /**
   * Parse a program shader declaration:
   * {@code program name is vertex <path>; fragment <path>; end},
   * linking a vertex shader and a fragment shader by path.
   */
  public @Nonnull
    UASTIDShaderProgram<UASTIUnchecked>
    declarationProgramShader()
      throws ParserError,
        IOException,
        LexerError,
        ConstraintError
  {
    this.parserConsumeExact(Type.TOKEN_PROGRAM);
    this.parserExpectExact(Type.TOKEN_IDENTIFIER_LOWER);
    final TokenIdentifierLower name = (TokenIdentifierLower) this.token;
    this.parserConsumeExact(Type.TOKEN_IDENTIFIER_LOWER);
    this.parserConsumeExact(Type.TOKEN_IS);
    this.parserConsumeExact(Type.TOKEN_VERTEX);
    final UASTIShaderPath vertex = this.declarationShaderPath();
    this.parserConsumeExact(Type.TOKEN_SEMICOLON);
    this.parserConsumeExact(Type.TOKEN_FRAGMENT);
    final UASTIShaderPath fragment = this.declarationShaderPath();
    this.parserConsumeExact(Type.TOKEN_SEMICOLON);
    this.parserConsumeExact(Type.TOKEN_END);
    return new UASTIDShaderProgram<UASTIUnchecked>(name, vertex, fragment);
  }
  /**
   * Parse a shader declaration: {@code shader} followed by one of
   * {@code vertex}, {@code fragment}, or {@code program}, dispatching to the
   * corresponding parse method.
   */
  public @Nonnull UASTIDShader<UASTIUnchecked> declarationShader()
    throws ParserError,
      IOException,
      LexerError,
      ConstraintError
  {
    this.parserConsumeExact(Type.TOKEN_SHADER);
    this.parserExpectOneOf(new Type[] {
      Type.TOKEN_VERTEX,
      Type.TOKEN_FRAGMENT,
      Type.TOKEN_PROGRAM });

    switch (this.token.getType()) {
      case TOKEN_VERTEX:
        return this.declarationVertexShader();
      case TOKEN_FRAGMENT:
        return this.declarationFragmentShader();
      case TOKEN_PROGRAM:
        return this.declarationProgramShader();
        // $CASES-OMITTED$
      default:
        // Unreachable: parserExpectOneOf has already rejected other tokens.
        throw new UnreachableCodeException();
    }
  }
  /**
   * Parse a shader path: either an unqualified lowercase name
   * ({@code shader_name}) or a module-qualified name
   * ({@code Module.shader_name}).
   */
  public @Nonnull UASTIShaderPath declarationShaderPath()
    throws ParserError,
      IOException,
      LexerError,
      ConstraintError
  {
    this.parserExpectOneOf(new Type[] {
      Type.TOKEN_IDENTIFIER_LOWER,
      Type.TOKEN_IDENTIFIER_UPPER });

    switch (this.token.getType()) {
      case TOKEN_IDENTIFIER_LOWER:
      {
        // Unqualified: no module component.
        final TokenIdentifierLower name = (TokenIdentifierLower) this.token;
        this.parserConsumeExact(Type.TOKEN_IDENTIFIER_LOWER);
        final None<TokenIdentifierUpper> none = Option.none();
        return new UASTIShaderPath(none, name);
      }
      case TOKEN_IDENTIFIER_UPPER:
      {
        // Qualified: "Module.name".
        final TokenIdentifierUpper module = (TokenIdentifierUpper) this.token;
        this.parserConsumeExact(Type.TOKEN_IDENTIFIER_UPPER);
        this.parserConsumeExact(Type.TOKEN_DOT);
        this.parserExpectExact(Type.TOKEN_IDENTIFIER_LOWER);
        final TokenIdentifierLower name = (TokenIdentifierLower) this.token;
        this.parserConsumeExact(Type.TOKEN_IDENTIFIER_LOWER);
        return new UASTIShaderPath(Option.some(module), name);
      }
      // $CASES-OMITTED$
      default:
        // Unreachable: parserExpectOneOf has already rejected other tokens.
        throw new UnreachableCodeException();
    }
  }
  /**
   * Parse a type declaration: {@code type name is record <fields> end}.
   * Records are currently the only supported kind of type declaration
   * (the expect-one-of list contains only {@code record}).
   */
  public @Nonnull UASTIDType<UASTIUnchecked> declarationType()
    throws ConstraintError,
      ParserError,
      IOException,
      LexerError
  {
    this.parserConsumeExact(Type.TOKEN_TYPE);
    this.parserExpectExact(Type.TOKEN_IDENTIFIER_LOWER);
    final TokenIdentifierLower name = (TokenIdentifierLower) this.token;
    this.parserConsumeExact(Type.TOKEN_IDENTIFIER_LOWER);
    this.parserConsumeExact(Type.TOKEN_IS);
    this.parserExpectOneOf(new Type[] { Type.TOKEN_RECORD });

    switch (this.token.getType()) {
      case TOKEN_RECORD:
        this.parserConsumeExact(Type.TOKEN_RECORD);
        final List<UASTIDTypeRecordField<UASTIUnchecked>> fields =
          this.declarationTypeRecordFields();
        this.parserConsumeExact(Type.TOKEN_END);
        return new UASTIDTypeRecord<UASTIUnchecked>(name, fields);
        // $CASES-OMITTED$
      default:
        // Unreachable: parserExpectOneOf has already rejected other tokens.
        throw new UnreachableCodeException();
    }
  }
  /**
   * Parse a type path: either an unqualified lowercase name
   * ({@code type_name}) or a module-qualified name
   * ({@code Module.type_name}).
   */
  public @Nonnull UASTITypePath declarationTypePath()
    throws ParserError,
      IOException,
      LexerError,
      ConstraintError
  {
    this.parserExpectOneOf(new Type[] {
      Type.TOKEN_IDENTIFIER_LOWER,
      Type.TOKEN_IDENTIFIER_UPPER });

    switch (this.token.getType()) {
      case TOKEN_IDENTIFIER_LOWER:
      {
        // Unqualified: no module component.
        final TokenIdentifierLower name = (TokenIdentifierLower) this.token;
        this.parserConsumeExact(Type.TOKEN_IDENTIFIER_LOWER);
        final None<TokenIdentifierUpper> none = Option.none();
        return new UASTITypePath(none, name);
      }
      case TOKEN_IDENTIFIER_UPPER:
      {
        // Qualified: "Module.name".
        final TokenIdentifierUpper module = (TokenIdentifierUpper) this.token;
        this.parserConsumeExact(Type.TOKEN_IDENTIFIER_UPPER);
        this.parserConsumeExact(Type.TOKEN_DOT);
        this.parserExpectExact(Type.TOKEN_IDENTIFIER_LOWER);
        final TokenIdentifierLower name = (TokenIdentifierLower) this.token;
        this.parserConsumeExact(Type.TOKEN_IDENTIFIER_LOWER);
        return new UASTITypePath(Option.some(module), name);
      }
      // $CASES-OMITTED$
      default:
        // Unreachable: parserExpectOneOf has already rejected other tokens.
        throw new UnreachableCodeException();
    }
  }
  /**
   * Parse a single record field declaration: {@code name : type.path}.
   */
  public @Nonnull
    UASTIDTypeRecordField<UASTIUnchecked>
    declarationTypeRecordField()
      throws ConstraintError,
        ParserError,
        IOException,
        LexerError
  {
    this.parserExpectExact(Type.TOKEN_IDENTIFIER_LOWER);
    final TokenIdentifierLower name = (TokenIdentifierLower) this.token;
    this.parserConsumeExact(Type.TOKEN_IDENTIFIER_LOWER);
    this.parserConsumeExact(Type.TOKEN_COLON);
    final UASTITypePath type = this.declarationTypePath();
    return new UASTIDTypeRecordField<UASTIUnchecked>(name, type);
  }
  /**
   * Parse a comma-separated list of record field declarations. At least one
   * field is required; records with no fields are a parse error.
   */
  public @Nonnull
    List<UASTIDTypeRecordField<UASTIUnchecked>>
    declarationTypeRecordFields()
      throws ParserError,
        IOException,
        LexerError,
        ConstraintError
  {
    final ArrayList<UASTIDTypeRecordField<UASTIUnchecked>> args =
      new ArrayList<UASTIDTypeRecordField<UASTIUnchecked>>();
    // Mandatory first field; further fields follow commas.
    args.add(this.declarationTypeRecordField());

    boolean done = false;
    while (done == false) {
      switch (this.token.getType()) {
        case TOKEN_COMMA:
          this.parserConsumeExact(Type.TOKEN_COMMA);
          args.add(this.declarationTypeRecordField());
          break;
        // $CASES-OMITTED$
        default:
          done = true;
      }
    }

    return args;
  }
  /**
   * Parse a module-level value declaration, with an optional type
   * ascription: {@code value name [: type.path] = expression}.
   */
  public @Nonnull UASTIDValue<UASTIUnchecked> declarationValue()
    throws ParserError,
      IOException,
      LexerError,
      ConstraintError
  {
    this.parserConsumeExact(Type.TOKEN_VALUE);
    this.parserExpectExact(Type.TOKEN_IDENTIFIER_LOWER);
    final TokenIdentifierLower name = (TokenIdentifierLower) this.token;
    this.parserConsumeExact(Type.TOKEN_IDENTIFIER_LOWER);

    // Either ": type =" (ascribed) or "=" (unascribed) must follow.
    this
      .parserExpectOneOf(new Type[] { Type.TOKEN_COLON, Type.TOKEN_EQUALS });

    switch (this.token.getType()) {
      case TOKEN_COLON:
      {
        this.parserConsumeExact(Type.TOKEN_COLON);
        final UASTITypePath path = this.declarationTypePath();
        this.parserConsumeExact(Type.TOKEN_EQUALS);
        return new UASTIDValue<UASTIUnchecked>(
          name,
          Option.some(path),
          this.expression());
      }
      case TOKEN_EQUALS:
      {
        this.parserConsumeExact(Type.TOKEN_EQUALS);
        final Option<UASTITypePath> none = Option.none();
        return new UASTIDValue<UASTIUnchecked>(name, none, this.expression());
      }
      // $CASES-OMITTED$
      default:
        // Unreachable: parserExpectOneOf has already rejected other tokens.
        throw new UnreachableCodeException();
    }
  }
  /**
   * Parse a local value declaration (inside {@code let} bodies and shader
   * {@code with} sections), with an optional type ascription:
   * {@code value name [: type.path] = expression}. Mirrors
   * declarationValue but produces a local-value AST node.
   */
  public @Nonnull UASTIDValueLocal<UASTIUnchecked> declarationValueLocal()
    throws ParserError,
      IOException,
      LexerError,
      ConstraintError
  {
    this.parserConsumeExact(Type.TOKEN_VALUE);
    this.parserExpectExact(Type.TOKEN_IDENTIFIER_LOWER);
    final TokenIdentifierLower name = (TokenIdentifierLower) this.token;
    this.parserConsumeExact(Type.TOKEN_IDENTIFIER_LOWER);

    // Either ": type =" (ascribed) or "=" (unascribed) must follow.
    this
      .parserExpectOneOf(new Type[] { Type.TOKEN_COLON, Type.TOKEN_EQUALS });

    switch (this.token.getType()) {
      case TOKEN_COLON:
      {
        this.parserConsumeExact(Type.TOKEN_COLON);
        final UASTITypePath path = this.declarationTypePath();
        this.parserConsumeExact(Type.TOKEN_EQUALS);
        return new UASTIDValueLocal<UASTIUnchecked>(
          name,
          Option.some(path),
          this.expression());
      }
      case TOKEN_EQUALS:
      {
        this.parserConsumeExact(Type.TOKEN_EQUALS);
        final Option<UASTITypePath> none = Option.none();
        return new UASTIDValueLocal<UASTIUnchecked>(
          name,
          none,
          this.expression());
      }
      // $CASES-OMITTED$
      default:
        // Unreachable: parserExpectOneOf has already rejected other tokens.
        throw new UnreachableCodeException();
    }
  }
  /**
   * Parse one or more semicolon-terminated local value declarations. At
   * least one is required; parsing stops at the first token that is not
   * {@code value}.
   */
  public @Nonnull
    List<UASTIDValueLocal<UASTIUnchecked>>
    declarationValueLocals()
      throws ParserError,
        IOException,
        LexerError,
        ConstraintError
  {
    final List<UASTIDValueLocal<UASTIUnchecked>> values =
      new ArrayList<UASTIDValueLocal<UASTIUnchecked>>();
    // Mandatory first declaration.
    values.add(this.declarationValueLocal());
    this.parserConsumeExact(Type.TOKEN_SEMICOLON);

    boolean done = false;
    while (done == false) {
      switch (this.token.getType()) {
        case TOKEN_VALUE:
          values.add(this.declarationValueLocal());
          this.parserConsumeExact(Type.TOKEN_SEMICOLON);
          break;
        // $CASES-OMITTED$
        default:
          done = true;
      }
    }

    return values;
  }
  /**
   * Parse a value path: either an unqualified lowercase name
   * ({@code value_name}) or a module-qualified name
   * ({@code Module.value_name}).
   */
  public @Nonnull UASTIValuePath declarationValuePath()
    throws ParserError,
      IOException,
      LexerError,
      ConstraintError
  {
    this.parserExpectOneOf(new Type[] {
      Type.TOKEN_IDENTIFIER_LOWER,
      Type.TOKEN_IDENTIFIER_UPPER });

    switch (this.token.getType()) {
      case TOKEN_IDENTIFIER_LOWER:
      {
        // Unqualified: no module component.
        final TokenIdentifierLower name = (TokenIdentifierLower) this.token;
        this.parserConsumeExact(Type.TOKEN_IDENTIFIER_LOWER);
        final None<TokenIdentifierUpper> none = Option.none();
        return new UASTIValuePath(none, name);
      }
      case TOKEN_IDENTIFIER_UPPER:
      {
        // Qualified: "Module.name".
        final TokenIdentifierUpper module = (TokenIdentifierUpper) this.token;
        this.parserConsumeExact(Type.TOKEN_IDENTIFIER_UPPER);
        this.parserConsumeExact(Type.TOKEN_DOT);
        this.parserExpectExact(Type.TOKEN_IDENTIFIER_LOWER);
        final TokenIdentifierLower name = (TokenIdentifierLower) this.token;
        this.parserConsumeExact(Type.TOKEN_IDENTIFIER_LOWER);
        return new UASTIValuePath(Option.some(module), name);
      }
      // $CASES-OMITTED$
      default:
        // Unreachable: parserExpectOneOf has already rejected other tokens.
        throw new UnreachableCodeException();
    }
  }
  /**
   * Parse a vertex shader declaration:
   * {@code vertex name is <params> [with <locals>] as <assignments> end}.
   * Mirrors declarationFragmentShader but produces vertex AST nodes and
   * uses plain local values rather than fragment locals.
   */
  public @Nonnull
    UASTIDShaderVertex<UASTIUnchecked>
    declarationVertexShader()
      throws ParserError,
        IOException,
        LexerError,
        ConstraintError
  {
    this.parserConsumeExact(Type.TOKEN_VERTEX);
    this.parserExpectExact(Type.TOKEN_IDENTIFIER_LOWER);
    final TokenIdentifierLower name = (TokenIdentifierLower) this.token;
    this.parserConsumeExact(Type.TOKEN_IDENTIFIER_LOWER);
    this.parserConsumeExact(Type.TOKEN_IS);

    // Parse all in/out/parameter declarations, then sort them into the
    // three homogeneous lists required by the AST constructor.
    final List<UASTIDShaderVertexParameters<UASTIUnchecked>> decls =
      this.declarationVertexShaderParameterDeclarations();
    final List<UASTIDShaderVertexInput<UASTIUnchecked>> inputs =
      new ArrayList<UASTIDShaderVertexInput<UASTIUnchecked>>();
    final List<UASTIDShaderVertexOutput<UASTIUnchecked>> outputs =
      new ArrayList<UASTIDShaderVertexOutput<UASTIUnchecked>>();
    final List<UASTIDShaderVertexParameter<UASTIUnchecked>> parameters =
      new ArrayList<UASTIDShaderVertexParameter<UASTIUnchecked>>();

    for (int index = 0; index < decls.size(); ++index) {
      final UASTIDShaderVertexParameters<UASTIUnchecked> d = decls.get(index);
      if (d instanceof UASTIDShaderVertexInput<?>) {
        inputs.add((UASTIDShaderVertexInput<UASTIUnchecked>) d);
        continue;
      }
      if (d instanceof UASTIDShaderVertexOutput<?>) {
        outputs.add((UASTIDShaderVertexOutput<UASTIUnchecked>) d);
        continue;
      }
      if (d instanceof UASTIDShaderVertexParameter<?>) {
        parameters.add((UASTIDShaderVertexParameter<UASTIUnchecked>) d);
        continue;
      }
    }

    // The "with <locals>" section is optional: either "with" or "as"
    // must follow the parameter declarations.
    this.parserExpectOneOf(new Type[] { Type.TOKEN_WITH, Type.TOKEN_AS });
    final List<UASTIDValueLocal<UASTIUnchecked>> values;
    switch (this.token.getType()) {
      case TOKEN_WITH:
      {
        this.parserConsumeExact(Type.TOKEN_WITH);
        values = this.declarationValueLocals();
        break;
      }
      // $CASES-OMITTED$
      default:
      {
        values = new ArrayList<UASTIDValueLocal<UASTIUnchecked>>();
        break;
      }
    }

    this.parserConsumeExact(Type.TOKEN_AS);
    final List<UASTIDShaderVertexOutputAssignment<UASTIUnchecked>> assigns =
      this.declarationVertexShaderOutputAssignments();
    this.parserConsumeExact(Type.TOKEN_END);
    return new UASTIDShaderVertex<UASTIUnchecked>(
      name,
      inputs,
      outputs,
      parameters,
      values,
      assigns);
  }
public
UASTIDShaderVertexInput<UASTIUnchecked>
declarationVertexShaderInput()
throws ParserError,
IOException,
LexerError,
ConstraintError
{
this.parserConsumeExact(Type.TOKEN_IN);
this.parserExpectExact(Type.TOKEN_IDENTIFIER_LOWER);
final TokenIdentifierLower name = (TokenIdentifierLower) this.token;
this.parserConsumeExact(Type.TOKEN_IDENTIFIER_LOWER);
this.parserConsumeExact(Type.TOKEN_COLON);
return new UASTIDShaderVertexInput<UASTIUnchecked>(
name,
this.declarationTypePath());
}
public
UASTIDShaderVertexOutput<UASTIUnchecked>
declarationVertexShaderOutput()
throws ParserError,
IOException,
LexerError,
ConstraintError
{
this.parserConsumeExact(Type.TOKEN_OUT);
this.parserExpectExact(Type.TOKEN_IDENTIFIER_LOWER);
final TokenIdentifierLower name = (TokenIdentifierLower) this.token;
this.parserConsumeExact(Type.TOKEN_IDENTIFIER_LOWER);
this.parserConsumeExact(Type.TOKEN_COLON);
return new UASTIDShaderVertexOutput<UASTIUnchecked>(
name,
this.declarationTypePath());
}
  /**
   * Parse a single vertex shader output assignment:
   * {@code out name = value.path}. The right-hand side is restricted to a
   * variable reference (not a general expression).
   */
  public @Nonnull
    UASTIDShaderVertexOutputAssignment<UASTIUnchecked>
    declarationVertexShaderOutputAssignment()
      throws ParserError,
        IOException,
        LexerError,
        ConstraintError
  {
    this.parserConsumeExact(Type.TOKEN_OUT);
    this.parserExpectExact(Type.TOKEN_IDENTIFIER_LOWER);
    final TokenIdentifierLower name = (TokenIdentifierLower) this.token;
    this.parserConsumeExact(Type.TOKEN_IDENTIFIER_LOWER);
    this.parserConsumeExact(Type.TOKEN_EQUALS);
    final UASTIEVariable<UASTIUnchecked> value =
      new UASTIEVariable<UASTIUnchecked>(this.declarationValuePath());
    return new UASTIDShaderVertexOutputAssignment<UASTIUnchecked>(name, value);
  }
  /**
   * Parse one or more semicolon-terminated vertex shader output
   * assignments. At least one assignment is required; parsing stops at the
   * first token that is not {@code out}.
   */
  public @Nonnull
    List<UASTIDShaderVertexOutputAssignment<UASTIUnchecked>>
    declarationVertexShaderOutputAssignments()
      throws ParserError,
        IOException,
        LexerError,
        ConstraintError
  {
    final List<UASTIDShaderVertexOutputAssignment<UASTIUnchecked>> assigns =
      new ArrayList<UASTIDShaderVertexOutputAssignment<UASTIUnchecked>>();

    // Mandatory first assignment.
    assigns.add(this.declarationVertexShaderOutputAssignment());
    this.parserConsumeExact(Type.TOKEN_SEMICOLON);

    for (;;) {
      switch (this.token.getType()) {
        case TOKEN_OUT:
          assigns.add(this.declarationVertexShaderOutputAssignment());
          this.parserConsumeExact(Type.TOKEN_SEMICOLON);
          break;
        // $CASES-OMITTED$
        default:
          return assigns;
      }
    }
  }
  /**
   * Parse a vertex shader parameter declaration:
   * {@code parameter name : type.path}.
   */
  public @Nonnull
    UASTIDShaderVertexParameter<UASTIUnchecked>
    declarationVertexShaderParameter()
      throws ParserError,
        IOException,
        LexerError,
        ConstraintError
  {
    this.parserConsumeExact(Type.TOKEN_PARAMETER);
    this.parserExpectExact(Type.TOKEN_IDENTIFIER_LOWER);
    final TokenIdentifierLower name = (TokenIdentifierLower) this.token;
    this.parserConsumeExact(Type.TOKEN_IDENTIFIER_LOWER);
    this.parserConsumeExact(Type.TOKEN_COLON);
    return new UASTIDShaderVertexParameter<UASTIUnchecked>(
      name,
      this.declarationTypePath());
  }
  /**
   * Parse exactly one vertex shader parameter declaration, dispatching on
   * the leading keyword: {@code in}, {@code out}, or {@code parameter}.
   * Any other token raises a parse error via parserExpectOneOf.
   */
  public @Nonnull
    UASTIDShaderVertexParameters<UASTIUnchecked>
    declarationVertexShaderParameterDeclaration()
      throws ParserError,
        IOException,
        LexerError,
        ConstraintError
  {
    this.parserExpectOneOf(new Type[] {
      Type.TOKEN_IN,
      Type.TOKEN_OUT,
      Type.TOKEN_PARAMETER });

    switch (this.token.getType()) {
      case TOKEN_IN:
        return this.declarationVertexShaderInput();
      case TOKEN_OUT:
        return this.declarationVertexShaderOutput();
      case TOKEN_PARAMETER:
        return this.declarationVertexShaderParameter();
        // $CASES-OMITTED$
      default:
        // Unreachable: parserExpectOneOf has already rejected other tokens.
        throw new UnreachableCodeException();
    }
  }
  /**
   * Parse zero or more semicolon-terminated vertex shader parameter
   * declarations ({@code in}/{@code out}/{@code parameter}), stopping at
   * the first token that starts none of them.
   */
  public @Nonnull
    List<UASTIDShaderVertexParameters<UASTIUnchecked>>
    declarationVertexShaderParameterDeclarations()
      throws ParserError,
        IOException,
        LexerError,
        ConstraintError
  {
    final List<UASTIDShaderVertexParameters<UASTIUnchecked>> declarations =
      new ArrayList<UASTIDShaderVertexParameters<UASTIUnchecked>>();

    for (;;) {
      switch (this.token.getType()) {
        case TOKEN_IN:
        case TOKEN_OUT:
        case TOKEN_PARAMETER:
          declarations
            .add(this.declarationVertexShaderParameterDeclaration());
          this.parserConsumeExact(Type.TOKEN_SEMICOLON);
          break;
        // $CASES-OMITTED$
        default:
          return declarations;
      }
    }
  }
public @Nonnull UASTIExpression<UASTIUnchecked> expression()
throws ParserError,
IOException,
LexerError,
ConstraintError
{
return this.expressionPost(this.expressionPre());
}
  /**
   * Parse a parenthesized, comma-separated argument list for a function
   * application or {@code new} expression: {@code ( expr , expr , ... )}.
   * At least one argument is required by this grammar.
   */
  public @Nonnull
    List<UASTIExpression<UASTIUnchecked>>
    expressionApplicationArguments()
      throws ParserError,
        IOException,
        LexerError,
        ConstraintError
  {
    this.parserConsumeExact(Type.TOKEN_ROUND_LEFT);

    final ArrayList<UASTIExpression<UASTIUnchecked>> arguments =
      new ArrayList<UASTIExpression<UASTIUnchecked>>();
    // Mandatory first argument; further arguments follow commas.
    arguments.add(this.expression());

    boolean done = false;
    while (done == false) {
      switch (this.token.getType()) {
        case TOKEN_COMMA:
          this.parserConsumeExact(Type.TOKEN_COMMA);
          arguments.add(this.expression());
          break;
        // $CASES-OMITTED$
        default:
          done = true;
          break;
      }
    }

    this.parserConsumeExact(Type.TOKEN_ROUND_RIGHT);
    return arguments;
  }
  /**
   * Parse a boolean literal expression. The token is wrapped before being
   * consumed so the AST node retains the original literal token.
   */
  public @Nonnull UASTIEBoolean<UASTIUnchecked> expressionBoolean()
    throws ConstraintError,
      ParserError,
      IOException,
      LexerError
  {
    this.parserExpectExact(Type.TOKEN_LITERAL_BOOLEAN);
    final UASTIEBoolean<UASTIUnchecked> t =
      new UASTIEBoolean<UASTIUnchecked>((TokenLiteralBoolean) this.token);
    this.parserConsumeAny();
    return t;
  }
  /**
   * Parse a conditional expression:
   * {@code if cond then left else right end}.
   */
  public @Nonnull UASTIEConditional<UASTIUnchecked> expressionConditional()
    throws ParserError,
      ConstraintError,
      IOException,
      LexerError
  {
    this.parserConsumeExact(Type.TOKEN_IF);
    final UASTIExpression<UASTIUnchecked> econd = this.expression();
    this.parserConsumeExact(Type.TOKEN_THEN);
    final UASTIExpression<UASTIUnchecked> eleft = this.expression();
    this.parserConsumeExact(Type.TOKEN_ELSE);
    final UASTIExpression<UASTIUnchecked> eright = this.expression();
    this.parserConsumeExact(Type.TOKEN_END);
    return new UASTIEConditional<UASTIUnchecked>(econd, eleft, eright);
  }
  /**
   * Parse a decimal integer literal expression. The token is wrapped before
   * being consumed so the AST node retains the original literal token.
   */
  public @Nonnull UASTIEInteger<UASTIUnchecked> expressionInteger()
    throws ParserError,
      ConstraintError,
      IOException,
      LexerError
  {
    this.parserExpectExact(Type.TOKEN_LITERAL_INTEGER_DECIMAL);
    final UASTIEInteger<UASTIUnchecked> t =
      new UASTIEInteger<UASTIUnchecked>((TokenLiteralInteger) this.token);
    this.parserConsumeAny();
    return t;
  }
  /**
   * Parse a let expression: {@code let <value bindings> in body end}.
   * The original {@code let} token is retained in the AST (presumably for
   * error reporting positions — confirm against the AST type).
   */
  public @Nonnull UASTIELet<UASTIUnchecked> expressionLet()
    throws ParserError,
      ConstraintError,
      IOException,
      LexerError
  {
    this.parserExpectExact(Type.TOKEN_LET);
    final TokenLet let = (TokenLet) this.token;
    this.parserConsumeExact(Type.TOKEN_LET);
    final List<UASTIDValueLocal<UASTIUnchecked>> bindings =
      this.declarationValueLocals();
    this.parserConsumeExact(Type.TOKEN_IN);
    final UASTIExpression<UASTIUnchecked> body = this.expression();
    this.parserConsumeExact(Type.TOKEN_END);
    return new UASTIELet<UASTIUnchecked>(let, bindings, body);
  }
public @Nonnull UASTIENew<UASTIUnchecked> expressionNew()
throws ParserError,
IOException,
LexerError,
ConstraintError
{
this.parserConsumeExact(Type.TOKEN_NEW);
final UASTITypePath path = this.declarationTypePath();
return new UASTIENew<UASTIUnchecked>(
path,
this.expressionApplicationArguments());
}
  /**
   * Parse zero or more postfix operators applied to the expression
   * <code>e</code>: record projection ("." field) or swizzle ("["
   * fields "]"). Recurses until the current token begins neither form,
   * so chains such as "e.x[y z]" fold left-to-right.
   */
  private @Nonnull UASTIExpression<UASTIUnchecked> expressionPost(
    final @Nonnull UASTIExpression<UASTIUnchecked> e)
    throws ParserError,
      IOException,
      LexerError,
      ConstraintError
  {
    switch (this.token.getType()) {
      case TOKEN_DOT:
        return this.expressionPost(this.expressionRecordProjection(e));
      case TOKEN_SQUARE_LEFT:
        return this.expressionPost(this.expressionSwizzle(e));
        // $CASES-OMITTED$
      default:
        // No postfix operator follows; the expression is complete.
        return e;
    }
  }
  /**
   * Parse a primary (prefix) expression, dispatching on the current
   * token: literals, variables/applications, conditionals, let, new,
   * and record expressions. Raises a ParserError if the current token
   * cannot begin an expression.
   */
  private @Nonnull UASTIExpression<UASTIUnchecked> expressionPre()
    throws ParserError,
      ConstraintError,
      IOException,
      LexerError
  {
    this.parserExpectOneOf(new Type[] {
      Type.TOKEN_LITERAL_INTEGER_DECIMAL,
      Type.TOKEN_LITERAL_BOOLEAN,
      Type.TOKEN_LITERAL_REAL,
      Type.TOKEN_IDENTIFIER_LOWER,
      Type.TOKEN_IDENTIFIER_UPPER,
      Type.TOKEN_IF,
      Type.TOKEN_LET,
      Type.TOKEN_NEW,
      Type.TOKEN_RECORD });

    switch (this.token.getType()) {
      case TOKEN_LITERAL_INTEGER_DECIMAL:
        return this.expressionInteger();
      case TOKEN_LITERAL_REAL:
        return this.expressionReal();
      case TOKEN_LITERAL_BOOLEAN:
        return this.expressionBoolean();
      case TOKEN_IDENTIFIER_LOWER:
        return this.expressionVariableOrApplication();
      case TOKEN_IDENTIFIER_UPPER:
        // An upper-case identifier begins a module-qualified
        // variable/application path.
        return this.expressionVariableOrApplication();
      case TOKEN_IF:
        return this.expressionConditional();
      case TOKEN_LET:
        return this.expressionLet();
      case TOKEN_NEW:
        return this.expressionNew();
      case TOKEN_RECORD:
        return this.expressionRecord();
        // $CASES-OMITTED$
      default:
        // parserExpectOneOf above guarantees one of the listed cases.
        throw new UnreachableCodeException();
    }
  }
public @Nonnull UASTIEReal<UASTIUnchecked> expressionReal()
throws ParserError,
ConstraintError,
IOException,
LexerError
{
this.parserExpectExact(Type.TOKEN_LITERAL_REAL);
final UASTIEReal<UASTIUnchecked> t =
new UASTIEReal<UASTIUnchecked>((TokenLiteralReal) this.token);
this.parserConsumeAny();
return t;
}
public @Nonnull UASTIERecord<UASTIUnchecked> expressionRecord()
throws ParserError,
IOException,
LexerError,
ConstraintError
{
this.parserConsumeExact(Type.TOKEN_RECORD);
final UASTITypePath path = this.declarationTypePath();
this.parserConsumeExact(Type.TOKEN_CURLY_LEFT);
final List<UASTIRecordFieldAssignment<UASTIUnchecked>> fields =
new ArrayList<UASTIRecordFieldAssignment<UASTIUnchecked>>();
fields.add(this.expressionRecordFieldAssignment());
this.expressionRecordActual(fields);
this.parserConsumeExact(Type.TOKEN_CURLY_RIGHT);
return new UASTIERecord<UASTIUnchecked>(path, fields);
}
private void expressionRecordActual(
final @Nonnull List<UASTIRecordFieldAssignment<UASTIUnchecked>> fields)
throws ParserError,
IOException,
LexerError,
ConstraintError
{
switch (this.token.getType()) {
case TOKEN_COMMA:
this.parserConsumeExact(Type.TOKEN_COMMA);
fields.add(this.expressionRecordFieldAssignment());
this.expressionRecordActual(fields);
break;
// $CASES-OMITTED$
default:
return;
}
}
public @Nonnull
UASTIRecordFieldAssignment<UASTIUnchecked>
expressionRecordFieldAssignment()
throws ConstraintError,
ParserError,
IOException,
LexerError
{
this.parserExpectExact(Type.TOKEN_IDENTIFIER_LOWER);
final TokenIdentifierLower name = (TokenIdentifierLower) this.token;
this.parserConsumeExact(Type.TOKEN_IDENTIFIER_LOWER);
this.parserConsumeExact(Type.TOKEN_EQUALS);
return new UASTIRecordFieldAssignment<UASTIUnchecked>(
name,
this.expression());
}
public @Nonnull
UASTIERecordProjection<UASTIUnchecked>
expressionRecordProjection(
final @Nonnull UASTIExpression<UASTIUnchecked> e)
throws ConstraintError,
ParserError,
IOException,
LexerError
{
this.parserConsumeExact(Type.TOKEN_DOT);
this.parserExpectExact(Type.TOKEN_IDENTIFIER_LOWER);
final UASTIERecordProjection<UASTIUnchecked> r =
new UASTIERecordProjection<UASTIUnchecked>(
e,
(TokenIdentifierLower) this.token);
this.parserConsumeExact(Type.TOKEN_IDENTIFIER_LOWER);
return r;
}
public UASTIESwizzle<UASTIUnchecked> expressionSwizzle(
final @Nonnull UASTIExpression<UASTIUnchecked> e)
throws ParserError,
IOException,
LexerError,
ConstraintError
{
this.parserConsumeExact(Type.TOKEN_SQUARE_LEFT);
final ArrayList<TokenIdentifierLower> fields =
new ArrayList<TokenIdentifierLower>();
fields.add(this.expressionSwizzleField());
boolean done = false;
while (done == false) {
switch (this.token.getType()) {
case TOKEN_IDENTIFIER_LOWER:
fields.add(this.expressionSwizzleField());
break;
// $CASES-OMITTED$
default:
done = true;
}
}
this.parserConsumeExact(Type.TOKEN_SQUARE_RIGHT);
return new UASTIESwizzle<UASTIUnchecked>(e, fields);
}
public @Nonnull TokenIdentifierLower expressionSwizzleField()
throws ParserError,
ConstraintError,
IOException,
LexerError
{
this.parserExpectExact(Type.TOKEN_IDENTIFIER_LOWER);
final TokenIdentifierLower name = (TokenIdentifierLower) this.token;
this.parserConsumeExact(Type.TOKEN_IDENTIFIER_LOWER);
return name;
}
  /**
   * Parse a value path and decide, from one token of lookahead, whether
   * it is a plain variable reference or a function application: a
   * following "(" begins an argument list.
   */
  public @Nonnull
    UASTIExpression<UASTIUnchecked>
    expressionVariableOrApplication()
      throws ParserError,
        IOException,
        LexerError,
        ConstraintError
  {
    final UASTIValuePath path = this.declarationValuePath();
    switch (this.token.getType()) {
      case TOKEN_ROUND_LEFT:
        return new UASTIEApplication<UASTIUnchecked>(
          path,
          this.expressionApplicationArguments());
        // $CASES-OMITTED$
      default:
        return new UASTIEVariable<UASTIUnchecked>(path);
    }
  }
  /**
   * Unconditionally advance the single-token lookahead to the next
   * token from the lexer.
   */
  protected void parserConsumeAny()
    throws IOException,
      LexerError,
      ConstraintError
  {
    this.token = this.lexer.token();
  }
  /**
   * Require that the current token has the given type (raising a
   * ParserError otherwise), then advance to the next token.
   */
  protected void parserConsumeExact(
    final @Nonnull Token.Type type)
    throws ParserError,
      ConstraintError,
      IOException,
      LexerError
  {
    this.parserExpectExact(type);
    this.parserConsumeAny();
  }
  /**
   * Require that the current token has the given type, without
   * consuming it. On mismatch, builds a diagnostic in the shared
   * message buffer and raises a ParserError carrying the file and the
   * offending token's position.
   */
  protected void parserExpectExact(
    final @Nonnull Token.Type type)
    throws ParserError,
      ConstraintError
  {
    if (this.token.getType() != type) {
      // Reset the shared buffer before building the message.
      this.message.setLength(0);
      this.message.append("Expected ");
      this.message.append(type.getDescription());
      this.message.append(" but got ");
      this.parserShowToken();
      throw new ParserError(
        this.message.toString(),
        this.lexer.getFile(),
        this.token.getPosition());
    }
  }
protected void parserExpectOneOf(
final @Nonnull Token.Type types[])
throws ParserError,
ConstraintError
{
for (final Type want : types) {
if (this.token.getType() == want) {
return;
}
}
this.message.setLength(0);
this.message.append("Expected one of {");
for (int index = 0; index < types.length; ++index) {
final Type t = types[index];
this.message.append(t);
if ((index + 1) != types.length) {
this.message.append(", ");
}
}
this.message.append("} but got ");
this.parserShowToken();
throw new ParserError(
this.message.toString(),
this.lexer.getFile(),
this.token.getPosition());
}
  /**
   * Append a description of the current token to the shared message
   * buffer. Identifier tokens additionally show the actual lexeme, as
   * "description('text')".
   */
  private void parserShowToken()
  {
    this.message.append(this.token.getType().getDescription());
    switch (this.token.getType()) {
      case TOKEN_IDENTIFIER_LOWER:
      {
        final TokenIdentifierLower t =
          (Token.TokenIdentifierLower) this.token;
        this.message.append("('");
        this.message.append(t.getActual());
        this.message.append("')");
        break;
      }
      case TOKEN_IDENTIFIER_UPPER:
      {
        final TokenIdentifierUpper t =
          (Token.TokenIdentifierUpper) this.token;
        this.message.append("('");
        this.message.append(t.getActual());
        this.message.append("')");
        break;
      }
      // $CASES-OMITTED$
      default:
        // Other token kinds are fully described by their type.
        break;
    }
  }
public @Nonnull UASTIUnit<UASTIUnchecked> unit()
throws ConstraintError,
ParserError,
IOException,
LexerError
{
final UASTIDPackage<UASTIUnchecked> pack = this.declarationPackage();
this.parserConsumeExact(Type.TOKEN_SEMICOLON);
final List<UASTIDModule<UASTIUnchecked>> modules =
this.declarationModules();
return new UASTIUnit<UASTIUnchecked>(this.lexer.getFile(), pack, modules);
}
}
| io7m-jparasol-compiler-core/src/main/java/com/io7m/jparasol/parser/Parser.java | /*
* Copyright © 2013 <[email protected]> http://io7m.com
*
* Permission to use, copy, modify, and/or distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
* SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
* IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
package com.io7m.jparasol.parser;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import javax.annotation.Nonnull;
import com.io7m.jaux.Constraints;
import com.io7m.jaux.Constraints.ConstraintError;
import com.io7m.jaux.UnreachableCodeException;
import com.io7m.jaux.functional.Option;
import com.io7m.jaux.functional.Option.None;
import com.io7m.jparasol.ModulePath;
import com.io7m.jparasol.PackagePath;
import com.io7m.jparasol.lexer.Lexer;
import com.io7m.jparasol.lexer.LexerError;
import com.io7m.jparasol.lexer.Token;
import com.io7m.jparasol.lexer.Token.TokenDiscard;
import com.io7m.jparasol.lexer.Token.TokenIdentifierLower;
import com.io7m.jparasol.lexer.Token.TokenIdentifierUpper;
import com.io7m.jparasol.lexer.Token.TokenLet;
import com.io7m.jparasol.lexer.Token.TokenLiteralBoolean;
import com.io7m.jparasol.lexer.Token.TokenLiteralInteger;
import com.io7m.jparasol.lexer.Token.TokenLiteralIntegerDecimal;
import com.io7m.jparasol.lexer.Token.TokenLiteralReal;
import com.io7m.jparasol.lexer.Token.Type;
import com.io7m.jparasol.untyped.ast.initial.UASTIDeclaration.UASTIDFunction;
import com.io7m.jparasol.untyped.ast.initial.UASTIDeclaration.UASTIDFunctionArgument;
import com.io7m.jparasol.untyped.ast.initial.UASTIDeclaration.UASTIDFunctionDefined;
import com.io7m.jparasol.untyped.ast.initial.UASTIDeclaration.UASTIDFunctionExternal;
import com.io7m.jparasol.untyped.ast.initial.UASTIDeclaration.UASTIDImport;
import com.io7m.jparasol.untyped.ast.initial.UASTIDeclaration.UASTIDModule;
import com.io7m.jparasol.untyped.ast.initial.UASTIDeclaration.UASTIDPackage;
import com.io7m.jparasol.untyped.ast.initial.UASTIDeclaration.UASTIDShader;
import com.io7m.jparasol.untyped.ast.initial.UASTIDeclaration.UASTIDShaderFragment;
import com.io7m.jparasol.untyped.ast.initial.UASTIDeclaration.UASTIDShaderFragmentInput;
import com.io7m.jparasol.untyped.ast.initial.UASTIDeclaration.UASTIDShaderFragmentLocal;
import com.io7m.jparasol.untyped.ast.initial.UASTIDeclaration.UASTIDShaderFragmentLocalDiscard;
import com.io7m.jparasol.untyped.ast.initial.UASTIDeclaration.UASTIDShaderFragmentLocalValue;
import com.io7m.jparasol.untyped.ast.initial.UASTIDeclaration.UASTIDShaderFragmentOutput;
import com.io7m.jparasol.untyped.ast.initial.UASTIDeclaration.UASTIDShaderFragmentOutputAssignment;
import com.io7m.jparasol.untyped.ast.initial.UASTIDeclaration.UASTIDShaderFragmentParameter;
import com.io7m.jparasol.untyped.ast.initial.UASTIDeclaration.UASTIDShaderFragmentParameters;
import com.io7m.jparasol.untyped.ast.initial.UASTIDeclaration.UASTIDShaderProgram;
import com.io7m.jparasol.untyped.ast.initial.UASTIDeclaration.UASTIDShaderVertex;
import com.io7m.jparasol.untyped.ast.initial.UASTIDeclaration.UASTIDShaderVertexInput;
import com.io7m.jparasol.untyped.ast.initial.UASTIDeclaration.UASTIDShaderVertexOutput;
import com.io7m.jparasol.untyped.ast.initial.UASTIDeclaration.UASTIDShaderVertexOutputAssignment;
import com.io7m.jparasol.untyped.ast.initial.UASTIDeclaration.UASTIDShaderVertexParameter;
import com.io7m.jparasol.untyped.ast.initial.UASTIDeclaration.UASTIDShaderVertexParameters;
import com.io7m.jparasol.untyped.ast.initial.UASTIDeclaration.UASTIDType;
import com.io7m.jparasol.untyped.ast.initial.UASTIDeclaration.UASTIDTypeRecord;
import com.io7m.jparasol.untyped.ast.initial.UASTIDeclaration.UASTIDTypeRecordField;
import com.io7m.jparasol.untyped.ast.initial.UASTIDeclaration.UASTIDValue;
import com.io7m.jparasol.untyped.ast.initial.UASTIDeclaration.UASTIDValueLocal;
import com.io7m.jparasol.untyped.ast.initial.UASTIDeclaration.UASTIDeclarationModuleLevel;
import com.io7m.jparasol.untyped.ast.initial.UASTIExpression;
import com.io7m.jparasol.untyped.ast.initial.UASTIExpression.UASTIEApplication;
import com.io7m.jparasol.untyped.ast.initial.UASTIExpression.UASTIEBoolean;
import com.io7m.jparasol.untyped.ast.initial.UASTIExpression.UASTIEConditional;
import com.io7m.jparasol.untyped.ast.initial.UASTIExpression.UASTIEInteger;
import com.io7m.jparasol.untyped.ast.initial.UASTIExpression.UASTIELet;
import com.io7m.jparasol.untyped.ast.initial.UASTIExpression.UASTIENew;
import com.io7m.jparasol.untyped.ast.initial.UASTIExpression.UASTIEReal;
import com.io7m.jparasol.untyped.ast.initial.UASTIExpression.UASTIERecord;
import com.io7m.jparasol.untyped.ast.initial.UASTIExpression.UASTIERecordProjection;
import com.io7m.jparasol.untyped.ast.initial.UASTIExpression.UASTIESwizzle;
import com.io7m.jparasol.untyped.ast.initial.UASTIExpression.UASTIEVariable;
import com.io7m.jparasol.untyped.ast.initial.UASTIExpression.UASTIRecordFieldAssignment;
import com.io7m.jparasol.untyped.ast.initial.UASTIShaderPath;
import com.io7m.jparasol.untyped.ast.initial.UASTITypePath;
import com.io7m.jparasol.untyped.ast.initial.UASTIUnchecked;
import com.io7m.jparasol.untyped.ast.initial.UASTIUnit;
import com.io7m.jparasol.untyped.ast.initial.UASTIValuePath;
public final class Parser
{
public static @Nonnull Parser newInternalParser(
final @Nonnull Lexer lexer)
throws ConstraintError,
IOException,
LexerError
{
return new Parser(true, lexer);
}
public static @Nonnull Parser newParser(
final @Nonnull Lexer lexer)
throws ConstraintError,
IOException,
LexerError
{
return new Parser(false, lexer);
}
  // True if this parser accepts "internal" units (permitting "external"
  // function declarations; see declarationFunction).
  private final boolean internal;
  // Source of tokens.
  private final @Nonnull Lexer lexer;
  // Reusable buffer for building error messages.
  private final @Nonnull StringBuilder message;
  // Single-token lookahead: the current, not-yet-consumed token.
  private @Nonnull Token token;
  /**
   * Construct a parser reading tokens from the given lexer. Use the
   * static factory methods newParser/newInternalParser.
   */
  private Parser(
    final boolean internal,
    final @Nonnull Lexer lexer)
    throws ConstraintError,
      IOException,
      LexerError
  {
    this.internal = internal;
    this.lexer = Constraints.constrainNotNull(lexer, "Lexer");
    this.message = new StringBuilder();
    // Prime the single-token lookahead.
    this.token = lexer.token();
  }
public @Nonnull
UASTIDShaderFragment<UASTIUnchecked>
declarationFragmentShader()
throws ParserError,
IOException,
LexerError,
ConstraintError
{
this.parserConsumeExact(Type.TOKEN_FRAGMENT);
this.parserExpectExact(Type.TOKEN_IDENTIFIER_LOWER);
final TokenIdentifierLower name = (TokenIdentifierLower) this.token;
this.parserConsumeExact(Type.TOKEN_IDENTIFIER_LOWER);
this.parserConsumeExact(Type.TOKEN_IS);
final List<UASTIDShaderFragmentParameters<UASTIUnchecked>> decls =
this.declarationFragmentShaderParameterDeclarations();
final List<UASTIDShaderFragmentInput<UASTIUnchecked>> inputs =
new ArrayList<UASTIDShaderFragmentInput<UASTIUnchecked>>();
final List<UASTIDShaderFragmentOutput<UASTIUnchecked>> outputs =
new ArrayList<UASTIDShaderFragmentOutput<UASTIUnchecked>>();
final List<UASTIDShaderFragmentParameter<UASTIUnchecked>> parameters =
new ArrayList<UASTIDShaderFragmentParameter<UASTIUnchecked>>();
for (int index = 0; index < decls.size(); ++index) {
final UASTIDShaderFragmentParameters<UASTIUnchecked> d =
decls.get(index);
if (d instanceof UASTIDShaderFragmentInput<?>) {
inputs.add((UASTIDShaderFragmentInput<UASTIUnchecked>) d);
continue;
}
if (d instanceof UASTIDShaderFragmentOutput<?>) {
outputs.add((UASTIDShaderFragmentOutput<UASTIUnchecked>) d);
continue;
}
if (d instanceof UASTIDShaderFragmentParameter<?>) {
parameters.add((UASTIDShaderFragmentParameter<UASTIUnchecked>) d);
continue;
}
}
this.parserExpectOneOf(new Type[] { Type.TOKEN_WITH, Type.TOKEN_AS });
final List<UASTIDShaderFragmentLocal<UASTIUnchecked>> values;
switch (this.token.getType()) {
case TOKEN_WITH:
{
this.parserConsumeExact(Type.TOKEN_WITH);
values = this.declarationFragmentShaderLocals();
break;
}
// $CASES-OMITTED$
default:
{
values = new ArrayList<UASTIDShaderFragmentLocal<UASTIUnchecked>>();
break;
}
}
this.parserConsumeExact(Type.TOKEN_AS);
final List<UASTIDShaderFragmentOutputAssignment<UASTIUnchecked>> assigns =
this.declarationFragmentShaderOutputAssignments();
this.parserConsumeExact(Type.TOKEN_END);
return new UASTIDShaderFragment<UASTIUnchecked>(
name,
inputs,
outputs,
parameters,
values,
assigns);
}
public
UASTIDShaderFragmentInput<UASTIUnchecked>
declarationFragmentShaderInput()
throws ParserError,
IOException,
LexerError,
ConstraintError
{
this.parserConsumeExact(Type.TOKEN_IN);
this.parserExpectExact(Type.TOKEN_IDENTIFIER_LOWER);
final TokenIdentifierLower name = (TokenIdentifierLower) this.token;
this.parserConsumeExact(Type.TOKEN_IDENTIFIER_LOWER);
this.parserConsumeExact(Type.TOKEN_COLON);
return new UASTIDShaderFragmentInput<UASTIUnchecked>(
name,
this.declarationTypePath());
}
public @Nonnull
UASTIDShaderFragmentLocalDiscard<UASTIUnchecked>
declarationFragmentShaderLocalDiscard()
throws ParserError,
ConstraintError,
IOException,
LexerError
{
this.parserExpectExact(Type.TOKEN_DISCARD);
final TokenDiscard discard = (TokenDiscard) this.token;
this.parserConsumeExact(Type.TOKEN_DISCARD);
this.parserConsumeExact(Type.TOKEN_ROUND_LEFT);
final UASTIExpression<UASTIUnchecked> expr = this.expression();
this.parserConsumeExact(Type.TOKEN_ROUND_RIGHT);
return new UASTIDShaderFragmentLocalDiscard<UASTIUnchecked>(discard, expr);
}
  /**
   * Parse zero or more fragment shader locals: value declarations and
   * discard declarations, each terminated by a semicolon. Stops at the
   * first token that begins neither form.
   */
  public
    List<UASTIDShaderFragmentLocal<UASTIUnchecked>>
    declarationFragmentShaderLocals()
      throws ParserError,
        IOException,
        LexerError,
        ConstraintError
  {
    final ArrayList<UASTIDShaderFragmentLocal<UASTIUnchecked>> locals =
      new ArrayList<UASTIDShaderFragmentLocal<UASTIUnchecked>>();

    for (;;) {
      switch (this.token.getType()) {
        case TOKEN_VALUE:
          locals.add(new UASTIDShaderFragmentLocalValue<UASTIUnchecked>(this
            .declarationValueLocal()));
          this.parserConsumeExact(Type.TOKEN_SEMICOLON);
          break;
        case TOKEN_DISCARD:
          locals.add(this.declarationFragmentShaderLocalDiscard());
          this.parserConsumeExact(Type.TOKEN_SEMICOLON);
          break;
        // $CASES-OMITTED$
        default:
          return locals;
      }
    }
  }
public
UASTIDShaderFragmentOutput<UASTIUnchecked>
declarationFragmentShaderOutput()
throws ParserError,
IOException,
LexerError,
ConstraintError
{
this.parserConsumeExact(Type.TOKEN_OUT);
this.parserExpectExact(Type.TOKEN_IDENTIFIER_LOWER);
final TokenIdentifierLower name = (TokenIdentifierLower) this.token;
this.parserConsumeExact(Type.TOKEN_IDENTIFIER_LOWER);
this.parserConsumeExact(Type.TOKEN_COLON);
final UASTITypePath type = this.declarationTypePath();
this.parserConsumeExact(Type.TOKEN_AS);
this.parserExpectExact(Type.TOKEN_LITERAL_INTEGER_DECIMAL);
final TokenLiteralIntegerDecimal index =
(TokenLiteralIntegerDecimal) this.token;
this.parserConsumeExact(Type.TOKEN_LITERAL_INTEGER_DECIMAL);
return new UASTIDShaderFragmentOutput<UASTIUnchecked>(name, type, index
.getValue()
.intValue());
}
public @Nonnull
UASTIDShaderFragmentOutputAssignment<UASTIUnchecked>
declarationFragmentShaderOutputAssignment()
throws ParserError,
IOException,
LexerError,
ConstraintError
{
this.parserConsumeExact(Type.TOKEN_OUT);
this.parserExpectExact(Type.TOKEN_IDENTIFIER_LOWER);
final TokenIdentifierLower name = (TokenIdentifierLower) this.token;
this.parserConsumeExact(Type.TOKEN_IDENTIFIER_LOWER);
this.parserConsumeExact(Type.TOKEN_EQUALS);
final UASTIEVariable<UASTIUnchecked> value =
new UASTIEVariable<UASTIUnchecked>(this.declarationValuePath());
return new UASTIDShaderFragmentOutputAssignment<UASTIUnchecked>(
name,
value);
}
public @Nonnull
List<UASTIDShaderFragmentOutputAssignment<UASTIUnchecked>>
declarationFragmentShaderOutputAssignments()
throws ParserError,
IOException,
LexerError,
ConstraintError
{
final List<UASTIDShaderFragmentOutputAssignment<UASTIUnchecked>> assigns =
new ArrayList<UASTIDShaderFragmentOutputAssignment<UASTIUnchecked>>();
for (;;) {
switch (this.token.getType()) {
case TOKEN_OUT:
assigns.add(this.declarationFragmentShaderOutputAssignment());
this.parserConsumeExact(Type.TOKEN_SEMICOLON);
break;
// $CASES-OMITTED$
default:
return assigns;
}
}
}
public @Nonnull
UASTIDShaderFragmentParameter<UASTIUnchecked>
declarationFragmentShaderParameter()
throws ParserError,
IOException,
LexerError,
ConstraintError
{
this.parserConsumeExact(Type.TOKEN_PARAMETER);
this.parserExpectExact(Type.TOKEN_IDENTIFIER_LOWER);
final TokenIdentifierLower name = (TokenIdentifierLower) this.token;
this.parserConsumeExact(Type.TOKEN_IDENTIFIER_LOWER);
this.parserConsumeExact(Type.TOKEN_COLON);
return new UASTIDShaderFragmentParameter<UASTIUnchecked>(
name,
this.declarationTypePath());
}
  /**
   * Parse a single fragment shader in/out/parameter declaration,
   * dispatching on the current token. Raises a ParserError if the
   * current token begins none of the three forms.
   */
  public @Nonnull
    UASTIDShaderFragmentParameters<UASTIUnchecked>
    declarationFragmentShaderParameterDeclaration()
      throws ParserError,
        IOException,
        LexerError,
        ConstraintError
  {
    this.parserExpectOneOf(new Type[] {
      Type.TOKEN_IN,
      Type.TOKEN_OUT,
      Type.TOKEN_PARAMETER });

    switch (this.token.getType()) {
      case TOKEN_IN:
        return this.declarationFragmentShaderInput();
      case TOKEN_OUT:
        return this.declarationFragmentShaderOutput();
      case TOKEN_PARAMETER:
        return this.declarationFragmentShaderParameter();
        // $CASES-OMITTED$
      default:
        // parserExpectOneOf above guarantees one of the listed cases.
        throw new UnreachableCodeException();
    }
  }
  /**
   * Parse zero or more fragment shader in/out/parameter declarations,
   * each terminated by a semicolon. Stops at the first token that
   * begins none of the three forms.
   */
  public @Nonnull
    List<UASTIDShaderFragmentParameters<UASTIUnchecked>>
    declarationFragmentShaderParameterDeclarations()
      throws ParserError,
        IOException,
        LexerError,
        ConstraintError
  {
    final List<UASTIDShaderFragmentParameters<UASTIUnchecked>> declarations =
      new ArrayList<UASTIDShaderFragmentParameters<UASTIUnchecked>>();

    for (;;) {
      switch (this.token.getType()) {
        case TOKEN_IN:
        case TOKEN_OUT:
        case TOKEN_PARAMETER:
          declarations.add(this
            .declarationFragmentShaderParameterDeclaration());
          this.parserConsumeExact(Type.TOKEN_SEMICOLON);
          break;
        // $CASES-OMITTED$
        default:
          return declarations;
      }
    }
  }
public @Nonnull UASTIDFunction<UASTIUnchecked> declarationFunction()
throws ParserError,
IOException,
LexerError,
ConstraintError
{
this.parserConsumeExact(Type.TOKEN_FUNCTION);
this.parserExpectExact(Type.TOKEN_IDENTIFIER_LOWER);
final TokenIdentifierLower name = (TokenIdentifierLower) this.token;
this.parserConsumeExact(Type.TOKEN_IDENTIFIER_LOWER);
final List<UASTIDFunctionArgument<UASTIUnchecked>> args =
this.declarationFunctionArguments();
this.parserConsumeExact(Type.TOKEN_COLON);
final UASTITypePath type = this.declarationTypePath();
this.parserConsumeExact(Type.TOKEN_EQUALS);
switch (this.token.getType()) {
case TOKEN_EXTERNAL:
/**
* If parsing an "internal" unit, then allow "external" functions.
*/
if (this.internal) {
this.parserConsumeExact(Type.TOKEN_EXTERNAL);
this.parserExpectExact(Type.TOKEN_IDENTIFIER_LOWER);
final TokenIdentifierLower ext = (TokenIdentifierLower) this.token;
this.parserConsumeExact(Type.TOKEN_IDENTIFIER_LOWER);
return new UASTIDFunctionExternal<UASTIUnchecked>(
name,
args,
type,
ext);
}
/**
* Otherwise, attempt to parse "external" as an expression, which will
* result in an error.
*/
return new UASTIDFunctionDefined<UASTIUnchecked>(
name,
args,
type,
this.expression());
// $CASES-OMITTED$
default:
return new UASTIDFunctionDefined<UASTIUnchecked>(
name,
args,
type,
this.expression());
}
}
public @Nonnull
UASTIDFunctionArgument<UASTIUnchecked>
declarationFunctionArgument()
throws ParserError,
ConstraintError,
IOException,
LexerError
{
this.parserExpectExact(Type.TOKEN_IDENTIFIER_LOWER);
final TokenIdentifierLower name = (TokenIdentifierLower) this.token;
this.parserConsumeExact(Type.TOKEN_IDENTIFIER_LOWER);
this.parserConsumeExact(Type.TOKEN_COLON);
return new UASTIDFunctionArgument<UASTIUnchecked>(
name,
this.declarationTypePath());
}
public @Nonnull
List<UASTIDFunctionArgument<UASTIUnchecked>>
declarationFunctionArguments()
throws ParserError,
IOException,
LexerError,
ConstraintError
{
this.parserConsumeExact(Type.TOKEN_ROUND_LEFT);
final ArrayList<UASTIDFunctionArgument<UASTIUnchecked>> args =
new ArrayList<UASTIDFunctionArgument<UASTIUnchecked>>();
args.add(this.declarationFunctionArgument());
boolean done = false;
while (done == false) {
switch (this.token.getType()) {
case TOKEN_COMMA:
this.parserConsumeExact(Type.TOKEN_COMMA);
args.add(this.declarationFunctionArgument());
break;
// $CASES-OMITTED$
default:
done = true;
}
}
this.parserConsumeExact(Type.TOKEN_ROUND_RIGHT);
return args;
}
  /**
   * Parse an import declaration: "import pkg.path Module", optionally
   * followed by "as Rename".
   */
  public @Nonnull UASTIDImport<UASTIUnchecked> declarationImport()
    throws ParserError,
      IOException,
      LexerError,
      ConstraintError
  {
    this.parserConsumeExact(Type.TOKEN_IMPORT);
    final PackagePath path = this.declarationPackagePath();
    this.parserExpectExact(Type.TOKEN_IDENTIFIER_UPPER);
    final TokenIdentifierUpper name = (TokenIdentifierUpper) this.token;
    this.parserConsumeExact(Type.TOKEN_IDENTIFIER_UPPER);
    switch (this.token.getType()) {
      case TOKEN_AS:
      {
        // Optional rename clause: "as NewName".
        this.parserConsumeExact(Type.TOKEN_AS);
        this.parserExpectExact(Type.TOKEN_IDENTIFIER_UPPER);
        final TokenIdentifierUpper rename = (TokenIdentifierUpper) this.token;
        this.parserConsumeExact(Type.TOKEN_IDENTIFIER_UPPER);
        return new UASTIDImport<UASTIUnchecked>(
          new ModulePath(path, name),
          Option.some(rename));
      }
      // $CASES-OMITTED$
      default:
        // No rename clause present.
        final None<TokenIdentifierUpper> none = Option.none();
        return new UASTIDImport<UASTIUnchecked>(
          new ModulePath(path, name),
          none);
    }
  }
public @Nonnull List<UASTIDImport<UASTIUnchecked>> declarationImports()
throws ParserError,
IOException,
LexerError,
ConstraintError
{
final ArrayList<UASTIDImport<UASTIUnchecked>> imports =
new ArrayList<UASTIDImport<UASTIUnchecked>>();
for (;;) {
switch (this.token.getType()) {
case TOKEN_IMPORT:
imports.add(this.declarationImport());
this.parserConsumeExact(Type.TOKEN_SEMICOLON);
break;
// $CASES-OMITTED$
default:
return imports;
}
}
}
public @Nonnull UASTIDModule<UASTIUnchecked> declarationModule()
throws ParserError,
IOException,
LexerError,
ConstraintError
{
this.parserConsumeExact(Type.TOKEN_MODULE);
this.parserExpectExact(Type.TOKEN_IDENTIFIER_UPPER);
final TokenIdentifierUpper name = (TokenIdentifierUpper) this.token;
this.parserConsumeExact(Type.TOKEN_IDENTIFIER_UPPER);
this.parserConsumeExact(Type.TOKEN_IS);
final List<UASTIDImport<UASTIUnchecked>> imports =
this.declarationImports();
final List<UASTIDeclarationModuleLevel<UASTIUnchecked>> declarations =
this.declarationModuleLevels();
this.parserConsumeExact(Type.TOKEN_END);
return new UASTIDModule<UASTIUnchecked>(name, imports, declarations);
}
  /**
   * Parse a single module-level declaration (value, function, type, or
   * shader), dispatching on the current token. Raises a ParserError if
   * the current token begins none of the four forms.
   */
  public @Nonnull
    UASTIDeclarationModuleLevel<UASTIUnchecked>
    declarationModuleLevel()
      throws ParserError,
        IOException,
        LexerError,
        ConstraintError
  {
    this.parserExpectOneOf(new Type[] {
      Type.TOKEN_VALUE,
      Type.TOKEN_FUNCTION,
      Type.TOKEN_TYPE,
      Type.TOKEN_SHADER });

    switch (this.token.getType()) {
      case TOKEN_VALUE:
        return this.declarationValue();
      case TOKEN_FUNCTION:
        return this.declarationFunction();
      case TOKEN_TYPE:
        return this.declarationType();
      case TOKEN_SHADER:
        return this.declarationShader();
        // $CASES-OMITTED$
      default:
        // parserExpectOneOf above guarantees one of the listed cases.
        throw new UnreachableCodeException();
    }
  }
  /**
   * Parse zero or more module-level declarations, each terminated by a
   * semicolon. Stops at the first token that begins none of the
   * value/function/type/shader forms.
   */
  public @Nonnull
    List<UASTIDeclarationModuleLevel<UASTIUnchecked>>
    declarationModuleLevels()
      throws ParserError,
        IOException,
        LexerError,
        ConstraintError
  {
    final ArrayList<UASTIDeclarationModuleLevel<UASTIUnchecked>> decls =
      new ArrayList<UASTIDeclarationModuleLevel<UASTIUnchecked>>();

    for (;;) {
      switch (this.token.getType()) {
        case TOKEN_VALUE:
        case TOKEN_FUNCTION:
        case TOKEN_TYPE:
        case TOKEN_SHADER:
          decls.add(this.declarationModuleLevel());
          this.parserConsumeExact(Type.TOKEN_SEMICOLON);
          break;
        // $CASES-OMITTED$
        default:
          return decls;
      }
    }
  }
public @Nonnull List<UASTIDModule<UASTIUnchecked>> declarationModules()
throws ParserError,
IOException,
LexerError,
ConstraintError
{
final List<UASTIDModule<UASTIUnchecked>> modules =
new ArrayList<UASTIDModule<UASTIUnchecked>>();
for (;;) {
switch (this.token.getType()) {
case TOKEN_MODULE:
modules.add(this.declarationModule());
this.parserConsumeExact(Type.TOKEN_SEMICOLON);
break;
// $CASES-OMITTED$
default:
return modules;
}
}
}
public @Nonnull UASTIDPackage<UASTIUnchecked> declarationPackage()
throws ParserError,
IOException,
LexerError,
ConstraintError
{
this.parserConsumeExact(Type.TOKEN_PACKAGE);
return new UASTIDPackage<UASTIUnchecked>(this.declarationPackagePath());
}
  /**
   * Parse a package path: zero or more lowercase identifiers separated
   * by dots ("a.b.c"). The path ends at the first component that is not
   * followed by a dot, or immediately if the current token is not a
   * lowercase identifier.
   */
  public @Nonnull PackagePath declarationPackagePath()
    throws ConstraintError,
      ParserError,
      IOException,
      LexerError
  {
    final ArrayList<TokenIdentifierLower> components =
      new ArrayList<TokenIdentifierLower>();

    boolean done = false;
    while (done == false) {
      switch (this.token.getType()) {
        case TOKEN_IDENTIFIER_LOWER:
        {
          components.add((TokenIdentifierLower) this.token);
          this.parserConsumeExact(Type.TOKEN_IDENTIFIER_LOWER);
          // A dot means another component follows; anything else ends
          // the path.
          switch (this.token.getType()) {
            case TOKEN_DOT:
              this.parserConsumeExact(Type.TOKEN_DOT);
              break;
            // $CASES-OMITTED$
            default:
              done = true;
              break;
          }
          break;
        }
        // $CASES-OMITTED$
        default:
          done = true;
          break;
      }
    }

    return new PackagePath(components);
  }
public @Nonnull
UASTIDShaderProgram<UASTIUnchecked>
declarationProgramShader()
throws ParserError,
IOException,
LexerError,
ConstraintError
{
this.parserConsumeExact(Type.TOKEN_PROGRAM);
this.parserExpectExact(Type.TOKEN_IDENTIFIER_LOWER);
final TokenIdentifierLower name = (TokenIdentifierLower) this.token;
this.parserConsumeExact(Type.TOKEN_IDENTIFIER_LOWER);
this.parserConsumeExact(Type.TOKEN_IS);
this.parserConsumeExact(Type.TOKEN_VERTEX);
final UASTIShaderPath vertex = this.declarationShaderPath();
this.parserConsumeExact(Type.TOKEN_SEMICOLON);
this.parserConsumeExact(Type.TOKEN_FRAGMENT);
final UASTIShaderPath fragment = this.declarationShaderPath();
this.parserConsumeExact(Type.TOKEN_SEMICOLON);
this.parserConsumeExact(Type.TOKEN_END);
return new UASTIDShaderProgram<UASTIUnchecked>(name, vertex, fragment);
}
  /**
   * Parse a shader declaration: "shader" followed by a vertex,
   * fragment, or program shader. Raises a ParserError if the token
   * after "shader" begins none of the three forms.
   */
  public @Nonnull UASTIDShader<UASTIUnchecked> declarationShader()
    throws ParserError,
      IOException,
      LexerError,
      ConstraintError
  {
    this.parserConsumeExact(Type.TOKEN_SHADER);
    this.parserExpectOneOf(new Type[] {
      Type.TOKEN_VERTEX,
      Type.TOKEN_FRAGMENT,
      Type.TOKEN_PROGRAM });

    switch (this.token.getType()) {
      case TOKEN_VERTEX:
        return this.declarationVertexShader();
      case TOKEN_FRAGMENT:
        return this.declarationFragmentShader();
      case TOKEN_PROGRAM:
        return this.declarationProgramShader();
        // $CASES-OMITTED$
      default:
        // parserExpectOneOf above guarantees one of the listed cases.
        throw new UnreachableCodeException();
    }
  }
  /**
   * Parse a shader path: either an unqualified lowercase name, or
   * "Module.name" with an uppercase module qualifier.
   */
  public @Nonnull UASTIShaderPath declarationShaderPath()
    throws ParserError,
      IOException,
      LexerError,
      ConstraintError
  {
    this.parserExpectOneOf(new Type[] {
      Type.TOKEN_IDENTIFIER_LOWER,
      Type.TOKEN_IDENTIFIER_UPPER });

    switch (this.token.getType()) {
      case TOKEN_IDENTIFIER_LOWER:
      {
        // Unqualified name: no module component.
        final TokenIdentifierLower name = (TokenIdentifierLower) this.token;
        this.parserConsumeExact(Type.TOKEN_IDENTIFIER_LOWER);
        final None<TokenIdentifierUpper> none = Option.none();
        return new UASTIShaderPath(none, name);
      }
      case TOKEN_IDENTIFIER_UPPER:
      {
        // Module-qualified name: "Module.name".
        final TokenIdentifierUpper module = (TokenIdentifierUpper) this.token;
        this.parserConsumeExact(Type.TOKEN_IDENTIFIER_UPPER);
        this.parserConsumeExact(Type.TOKEN_DOT);
        this.parserExpectExact(Type.TOKEN_IDENTIFIER_LOWER);
        final TokenIdentifierLower name = (TokenIdentifierLower) this.token;
        this.parserConsumeExact(Type.TOKEN_IDENTIFIER_LOWER);
        return new UASTIShaderPath(Option.some(module), name);
      }
      // $CASES-OMITTED$
      default:
        // parserExpectOneOf above guarantees one of the listed cases.
        throw new UnreachableCodeException();
    }
  }
  /**
   * Parse a type declaration: "type name is record fields end". Record
   * types are currently the only supported form.
   */
  public @Nonnull UASTIDType<UASTIUnchecked> declarationType()
    throws ConstraintError,
      ParserError,
      IOException,
      LexerError
  {
    this.parserConsumeExact(Type.TOKEN_TYPE);
    this.parserExpectExact(Type.TOKEN_IDENTIFIER_LOWER);
    final TokenIdentifierLower name = (TokenIdentifierLower) this.token;
    this.parserConsumeExact(Type.TOKEN_IDENTIFIER_LOWER);
    this.parserConsumeExact(Type.TOKEN_IS);
    this.parserExpectOneOf(new Type[] { Type.TOKEN_RECORD });
    switch (this.token.getType()) {
      case TOKEN_RECORD:
        this.parserConsumeExact(Type.TOKEN_RECORD);
        final List<UASTIDTypeRecordField<UASTIUnchecked>> fields =
          this.declarationTypeRecordFields();
        this.parserConsumeExact(Type.TOKEN_END);
        return new UASTIDTypeRecord<UASTIUnchecked>(name, fields);
        // $CASES-OMITTED$
      default:
        // parserExpectOneOf above guarantees TOKEN_RECORD.
        throw new UnreachableCodeException();
    }
  }
  /**
   * Parse a type path: either a bare lowercase type name, or a
   * module-qualified path {@code Module.name}.
   * NOTE(review): structurally identical to declarationShaderPath and
   * declarationValuePath except for the constructed path type.
   */
  public @Nonnull UASTITypePath declarationTypePath()
    throws ParserError,
      IOException,
      LexerError,
      ConstraintError
  {
    this.parserExpectOneOf(new Type[] {
      Type.TOKEN_IDENTIFIER_LOWER,
      Type.TOKEN_IDENTIFIER_UPPER });
    switch (this.token.getType()) {
      case TOKEN_IDENTIFIER_LOWER:
      {
        final TokenIdentifierLower name = (TokenIdentifierLower) this.token;
        this.parserConsumeExact(Type.TOKEN_IDENTIFIER_LOWER);
        final None<TokenIdentifierUpper> none = Option.none();
        return new UASTITypePath(none, name);
      }
      case TOKEN_IDENTIFIER_UPPER:
      {
        // Qualified: Module '.' name
        final TokenIdentifierUpper module = (TokenIdentifierUpper) this.token;
        this.parserConsumeExact(Type.TOKEN_IDENTIFIER_UPPER);
        this.parserConsumeExact(Type.TOKEN_DOT);
        this.parserExpectExact(Type.TOKEN_IDENTIFIER_LOWER);
        final TokenIdentifierLower name = (TokenIdentifierLower) this.token;
        this.parserConsumeExact(Type.TOKEN_IDENTIFIER_LOWER);
        return new UASTITypePath(Option.some(module), name);
      }
      // $CASES-OMITTED$
      default:
        throw new UnreachableCodeException();
    }
  }
  /**
   * Parse a single record field declaration: {@code name : TypePath}.
   */
  public @Nonnull
    UASTIDTypeRecordField<UASTIUnchecked>
    declarationTypeRecordField()
      throws ConstraintError,
        ParserError,
        IOException,
        LexerError
  {
    this.parserExpectExact(Type.TOKEN_IDENTIFIER_LOWER);
    final TokenIdentifierLower name = (TokenIdentifierLower) this.token;
    this.parserConsumeExact(Type.TOKEN_IDENTIFIER_LOWER);
    this.parserConsumeExact(Type.TOKEN_COLON);
    final UASTITypePath type = this.declarationTypePath();
    return new UASTIDTypeRecordField<UASTIUnchecked>(name, type);
  }
  /**
   * Parse a comma-separated, non-empty list of record field declarations.
   * At least one field is required; further fields follow commas.
   */
  public @Nonnull
    List<UASTIDTypeRecordField<UASTIUnchecked>>
    declarationTypeRecordFields()
      throws ParserError,
        IOException,
        LexerError,
        ConstraintError
  {
    final ArrayList<UASTIDTypeRecordField<UASTIUnchecked>> args =
      new ArrayList<UASTIDTypeRecordField<UASTIUnchecked>>();
    args.add(this.declarationTypeRecordField());
    boolean done = false;
    while (done == false) {
      switch (this.token.getType()) {
        case TOKEN_COMMA:
          this.parserConsumeExact(Type.TOKEN_COMMA);
          args.add(this.declarationTypeRecordField());
          break;
        // $CASES-OMITTED$
        default:
          // Any non-comma token ends the field list.
          done = true;
      }
    }
    return args;
  }
  /**
   * Parse a value declaration: {@code value name [: TypePath] = expression}.
   * The type ascription is optional; its presence is decided by whether a
   * colon or an equals sign follows the name.
   */
  public @Nonnull UASTIDValue<UASTIUnchecked> declarationValue()
    throws ParserError,
      IOException,
      LexerError,
      ConstraintError
  {
    this.parserConsumeExact(Type.TOKEN_VALUE);
    this.parserExpectExact(Type.TOKEN_IDENTIFIER_LOWER);
    final TokenIdentifierLower name = (TokenIdentifierLower) this.token;
    this.parserConsumeExact(Type.TOKEN_IDENTIFIER_LOWER);
    this
      .parserExpectOneOf(new Type[] { Type.TOKEN_COLON, Type.TOKEN_EQUALS });
    switch (this.token.getType()) {
      case TOKEN_COLON:
      {
        // Ascribed: value name : T = e
        this.parserConsumeExact(Type.TOKEN_COLON);
        final UASTITypePath path = this.declarationTypePath();
        this.parserConsumeExact(Type.TOKEN_EQUALS);
        return new UASTIDValue<UASTIUnchecked>(
          name,
          Option.some(path),
          this.expression());
      }
      case TOKEN_EQUALS:
      {
        // Unascribed: value name = e
        this.parserConsumeExact(Type.TOKEN_EQUALS);
        final Option<UASTITypePath> none = Option.none();
        return new UASTIDValue<UASTIUnchecked>(name, none, this.expression());
      }
      // $CASES-OMITTED$
      default:
        throw new UnreachableCodeException();
    }
  }
  /**
   * Parse a local value declaration (as used inside let/with blocks):
   * {@code value name [: TypePath] = expression}. Same grammar as
   * declarationValue but produces a local-binding AST node.
   */
  public @Nonnull UASTIDValueLocal<UASTIUnchecked> declarationValueLocal()
    throws ParserError,
      IOException,
      LexerError,
      ConstraintError
  {
    this.parserConsumeExact(Type.TOKEN_VALUE);
    this.parserExpectExact(Type.TOKEN_IDENTIFIER_LOWER);
    final TokenIdentifierLower name = (TokenIdentifierLower) this.token;
    this.parserConsumeExact(Type.TOKEN_IDENTIFIER_LOWER);
    this
      .parserExpectOneOf(new Type[] { Type.TOKEN_COLON, Type.TOKEN_EQUALS });
    switch (this.token.getType()) {
      case TOKEN_COLON:
      {
        // Ascribed local binding.
        this.parserConsumeExact(Type.TOKEN_COLON);
        final UASTITypePath path = this.declarationTypePath();
        this.parserConsumeExact(Type.TOKEN_EQUALS);
        return new UASTIDValueLocal<UASTIUnchecked>(
          name,
          Option.some(path),
          this.expression());
      }
      case TOKEN_EQUALS:
      {
        // Unascribed local binding.
        this.parserConsumeExact(Type.TOKEN_EQUALS);
        final Option<UASTITypePath> none = Option.none();
        return new UASTIDValueLocal<UASTIUnchecked>(
          name,
          none,
          this.expression());
      }
      // $CASES-OMITTED$
      default:
        throw new UnreachableCodeException();
    }
  }
  /**
   * Parse a non-empty, semicolon-terminated sequence of local value
   * declarations; stops at the first token that is not {@code value}.
   */
  public @Nonnull
    List<UASTIDValueLocal<UASTIUnchecked>>
    declarationValueLocals()
      throws ParserError,
        IOException,
        LexerError,
        ConstraintError
  {
    final List<UASTIDValueLocal<UASTIUnchecked>> values =
      new ArrayList<UASTIDValueLocal<UASTIUnchecked>>();
    values.add(this.declarationValueLocal());
    this.parserConsumeExact(Type.TOKEN_SEMICOLON);
    boolean done = false;
    while (done == false) {
      switch (this.token.getType()) {
        case TOKEN_VALUE:
          values.add(this.declarationValueLocal());
          this.parserConsumeExact(Type.TOKEN_SEMICOLON);
          break;
        // $CASES-OMITTED$
        default:
          done = true;
      }
    }
    return values;
  }
  /**
   * Parse a value path: either a bare lowercase value name, or a
   * module-qualified path {@code Module.name}.
   */
  public @Nonnull UASTIValuePath declarationValuePath()
    throws ParserError,
      IOException,
      LexerError,
      ConstraintError
  {
    this.parserExpectOneOf(new Type[] {
      Type.TOKEN_IDENTIFIER_LOWER,
      Type.TOKEN_IDENTIFIER_UPPER });
    switch (this.token.getType()) {
      case TOKEN_IDENTIFIER_LOWER:
      {
        final TokenIdentifierLower name = (TokenIdentifierLower) this.token;
        this.parserConsumeExact(Type.TOKEN_IDENTIFIER_LOWER);
        final None<TokenIdentifierUpper> none = Option.none();
        return new UASTIValuePath(none, name);
      }
      case TOKEN_IDENTIFIER_UPPER:
      {
        // Qualified: Module '.' name
        final TokenIdentifierUpper module = (TokenIdentifierUpper) this.token;
        this.parserConsumeExact(Type.TOKEN_IDENTIFIER_UPPER);
        this.parserConsumeExact(Type.TOKEN_DOT);
        this.parserExpectExact(Type.TOKEN_IDENTIFIER_LOWER);
        final TokenIdentifierLower name = (TokenIdentifierLower) this.token;
        this.parserConsumeExact(Type.TOKEN_IDENTIFIER_LOWER);
        return new UASTIValuePath(Option.some(module), name);
      }
      // $CASES-OMITTED$
      default:
        throw new UnreachableCodeException();
    }
  }
  /**
   * Parse a complete vertex shader declaration:
   * {@code vertex name is <params> [with <locals>] as <assignments> end}.
   * Parameter declarations (in/out/parameter) are parsed as a single mixed
   * list and then partitioned by concrete type into inputs, outputs and
   * parameters.
   */
  public @Nonnull
    UASTIDShaderVertex<UASTIUnchecked>
    declarationVertexShader()
      throws ParserError,
        IOException,
        LexerError,
        ConstraintError
  {
    this.parserConsumeExact(Type.TOKEN_VERTEX);
    this.parserExpectExact(Type.TOKEN_IDENTIFIER_LOWER);
    final TokenIdentifierLower name = (TokenIdentifierLower) this.token;
    this.parserConsumeExact(Type.TOKEN_IDENTIFIER_LOWER);
    this.parserConsumeExact(Type.TOKEN_IS);
    final List<UASTIDShaderVertexParameters<UASTIUnchecked>> decls =
      this.declarationVertexShaderParameterDeclarations();
    final List<UASTIDShaderVertexInput<UASTIUnchecked>> inputs =
      new ArrayList<UASTIDShaderVertexInput<UASTIUnchecked>>();
    final List<UASTIDShaderVertexOutput<UASTIUnchecked>> outputs =
      new ArrayList<UASTIDShaderVertexOutput<UASTIUnchecked>>();
    final List<UASTIDShaderVertexParameter<UASTIUnchecked>> parameters =
      new ArrayList<UASTIDShaderVertexParameter<UASTIUnchecked>>();
    // Partition the mixed declaration list by runtime type.
    for (int index = 0; index < decls.size(); ++index) {
      final UASTIDShaderVertexParameters<UASTIUnchecked> d = decls.get(index);
      if (d instanceof UASTIDShaderVertexInput<?>) {
        inputs.add((UASTIDShaderVertexInput<UASTIUnchecked>) d);
        continue;
      }
      if (d instanceof UASTIDShaderVertexOutput<?>) {
        outputs.add((UASTIDShaderVertexOutput<UASTIUnchecked>) d);
        continue;
      }
      if (d instanceof UASTIDShaderVertexParameter<?>) {
        parameters.add((UASTIDShaderVertexParameter<UASTIUnchecked>) d);
        continue;
      }
    }
    // The "with <locals>" section is optional; if absent, the next token
    // must be "as" (consumed unconditionally below).
    this.parserExpectOneOf(new Type[] { Type.TOKEN_WITH, Type.TOKEN_AS });
    final List<UASTIDValueLocal<UASTIUnchecked>> values;
    switch (this.token.getType()) {
      case TOKEN_WITH:
      {
        this.parserConsumeExact(Type.TOKEN_WITH);
        values = this.declarationValueLocals();
        break;
      }
      // $CASES-OMITTED$
      default:
      {
        values = new ArrayList<UASTIDValueLocal<UASTIUnchecked>>();
        break;
      }
    }
    this.parserConsumeExact(Type.TOKEN_AS);
    final List<UASTIDShaderVertexOutputAssignment<UASTIUnchecked>> assigns =
      this.declarationVertexShaderOutputAssignments();
    this.parserConsumeExact(Type.TOKEN_END);
    return new UASTIDShaderVertex<UASTIUnchecked>(
      name,
      inputs,
      outputs,
      parameters,
      values,
      assigns);
  }
public
UASTIDShaderVertexInput<UASTIUnchecked>
declarationVertexShaderInput()
throws ParserError,
IOException,
LexerError,
ConstraintError
{
this.parserConsumeExact(Type.TOKEN_IN);
this.parserExpectExact(Type.TOKEN_IDENTIFIER_LOWER);
final TokenIdentifierLower name = (TokenIdentifierLower) this.token;
this.parserConsumeExact(Type.TOKEN_IDENTIFIER_LOWER);
this.parserConsumeExact(Type.TOKEN_COLON);
return new UASTIDShaderVertexInput<UASTIUnchecked>(
name,
this.declarationTypePath());
}
public
UASTIDShaderVertexOutput<UASTIUnchecked>
declarationVertexShaderOutput()
throws ParserError,
IOException,
LexerError,
ConstraintError
{
this.parserConsumeExact(Type.TOKEN_OUT);
this.parserExpectExact(Type.TOKEN_IDENTIFIER_LOWER);
final TokenIdentifierLower name = (TokenIdentifierLower) this.token;
this.parserConsumeExact(Type.TOKEN_IDENTIFIER_LOWER);
this.parserConsumeExact(Type.TOKEN_COLON);
return new UASTIDShaderVertexOutput<UASTIUnchecked>(
name,
this.declarationTypePath());
}
  /**
   * Parse a single vertex shader output assignment:
   * {@code out name = value-path}.
   */
  public @Nonnull
    UASTIDShaderVertexOutputAssignment<UASTIUnchecked>
    declarationVertexShaderOutputAssignment()
      throws ParserError,
        IOException,
        LexerError,
        ConstraintError
  {
    this.parserConsumeExact(Type.TOKEN_OUT);
    this.parserExpectExact(Type.TOKEN_IDENTIFIER_LOWER);
    final TokenIdentifierLower name = (TokenIdentifierLower) this.token;
    this.parserConsumeExact(Type.TOKEN_IDENTIFIER_LOWER);
    this.parserConsumeExact(Type.TOKEN_EQUALS);
    // The right-hand side is restricted to a variable reference.
    final UASTIEVariable<UASTIUnchecked> value =
      new UASTIEVariable<UASTIUnchecked>(this.declarationValuePath());
    return new UASTIDShaderVertexOutputAssignment<UASTIUnchecked>(name, value);
  }
public @Nonnull
List<UASTIDShaderVertexOutputAssignment<UASTIUnchecked>>
declarationVertexShaderOutputAssignments()
throws ParserError,
IOException,
LexerError,
ConstraintError
{
final List<UASTIDShaderVertexOutputAssignment<UASTIUnchecked>> assigns =
new ArrayList<UASTIDShaderVertexOutputAssignment<UASTIUnchecked>>();
for (;;) {
switch (this.token.getType()) {
case TOKEN_OUT:
assigns.add(this.declarationVertexShaderOutputAssignment());
this.parserConsumeExact(Type.TOKEN_SEMICOLON);
break;
// $CASES-OMITTED$
default:
return assigns;
}
}
}
  /**
   * Parse a vertex shader parameter declaration:
   * {@code parameter name : TypePath}.
   */
  public @Nonnull
    UASTIDShaderVertexParameter<UASTIUnchecked>
    declarationVertexShaderParameter()
      throws ParserError,
        IOException,
        LexerError,
        ConstraintError
  {
    this.parserConsumeExact(Type.TOKEN_PARAMETER);
    this.parserExpectExact(Type.TOKEN_IDENTIFIER_LOWER);
    final TokenIdentifierLower name = (TokenIdentifierLower) this.token;
    this.parserConsumeExact(Type.TOKEN_IDENTIFIER_LOWER);
    this.parserConsumeExact(Type.TOKEN_COLON);
    return new UASTIDShaderVertexParameter<UASTIUnchecked>(
      name,
      this.declarationTypePath());
  }
  /**
   * Parse one vertex shader parameter-like declaration, dispatching on the
   * leading keyword: {@code in}, {@code out} or {@code parameter}.
   */
  public @Nonnull
    UASTIDShaderVertexParameters<UASTIUnchecked>
    declarationVertexShaderParameterDeclaration()
      throws ParserError,
        IOException,
        LexerError,
        ConstraintError
  {
    this.parserExpectOneOf(new Type[] {
      Type.TOKEN_IN,
      Type.TOKEN_OUT,
      Type.TOKEN_PARAMETER });
    switch (this.token.getType()) {
      case TOKEN_IN:
        return this.declarationVertexShaderInput();
      case TOKEN_OUT:
        return this.declarationVertexShaderOutput();
      case TOKEN_PARAMETER:
        return this.declarationVertexShaderParameter();
        // $CASES-OMITTED$
      default:
        // parserExpectOneOf above guarantees one of the three cases matched.
        throw new UnreachableCodeException();
    }
  }
  /**
   * Parse zero or more semicolon-terminated in/out/parameter declarations,
   * returning when a token other than in/out/parameter is seen.
   */
  public @Nonnull
    List<UASTIDShaderVertexParameters<UASTIUnchecked>>
    declarationVertexShaderParameterDeclarations()
      throws ParserError,
        IOException,
        LexerError,
        ConstraintError
  {
    final List<UASTIDShaderVertexParameters<UASTIUnchecked>> declarations =
      new ArrayList<UASTIDShaderVertexParameters<UASTIUnchecked>>();
    for (;;) {
      switch (this.token.getType()) {
        case TOKEN_IN:
        case TOKEN_OUT:
        case TOKEN_PARAMETER:
          declarations
            .add(this.declarationVertexShaderParameterDeclaration());
          this.parserConsumeExact(Type.TOKEN_SEMICOLON);
          break;
        // $CASES-OMITTED$
        default:
          return declarations;
      }
    }
  }
public @Nonnull UASTIExpression<UASTIUnchecked> expression()
throws ParserError,
IOException,
LexerError,
ConstraintError
{
return this.expressionPost(this.expressionPre());
}
  /**
   * Parse a parenthesised, comma-separated, non-empty argument list:
   * {@code ( e [, e]* )}.
   */
  public @Nonnull
    List<UASTIExpression<UASTIUnchecked>>
    expressionApplicationArguments()
      throws ParserError,
        IOException,
        LexerError,
        ConstraintError
  {
    this.parserConsumeExact(Type.TOKEN_ROUND_LEFT);
    final ArrayList<UASTIExpression<UASTIUnchecked>> arguments =
      new ArrayList<UASTIExpression<UASTIUnchecked>>();
    arguments.add(this.expression());
    boolean done = false;
    while (done == false) {
      switch (this.token.getType()) {
        case TOKEN_COMMA:
          this.parserConsumeExact(Type.TOKEN_COMMA);
          arguments.add(this.expression());
          break;
        // $CASES-OMITTED$
        default:
          done = true;
          break;
      }
    }
    this.parserConsumeExact(Type.TOKEN_ROUND_RIGHT);
    return arguments;
  }
  /**
   * Parse a boolean literal expression.
   */
  public @Nonnull UASTIEBoolean<UASTIUnchecked> expressionBoolean()
    throws ConstraintError,
      ParserError,
      IOException,
      LexerError
  {
    this.parserExpectExact(Type.TOKEN_LITERAL_BOOLEAN);
    final UASTIEBoolean<UASTIUnchecked> t =
      new UASTIEBoolean<UASTIUnchecked>((TokenLiteralBoolean) this.token);
    this.parserConsumeAny();
    return t;
  }
  /**
   * Parse a conditional expression: {@code if e then e else e end}.
   */
  public @Nonnull UASTIEConditional<UASTIUnchecked> expressionConditional()
    throws ParserError,
      ConstraintError,
      IOException,
      LexerError
  {
    this.parserConsumeExact(Type.TOKEN_IF);
    final UASTIExpression<UASTIUnchecked> econd = this.expression();
    this.parserConsumeExact(Type.TOKEN_THEN);
    final UASTIExpression<UASTIUnchecked> eleft = this.expression();
    this.parserConsumeExact(Type.TOKEN_ELSE);
    final UASTIExpression<UASTIUnchecked> eright = this.expression();
    this.parserConsumeExact(Type.TOKEN_END);
    return new UASTIEConditional<UASTIUnchecked>(econd, eleft, eright);
  }
  /**
   * Parse a decimal integer literal expression.
   */
  public @Nonnull UASTIEInteger<UASTIUnchecked> expressionInteger()
    throws ParserError,
      ConstraintError,
      IOException,
      LexerError
  {
    this.parserExpectExact(Type.TOKEN_LITERAL_INTEGER_DECIMAL);
    final UASTIEInteger<UASTIUnchecked> t =
      new UASTIEInteger<UASTIUnchecked>((TokenLiteralInteger) this.token);
    this.parserConsumeAny();
    return t;
  }
  /**
   * Parse a let expression: {@code let <local values> in e end}.
   * The {@code let} token itself is retained for position information.
   */
  public @Nonnull UASTIELet<UASTIUnchecked> expressionLet()
    throws ParserError,
      ConstraintError,
      IOException,
      LexerError
  {
    this.parserExpectExact(Type.TOKEN_LET);
    final TokenLet let = (TokenLet) this.token;
    this.parserConsumeExact(Type.TOKEN_LET);
    final List<UASTIDValueLocal<UASTIUnchecked>> bindings =
      this.declarationValueLocals();
    this.parserConsumeExact(Type.TOKEN_IN);
    final UASTIExpression<UASTIUnchecked> body = this.expression();
    this.parserConsumeExact(Type.TOKEN_END);
    return new UASTIELet<UASTIUnchecked>(let, bindings, body);
  }
  /**
   * Parse a constructor expression: {@code new TypePath ( args )}.
   */
  public @Nonnull UASTIENew<UASTIUnchecked> expressionNew()
    throws ParserError,
      IOException,
      LexerError,
      ConstraintError
  {
    this.parserConsumeExact(Type.TOKEN_NEW);
    final UASTITypePath path = this.declarationTypePath();
    return new UASTIENew<UASTIUnchecked>(
      path,
      this.expressionApplicationArguments());
  }
  /**
   * Apply any postfix expression forms to an already-parsed expression:
   * record projections ({@code e.field}) and swizzles ({@code e[x y]}),
   * recursing so that chains such as {@code e.a[x].b} are handled.
   */
  private @Nonnull UASTIExpression<UASTIUnchecked> expressionPost(
    final @Nonnull UASTIExpression<UASTIUnchecked> e)
    throws ParserError,
      IOException,
      LexerError,
      ConstraintError
  {
    switch (this.token.getType()) {
      case TOKEN_DOT:
        return this.expressionPost(this.expressionRecordProjection(e));
      case TOKEN_SQUARE_LEFT:
        return this.expressionPost(this.expressionSwizzle(e));
        // $CASES-OMITTED$
      default:
        // No postfix form follows; the expression is complete.
        return e;
    }
  }
  /**
   * Parse a primary (prefix) expression, dispatching on the current token:
   * literals, variables/applications, conditionals, let, new, and records.
   */
  private @Nonnull UASTIExpression<UASTIUnchecked> expressionPre()
    throws ParserError,
      ConstraintError,
      IOException,
      LexerError
  {
    this.parserExpectOneOf(new Type[] {
      Type.TOKEN_LITERAL_INTEGER_DECIMAL,
      Type.TOKEN_LITERAL_BOOLEAN,
      Type.TOKEN_LITERAL_REAL,
      Type.TOKEN_IDENTIFIER_LOWER,
      Type.TOKEN_IDENTIFIER_UPPER,
      Type.TOKEN_IF,
      Type.TOKEN_LET,
      Type.TOKEN_NEW,
      Type.TOKEN_RECORD });
    switch (this.token.getType()) {
      case TOKEN_LITERAL_INTEGER_DECIMAL:
        return this.expressionInteger();
      case TOKEN_LITERAL_REAL:
        return this.expressionReal();
      case TOKEN_LITERAL_BOOLEAN:
        return this.expressionBoolean();
      // Both identifier cases parse a (possibly qualified) variable or
      // function application.
      case TOKEN_IDENTIFIER_LOWER:
        return this.expressionVariableOrApplication();
      case TOKEN_IDENTIFIER_UPPER:
        return this.expressionVariableOrApplication();
      case TOKEN_IF:
        return this.expressionConditional();
      case TOKEN_LET:
        return this.expressionLet();
      case TOKEN_NEW:
        return this.expressionNew();
      case TOKEN_RECORD:
        return this.expressionRecord();
        // $CASES-OMITTED$
      default:
        // parserExpectOneOf above guarantees one of the cases matched.
        throw new UnreachableCodeException();
    }
  }
  /**
   * Parse a real (floating point) literal expression.
   */
  public @Nonnull UASTIEReal<UASTIUnchecked> expressionReal()
    throws ParserError,
      ConstraintError,
      IOException,
      LexerError
  {
    this.parserExpectExact(Type.TOKEN_LITERAL_REAL);
    final UASTIEReal<UASTIUnchecked> t =
      new UASTIEReal<UASTIUnchecked>((TokenLiteralReal) this.token);
    this.parserConsumeAny();
    return t;
  }
  /**
   * Parse a record construction expression:
   * {@code record TypePath { field = e [, field = e]* }}.
   * At least one field assignment is required.
   */
  public @Nonnull UASTIERecord<UASTIUnchecked> expressionRecord()
    throws ParserError,
      IOException,
      LexerError,
      ConstraintError
  {
    this.parserConsumeExact(Type.TOKEN_RECORD);
    final UASTITypePath path = this.declarationTypePath();
    this.parserConsumeExact(Type.TOKEN_CURLY_LEFT);
    final List<UASTIRecordFieldAssignment<UASTIUnchecked>> fields =
      new ArrayList<UASTIRecordFieldAssignment<UASTIUnchecked>>();
    fields.add(this.expressionRecordFieldAssignment());
    this.expressionRecordActual(fields);
    this.parserConsumeExact(Type.TOKEN_CURLY_RIGHT);
    return new UASTIERecord<UASTIUnchecked>(path, fields);
  }
  /**
   * Recursively parse the remaining comma-prefixed field assignments of a
   * record expression, appending them to the given list in place.
   */
  private void expressionRecordActual(
    final @Nonnull List<UASTIRecordFieldAssignment<UASTIUnchecked>> fields)
    throws ParserError,
      IOException,
      LexerError,
      ConstraintError
  {
    switch (this.token.getType()) {
      case TOKEN_COMMA:
        this.parserConsumeExact(Type.TOKEN_COMMA);
        fields.add(this.expressionRecordFieldAssignment());
        this.expressionRecordActual(fields);
        break;
      // $CASES-OMITTED$
      default:
        // Any non-comma token ends the assignment list.
        return;
    }
  }
  /**
   * Parse a single record field assignment: {@code name = expression}.
   */
  public @Nonnull
    UASTIRecordFieldAssignment<UASTIUnchecked>
    expressionRecordFieldAssignment()
      throws ConstraintError,
        ParserError,
        IOException,
        LexerError
  {
    this.parserExpectExact(Type.TOKEN_IDENTIFIER_LOWER);
    final TokenIdentifierLower name = (TokenIdentifierLower) this.token;
    this.parserConsumeExact(Type.TOKEN_IDENTIFIER_LOWER);
    this.parserConsumeExact(Type.TOKEN_EQUALS);
    return new UASTIRecordFieldAssignment<UASTIUnchecked>(
      name,
      this.expression());
  }
  /**
   * Parse a record projection applied to an existing expression:
   * {@code e . field}.
   */
  public @Nonnull
    UASTIERecordProjection<UASTIUnchecked>
    expressionRecordProjection(
      final @Nonnull UASTIExpression<UASTIUnchecked> e)
      throws ConstraintError,
        ParserError,
        IOException,
        LexerError
  {
    this.parserConsumeExact(Type.TOKEN_DOT);
    this.parserExpectExact(Type.TOKEN_IDENTIFIER_LOWER);
    final UASTIERecordProjection<UASTIUnchecked> r =
      new UASTIERecordProjection<UASTIUnchecked>(
        e,
        (TokenIdentifierLower) this.token);
    this.parserConsumeExact(Type.TOKEN_IDENTIFIER_LOWER);
    return r;
  }
public UASTIESwizzle<UASTIUnchecked> expressionSwizzle(
final @Nonnull UASTIExpression<UASTIUnchecked> e)
throws ParserError,
IOException,
LexerError,
ConstraintError
{
this.parserConsumeExact(Type.TOKEN_SQUARE_LEFT);
final ArrayList<TokenIdentifierLower> fields =
new ArrayList<TokenIdentifierLower>();
fields.add(this.expressionSwizzleField());
boolean done = false;
while (done == false) {
switch (this.token.getType()) {
case TOKEN_IDENTIFIER_LOWER:
fields.add(this.expressionSwizzleField());
break;
// $CASES-OMITTED$
default:
done = true;
}
}
this.parserConsumeExact(Type.TOKEN_SQUARE_RIGHT);
return new UASTIESwizzle<UASTIUnchecked>(e, fields);
}
  /**
   * Parse a single swizzle field name (a lowercase identifier).
   */
  public @Nonnull TokenIdentifierLower expressionSwizzleField()
    throws ParserError,
      ConstraintError,
      IOException,
      LexerError
  {
    this.parserExpectExact(Type.TOKEN_IDENTIFIER_LOWER);
    final TokenIdentifierLower name = (TokenIdentifierLower) this.token;
    this.parserConsumeExact(Type.TOKEN_IDENTIFIER_LOWER);
    return name;
  }
  /**
   * Parse either a variable reference or a function application: after the
   * value path, an opening parenthesis distinguishes an application from a
   * plain variable.
   */
  public @Nonnull
    UASTIExpression<UASTIUnchecked>
    expressionVariableOrApplication()
      throws ParserError,
        IOException,
        LexerError,
        ConstraintError
  {
    final UASTIValuePath path = this.declarationValuePath();
    switch (this.token.getType()) {
      case TOKEN_ROUND_LEFT:
        return new UASTIEApplication<UASTIUnchecked>(
          path,
          this.expressionApplicationArguments());
        // $CASES-OMITTED$
      default:
        return new UASTIEVariable<UASTIUnchecked>(path);
    }
  }
  /**
   * Advance to the next token unconditionally.
   */
  protected void parserConsumeAny()
    throws IOException,
      LexerError,
      ConstraintError
  {
    this.token = this.lexer.token();
  }
  /**
   * Require that the current token has the given type (raising ParserError
   * otherwise) and then advance to the next token.
   */
  protected void parserConsumeExact(
    final @Nonnull Token.Type type)
    throws ParserError,
      ConstraintError,
      IOException,
      LexerError
  {
    this.parserExpectExact(type);
    this.parserConsumeAny();
  }
  /**
   * Require that the current token has the given type without consuming it;
   * raises a ParserError with a descriptive message otherwise.
   */
  protected void parserExpectExact(
    final @Nonnull Token.Type type)
    throws ParserError,
      ConstraintError
  {
    if (this.token.getType() != type) {
      // Reuse the shared message buffer to build the error text.
      this.message.setLength(0);
      this.message.append("Expected ");
      this.message.append(type.getDescription());
      this.message.append(" but got ");
      this.parserShowToken();
      throw new ParserError(
        this.message.toString(),
        this.lexer.getFile(),
        this.token.getPosition());
    }
  }
  /**
   * Require that the current token has one of the given types without
   * consuming it; raises a ParserError listing all accepted types otherwise.
   */
  protected void parserExpectOneOf(
    final @Nonnull Token.Type types[])
    throws ParserError,
      ConstraintError
  {
    for (final Type want : types) {
      if (this.token.getType() == want) {
        return;
      }
    }
    // No match: build "Expected one of {A, B, ...} but got <token>".
    this.message.setLength(0);
    this.message.append("Expected one of {");
    for (int index = 0; index < types.length; ++index) {
      final Type t = types[index];
      this.message.append(t);
      if ((index + 1) != types.length) {
        this.message.append(", ");
      }
    }
    this.message.append("} but got ");
    this.parserShowToken();
    throw new ParserError(
      this.message.toString(),
      this.lexer.getFile(),
      this.token.getPosition());
  }
  /**
   * Append a human-readable description of the current token to the shared
   * message buffer; identifiers additionally include their actual text.
   */
  private void parserShowToken()
  {
    this.message.append(this.token.getType().getDescription());
    switch (this.token.getType()) {
      case TOKEN_IDENTIFIER_LOWER:
      {
        final TokenIdentifierLower t =
          (Token.TokenIdentifierLower) this.token;
        this.message.append("('");
        this.message.append(t.getActual());
        this.message.append("')");
        break;
      }
      case TOKEN_IDENTIFIER_UPPER:
      {
        final TokenIdentifierUpper t =
          (Token.TokenIdentifierUpper) this.token;
        this.message.append("('");
        this.message.append(t.getActual());
        this.message.append("')");
        break;
      }
      // $CASES-OMITTED$
      default:
        // Other token kinds have no extra text to show.
        break;
    }
  }
  /**
   * Parse a complete compilation unit: a semicolon-terminated package
   * declaration followed by the module declarations.
   */
  public @Nonnull UASTIUnit<UASTIUnchecked> unit()
    throws ConstraintError,
      ParserError,
      IOException,
      LexerError
  {
    final UASTIDPackage<UASTIUnchecked> pack = this.declarationPackage();
    this.parserConsumeExact(Type.TOKEN_SEMICOLON);
    final List<UASTIDModule<UASTIUnchecked>> modules =
      this.declarationModules();
    return new UASTIUnit<UASTIUnchecked>(this.lexer.getFile(), pack, modules);
  }
}
| Syntactically require at least one vertex and fragment shader output assignment
| io7m-jparasol-compiler-core/src/main/java/com/io7m/jparasol/parser/Parser.java | Syntactically require at least one vertex and fragment shader output assignment | <ide><path>o7m-jparasol-compiler-core/src/main/java/com/io7m/jparasol/parser/Parser.java
<ide> final List<UASTIDShaderFragmentOutputAssignment<UASTIUnchecked>> assigns =
<ide> new ArrayList<UASTIDShaderFragmentOutputAssignment<UASTIUnchecked>>();
<ide>
<add> assigns.add(this.declarationFragmentShaderOutputAssignment());
<add> this.parserConsumeExact(Type.TOKEN_SEMICOLON);
<add>
<ide> for (;;) {
<ide> switch (this.token.getType()) {
<ide> case TOKEN_OUT:
<ide> final List<UASTIDShaderVertexOutputAssignment<UASTIUnchecked>> assigns =
<ide> new ArrayList<UASTIDShaderVertexOutputAssignment<UASTIUnchecked>>();
<ide>
<add> assigns.add(this.declarationVertexShaderOutputAssignment());
<add> this.parserConsumeExact(Type.TOKEN_SEMICOLON);
<add>
<ide> for (;;) {
<ide> switch (this.token.getType()) {
<ide> case TOKEN_OUT: |
|
Java | mit | 44c1bdd92f143d52182961119a1a3f2271e0b932 | 0 | codesqueak/Nascom | /*
* MIT License
*
* Copyright (c) 2016
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*
*/
package com.codingrodent.emulator.cards.cpu.nascom2;
import com.codingrodent.emulator.emulator.SystemContext;
import javax.swing.*;
import java.io.*;
/**
 * Emulation of the Nascom cassette tape interface: tape bytes are streamed
 * through the UART data register (port 1) with status on port 2, and the
 * tape drive LED is controlled via bit 4 of port 0.
 */
class CassetteTape {
    private static final int TAPE_DRIVE_LED = 0x010;
    private final SystemContext systemContext;
    // True when the tape drive LED bit is set on port 0.
    private boolean tapeLED;
    private FileInputStream tapeFileInput;
    private FileOutputStream tapeFileOutput;
    private String readFileName;
    // One byte of read-ahead from the input tape; becomes -1 once EOF is hit.
    private int readAheadChar;

    CassetteTape() {
        systemContext = SystemContext.createInstance();
        tapeLED = false;
        tapeFileInput = null;
        tapeFileOutput = null;
    }

    /**
     * Control the tape LED
     *
     * @param data Byte written to port 0
     */
    void controlLED(int data) {
        tapeLED = 0 != (data & TAPE_DRIVE_LED);
    }

    /**
     * Write data to the UART data register, port 1. Silently ignored when
     * no output tape is mounted.
     *
     * @param data UART data
     */
    void writeDataToUART(int data) {
        try {
            if (null != tapeFileOutput) {
                tapeFileOutput.write(data);
            }
        } catch (Exception e) {
            systemContext.logErrorEvent("Problem encountered writing to tape file. " + e.getMessage());
        }
    }

    /**
     * Read data from the UART data register, port 1. Returns the buffered
     * read-ahead byte and refills the buffer from the tape.
     *
     * @return data UART Data
     */
    int readDataFromUART() {
        int oneChar = readAheadChar;
        fillReadAhead();
        return oneChar;
    }

    /**
     * Store a char from input in readAheadChar; close input stream if we reach the end.
     */
    void fillReadAhead() {
        try {
            if (tapeFileInput == null) {
                return;
            }
            readAheadChar = tapeFileInput.read();
            if (readAheadChar < 0) { // EOF reached
                tapeFileInput.close();
                tapeFileInput = null;
            }
        } catch (IOException e) {
            systemContext.logErrorEvent("Problem encountered reading tape file " + e.getMessage());
            try {
                tapeFileInput.close();
            } catch (IOException ignored) {
            }
            tapeFileInput = null;
        }
    }

    /**
     * Read status from the UART status register, port 2
     *
     * @return Valid data flag if an input file exists, plus 0x40 when the
     *         tape LED is on
     */
    int readStatusFromUART() {
        int status = 0x00;
        if (null != tapeFileInput) {
            status = status | 0xC0;
        }
        if (tapeLED) {
            status = status | 0x40;
        }
        return status;
    }

    /**
     * Set the input stream ready for reading
     *
     * @param readFileName The file to read from
     */
    private void setTapeRead(String readFileName) {
        if (null != tapeFileInput) {
            try {
                tapeFileInput.close();
            } catch (IOException e) {
                systemContext.logErrorEvent("Problem encountered reading tape file. " + e.getMessage());
            }
        }
        tapeFileInput = null;
        try {
            systemContext.logInfoEvent("Looking for tape to read: " + readFileName);
            tapeFileInput = new FileInputStream(readFileName);
            this.readFileName = readFileName;
            // Prime the read-ahead buffer so the first readDataFromUART()
            // call returns the first byte of the tape rather than whatever
            // readAheadChar held previously.
            fillReadAhead();
        } catch (FileNotFoundException e) {
            systemContext.logErrorEvent("Unable to find tape to read: " + readFileName);
        }
    }

    /**
     * Set the output stream ready for writing
     *
     * @param writeFileName The file to write to
     */
    private void setTapeWrite(String writeFileName) {
        if (null != tapeFileOutput) {
            try {
                tapeFileOutput.close();
            } catch (Exception e) {
                systemContext.logErrorEvent("Problem encountered writing to tape file. " + e.getMessage());
            }
        }
        tapeFileOutput = null;
        try {
            // Bug fix: these log messages previously reported readFileName
            // instead of the writeFileName parameter actually being opened.
            systemContext.logInfoEvent("Looking for tape to write: " + writeFileName);
            tapeFileOutput = new FileOutputStream(writeFileName);
        } catch (FileNotFoundException e) {
            systemContext.logErrorEvent("Unable to find tape to write: " + writeFileName);
        }
    }

    /**
     * Load a new tape image
     */
    void loadNewTape() {
        JFileChooser fc = new JFileChooser(System.getProperty("user.dir"));
        fc.setFileFilter(new dskFileFilter());
        int returnValue = fc.showOpenDialog(systemContext.getPrimaryDisplay());
        if (returnValue == JFileChooser.APPROVE_OPTION) {
            File file = fc.getSelectedFile();
            String fileName = file.getAbsolutePath();
            systemContext.logDebugEvent("Loading tape file from " + fileName);
            setTapeRead(fileName);
        }
    }

    /**
     * Set a new tape image to write to
     */
    void saveNewTape() {
        JFileChooser fc = new JFileChooser(System.getProperty("user.dir"));
        fc.setFileFilter(new dskFileFilter());
        int returnValue = fc.showSaveDialog(systemContext.getPrimaryDisplay());
        if (returnValue == JFileChooser.APPROVE_OPTION) {
            File file = fc.getSelectedFile();
            String fileName = file.getAbsolutePath();
            systemContext.logDebugEvent("Saving tape file to " + fileName);
            setTapeWrite(fileName);
        }
    }

    /**
     * inner class for the file filter (.cas / .bas)
     */
    private static class dskFileFilter extends javax.swing.filechooser.FileFilter {
        @Override
        public boolean accept(File f) {
            if (f.isDirectory()) {
                return true;
            } else {
                int place = f.getName().lastIndexOf('.');
                if (place == -1) {
                    return false;
                }
                String fileType = f.getName().substring(place).toLowerCase();
                return ((0 == fileType.compareTo(".cas")) || (0 == fileType.compareTo(".bas")));
            }
        }

        /**
         * Description for file type in dialog box
         */
        @Override
        public String getDescription() {
            return "Cassette Image File";
        }
    }
}
| src/main/java/com/codingrodent/emulator/cards/cpu/nascom2/CassetteTape.java | /*
* MIT License
*
* Copyright (c) 2016
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*
*/
package com.codingrodent.emulator.cards.cpu.nascom2;
import com.codingrodent.emulator.emulator.SystemContext;
import javax.swing.*;
import java.io.*;
class CassetteTape {
private static final int TAPE_DRIVE_LED = 0x010;
private final SystemContext systemContext;
private boolean tapeLED;
private FileInputStream tapeFileInput;
private FileOutputStream tapeFileOutput;
private String readFileName;
CassetteTape() {
systemContext = SystemContext.createInstance();
tapeLED = false;
tapeFileInput = null;
tapeFileOutput = null;
}
/**
* Control the tape LED
*
* @param data Byte written to port 0
*/
void controlLED(int data) {
tapeLED = 0 != (data & TAPE_DRIVE_LED);
}
/**
* Write data to the UART data register, port 1
*
* @param data UART data
*/
void writeDataToUART(int data) {
if (tapeLED) {
try {
if (null != tapeFileOutput) {
tapeFileOutput.write(data);
}
} catch (Exception e) {
systemContext.logErrorEvent("Problem encountered writing to tape file. " + e.getMessage());
}
}
}
/**
* Read data from the UART data register, port 1
*
* @return data UART Data
*/
int readDataFromUART() {
if (tapeLED) {
try {
int oneChar = 0x00;
if (0 != tapeFileInput.available()) {
return tapeFileInput.read();
} else {
tapeFileInput.close();
tapeFileInput = new FileInputStream(readFileName);
return oneChar;
}
} catch (Exception e) {
return 0x00;
}
} else {
return 0x00;
}
}
/**
* Read status from the UART status register, port 2
*
* @return Valid data flag if read LED on and an input file exists
*/
int readStatusFromUART() {
int status = 0x00;
if (tapeLED && (null != tapeFileInput)) {
status = status | 0xC0;
}
if (tapeLED) {
status = status | 0x40;
}
return status;
}
/**
* Set the input stream ready for reading
*
* @param readFileName The file to read from
*/
private void setTapeRead(String readFileName) {
if (null != tapeFileInput) {
try {
tapeFileInput.close();
} catch (IOException e) {
systemContext.logErrorEvent("Problem encountered reading tape file. " + e.getMessage());
}
}
tapeFileInput = null;
try {
systemContext.logInfoEvent("Looking for tape to read: " + readFileName);
tapeFileInput = new FileInputStream(readFileName);
this.readFileName = readFileName;
} catch (FileNotFoundException e) {
systemContext.logErrorEvent("Unable to find tape to read: " + readFileName);
}
}
/**
* Set the output stream ready for writing
*
* @param writeFileName The file to write to
*/
private void setTapeWrite(String writeFileName) {
if (null != tapeFileOutput) {
try {
tapeFileOutput.close();
} catch (Exception e) {
systemContext.logErrorEvent("Problem encountered writing to tape file. " + e.getMessage());
}
}
tapeFileOutput = null;
try {
systemContext.logInfoEvent("Looking for tape to write: " + readFileName);
tapeFileOutput = new FileOutputStream(writeFileName);
} catch (FileNotFoundException e) {
systemContext.logErrorEvent("Unable to find tape to write: " + readFileName);
}
}
/**
* Load a new tape image
*/
void loadNewTape() {
JFileChooser fc = new JFileChooser(System.getProperty("user.dir"));
fc.setFileFilter(new dskFileFilter());
int returnValue = fc.showOpenDialog(systemContext.getPrimaryDisplay());
if (returnValue == JFileChooser.APPROVE_OPTION) {
File file = fc.getSelectedFile();
String fileName = file.getAbsolutePath();
systemContext.logDebugEvent("Loading tape file from " + fileName);
setTapeRead(fileName);
}
}
/**
* Set a new tape image to write to
*/
void saveNewTape() {
JFileChooser fc = new JFileChooser(System.getProperty("user.dir"));
fc.setFileFilter(new dskFileFilter());
int returnValue = fc.showSaveDialog(systemContext.getPrimaryDisplay());
if (returnValue == JFileChooser.APPROVE_OPTION) {
File file = fc.getSelectedFile();
String fileName = file.getAbsolutePath();
systemContext.logDebugEvent("Saving tape file to " + fileName);
setTapeWrite(fileName);
}
}
/**
* inner class for the file filter (.cas / .bas)
*/
private static class dskFileFilter extends javax.swing.filechooser.FileFilter {
@Override
public boolean accept(File f) {
if (f.isDirectory()) {
return true;
} else {
int place = f.getName().lastIndexOf('.');
if (place == -1) {
return false;
}
String fileType = f.getName().substring(place).toLowerCase();
return ((0 == fileType.compareTo(".cas")) || (0 == fileType.compareTo(".bas")));
}
}
/**
* Description for file type in dialog box
*/
@Override
public String getDescription() {
return "Cassette Image File";
}
}
} | Play cassette tape on any UART read request; do not automatically loop tape
This is necessary for using ZEAP, and potentially other systems that save data then
expect it to be played back directly into their user interface.
| src/main/java/com/codingrodent/emulator/cards/cpu/nascom2/CassetteTape.java | Play cassette tape on any UART read request; do not automatically loop tape | <ide><path>rc/main/java/com/codingrodent/emulator/cards/cpu/nascom2/CassetteTape.java
<ide> private FileInputStream tapeFileInput;
<ide> private FileOutputStream tapeFileOutput;
<ide> private String readFileName;
<add> private int readAheadChar;
<ide>
<ide> CassetteTape() {
<ide> systemContext = SystemContext.createInstance();
<ide> * @param data UART data
<ide> */
<ide> void writeDataToUART(int data) {
<del> if (tapeLED) {
<del> try {
<del> if (null != tapeFileOutput) {
<del> tapeFileOutput.write(data);
<del> }
<del> } catch (Exception e) {
<del> systemContext.logErrorEvent("Problem encountered writing to tape file. " + e.getMessage());
<del> }
<del> }
<add> try {
<add> if (null != tapeFileOutput) {
<add> tapeFileOutput.write(data);
<add> }
<add> } catch (Exception e) {
<add> systemContext.logErrorEvent("Problem encountered writing to tape file. " + e.getMessage());
<add> }
<ide> }
<ide>
<ide> /**
<ide> * @return data UART Data
<ide> */
<ide> int readDataFromUART() {
<del> if (tapeLED) {
<del> try {
<del> int oneChar = 0x00;
<del> if (0 != tapeFileInput.available()) {
<del> return tapeFileInput.read();
<del> } else {
<del> tapeFileInput.close();
<del> tapeFileInput = new FileInputStream(readFileName);
<del> return oneChar;
<del> }
<del> } catch (Exception e) {
<del> return 0x00;
<del> }
<del> } else {
<del> return 0x00;
<del> }
<add> int oneChar = readAheadChar;
<add> fillReadAhead ();
<add> return oneChar;
<add> }
<add>
<add> /**
<add> * Store a char from input in readAheadChar; close input stream if we reach the end.
<add> */
<add> void fillReadAhead () {
<add> try {
<add> if (tapeFileInput == null) return;
<add> readAheadChar = tapeFileInput.read ();
<add> if (readAheadChar < 0) { // EOF reached
<add> tapeFileInput.close ();
<add> tapeFileInput = null;
<add> }
<add> } catch (IOException e) {
<add> systemContext.logErrorEvent ("Problem encountered reading tape file " + e.getMessage ());
<add> try {
<add> tapeFileInput.close();
<add> } catch (IOException ignored) { }
<add> tapeFileInput = null;
<add> }
<ide> }
<ide>
<ide> /**
<ide> */
<ide> int readStatusFromUART() {
<ide> int status = 0x00;
<del> if (tapeLED && (null != tapeFileInput)) {
<add> if (null != tapeFileInput ) {
<ide> status = status | 0xC0;
<ide> }
<ide> if (tapeLED) { |
|
Java | mit | 076cf872173e10c6ce8f6c69128d67b01b737e03 | 0 | ASzc/keratin-irc,emmettu/keratin-irc | /**
* Copyright (C) 2013 Alexander Szczuczko
*
* This file may be modified and distributed under the terms
* of the MIT license. See the LICENSE file for details.
*/
package ca.szc.keratin.core.misc;
import java.util.LinkedList;
import java.util.List;
public class LineWrap
{
/**
* According to RFC2812, this is strictly 510 characters (as the maximum allowed for the command and its
* parameters), however we'll allow less than that to provide for room for the overhead. This is a rough value and
* may need to become more smaller.
*/
public static final int MAX_IRC_LENGTH = 400;
/**
* Wrap line to lines of the maximum IRC length. Any newline (\n) characters will force a line break where they
* exist.
*
* @param line Long line to wrap
* @return List of wrapped lines containing at least one string
*/
public static List<String> wrap( String line )
{
return wrap( line, MAX_IRC_LENGTH );
}
/**
* Wrap a line to lines of at most the specified length. Any newline (\n) characters will force a line break where
* they exist. Otherwise, line breaks will be assigned to the last whitespace character in a line. If neither of
* these methods works, the line maximum length will be enforced by splitting the string at the length limit.
*
* @param line Long line to wrap.
* @param maxLineLength The maximum length of a line to allow.
* @return List of wrapped lines containing at least one line.
*/
public static List<String> wrap( String raw, final int maxLineLength )
{
// Is this a base case?
if ( raw.length() <= maxLineLength )
{
// \n characters force a break regardless of where they are.
// Search the remainder segment for one.
int endIndex = raw.indexOf( '\n' );
if ( endIndex != -1 )
{
String line = raw.substring( 0, endIndex );
List<String> lines = wrap( raw.substring( endIndex + 1 ), raw.length() );
lines.add( 0, line );
return lines;
}
else
{
// Otherwise this really is a base case
// Init return list
List<String> lines = new LinkedList<String>();
// Add the whole remainder as the first inserted line
lines.add( raw );
return lines;
}
}
else
{
int endIndex = -1;
// \n characters force a break regardless of where they are.
// Search in our segment for one.
endIndex = raw.substring( 0, maxLineLength ).indexOf( '\n' );
// If no \n was found, search for a whitespace character closest to the end of our segment.
if ( endIndex == -1 )
{
char[] rawArray = raw.toCharArray();
for ( int i = maxLineLength; i > 0 && endIndex == -1; i-- )
{
if ( Character.isWhitespace( rawArray[i] ) )
{
endIndex = i;
}
}
}
// If no intelligent split point was found, just use the max size as that point
if ( endIndex == -1 )
{
endIndex = maxLineLength;
}
int splitSpacing = 0;
/*
* Remove/skip-over whitespace splitting characters. This avoids including \n manual split characters, which
* aren't allowed by the IRC protocol, as well as having a space at the start of each automatically wrapped
* line.
*/
if ( Character.isWhitespace( raw.charAt( endIndex ) ) )
{
splitSpacing = 1;
}
String line = raw.substring( 0, endIndex );
// Recurse on remainder of raw
List<String> lines = wrap( raw.substring( endIndex + splitSpacing ), maxLineLength );
// Prepend our line, so the text stays in the original order
lines.add( 0, line );
return lines;
}
}
}
| keratin-core/src/ca/szc/keratin/core/misc/LineWrap.java | package ca.szc.keratin.core.misc;
import java.util.LinkedList;
import java.util.List;
public class LineWrap
{
/**
* According to RFC2812, this is strictly 510 characters (as the maximum allowed for the command and its
* parameters), however we'll allow less than that to provide for room for the overhead. This is a rough value and
* may need to become more smaller.
*/
public static final int MAX_IRC_LENGTH = 400;
/**
* Wrap line to lines of the maximum IRC length. Any newline (\n) characters will force a line break where they
* exist.
*
* @param line Long line to wrap
* @return List of wrapped lines containing at least one string
*/
public static List<String> wrap( String line )
{
return wrap( line, MAX_IRC_LENGTH );
}
/**
* Wrap a line to lines of at most the specified length. Any newline (\n) characters will force a line break where
* they exist. Otherwise, line breaks will be assigned to the last whitespace character in a line. If neither of
* these methods works, the line maximum length will be enforced by splitting the string at the length limit.
*
* @param line Long line to wrap.
* @param maxLineLength The maximum length of a line to allow.
* @return List of wrapped lines containing at least one line.
*/
public static List<String> wrap( String raw, final int maxLineLength )
{
// Is this a base case?
if ( raw.length() <= maxLineLength )
{
// \n characters force a break regardless of where they are.
// Search the remainder segment for one.
int endIndex = raw.indexOf( '\n' );
if ( endIndex != -1 )
{
String line = raw.substring( 0, endIndex );
List<String> lines = wrap( raw.substring( endIndex + 1 ), raw.length() );
lines.add( 0, line );
return lines;
}
else
{
// Otherwise this really is a base case
// Init return list
List<String> lines = new LinkedList<String>();
// Add the whole remainder as the first inserted line
lines.add( raw );
return lines;
}
}
else
{
int endIndex = -1;
// \n characters force a break regardless of where they are.
// Search in our segment for one.
endIndex = raw.substring( 0, maxLineLength ).indexOf( '\n' );
// If no \n was found, search for a whitespace character closest to the end of our segment.
if ( endIndex == -1 )
{
char[] rawArray = raw.toCharArray();
for ( int i = maxLineLength; i > 0 && endIndex == -1; i-- )
{
if ( Character.isWhitespace( rawArray[i] ) )
{
endIndex = i;
}
}
}
// If no intelligent split point was found, just use the max size as that point
if ( endIndex == -1 )
{
endIndex = maxLineLength;
}
int splitSpacing = 0;
/*
* Remove/skip-over whitespace splitting characters. This avoids including \n manual split characters, which
* aren't allowed by the IRC protocol, as well as having a space at the start of each automatically wrapped
* line.
*/
if ( Character.isWhitespace( raw.charAt( endIndex ) ) )
{
splitSpacing = 1;
}
String line = raw.substring( 0, endIndex );
// Recurse on remainder of raw
List<String> lines = wrap( raw.substring( endIndex + splitSpacing ), maxLineLength );
// Prepend our line, so the text stays in the original order
lines.add( 0, line );
return lines;
}
}
}
| Add license header to LineWrap
| keratin-core/src/ca/szc/keratin/core/misc/LineWrap.java | Add license header to LineWrap | <ide><path>eratin-core/src/ca/szc/keratin/core/misc/LineWrap.java
<add>/**
<add> * Copyright (C) 2013 Alexander Szczuczko
<add> *
<add> * This file may be modified and distributed under the terms
<add> * of the MIT license. See the LICENSE file for details.
<add> */
<ide> package ca.szc.keratin.core.misc;
<ide>
<ide> import java.util.LinkedList; |
|
Java | apache-2.0 | aec6b4af48c407a8e055656f0183f8d6eae1c9e2 | 0 | mufaddalq/cloudstack-datera-driver,jcshen007/cloudstack,wido/cloudstack,jcshen007/cloudstack,jcshen007/cloudstack,resmo/cloudstack,wido/cloudstack,mufaddalq/cloudstack-datera-driver,GabrielBrascher/cloudstack,resmo/cloudstack,DaanHoogland/cloudstack,DaanHoogland/cloudstack,resmo/cloudstack,GabrielBrascher/cloudstack,DaanHoogland/cloudstack,jcshen007/cloudstack,wido/cloudstack,resmo/cloudstack,DaanHoogland/cloudstack,mufaddalq/cloudstack-datera-driver,wido/cloudstack,mufaddalq/cloudstack-datera-driver,wido/cloudstack,DaanHoogland/cloudstack,GabrielBrascher/cloudstack,resmo/cloudstack,GabrielBrascher/cloudstack,resmo/cloudstack,jcshen007/cloudstack,DaanHoogland/cloudstack,wido/cloudstack,GabrielBrascher/cloudstack,GabrielBrascher/cloudstack,mufaddalq/cloudstack-datera-driver,wido/cloudstack,mufaddalq/cloudstack-datera-driver,DaanHoogland/cloudstack,jcshen007/cloudstack,resmo/cloudstack,jcshen007/cloudstack,GabrielBrascher/cloudstack | // Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.upgrade.dao;
import java.io.File;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
import org.apache.log4j.Logger;
import com.cloud.utils.crypt.DBEncryptionUtil;
import com.cloud.utils.exception.CloudRuntimeException;
import com.cloud.utils.script.Script;
public class Upgrade302to40 extends Upgrade30xBase implements DbUpgrade {
final static Logger s_logger = Logger.getLogger(Upgrade302to40.class);
@Override
public String[] getUpgradableVersionRange() {
return new String[] { "3.0.2", "4.0.0" };
}
@Override
public String getUpgradedVersion() {
return "4.0.0";
}
@Override
public boolean supportsRollingUpgrade() {
return false;
}
@Override
public File[] getPrepareScripts() {
String script = Script.findScript("", "db/schema-302to40.sql");
if (script == null) {
throw new CloudRuntimeException("Unable to find db/schema-302to40.sql");
}
return new File[] { new File(script) };
}
@Override
public void performDataMigration(Connection conn) {
correctVRProviders(conn);
correctMultiplePhysicaNetworkSetups(conn);
addHostDetailsUniqueKey(conn);
addVpcProvider(conn);
updateRouterNetworkRef(conn);
fixForeignKeys(conn);
}
@Override
public File[] getCleanupScripts() {
String script = Script.findScript("", "db/schema-302to40-cleanup.sql");
if (script == null) {
throw new CloudRuntimeException("Unable to find db/schema-302to40-cleanup.sql");
}
return new File[] { new File(script) };
}
private void correctVRProviders(Connection conn) {
PreparedStatement pstmtVR = null;
ResultSet rsVR = null;
PreparedStatement pstmt = null;
ResultSet rs = null;
try{
pstmtVR = conn.prepareStatement("SELECT id, nsp_id FROM `cloud`.`virtual_router_providers` where type = 'VirtualRouter' AND removed IS NULL");
rsVR = pstmtVR.executeQuery();
while (rsVR.next()) {
long vrId = rsVR.getLong(1);
long nspId = rsVR.getLong(2);
//check that this nspId points to a VR provider.
pstmt = conn.prepareStatement("SELECT physical_network_id, provider_name FROM `cloud`.`physical_network_service_providers` where id = ?");
pstmt.setLong(1, nspId);
rs = pstmt.executeQuery();
if(rs.next()){
long physicalNetworkId = rs.getLong(1);
String providerName = rs.getString(2);
if(!providerName.equalsIgnoreCase("VirtualRouter")){
//mismatch, correct the nsp_id in VR
PreparedStatement pstmt1 = null;
ResultSet rs1 = null;
pstmt1 = conn.prepareStatement("SELECT id FROM `cloud`.`physical_network_service_providers` where physical_network_id = ? AND provider_name = ? AND removed IS NULL");
pstmt1.setLong(1, physicalNetworkId);
pstmt1.setString(2, "VirtualRouter");
rs1 = pstmt1.executeQuery();
if(rs1.next()){
long correctNSPId = rs1.getLong(1);
//update VR entry
PreparedStatement pstmtUpdate = null;
String updateNSPId = "UPDATE `cloud`.`virtual_router_providers` SET nsp_id = ? WHERE id = ?";
pstmtUpdate = conn.prepareStatement(updateNSPId);
pstmtUpdate.setLong(1, correctNSPId);
pstmtUpdate.setLong(2, vrId);
pstmtUpdate.executeUpdate();
pstmtUpdate.close();
}
rs1.close();
pstmt1.close();
}
}
rs.close();
pstmt.close();
}
}catch (SQLException e) {
throw new CloudRuntimeException("Exception while correcting Virtual Router Entries", e);
} finally {
if (rsVR != null) {
try {
rsVR.close();
}catch (SQLException e) {
}
}
if (pstmtVR != null) {
try {
pstmtVR.close();
} catch (SQLException e) {
}
}
if (rs != null) {
try {
rs.close();
}catch (SQLException e) {
}
}
if (pstmt != null) {
try {
pstmt.close();
} catch (SQLException e) {
}
}
}
}
private void correctMultiplePhysicaNetworkSetups(Connection conn) {
PreparedStatement pstmtZone = null;
ResultSet rsZone = null;
PreparedStatement pstmt = null;
ResultSet rs = null;
try{
//check if multiple physical networks with 'Guest' Traffic types are present
//Yes:
//1) check if there are guest networks without tags, if yes then add a new physical network with default tag for them
//2) Check if there are physical network tags present
//No: Add unique tag to each physical network
//3) Get all guest networks unique network offering id's
//Clone each for each physical network and add the tag.
//add ntwk service map entries
//update all guest networks of 1 physical network having this offering id to this new offering id
pstmtZone = conn.prepareStatement("SELECT id, domain_id, networktype, name, uuid FROM `cloud`.`data_center`");
rsZone = pstmtZone.executeQuery();
while (rsZone.next()) {
long zoneId = rsZone.getLong(1);
Long domainId = rsZone.getLong(2);
String networkType = rsZone.getString(3);
String zoneName = rsZone.getString(4);
String uuid = rsZone.getString(5);
PreparedStatement pstmtUpdate = null;
if(uuid == null){
uuid = UUID.randomUUID().toString();
String updateUuid = "UPDATE `cloud`.`data_center` SET uuid = ? WHERE id = ?";
pstmtUpdate = conn.prepareStatement(updateUuid);
pstmtUpdate.setString(1, uuid);
pstmtUpdate.setLong(2, zoneId);
pstmtUpdate.executeUpdate();
pstmtUpdate.close();
}
//check if any networks were untagged and remaining to be mapped to a physical network
pstmt = conn.prepareStatement("SELECT count(n.id) FROM networks n WHERE n.physical_network_id IS NULL AND n.traffic_type = 'Guest' and n.data_center_id = ? and n.removed is null");
pstmt.setLong(1, zoneId);
rs = pstmt.executeQuery();
if(rs.next()){
Long count = rs.getLong(1);
if(count > 0){
// find the default tag to use from global config or use 'cloud-private'
String xenGuestLabel = getNetworkLabelFromConfig(conn, "xen.guest.network.device");
//Decrypt this value.
xenGuestLabel = DBEncryptionUtil.decrypt(xenGuestLabel);
//make sure that no physical network with this traffic label already exists. if yes, error out.
if(xenGuestLabel != null){
PreparedStatement pstmt5 = conn.prepareStatement("SELECT count(*) FROM `cloud`.`physical_network_traffic_types` pntt JOIN `cloud`.`physical_network` pn ON pntt.physical_network_id = pn.id WHERE pntt.traffic_type ='Guest' AND pn.data_center_id = ? AND pntt.xen_network_label = ?");
pstmt5.setLong(1, zoneId);
pstmt5.setString(2, xenGuestLabel);
ResultSet rsSameLabel = pstmt5.executeQuery();
if(rsSameLabel.next()){
Long sameLabelcount = rsSameLabel.getLong(1);
if(sameLabelcount > 0){
s_logger.error("There are untagged networks for which we need to add a physical network with Xen traffic label = 'xen.guest.network.device' config value, which is: "+xenGuestLabel);
s_logger.error("However already there are "+sameLabelcount+" physical networks setup with same traffic label, cannot upgrade");
throw new CloudRuntimeException("Cannot upgrade this setup since a physical network with same traffic label: "+xenGuestLabel+" already exists, Please check logs and contact Support.");
}
}
}
//Create a physical network with guest traffic type and this tag
long physicalNetworkId = addPhysicalNetworkToZone(conn, zoneId, zoneName, networkType, null, domainId);
addTrafficType(conn, physicalNetworkId, "Guest", xenGuestLabel, null, null);
addDefaultVRProvider(conn, physicalNetworkId, zoneId);
addDefaultSGProvider(conn, physicalNetworkId, zoneId, networkType, true);
PreparedStatement pstmt3 = conn.prepareStatement("SELECT n.id FROM networks n WHERE n.physical_network_id IS NULL AND n.traffic_type = 'Guest' and n.data_center_id = ? and n.removed is null");
pstmt3.setLong(1, zoneId);
ResultSet rsNet = pstmt3.executeQuery();
s_logger.debug("Adding PhysicalNetwork to VLAN");
s_logger.debug("Adding PhysicalNetwork to user_ip_address");
s_logger.debug("Adding PhysicalNetwork to networks");
while(rsNet.next()){
Long networkId = rsNet.getLong(1);
addPhysicalNtwk_To_Ntwk_IP_Vlan(conn, physicalNetworkId,networkId);
}
rsNet.close();
pstmt3.close();
}
}
rs.close();
pstmt.close();
boolean multiplePhysicalNetworks = false;
pstmt = conn.prepareStatement("SELECT count(*) FROM `cloud`.`physical_network_traffic_types` pntt JOIN `cloud`.`physical_network` pn ON pntt.physical_network_id = pn.id WHERE pntt.traffic_type ='Guest' and pn.data_center_id = ?");
pstmt.setLong(1, zoneId);
rs = pstmt.executeQuery();
if(rs.next()){
Long count = rs.getLong(1);
if(count > 1){
s_logger.debug("There are "+count+" physical networks setup");
multiplePhysicalNetworks = true;
}
}
rs.close();
pstmt.close();
if(multiplePhysicalNetworks){
//check if guest vnet is wrongly configured by earlier upgrade. If yes error out
//check if any vnet is allocated and guest networks are using vnet But the physical network id does not match on the vnet and guest network.
PreparedStatement pstmt4 = conn.prepareStatement("SELECT v.id, v.vnet, v.reservation_id, v.physical_network_id as vpid, n.id, n.physical_network_id as npid FROM `cloud`.`op_dc_vnet_alloc` v JOIN `cloud`.`networks` n ON CONCAT('vlan://' , v.vnet) = n.broadcast_uri WHERE v.taken IS NOT NULL AND v.data_center_id = ? AND n.removed IS NULL AND v.physical_network_id != n.physical_network_id");
pstmt4.setLong(1, zoneId);
ResultSet rsVNet = pstmt4.executeQuery();
if(rsVNet.next()){
String vnet = rsVNet.getString(2);
String networkId = rsVNet.getString(5);
String vpid = rsVNet.getString(4);
String npid = rsVNet.getString(6);
s_logger.error("Guest Vnet assignment is set wrongly . Cannot upgrade until that is corrected. Example- Vnet: "+ vnet +" has physical network id: " + vpid +" ,but the guest network: " +networkId+" that uses it has physical network id: " +npid );
String message = "Cannot upgrade. Your setup has multiple Physical Networks and is using guest Vnet that is assigned wrongly. To upgrade, first correct the setup by doing the following: \n" +
"1. Please rollback to your 2.2.14 setup\n" +
"2. Please stop all VMs using isolated(virtual) networks through CloudStack\n" +
"3. Run following query to find if any networks still have nics allocated:\n\t"+
"a) check if any virtual guest networks still have allocated nics by running:\n\t" +
"SELECT DISTINCT op.id from `cloud`.`op_networks` op JOIN `cloud`.`networks` n on op.id=n.id WHERE nics_count != 0 AND guest_type = 'Virtual';\n\t"+
"b) If this returns any networkd ids, then ensure that all VMs are stopped, no new VM is being started, and then shutdown management server\n\t"+
"c) Clean up the nics count for the 'virtual' network id's returned in step (a) by running this:\n\t"+
"UPDATE `cloud`.`op_networks` SET nics_count = 0 WHERE id = <enter id of virtual network>\n\t"+
"d) Restart management server and wait for all networks to shutdown. [Networks shutdown will be determined by network.gc.interval and network.gc.wait seconds] \n"+
"4. Please ensure all networks are shutdown and all guest Vnet's are free.\n" +
"5. Run upgrade. This will allocate all your guest vnet range to first physical network. \n" +
"6. Reconfigure the vnet ranges for each physical network as desired by using updatePhysicalNetwork API \n" +
"7. Start all your VMs";
s_logger.error(message);
throw new CloudRuntimeException("Cannot upgrade this setup since Guest Vnet assignment to the multiple physical networks is incorrect. Please check the logs for details on how to proceed");
}
rsVNet.close();
pstmt4.close();
//Clean up any vnets that have no live networks/nics
pstmt4 = conn.prepareStatement("SELECT v.id, v.vnet, v.reservation_id FROM `cloud`.`op_dc_vnet_alloc` v LEFT JOIN networks n ON CONCAT('vlan://' , v.vnet) = n.broadcast_uri WHERE v.taken IS NOT NULL AND v.data_center_id = ? AND n.broadcast_uri IS NULL AND n.removed IS NULL");
pstmt4.setLong(1, zoneId);
rsVNet = pstmt4.executeQuery();
while(rsVNet.next()){
Long vnet_id = rsVNet.getLong(1);
String vnetValue = rsVNet.getString(2);
String reservationId = rsVNet.getString(3);
//does this vnet have any nic associated?
PreparedStatement pstmt5 = conn.prepareStatement("SELECT id, instance_id FROM `cloud`.`nics` where broadcast_uri = ? and removed IS NULL");
String uri = "vlan://"+vnetValue;
pstmt5.setString(1, uri);
ResultSet rsNic = pstmt5.executeQuery();
Long nic_id = rsNic.getLong(1);
Long instance_id = rsNic.getLong(2);
if(rsNic.next()){
throw new CloudRuntimeException("Cannot upgrade. Please cleanup the guest vnet: "+ vnetValue +" , it is being used by nic_id: "+ nic_id +" , instance_id: " + instance_id );
}
//free this vnet
String freeVnet = "UPDATE `cloud`.`op_dc_vnet_alloc` SET account_id = NULL, taken = NULL, reservation_id = NULL WHERE id = ?";
pstmtUpdate = conn.prepareStatement(freeVnet);
pstmtUpdate.setLong(1, vnet_id);
pstmtUpdate.executeUpdate();
pstmtUpdate.close();
}
rsVNet.close();
pstmt4.close();
//add tags to the physical networks if not present and clone offerings
pstmt = conn.prepareStatement("SELECT pn.id as pid , ptag.tag as tag FROM `cloud`.`physical_network` pn LEFT JOIN `cloud`.`physical_network_tags` ptag ON pn.id = ptag.physical_network_id where pn.data_center_id = ?");
pstmt.setLong(1, zoneId);
rs = pstmt.executeQuery();
while(rs.next()){
long physicalNetworkId = rs.getLong("pid");
String tag = rs.getString("tag");
if(tag == null){
//need to add unique tag
String newTag = "pNtwk-tag-" + physicalNetworkId;
String updateVnet = "INSERT INTO `cloud`.`physical_network_tags`(tag, physical_network_id) VALUES( ?, ? )";
pstmtUpdate = conn.prepareStatement(updateVnet);
pstmtUpdate.setString(1, newTag);
pstmtUpdate.setLong(2, physicalNetworkId);
pstmtUpdate.executeUpdate();
pstmtUpdate.close();
//clone offerings and tag them with this new tag, if there are any guest networks for this physical network
PreparedStatement pstmt2 = null;
ResultSet rs2 = null;
pstmt2 = conn.prepareStatement("SELECT distinct network_offering_id FROM `cloud`.`networks` where traffic_type= 'Guest' and physical_network_id = ? and removed is null");
pstmt2.setLong(1, physicalNetworkId);
rs2 = pstmt2.executeQuery();
while(rs2.next()){
//clone each offering, add new tag, clone offering-svc-map, update guest networks with new offering id
long networkOfferingId = rs2.getLong(1);
cloneOfferingAndAddTag(conn, networkOfferingId, physicalNetworkId, newTag);
}
rs2.close();
pstmt2.close();
}
}
rs.close();
pstmt.close();
}
}
} catch (SQLException e) {
throw new CloudRuntimeException("Exception while correcting PhysicalNetwork setup", e);
} finally {
if (rsZone != null) {
try {
rsZone.close();
}catch (SQLException e) {
}
}
if (pstmtZone != null) {
try {
pstmtZone.close();
} catch (SQLException e) {
}
}
if (rs != null) {
try {
rs.close();
}catch (SQLException e) {
}
}
if (pstmt != null) {
try {
pstmt.close();
} catch (SQLException e) {
}
}
}
}
private void cloneOfferingAndAddTag(Connection conn, long networkOfferingId, long physicalNetworkId, String newTag) {
PreparedStatement pstmt = null;
ResultSet rs = null;
try{
pstmt = conn.prepareStatement("select count(*) from `cloud`.`network_offerings`");
rs = pstmt.executeQuery();
long ntwkOffCount = 0;
while (rs.next()) {
ntwkOffCount = rs.getLong(1);
}
rs.close();
pstmt.close();
pstmt = conn.prepareStatement("DROP TEMPORARY TABLE IF EXISTS `cloud`.`network_offerings2`");
pstmt.executeUpdate();
pstmt = conn.prepareStatement("CREATE TEMPORARY TABLE `cloud`.`network_offerings2` ENGINE=MEMORY SELECT * FROM `cloud`.`network_offerings` WHERE id=1");
pstmt.executeUpdate();
pstmt.close();
// clone the record to
pstmt = conn.prepareStatement("INSERT INTO `cloud`.`network_offerings2` SELECT * FROM `cloud`.`network_offerings` WHERE id=?");
pstmt.setLong(1, networkOfferingId);
pstmt.executeUpdate();
pstmt.close();
pstmt = conn.prepareStatement("SELECT unique_name FROM `cloud`.`network_offerings` WHERE id=?");
pstmt.setLong(1, networkOfferingId);
rs = pstmt.executeQuery();
String uniqueName = null;
while (rs.next()) {
uniqueName = rs.getString(1) + "-" + physicalNetworkId;
}
rs.close();
pstmt.close();
pstmt = conn.prepareStatement("UPDATE `cloud`.`network_offerings2` SET id=?, unique_name=?, name=?, tags=?, uuid=? WHERE id=?");
ntwkOffCount = ntwkOffCount + 1;
long newNetworkOfferingId = ntwkOffCount;
pstmt.setLong(1, newNetworkOfferingId);
pstmt.setString(2, uniqueName);
pstmt.setString(3, uniqueName);
pstmt.setString(4, newTag);
String uuid = UUID.randomUUID().toString();
pstmt.setString(5, uuid);
pstmt.setLong(6, networkOfferingId);
pstmt.executeUpdate();
pstmt.close();
pstmt = conn.prepareStatement("INSERT INTO `cloud`.`network_offerings` SELECT * from `cloud`.`network_offerings2` WHERE id=" + newNetworkOfferingId);
pstmt.executeUpdate();
pstmt.close();
//clone service map
pstmt = conn.prepareStatement("select service, provider from `cloud`.`ntwk_offering_service_map` where network_offering_id=?");
pstmt.setLong(1, networkOfferingId);
rs = pstmt.executeQuery();
while (rs.next()) {
String service = rs.getString(1);
String provider = rs.getString(2);
pstmt = conn.prepareStatement("INSERT INTO `cloud`.`ntwk_offering_service_map` (`network_offering_id`, `service`, `provider`, `created`) values (?,?,?, now())");
pstmt.setLong(1, newNetworkOfferingId);
pstmt.setString(2, service);
pstmt.setString(3, provider);
pstmt.executeUpdate();
}
rs.close();
pstmt.close();
pstmt = conn.prepareStatement("UPDATE `cloud`.`networks` SET network_offering_id=? where physical_network_id=? and traffic_type ='Guest' and network_offering_id="+networkOfferingId);
pstmt.setLong(1, newNetworkOfferingId);
pstmt.setLong(2, physicalNetworkId);
pstmt.executeUpdate();
pstmt.close();
}catch (SQLException e) {
throw new CloudRuntimeException("Exception while cloning NetworkOffering", e);
} finally {
try {
pstmt = conn.prepareStatement("DROP TEMPORARY TABLE `cloud`.`network_offerings2`");
pstmt.executeUpdate();
if (rs != null) {
rs.close();
}
if (pstmt != null) {
pstmt.close();
}
}catch (SQLException e) {
}
}
}
private void addHostDetailsUniqueKey(Connection conn) {
s_logger.debug("Checking if host_details unique key exists, if not we will add it");
PreparedStatement pstmt = null;
ResultSet rs = null;
try {
pstmt = conn.prepareStatement("SHOW INDEX FROM `cloud`.`host_details` WHERE KEY_NAME = 'uk_host_id_name'");
rs = pstmt.executeQuery();
if (rs.next()) {
s_logger.debug("Unique key already exists on host_details - not adding new one");
}else{
//add the key
PreparedStatement pstmtUpdate = conn.prepareStatement("ALTER IGNORE TABLE `cloud`.`host_details` ADD CONSTRAINT UNIQUE KEY `uk_host_id_name` (`host_id`, `name`)");
pstmtUpdate.executeUpdate();
s_logger.debug("Unique key did not exist on host_details - added new one");
pstmtUpdate.close();
}
} catch (SQLException e) {
throw new CloudRuntimeException("Failed to check/update the host_details unique key ", e);
} finally {
try {
if (rs != null) {
rs.close();
}
if (pstmt != null) {
pstmt.close();
}
} catch (SQLException e) {
}
}
}
private void addVpcProvider(Connection conn){
//Encrypt config params and change category to Hidden
s_logger.debug("Adding vpc provider to all physical networks in the system");
PreparedStatement pstmt = null;
ResultSet rs = null;
try {
pstmt = conn.prepareStatement("SELECT id FROM `cloud`.`physical_network` WHERE removed is NULL");
rs = pstmt.executeQuery();
while (rs.next()) {
Long pNtwkId = rs.getLong(1);
//insert provider
pstmt = conn.prepareStatement("INSERT INTO `cloud`.`physical_network_service_providers` " +
"(`physical_network_id`, `provider_name`, `state`, `vpn_service_provided`, `dhcp_service_provided`, " +
"`dns_service_provided`, `gateway_service_provided`, `firewall_service_provided`, `source_nat_service_provided`," +
" `load_balance_service_provided`, `static_nat_service_provided`, `port_forwarding_service_provided`," +
" `user_data_service_provided`, `security_group_service_provided`) " +
"VALUES (?, 'VpcVirtualRouter', 'Enabled', 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0)");
pstmt.setLong(1, pNtwkId);
pstmt.executeUpdate();
//get provider id
pstmt = conn.prepareStatement("SELECT id FROM `cloud`.`physical_network_service_providers` " +
"WHERE physical_network_id=? and provider_name='VpcVirtualRouter'");
pstmt.setLong(1, pNtwkId);
ResultSet rs1 = pstmt.executeQuery();
rs1.next();
long providerId = rs1.getLong(1);
//insert VR element
pstmt = conn.prepareStatement("INSERT INTO `cloud`.`virtual_router_providers` (`nsp_id`, `type`, `enabled`) " +
"VALUES (?, 'VPCVirtualRouter', 1)");
pstmt.setLong(1, providerId);
pstmt.executeUpdate();
s_logger.debug("Added VPC Virtual router provider for physical network id=" + pNtwkId);
}
} catch (SQLException e) {
throw new CloudRuntimeException("Unable add VPC physical network service provider ", e);
} finally {
try {
if (rs != null) {
rs.close();
}
if (pstmt != null) {
pstmt.close();
}
} catch (SQLException e) {
}
}
s_logger.debug("Done adding VPC physical network service providers to all physical networks");
}
private void updateRouterNetworkRef(Connection conn){
//Encrypt config params and change category to Hidden
s_logger.debug("Updating router network ref");
PreparedStatement pstmt = null;
ResultSet rs = null;
try {
pstmt = conn.prepareStatement("SELECT d.id, d.network_id FROM `cloud`.`domain_router` d, `cloud`.`vm_instance` v " +
"WHERE d.id=v.id AND v.removed is NULL");
rs = pstmt.executeQuery();
while (rs.next()) {
Long routerId = rs.getLong(1);
Long networkId = rs.getLong(2);
//get the network type
pstmt = conn.prepareStatement("SELECT guest_type from `cloud`.`networks` where id=?");
pstmt.setLong(1, networkId);
ResultSet rs1 = pstmt.executeQuery();
rs1.next();
String networkType = rs1.getString(1);
//insert the reference
pstmt = conn.prepareStatement("INSERT INTO `cloud`.`router_network_ref` (router_id, network_id, guest_type) " +
"VALUES (?, ?, ?)");
pstmt.setLong(1, routerId);
pstmt.setLong(2, networkId);
pstmt.setString(3, networkType);
pstmt.executeUpdate();
s_logger.debug("Added reference for router id=" + routerId + " and network id=" + networkId);
}
} catch (SQLException e) {
throw new CloudRuntimeException("Failed to update the router/network reference ", e);
} finally {
try {
if (rs != null) {
rs.close();
}
if (pstmt != null) {
pstmt.close();
}
} catch (SQLException e) {
}
}
s_logger.debug("Done updating router/network references");
}
private void fixForeignKeys(Connection conn) {
//Drop the keys (if exist)
List<String> keys = new ArrayList<String>();
keys.add("fk_ssh_keypair__account_id");
keys.add("fk_ssh_keypair__domain_id");
keys.add("fk_ssh_keypairs__account_id");
keys.add("fk_ssh_keypairs__domain_id");
DbUpgradeUtils.dropKeysIfExist(conn, "ssh_keypairs", keys, true);
keys = new ArrayList<String>();
keys.add("fk_ssh_keypair__account_id");
keys.add("fk_ssh_keypair__domain_id");
keys.add("fk_ssh_keypairs__account_id");
keys.add("fk_ssh_keypairs__domain_id");
DbUpgradeUtils.dropKeysIfExist(conn, "ssh_keypairs", keys, false);
//insert the keys anew
try {
PreparedStatement pstmt; pstmt = conn.prepareStatement("ALTER TABLE `cloud`.`ssh_keypairs` ADD " +
"CONSTRAINT `fk_ssh_keypair__account_id` FOREIGN KEY `fk_ssh_keypair__account_id` (`account_id`)" +
" REFERENCES `account` (`id`) ON DELETE CASCADE");
pstmt.executeUpdate();
pstmt.close();
} catch (SQLException e) {
throw new CloudRuntimeException("Unable to execute ssh_keypairs table update for adding account_id foreign key", e);
}
try {
PreparedStatement pstmt; pstmt = conn.prepareStatement("ALTER TABLE `cloud`.`ssh_keypairs` ADD CONSTRAINT" +
" `fk_ssh_keypair__domain_id` FOREIGN KEY `fk_ssh_keypair__domain_id` (`domain_id`) " +
"REFERENCES `domain` (`id`) ON DELETE CASCADE");
pstmt.executeUpdate();
pstmt.close();
} catch (SQLException e) {
throw new CloudRuntimeException("Unable to execute ssh_keypairs table update for adding domain_id foreign key", e);
}
}
}
| server/src/com/cloud/upgrade/dao/Upgrade302to40.java | // Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.upgrade.dao;
import java.io.File;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
import org.apache.log4j.Logger;
import com.cloud.utils.crypt.DBEncryptionUtil;
import com.cloud.utils.exception.CloudRuntimeException;
import com.cloud.utils.script.Script;
public class Upgrade302to40 extends Upgrade30xBase implements DbUpgrade {
final static Logger s_logger = Logger.getLogger(Upgrade302to40.class);
@Override
public String[] getUpgradableVersionRange() {
return new String[] { "3.0.2", "4.0.0" };
}
@Override
public String getUpgradedVersion() {
return "4.0.0";
}
@Override
public boolean supportsRollingUpgrade() {
return false;
}
@Override
public File[] getPrepareScripts() {
String script = Script.findScript("", "db/schema-302to40.sql");
if (script == null) {
throw new CloudRuntimeException("Unable to find db/schema-302to40.sql");
}
return new File[] { new File(script) };
}
@Override
public void performDataMigration(Connection conn) {
correctVRProviders(conn);
correctMultiplePhysicaNetworkSetups(conn);
addHostDetailsUniqueKey(conn);
addVpcProvider(conn);
updateRouterNetworkRef(conn);
fixForeignKeys(conn);
}
@Override
public File[] getCleanupScripts() {
String script = Script.findScript("", "db/schema-302to40-cleanup.sql");
if (script == null) {
throw new CloudRuntimeException("Unable to find db/schema-302to40-cleanup.sql");
}
return new File[] { new File(script) };
}
private void correctVRProviders(Connection conn) {
PreparedStatement pstmtVR = null;
ResultSet rsVR = null;
PreparedStatement pstmt = null;
ResultSet rs = null;
try{
pstmtVR = conn.prepareStatement("SELECT id, nsp_id FROM `cloud`.`virtual_router_providers` where type = 'VirtualRouter' AND removed IS NULL");
rsVR = pstmtVR.executeQuery();
while (rsVR.next()) {
long vrId = rsVR.getLong(1);
long nspId = rsVR.getLong(2);
//check that this nspId points to a VR provider.
pstmt = conn.prepareStatement("SELECT physical_network_id, provider_name FROM `cloud`.`physical_network_service_providers` where id = ?");
pstmt.setLong(1, nspId);
rs = pstmt.executeQuery();
if(rs.next()){
long physicalNetworkId = rs.getLong(1);
String providerName = rs.getString(2);
if(!providerName.equalsIgnoreCase("VirtualRouter")){
//mismatch, correct the nsp_id in VR
PreparedStatement pstmt1 = null;
ResultSet rs1 = null;
pstmt1 = conn.prepareStatement("SELECT id FROM `cloud`.`physical_network_service_providers` where physical_network_id = ? AND provider_name = ? AND removed IS NULL");
pstmt1.setLong(1, physicalNetworkId);
pstmt1.setString(2, "VirtualRouter");
rs1 = pstmt1.executeQuery();
if(rs1.next()){
long correctNSPId = rs1.getLong(1);
//update VR entry
PreparedStatement pstmtUpdate = null;
String updateNSPId = "UPDATE `cloud`.`virtual_router_providers` SET nsp_id = ? WHERE id = ?";
pstmtUpdate = conn.prepareStatement(updateNSPId);
pstmtUpdate.setLong(1, correctNSPId);
pstmtUpdate.setLong(2, vrId);
pstmtUpdate.executeUpdate();
pstmtUpdate.close();
}
rs1.close();
pstmt1.close();
}
}
rs.close();
pstmt.close();
}
}catch (SQLException e) {
throw new CloudRuntimeException("Exception while correcting Virtual Router Entries", e);
} finally {
if (rsVR != null) {
try {
rsVR.close();
}catch (SQLException e) {
}
}
if (pstmtVR != null) {
try {
pstmtVR.close();
} catch (SQLException e) {
}
}
if (rs != null) {
try {
rs.close();
}catch (SQLException e) {
}
}
if (pstmt != null) {
try {
pstmt.close();
} catch (SQLException e) {
}
}
}
}
private void correctMultiplePhysicaNetworkSetups(Connection conn) {
PreparedStatement pstmtZone = null;
ResultSet rsZone = null;
PreparedStatement pstmt = null;
ResultSet rs = null;
try{
//check if multiple physical networks with 'Guest' Traffic types are present
//Yes:
//1) check if there are guest networks without tags, if yes then add a new physical network with default tag for them
//2) Check if there are physical network tags present
//No: Add unique tag to each physical network
//3) Get all guest networks unique network offering id's
//Clone each for each physical network and add the tag.
//add ntwk service map entries
//update all guest networks of 1 physical network having this offering id to this new offering id
pstmtZone = conn.prepareStatement("SELECT id, domain_id, networktype, name, uuid FROM `cloud`.`data_center`");
rsZone = pstmtZone.executeQuery();
while (rsZone.next()) {
long zoneId = rsZone.getLong(1);
Long domainId = rsZone.getLong(2);
String networkType = rsZone.getString(3);
String zoneName = rsZone.getString(4);
String uuid = rsZone.getString(5);
PreparedStatement pstmtUpdate = null;
if(uuid == null){
uuid = UUID.randomUUID().toString();
String updateUuid = "UPDATE `cloud`.`data_center` SET uuid = ? WHERE id = ?";
pstmtUpdate = conn.prepareStatement(updateUuid);
pstmtUpdate.setString(1, uuid);
pstmtUpdate.setLong(2, zoneId);
pstmtUpdate.executeUpdate();
pstmtUpdate.close();
}
//check if any networks were untagged and remaining to be mapped to a physical network
pstmt = conn.prepareStatement("SELECT count(n.id) FROM networks n WHERE n.physical_network_id IS NULL AND n.traffic_type = 'Guest' and n.data_center_id = ? and n.removed is null");
pstmt.setLong(1, zoneId);
rs = pstmt.executeQuery();
if(rs.next()){
Long count = rs.getLong(1);
if(count > 0){
// find the default tag to use from global config or use 'cloud-private'
String xenGuestLabel = getNetworkLabelFromConfig(conn, "xen.guest.network.device");
//Decrypt this value.
xenGuestLabel = DBEncryptionUtil.decrypt(xenGuestLabel);
//make sure that no physical network with this traffic label already exists. if yes, error out.
if(xenGuestLabel != null){
PreparedStatement pstmt5 = conn.prepareStatement("SELECT count(*) FROM `cloud`.`physical_network_traffic_types` pntt JOIN `cloud`.`physical_network` pn ON pntt.physical_network_id = pn.id WHERE pntt.traffic_type ='Guest' AND pn.data_center_id = ? AND pntt.xen_network_label = ?");
pstmt5.setLong(1, zoneId);
pstmt5.setString(2, xenGuestLabel);
ResultSet rsSameLabel = pstmt5.executeQuery();
if(rsSameLabel.next()){
Long sameLabelcount = rsSameLabel.getLong(1);
if(sameLabelcount > 0){
s_logger.error("There are untagged networks for which we need to add a physical network with Xen traffic label = 'xen.guest.network.device' config value, which is: "+xenGuestLabel);
s_logger.error("However already there are "+sameLabelcount+" physical networks setup with same traffic label, cannot upgrade");
throw new CloudRuntimeException("Cannot upgrade this setup since a physical network with same traffic label: "+xenGuestLabel+" already exists, Please check logs and contact Support.");
}
}
}
//Create a physical network with guest traffic type and this tag
long physicalNetworkId = addPhysicalNetworkToZone(conn, zoneId, zoneName, networkType, null, domainId);
addTrafficType(conn, physicalNetworkId, "Guest", xenGuestLabel, null, null);
addDefaultVRProvider(conn, physicalNetworkId, zoneId);
addDefaultSGProvider(conn, physicalNetworkId, zoneId, networkType, true);
PreparedStatement pstmt3 = conn.prepareStatement("SELECT n.id FROM networks n WHERE n.physical_network_id IS NULL AND n.traffic_type = 'Guest' and n.data_center_id = ? and n.removed is null");
pstmt3.setLong(1, zoneId);
ResultSet rsNet = pstmt3.executeQuery();
s_logger.debug("Adding PhysicalNetwork to VLAN");
s_logger.debug("Adding PhysicalNetwork to user_ip_address");
s_logger.debug("Adding PhysicalNetwork to networks");
while(rsNet.next()){
Long networkId = rsNet.getLong(1);
addPhysicalNtwk_To_Ntwk_IP_Vlan(conn, physicalNetworkId,networkId);
}
rsNet.close();
pstmt3.close();
}
}
rs.close();
pstmt.close();
boolean multiplePhysicalNetworks = false;
pstmt = conn.prepareStatement("SELECT count(*) FROM `cloud`.`physical_network_traffic_types` pntt JOIN `cloud`.`physical_network` pn ON pntt.physical_network_id = pn.id WHERE pntt.traffic_type ='Guest' and pn.data_center_id = ?");
pstmt.setLong(1, zoneId);
rs = pstmt.executeQuery();
if(rs.next()){
Long count = rs.getLong(1);
if(count > 1){
s_logger.debug("There are "+count+" physical networks setup");
multiplePhysicalNetworks = true;
}
}
rs.close();
pstmt.close();
if(multiplePhysicalNetworks){
//check if guest vnet is wrongly configured by earlier upgrade. If yes error out
//check if any vnet is allocated and guest networks are using vnet But the physical network id does not match on the vnet and guest network.
PreparedStatement pstmt4 = conn.prepareStatement("SELECT v.id, v.vnet, v.reservation_id, v.physical_network_id as vpid, n.id, n.physical_network_id as npid FROM `cloud`.`op_dc_vnet_alloc` v JOIN `cloud`.`networks` n ON CONCAT('vlan://' , v.vnet) = n.broadcast_uri WHERE v.taken IS NOT NULL AND v.data_center_id = ? AND n.removed IS NULL AND v.physical_network_id != n.physical_network_id");
pstmt4.setLong(1, zoneId);
ResultSet rsVNet = pstmt4.executeQuery();
if(rsVNet.next()){
String vnet = rsVNet.getString(2);
String networkId = rsVNet.getString(5);
String vpid = rsVNet.getString(4);
String npid = rsVNet.getString(6);
s_logger.error("Guest Vnet assignment is set wrongly . Cannot upgrade until that is corrected. Example- Vnet: "+ vnet +" has physical network id: " + vpid +" ,but the guest network: " +networkId+" that uses it has physical network id: " +npid );
String message = "Cannot upgrade. Your setup has multiple Physical Networks and is using guest Vnet that is assigned wrongly. To upgrade, first correct the setup by doing the following: \n" +
"1. Please rollback to your 2.2.14 setup\n" +
"2. Please stop all VMs using isolated(virtual) networks through CloudStack\n" +
"3. Run following query to find if any networks still have nics allocated:\n\t"+
"a) check if any virtual guest networks still have allocated nics by running:\n\t" +
"SELECT DISTINCT op.id from `cloud`.`op_networks` op JOIN `cloud`.`networks` n on op.id=n.id WHERE nics_count != 0 AND guest_type = 'Virtual';\n\t"+
"b) If this returns any networkd ids, then ensure that all VMs are stopped, no new VM is being started, and then shutdown management server\n\t"+
"c) Clean up the nics count for the 'virtual' network id's returned in step (a) by running this:\n\t"+
"UPDATE `cloud`.`op_networks` SET nics_count = 0 WHERE id = <enter id of virtual network>\n\t"+
"d) Restart management server and wait for all networks to shutdown. [Networks shutdown will be determined by network.gc.interval and network.gc.wait seconds] \n"+
"4. Please ensure all networks are shutdown and all guest Vnet's are free.\n" +
"5. Run upgrade. This will allocate all your guest vnet range to first physical network. \n" +
"6. Reconfigure the vnet ranges for each physical network as desired by using updatePhysicalNetwork API \n" +
"7. Start all your VMs";
s_logger.error(message);
throw new CloudRuntimeException("Cannot upgrade this setup since Guest Vnet assignment to the multiple physical networks is incorrect. Please check the logs for details on how to proceed");
}
rsVNet.close();
pstmt4.close();
//Clean up any vnets that have no live networks/nics
pstmt4 = conn.prepareStatement("SELECT v.id, v.vnet, v.reservation_id FROM `cloud`.`op_dc_vnet_alloc` v LEFT JOIN networks n ON CONCAT('vlan://' , v.vnet) = n.broadcast_uri WHERE v.taken IS NOT NULL AND v.data_center_id = ? AND n.broadcast_uri IS NULL AND n.removed IS NULL");
pstmt4.setLong(1, zoneId);
rsVNet = pstmt4.executeQuery();
while(rsVNet.next()){
Long vnet_id = rsVNet.getLong(1);
String vnetValue = rsVNet.getString(2);
String reservationId = rsVNet.getString(3);
//does this vnet have any nic associated?
PreparedStatement pstmt5 = conn.prepareStatement("SELECT id, instance_id FROM `cloud`.`nics` where broadcast_uri = ? and removed IS NULL");
String uri = "vlan://"+vnetValue;
pstmt5.setString(1, uri);
ResultSet rsNic = pstmt5.executeQuery();
Long nic_id = rsNic.getLong(1);
Long instance_id = rsNic.getLong(2);
if(rsNic.next()){
throw new CloudRuntimeException("Cannot upgrade. Please cleanup the guest vnet: "+ vnetValue +" , it is being used by nic_id: "+ nic_id +" , instance_id: " + instance_id );
}
//free this vnet
String freeVnet = "UPDATE `cloud`.`op_dc_vnet_alloc` SET account_id = NULL, taken = NULL, reservation_id = NULL WHERE id = ?";
pstmtUpdate = conn.prepareStatement(freeVnet);
pstmtUpdate.setLong(1, vnet_id);
pstmtUpdate.executeUpdate();
pstmtUpdate.close();
}
rsVNet.close();
pstmt4.close();
//add tags to the physical networks if not present and clone offerings
pstmt = conn.prepareStatement("SELECT pn.id as pid , ptag.tag as tag FROM `cloud`.`physical_network` pn LEFT JOIN `cloud`.`physical_network_tags` ptag ON pn.id = ptag.physical_network_id where pn.data_center_id = ?");
pstmt.setLong(1, zoneId);
rs = pstmt.executeQuery();
while(rs.next()){
long physicalNetworkId = rs.getLong("pid");
String tag = rs.getString("tag");
if(tag == null){
//need to add unique tag
String newTag = "pNtwk-tag-" + physicalNetworkId;
String updateVnet = "INSERT INTO `cloud`.`physical_network_tags`(tag, physical_network_id) VALUES( ?, ? )";
pstmtUpdate = conn.prepareStatement(updateVnet);
pstmtUpdate.setString(1, newTag);
pstmtUpdate.setLong(2, physicalNetworkId);
pstmtUpdate.executeUpdate();
pstmtUpdate.close();
//clone offerings and tag them with this new tag, if there are any guest networks for this physical network
PreparedStatement pstmt2 = null;
ResultSet rs2 = null;
pstmt2 = conn.prepareStatement("SELECT distinct network_offering_id FROM `cloud`.`networks` where traffic_type= 'Guest' and physical_network_id = ? and removed is null");
pstmt2.setLong(1, physicalNetworkId);
rs2 = pstmt2.executeQuery();
while(rs2.next()){
//clone each offering, add new tag, clone offering-svc-map, update guest networks with new offering id
long networkOfferingId = rs2.getLong(1);
cloneOfferingAndAddTag(conn, networkOfferingId, physicalNetworkId, newTag);
}
rs2.close();
pstmt2.close();
}
}
rs.close();
pstmt.close();
}
}
} catch (SQLException e) {
throw new CloudRuntimeException("Exception while correcting PhysicalNetwork setup", e);
} finally {
if (rsZone != null) {
try {
rsZone.close();
}catch (SQLException e) {
}
}
if (pstmtZone != null) {
try {
pstmtZone.close();
} catch (SQLException e) {
}
}
if (rs != null) {
try {
rs.close();
}catch (SQLException e) {
}
}
if (pstmt != null) {
try {
pstmt.close();
} catch (SQLException e) {
}
}
}
}
private void cloneOfferingAndAddTag(Connection conn, long networkOfferingId, long physicalNetworkId, String newTag) {
PreparedStatement pstmt = null;
ResultSet rs = null;
try{
pstmt = conn.prepareStatement("select count(*) from `cloud`.`network_offerings`");
rs = pstmt.executeQuery();
long ntwkOffCount = 0;
while (rs.next()) {
ntwkOffCount = rs.getLong(1);
}
rs.close();
pstmt.close();
pstmt = conn.prepareStatement("DROP TEMPORARY TABLE IF EXISTS `cloud`.`network_offerings2`");
pstmt.executeUpdate();
pstmt = conn.prepareStatement("CREATE TEMPORARY TABLE `cloud`.`network_offerings2` ENGINE=MEMORY SELECT * FROM `cloud`.`network_offerings` WHERE id=1");
pstmt.executeUpdate();
pstmt.close();
// clone the record to
pstmt = conn.prepareStatement("INSERT INTO `cloud`.`network_offerings2` SELECT * FROM `cloud`.`network_offerings` WHERE id=?");
pstmt.setLong(1, networkOfferingId);
pstmt.executeUpdate();
pstmt.close();
pstmt = conn.prepareStatement("SELECT unique_name FROM `cloud`.`network_offerings` WHERE id=?");
pstmt.setLong(1, networkOfferingId);
rs = pstmt.executeQuery();
String uniqueName = null;
while (rs.next()) {
uniqueName = rs.getString(1) + "-" + physicalNetworkId;
}
rs.close();
pstmt.close();
pstmt = conn.prepareStatement("UPDATE `cloud`.`network_offerings2` SET id=?, unique_name=?, name=?, tags=?, uuid=? WHERE id=?");
ntwkOffCount = ntwkOffCount + 1;
long newNetworkOfferingId = ntwkOffCount;
pstmt.setLong(1, newNetworkOfferingId);
pstmt.setString(2, uniqueName);
pstmt.setString(3, uniqueName);
pstmt.setString(4, newTag);
String uuid = UUID.randomUUID().toString();
pstmt.setString(5, uuid);
pstmt.setLong(6, networkOfferingId);
pstmt.executeUpdate();
pstmt.close();
pstmt = conn.prepareStatement("INSERT INTO `cloud`.`network_offerings` SELECT * from `cloud`.`network_offerings2` WHERE id=" + newNetworkOfferingId);
pstmt.executeUpdate();
pstmt.close();
//clone service map
pstmt = conn.prepareStatement("select service, provider from `cloud`.`ntwk_offering_service_map` where network_offering_id=?");
pstmt.setLong(1, networkOfferingId);
rs = pstmt.executeQuery();
while (rs.next()) {
String service = rs.getString(1);
String provider = rs.getString(2);
pstmt = conn.prepareStatement("INSERT INTO `cloud`.`ntwk_offering_service_map` (`network_offering_id`, `service`, `provider`, `created`) values (?,?,?, now())");
pstmt.setLong(1, newNetworkOfferingId);
pstmt.setString(2, service);
pstmt.setString(3, provider);
pstmt.executeUpdate();
}
rs.close();
pstmt.close();
pstmt = conn.prepareStatement("UPDATE `cloud`.`networks` SET network_offering_id=? where physical_network_id=? and traffic_type ='Guest' and network_offering_id="+networkOfferingId);
pstmt.setLong(1, newNetworkOfferingId);
pstmt.setLong(2, physicalNetworkId);
pstmt.executeUpdate();
pstmt.close();
}catch (SQLException e) {
throw new CloudRuntimeException("Exception while cloning NetworkOffering", e);
} finally {
try {
pstmt = conn.prepareStatement("DROP TEMPORARY TABLE `cloud`.`network_offerings2`");
pstmt.executeUpdate();
if (rs != null) {
rs.close();
}
if (pstmt != null) {
pstmt.close();
}
}catch (SQLException e) {
}
}
}
private void addHostDetailsUniqueKey(Connection conn) {
s_logger.debug("Checking if host_details unique key exists, if not we will add it");
PreparedStatement pstmt = null;
ResultSet rs = null;
try {
pstmt = conn.prepareStatement("SHOW INDEX FROM `cloud`.`host_details` WHERE KEY_NAME = 'uk_host_id_name'");
rs = pstmt.executeQuery();
if (rs.next()) {
s_logger.debug("Unique key already exists on host_details - not adding new one");
}else{
//add the key
PreparedStatement pstmtUpdate = conn.prepareStatement("ALTER TABLE `cloud`.`host_details` ADD CONSTRAINT UNIQUE KEY `uk_host_id_name` (`host_id`, `name`)");
pstmtUpdate.executeUpdate();
s_logger.debug("Unique key did not exist on host_details - added new one");
pstmtUpdate.close();
}
} catch (SQLException e) {
throw new CloudRuntimeException("Failed to check/update the host_details unique key ", e);
} finally {
try {
if (rs != null) {
rs.close();
}
if (pstmt != null) {
pstmt.close();
}
} catch (SQLException e) {
}
}
}
private void addVpcProvider(Connection conn){
//Encrypt config params and change category to Hidden
s_logger.debug("Adding vpc provider to all physical networks in the system");
PreparedStatement pstmt = null;
ResultSet rs = null;
try {
pstmt = conn.prepareStatement("SELECT id FROM `cloud`.`physical_network` WHERE removed is NULL");
rs = pstmt.executeQuery();
while (rs.next()) {
Long pNtwkId = rs.getLong(1);
//insert provider
pstmt = conn.prepareStatement("INSERT INTO `cloud`.`physical_network_service_providers` " +
"(`physical_network_id`, `provider_name`, `state`, `vpn_service_provided`, `dhcp_service_provided`, " +
"`dns_service_provided`, `gateway_service_provided`, `firewall_service_provided`, `source_nat_service_provided`," +
" `load_balance_service_provided`, `static_nat_service_provided`, `port_forwarding_service_provided`," +
" `user_data_service_provided`, `security_group_service_provided`) " +
"VALUES (?, 'VpcVirtualRouter', 'Enabled', 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0)");
pstmt.setLong(1, pNtwkId);
pstmt.executeUpdate();
//get provider id
pstmt = conn.prepareStatement("SELECT id FROM `cloud`.`physical_network_service_providers` " +
"WHERE physical_network_id=? and provider_name='VpcVirtualRouter'");
pstmt.setLong(1, pNtwkId);
ResultSet rs1 = pstmt.executeQuery();
rs1.next();
long providerId = rs1.getLong(1);
//insert VR element
pstmt = conn.prepareStatement("INSERT INTO `cloud`.`virtual_router_providers` (`nsp_id`, `type`, `enabled`) " +
"VALUES (?, 'VPCVirtualRouter', 1)");
pstmt.setLong(1, providerId);
pstmt.executeUpdate();
s_logger.debug("Added VPC Virtual router provider for physical network id=" + pNtwkId);
}
} catch (SQLException e) {
throw new CloudRuntimeException("Unable add VPC physical network service provider ", e);
} finally {
try {
if (rs != null) {
rs.close();
}
if (pstmt != null) {
pstmt.close();
}
} catch (SQLException e) {
}
}
s_logger.debug("Done adding VPC physical network service providers to all physical networks");
}
private void updateRouterNetworkRef(Connection conn){
//Encrypt config params and change category to Hidden
s_logger.debug("Updating router network ref");
PreparedStatement pstmt = null;
ResultSet rs = null;
try {
pstmt = conn.prepareStatement("SELECT d.id, d.network_id FROM `cloud`.`domain_router` d, `cloud`.`vm_instance` v " +
"WHERE d.id=v.id AND v.removed is NULL");
rs = pstmt.executeQuery();
while (rs.next()) {
Long routerId = rs.getLong(1);
Long networkId = rs.getLong(2);
//get the network type
pstmt = conn.prepareStatement("SELECT guest_type from `cloud`.`networks` where id=?");
pstmt.setLong(1, networkId);
ResultSet rs1 = pstmt.executeQuery();
rs1.next();
String networkType = rs1.getString(1);
//insert the reference
pstmt = conn.prepareStatement("INSERT INTO `cloud`.`router_network_ref` (router_id, network_id, guest_type) " +
"VALUES (?, ?, ?)");
pstmt.setLong(1, routerId);
pstmt.setLong(2, networkId);
pstmt.setString(3, networkType);
pstmt.executeUpdate();
s_logger.debug("Added reference for router id=" + routerId + " and network id=" + networkId);
}
} catch (SQLException e) {
throw new CloudRuntimeException("Failed to update the router/network reference ", e);
} finally {
try {
if (rs != null) {
rs.close();
}
if (pstmt != null) {
pstmt.close();
}
} catch (SQLException e) {
}
}
s_logger.debug("Done updating router/network references");
}
private void fixForeignKeys(Connection conn) {
//Drop the keys (if exist)
List<String> keys = new ArrayList<String>();
keys.add("fk_ssh_keypair__account_id");
keys.add("fk_ssh_keypair__domain_id");
keys.add("fk_ssh_keypairs__account_id");
keys.add("fk_ssh_keypairs__domain_id");
DbUpgradeUtils.dropKeysIfExist(conn, "ssh_keypairs", keys, true);
keys = new ArrayList<String>();
keys.add("fk_ssh_keypair__account_id");
keys.add("fk_ssh_keypair__domain_id");
keys.add("fk_ssh_keypairs__account_id");
keys.add("fk_ssh_keypairs__domain_id");
DbUpgradeUtils.dropKeysIfExist(conn, "ssh_keypairs", keys, false);
//insert the keys anew
try {
PreparedStatement pstmt; pstmt = conn.prepareStatement("ALTER TABLE `cloud`.`ssh_keypairs` ADD " +
"CONSTRAINT `fk_ssh_keypair__account_id` FOREIGN KEY `fk_ssh_keypair__account_id` (`account_id`)" +
" REFERENCES `account` (`id`) ON DELETE CASCADE");
pstmt.executeUpdate();
pstmt.close();
} catch (SQLException e) {
throw new CloudRuntimeException("Unable to execute ssh_keypairs table update for adding account_id foreign key", e);
}
try {
PreparedStatement pstmt; pstmt = conn.prepareStatement("ALTER TABLE `cloud`.`ssh_keypairs` ADD CONSTRAINT" +
" `fk_ssh_keypair__domain_id` FOREIGN KEY `fk_ssh_keypair__domain_id` (`domain_id`) " +
"REFERENCES `domain` (`id`) ON DELETE CASCADE");
pstmt.executeUpdate();
pstmt.close();
} catch (SQLException e) {
throw new CloudRuntimeException("Unable to execute ssh_keypairs table update for adding domain_id foreign key", e);
}
}
}
| [ASFCS40]Upgrade to 4.0: host_details unique key addition needs IGNORE keyword to correct the duplicates
| server/src/com/cloud/upgrade/dao/Upgrade302to40.java | [ASFCS40]Upgrade to 4.0: host_details unique key addition needs IGNORE keyword to correct the duplicates | <ide><path>erver/src/com/cloud/upgrade/dao/Upgrade302to40.java
<ide> s_logger.debug("Unique key already exists on host_details - not adding new one");
<ide> }else{
<ide> //add the key
<del> PreparedStatement pstmtUpdate = conn.prepareStatement("ALTER TABLE `cloud`.`host_details` ADD CONSTRAINT UNIQUE KEY `uk_host_id_name` (`host_id`, `name`)");
<add> PreparedStatement pstmtUpdate = conn.prepareStatement("ALTER IGNORE TABLE `cloud`.`host_details` ADD CONSTRAINT UNIQUE KEY `uk_host_id_name` (`host_id`, `name`)");
<ide> pstmtUpdate.executeUpdate();
<ide> s_logger.debug("Unique key did not exist on host_details - added new one");
<ide> pstmtUpdate.close(); |
|
Java | apache-2.0 | cdd0f5d0f9915459fb781c9db232dcba1d6ff2a3 | 0 | StrategyObject/fop,argv-minus-one/fop,StrategyObject/fop,Distrotech/fop,Distrotech/fop,StrategyObject/fop,argv-minus-one/fop,argv-minus-one/fop,StrategyObject/fop,Distrotech/fop,argv-minus-one/fop,argv-minus-one/fop,StrategyObject/fop,Distrotech/fop | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* $Id$ */
package org.apache.fop.render.pdf;
import java.awt.Color;
import java.awt.Rectangle;
import java.awt.geom.AffineTransform;
import java.io.IOException;
import org.w3c.dom.Document;
import org.apache.batik.bridge.BridgeContext;
import org.apache.batik.bridge.GVTBuilder;
import org.apache.batik.dom.svg.SVGDOMImplementation;
import org.apache.batik.gvt.GraphicsNode;
import org.apache.batik.util.SVGConstants;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.xmlgraphics.image.loader.Image;
import org.apache.xmlgraphics.image.loader.ImageFlavor;
import org.apache.xmlgraphics.image.loader.impl.ImageXMLDOM;
import org.apache.xmlgraphics.util.UnitConv;
import org.apache.fop.apps.FOUserAgent;
import org.apache.fop.image.loader.batik.BatikImageFlavors;
import org.apache.fop.image.loader.batik.BatikUtil;
import org.apache.fop.render.ImageHandler;
import org.apache.fop.render.ImageHandlerUtil;
import org.apache.fop.render.RenderingContext;
import org.apache.fop.render.pdf.PDFLogicalStructureHandler.MarkedContentInfo;
import org.apache.fop.svg.PDFAElementBridge;
import org.apache.fop.svg.PDFBridgeContext;
import org.apache.fop.svg.PDFGraphics2D;
import org.apache.fop.svg.SVGEventProducer;
import org.apache.fop.svg.SVGUserAgent;
/**
* Image Handler implementation which handles SVG images.
*/
public class PDFImageHandlerSVG implements ImageHandler {
/** logging instance */
private static Log log = LogFactory.getLog(PDFImageHandlerSVG.class);
/** {@inheritDoc} */
public void handleImage(RenderingContext context, // CSOK: MethodLength
Image image, Rectangle pos)
throws IOException {
PDFRenderingContext pdfContext = (PDFRenderingContext)context;
PDFContentGenerator generator = pdfContext.getGenerator();
ImageXMLDOM imageSVG = (ImageXMLDOM)image;
FOUserAgent userAgent = context.getUserAgent();
final float deviceResolution = userAgent.getTargetResolution();
if (log.isDebugEnabled()) {
log.debug("Generating SVG at " + deviceResolution + "dpi.");
}
final float uaResolution = userAgent.getSourceResolution();
SVGUserAgent ua = new SVGUserAgent(userAgent, new AffineTransform());
GVTBuilder builder = new GVTBuilder();
//Controls whether text painted by Batik is generated using text or path operations
boolean strokeText = false;
//TODO connect with configuration elsewhere.
BridgeContext ctx = new PDFBridgeContext(ua,
(strokeText ? null : pdfContext.getFontInfo()),
userAgent.getFactory().getImageManager(),
userAgent.getImageSessionContext(),
new AffineTransform());
//Cloning SVG DOM as Batik attaches non-thread-safe facilities (like the CSS engine)
//to it.
Document clonedDoc = BatikUtil.cloneSVGDocument(imageSVG.getDocument());
GraphicsNode root;
try {
root = builder.build(ctx, clonedDoc);
builder = null;
} catch (Exception e) {
SVGEventProducer eventProducer = SVGEventProducer.Provider.get(
context.getUserAgent().getEventBroadcaster());
eventProducer.svgNotBuilt(this, e, image.getInfo().getOriginalURI());
return;
}
// get the 'width' and 'height' attributes of the SVG document
float w = image.getSize().getWidthMpt();
float h = image.getSize().getHeightMpt();
float sx = pos.width / w;
float sy = pos.height / h;
//Scaling and translation for the bounding box of the image
AffineTransform scaling = new AffineTransform(
sx, 0, 0, sy, pos.x / 1000f, pos.y / 1000f);
double sourceScale = UnitConv.IN2PT / uaResolution;
scaling.scale(sourceScale, sourceScale);
//Scale for higher resolution on-the-fly images from Batik
AffineTransform resolutionScaling = new AffineTransform();
double targetScale = uaResolution / deviceResolution;
resolutionScaling.scale(targetScale, targetScale);
resolutionScaling.scale(1.0 / sx, 1.0 / sy);
//Transformation matrix that establishes the local coordinate system for the SVG graphic
//in relation to the current coordinate system
AffineTransform imageTransform = new AffineTransform();
imageTransform.concatenate(scaling);
imageTransform.concatenate(resolutionScaling);
if (log.isTraceEnabled()) {
log.trace("nat size: " + w + "/" + h);
log.trace("req size: " + pos.width + "/" + pos.height);
log.trace("source res: " + uaResolution + ", targetRes: " + deviceResolution
+ " --> target scaling: " + targetScale);
log.trace(image.getSize());
log.trace("sx: " + sx + ", sy: " + sy);
log.trace("scaling: " + scaling);
log.trace("resolution scaling: " + resolutionScaling);
log.trace("image transform: " + resolutionScaling);
}
/*
* Clip to the svg area.
* Note: To have the svg overlay (under) a text area then use
* an fo:block-container
*/
if (log.isTraceEnabled()) {
generator.comment("SVG setup");
}
generator.saveGraphicsState();
if (context.getUserAgent().isAccessibilityEnabled()) {
MarkedContentInfo mci = pdfContext.getMarkedContentInfo();
generator.beginMarkedContentSequence(mci.tag, mci.mcid);
}
generator.updateColor(Color.black, false, null);
generator.updateColor(Color.black, true, null);
if (!scaling.isIdentity()) {
if (log.isTraceEnabled()) {
generator.comment("viewbox");
}
generator.add(CTMHelper.toPDFString(scaling, false) + " cm\n");
}
//SVGSVGElement svg = ((SVGDocument)doc).getRootElement();
PDFGraphics2D graphics = new PDFGraphics2D(true, pdfContext.getFontInfo(),
generator.getDocument(),
generator.getResourceContext(), pdfContext.getPage().referencePDF(),
"", 0);
graphics.setGraphicContext(new org.apache.xmlgraphics.java2d.GraphicContext());
if (!resolutionScaling.isIdentity()) {
if (log.isTraceEnabled()) {
generator.comment("resolution scaling for " + uaResolution
+ " -> " + deviceResolution);
}
generator.add(
CTMHelper.toPDFString(resolutionScaling, false) + " cm\n");
graphics.scale(
1.0 / resolutionScaling.getScaleX(),
1.0 / resolutionScaling.getScaleY());
}
if (log.isTraceEnabled()) {
generator.comment("SVG start");
}
//Save state and update coordinate system for the SVG image
generator.getState().save();
generator.getState().concatenate(imageTransform);
//Now that we have the complete transformation matrix for the image, we can update the
//transformation matrix for the AElementBridge.
PDFAElementBridge aBridge = (PDFAElementBridge)ctx.getBridge(
SVGDOMImplementation.SVG_NAMESPACE_URI, SVGConstants.SVG_A_TAG);
aBridge.getCurrentTransform().setTransform(generator.getState().getTransform());
graphics.setPaintingState(generator.getState());
graphics.setOutputStream(generator.getOutputStream());
try {
root.paint(graphics);
ctx.dispose();
generator.add(graphics.getString());
} catch (Exception e) {
SVGEventProducer eventProducer = SVGEventProducer.Provider.get(
context.getUserAgent().getEventBroadcaster());
eventProducer.svgRenderingError(this, e, image.getInfo().getOriginalURI());
}
generator.getState().restore();
if (context.getUserAgent().isAccessibilityEnabled()) {
generator.restoreGraphicsStateAccess();
} else {
generator.restoreGraphicsState();
}
if (log.isTraceEnabled()) {
generator.comment("SVG end");
}
}
/** {@inheritDoc} */
public int getPriority() {
return 400;
}
/** {@inheritDoc} */
public Class getSupportedImageClass() {
return ImageXMLDOM.class;
}
/** {@inheritDoc} */
public ImageFlavor[] getSupportedImageFlavors() {
return new ImageFlavor[] {
BatikImageFlavors.SVG_DOM
};
}
/** {@inheritDoc} */
public boolean isCompatible(RenderingContext targetContext, Image image) {
boolean supported = (image == null
|| (image instanceof ImageXMLDOM
&& image.getFlavor().isCompatible(BatikImageFlavors.SVG_DOM)))
&& targetContext instanceof PDFRenderingContext;
if (supported) {
String mode = (String)targetContext.getHint(ImageHandlerUtil.CONVERSION_MODE);
if (ImageHandlerUtil.isConversionModeBitmap(mode)) {
//Disabling this image handler automatically causes a bitmap to be generated
return false;
}
}
return supported;
}
}
| src/java/org/apache/fop/render/pdf/PDFImageHandlerSVG.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* $Id$ */
package org.apache.fop.render.pdf;
import java.awt.Color;
import java.awt.Rectangle;
import java.awt.geom.AffineTransform;
import java.io.IOException;
import org.w3c.dom.Document;
import org.apache.batik.bridge.BridgeContext;
import org.apache.batik.bridge.GVTBuilder;
import org.apache.batik.dom.svg.SVGDOMImplementation;
import org.apache.batik.gvt.GraphicsNode;
import org.apache.batik.util.SVGConstants;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.xmlgraphics.image.loader.Image;
import org.apache.xmlgraphics.image.loader.ImageFlavor;
import org.apache.xmlgraphics.image.loader.impl.ImageXMLDOM;
import org.apache.xmlgraphics.util.UnitConv;
import org.apache.fop.apps.FOUserAgent;
import org.apache.fop.image.loader.batik.BatikImageFlavors;
import org.apache.fop.image.loader.batik.BatikUtil;
import org.apache.fop.render.ImageHandler;
import org.apache.fop.render.ImageHandlerUtil;
import org.apache.fop.render.RenderingContext;
import org.apache.fop.render.pdf.PDFLogicalStructureHandler.MarkedContentInfo;
import org.apache.fop.svg.PDFAElementBridge;
import org.apache.fop.svg.PDFBridgeContext;
import org.apache.fop.svg.PDFGraphics2D;
import org.apache.fop.svg.SVGEventProducer;
import org.apache.fop.svg.SVGUserAgent;
/**
* Image Handler implementation which handles SVG images.
*/
public class PDFImageHandlerSVG implements ImageHandler {
/** logging instance */
private static Log log = LogFactory.getLog(PDFImageHandlerSVG.class);
/** {@inheritDoc} */
public void handleImage(RenderingContext context, // CSOK: MethodLength
Image image, Rectangle pos)
throws IOException {
PDFRenderingContext pdfContext = (PDFRenderingContext)context;
PDFContentGenerator generator = pdfContext.getGenerator();
ImageXMLDOM imageSVG = (ImageXMLDOM)image;
FOUserAgent userAgent = context.getUserAgent();
final float deviceResolution = userAgent.getTargetResolution();
if (log.isDebugEnabled()) {
log.debug("Generating SVG at " + deviceResolution + "dpi.");
}
final float uaResolution = userAgent.getSourceResolution();
SVGUserAgent ua = new SVGUserAgent(userAgent, new AffineTransform());
GVTBuilder builder = new GVTBuilder();
//Controls whether text painted by Batik is generated using text or path operations
boolean strokeText = false;
//TODO connect with configuration elsewhere.
BridgeContext ctx = new PDFBridgeContext(ua,
(strokeText ? null : pdfContext.getFontInfo()),
userAgent.getFactory().getImageManager(),
userAgent.getImageSessionContext(),
new AffineTransform());
//Cloning SVG DOM as Batik attaches non-thread-safe facilities (like the CSS engine)
//to it.
Document clonedDoc = BatikUtil.cloneSVGDocument(imageSVG.getDocument());
GraphicsNode root;
try {
root = builder.build(ctx, clonedDoc);
builder = null;
} catch (Exception e) {
SVGEventProducer eventProducer = SVGEventProducer.Provider.get(
context.getUserAgent().getEventBroadcaster());
eventProducer.svgNotBuilt(this, e, image.getInfo().getOriginalURI());
return;
}
// get the 'width' and 'height' attributes of the SVG document
float w = image.getSize().getWidthMpt();
float h = image.getSize().getHeightMpt();
float sx = pos.width / w;
float sy = pos.height / h;
//Scaling and translation for the bounding box of the image
AffineTransform scaling = new AffineTransform(
sx, 0, 0, sy, pos.x / 1000f, pos.y / 1000f);
double sourceScale = UnitConv.IN2PT / uaResolution;
scaling.scale(sourceScale, sourceScale);
//Scale for higher resolution on-the-fly images from Batik
AffineTransform resolutionScaling = new AffineTransform();
double targetScale = uaResolution / deviceResolution;
resolutionScaling.scale(targetScale, targetScale);
resolutionScaling.scale(1.0 / sx, 1.0 / sy);
//Transformation matrix that establishes the local coordinate system for the SVG graphic
//in relation to the current coordinate system
AffineTransform imageTransform = new AffineTransform();
imageTransform.concatenate(scaling);
imageTransform.concatenate(resolutionScaling);
if (log.isTraceEnabled()) {
log.trace("nat size: " + w + "/" + h);
log.trace("req size: " + pos.width + "/" + pos.height);
log.trace("source res: " + uaResolution + ", targetRes: " + deviceResolution
+ " --> target scaling: " + targetScale);
log.trace(image.getSize());
log.trace("sx: " + sx + ", sy: " + sy);
log.trace("scaling: " + scaling);
log.trace("resolution scaling: " + resolutionScaling);
log.trace("image transform: " + resolutionScaling);
}
/*
* Clip to the svg area.
* Note: To have the svg overlay (under) a text area then use
* an fo:block-container
*/
if (log.isTraceEnabled()) {
generator.comment("SVG setup");
}
generator.saveGraphicsState();
if (context.getUserAgent().isAccessibilityEnabled()) {
MarkedContentInfo mci = pdfContext.getMarkedContentInfo();
generator.beginMarkedContentSequence(mci.tag, mci.mcid);
}
generator.setColor(Color.black, false);
generator.setColor(Color.black, true);
if (!scaling.isIdentity()) {
if (log.isTraceEnabled()) {
generator.comment("viewbox");
}
generator.add(CTMHelper.toPDFString(scaling, false) + " cm\n");
}
//SVGSVGElement svg = ((SVGDocument)doc).getRootElement();
PDFGraphics2D graphics = new PDFGraphics2D(true, pdfContext.getFontInfo(),
generator.getDocument(),
generator.getResourceContext(), pdfContext.getPage().referencePDF(),
"", 0);
graphics.setGraphicContext(new org.apache.xmlgraphics.java2d.GraphicContext());
if (!resolutionScaling.isIdentity()) {
if (log.isTraceEnabled()) {
generator.comment("resolution scaling for " + uaResolution
+ " -> " + deviceResolution);
}
generator.add(
CTMHelper.toPDFString(resolutionScaling, false) + " cm\n");
graphics.scale(
1.0 / resolutionScaling.getScaleX(),
1.0 / resolutionScaling.getScaleY());
}
if (log.isTraceEnabled()) {
generator.comment("SVG start");
}
//Save state and update coordinate system for the SVG image
generator.getState().save();
generator.getState().concatenate(imageTransform);
//Now that we have the complete transformation matrix for the image, we can update the
//transformation matrix for the AElementBridge.
PDFAElementBridge aBridge = (PDFAElementBridge)ctx.getBridge(
SVGDOMImplementation.SVG_NAMESPACE_URI, SVGConstants.SVG_A_TAG);
aBridge.getCurrentTransform().setTransform(generator.getState().getTransform());
graphics.setPaintingState(generator.getState());
graphics.setOutputStream(generator.getOutputStream());
try {
root.paint(graphics);
ctx.dispose();
generator.add(graphics.getString());
} catch (Exception e) {
SVGEventProducer eventProducer = SVGEventProducer.Provider.get(
context.getUserAgent().getEventBroadcaster());
eventProducer.svgRenderingError(this, e, image.getInfo().getOriginalURI());
}
generator.getState().restore();
if (context.getUserAgent().isAccessibilityEnabled()) {
generator.restoreGraphicsStateAccess();
} else {
generator.restoreGraphicsState();
}
if (log.isTraceEnabled()) {
generator.comment("SVG end");
}
}
/** {@inheritDoc} */
public int getPriority() {
return 400;
}
/** {@inheritDoc} */
public Class getSupportedImageClass() {
return ImageXMLDOM.class;
}
/** {@inheritDoc} */
public ImageFlavor[] getSupportedImageFlavors() {
return new ImageFlavor[] {
BatikImageFlavors.SVG_DOM
};
}
/** {@inheritDoc} */
public boolean isCompatible(RenderingContext targetContext, Image image) {
boolean supported = (image == null
|| (image instanceof ImageXMLDOM
&& image.getFlavor().isCompatible(BatikImageFlavors.SVG_DOM)))
&& targetContext instanceof PDFRenderingContext;
if (supported) {
String mode = (String)targetContext.getHint(ImageHandlerUtil.CONVERSION_MODE);
if (ImageHandlerUtil.isConversionModeBitmap(mode)) {
//Disabling this image handler automatically causes a bitmap to be generated
return false;
}
}
return supported;
}
}
| Bugzilla 52657: instream foreign object (svg) coloration incorrect
Submitted by Glenn Adams
git-svn-id: 102839466c3b40dd9c7e25c0a1a6d26afc40150a@1291292 13f79535-47bb-0310-9956-ffa450edef68
| src/java/org/apache/fop/render/pdf/PDFImageHandlerSVG.java | Bugzilla 52657: instream foreign object (svg) coloration incorrect Submitted by Glenn Adams | <ide><path>rc/java/org/apache/fop/render/pdf/PDFImageHandlerSVG.java
<ide> MarkedContentInfo mci = pdfContext.getMarkedContentInfo();
<ide> generator.beginMarkedContentSequence(mci.tag, mci.mcid);
<ide> }
<del> generator.setColor(Color.black, false);
<del> generator.setColor(Color.black, true);
<add> generator.updateColor(Color.black, false, null);
<add> generator.updateColor(Color.black, true, null);
<ide>
<ide> if (!scaling.isIdentity()) {
<ide> if (log.isTraceEnabled()) { |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.